From f3f4807c0a1b5f51a3848977de7a5b8719534446 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Sun, 20 Nov 2022 22:31:04 -0500 Subject: [PATCH 01/11] Update ffmpeg, preserving existing cbs build pattern Include new files --- CMakeLists.txt | 31 + src/cbs.cpp | 2 +- src/config.cpp | 12 +- third-party/cbs/CMakeLists.txt | 21 +- third-party/cbs/avcodec.h | 3170 ++++++++++++++++ third-party/cbs/bytestream.h | 495 +-- third-party/cbs/cbs.c | 1637 +++++---- third-party/cbs/cbs_av1.c | 2210 +++++------ third-party/cbs/cbs_av1_syntax_template.c | 3482 +++++++++--------- third-party/cbs/cbs_h2645.c | 2778 +++++++------- third-party/cbs/cbs_h264_syntax_template.c | 2045 ++++++----- third-party/cbs/cbs_h265_syntax_template.c | 3679 ++++++++++--------- third-party/cbs/cbs_internal.h | 280 +- third-party/cbs/cbs_jpeg.c | 711 ++-- third-party/cbs/cbs_jpeg_syntax_template.c | 231 +- third-party/cbs/cbs_mpeg2.c | 669 ++-- third-party/cbs/cbs_mpeg2_syntax_template.c | 620 ++-- third-party/cbs/cbs_sei.c | 585 +-- third-party/cbs/cbs_sei_syntax_template.c | 438 +-- third-party/cbs/cbs_vp9.c | 1029 +++--- third-party/cbs/cbs_vp9_syntax_template.c | 627 ++-- third-party/cbs/codec.h | 387 ++ third-party/cbs/defs.h | 51 - third-party/cbs/get_bits.h | 831 ----- third-party/cbs/h2645_parse.c | 855 ++--- third-party/cbs/h264_levels.c | 124 + third-party/cbs/h264_ps.h | 220 +- third-party/cbs/h264_sei.h | 190 +- third-party/cbs/hevc_sei.h | 147 +- third-party/cbs/include/cbs/attributes.h | 173 + third-party/cbs/include/cbs/av1.h | 229 +- third-party/cbs/include/cbs/cbs.h | 169 +- third-party/cbs/include/cbs/cbs_av1.h | 732 ++-- third-party/cbs/include/cbs/cbs_bsf.h | 124 +- third-party/cbs/include/cbs/cbs_h264.h | 579 +-- third-party/cbs/include/cbs/cbs_h2645.h | 14 +- third-party/cbs/include/cbs/cbs_h265.h | 1157 +++--- third-party/cbs/include/cbs/cbs_jpeg.h | 116 +- third-party/cbs/include/cbs/cbs_mpeg2.h | 274 +- third-party/cbs/include/cbs/cbs_sei.h | 153 +- 
third-party/cbs/include/cbs/cbs_vp9.h | 264 +- third-party/cbs/include/cbs/codec_desc.h | 128 + third-party/cbs/include/cbs/codec_id.h | 634 ++++ third-party/cbs/include/cbs/codec_par.h | 246 ++ third-party/cbs/{ => include/cbs}/config.h | 1 + third-party/cbs/include/cbs/defs.h | 170 + third-party/cbs/include/cbs/get_bits.h | 858 +++++ third-party/cbs/include/cbs/h264.h | 134 +- third-party/cbs/include/cbs/h2645_parse.h | 125 +- third-party/cbs/include/cbs/h264_levels.h | 51 + third-party/cbs/include/cbs/hevc.h | 226 +- third-party/cbs/include/cbs/mathops.h | 245 ++ third-party/cbs/include/cbs/packet.h | 731 ++++ third-party/cbs/include/cbs/sei.h | 216 +- third-party/cbs/include/cbs/video_levels.h | 112 - third-party/cbs/{ => include/cbs}/vlc.h | 118 +- third-party/cbs/intmath.h | 120 +- third-party/cbs/log2_tab.c | 33 + third-party/cbs/mathops.h | 243 -- third-party/cbs/put_bits.h | 414 ++- third-party/cbs/startcode.h | 36 + third-party/cbs/version_major.h | 54 + third-party/cbs/video_levels.c | 349 -- 63 files changed, 21335 insertions(+), 15450 deletions(-) create mode 100644 third-party/cbs/avcodec.h create mode 100644 third-party/cbs/codec.h delete mode 100644 third-party/cbs/defs.h delete mode 100644 third-party/cbs/get_bits.h create mode 100644 third-party/cbs/h264_levels.c create mode 100644 third-party/cbs/include/cbs/attributes.h create mode 100644 third-party/cbs/include/cbs/codec_desc.h create mode 100644 third-party/cbs/include/cbs/codec_id.h create mode 100644 third-party/cbs/include/cbs/codec_par.h rename third-party/cbs/{ => include/cbs}/config.h (96%) create mode 100644 third-party/cbs/include/cbs/defs.h create mode 100644 third-party/cbs/include/cbs/get_bits.h create mode 100644 third-party/cbs/include/cbs/h264_levels.h create mode 100644 third-party/cbs/include/cbs/mathops.h create mode 100644 third-party/cbs/include/cbs/packet.h delete mode 100644 third-party/cbs/include/cbs/video_levels.h rename third-party/cbs/{ => include/cbs}/vlc.h (52%) 
create mode 100644 third-party/cbs/log2_tab.c delete mode 100644 third-party/cbs/mathops.h create mode 100644 third-party/cbs/startcode.h create mode 100644 third-party/cbs/version_major.h delete mode 100644 third-party/cbs/video_levels.c diff --git a/CMakeLists.txt b/CMakeLists.txt index 95718bed29a..a0f84a49de5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -214,6 +214,34 @@ else() set(WAYLAND_FOUND OFF) endif() + file( + DOWNLOAD "https://github.com/LizardByte/ffmpeg-prebuilt/releases/download/v1/pre-compiled-debian.zip" "${CMAKE_CURRENT_BINARY_DIR}/pre-compiled.zip" + TIMEOUT 60 + EXPECTED_HASH SHA256=baa26844f4bf25bad4e4de6e74026f3f083edb018e950bc09983210bb46a6a7d) + + file(ARCHIVE_EXTRACT + INPUT "${CMAKE_CURRENT_BINARY_DIR}/pre-compiled.zip" + DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/pre-compiled) + + if(NOT DEFINED SUNSHINE_PREPARED_BINARIES) + set(SUNSHINE_PREPARED_BINARIES "${CMAKE_CURRENT_BINARY_DIR}/pre-compiled") + endif() + + set(FFMPEG_INCLUDE_DIRS + ${SUNSHINE_PREPARED_BINARIES}/include) + set(FFMPEG_LIBRARIES + ${SUNSHINE_PREPARED_BINARIES}/lib/libavcodec.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libavdevice.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libavfilter.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libavformat.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libavutil.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libpostproc.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libswresample.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libswscale.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libx264.a + ${SUNSHINE_PREPARED_BINARIES}/lib/libx265.a + z lzma) + if(X11_FOUND) add_compile_definitions(SUNSHINE_BUILD_X11) include_directories(${X11_INCLUDE_DIR}) diff --git a/src/cbs.cpp b/src/cbs.cpp index d50bd1951fb..6844d91aa57 100644 --- a/src/cbs.cpp +++ b/src/cbs.cpp @@ -1,7 +1,7 @@ extern "C" { #include #include -#include +#include #include #include } diff --git a/src/config.cpp b/src/config.cpp index
b7f483ac6f3..b888a5937c5 100644 --- a/src/config.cpp +++ b/src/config.cpp @@ -41,7 +41,7 @@ enum preset_e : int { }; enum rc_e : int { - constqp = 0x0, /**< Constant QP mode */ + cqp = 0x0, /**< Constant QP mode */ vbr = 0x1, /**< Variable bitrate mode */ cbr = 0x2, /**< Constant bitrate mode */ cbr_ld_hq = 0x8, /**< low-delay CBR, high quality */ @@ -76,7 +76,7 @@ std::optional preset_from_view(const std::string_view &preset) { std::optional rc_from_view(const std::string_view &rc) { #define _CONVERT_(x) \ if(rc == #x##sv) return x - _CONVERT_(constqp); + _CONVERT_(cqp); _CONVERT_(vbr); _CONVERT_(cbr); _CONVERT_(cbr_hq); @@ -103,14 +103,14 @@ enum quality_e : int { }; enum class rc_hevc_e : int { - constqp, /**< Constant QP mode */ + cqp, /**< Constant QP mode */ vbr_latency, /**< Latency Constrained Variable Bitrate */ vbr_peak, /**< Peak Constrained Variable Bitrate */ cbr, /**< Constant bitrate mode */ }; enum class rc_h264_e : int { - constqp, /**< Constant QP mode */ + cqp, /**< Constant QP mode */ cbr, /**< Constant bitrate mode */ vbr_peak, /**< Peak Constrained Variable Bitrate */ vbr_latency, /**< Latency Constrained Variable Bitrate */ @@ -135,7 +135,7 @@ std::optional quality_from_view(const std::string_view &quality) { std::optional rc_h264_from_view(const std::string_view &rc) { #define _CONVERT_(x) \ if(rc == #x##sv) return (int)rc_h264_e::x - _CONVERT_(constqp); + _CONVERT_(cqp); _CONVERT_(vbr_latency); _CONVERT_(vbr_peak); _CONVERT_(cbr); @@ -146,7 +146,7 @@ std::optional rc_h264_from_view(const std::string_view &rc) { std::optional rc_hevc_from_view(const std::string_view &rc) { #define _CONVERT_(x) \ if(rc == #x##sv) return (int)rc_hevc_e::x - _CONVERT_(constqp); + _CONVERT_(cqp); _CONVERT_(vbr_latency); _CONVERT_(vbr_peak); _CONVERT_(cbr); diff --git a/third-party/cbs/CMakeLists.txt b/third-party/cbs/CMakeLists.txt index b30a8031341..1287b4c4b2c 100644 --- a/third-party/cbs/CMakeLists.txt +++ b/third-party/cbs/CMakeLists.txt @@ -14,11 +14,20 @@ 
include/cbs/cbs_jpeg.h include/cbs/cbs_mpeg2.h include/cbs/cbs_sei.h include/cbs/cbs_vp9.h +include/cbs/codec_desc.h +include/cbs/codec_id.h +include/cbs/codec_par.h +include/cbs/config.h +include/cbs/defs.h +include/cbs/get_bits.h +include/cbs/h264_levels.h include/cbs/h2645_parse.h include/cbs/h264.h include/cbs/hevc.h +include/cbs/mathops.h +include/cbs/packet.h include/cbs/sei.h -include/cbs/video_levels.h +include/cbs/vlc.h cbs.c cbs_h2645.c @@ -27,21 +36,19 @@ cbs_vp9.c cbs_mpeg2.c cbs_jpeg.c cbs_sei.c +h264_levels.c h2645_parse.c -video_levels.c +avcodec.h bytestream.h cbs_internal.h -defs.h -get_bits.h +codec.h h264_ps.h h264_sei.h hevc_sei.h intmath.h -mathops.h put_bits.h -vlc.h -config.h +version_major.h ) include_directories(include) diff --git a/third-party/cbs/avcodec.h b/third-party/cbs/avcodec.h new file mode 100644 index 00000000000..c4883ddce58 --- /dev/null +++ b/third-party/cbs/avcodec.h @@ -0,0 +1,3170 @@ +/* + * copyright (c) 2001 Fabrice Bellard + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_AVCODEC_H +#define AVCODEC_AVCODEC_H + +/** + * @file + * @ingroup libavc + * Libavcodec external API header + */ + +#include "libavutil/samplefmt.h" +#include "libavutil/attributes.h" +#include "libavutil/avutil.h" +#include "libavutil/buffer.h" +#include "libavutil/dict.h" +#include "libavutil/frame.h" +#include "libavutil/log.h" +#include "libavutil/pixfmt.h" +#include "libavutil/rational.h" + +// [manual] Changed include path +#include "cbs/codec.h" +#include "cbs/codec_desc.h" +#include "cbs/codec_par.h" +#include "cbs/codec_id.h" +#include "cbs/defs.h" +#include "cbs/packet.h" +#include "version_major.h" +#ifndef HAVE_AV_CONFIG_H +/* When included as part of the ffmpeg build, only include the major version + * to avoid unnecessary rebuilds. When included externally, keep including + * the full version information. */ +#include "version.h" +#endif + +/** + * @defgroup libavc libavcodec + * Encoding/Decoding Library + * + * @{ + * + * @defgroup lavc_decoding Decoding + * @{ + * @} + * + * @defgroup lavc_encoding Encoding + * @{ + * @} + * + * @defgroup lavc_codec Codecs + * @{ + * @defgroup lavc_codec_native Native Codecs + * @{ + * @} + * @defgroup lavc_codec_wrappers External library wrappers + * @{ + * @} + * @defgroup lavc_codec_hwaccel Hardware Accelerators bridge + * @{ + * @} + * @} + * @defgroup lavc_internal Internal + * @{ + * @} + * @} + */ + +/** + * @ingroup libavc + * @defgroup lavc_encdec send/receive encoding and decoding API overview + * @{ + * + * The avcodec_send_packet()/avcodec_receive_frame()/avcodec_send_frame()/ + * avcodec_receive_packet() functions provide an encode/decode API, which + * decouples input and output. 
+ * + * The API is very similar for encoding/decoding and audio/video, and works as + * follows: + * - Set up and open the AVCodecContext as usual. + * - Send valid input: + * - For decoding, call avcodec_send_packet() to give the decoder raw + * compressed data in an AVPacket. + * - For encoding, call avcodec_send_frame() to give the encoder an AVFrame + * containing uncompressed audio or video. + * + * In both cases, it is recommended that AVPackets and AVFrames are + * refcounted, or libavcodec might have to copy the input data. (libavformat + * always returns refcounted AVPackets, and av_frame_get_buffer() allocates + * refcounted AVFrames.) + * - Receive output in a loop. Periodically call one of the avcodec_receive_*() + * functions and process their output: + * - For decoding, call avcodec_receive_frame(). On success, it will return + * an AVFrame containing uncompressed audio or video data. + * - For encoding, call avcodec_receive_packet(). On success, it will return + * an AVPacket with a compressed frame. + * + * Repeat this call until it returns AVERROR(EAGAIN) or an error. The + * AVERROR(EAGAIN) return value means that new input data is required to + * return new output. In this case, continue with sending input. For each + * input frame/packet, the codec will typically return 1 output frame/packet, + * but it can also be 0 or more than 1. + * + * At the beginning of decoding or encoding, the codec might accept multiple + * input frames/packets without returning a frame, until its internal buffers + * are filled. This situation is handled transparently if you follow the steps + * outlined above. + * + * In theory, sending input can result in EAGAIN - this should happen only if + * not all output was received. You can use this to structure alternative decode + * or encode loops other than the one suggested above. For example, you could + * try sending new input on each iteration, and try to receive output if that + * returns EAGAIN. 
+ * + * End of stream situations. These require "flushing" (aka draining) the codec, + * as the codec might buffer multiple frames or packets internally for + * performance or out of necessity (consider B-frames). + * This is handled as follows: + * - Instead of valid input, send NULL to the avcodec_send_packet() (decoding) + * or avcodec_send_frame() (encoding) functions. This will enter draining + * mode. + * - Call avcodec_receive_frame() (decoding) or avcodec_receive_packet() + * (encoding) in a loop until AVERROR_EOF is returned. The functions will + * not return AVERROR(EAGAIN), unless you forgot to enter draining mode. + * - Before decoding can be resumed again, the codec has to be reset with + * avcodec_flush_buffers(). + * + * Using the API as outlined above is highly recommended. But it is also + * possible to call functions outside of this rigid schema. For example, you can + * call avcodec_send_packet() repeatedly without calling + * avcodec_receive_frame(). In this case, avcodec_send_packet() will succeed + * until the codec's internal buffer has been filled up (which is typically of + * size 1 per output frame, after initial input), and then reject input with + * AVERROR(EAGAIN). Once it starts rejecting input, you have no choice but to + * read at least some output. + * + * Not all codecs will follow a rigid and predictable dataflow; the only + * guarantee is that an AVERROR(EAGAIN) return value on a send/receive call on + * one end implies that a receive/send call on the other end will succeed, or + * at least will not fail with AVERROR(EAGAIN). In general, no codec will + * permit unlimited buffering of input or output. + * + * A codec is not allowed to return AVERROR(EAGAIN) for both sending and receiving. This + * would be an invalid state, which could put the codec user into an endless + * loop. 
The API has no concept of time either: it cannot happen that trying to + * do avcodec_send_packet() results in AVERROR(EAGAIN), but a repeated call 1 second + * later accepts the packet (with no other receive/flush API calls involved). + * The API is a strict state machine, and the passage of time is not supposed + * to influence it. Some timing-dependent behavior might still be deemed + * acceptable in certain cases. But it must never result in both send/receive + * returning EAGAIN at the same time at any point. It must also absolutely be + * avoided that the current state is "unstable" and can "flip-flop" between + * the send/receive APIs allowing progress. For example, it's not allowed that + * the codec randomly decides that it actually wants to consume a packet now + * instead of returning a frame, after it just returned AVERROR(EAGAIN) on an + * avcodec_send_packet() call. + * @} + */ + +/** + * @defgroup lavc_core Core functions/structures. + * @ingroup libavc + * + * Basic definitions, functions for querying libavcodec capabilities, + * allocating core structures, etc. + * @{ + */ + +/** + * @ingroup lavc_encoding + * minimum encoding buffer size + * Used to avoid some checks during header writing. + */ +#define AV_INPUT_BUFFER_MIN_SIZE 16384 + +/** + * @ingroup lavc_encoding + */ +typedef struct RcOverride{ + int start_frame; + int end_frame; + int qscale; // If this is 0 then quality_factor will be used instead. + float quality_factor; +} RcOverride; + +/* encoding support + These flags can be passed in AVCodecContext.flags before initialization. + Note: Not everything is supported yet. +*/ + +/** + * Allow decoders to produce frames with data planes that are not aligned + * to CPU requirements (e.g. due to cropping). + */ +#define AV_CODEC_FLAG_UNALIGNED (1 << 0) +/** + * Use fixed qscale. + */ +#define AV_CODEC_FLAG_QSCALE (1 << 1) +/** + * 4 MV per MB allowed / advanced prediction for H.263. 
+ */ +#define AV_CODEC_FLAG_4MV (1 << 2) +/** + * Output even those frames that might be corrupted. + */ +#define AV_CODEC_FLAG_OUTPUT_CORRUPT (1 << 3) +/** + * Use qpel MC. + */ +#define AV_CODEC_FLAG_QPEL (1 << 4) +/** + * Don't output frames whose parameters differ from first + * decoded frame in stream. + */ +#define AV_CODEC_FLAG_DROPCHANGED (1 << 5) +/** + * Use internal 2pass ratecontrol in first pass mode. + */ +#define AV_CODEC_FLAG_PASS1 (1 << 9) +/** + * Use internal 2pass ratecontrol in second pass mode. + */ +#define AV_CODEC_FLAG_PASS2 (1 << 10) +/** + * loop filter. + */ +#define AV_CODEC_FLAG_LOOP_FILTER (1 << 11) +/** + * Only decode/encode grayscale. + */ +#define AV_CODEC_FLAG_GRAY (1 << 13) +/** + * error[?] variables will be set during encoding. + */ +#define AV_CODEC_FLAG_PSNR (1 << 15) +#if FF_API_FLAG_TRUNCATED +/** + * Input bitstream might be truncated at a random location + * instead of only at frame boundaries. + * + * @deprecated use codec parsers for packetizing input + */ +#define AV_CODEC_FLAG_TRUNCATED (1 << 16) +#endif +/** + * Use interlaced DCT. + */ +#define AV_CODEC_FLAG_INTERLACED_DCT (1 << 18) +/** + * Force low delay. + */ +#define AV_CODEC_FLAG_LOW_DELAY (1 << 19) +/** + * Place global headers in extradata instead of every keyframe. + */ +#define AV_CODEC_FLAG_GLOBAL_HEADER (1 << 22) +/** + * Use only bitexact stuff (except (I)DCT). + */ +#define AV_CODEC_FLAG_BITEXACT (1 << 23) +/* Fx : Flag for H.263+ extra options */ +/** + * H.263 advanced intra coding / MPEG-4 AC prediction + */ +#define AV_CODEC_FLAG_AC_PRED (1 << 24) +/** + * interlaced motion estimation + */ +#define AV_CODEC_FLAG_INTERLACED_ME (1 << 29) +#define AV_CODEC_FLAG_CLOSED_GOP (1U << 31) + +/** + * Allow non spec compliant speedup tricks. + */ +#define AV_CODEC_FLAG2_FAST (1 << 0) +/** + * Skip bitstream encoding. + */ +#define AV_CODEC_FLAG2_NO_OUTPUT (1 << 2) +/** + * Place global headers at every keyframe instead of in extradata. 
+ */ +#define AV_CODEC_FLAG2_LOCAL_HEADER (1 << 3) + +/** + * timecode is in drop frame format. DEPRECATED!!!! + */ +#define AV_CODEC_FLAG2_DROP_FRAME_TIMECODE (1 << 13) + +/** + * Input bitstream might be truncated at a packet boundaries + * instead of only at frame boundaries. + */ +#define AV_CODEC_FLAG2_CHUNKS (1 << 15) +/** + * Discard cropping information from SPS. + */ +#define AV_CODEC_FLAG2_IGNORE_CROP (1 << 16) + +/** + * Show all frames before the first keyframe + */ +#define AV_CODEC_FLAG2_SHOW_ALL (1 << 22) +/** + * Export motion vectors through frame side data + */ +#define AV_CODEC_FLAG2_EXPORT_MVS (1 << 28) +/** + * Do not skip samples and export skip information as frame side data + */ +#define AV_CODEC_FLAG2_SKIP_MANUAL (1 << 29) +/** + * Do not reset ASS ReadOrder field on flush (subtitles decoding) + */ +#define AV_CODEC_FLAG2_RO_FLUSH_NOOP (1 << 30) + +/* Unsupported options : + * Syntax Arithmetic coding (SAC) + * Reference Picture Selection + * Independent Segment Decoding */ +/* /Fx */ +/* codec capabilities */ + +/* Exported side data. + These flags can be passed in AVCodecContext.export_side_data before initialization. +*/ +/** + * Export motion vectors through frame side data + */ +#define AV_CODEC_EXPORT_DATA_MVS (1 << 0) +/** + * Export encoder Producer Reference Time through packet side data + */ +#define AV_CODEC_EXPORT_DATA_PRFT (1 << 1) +/** + * Decoding only. + * Export the AVVideoEncParams structure through frame side data. + */ +#define AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS (1 << 2) +/** + * Decoding only. + * Do not apply film grain, export it instead. + */ +#define AV_CODEC_EXPORT_DATA_FILM_GRAIN (1 << 3) + +/** + * The decoder will keep a reference to the frame and may reuse it later. + */ +#define AV_GET_BUFFER_FLAG_REF (1 << 0) + +/** + * The encoder will keep a reference to the packet and may reuse it later. 
+ */ +#define AV_GET_ENCODE_BUFFER_FLAG_REF (1 << 0) + +struct AVCodecInternal; + +/** + * main external API structure. + * New fields can be added to the end with minor version bumps. + * Removal, reordering and changes to existing fields require a major + * version bump. + * You can use AVOptions (av_opt* / av_set/get*()) to access these fields from user + * applications. + * The name string for AVOptions options matches the associated command line + * parameter name and can be found in libavcodec/options_table.h + * The AVOption/command line parameter names differ in some cases from the C + * structure field names for historic reasons or brevity. + * sizeof(AVCodecContext) must not be used outside libav*. + */ +typedef struct AVCodecContext { + /** + * information on struct for av_log + * - set by avcodec_alloc_context3 + */ + const AVClass *av_class; + int log_level_offset; + + enum AVMediaType codec_type; /* see AVMEDIA_TYPE_xxx */ + const struct AVCodec *codec; + enum AVCodecID codec_id; /* see AV_CODEC_ID_xxx */ + + /** + * fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A'). + * This is used to work around some encoder bugs. + * A demuxer should set this to what is stored in the field used to identify the codec. + * If there are multiple such fields in a container then the demuxer should choose the one + * which maximizes the information about the used codec. + * If the codec tag field in a container is larger than 32 bits then the demuxer should + * remap the longer ID to 32 bits with a table or other structure. Alternatively a new + * extra_codec_tag + size could be added but for this a clear advantage must be demonstrated + * first. + * - encoding: Set by user, if not then the default based on codec_id will be used. + * - decoding: Set by user, will be converted to uppercase by libavcodec during init. + */ + unsigned int codec_tag; + + void *priv_data; + + /** + * Private context used for internal data. 
+ * + * Unlike priv_data, this is not codec-specific. It is used in general + * libavcodec functions. + */ + struct AVCodecInternal *internal; + + /** + * Private data of the user, can be used to carry app specific stuff. + * - encoding: Set by user. + * - decoding: Set by user. + */ + void *opaque; + + /** + * the average bitrate + * - encoding: Set by user; unused for constant quantizer encoding. + * - decoding: Set by user, may be overwritten by libavcodec + * if this info is available in the stream + */ + int64_t bit_rate; + + /** + * number of bits the bitstream is allowed to diverge from the reference. + * the reference can be CBR (for CBR pass1) or VBR (for pass2) + * - encoding: Set by user; unused for constant quantizer encoding. + * - decoding: unused + */ + int bit_rate_tolerance; + + /** + * Global quality for codecs which cannot change it per frame. + * This should be proportional to MPEG-1/2/4 qscale. + * - encoding: Set by user. + * - decoding: unused + */ + int global_quality; + + /** + * - encoding: Set by user. + * - decoding: unused + */ + int compression_level; +#define FF_COMPRESSION_DEFAULT -1 + + /** + * AV_CODEC_FLAG_*. + * - encoding: Set by user. + * - decoding: Set by user. + */ + int flags; + + /** + * AV_CODEC_FLAG2_* + * - encoding: Set by user. + * - decoding: Set by user. + */ + int flags2; + + /** + * some codecs need / can use extradata like Huffman tables. + * MJPEG: Huffman tables + * rv10: additional flags + * MPEG-4: global headers (they can be in the bitstream or here) + * The allocated memory should be AV_INPUT_BUFFER_PADDING_SIZE bytes larger + * than extradata_size to avoid problems if it is read with the bitstream reader. + * The bytewise contents of extradata must not depend on the architecture or CPU endianness. + * Must be allocated with the av_malloc() family of functions. + * - encoding: Set/allocated/freed by libavcodec. + * - decoding: Set/allocated/freed by user. 
+ */ + uint8_t *extradata; + int extradata_size; + + /** + * This is the fundamental unit of time (in seconds) in terms + * of which frame timestamps are represented. For fixed-fps content, + * timebase should be 1/framerate and timestamp increments should be + * identically 1. + * This often, but not always is the inverse of the frame rate or field rate + * for video. 1/time_base is not the average frame rate if the frame rate is not + * constant. + * + * Like containers, elementary streams also can store timestamps, 1/time_base + * is the unit in which these timestamps are specified. + * As example of such codec time base see ISO/IEC 14496-2:2001(E) + * vop_time_increment_resolution and fixed_vop_rate + * (fixed_vop_rate == 0 implies that it is different from the framerate) + * + * - encoding: MUST be set by user. + * - decoding: the use of this field for decoding is deprecated. + * Use framerate instead. + */ + AVRational time_base; + + /** + * For some codecs, the time base is closer to the field rate than the frame rate. + * Most notably, H.264 and MPEG-2 specify time_base as half of frame duration + * if no telecine is used ... + * + * Set to time_base ticks per frame. Default 1, e.g., H.264/MPEG-2 set it to 2. + */ + int ticks_per_frame; + + /** + * Codec delay. + * + * Encoding: Number of frames delay there will be from the encoder input to + * the decoder output. (we assume the decoder matches the spec) + * Decoding: Number of frames delay in addition to what a standard decoder + * as specified in the spec would produce. + * + * Video: + * Number of frames the decoded output will be delayed relative to the + * encoded input. + * + * Audio: + * For encoding, this field is unused (see initial_padding). + * + * For decoding, this is the number of samples the decoder needs to + * output before the decoder's output is valid. When seeking, you should + * start decoding this many samples prior to your desired seek point. + * + * - encoding: Set by libavcodec. 
+ * - decoding: Set by libavcodec. + */ + int delay; + + + /* video only */ + /** + * picture width / height. + * + * @note Those fields may not match the values of the last + * AVFrame output by avcodec_receive_frame() due frame + * reordering. + * + * - encoding: MUST be set by user. + * - decoding: May be set by the user before opening the decoder if known e.g. + * from the container. Some decoders will require the dimensions + * to be set by the caller. During decoding, the decoder may + * overwrite those values as required while parsing the data. + */ + int width, height; + + /** + * Bitstream width / height, may be different from width/height e.g. when + * the decoded frame is cropped before being output or lowres is enabled. + * + * @note Those field may not match the value of the last + * AVFrame output by avcodec_receive_frame() due frame + * reordering. + * + * - encoding: unused + * - decoding: May be set by the user before opening the decoder if known + * e.g. from the container. During decoding, the decoder may + * overwrite those values as required while parsing the data. + */ + int coded_width, coded_height; + + /** + * the number of pictures in a group of pictures, or 0 for intra_only + * - encoding: Set by user. + * - decoding: unused + */ + int gop_size; + + /** + * Pixel format, see AV_PIX_FMT_xxx. + * May be set by the demuxer if known from headers. + * May be overridden by the decoder if it knows better. + * + * @note This field may not match the value of the last + * AVFrame output by avcodec_receive_frame() due frame + * reordering. + * + * - encoding: Set by user. + * - decoding: Set by user if known, overridden by libavcodec while + * parsing the data. + */ + enum AVPixelFormat pix_fmt; + + /** + * If non NULL, 'draw_horiz_band' is called by the libavcodec + * decoder to draw a horizontal band. It improves cache usage. Not + * all codecs can do that. You must check the codec capabilities + * beforehand. 
+ * When multithreading is used, it may be called from multiple threads + * at the same time; threads might draw different parts of the same AVFrame, + * or multiple AVFrames, and there is no guarantee that slices will be drawn + * in order. + * The function is also used by hardware acceleration APIs. + * It is called at least once during frame decoding to pass + * the data needed for hardware render. + * In that mode instead of pixel data, AVFrame points to + * a structure specific to the acceleration API. The application + * reads the structure and can change some fields to indicate progress + * or mark state. + * - encoding: unused + * - decoding: Set by user. + * @param height the height of the slice + * @param y the y position of the slice + * @param type 1->top field, 2->bottom field, 3->frame + * @param offset offset into the AVFrame.data from which the slice should be read + */ + void (*draw_horiz_band)(struct AVCodecContext *s, + const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], + int y, int type, int height); + + /** + * Callback to negotiate the pixel format. Decoding only, may be set by the + * caller before avcodec_open2(). + * + * Called by some decoders to select the pixel format that will be used for + * the output frames. This is mainly used to set up hardware acceleration, + * then the provided format list contains the corresponding hwaccel pixel + * formats alongside the "software" one. The software pixel format may also + * be retrieved from \ref sw_pix_fmt. + * + * This callback will be called when the coded frame properties (such as + * resolution, pixel format, etc.) change and more than one output format is + * supported for those new properties. If a hardware pixel format is chosen + * and initialization for it fails, the callback may be called again + * immediately. + * + * This callback may be called from different threads if the decoder is + * multi-threaded, but not from more than one thread simultaneously. 
+ * + * @param fmt list of formats which may be used in the current + * configuration, terminated by AV_PIX_FMT_NONE. + * @warning Behavior is undefined if the callback returns a value other + * than one of the formats in fmt or AV_PIX_FMT_NONE. + * @return the chosen format or AV_PIX_FMT_NONE + */ + enum AVPixelFormat (*get_format)(struct AVCodecContext *s, const enum AVPixelFormat * fmt); + + /** + * maximum number of B-frames between non-B-frames + * Note: The output will be delayed by max_b_frames+1 relative to the input. + * - encoding: Set by user. + * - decoding: unused + */ + int max_b_frames; + + /** + * qscale factor between IP and B-frames + * If > 0 then the last P-frame quantizer will be used (q= lastp_q*factor+offset). + * If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). + * - encoding: Set by user. + * - decoding: unused + */ + float b_quant_factor; + + /** + * qscale offset between IP and B-frames + * - encoding: Set by user. + * - decoding: unused + */ + float b_quant_offset; + + /** + * Size of the frame reordering buffer in the decoder. + * For MPEG-2 it is 1 IPB or 0 low delay IP. + * - encoding: Set by libavcodec. + * - decoding: Set by libavcodec. + */ + int has_b_frames; + + /** + * qscale factor between P- and I-frames + * If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset). + * If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). + * - encoding: Set by user. + * - decoding: unused + */ + float i_quant_factor; + + /** + * qscale offset between P and I-frames + * - encoding: Set by user. + * - decoding: unused + */ + float i_quant_offset; + + /** + * luminance masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + */ + float lumi_masking; + + /** + * temporary complexity masking (0-> disabled) + * - encoding: Set by user. 
+ * - decoding: unused + */ + float temporal_cplx_masking; + + /** + * spatial complexity masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + */ + float spatial_cplx_masking; + + /** + * p block masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + */ + float p_masking; + + /** + * darkness masking (0-> disabled) + * - encoding: Set by user. + * - decoding: unused + */ + float dark_masking; + + /** + * slice count + * - encoding: Set by libavcodec. + * - decoding: Set by user (or 0). + */ + int slice_count; + + /** + * slice offsets in the frame in bytes + * - encoding: Set/allocated by libavcodec. + * - decoding: Set/allocated by user (or NULL). + */ + int *slice_offset; + + /** + * sample aspect ratio (0 if unknown) + * That is the width of a pixel divided by the height of the pixel. + * Numerator and denominator must be relatively prime and smaller than 256 for some video standards. + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + AVRational sample_aspect_ratio; + + /** + * motion estimation comparison function + * - encoding: Set by user. + * - decoding: unused + */ + int me_cmp; + /** + * subpixel motion estimation comparison function + * - encoding: Set by user. + * - decoding: unused + */ + int me_sub_cmp; + /** + * macroblock comparison function (not supported yet) + * - encoding: Set by user. + * - decoding: unused + */ + int mb_cmp; + /** + * interlaced DCT comparison function + * - encoding: Set by user. 
+ * - decoding: unused + */ + int ildct_cmp; +#define FF_CMP_SAD 0 +#define FF_CMP_SSE 1 +#define FF_CMP_SATD 2 +#define FF_CMP_DCT 3 +#define FF_CMP_PSNR 4 +#define FF_CMP_BIT 5 +#define FF_CMP_RD 6 +#define FF_CMP_ZERO 7 +#define FF_CMP_VSAD 8 +#define FF_CMP_VSSE 9 +#define FF_CMP_NSSE 10 +#define FF_CMP_W53 11 +#define FF_CMP_W97 12 +#define FF_CMP_DCTMAX 13 +#define FF_CMP_DCT264 14 +#define FF_CMP_MEDIAN_SAD 15 +#define FF_CMP_CHROMA 256 + + /** + * ME diamond size & shape + * - encoding: Set by user. + * - decoding: unused + */ + int dia_size; + + /** + * amount of previous MV predictors (2a+1 x 2a+1 square) + * - encoding: Set by user. + * - decoding: unused + */ + int last_predictor_count; + + /** + * motion estimation prepass comparison function + * - encoding: Set by user. + * - decoding: unused + */ + int me_pre_cmp; + + /** + * ME prepass diamond size & shape + * - encoding: Set by user. + * - decoding: unused + */ + int pre_dia_size; + + /** + * subpel ME quality + * - encoding: Set by user. + * - decoding: unused + */ + int me_subpel_quality; + + /** + * maximum motion estimation search range in subpel units + * If 0 then no limit. + * + * - encoding: Set by user. + * - decoding: unused + */ + int me_range; + + /** + * slice flags + * - encoding: unused + * - decoding: Set by user. + */ + int slice_flags; +#define SLICE_FLAG_CODED_ORDER 0x0001 ///< draw_horiz_band() is called in coded order instead of display +#define SLICE_FLAG_ALLOW_FIELD 0x0002 ///< allow draw_horiz_band() with field slices (MPEG-2 field pics) +#define SLICE_FLAG_ALLOW_PLANE 0x0004 ///< allow draw_horiz_band() with 1 component at a time (SVQ1) + + /** + * macroblock decision mode + * - encoding: Set by user. 
+ * - decoding: unused + */ + int mb_decision; +#define FF_MB_DECISION_SIMPLE 0 ///< uses mb_cmp +#define FF_MB_DECISION_BITS 1 ///< chooses the one which needs the fewest bits +#define FF_MB_DECISION_RD 2 ///< rate distortion + + /** + * custom intra quantization matrix + * Must be allocated with the av_malloc() family of functions, and will be freed in + * avcodec_free_context(). + * - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. + * - decoding: Set/allocated/freed by libavcodec. + */ + uint16_t *intra_matrix; + + /** + * custom inter quantization matrix + * Must be allocated with the av_malloc() family of functions, and will be freed in + * avcodec_free_context(). + * - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. + * - decoding: Set/allocated/freed by libavcodec. + */ + uint16_t *inter_matrix; + + /** + * precision of the intra DC coefficient - 8 + * - encoding: Set by user. + * - decoding: Set by libavcodec + */ + int intra_dc_precision; + + /** + * Number of macroblock rows at the top which are skipped. + * - encoding: unused + * - decoding: Set by user. + */ + int skip_top; + + /** + * Number of macroblock rows at the bottom which are skipped. + * - encoding: unused + * - decoding: Set by user. + */ + int skip_bottom; + + /** + * minimum MB Lagrange multiplier + * - encoding: Set by user. + * - decoding: unused + */ + int mb_lmin; + + /** + * maximum MB Lagrange multiplier + * - encoding: Set by user. + * - decoding: unused + */ + int mb_lmax; + + /** + * - encoding: Set by user. + * - decoding: unused + */ + int bidir_refine; + + /** + * minimum GOP size + * - encoding: Set by user. + * - decoding: unused + */ + int keyint_min; + + /** + * number of reference frames + * - encoding: Set by user. + * - decoding: Set by lavc. + */ + int refs; + + /** + * Note: Value depends upon the compare function used for fullpel ME. + * - encoding: Set by user. 
+ * - decoding: unused + */ + int mv0_threshold; + + /** + * Chromaticity coordinates of the source primaries. + * - encoding: Set by user + * - decoding: Set by libavcodec + */ + enum AVColorPrimaries color_primaries; + + /** + * Color Transfer Characteristic. + * - encoding: Set by user + * - decoding: Set by libavcodec + */ + enum AVColorTransferCharacteristic color_trc; + + /** + * YUV colorspace type. + * - encoding: Set by user + * - decoding: Set by libavcodec + */ + enum AVColorSpace colorspace; + + /** + * MPEG vs JPEG YUV range. + * - encoding: Set by user + * - decoding: Set by libavcodec + */ + enum AVColorRange color_range; + + /** + * This defines the location of chroma samples. + * - encoding: Set by user + * - decoding: Set by libavcodec + */ + enum AVChromaLocation chroma_sample_location; + + /** + * Number of slices. + * Indicates number of picture subdivisions. Used for parallelized + * decoding. + * - encoding: Set by user + * - decoding: unused + */ + int slices; + + /** Field order + * - encoding: set by libavcodec + * - decoding: Set by user. + */ + enum AVFieldOrder field_order; + + /* audio only */ + int sample_rate; ///< samples per second + +#if FF_API_OLD_CHANNEL_LAYOUT + /** + * number of audio channels + * @deprecated use ch_layout.nb_channels + */ + attribute_deprecated + int channels; +#endif + + /** + * audio sample format + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + enum AVSampleFormat sample_fmt; ///< sample format + + /* The following data should not be initialized. */ + /** + * Number of samples per channel in an audio frame. + * + * - encoding: set by libavcodec in avcodec_open2(). Each submitted frame + * except the last must contain exactly frame_size samples per channel. + * May be 0 when the codec has AV_CODEC_CAP_VARIABLE_FRAME_SIZE set, then the + * frame size is not restricted. 
+ * - decoding: may be set by some decoders to indicate constant frame size + */ + int frame_size; + + /** + * Frame counter, set by libavcodec. + * + * - decoding: total number of frames returned from the decoder so far. + * - encoding: total number of frames passed to the encoder so far. + * + * @note the counter is not incremented if encoding/decoding resulted in + * an error. + */ + int frame_number; + + /** + * number of bytes per packet if constant and known or 0 + * Used by some WAV based audio codecs. + */ + int block_align; + + /** + * Audio cutoff bandwidth (0 means "automatic") + * - encoding: Set by user. + * - decoding: unused + */ + int cutoff; + +#if FF_API_OLD_CHANNEL_LAYOUT + /** + * Audio channel layout. + * - encoding: set by user. + * - decoding: set by user, may be overwritten by libavcodec. + * @deprecated use ch_layout + */ + attribute_deprecated + uint64_t channel_layout; + + /** + * Request decoder to use this channel layout if it can (0 for default) + * - encoding: unused + * - decoding: Set by user. + * @deprecated use "downmix" codec private option + */ + attribute_deprecated + uint64_t request_channel_layout; +#endif + + /** + * Type of service that the audio stream conveys. + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + enum AVAudioServiceType audio_service_type; + + /** + * desired sample format + * - encoding: Not used. + * - decoding: Set by user. + * Decoder will decode to this format if it can. + */ + enum AVSampleFormat request_sample_fmt; + + /** + * This callback is called at the beginning of each frame to get data + * buffer(s) for it. There may be one contiguous buffer for all the data or + * there may be a buffer per each data plane or anything in between. What + * this means is, you may set however many entries in buf[] you feel necessary. + * Each buffer must be reference-counted using the AVBuffer API (see description + * of buf[] below). 
+ * + * The following fields will be set in the frame before this callback is + * called: + * - format + * - width, height (video only) + * - sample_rate, channel_layout, nb_samples (audio only) + * Their values may differ from the corresponding values in + * AVCodecContext. This callback must use the frame values, not the codec + * context values, to calculate the required buffer size. + * + * This callback must fill the following fields in the frame: + * - data[] + * - linesize[] + * - extended_data: + * * if the data is planar audio with more than 8 channels, then this + * callback must allocate and fill extended_data to contain all pointers + * to all data planes. data[] must hold as many pointers as it can. + * extended_data must be allocated with av_malloc() and will be freed in + * av_frame_unref(). + * * otherwise extended_data must point to data + * - buf[] must contain one or more pointers to AVBufferRef structures. Each of + * the frame's data and extended_data pointers must be contained in these. That + * is, one AVBufferRef for each allocated chunk of memory, not necessarily one + * AVBufferRef per data[] entry. See: av_buffer_create(), av_buffer_alloc(), + * and av_buffer_ref(). + * - extended_buf and nb_extended_buf must be allocated with av_malloc() by + * this callback and filled with the extra buffers if there are more + * buffers than buf[] can hold. extended_buf will be freed in + * av_frame_unref(). + * + * If AV_CODEC_CAP_DR1 is not set then get_buffer2() must call + * avcodec_default_get_buffer2() instead of providing buffers allocated by + * some other means. + * + * Each data plane must be aligned to the maximum required by the target + * CPU. + * + * @see avcodec_default_get_buffer2() + * + * Video: + * + * If AV_GET_BUFFER_FLAG_REF is set in flags then the frame may be reused + * (read and/or written to if it is writable) later by libavcodec. 
+ * + * avcodec_align_dimensions2() should be used to find the required width and + * height, as they normally need to be rounded up to the next multiple of 16. + * + * Some decoders do not support linesizes changing between frames. + * + * If frame multithreading is used, this callback may be called from a + * different thread, but not from more than one at once. Does not need to be + * reentrant. + * + * @see avcodec_align_dimensions2() + * + * Audio: + * + * Decoders request a buffer of a particular size by setting + * AVFrame.nb_samples prior to calling get_buffer2(). The decoder may, + * however, utilize only part of the buffer by setting AVFrame.nb_samples + * to a smaller value in the output frame. + * + * As a convenience, av_samples_get_buffer_size() and + * av_samples_fill_arrays() in libavutil may be used by custom get_buffer2() + * functions to find the required data size and to fill data pointers and + * linesize. In AVFrame.linesize, only linesize[0] may be set for audio + * since all planes must be the same size. + * + * @see av_samples_get_buffer_size(), av_samples_fill_arrays() + * + * - encoding: unused + * - decoding: Set by libavcodec, user can override. + */ + int (*get_buffer2)(struct AVCodecContext *s, AVFrame *frame, int flags); + + /* - encoding parameters */ + float qcompress; ///< amount of qscale change between easy & hard scenes (0.0-1.0) + float qblur; ///< amount of qscale smoothing over time (0.0-1.0) + + /** + * minimum quantizer + * - encoding: Set by user. + * - decoding: unused + */ + int qmin; + + /** + * maximum quantizer + * - encoding: Set by user. + * - decoding: unused + */ + int qmax; + + /** + * maximum quantizer difference between frames + * - encoding: Set by user. + * - decoding: unused + */ + int max_qdiff; + + /** + * decoder bitstream buffer size + * - encoding: Set by user. + * - decoding: unused + */ + int rc_buffer_size; + + /** + * ratecontrol override, see RcOverride + * - encoding: Allocated/set/freed by user. 
+ * - decoding: unused + */ + int rc_override_count; + RcOverride *rc_override; + + /** + * maximum bitrate + * - encoding: Set by user. + * - decoding: Set by user, may be overwritten by libavcodec. + */ + int64_t rc_max_rate; + + /** + * minimum bitrate + * - encoding: Set by user. + * - decoding: unused + */ + int64_t rc_min_rate; + + /** + * Ratecontrol attempt to use, at maximum, of what can be used without an underflow. + * - encoding: Set by user. + * - decoding: unused. + */ + float rc_max_available_vbv_use; + + /** + * Ratecontrol attempt to use, at least, times the amount needed to prevent a vbv overflow. + * - encoding: Set by user. + * - decoding: unused. + */ + float rc_min_vbv_overflow_use; + + /** + * Number of bits which should be loaded into the rc buffer before decoding starts. + * - encoding: Set by user. + * - decoding: unused + */ + int rc_initial_buffer_occupancy; + + /** + * trellis RD quantization + * - encoding: Set by user. + * - decoding: unused + */ + int trellis; + + /** + * pass1 encoding statistics output buffer + * - encoding: Set by libavcodec. + * - decoding: unused + */ + char *stats_out; + + /** + * pass2 encoding statistics input buffer + * Concatenated stuff from stats_out of pass1 should be placed here. + * - encoding: Allocated/set/freed by user. + * - decoding: unused + */ + char *stats_in; + + /** + * Work around bugs in encoders which sometimes cannot be detected automatically. + * - encoding: Set by user + * - decoding: Set by user + */ + int workaround_bugs; +#define FF_BUG_AUTODETECT 1 ///< autodetection +#define FF_BUG_XVID_ILACE 4 +#define FF_BUG_UMP4 8 +#define FF_BUG_NO_PADDING 16 +#define FF_BUG_AMV 32 +#define FF_BUG_QPEL_CHROMA 64 +#define FF_BUG_STD_QPEL 128 +#define FF_BUG_QPEL_CHROMA2 256 +#define FF_BUG_DIRECT_BLOCKSIZE 512 +#define FF_BUG_EDGE 1024 +#define FF_BUG_HPEL_CHROMA 2048 +#define FF_BUG_DC_CLIP 4096 +#define FF_BUG_MS 8192 ///< Work around various bugs in Microsoft's broken decoders. 
+#define FF_BUG_TRUNCATED 16384 +#define FF_BUG_IEDGE 32768 + + /** + * strictly follow the standard (MPEG-4, ...). + * - encoding: Set by user. + * - decoding: Set by user. + * Setting this to STRICT or higher means the encoder and decoder will + * generally do stupid things, whereas setting it to unofficial or lower + * will mean the encoder might produce output that is not supported by all + * spec-compliant decoders. Decoders don't differentiate between normal, + * unofficial and experimental (that is, they always try to decode things + * when they can) unless they are explicitly asked to behave stupidly + * (=strictly conform to the specs) + */ + int strict_std_compliance; +#define FF_COMPLIANCE_VERY_STRICT 2 ///< Strictly conform to an older more strict version of the spec or reference software. +#define FF_COMPLIANCE_STRICT 1 ///< Strictly conform to all the things in the spec no matter what consequences. +#define FF_COMPLIANCE_NORMAL 0 +#define FF_COMPLIANCE_UNOFFICIAL -1 ///< Allow unofficial extensions +#define FF_COMPLIANCE_EXPERIMENTAL -2 ///< Allow nonstandardized experimental things. + + /** + * error concealment flags + * - encoding: unused + * - decoding: Set by user. + */ + int error_concealment; +#define FF_EC_GUESS_MVS 1 +#define FF_EC_DEBLOCK 2 +#define FF_EC_FAVOR_INTER 256 + + /** + * debug + * - encoding: Set by user. + * - decoding: Set by user. + */ + int debug; +#define FF_DEBUG_PICT_INFO 1 +#define FF_DEBUG_RC 2 +#define FF_DEBUG_BITSTREAM 4 +#define FF_DEBUG_MB_TYPE 8 +#define FF_DEBUG_QP 16 +#define FF_DEBUG_DCT_COEFF 0x00000040 +#define FF_DEBUG_SKIP 0x00000080 +#define FF_DEBUG_STARTCODE 0x00000100 +#define FF_DEBUG_ER 0x00000400 +#define FF_DEBUG_MMCO 0x00000800 +#define FF_DEBUG_BUGS 0x00001000 +#define FF_DEBUG_BUFFERS 0x00008000 +#define FF_DEBUG_THREADS 0x00010000 +#define FF_DEBUG_GREEN_MD 0x00800000 +#define FF_DEBUG_NOMC 0x01000000 + + /** + * Error recognition; may misdetect some more or less valid parts as errors. 
+ * - encoding: Set by user. + * - decoding: Set by user. + */ + int err_recognition; + +/** + * Verify checksums embedded in the bitstream (could be of either encoded or + * decoded data, depending on the codec) and print an error message on mismatch. + * If AV_EF_EXPLODE is also set, a mismatching checksum will result in the + * decoder returning an error. + */ +#define AV_EF_CRCCHECK (1<<0) +#define AV_EF_BITSTREAM (1<<1) ///< detect bitstream specification deviations +#define AV_EF_BUFFER (1<<2) ///< detect improper bitstream length +#define AV_EF_EXPLODE (1<<3) ///< abort decoding on minor error detection + +#define AV_EF_IGNORE_ERR (1<<15) ///< ignore errors and continue +#define AV_EF_CAREFUL (1<<16) ///< consider things that violate the spec, are fast to calculate and have not been seen in the wild as errors +#define AV_EF_COMPLIANT (1<<17) ///< consider all spec non compliances as errors +#define AV_EF_AGGRESSIVE (1<<18) ///< consider things that a sane encoder should not do as an error + + + /** + * opaque 64-bit number (generally a PTS) that will be reordered and + * output in AVFrame.reordered_opaque + * - encoding: Set by libavcodec to the reordered_opaque of the input + * frame corresponding to the last returned packet. Only + * supported by encoders with the + * AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE capability. + * - decoding: Set by user. + */ + int64_t reordered_opaque; + + /** + * Hardware accelerator in use + * - encoding: unused. + * - decoding: Set by libavcodec + */ + const struct AVHWAccel *hwaccel; + + /** + * Hardware accelerator context. + * For some hardware accelerators, a global context needs to be + * provided by the user. In that case, this holds display-dependent + * data FFmpeg cannot instantiate itself. Please refer to the + * FFmpeg HW accelerator documentation to know how to fill this. 
+ * - encoding: unused + * - decoding: Set by user + */ + void *hwaccel_context; + + /** + * error + * - encoding: Set by libavcodec if flags & AV_CODEC_FLAG_PSNR. + * - decoding: unused + */ + uint64_t error[AV_NUM_DATA_POINTERS]; + + /** + * DCT algorithm, see FF_DCT_* below + * - encoding: Set by user. + * - decoding: unused + */ + int dct_algo; +#define FF_DCT_AUTO 0 +#define FF_DCT_FASTINT 1 +#define FF_DCT_INT 2 +#define FF_DCT_MMX 3 +#define FF_DCT_ALTIVEC 5 +#define FF_DCT_FAAN 6 + + /** + * IDCT algorithm, see FF_IDCT_* below. + * - encoding: Set by user. + * - decoding: Set by user. + */ + int idct_algo; +#define FF_IDCT_AUTO 0 +#define FF_IDCT_INT 1 +#define FF_IDCT_SIMPLE 2 +#define FF_IDCT_SIMPLEMMX 3 +#define FF_IDCT_ARM 7 +#define FF_IDCT_ALTIVEC 8 +#define FF_IDCT_SIMPLEARM 10 +#define FF_IDCT_XVID 14 +#define FF_IDCT_SIMPLEARMV5TE 16 +#define FF_IDCT_SIMPLEARMV6 17 +#define FF_IDCT_FAAN 20 +#define FF_IDCT_SIMPLENEON 22 +#if FF_API_IDCT_NONE +// formerly used by xvmc +#define FF_IDCT_NONE 24 +#endif +#define FF_IDCT_SIMPLEAUTO 128 + + /** + * bits per sample/pixel from the demuxer (needed for huffyuv). + * - encoding: Set by libavcodec. + * - decoding: Set by user. + */ + int bits_per_coded_sample; + + /** + * Bits per sample/pixel of internal libavcodec pixel/sample format. + * - encoding: set by user. + * - decoding: set by libavcodec. + */ + int bits_per_raw_sample; + + /** + * low resolution decoding, 1-> 1/2 size, 2->1/4 size + * - encoding: unused + * - decoding: Set by user. + */ + int lowres; + + /** + * thread count + * is used to decide how many independent tasks should be passed to execute() + * - encoding: Set by user. + * - decoding: Set by user. + */ + int thread_count; + + /** + * Which multithreading methods to use. + * Use of FF_THREAD_FRAME will increase decoding delay by one frame per thread, + * so clients which cannot provide future frames should not use it. + * + * - encoding: Set by user, otherwise the default is used. 
+ * - decoding: Set by user, otherwise the default is used. + */ + int thread_type; +#define FF_THREAD_FRAME 1 ///< Decode more than one frame at once +#define FF_THREAD_SLICE 2 ///< Decode more than one part of a single frame at once + + /** + * Which multithreading methods are in use by the codec. + * - encoding: Set by libavcodec. + * - decoding: Set by libavcodec. + */ + int active_thread_type; + +#if FF_API_THREAD_SAFE_CALLBACKS + /** + * Set by the client if its custom get_buffer() callback can be called + * synchronously from another thread, which allows faster multithreaded decoding. + * draw_horiz_band() will be called from other threads regardless of this setting. + * Ignored if the default get_buffer() is used. + * - encoding: Set by user. + * - decoding: Set by user. + * + * @deprecated the custom get_buffer2() callback should always be + * thread-safe. Thread-unsafe get_buffer2() implementations will be + * invalid starting with LIBAVCODEC_VERSION_MAJOR=60; in other words, + * libavcodec will behave as if this field was always set to 1. + * Callers that want to be forward compatible with future libavcodec + * versions should wrap access to this field in + * #if LIBAVCODEC_VERSION_MAJOR < 60 + */ + attribute_deprecated + int thread_safe_callbacks; +#endif + + /** + * The codec may call this to execute several independent things. + * It will return only after finishing all tasks. + * The user may replace this with some multithreaded implementation, + * the default implementation will execute the parts serially. + * @param count the number of things to execute + * - encoding: Set by libavcodec, user can override. + * - decoding: Set by libavcodec, user can override. + */ + int (*execute)(struct AVCodecContext *c, int (*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size); + + /** + * The codec may call this to execute several independent things. + * It will return only after finishing all tasks. 
+ * The user may replace this with some multithreaded implementation, + * the default implementation will execute the parts serially. + * @param c context passed also to func + * @param count the number of things to execute + * @param arg2 argument passed unchanged to func + * @param ret return values of executed functions, must have space for "count" values. May be NULL. + * @param func function that will be called count times, with jobnr from 0 to count-1. + * threadnr will be in the range 0 to c->thread_count-1 < MAX_THREADS and so that no + * two instances of func executing at the same time will have the same threadnr. + * @return always 0 currently, but code should handle a future improvement where when any call to func + * returns < 0 no further calls to func may be done and < 0 is returned. + * - encoding: Set by libavcodec, user can override. + * - decoding: Set by libavcodec, user can override. + */ + int (*execute2)(struct AVCodecContext *c, int (*func)(struct AVCodecContext *c2, void *arg, int jobnr, int threadnr), void *arg2, int *ret, int count); + + /** + * noise vs. sse weight for the nsse comparison function + * - encoding: Set by user. + * - decoding: unused + */ + int nsse_weight; + + /** + * profile + * - encoding: Set by user. + * - decoding: Set by libavcodec. 
+ */ + int profile; +#define FF_PROFILE_UNKNOWN -99 +#define FF_PROFILE_RESERVED -100 + +#define FF_PROFILE_AAC_MAIN 0 +#define FF_PROFILE_AAC_LOW 1 +#define FF_PROFILE_AAC_SSR 2 +#define FF_PROFILE_AAC_LTP 3 +#define FF_PROFILE_AAC_HE 4 +#define FF_PROFILE_AAC_HE_V2 28 +#define FF_PROFILE_AAC_LD 22 +#define FF_PROFILE_AAC_ELD 38 +#define FF_PROFILE_MPEG2_AAC_LOW 128 +#define FF_PROFILE_MPEG2_AAC_HE 131 + +#define FF_PROFILE_DNXHD 0 +#define FF_PROFILE_DNXHR_LB 1 +#define FF_PROFILE_DNXHR_SQ 2 +#define FF_PROFILE_DNXHR_HQ 3 +#define FF_PROFILE_DNXHR_HQX 4 +#define FF_PROFILE_DNXHR_444 5 + +#define FF_PROFILE_DTS 20 +#define FF_PROFILE_DTS_ES 30 +#define FF_PROFILE_DTS_96_24 40 +#define FF_PROFILE_DTS_HD_HRA 50 +#define FF_PROFILE_DTS_HD_MA 60 +#define FF_PROFILE_DTS_EXPRESS 70 + +#define FF_PROFILE_MPEG2_422 0 +#define FF_PROFILE_MPEG2_HIGH 1 +#define FF_PROFILE_MPEG2_SS 2 +#define FF_PROFILE_MPEG2_SNR_SCALABLE 3 +#define FF_PROFILE_MPEG2_MAIN 4 +#define FF_PROFILE_MPEG2_SIMPLE 5 + +#define FF_PROFILE_H264_CONSTRAINED (1<<9) // 8+1; constraint_set1_flag +#define FF_PROFILE_H264_INTRA (1<<11) // 8+3; constraint_set3_flag + +#define FF_PROFILE_H264_BASELINE 66 +#define FF_PROFILE_H264_CONSTRAINED_BASELINE (66|FF_PROFILE_H264_CONSTRAINED) +#define FF_PROFILE_H264_MAIN 77 +#define FF_PROFILE_H264_EXTENDED 88 +#define FF_PROFILE_H264_HIGH 100 +#define FF_PROFILE_H264_HIGH_10 110 +#define FF_PROFILE_H264_HIGH_10_INTRA (110|FF_PROFILE_H264_INTRA) +#define FF_PROFILE_H264_MULTIVIEW_HIGH 118 +#define FF_PROFILE_H264_HIGH_422 122 +#define FF_PROFILE_H264_HIGH_422_INTRA (122|FF_PROFILE_H264_INTRA) +#define FF_PROFILE_H264_STEREO_HIGH 128 +#define FF_PROFILE_H264_HIGH_444 144 +#define FF_PROFILE_H264_HIGH_444_PREDICTIVE 244 +#define FF_PROFILE_H264_HIGH_444_INTRA (244|FF_PROFILE_H264_INTRA) +#define FF_PROFILE_H264_CAVLC_444 44 + +#define FF_PROFILE_VC1_SIMPLE 0 +#define FF_PROFILE_VC1_MAIN 1 +#define FF_PROFILE_VC1_COMPLEX 2 +#define FF_PROFILE_VC1_ADVANCED 3 + +#define 
FF_PROFILE_MPEG4_SIMPLE 0 +#define FF_PROFILE_MPEG4_SIMPLE_SCALABLE 1 +#define FF_PROFILE_MPEG4_CORE 2 +#define FF_PROFILE_MPEG4_MAIN 3 +#define FF_PROFILE_MPEG4_N_BIT 4 +#define FF_PROFILE_MPEG4_SCALABLE_TEXTURE 5 +#define FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION 6 +#define FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE 7 +#define FF_PROFILE_MPEG4_HYBRID 8 +#define FF_PROFILE_MPEG4_ADVANCED_REAL_TIME 9 +#define FF_PROFILE_MPEG4_CORE_SCALABLE 10 +#define FF_PROFILE_MPEG4_ADVANCED_CODING 11 +#define FF_PROFILE_MPEG4_ADVANCED_CORE 12 +#define FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE 13 +#define FF_PROFILE_MPEG4_SIMPLE_STUDIO 14 +#define FF_PROFILE_MPEG4_ADVANCED_SIMPLE 15 + +#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 1 +#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 2 +#define FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION 32768 +#define FF_PROFILE_JPEG2000_DCINEMA_2K 3 +#define FF_PROFILE_JPEG2000_DCINEMA_4K 4 + +#define FF_PROFILE_VP9_0 0 +#define FF_PROFILE_VP9_1 1 +#define FF_PROFILE_VP9_2 2 +#define FF_PROFILE_VP9_3 3 + +#define FF_PROFILE_HEVC_MAIN 1 +#define FF_PROFILE_HEVC_MAIN_10 2 +#define FF_PROFILE_HEVC_MAIN_STILL_PICTURE 3 +#define FF_PROFILE_HEVC_REXT 4 + +#define FF_PROFILE_VVC_MAIN_10 1 +#define FF_PROFILE_VVC_MAIN_10_444 33 + +#define FF_PROFILE_AV1_MAIN 0 +#define FF_PROFILE_AV1_HIGH 1 +#define FF_PROFILE_AV1_PROFESSIONAL 2 + +#define FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT 0xc0 +#define FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT 0xc1 +#define FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT 0xc2 +#define FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS 0xc3 +#define FF_PROFILE_MJPEG_JPEG_LS 0xf7 + +#define FF_PROFILE_SBC_MSBC 1 + +#define FF_PROFILE_PRORES_PROXY 0 +#define FF_PROFILE_PRORES_LT 1 +#define FF_PROFILE_PRORES_STANDARD 2 +#define FF_PROFILE_PRORES_HQ 3 +#define FF_PROFILE_PRORES_4444 4 +#define FF_PROFILE_PRORES_XQ 5 + +#define FF_PROFILE_ARIB_PROFILE_A 0 +#define FF_PROFILE_ARIB_PROFILE_C 1 + +#define FF_PROFILE_KLVA_SYNC 0 +#define FF_PROFILE_KLVA_ASYNC 
1 + + /** + * level + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + int level; +#define FF_LEVEL_UNKNOWN -99 + + /** + * Skip loop filtering for selected frames. + * - encoding: unused + * - decoding: Set by user. + */ + enum AVDiscard skip_loop_filter; + + /** + * Skip IDCT/dequantization for selected frames. + * - encoding: unused + * - decoding: Set by user. + */ + enum AVDiscard skip_idct; + + /** + * Skip decoding for selected frames. + * - encoding: unused + * - decoding: Set by user. + */ + enum AVDiscard skip_frame; + + /** + * Header containing style information for text subtitles. + * For SUBTITLE_ASS subtitle type, it should contain the whole ASS + * [Script Info] and [V4+ Styles] section, plus the [Events] line and + * the Format line following. It shouldn't include any Dialogue line. + * - encoding: Set/allocated/freed by user (before avcodec_open2()) + * - decoding: Set/allocated/freed by libavcodec (by avcodec_open2()) + */ + uint8_t *subtitle_header; + int subtitle_header_size; + + /** + * Audio only. The number of "priming" samples (padding) inserted by the + * encoder at the beginning of the audio. I.e. this number of leading + * decoded samples must be discarded by the caller to get the original audio + * without leading padding. + * + * - decoding: unused + * - encoding: Set by libavcodec. The timestamps on the output packets are + * adjusted by the encoder so that they always refer to the + * first sample of the data actually contained in the packet, + * including any added padding. E.g. if the timebase is + * 1/samplerate and the timestamp of the first input sample is + * 0, the timestamp of the first output packet will be + * -initial_padding. + */ + int initial_padding; + + /** + * - decoding: For codecs that store a framerate value in the compressed + * bitstream, the decoder may export it here. { 0, 1} when + * unknown. + * - encoding: May be used to signal the framerate of CFR content to an + * encoder. 
+ */ + AVRational framerate; + + /** + * Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx. + * - encoding: unused. + * - decoding: Set by libavcodec before calling get_format() + */ + enum AVPixelFormat sw_pix_fmt; + + /** + * Timebase in which pkt_dts/pts and AVPacket.dts/pts are. + * - encoding unused. + * - decoding set by user. + */ + AVRational pkt_timebase; + + /** + * AVCodecDescriptor + * - encoding: unused. + * - decoding: set by libavcodec. + */ + const AVCodecDescriptor *codec_descriptor; + + /** + * Current statistics for PTS correction. + * - decoding: maintained and used by libavcodec, not intended to be used by user apps + * - encoding: unused + */ + int64_t pts_correction_num_faulty_pts; /// Number of incorrect PTS values so far + int64_t pts_correction_num_faulty_dts; /// Number of incorrect DTS values so far + int64_t pts_correction_last_pts; /// PTS of the last frame + int64_t pts_correction_last_dts; /// DTS of the last frame + + /** + * Character encoding of the input subtitles file. + * - decoding: set by user + * - encoding: unused + */ + char *sub_charenc; + + /** + * Subtitles character encoding mode. Formats or codecs might be adjusting + * this setting (if they are doing the conversion themselves for instance). + * - decoding: set by libavcodec + * - encoding: unused + */ + int sub_charenc_mode; +#define FF_SUB_CHARENC_MODE_DO_NOTHING -1 ///< do nothing (demuxer outputs a stream supposed to be already in UTF-8, or the codec is bitmap for instance) +#define FF_SUB_CHARENC_MODE_AUTOMATIC 0 ///< libavcodec will select the mode itself +#define FF_SUB_CHARENC_MODE_PRE_DECODER 1 ///< the AVPacket data needs to be recoded to UTF-8 before being fed to the decoder, requires iconv +#define FF_SUB_CHARENC_MODE_IGNORE 2 ///< neither convert the subtitles, nor check them for valid UTF-8 + + /** + * Skip processing alpha if supported by codec. 
+ * Note that if the format uses pre-multiplied alpha (common with VP6, + * and recommended due to better video quality/compression) + * the image will look as if alpha-blended onto a black background. + * However for formats that do not use pre-multiplied alpha + * there might be serious artefacts (though e.g. libswscale currently + * assumes pre-multiplied alpha anyway). + * + * - decoding: set by user + * - encoding: unused + */ + int skip_alpha; + + /** + * Number of samples to skip after a discontinuity + * - decoding: unused + * - encoding: set by libavcodec + */ + int seek_preroll; + +#if FF_API_DEBUG_MV + /** + * @deprecated unused + */ + attribute_deprecated + int debug_mv; +#define FF_DEBUG_VIS_MV_P_FOR 0x00000001 //visualize forward predicted MVs of P frames +#define FF_DEBUG_VIS_MV_B_FOR 0x00000002 //visualize forward predicted MVs of B frames +#define FF_DEBUG_VIS_MV_B_BACK 0x00000004 //visualize backward predicted MVs of B frames +#endif + + /** + * custom intra quantization matrix + * - encoding: Set by user, can be NULL. + * - decoding: unused. + */ + uint16_t *chroma_intra_matrix; + + /** + * dump format separator. + * can be ", " or "\n " or anything else + * - encoding: Set by user. + * - decoding: Set by user. + */ + uint8_t *dump_separator; + + /** + * ',' separated list of allowed decoders. + * If NULL then all are allowed + * - encoding: unused + * - decoding: set by user + */ + char *codec_whitelist; + + /** + * Properties of the stream that gets decoded + * - encoding: unused + * - decoding: set by libavcodec + */ + unsigned properties; +#define FF_CODEC_PROPERTY_LOSSLESS 0x00000001 +#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS 0x00000002 +#define FF_CODEC_PROPERTY_FILM_GRAIN 0x00000004 + + /** + * Additional data associated with the entire coded stream. + * + * - decoding: unused + * - encoding: may be set by libavcodec after avcodec_open2(). 
+ */ + AVPacketSideData *coded_side_data; + int nb_coded_side_data; + + /** + * A reference to the AVHWFramesContext describing the input (for encoding) + * or output (decoding) frames. The reference is set by the caller and + * afterwards owned (and freed) by libavcodec - it should never be read by + * the caller after being set. + * + * - decoding: This field should be set by the caller from the get_format() + * callback. The previous reference (if any) will always be + * unreffed by libavcodec before the get_format() call. + * + * If the default get_buffer2() is used with a hwaccel pixel + * format, then this AVHWFramesContext will be used for + * allocating the frame buffers. + * + * - encoding: For hardware encoders configured to use a hwaccel pixel + * format, this field should be set by the caller to a reference + * to the AVHWFramesContext describing input frames. + * AVHWFramesContext.format must be equal to + * AVCodecContext.pix_fmt. + * + * This field should be set before avcodec_open2() is called. + */ + AVBufferRef *hw_frames_ctx; + +#if FF_API_SUB_TEXT_FORMAT + /** + * @deprecated unused + */ + attribute_deprecated + int sub_text_format; +#define FF_SUB_TEXT_FMT_ASS 0 +#endif + + /** + * Audio only. The amount of padding (in samples) appended by the encoder to + * the end of the audio. I.e. this number of decoded samples must be + * discarded by the caller from the end of the stream to get the original + * audio without any trailing padding. + * + * - decoding: unused + * - encoding: unused + */ + int trailing_padding; + + /** + * The number of pixels per image to maximally accept. + * + * - decoding: set by user + * - encoding: set by user + */ + int64_t max_pixels; + + /** + * A reference to the AVHWDeviceContext describing the device which will + * be used by a hardware encoder/decoder. The reference is set by the + * caller and afterwards owned (and freed) by libavcodec. 
+ * + * This should be used if either the codec device does not require + * hardware frames or any that are used are to be allocated internally by + * libavcodec. If the user wishes to supply any of the frames used as + * encoder input or decoder output then hw_frames_ctx should be used + * instead. When hw_frames_ctx is set in get_format() for a decoder, this + * field will be ignored while decoding the associated stream segment, but + * may again be used on a following one after another get_format() call. + * + * For both encoders and decoders this field should be set before + * avcodec_open2() is called and must not be written to thereafter. + * + * Note that some decoders may require this field to be set initially in + * order to support hw_frames_ctx at all - in that case, all frames + * contexts used must be created on the same device. + */ + AVBufferRef *hw_device_ctx; + + /** + * Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated + * decoding (if active). + * - encoding: unused + * - decoding: Set by user (either before avcodec_open2(), or in the + * AVCodecContext.get_format callback) + */ + int hwaccel_flags; + + /** + * Video decoding only. Certain video codecs support cropping, meaning that + * only a sub-rectangle of the decoded frame is intended for display. This + * option controls how cropping is handled by libavcodec. + * + * When set to 1 (the default), libavcodec will apply cropping internally. + * I.e. it will modify the output frame width/height fields and offset the + * data pointers (only by as much as possible while preserving alignment, or + * by the full amount if the AV_CODEC_FLAG_UNALIGNED flag is set) so that + * the frames output by the decoder refer only to the cropped area. The + * crop_* fields of the output frames will be zero. + * + * When set to 0, the width/height fields of the output frames will be set + * to the coded dimensions and the crop_* fields will describe the cropping + * rectangle. 
Applying the cropping is left to the caller. + * + * @warning When hardware acceleration with opaque output frames is used, + * libavcodec is unable to apply cropping from the top/left border. + * + * @note when this option is set to zero, the width/height fields of the + * AVCodecContext and output AVFrames have different meanings. The codec + * context fields store display dimensions (with the coded dimensions in + * coded_width/height), while the frame fields store the coded dimensions + * (with the display dimensions being determined by the crop_* fields). + */ + int apply_cropping; + + /* + * Video decoding only. Sets the number of extra hardware frames which + * the decoder will allocate for use by the caller. This must be set + * before avcodec_open2() is called. + * + * Some hardware decoders require all frames that they will use for + * output to be defined in advance before decoding starts. For such + * decoders, the hardware frame pool must therefore be of a fixed size. + * The extra frames set here are on top of any number that the decoder + * needs internally in order to operate normally (for example, frames + * used as reference pictures). + */ + int extra_hw_frames; + + /** + * The percentage of damaged samples to discard a frame. + * + * - decoding: set by user + * - encoding: unused + */ + int discard_damaged_percentage; + + /** + * The number of samples per frame to maximally accept. + * + * - decoding: set by user + * - encoding: set by user + */ + int64_t max_samples; + + /** + * Bit set of AV_CODEC_EXPORT_DATA_* flags, which affects the kind of + * metadata exported in frame, packet, or coded stream side data by + * decoders and encoders. + * + * - decoding: set by user + * - encoding: set by user + */ + int export_side_data; + + /** + * This callback is called at the beginning of each packet to get a data + * buffer for it. 
+ * + * The following field will be set in the packet before this callback is + * called: + * - size + * This callback must use the above value to calculate the required buffer size, + * which must padded by at least AV_INPUT_BUFFER_PADDING_SIZE bytes. + * + * In some specific cases, the encoder may not use the entire buffer allocated by this + * callback. This will be reflected in the size value in the packet once returned by + * avcodec_receive_packet(). + * + * This callback must fill the following fields in the packet: + * - data: alignment requirements for AVPacket apply, if any. Some architectures and + * encoders may benefit from having aligned data. + * - buf: must contain a pointer to an AVBufferRef structure. The packet's + * data pointer must be contained in it. See: av_buffer_create(), av_buffer_alloc(), + * and av_buffer_ref(). + * + * If AV_CODEC_CAP_DR1 is not set then get_encode_buffer() must call + * avcodec_default_get_encode_buffer() instead of providing a buffer allocated by + * some other means. + * + * The flags field may contain a combination of AV_GET_ENCODE_BUFFER_FLAG_ flags. + * They may be used for example to hint what use the buffer may get after being + * created. + * Implementations of this callback may ignore flags they don't understand. + * If AV_GET_ENCODE_BUFFER_FLAG_REF is set in flags then the packet may be reused + * (read and/or written to if it is writable) later by libavcodec. + * + * This callback must be thread-safe, as when frame threading is used, it may + * be called from multiple threads simultaneously. + * + * @see avcodec_default_get_encode_buffer() + * + * - encoding: Set by libavcodec, user can override. + * - decoding: unused + */ + int (*get_encode_buffer)(struct AVCodecContext *s, AVPacket *pkt, int flags); + + /** + * Audio channel layout. + * - encoding: must be set by the caller, to one of AVCodec.ch_layouts. + * - decoding: may be set by the caller if known e.g. from the container. 
+ * The decoder can then override during decoding as needed. + */ + AVChannelLayout ch_layout; +} AVCodecContext; + +/** + * @defgroup lavc_hwaccel AVHWAccel + * + * @note Nothing in this structure should be accessed by the user. At some + * point in future it will not be externally visible at all. + * + * @{ + */ +typedef struct AVHWAccel { + /** + * Name of the hardware accelerated codec. + * The name is globally unique among encoders and among decoders (but an + * encoder and a decoder can share the same name). + */ + const char *name; + + /** + * Type of codec implemented by the hardware accelerator. + * + * See AVMEDIA_TYPE_xxx + */ + enum AVMediaType type; + + /** + * Codec implemented by the hardware accelerator. + * + * See AV_CODEC_ID_xxx + */ + enum AVCodecID id; + + /** + * Supported pixel format. + * + * Only hardware accelerated formats are supported here. + */ + enum AVPixelFormat pix_fmt; + + /** + * Hardware accelerated codec capabilities. + * see AV_HWACCEL_CODEC_CAP_* + */ + int capabilities; + + /***************************************************************** + * No fields below this line are part of the public API. They + * may not be used outside of libavcodec and can be changed and + * removed at will. + * New public fields should be added right above. + ***************************************************************** + */ + + /** + * Allocate a custom buffer + */ + int (*alloc_frame)(AVCodecContext *avctx, AVFrame *frame); + + /** + * Called at the beginning of each frame or field picture. + * + * Meaningful frame information (codec specific) is guaranteed to + * be parsed at this point. This function is mandatory. + * + * Note that buf can be NULL along with buf_size set to 0. + * Otherwise, this means the whole frame is available at this point. 
+ * + * @param avctx the codec context + * @param buf the frame data buffer base + * @param buf_size the size of the frame in bytes + * @return zero if successful, a negative value otherwise + */ + int (*start_frame)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size); + + /** + * Callback for parameter data (SPS/PPS/VPS etc). + * + * Useful for hardware decoders which keep persistent state about the + * video parameters, and need to receive any changes to update that state. + * + * @param avctx the codec context + * @param type the nal unit type + * @param buf the nal unit data buffer + * @param buf_size the size of the nal unit in bytes + * @return zero if successful, a negative value otherwise + */ + int (*decode_params)(AVCodecContext *avctx, int type, const uint8_t *buf, uint32_t buf_size); + + /** + * Callback for each slice. + * + * Meaningful slice information (codec specific) is guaranteed to + * be parsed at this point. This function is mandatory. + * + * @param avctx the codec context + * @param buf the slice data buffer base + * @param buf_size the size of the slice in bytes + * @return zero if successful, a negative value otherwise + */ + int (*decode_slice)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size); + + /** + * Called at the end of each frame or field picture. + * + * The whole picture is parsed at this point and can now be sent + * to the hardware accelerator. This function is mandatory. + * + * @param avctx the codec context + * @return zero if successful, a negative value otherwise + */ + int (*end_frame)(AVCodecContext *avctx); + + /** + * Size of per-frame hardware accelerator private data. + * + * Private data is allocated with av_mallocz() before + * AVCodecContext.get_buffer() and deallocated after + * AVCodecContext.release_buffer(). + */ + int frame_priv_data_size; + + /** + * Initialize the hwaccel private data. 
+ * + * This will be called from ff_get_format(), after hwaccel and + * hwaccel_context are set and the hwaccel private data in AVCodecInternal + * is allocated. + */ + int (*init)(AVCodecContext *avctx); + + /** + * Uninitialize the hwaccel private data. + * + * This will be called from get_format() or avcodec_close(), after hwaccel + * and hwaccel_context are already uninitialized. + */ + int (*uninit)(AVCodecContext *avctx); + + /** + * Size of the private data to allocate in + * AVCodecInternal.hwaccel_priv_data. + */ + int priv_data_size; + + /** + * Internal hwaccel capabilities. + */ + int caps_internal; + + /** + * Fill the given hw_frames context with current codec parameters. Called + * from get_format. Refer to avcodec_get_hw_frames_parameters() for + * details. + * + * This CAN be called before AVHWAccel.init is called, and you must assume + * that avctx->hwaccel_priv_data is invalid. + */ + int (*frame_params)(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx); +} AVHWAccel; + +/** + * HWAccel is experimental and is thus avoided in favor of non experimental + * codecs + */ +#define AV_HWACCEL_CODEC_CAP_EXPERIMENTAL 0x0200 + +/** + * Hardware acceleration should be used for decoding even if the codec level + * used is unknown or higher than the maximum supported level reported by the + * hardware driver. + * + * It's generally a good idea to pass this flag unless you have a specific + * reason not to, as hardware tends to under-report supported levels. + */ +#define AV_HWACCEL_FLAG_IGNORE_LEVEL (1 << 0) + +/** + * Hardware acceleration can output YUV pixel formats with a different chroma + * sampling than 4:2:0 and/or other than 8 bits per component. + */ +#define AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH (1 << 1) + +/** + * Hardware acceleration should still be attempted for decoding when the + * codec profile does not match the reported capabilities of the hardware. 
+ * + * For example, this can be used to try to decode baseline profile H.264 + * streams in hardware - it will often succeed, because many streams marked + * as baseline profile actually conform to constrained baseline profile. + * + * @warning If the stream is actually not supported then the behaviour is + * undefined, and may include returning entirely incorrect output + * while indicating success. + */ +#define AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH (1 << 2) + +/** + * @} + */ + +enum AVSubtitleType { + SUBTITLE_NONE, + + SUBTITLE_BITMAP, ///< A bitmap, pict will be set + + /** + * Plain text, the text field must be set by the decoder and is + * authoritative. ass and pict fields may contain approximations. + */ + SUBTITLE_TEXT, + + /** + * Formatted text, the ass field must be set by the decoder and is + * authoritative. pict and text fields may contain approximations. + */ + SUBTITLE_ASS, +}; + +#define AV_SUBTITLE_FLAG_FORCED 0x00000001 + +typedef struct AVSubtitleRect { + int x; ///< top left corner of pict, undefined when pict is not set + int y; ///< top left corner of pict, undefined when pict is not set + int w; ///< width of pict, undefined when pict is not set + int h; ///< height of pict, undefined when pict is not set + int nb_colors; ///< number of colors in pict, undefined when pict is not set + + /** + * data+linesize for the bitmap of this subtitle. + * Can be set for text/ass as well once they are rendered. + */ + uint8_t *data[4]; + int linesize[4]; + + enum AVSubtitleType type; + + char *text; ///< 0 terminated plain UTF-8 text + + /** + * 0 terminated ASS/SSA compatible event line. + * The presentation of this is unaffected by the other values in this + * struct. 
+ */ + char *ass; + + int flags; +} AVSubtitleRect; + +typedef struct AVSubtitle { + uint16_t format; /* 0 = graphics */ + uint32_t start_display_time; /* relative to packet pts, in ms */ + uint32_t end_display_time; /* relative to packet pts, in ms */ + unsigned num_rects; + AVSubtitleRect **rects; + int64_t pts; ///< Same as packet pts, in AV_TIME_BASE +} AVSubtitle; + +/** + * Return the LIBAVCODEC_VERSION_INT constant. + */ +unsigned avcodec_version(void); + +/** + * Return the libavcodec build-time configuration. + */ +const char *avcodec_configuration(void); + +/** + * Return the libavcodec license. + */ +const char *avcodec_license(void); + +/** + * Allocate an AVCodecContext and set its fields to default values. The + * resulting struct should be freed with avcodec_free_context(). + * + * @param codec if non-NULL, allocate private data and initialize defaults + * for the given codec. It is illegal to then call avcodec_open2() + * with a different codec. + * If NULL, then the codec-specific defaults won't be initialized, + * which may result in suboptimal default settings (this is + * important mainly for encoders, e.g. libx264). + * + * @return An AVCodecContext filled with default values or NULL on failure. + */ +AVCodecContext *avcodec_alloc_context3(const AVCodec *codec); + +/** + * Free the codec context and everything associated with it and write NULL to + * the provided pointer. + */ +void avcodec_free_context(AVCodecContext **avctx); + +/** + * Get the AVClass for AVCodecContext. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. + * + * @see av_opt_find(). + */ +const AVClass *avcodec_get_class(void); + +#if FF_API_GET_FRAME_CLASS +/** + * @deprecated This function should not be used. + */ +attribute_deprecated +const AVClass *avcodec_get_frame_class(void); +#endif + +/** + * Get the AVClass for AVSubtitleRect. It can be used in combination with + * AV_OPT_SEARCH_FAKE_OBJ for examining options. 
+ * + * @see av_opt_find(). + */ +const AVClass *avcodec_get_subtitle_rect_class(void); + +/** + * Fill the parameters struct based on the values from the supplied codec + * context. Any allocated fields in par are freed and replaced with duplicates + * of the corresponding fields in codec. + * + * @return >= 0 on success, a negative AVERROR code on failure + */ +int avcodec_parameters_from_context(AVCodecParameters *par, + const AVCodecContext *codec); + +/** + * Fill the codec context based on the values from the supplied codec + * parameters. Any allocated fields in codec that have a corresponding field in + * par are freed and replaced with duplicates of the corresponding field in par. + * Fields in codec that do not have a counterpart in par are not touched. + * + * @return >= 0 on success, a negative AVERROR code on failure. + */ +int avcodec_parameters_to_context(AVCodecContext *codec, + const AVCodecParameters *par); + +/** + * Initialize the AVCodecContext to use the given AVCodec. Prior to using this + * function the context has to be allocated with avcodec_alloc_context3(). + * + * The functions avcodec_find_decoder_by_name(), avcodec_find_encoder_by_name(), + * avcodec_find_decoder() and avcodec_find_encoder() provide an easy way for + * retrieving a codec. + * + * @note Always call this function before using decoding routines (such as + * @ref avcodec_receive_frame()). + * + * @code + * av_dict_set(&opts, "b", "2.5M", 0); + * codec = avcodec_find_decoder(AV_CODEC_ID_H264); + * if (!codec) + * exit(1); + * + * context = avcodec_alloc_context3(codec); + * + * if (avcodec_open2(context, codec, opts) < 0) + * exit(1); + * @endcode + * + * @param avctx The context to initialize. + * @param codec The codec to open this context for. If a non-NULL codec has been + * previously passed to avcodec_alloc_context3() or + * for this context, then this parameter MUST be either NULL or + * equal to the previously passed codec. 
+ * @param options A dictionary filled with AVCodecContext and codec-private options. + * On return this object will be filled with options that were not found. + * + * @return zero on success, a negative value on error + * @see avcodec_alloc_context3(), avcodec_find_decoder(), avcodec_find_encoder(), + * av_dict_set(), av_opt_find(). + */ +int avcodec_open2(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options); + +/** + * Close a given AVCodecContext and free all the data associated with it + * (but not the AVCodecContext itself). + * + * Calling this function on an AVCodecContext that hasn't been opened will free + * the codec-specific data allocated in avcodec_alloc_context3() with a non-NULL + * codec. Subsequent calls will do nothing. + * + * @note Do not use this function. Use avcodec_free_context() to destroy a + * codec context (either open or closed). Opening and closing a codec context + * multiple times is not supported anymore -- use multiple codec contexts + * instead. + */ +int avcodec_close(AVCodecContext *avctx); + +/** + * Free all allocated data in the given subtitle struct. + * + * @param sub AVSubtitle to free. + */ +void avsubtitle_free(AVSubtitle *sub); + +/** + * @} + */ + +/** + * @addtogroup lavc_decoding + * @{ + */ + +/** + * The default callback for AVCodecContext.get_buffer2(). It is made public so + * it can be called by custom get_buffer2() implementations for decoders without + * AV_CODEC_CAP_DR1 set. + */ +int avcodec_default_get_buffer2(AVCodecContext *s, AVFrame *frame, int flags); + +/** + * The default callback for AVCodecContext.get_encode_buffer(). It is made public so + * it can be called by custom get_encode_buffer() implementations for encoders without + * AV_CODEC_CAP_DR1 set. 
+ */ +int avcodec_default_get_encode_buffer(AVCodecContext *s, AVPacket *pkt, int flags); + +/** + * Modify width and height values so that they will result in a memory + * buffer that is acceptable for the codec if you do not use any horizontal + * padding. + * + * May only be used if a codec with AV_CODEC_CAP_DR1 has been opened. + */ +void avcodec_align_dimensions(AVCodecContext *s, int *width, int *height); + +/** + * Modify width and height values so that they will result in a memory + * buffer that is acceptable for the codec if you also ensure that all + * line sizes are a multiple of the respective linesize_align[i]. + * + * May only be used if a codec with AV_CODEC_CAP_DR1 has been opened. + */ +void avcodec_align_dimensions2(AVCodecContext *s, int *width, int *height, + int linesize_align[AV_NUM_DATA_POINTERS]); + +/** + * Converts AVChromaLocation to swscale x/y chroma position. + * + * The positions represent the chroma (0,0) position in a coordinates system + * with luma (0,0) representing the origin and luma(1,1) representing 256,256 + * + * @param xpos horizontal chroma sample position + * @param ypos vertical chroma sample position + */ +int avcodec_enum_to_chroma_pos(int *xpos, int *ypos, enum AVChromaLocation pos); + +/** + * Converts swscale x/y chroma position to AVChromaLocation. + * + * The positions represent the chroma (0,0) position in a coordinates system + * with luma (0,0) representing the origin and luma(1,1) representing 256,256 + * + * @param xpos horizontal chroma sample position + * @param ypos vertical chroma sample position + */ +enum AVChromaLocation avcodec_chroma_pos_to_enum(int xpos, int ypos); + +/** + * Decode a subtitle message. + * Return a negative value on error, otherwise return the number of bytes used. + * If no subtitle could be decompressed, got_sub_ptr is zero. + * Otherwise, the subtitle is stored in *sub. + * Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. 
This is for + * simplicity, because the performance difference is expected to be negligible + * and reusing a get_buffer written for video codecs would probably perform badly + * due to a potentially very different allocation pattern. + * + * Some decoders (those marked with AV_CODEC_CAP_DELAY) have a delay between input + * and output. This means that for some packets they will not immediately + * produce decoded output and need to be flushed at the end of decoding to get + * all the decoded data. Flushing is done by calling this function with packets + * with avpkt->data set to NULL and avpkt->size set to 0 until it stops + * returning subtitles. It is safe to flush even those decoders that are not + * marked with AV_CODEC_CAP_DELAY, then no subtitles will be returned. + * + * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() + * before packets may be fed to the decoder. + * + * @param avctx the codec context + * @param[out] sub The preallocated AVSubtitle in which the decoded subtitle will be stored, + * must be freed with avsubtitle_free if *got_sub_ptr is set. + * @param[in,out] got_sub_ptr Zero if no subtitle could be decompressed, otherwise, it is nonzero. + * @param[in] avpkt The input AVPacket containing the input buffer. + */ +int avcodec_decode_subtitle2(AVCodecContext *avctx, AVSubtitle *sub, + int *got_sub_ptr, + AVPacket *avpkt); + +/** + * Supply raw packet data as input to a decoder. + * + * Internally, this call will copy relevant AVCodecContext fields, which can + * influence decoding per-packet, and apply them when the packet is actually + * decoded. (For example AVCodecContext.skip_frame, which might direct the + * decoder to drop the frame contained by the packet sent with this function.) + * + * @warning The input buffer, avpkt->data must be AV_INPUT_BUFFER_PADDING_SIZE + * larger than the actual read bytes because some optimized bitstream + * readers read 32 or 64 bits at once and could read over the end. 
+ * + * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() + * before packets may be fed to the decoder. + * + * @param avctx codec context + * @param[in] avpkt The input AVPacket. Usually, this will be a single video + * frame, or several complete audio frames. + * Ownership of the packet remains with the caller, and the + * decoder will not write to the packet. The decoder may create + * a reference to the packet data (or copy it if the packet is + * not reference-counted). + * Unlike with older APIs, the packet is always fully consumed, + * and if it contains multiple frames (e.g. some audio codecs), + * will require you to call avcodec_receive_frame() multiple + * times afterwards before you can send a new packet. + * It can be NULL (or an AVPacket with data set to NULL and + * size set to 0); in this case, it is considered a flush + * packet, which signals the end of the stream. Sending the + * first flush packet will return success. Subsequent ones are + * unnecessary and will return AVERROR_EOF. If the decoder + * still has frames buffered, it will return them after sending + * a flush packet. + * + * @return 0 on success, otherwise negative error code: + * AVERROR(EAGAIN): input is not accepted in the current state - user + * must read output with avcodec_receive_frame() (once + * all output is read, the packet should be resent, and + * the call will not fail with EAGAIN). + * AVERROR_EOF: the decoder has been flushed, and no new packets can + * be sent to it (also returned if more than 1 flush + * packet is sent) + * AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush + * AVERROR(ENOMEM): failed to add packet to internal queue, or similar + * other errors: legitimate decoding errors + */ +int avcodec_send_packet(AVCodecContext *avctx, const AVPacket *avpkt); + +/** + * Return decoded output data from a decoder. 
+ * + * @param avctx codec context + * @param frame This will be set to a reference-counted video or audio + * frame (depending on the decoder type) allocated by the + * decoder. Note that the function will always call + * av_frame_unref(frame) before doing anything else. + * + * @return + * 0: success, a frame was returned + * AVERROR(EAGAIN): output is not available in this state - user must try + * to send new input + * AVERROR_EOF: the decoder has been fully flushed, and there will be + * no more output frames + * AVERROR(EINVAL): codec not opened, or it is an encoder + * AVERROR_INPUT_CHANGED: current decoded frame has changed parameters + * with respect to first decoded frame. Applicable + * when flag AV_CODEC_FLAG_DROPCHANGED is set. + * other negative values: legitimate decoding errors + */ +int avcodec_receive_frame(AVCodecContext *avctx, AVFrame *frame); + +/** + * Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() + * to retrieve buffered output packets. + * + * @param avctx codec context + * @param[in] frame AVFrame containing the raw audio or video frame to be encoded. + * Ownership of the frame remains with the caller, and the + * encoder will not write to the frame. The encoder may create + * a reference to the frame data (or copy it if the frame is + * not reference-counted). + * It can be NULL, in which case it is considered a flush + * packet. This signals the end of the stream. If the encoder + * still has packets buffered, it will return them after this + * call. Once flushing mode has been entered, additional flush + * packets are ignored, and sending frames will return + * AVERROR_EOF. + * + * For audio: + * If AV_CODEC_CAP_VARIABLE_FRAME_SIZE is set, then each frame + * can have any number of samples. + * If it is not set, frame->nb_samples must be equal to + * avctx->frame_size for all frames except the last. + * The final frame may be smaller than avctx->frame_size. 
+ * @return 0 on success, otherwise negative error code: + * AVERROR(EAGAIN): input is not accepted in the current state - user + * must read output with avcodec_receive_packet() (once + * all output is read, the packet should be resent, and + * the call will not fail with EAGAIN). + * AVERROR_EOF: the encoder has been flushed, and no new frames can + * be sent to it + * AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush + * AVERROR(ENOMEM): failed to add packet to internal queue, or similar + * other errors: legitimate encoding errors + */ +int avcodec_send_frame(AVCodecContext *avctx, const AVFrame *frame); + +/** + * Read encoded data from the encoder. + * + * @param avctx codec context + * @param avpkt This will be set to a reference-counted packet allocated by the + * encoder. Note that the function will always call + * av_packet_unref(avpkt) before doing anything else. + * @return 0 on success, otherwise negative error code: + * AVERROR(EAGAIN): output is not available in the current state - user + * must try to send input + * AVERROR_EOF: the encoder has been fully flushed, and there will be + * no more output packets + * AVERROR(EINVAL): codec not opened, or it is a decoder + * other errors: legitimate encoding errors + */ +int avcodec_receive_packet(AVCodecContext *avctx, AVPacket *avpkt); + +/** + * Create and return a AVHWFramesContext with values adequate for hardware + * decoding. This is meant to get called from the get_format callback, and is + * a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. + * This API is for decoding with certain hardware acceleration modes/APIs only. + * + * The returned AVHWFramesContext is not initialized. The caller must do this + * with av_hwframe_ctx_init(). + * + * Calling this function is not a requirement, but makes it simpler to avoid + * codec or hardware API specific details when manually allocating frames. 
+ * + * Alternatively to this, an API user can set AVCodecContext.hw_device_ctx, + * which sets up AVCodecContext.hw_frames_ctx fully automatically, and makes + * it unnecessary to call this function or having to care about + * AVHWFramesContext initialization at all. + * + * There are a number of requirements for calling this function: + * + * - It must be called from get_format with the same avctx parameter that was + * passed to get_format. Calling it outside of get_format is not allowed, and + * can trigger undefined behavior. + * - The function is not always supported (see description of return values). + * Even if this function returns successfully, hwaccel initialization could + * fail later. (The degree to which implementations check whether the stream + * is actually supported varies. Some do this check only after the user's + * get_format callback returns.) + * - The hw_pix_fmt must be one of the choices suggested by get_format. If the + * user decides to use a AVHWFramesContext prepared with this API function, + * the user must return the same hw_pix_fmt from get_format. + * - The device_ref passed to this function must support the given hw_pix_fmt. + * - After calling this API function, it is the user's responsibility to + * initialize the AVHWFramesContext (returned by the out_frames_ref parameter), + * and to set AVCodecContext.hw_frames_ctx to it. If done, this must be done + * before returning from get_format (this is implied by the normal + * AVCodecContext.hw_frames_ctx API rules). + * - The AVHWFramesContext parameters may change every time time get_format is + * called. Also, AVCodecContext.hw_frames_ctx is reset before get_format. So + * you are inherently required to go through this process again on every + * get_format call. + * - It is perfectly possible to call this function without actually using + * the resulting AVHWFramesContext. 
One use-case might be trying to reuse a + * previously initialized AVHWFramesContext, and calling this API function + * only to test whether the required frame parameters have changed. + * - Fields that use dynamically allocated values of any kind must not be set + * by the user unless setting them is explicitly allowed by the documentation. + * If the user sets AVHWFramesContext.free and AVHWFramesContext.user_opaque, + * the new free callback must call the potentially set previous free callback. + * This API call may set any dynamically allocated fields, including the free + * callback. + * + * The function will set at least the following fields on AVHWFramesContext + * (potentially more, depending on hwaccel API): + * + * - All fields set by av_hwframe_ctx_alloc(). + * - Set the format field to hw_pix_fmt. + * - Set the sw_format field to the most suited and most versatile format. (An + * implication is that this will prefer generic formats over opaque formats + * with arbitrary restrictions, if possible.) + * - Set the width/height fields to the coded frame size, rounded up to the + * API-specific minimum alignment. + * - Only _if_ the hwaccel requires a pre-allocated pool: set the initial_pool_size + * field to the number of maximum reference surfaces possible with the codec, + * plus 1 surface for the user to work (meaning the user can safely reference + * at most 1 decoded surface at a time), plus additional buffering introduced + * by frame threading. If the hwaccel does not require pre-allocation, the + * field is left to 0, and the decoder will allocate new surfaces on demand + * during decoding. + * - Possibly AVHWFramesContext.hwctx fields, depending on the underlying + * hardware API. + * + * Essentially, out_frames_ref returns the same as av_hwframe_ctx_alloc(), but + * with basic frame parameters set. + * + * The function is stateless, and does not change the AVCodecContext or the + * device_ref AVHWDeviceContext. 
+ * + * @param avctx The context which is currently calling get_format, and which + * implicitly contains all state needed for filling the returned + * AVHWFramesContext properly. + * @param device_ref A reference to the AVHWDeviceContext describing the device + * which will be used by the hardware decoder. + * @param hw_pix_fmt The hwaccel format you are going to return from get_format. + * @param out_frames_ref On success, set to a reference to an _uninitialized_ + * AVHWFramesContext, created from the given device_ref. + * Fields will be set to values required for decoding. + * Not changed if an error is returned. + * @return zero on success, a negative value on error. The following error codes + * have special semantics: + * AVERROR(ENOENT): the decoder does not support this functionality. Setup + * is always manual, or it is a decoder which does not + * support setting AVCodecContext.hw_frames_ctx at all, + * or it is a software format. + * AVERROR(EINVAL): it is known that hardware decoding is not supported for + * this configuration, or the device_ref is not supported + * for the hwaccel referenced by hw_pix_fmt. + */ +int avcodec_get_hw_frames_parameters(AVCodecContext *avctx, + AVBufferRef *device_ref, + enum AVPixelFormat hw_pix_fmt, + AVBufferRef **out_frames_ref); + + + +/** + * @defgroup lavc_parsing Frame parsing + * @{ + */ + +enum AVPictureStructure { + AV_PICTURE_STRUCTURE_UNKNOWN, //< unknown + AV_PICTURE_STRUCTURE_TOP_FIELD, //< coded as top field + AV_PICTURE_STRUCTURE_BOTTOM_FIELD, //< coded as bottom field + AV_PICTURE_STRUCTURE_FRAME, //< coded as frame +}; + +typedef struct AVCodecParserContext { + void *priv_data; + const struct AVCodecParser *parser; + int64_t frame_offset; /* offset of the current frame */ + int64_t cur_offset; /* current offset + (incremented by each av_parser_parse()) */ + int64_t next_frame_offset; /* offset of the next frame */ + /* video info */ + int pict_type; /* XXX: Put it back in AVCodecContext. 
*/ + /** + * This field is used for proper frame duration computation in lavf. + * It signals, how much longer the frame duration of the current frame + * is compared to normal frame duration. + * + * frame_duration = (1 + repeat_pict) * time_base + * + * It is used by codecs like H.264 to display telecined material. + */ + int repeat_pict; /* XXX: Put it back in AVCodecContext. */ + int64_t pts; /* pts of the current frame */ + int64_t dts; /* dts of the current frame */ + + /* private data */ + int64_t last_pts; + int64_t last_dts; + int fetch_timestamp; + +#define AV_PARSER_PTS_NB 4 + int cur_frame_start_index; + int64_t cur_frame_offset[AV_PARSER_PTS_NB]; + int64_t cur_frame_pts[AV_PARSER_PTS_NB]; + int64_t cur_frame_dts[AV_PARSER_PTS_NB]; + + int flags; +#define PARSER_FLAG_COMPLETE_FRAMES 0x0001 +#define PARSER_FLAG_ONCE 0x0002 +/// Set if the parser has a valid file offset +#define PARSER_FLAG_FETCHED_OFFSET 0x0004 +#define PARSER_FLAG_USE_CODEC_TS 0x1000 + + int64_t offset; ///< byte offset from starting packet start + int64_t cur_frame_end[AV_PARSER_PTS_NB]; + + /** + * Set by parser to 1 for key frames and 0 for non-key frames. + * It is initialized to -1, so if the parser doesn't set this flag, + * old-style fallback using AV_PICTURE_TYPE_I picture type as key frames + * will be used. + */ + int key_frame; + + // Timestamp generation support: + /** + * Synchronization point for start of timestamp generation. + * + * Set to >0 for sync point, 0 for no sync point and <0 for undefined + * (default). + * + * For example, this corresponds to presence of H.264 buffering period + * SEI message. + */ + int dts_sync_point; + + /** + * Offset of the current timestamp against last timestamp sync point in + * units of AVCodecContext.time_base. + * + * Set to INT_MIN when dts_sync_point unused. Otherwise, it must + * contain a valid timestamp offset. 
+ * + * Note that the timestamp of sync point has usually a nonzero + * dts_ref_dts_delta, which refers to the previous sync point. Offset of + * the next frame after timestamp sync point will be usually 1. + * + * For example, this corresponds to H.264 cpb_removal_delay. + */ + int dts_ref_dts_delta; + + /** + * Presentation delay of current frame in units of AVCodecContext.time_base. + * + * Set to INT_MIN when dts_sync_point unused. Otherwise, it must + * contain valid non-negative timestamp delta (presentation time of a frame + * must not lie in the past). + * + * This delay represents the difference between decoding and presentation + * time of the frame. + * + * For example, this corresponds to H.264 dpb_output_delay. + */ + int pts_dts_delta; + + /** + * Position of the packet in file. + * + * Analogous to cur_frame_pts/dts + */ + int64_t cur_frame_pos[AV_PARSER_PTS_NB]; + + /** + * Byte position of currently parsed frame in stream. + */ + int64_t pos; + + /** + * Previous frame byte position. + */ + int64_t last_pos; + + /** + * Duration of the current frame. + * For audio, this is in units of 1 / AVCodecContext.sample_rate. + * For all other types, this is in units of AVCodecContext.time_base. + */ + int duration; + + enum AVFieldOrder field_order; + + /** + * Indicate whether a picture is coded as a frame, top field or bottom field. + * + * For example, H.264 field_pic_flag equal to 0 corresponds to + * AV_PICTURE_STRUCTURE_FRAME. An H.264 picture with field_pic_flag + * equal to 1 and bottom_field_flag equal to 0 corresponds to + * AV_PICTURE_STRUCTURE_TOP_FIELD. + */ + enum AVPictureStructure picture_structure; + + /** + * Picture number incremented in presentation or output order. + * This field may be reinitialized at the first picture of a new sequence. + * + * For example, this corresponds to H.264 PicOrderCnt. + */ + int output_picture_number; + + /** + * Dimensions of the decoded video intended for presentation. 
+ */ + int width; + int height; + + /** + * Dimensions of the coded video. + */ + int coded_width; + int coded_height; + + /** + * The format of the coded data, corresponds to enum AVPixelFormat for video + * and for enum AVSampleFormat for audio. + * + * Note that a decoder can have considerable freedom in how exactly it + * decodes the data, so the format reported here might be different from the + * one returned by a decoder. + */ + int format; +} AVCodecParserContext; + +typedef struct AVCodecParser { + int codec_ids[7]; /* several codec IDs are permitted */ + int priv_data_size; + int (*parser_init)(AVCodecParserContext *s); + /* This callback never returns an error, a negative value means that + * the frame start was in a previous packet. */ + int (*parser_parse)(AVCodecParserContext *s, + AVCodecContext *avctx, + const uint8_t **poutbuf, int *poutbuf_size, + const uint8_t *buf, int buf_size); + void (*parser_close)(AVCodecParserContext *s); + int (*split)(AVCodecContext *avctx, const uint8_t *buf, int buf_size); +} AVCodecParser; + +/** + * Iterate over all registered codec parsers. + * + * @param opaque a pointer where libavcodec will store the iteration state. Must + * point to NULL to start the iteration. + * + * @return the next registered codec parser or NULL when the iteration is + * finished + */ +const AVCodecParser *av_parser_iterate(void **opaque); + +AVCodecParserContext *av_parser_init(int codec_id); + +/** + * Parse a packet. + * + * @param s parser context. + * @param avctx codec context. + * @param poutbuf set to pointer to parsed buffer or NULL if not yet finished. + * @param poutbuf_size set to size of parsed buffer or zero if not yet finished. + * @param buf input buffer. + * @param buf_size buffer size in bytes without the padding. I.e. the full buffer + size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. + To signal EOF, this should be 0 (so that the last frame + can be output). + * @param pts input presentation timestamp. 
+ * @param dts input decoding timestamp. + * @param pos input byte position in stream. + * @return the number of bytes of the input bitstream used. + * + * Example: + * @code + * while(in_len){ + * len = av_parser_parse2(myparser, AVCodecContext, &data, &size, + * in_data, in_len, + * pts, dts, pos); + * in_data += len; + * in_len -= len; + * + * if(size) + * decode_frame(data, size); + * } + * @endcode + */ +int av_parser_parse2(AVCodecParserContext *s, + AVCodecContext *avctx, + uint8_t **poutbuf, int *poutbuf_size, + const uint8_t *buf, int buf_size, + int64_t pts, int64_t dts, + int64_t pos); + +void av_parser_close(AVCodecParserContext *s); + +/** + * @} + * @} + */ + +/** + * @addtogroup lavc_encoding + * @{ + */ + +int avcodec_encode_subtitle(AVCodecContext *avctx, uint8_t *buf, int buf_size, + const AVSubtitle *sub); + + +/** + * @} + */ + +/** + * @defgroup lavc_misc Utility functions + * @ingroup libavc + * + * Miscellaneous utility functions related to both encoding and decoding + * (or neither). + * @{ + */ + +/** + * @defgroup lavc_misc_pixfmt Pixel formats + * + * Functions for working with pixel formats. + * @{ + */ + +/** + * Return a value representing the fourCC code associated to the + * pixel format pix_fmt, or 0 if no associated fourCC code can be + * found. + */ +unsigned int avcodec_pix_fmt_to_codec_tag(enum AVPixelFormat pix_fmt); + +/** + * Find the best pixel format to convert to given a certain source pixel + * format. When converting from one pixel format to another, information loss + * may occur. For example, when converting from RGB24 to GRAY, the color + * information will be lost. Similarly, other losses occur when converting from + * some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of + * the given pixel formats should be used to suffer the least amount of loss. + * The pixel formats from which it chooses one, are determined by the + * pix_fmt_list parameter. 
+ * + * + * @param[in] pix_fmt_list AV_PIX_FMT_NONE terminated array of pixel formats to choose from + * @param[in] src_pix_fmt source pixel format + * @param[in] has_alpha Whether the source pixel format alpha channel is used. + * @param[out] loss_ptr Combination of flags informing you what kind of losses will occur. + * @return The best pixel format to convert to or -1 if none was found. + */ +enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(const enum AVPixelFormat *pix_fmt_list, + enum AVPixelFormat src_pix_fmt, + int has_alpha, int *loss_ptr); + +enum AVPixelFormat avcodec_default_get_format(struct AVCodecContext *s, const enum AVPixelFormat * fmt); + +/** + * @} + */ + +void avcodec_string(char *buf, int buf_size, AVCodecContext *enc, int encode); + +int avcodec_default_execute(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2),void *arg, int *ret, int count, int size); +int avcodec_default_execute2(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2, int, int),void *arg, int *ret, int count); +//FIXME func typedef + +/** + * Fill AVFrame audio data and linesize pointers. + * + * The buffer buf must be a preallocated buffer with a size big enough + * to contain the specified samples amount. The filled AVFrame data + * pointers will point to this buffer. + * + * AVFrame extended_data channel pointers are allocated if necessary for + * planar audio. + * + * @param frame the AVFrame + * frame->nb_samples must be set prior to calling the + * function. This function fills in frame->data, + * frame->extended_data, frame->linesize[0]. 
+ * @param nb_channels channel count + * @param sample_fmt sample format + * @param buf buffer to use for frame data + * @param buf_size size of buffer + * @param align plane size sample alignment (0 = default) + * @return >=0 on success, negative error code on failure + * @todo return the size in bytes required to store the samples in + * case of success, at the next libavutil bump + */ +int avcodec_fill_audio_frame(AVFrame *frame, int nb_channels, + enum AVSampleFormat sample_fmt, const uint8_t *buf, + int buf_size, int align); + +/** + * Reset the internal codec state / flush internal buffers. Should be called + * e.g. when seeking or when switching to a different stream. + * + * @note for decoders, this function just releases any references the decoder + * might keep internally, but the caller's references remain valid. + * + * @note for encoders, this function will only do something if the encoder + * declares support for AV_CODEC_CAP_ENCODER_FLUSH. When called, the encoder + * will drain any remaining packets, and can then be re-used for a different + * stream (as opposed to sending a null frame which will leave the encoder + * in a permanent EOF state after draining). This can be desirable if the + * cost of tearing down and replacing the encoder instance is high. + */ +void avcodec_flush_buffers(AVCodecContext *avctx); + +/** + * Return audio frame duration. + * + * @param avctx codec context + * @param frame_bytes size of the frame, or 0 if unknown + * @return frame duration, in samples, if known. 0 if not able to + * determine. + */ +int av_get_audio_frame_duration(AVCodecContext *avctx, int frame_bytes); + +/* memory */ + +/** + * Same behaviour av_fast_malloc but the buffer has additional + * AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. + * + * In addition the whole buffer will initially and after resizes + * be 0-initialized so that no uninitialized data will ever appear. 
+ */ +void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size); + +/** + * Same behaviour av_fast_padded_malloc except that buffer will always + * be 0-initialized after call. + */ +void av_fast_padded_mallocz(void *ptr, unsigned int *size, size_t min_size); + +/** + * @return a positive value if s is open (i.e. avcodec_open2() was called on it + * with no corresponding avcodec_close()), 0 otherwise. + */ +int avcodec_is_open(AVCodecContext *s); + +/** + * @} + */ + +#endif /* AVCODEC_AVCODEC_H */ diff --git a/third-party/cbs/bytestream.h b/third-party/cbs/bytestream.h index 7892af92b11..d0033f14f36 100644 --- a/third-party/cbs/bytestream.h +++ b/third-party/cbs/bytestream.h @@ -23,329 +23,358 @@ #ifndef AVCODEC_BYTESTREAM_H #define AVCODEC_BYTESTREAM_H -#include "config.h" - #include #include -#include -#include -#include +#include "libavutil/avassert.h" +#include "libavutil/common.h" +#include "libavutil/intreadwrite.h" typedef struct GetByteContext { - const uint8_t *buffer, *buffer_end, *buffer_start; + const uint8_t *buffer, *buffer_end, *buffer_start; } GetByteContext; typedef struct PutByteContext { - uint8_t *buffer, *buffer_end, *buffer_start; - int eof; + uint8_t *buffer, *buffer_end, *buffer_start; + int eof; } PutByteContext; -#define DEF(type, name, bytes, read, write) \ - static av_always_inline type bytestream_get_##name(const uint8_t **b) { \ +#define DEF(type, name, bytes, read, write) \ +static av_always_inline type bytestream_get_ ## name(const uint8_t **b) \ +{ \ (*b) += bytes; \ return read(*b - bytes); \ - } \ - static av_always_inline void bytestream_put_##name(uint8_t **b, \ - const type value) { \ +} \ +static av_always_inline void bytestream_put_ ## name(uint8_t **b, \ + const type value) \ +{ \ write(*b, value); \ (*b) += bytes; \ - } \ - static av_always_inline void bytestream2_put_##name##u(PutByteContext *p, \ - const type value) { \ - bytestream_put_##name(&p->buffer, value); \ - } \ - static av_always_inline void 
bytestream2_put_##name(PutByteContext *p, \ - const type value) { \ - if(!p->eof && (p->buffer_end - p->buffer >= bytes)) { \ - write(p->buffer, value); \ - p->buffer += bytes; \ - } \ - else \ - p->eof = 1; \ - } \ - static av_always_inline type bytestream2_get_##name##u(GetByteContext *g) { \ - return bytestream_get_##name(&g->buffer); \ - } \ - static av_always_inline type bytestream2_get_##name(GetByteContext *g) { \ - if(g->buffer_end - g->buffer < bytes) { \ - g->buffer = g->buffer_end; \ - return 0; \ +} \ +static av_always_inline void bytestream2_put_ ## name ## u(PutByteContext *p, \ + const type value) \ +{ \ + bytestream_put_ ## name(&p->buffer, value); \ +} \ +static av_always_inline void bytestream2_put_ ## name(PutByteContext *p, \ + const type value) \ +{ \ + if (!p->eof && (p->buffer_end - p->buffer >= bytes)) { \ + write(p->buffer, value); \ + p->buffer += bytes; \ + } else \ + p->eof = 1; \ +} \ +static av_always_inline type bytestream2_get_ ## name ## u(GetByteContext *g) \ +{ \ + return bytestream_get_ ## name(&g->buffer); \ +} \ +static av_always_inline type bytestream2_get_ ## name(GetByteContext *g) \ +{ \ + if (g->buffer_end - g->buffer < bytes) { \ + g->buffer = g->buffer_end; \ + return 0; \ } \ - return bytestream2_get_##name##u(g); \ - } \ - static av_always_inline type bytestream2_peek_##name##u(GetByteContext *g) { \ + return bytestream2_get_ ## name ## u(g); \ +} \ +static av_always_inline type bytestream2_peek_ ## name ## u(GetByteContext *g) \ +{ \ return read(g->buffer); \ - } \ - static av_always_inline type bytestream2_peek_##name(GetByteContext *g) { \ - if(g->buffer_end - g->buffer < bytes) \ - return 0; \ - return bytestream2_peek_##name##u(g); \ - } - -DEF(uint64_t, le64, 8, AV_RL64, AV_WL64) +} \ +static av_always_inline type bytestream2_peek_ ## name(GetByteContext *g) \ +{ \ + if (g->buffer_end - g->buffer < bytes) \ + return 0; \ + return bytestream2_peek_ ## name ## u(g); \ +} + +DEF(uint64_t, le64, 8, AV_RL64, AV_WL64) 
DEF(unsigned int, le32, 4, AV_RL32, AV_WL32) DEF(unsigned int, le24, 3, AV_RL24, AV_WL24) DEF(unsigned int, le16, 2, AV_RL16, AV_WL16) -DEF(uint64_t, be64, 8, AV_RB64, AV_WB64) +DEF(uint64_t, be64, 8, AV_RB64, AV_WB64) DEF(unsigned int, be32, 4, AV_RB32, AV_WB32) DEF(unsigned int, be24, 3, AV_RB24, AV_WB24) DEF(unsigned int, be16, 2, AV_RB16, AV_WB16) -DEF(unsigned int, byte, 1, AV_RB8, AV_WB8) +DEF(unsigned int, byte, 1, AV_RB8 , AV_WB8) #if AV_HAVE_BIGENDIAN -#define bytestream2_get_ne16 bytestream2_get_be16 -#define bytestream2_get_ne24 bytestream2_get_be24 -#define bytestream2_get_ne32 bytestream2_get_be32 -#define bytestream2_get_ne64 bytestream2_get_be64 -#define bytestream2_get_ne16u bytestream2_get_be16u -#define bytestream2_get_ne24u bytestream2_get_be24u -#define bytestream2_get_ne32u bytestream2_get_be32u -#define bytestream2_get_ne64u bytestream2_get_be64u -#define bytestream2_put_ne16 bytestream2_put_be16 -#define bytestream2_put_ne24 bytestream2_put_be24 -#define bytestream2_put_ne32 bytestream2_put_be32 -#define bytestream2_put_ne64 bytestream2_put_be64 -#define bytestream2_peek_ne16 bytestream2_peek_be16 -#define bytestream2_peek_ne24 bytestream2_peek_be24 -#define bytestream2_peek_ne32 bytestream2_peek_be32 -#define bytestream2_peek_ne64 bytestream2_peek_be64 +# define bytestream2_get_ne16 bytestream2_get_be16 +# define bytestream2_get_ne24 bytestream2_get_be24 +# define bytestream2_get_ne32 bytestream2_get_be32 +# define bytestream2_get_ne64 bytestream2_get_be64 +# define bytestream2_get_ne16u bytestream2_get_be16u +# define bytestream2_get_ne24u bytestream2_get_be24u +# define bytestream2_get_ne32u bytestream2_get_be32u +# define bytestream2_get_ne64u bytestream2_get_be64u +# define bytestream2_put_ne16 bytestream2_put_be16 +# define bytestream2_put_ne24 bytestream2_put_be24 +# define bytestream2_put_ne32 bytestream2_put_be32 +# define bytestream2_put_ne64 bytestream2_put_be64 +# define bytestream2_peek_ne16 bytestream2_peek_be16 +# define 
bytestream2_peek_ne24 bytestream2_peek_be24 +# define bytestream2_peek_ne32 bytestream2_peek_be32 +# define bytestream2_peek_ne64 bytestream2_peek_be64 #else -#define bytestream2_get_ne16 bytestream2_get_le16 -#define bytestream2_get_ne24 bytestream2_get_le24 -#define bytestream2_get_ne32 bytestream2_get_le32 -#define bytestream2_get_ne64 bytestream2_get_le64 -#define bytestream2_get_ne16u bytestream2_get_le16u -#define bytestream2_get_ne24u bytestream2_get_le24u -#define bytestream2_get_ne32u bytestream2_get_le32u -#define bytestream2_get_ne64u bytestream2_get_le64u -#define bytestream2_put_ne16 bytestream2_put_le16 -#define bytestream2_put_ne24 bytestream2_put_le24 -#define bytestream2_put_ne32 bytestream2_put_le32 -#define bytestream2_put_ne64 bytestream2_put_le64 -#define bytestream2_peek_ne16 bytestream2_peek_le16 -#define bytestream2_peek_ne24 bytestream2_peek_le24 -#define bytestream2_peek_ne32 bytestream2_peek_le32 -#define bytestream2_peek_ne64 bytestream2_peek_le64 +# define bytestream2_get_ne16 bytestream2_get_le16 +# define bytestream2_get_ne24 bytestream2_get_le24 +# define bytestream2_get_ne32 bytestream2_get_le32 +# define bytestream2_get_ne64 bytestream2_get_le64 +# define bytestream2_get_ne16u bytestream2_get_le16u +# define bytestream2_get_ne24u bytestream2_get_le24u +# define bytestream2_get_ne32u bytestream2_get_le32u +# define bytestream2_get_ne64u bytestream2_get_le64u +# define bytestream2_put_ne16 bytestream2_put_le16 +# define bytestream2_put_ne24 bytestream2_put_le24 +# define bytestream2_put_ne32 bytestream2_put_le32 +# define bytestream2_put_ne64 bytestream2_put_le64 +# define bytestream2_peek_ne16 bytestream2_peek_le16 +# define bytestream2_peek_ne24 bytestream2_peek_le24 +# define bytestream2_peek_ne32 bytestream2_peek_le32 +# define bytestream2_peek_ne64 bytestream2_peek_le64 #endif static av_always_inline void bytestream2_init(GetByteContext *g, - const uint8_t *buf, - int buf_size) { - av_assert0(buf_size >= 0); - g->buffer = buf; - 
g->buffer_start = buf; - g->buffer_end = buf + buf_size; + const uint8_t *buf, + int buf_size) +{ + av_assert0(buf_size >= 0); + g->buffer = buf; + g->buffer_start = buf; + g->buffer_end = buf + buf_size; } static av_always_inline void bytestream2_init_writer(PutByteContext *p, - uint8_t *buf, - int buf_size) { - av_assert0(buf_size >= 0); - p->buffer = buf; - p->buffer_start = buf; - p->buffer_end = buf + buf_size; - p->eof = 0; + uint8_t *buf, + int buf_size) +{ + av_assert0(buf_size >= 0); + p->buffer = buf; + p->buffer_start = buf; + p->buffer_end = buf + buf_size; + p->eof = 0; } -static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g) { - return g->buffer_end - g->buffer; +static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g) +{ + return g->buffer_end - g->buffer; } -static av_always_inline int bytestream2_get_bytes_left_p(PutByteContext *p) { - return p->buffer_end - p->buffer; +static av_always_inline int bytestream2_get_bytes_left_p(PutByteContext *p) +{ + return p->buffer_end - p->buffer; } static av_always_inline void bytestream2_skip(GetByteContext *g, - unsigned int size) { - g->buffer += FFMIN(g->buffer_end - g->buffer, size); + unsigned int size) +{ + g->buffer += FFMIN(g->buffer_end - g->buffer, size); } static av_always_inline void bytestream2_skipu(GetByteContext *g, - unsigned int size) { - g->buffer += size; + unsigned int size) +{ + g->buffer += size; } static av_always_inline void bytestream2_skip_p(PutByteContext *p, - unsigned int size) { - int size2; - if(p->eof) - return; - size2 = FFMIN(p->buffer_end - p->buffer, size); - if(size2 != size) - p->eof = 1; - p->buffer += size2; + unsigned int size) +{ + int size2; + if (p->eof) + return; + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + p->buffer += size2; } -static av_always_inline int bytestream2_tell(GetByteContext *g) { - return (int)(g->buffer - g->buffer_start); +static av_always_inline int 
bytestream2_tell(GetByteContext *g) +{ + return (int)(g->buffer - g->buffer_start); } -static av_always_inline int bytestream2_tell_p(PutByteContext *p) { - return (int)(p->buffer - p->buffer_start); +static av_always_inline int bytestream2_tell_p(PutByteContext *p) +{ + return (int)(p->buffer - p->buffer_start); } -static av_always_inline int bytestream2_size(GetByteContext *g) { - return (int)(g->buffer_end - g->buffer_start); +static av_always_inline int bytestream2_size(GetByteContext *g) +{ + return (int)(g->buffer_end - g->buffer_start); } -static av_always_inline int bytestream2_size_p(PutByteContext *p) { - return (int)(p->buffer_end - p->buffer_start); +static av_always_inline int bytestream2_size_p(PutByteContext *p) +{ + return (int)(p->buffer_end - p->buffer_start); } static av_always_inline int bytestream2_seek(GetByteContext *g, - int offset, - int whence) { - switch(whence) { - case SEEK_CUR: - offset = av_clip(offset, -(g->buffer - g->buffer_start), - g->buffer_end - g->buffer); - g->buffer += offset; - break; - case SEEK_END: - offset = av_clip(offset, -(g->buffer_end - g->buffer_start), 0); - g->buffer = g->buffer_end + offset; - break; - case SEEK_SET: - offset = av_clip(offset, 0, g->buffer_end - g->buffer_start); - g->buffer = g->buffer_start + offset; - break; - default: - return AVERROR(EINVAL); - } - return bytestream2_tell(g); + int offset, + int whence) +{ + switch (whence) { + case SEEK_CUR: + offset = av_clip(offset, -(g->buffer - g->buffer_start), + g->buffer_end - g->buffer); + g->buffer += offset; + break; + case SEEK_END: + offset = av_clip(offset, -(g->buffer_end - g->buffer_start), 0); + g->buffer = g->buffer_end + offset; + break; + case SEEK_SET: + offset = av_clip(offset, 0, g->buffer_end - g->buffer_start); + g->buffer = g->buffer_start + offset; + break; + default: + return AVERROR(EINVAL); + } + return bytestream2_tell(g); } static av_always_inline int bytestream2_seek_p(PutByteContext *p, - int offset, - int whence) { - 
p->eof = 0; - switch(whence) { - case SEEK_CUR: - if(p->buffer_end - p->buffer < offset) - p->eof = 1; - offset = av_clip(offset, -(p->buffer - p->buffer_start), - p->buffer_end - p->buffer); - p->buffer += offset; - break; - case SEEK_END: - if(offset > 0) - p->eof = 1; - offset = av_clip(offset, -(p->buffer_end - p->buffer_start), 0); - p->buffer = p->buffer_end + offset; - break; - case SEEK_SET: - if(p->buffer_end - p->buffer_start < offset) - p->eof = 1; - offset = av_clip(offset, 0, p->buffer_end - p->buffer_start); - p->buffer = p->buffer_start + offset; - break; - default: - return AVERROR(EINVAL); - } - return bytestream2_tell_p(p); + int offset, + int whence) +{ + p->eof = 0; + switch (whence) { + case SEEK_CUR: + if (p->buffer_end - p->buffer < offset) + p->eof = 1; + offset = av_clip(offset, -(p->buffer - p->buffer_start), + p->buffer_end - p->buffer); + p->buffer += offset; + break; + case SEEK_END: + if (offset > 0) + p->eof = 1; + offset = av_clip(offset, -(p->buffer_end - p->buffer_start), 0); + p->buffer = p->buffer_end + offset; + break; + case SEEK_SET: + if (p->buffer_end - p->buffer_start < offset) + p->eof = 1; + offset = av_clip(offset, 0, p->buffer_end - p->buffer_start); + p->buffer = p->buffer_start + offset; + break; + default: + return AVERROR(EINVAL); + } + return bytestream2_tell_p(p); } static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, - uint8_t *dst, - unsigned int size) { - int size2 = FFMIN(g->buffer_end - g->buffer, size); - memcpy(dst, g->buffer, size2); - g->buffer += size2; - return size2; + uint8_t *dst, + unsigned int size) +{ + int size2 = FFMIN(g->buffer_end - g->buffer, size); + memcpy(dst, g->buffer, size2); + g->buffer += size2; + return size2; } static av_always_inline unsigned int bytestream2_get_bufferu(GetByteContext *g, - uint8_t *dst, - unsigned int size) { - memcpy(dst, g->buffer, size); - g->buffer += size; - return size; + uint8_t *dst, + unsigned int size) +{ + memcpy(dst, 
g->buffer, size); + g->buffer += size; + return size; } static av_always_inline unsigned int bytestream2_put_buffer(PutByteContext *p, - const uint8_t *src, - unsigned int size) { - int size2; - if(p->eof) - return 0; - size2 = FFMIN(p->buffer_end - p->buffer, size); - if(size2 != size) - p->eof = 1; - memcpy(p->buffer, src, size2); - p->buffer += size2; - return size2; + const uint8_t *src, + unsigned int size) +{ + int size2; + if (p->eof) + return 0; + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + memcpy(p->buffer, src, size2); + p->buffer += size2; + return size2; } static av_always_inline unsigned int bytestream2_put_bufferu(PutByteContext *p, - const uint8_t *src, - unsigned int size) { - memcpy(p->buffer, src, size); - p->buffer += size; - return size; + const uint8_t *src, + unsigned int size) +{ + memcpy(p->buffer, src, size); + p->buffer += size; + return size; } static av_always_inline void bytestream2_set_buffer(PutByteContext *p, - const uint8_t c, - unsigned int size) { - int size2; - if(p->eof) - return; - size2 = FFMIN(p->buffer_end - p->buffer, size); - if(size2 != size) - p->eof = 1; - memset(p->buffer, c, size2); - p->buffer += size2; + const uint8_t c, + unsigned int size) +{ + int size2; + if (p->eof) + return; + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + memset(p->buffer, c, size2); + p->buffer += size2; } static av_always_inline void bytestream2_set_bufferu(PutByteContext *p, - const uint8_t c, - unsigned int size) { - memset(p->buffer, c, size); - p->buffer += size; + const uint8_t c, + unsigned int size) +{ + memset(p->buffer, c, size); + p->buffer += size; } -static av_always_inline unsigned int bytestream2_get_eof(PutByteContext *p) { - return p->eof; +static av_always_inline unsigned int bytestream2_get_eof(PutByteContext *p) +{ + return p->eof; } static av_always_inline unsigned int bytestream2_copy_bufferu(PutByteContext *p, - GetByteContext *g, - unsigned 
int size) { - memcpy(p->buffer, g->buffer, size); - p->buffer += size; - g->buffer += size; - return size; + GetByteContext *g, + unsigned int size) +{ + memcpy(p->buffer, g->buffer, size); + p->buffer += size; + g->buffer += size; + return size; } static av_always_inline unsigned int bytestream2_copy_buffer(PutByteContext *p, - GetByteContext *g, - unsigned int size) { - int size2; - - if(p->eof) - return 0; - size = FFMIN(g->buffer_end - g->buffer, size); - size2 = FFMIN(p->buffer_end - p->buffer, size); - if(size2 != size) - p->eof = 1; - - return bytestream2_copy_bufferu(p, g, size2); + GetByteContext *g, + unsigned int size) +{ + int size2; + + if (p->eof) + return 0; + size = FFMIN(g->buffer_end - g->buffer, size); + size2 = FFMIN(p->buffer_end - p->buffer, size); + if (size2 != size) + p->eof = 1; + + return bytestream2_copy_bufferu(p, g, size2); } static av_always_inline unsigned int bytestream_get_buffer(const uint8_t **b, - uint8_t *dst, - unsigned int size) { - memcpy(dst, *b, size); - (*b) += size; - return size; + uint8_t *dst, + unsigned int size) +{ + memcpy(dst, *b, size); + (*b) += size; + return size; } static av_always_inline void bytestream_put_buffer(uint8_t **b, - const uint8_t *src, - unsigned int size) { - memcpy(*b, src, size); - (*b) += size; + const uint8_t *src, + unsigned int size) +{ + memcpy(*b, src, size); + (*b) += size; } #endif /* AVCODEC_BYTESTREAM_H */ diff --git a/third-party/cbs/cbs.c b/third-party/cbs/cbs.c index 456cac20b03..b067bce21c3 100644 --- a/third-party/cbs/cbs.c +++ b/third-party/cbs/cbs.c @@ -18,1033 +18,1070 @@ #include -#include -#include -#include -#include +// [manual] Changed include path +#include "cbs/config.h" -#include +#include "libavutil/avassert.h" +#include "libavutil/buffer.h" +#include "libavutil/common.h" +#include "libavutil/opt.h" +#include "avcodec.h" +// [manual] Changed include path #include "cbs/cbs.h" #include "cbs_internal.h" -#include "get_bits.h" - static const CodedBitstreamType *const 
cbs_type_table[] = { #if CONFIG_CBS_AV1 - &ff_cbs_type_av1, + &ff_cbs_type_av1, #endif #if CONFIG_CBS_H264 - &ff_cbs_type_h264, + &ff_cbs_type_h264, #endif #if CONFIG_CBS_H265 - &ff_cbs_type_h265, + &ff_cbs_type_h265, #endif #if CONFIG_CBS_JPEG - &ff_cbs_type_jpeg, + &ff_cbs_type_jpeg, #endif #if CONFIG_CBS_MPEG2 - &ff_cbs_type_mpeg2, + &ff_cbs_type_mpeg2, #endif #if CONFIG_CBS_VP9 - &ff_cbs_type_vp9, + &ff_cbs_type_vp9, #endif }; const enum AVCodecID ff_cbs_all_codec_ids[] = { #if CONFIG_CBS_AV1 - AV_CODEC_ID_AV1, + AV_CODEC_ID_AV1, #endif #if CONFIG_CBS_H264 - AV_CODEC_ID_H264, + AV_CODEC_ID_H264, #endif #if CONFIG_CBS_H265 - AV_CODEC_ID_H265, + AV_CODEC_ID_H265, #endif #if CONFIG_CBS_JPEG - AV_CODEC_ID_MJPEG, + AV_CODEC_ID_MJPEG, #endif #if CONFIG_CBS_MPEG2 - AV_CODEC_ID_MPEG2VIDEO, + AV_CODEC_ID_MPEG2VIDEO, #endif #if CONFIG_CBS_VP9 - AV_CODEC_ID_VP9, + AV_CODEC_ID_VP9, #endif - AV_CODEC_ID_NONE + AV_CODEC_ID_NONE }; -int ff_cbs_init(CodedBitstreamContext **ctx_ptr, - enum AVCodecID codec_id, void *log_ctx) { - CodedBitstreamContext *ctx; - const CodedBitstreamType *type; - int i; - - type = NULL; - for(i = 0; i < FF_ARRAY_ELEMS(cbs_type_table); i++) { - if(cbs_type_table[i]->codec_id == codec_id) { - type = cbs_type_table[i]; - break; - } - } - if(!type) - return AVERROR(EINVAL); - - ctx = av_mallocz(sizeof(*ctx)); - if(!ctx) - return AVERROR(ENOMEM); - - ctx->log_ctx = log_ctx; - ctx->codec = type; /* Must be before any error */ - - if(type->priv_data_size) { - ctx->priv_data = av_mallocz(ctx->codec->priv_data_size); - if(!ctx->priv_data) { - av_freep(&ctx); - return AVERROR(ENOMEM); +av_cold int ff_cbs_init(CodedBitstreamContext **ctx_ptr, + enum AVCodecID codec_id, void *log_ctx) +{ + CodedBitstreamContext *ctx; + const CodedBitstreamType *type; + int i; + + type = NULL; + for (i = 0; i < FF_ARRAY_ELEMS(cbs_type_table); i++) { + if (cbs_type_table[i]->codec_id == codec_id) { + type = cbs_type_table[i]; + break; + } } - if(type->priv_class) { - *(const 
AVClass **)ctx->priv_data = type->priv_class; - av_opt_set_defaults(ctx->priv_data); + if (!type) + return AVERROR(EINVAL); + + ctx = av_mallocz(sizeof(*ctx)); + if (!ctx) + return AVERROR(ENOMEM); + + ctx->log_ctx = log_ctx; + ctx->codec = type; /* Must be before any error */ + + if (type->priv_data_size) { + ctx->priv_data = av_mallocz(ctx->codec->priv_data_size); + if (!ctx->priv_data) { + av_freep(&ctx); + return AVERROR(ENOMEM); + } + if (type->priv_class) { + *(const AVClass **)ctx->priv_data = type->priv_class; + av_opt_set_defaults(ctx->priv_data); + } } - } - ctx->decompose_unit_types = NULL; + ctx->decompose_unit_types = NULL; - ctx->trace_enable = 0; - ctx->trace_level = AV_LOG_TRACE; + ctx->trace_enable = 0; + ctx->trace_level = AV_LOG_TRACE; - *ctx_ptr = ctx; - return 0; + *ctx_ptr = ctx; + return 0; } -void ff_cbs_flush(CodedBitstreamContext *ctx) { - if(ctx->codec->flush) - ctx->codec->flush(ctx); +av_cold void ff_cbs_flush(CodedBitstreamContext *ctx) +{ + if (ctx->codec->flush) + ctx->codec->flush(ctx); } -void ff_cbs_close(CodedBitstreamContext **ctx_ptr) { - CodedBitstreamContext *ctx = *ctx_ptr; +av_cold void ff_cbs_close(CodedBitstreamContext **ctx_ptr) +{ + CodedBitstreamContext *ctx = *ctx_ptr; - if(!ctx) - return; + if (!ctx) + return; - if(ctx->codec->close) - ctx->codec->close(ctx); + if (ctx->codec->close) + ctx->codec->close(ctx); - av_freep(&ctx->write_buffer); + av_freep(&ctx->write_buffer); - if(ctx->codec->priv_class && ctx->priv_data) - av_opt_free(ctx->priv_data); + if (ctx->codec->priv_class && ctx->priv_data) + av_opt_free(ctx->priv_data); - av_freep(&ctx->priv_data); - av_freep(ctx_ptr); + av_freep(&ctx->priv_data); + av_freep(ctx_ptr); } -static void cbs_unit_uninit(CodedBitstreamUnit *unit) { - av_buffer_unref(&unit->content_ref); - unit->content = NULL; +static void cbs_unit_uninit(CodedBitstreamUnit *unit) +{ + av_buffer_unref(&unit->content_ref); + unit->content = NULL; - av_buffer_unref(&unit->data_ref); - unit->data = 
NULL; - unit->data_size = 0; - unit->data_bit_padding = 0; + av_buffer_unref(&unit->data_ref); + unit->data = NULL; + unit->data_size = 0; + unit->data_bit_padding = 0; } -void ff_cbs_fragment_reset(CodedBitstreamFragment *frag) { - int i; +void ff_cbs_fragment_reset(CodedBitstreamFragment *frag) +{ + int i; - for(i = 0; i < frag->nb_units; i++) - cbs_unit_uninit(&frag->units[i]); - frag->nb_units = 0; + for (i = 0; i < frag->nb_units; i++) + cbs_unit_uninit(&frag->units[i]); + frag->nb_units = 0; - av_buffer_unref(&frag->data_ref); - frag->data = NULL; - frag->data_size = 0; - frag->data_bit_padding = 0; + av_buffer_unref(&frag->data_ref); + frag->data = NULL; + frag->data_size = 0; + frag->data_bit_padding = 0; } -void ff_cbs_fragment_free(CodedBitstreamFragment *frag) { - ff_cbs_fragment_reset(frag); +av_cold void ff_cbs_fragment_free(CodedBitstreamFragment *frag) +{ + ff_cbs_fragment_reset(frag); - av_freep(&frag->units); - frag->nb_units_allocated = 0; + av_freep(&frag->units); + frag->nb_units_allocated = 0; } static int cbs_read_fragment_content(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - int err, i, j; - - for(i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - if(ctx->decompose_unit_types) { - for(j = 0; j < ctx->nb_decompose_unit_types; j++) { - if(ctx->decompose_unit_types[j] == unit->type) - break; - } - if(j >= ctx->nb_decompose_unit_types) - continue; + CodedBitstreamFragment *frag) +{ + int err, i, j; + + for (i = 0; i < frag->nb_units; i++) { + CodedBitstreamUnit *unit = &frag->units[i]; + + if (ctx->decompose_unit_types) { + for (j = 0; j < ctx->nb_decompose_unit_types; j++) { + if (ctx->decompose_unit_types[j] == unit->type) + break; + } + if (j >= ctx->nb_decompose_unit_types) + continue; + } + + av_buffer_unref(&unit->content_ref); + unit->content = NULL; + + av_assert0(unit->data && unit->data_ref); + + err = ctx->codec->read_unit(ctx, unit); + if (err == AVERROR(ENOSYS)) { + 
av_log(ctx->log_ctx, AV_LOG_VERBOSE, + "Decomposition unimplemented for unit %d " + "(type %"PRIu32").\n", i, unit->type); + } else if (err == AVERROR(EAGAIN)) { + av_log(ctx->log_ctx, AV_LOG_VERBOSE, + "Skipping decomposition of unit %d " + "(type %"PRIu32").\n", i, unit->type); + av_buffer_unref(&unit->content_ref); + unit->content = NULL; + } else if (err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to read unit %d " + "(type %"PRIu32").\n", i, unit->type); + return err; + } } - av_buffer_unref(&unit->content_ref); - unit->content = NULL; - - av_assert0(unit->data && unit->data_ref); - - err = ctx->codec->read_unit(ctx, unit); - if(err == AVERROR(ENOSYS)) { - av_log(ctx->log_ctx, AV_LOG_VERBOSE, - "Decomposition unimplemented for unit %d " - "(type %" PRIu32 ").\n", - i, unit->type); - } - else if(err == AVERROR(EAGAIN)) { - av_log(ctx->log_ctx, AV_LOG_VERBOSE, - "Skipping decomposition of unit %d " - "(type %" PRIu32 ").\n", - i, unit->type); - av_buffer_unref(&unit->content_ref); - unit->content = NULL; - } - else if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to read unit %d " - "(type %" PRIu32 ").\n", - i, unit->type); - return err; - } - } - - return 0; + return 0; } static int cbs_fill_fragment_data(CodedBitstreamFragment *frag, - const uint8_t *data, size_t size) { - av_assert0(!frag->data && !frag->data_ref); + const uint8_t *data, size_t size) +{ + av_assert0(!frag->data && !frag->data_ref); - frag->data_ref = - av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!frag->data_ref) - return AVERROR(ENOMEM); + frag->data_ref = + av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!frag->data_ref) + return AVERROR(ENOMEM); - frag->data = frag->data_ref->data; - frag->data_size = size; + frag->data = frag->data_ref->data; + frag->data_size = size; - memcpy(frag->data, data, size); - memset(frag->data + size, 0, - AV_INPUT_BUFFER_PADDING_SIZE); + memcpy(frag->data, data, size); + memset(frag->data + size, 0, + 
AV_INPUT_BUFFER_PADDING_SIZE); - return 0; + return 0; } static int cbs_read_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - AVBufferRef *buf, - const uint8_t *data, size_t size, - int header) { - int err; - - if(buf) { - frag->data_ref = av_buffer_ref(buf); - if(!frag->data_ref) - return AVERROR(ENOMEM); - - frag->data = (uint8_t *)data; - frag->data_size = size; - } - else { - err = cbs_fill_fragment_data(frag, data, size); - if(err < 0) - return err; - } - - err = ctx->codec->split_fragment(ctx, frag, header); - if(err < 0) - return err; + CodedBitstreamFragment *frag, + AVBufferRef *buf, + const uint8_t *data, size_t size, + int header) +{ + int err; + + if (buf) { + frag->data_ref = av_buffer_ref(buf); + if (!frag->data_ref) + return AVERROR(ENOMEM); + + frag->data = (uint8_t *)data; + frag->data_size = size; + + } else { + err = cbs_fill_fragment_data(frag, data, size); + if (err < 0) + return err; + } + + err = ctx->codec->split_fragment(ctx, frag, header); + if (err < 0) + return err; - return cbs_read_fragment_content(ctx, frag); + return cbs_read_fragment_content(ctx, frag); } int ff_cbs_read_extradata(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecParameters *par) { - return cbs_read_data(ctx, frag, NULL, - par->extradata, - par->extradata_size, 1); + CodedBitstreamFragment *frag, + const AVCodecParameters *par) +{ + return cbs_read_data(ctx, frag, NULL, + par->extradata, + par->extradata_size, 1); } int ff_cbs_read_extradata_from_codec(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecContext *avctx) { - return cbs_read_data(ctx, frag, NULL, - avctx->extradata, - avctx->extradata_size, 1); + CodedBitstreamFragment *frag, + const AVCodecContext *avctx) +{ + return cbs_read_data(ctx, frag, NULL, + avctx->extradata, + avctx->extradata_size, 1); } int ff_cbs_read_packet(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVPacket *pkt) { - return cbs_read_data(ctx, frag, 
pkt->buf, - pkt->data, pkt->size, 0); + CodedBitstreamFragment *frag, + const AVPacket *pkt) +{ + return cbs_read_data(ctx, frag, pkt->buf, + pkt->data, pkt->size, 0); +} + +int ff_cbs_read_packet_side_data(CodedBitstreamContext *ctx, + CodedBitstreamFragment *frag, + const AVPacket *pkt) +{ + size_t side_data_size; + const uint8_t *side_data = + av_packet_get_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, + &side_data_size); + + return cbs_read_data(ctx, frag, NULL, + side_data, side_data_size, 1); } int ff_cbs_read(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const uint8_t *data, size_t size) { - return cbs_read_data(ctx, frag, NULL, - data, size, 0); + CodedBitstreamFragment *frag, + const uint8_t *data, size_t size) +{ + return cbs_read_data(ctx, frag, NULL, + data, size, 0); +} + +/** + * Allocate a new internal data buffer of the given size in the unit. + * + * The data buffer will have input padding. + */ +static int cbs_alloc_unit_data(CodedBitstreamUnit *unit, + size_t size) +{ + av_assert0(!unit->data && !unit->data_ref); + + unit->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!unit->data_ref) + return AVERROR(ENOMEM); + + unit->data = unit->data_ref->data; + unit->data_size = size; + + memset(unit->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + + return 0; } static int cbs_write_unit_data(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - PutBitContext pbc; - int ret; - - if(!ctx->write_buffer) { - // Initial write buffer size is 1MB. - ctx->write_buffer_size = 1024 * 1024; - - reallocate_and_try_again: - ret = av_reallocp(&ctx->write_buffer, ctx->write_buffer_size); - if(ret < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Unable to allocate a " - "sufficiently large write buffer (last attempt " - "%zu bytes).\n", - ctx->write_buffer_size); - return ret; + CodedBitstreamUnit *unit) +{ + PutBitContext pbc; + int ret; + + if (!ctx->write_buffer) { + // Initial write buffer size is 1MB. 
+ ctx->write_buffer_size = 1024 * 1024; + + reallocate_and_try_again: + ret = av_reallocp(&ctx->write_buffer, ctx->write_buffer_size); + if (ret < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Unable to allocate a " + "sufficiently large write buffer (last attempt " + "%zu bytes).\n", ctx->write_buffer_size); + return ret; + } } - } - - init_put_bits(&pbc, ctx->write_buffer, ctx->write_buffer_size); - ret = ctx->codec->write_unit(ctx, unit, &pbc); - if(ret < 0) { - if(ret == AVERROR(ENOSPC)) { - // Overflow. - if(ctx->write_buffer_size == INT_MAX / 8) - return AVERROR(ENOMEM); - ctx->write_buffer_size = FFMIN(2 * ctx->write_buffer_size, INT_MAX / 8); - goto reallocate_and_try_again; + init_put_bits(&pbc, ctx->write_buffer, ctx->write_buffer_size); + + ret = ctx->codec->write_unit(ctx, unit, &pbc); + if (ret < 0) { + if (ret == AVERROR(ENOSPC)) { + // Overflow. + if (ctx->write_buffer_size == INT_MAX / 8) + return AVERROR(ENOMEM); + ctx->write_buffer_size = FFMIN(2 * ctx->write_buffer_size, INT_MAX / 8); + goto reallocate_and_try_again; + } + // Write failed for some other reason. + return ret; } - // Write failed for some other reason. - return ret; - } - // Overflow but we didn't notice. - av_assert0(put_bits_count(&pbc) <= 8 * ctx->write_buffer_size); + // Overflow but we didn't notice. 
+ av_assert0(put_bits_count(&pbc) <= 8 * ctx->write_buffer_size); - if(put_bits_count(&pbc) % 8) - unit->data_bit_padding = 8 - put_bits_count(&pbc) % 8; - else - unit->data_bit_padding = 0; + if (put_bits_count(&pbc) % 8) + unit->data_bit_padding = 8 - put_bits_count(&pbc) % 8; + else + unit->data_bit_padding = 0; - flush_put_bits(&pbc); + flush_put_bits(&pbc); - ret = ff_cbs_alloc_unit_data(unit, put_bytes_output(&pbc)); - if(ret < 0) - return ret; + ret = cbs_alloc_unit_data(unit, put_bytes_output(&pbc)); + if (ret < 0) + return ret; - memcpy(unit->data, ctx->write_buffer, unit->data_size); + memcpy(unit->data, ctx->write_buffer, unit->data_size); - return 0; + return 0; } int ff_cbs_write_fragment_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - int err, i; - - for(i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - if(!unit->content) - continue; - - av_buffer_unref(&unit->data_ref); - unit->data = NULL; - - err = cbs_write_unit_data(ctx, unit); - if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to write unit %d " - "(type %" PRIu32 ").\n", - i, unit->type); - return err; + CodedBitstreamFragment *frag) +{ + int err, i; + + for (i = 0; i < frag->nb_units; i++) { + CodedBitstreamUnit *unit = &frag->units[i]; + + if (!unit->content) + continue; + + av_buffer_unref(&unit->data_ref); + unit->data = NULL; + + err = cbs_write_unit_data(ctx, unit); + if (err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to write unit %d " + "(type %"PRIu32").\n", i, unit->type); + return err; + } + av_assert0(unit->data && unit->data_ref); } - av_assert0(unit->data && unit->data_ref); - } - av_buffer_unref(&frag->data_ref); - frag->data = NULL; + av_buffer_unref(&frag->data_ref); + frag->data = NULL; - err = ctx->codec->assemble_fragment(ctx, frag); - if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to assemble fragment.\n"); - return err; - } - av_assert0(frag->data && frag->data_ref); + err = 
ctx->codec->assemble_fragment(ctx, frag); + if (err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to assemble fragment.\n"); + return err; + } + av_assert0(frag->data && frag->data_ref); - return 0; + return 0; } int ff_cbs_write_extradata(CodedBitstreamContext *ctx, - AVCodecParameters *par, - CodedBitstreamFragment *frag) { - int err; + AVCodecParameters *par, + CodedBitstreamFragment *frag) +{ + int err; - err = ff_cbs_write_fragment_data(ctx, frag); - if(err < 0) - return err; + err = ff_cbs_write_fragment_data(ctx, frag); + if (err < 0) + return err; - av_freep(&par->extradata); + av_freep(&par->extradata); - par->extradata = av_malloc(frag->data_size + - AV_INPUT_BUFFER_PADDING_SIZE); - if(!par->extradata) - return AVERROR(ENOMEM); + par->extradata = av_malloc(frag->data_size + + AV_INPUT_BUFFER_PADDING_SIZE); + if (!par->extradata) + return AVERROR(ENOMEM); - memcpy(par->extradata, frag->data, frag->data_size); - memset(par->extradata + frag->data_size, 0, - AV_INPUT_BUFFER_PADDING_SIZE); - par->extradata_size = frag->data_size; + memcpy(par->extradata, frag->data, frag->data_size); + memset(par->extradata + frag->data_size, 0, + AV_INPUT_BUFFER_PADDING_SIZE); + par->extradata_size = frag->data_size; - return 0; + return 0; } int ff_cbs_write_packet(CodedBitstreamContext *ctx, - AVPacket *pkt, - CodedBitstreamFragment *frag) { - AVBufferRef *buf; - int err; - - err = ff_cbs_write_fragment_data(ctx, frag); - if(err < 0) - return err; - - buf = av_buffer_ref(frag->data_ref); - if(!buf) - return AVERROR(ENOMEM); + AVPacket *pkt, + CodedBitstreamFragment *frag) +{ + AVBufferRef *buf; + int err; + + err = ff_cbs_write_fragment_data(ctx, frag); + if (err < 0) + return err; + + buf = av_buffer_ref(frag->data_ref); + if (!buf) + return AVERROR(ENOMEM); - av_buffer_unref(&pkt->buf); + av_buffer_unref(&pkt->buf); - pkt->buf = buf; - pkt->data = frag->data; - pkt->size = frag->data_size; + pkt->buf = buf; + pkt->data = frag->data; + pkt->size = frag->data_size; - 
return 0; + return 0; } void ff_cbs_trace_header(CodedBitstreamContext *ctx, - const char *name) { - if(!ctx->trace_enable) - return; + const char *name) +{ + if (!ctx->trace_enable) + return; - av_log(ctx->log_ctx, ctx->trace_level, "%s\n", name); + av_log(ctx->log_ctx, ctx->trace_level, "%s\n", name); } void ff_cbs_trace_syntax_element(CodedBitstreamContext *ctx, int position, - const char *str, const int *subscripts, - const char *bits, int64_t value) { - char name[256]; - size_t name_len, bits_len; - int pad, subs, i, j, k, n; - - if(!ctx->trace_enable) - return; - - av_assert0(value >= INT_MIN && value <= UINT32_MAX); - - subs = subscripts ? subscripts[0] : 0; - n = 0; - for(i = j = 0; str[i];) { - if(str[i] == '[') { - if(n < subs) { - ++n; - k = snprintf(name + j, sizeof(name) - j, "[%d", subscripts[n]); - av_assert0(k > 0 && j + k < sizeof(name)); - j += k; - for(++i; str[i] && str[i] != ']'; i++) - ; - av_assert0(str[i] == ']'); - } - else { - while(str[i] && str[i] != ']') - name[j++] = str[i++]; - av_assert0(str[i] == ']'); - } + const char *str, const int *subscripts, + const char *bits, int64_t value) +{ + char name[256]; + size_t name_len, bits_len; + int pad, subs, i, j, k, n; + + if (!ctx->trace_enable) + return; + + av_assert0(value >= INT_MIN && value <= UINT32_MAX); + + subs = subscripts ? 
subscripts[0] : 0; + n = 0; + for (i = j = 0; str[i];) { + if (str[i] == '[') { + if (n < subs) { + ++n; + k = snprintf(name + j, sizeof(name) - j, "[%d", subscripts[n]); + av_assert0(k > 0 && j + k < sizeof(name)); + j += k; + for (++i; str[i] && str[i] != ']'; i++); + av_assert0(str[i] == ']'); + } else { + while (str[i] && str[i] != ']') + name[j++] = str[i++]; + av_assert0(str[i] == ']'); + } + } else { + av_assert0(j + 1 < sizeof(name)); + name[j++] = str[i++]; + } } - else { - av_assert0(j + 1 < sizeof(name)); - name[j++] = str[i++]; - } - } - av_assert0(j + 1 < sizeof(name)); - name[j] = 0; - av_assert0(n == subs); + av_assert0(j + 1 < sizeof(name)); + name[j] = 0; + av_assert0(n == subs); - name_len = strlen(name); - bits_len = strlen(bits); + name_len = strlen(name); + bits_len = strlen(bits); - if(name_len + bits_len > 60) - pad = bits_len + 2; - else - pad = 61 - name_len; + if (name_len + bits_len > 60) + pad = bits_len + 2; + else + pad = 61 - name_len; - av_log(ctx->log_ctx, ctx->trace_level, "%-10d %s%*s = %" PRId64 "\n", - position, name, pad, bits, value); + av_log(ctx->log_ctx, ctx->trace_level, "%-10d %s%*s = %"PRId64"\n", + position, name, pad, bits, value); } int ff_cbs_read_unsigned(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, uint32_t *write_to, - uint32_t range_min, uint32_t range_max) { - uint32_t value; - int position; + int width, const char *name, + const int *subscripts, uint32_t *write_to, + uint32_t range_min, uint32_t range_max) +{ + uint32_t value; + int position; + + av_assert0(width > 0 && width <= 32); + + if (get_bits_left(gbc) < width) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } - av_assert0(width > 0 && width <= 32); + if (ctx->trace_enable) + position = get_bits_count(gbc); - if(get_bits_left(gbc) < width) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid value at " - "%s: bitstream 
ended.\n", - name); - return AVERROR_INVALIDDATA; - } + value = get_bits_long(gbc, width); - if(ctx->trace_enable) - position = get_bits_count(gbc); + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < width; i++) + bits[i] = value >> (width - i - 1) & 1 ? '1' : '0'; + bits[i] = 0; - value = get_bits_long(gbc, width); + ff_cbs_trace_syntax_element(ctx, position, name, subscripts, + bits, value); + } - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < width; i++) - bits[i] = value >> (width - i - 1) & 1 ? '1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + *write_to = value; + return 0; } int ff_cbs_write_unsigned(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, uint32_t value, - uint32_t range_min, uint32_t range_max) { - av_assert0(width > 0 && width <= 32); - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if(put_bits_left(pbc) < width) - return AVERROR(ENOSPC); - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < width; i++) - bits[i] = value >> (width - i - 1) & 1 ? 
'1' : '0'; - bits[i] = 0; + int width, const char *name, + const int *subscripts, uint32_t value, + uint32_t range_min, uint32_t range_max) +{ + av_assert0(width > 0 && width <= 32); + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + if (put_bits_left(pbc) < width) + return AVERROR(ENOSPC); - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < width; i++) + bits[i] = value >> (width - i - 1) & 1 ? '1' : '0'; + bits[i] = 0; - if(width < 32) - put_bits(pbc, width, value); - else - put_bits32(pbc, value); + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, subscripts, bits, value); + } + + if (width < 32) + put_bits(pbc, width, value); + else + put_bits32(pbc, value); - return 0; + return 0; } int ff_cbs_read_signed(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, int32_t *write_to, - int32_t range_min, int32_t range_max) { - int32_t value; - int position; + int width, const char *name, + const int *subscripts, int32_t *write_to, + int32_t range_min, int32_t range_max) +{ + int32_t value; + int position; + + av_assert0(width > 0 && width <= 32); + + if (get_bits_left(gbc) < width) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } - av_assert0(width > 0 && width <= 32); + if (ctx->trace_enable) + position = get_bits_count(gbc); - if(get_bits_left(gbc) < width) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid value at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; - } + value = get_sbits_long(gbc, width); - if(ctx->trace_enable) - position = get_bits_count(gbc); + if (ctx->trace_enable) { + char bits[33]; + int i; 
+ for (i = 0; i < width; i++) + bits[i] = value & (1U << (width - i - 1)) ? '1' : '0'; + bits[i] = 0; - value = get_sbits_long(gbc, width); + ff_cbs_trace_syntax_element(ctx, position, name, subscripts, + bits, value); + } - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < width; i++) - bits[i] = value & (1U << (width - i - 1)) ? '1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRId32 ", but must be in [%" PRId32 ",%" PRId32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + *write_to = value; + return 0; } int ff_cbs_write_signed(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, int32_t value, - int32_t range_min, int32_t range_max) { - av_assert0(width > 0 && width <= 32); - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRId32 ", but must be in [%" PRId32 ",%" PRId32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if(put_bits_left(pbc) < width) - return AVERROR(ENOSPC); - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < width; i++) - bits[i] = value & (1U << (width - i - 1)) ? 
'1' : '0'; - bits[i] = 0; + int width, const char *name, + const int *subscripts, int32_t value, + int32_t range_min, int32_t range_max) +{ + av_assert0(width > 0 && width <= 32); + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } + if (put_bits_left(pbc) < width) + return AVERROR(ENOSPC); - if(width < 32) - put_sbits(pbc, width, value); - else - put_bits32(pbc, value); + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < width; i++) + bits[i] = value & (1U << (width - i - 1)) ? '1' : '0'; + bits[i] = 0; - return 0; -} + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, subscripts, bits, value); + } + if (width < 32) + put_sbits(pbc, width, value); + else + put_bits32(pbc, value); -int ff_cbs_alloc_unit_content(CodedBitstreamUnit *unit, - size_t size, - void (*free)(void *opaque, uint8_t *data)) { - av_assert0(!unit->content && !unit->content_ref); - - unit->content = av_mallocz(size); - if(!unit->content) - return AVERROR(ENOMEM); - - unit->content_ref = av_buffer_create(unit->content, size, - free, NULL, 0); - if(!unit->content_ref) { - av_freep(&unit->content); - return AVERROR(ENOMEM); - } - - return 0; + return 0; } -int ff_cbs_alloc_unit_data(CodedBitstreamUnit *unit, - size_t size) { - av_assert0(!unit->data && !unit->data_ref); - unit->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!unit->data_ref) - return AVERROR(ENOMEM); +int ff_cbs_alloc_unit_content(CodedBitstreamUnit *unit, + size_t size, + void (*free)(void *opaque, uint8_t *data)) +{ + av_assert0(!unit->content && !unit->content_ref); - unit->data = unit->data_ref->data; - unit->data_size = size; + unit->content = av_mallocz(size); + if (!unit->content) + return 
AVERROR(ENOMEM); - memset(unit->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + unit->content_ref = av_buffer_create(unit->content, size, + free, NULL, 0); + if (!unit->content_ref) { + av_freep(&unit->content); + return AVERROR(ENOMEM); + } - return 0; + return 0; } static int cbs_insert_unit(CodedBitstreamFragment *frag, - int position) { - CodedBitstreamUnit *units; + int position) +{ + CodedBitstreamUnit *units; - if(frag->nb_units < frag->nb_units_allocated) { - units = frag->units; + if (frag->nb_units < frag->nb_units_allocated) { + units = frag->units; - if(position < frag->nb_units) - memmove(units + position + 1, units + position, - (frag->nb_units - position) * sizeof(*units)); - } - else { - units = av_malloc_array(frag->nb_units * 2 + 1, sizeof(*units)); - if(!units) - return AVERROR(ENOMEM); + if (position < frag->nb_units) + memmove(units + position + 1, units + position, + (frag->nb_units - position) * sizeof(*units)); + } else { + units = av_malloc_array(frag->nb_units*2 + 1, sizeof(*units)); + if (!units) + return AVERROR(ENOMEM); - frag->nb_units_allocated = 2 * frag->nb_units_allocated + 1; + frag->nb_units_allocated = 2*frag->nb_units_allocated + 1; - if(position > 0) - memcpy(units, frag->units, position * sizeof(*units)); + if (position > 0) + memcpy(units, frag->units, position * sizeof(*units)); - if(position < frag->nb_units) - memcpy(units + position + 1, frag->units + position, - (frag->nb_units - position) * sizeof(*units)); - } + if (position < frag->nb_units) + memcpy(units + position + 1, frag->units + position, + (frag->nb_units - position) * sizeof(*units)); + } - memset(units + position, 0, sizeof(*units)); + memset(units + position, 0, sizeof(*units)); - if(units != frag->units) { - av_free(frag->units); - frag->units = units; - } + if (units != frag->units) { + av_free(frag->units); + frag->units = units; + } - ++frag->nb_units; + ++frag->nb_units; - return 0; + return 0; } int ff_cbs_insert_unit_content(CodedBitstreamFragment 
*frag, - int position, - CodedBitstreamUnitType type, - void *content, - AVBufferRef *content_buf) { - CodedBitstreamUnit *unit; - AVBufferRef *content_ref; - int err; - - if(position == -1) - position = frag->nb_units; - av_assert0(position >= 0 && position <= frag->nb_units); - - if(content_buf) { - content_ref = av_buffer_ref(content_buf); - if(!content_ref) - return AVERROR(ENOMEM); - } - else { - content_ref = NULL; - } - - err = cbs_insert_unit(frag, position); - if(err < 0) { - av_buffer_unref(&content_ref); - return err; - } + int position, + CodedBitstreamUnitType type, + void *content, + AVBufferRef *content_buf) +{ + CodedBitstreamUnit *unit; + AVBufferRef *content_ref; + int err; + + if (position == -1) + position = frag->nb_units; + av_assert0(position >= 0 && position <= frag->nb_units); + + if (content_buf) { + content_ref = av_buffer_ref(content_buf); + if (!content_ref) + return AVERROR(ENOMEM); + } else { + content_ref = NULL; + } + + err = cbs_insert_unit(frag, position); + if (err < 0) { + av_buffer_unref(&content_ref); + return err; + } - unit = &frag->units[position]; - unit->type = type; - unit->content = content; - unit->content_ref = content_ref; + unit = &frag->units[position]; + unit->type = type; + unit->content = content; + unit->content_ref = content_ref; - return 0; + return 0; } -int ff_cbs_insert_unit_data(CodedBitstreamFragment *frag, - int position, - CodedBitstreamUnitType type, - uint8_t *data, size_t data_size, - AVBufferRef *data_buf) { - CodedBitstreamUnit *unit; - AVBufferRef *data_ref; - int err; - - if(position == -1) - position = frag->nb_units; - av_assert0(position >= 0 && position <= frag->nb_units); - - if(data_buf) - data_ref = av_buffer_ref(data_buf); - else - data_ref = av_buffer_create(data, data_size, NULL, NULL, 0); - if(!data_ref) { - if(!data_buf) - av_free(data); - return AVERROR(ENOMEM); - } - - err = cbs_insert_unit(frag, position); - if(err < 0) { - av_buffer_unref(&data_ref); - return err; - } +static int 
cbs_insert_unit_data(CodedBitstreamFragment *frag, + CodedBitstreamUnitType type, + uint8_t *data, size_t data_size, + AVBufferRef *data_buf, + int position) +{ + CodedBitstreamUnit *unit; + AVBufferRef *data_ref; + int err; + + av_assert0(position >= 0 && position <= frag->nb_units); + + if (data_buf) + data_ref = av_buffer_ref(data_buf); + else + data_ref = av_buffer_create(data, data_size, NULL, NULL, 0); + if (!data_ref) { + if (!data_buf) + av_free(data); + return AVERROR(ENOMEM); + } - unit = &frag->units[position]; - unit->type = type; - unit->data = data; - unit->data_size = data_size; - unit->data_ref = data_ref; + err = cbs_insert_unit(frag, position); + if (err < 0) { + av_buffer_unref(&data_ref); + return err; + } + + unit = &frag->units[position]; + unit->type = type; + unit->data = data; + unit->data_size = data_size; + unit->data_ref = data_ref; + + return 0; +} - return 0; +int ff_cbs_append_unit_data(CodedBitstreamFragment *frag, + CodedBitstreamUnitType type, + uint8_t *data, size_t data_size, + AVBufferRef *data_buf) +{ + return cbs_insert_unit_data(frag, type, + data, data_size, data_buf, + frag->nb_units); } void ff_cbs_delete_unit(CodedBitstreamFragment *frag, - int position) { - av_assert0(0 <= position && position < frag->nb_units && "Unit to be deleted not in fragment."); + int position) +{ + av_assert0(0 <= position && position < frag->nb_units + && "Unit to be deleted not in fragment."); - cbs_unit_uninit(&frag->units[position]); + cbs_unit_uninit(&frag->units[position]); - --frag->nb_units; + --frag->nb_units; - if(frag->nb_units > 0) - memmove(frag->units + position, - frag->units + position + 1, - (frag->nb_units - position) * sizeof(*frag->units)); + if (frag->nb_units > 0) + memmove(frag->units + position, + frag->units + position + 1, + (frag->nb_units - position) * sizeof(*frag->units)); } -static void cbs_default_free_unit_content(void *opaque, uint8_t *data) { - const CodedBitstreamUnitTypeDescriptor *desc = opaque; - 
if(desc->content_type == CBS_CONTENT_TYPE_INTERNAL_REFS) { - int i; - for(i = 0; i < desc->nb_ref_offsets; i++) { - void **ptr = (void **)(data + desc->ref_offsets[i]); - av_buffer_unref((AVBufferRef **)(ptr + 1)); +static void cbs_default_free_unit_content(void *opaque, uint8_t *data) +{ + const CodedBitstreamUnitTypeDescriptor *desc = opaque; + if (desc->content_type == CBS_CONTENT_TYPE_INTERNAL_REFS) { + int i; + for (i = 0; i < desc->nb_ref_offsets; i++) { + void **ptr = (void**)(data + desc->ref_offsets[i]); + av_buffer_unref((AVBufferRef**)(ptr + 1)); + } } - } - av_free(data); + av_free(data); } static const CodedBitstreamUnitTypeDescriptor - * - cbs_find_unit_type_desc(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - const CodedBitstreamUnitTypeDescriptor *desc; - int i, j; - - if(!ctx->codec->unit_types) - return NULL; - - for(i = 0;; i++) { - desc = &ctx->codec->unit_types[i]; - if(desc->nb_unit_types == 0) - break; - if(desc->nb_unit_types == CBS_UNIT_TYPE_RANGE) { - if(unit->type >= desc->unit_type_range_start && - unit->type <= desc->unit_type_range_end) - return desc; + *cbs_find_unit_type_desc(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit) +{ + const CodedBitstreamUnitTypeDescriptor *desc; + int i, j; + + if (!ctx->codec->unit_types) + return NULL; + + for (i = 0;; i++) { + desc = &ctx->codec->unit_types[i]; + if (desc->nb_unit_types == 0) + break; + if (desc->nb_unit_types == CBS_UNIT_TYPE_RANGE) { + if (unit->type >= desc->unit_type_range_start && + unit->type <= desc->unit_type_range_end) + return desc; + } else { + for (j = 0; j < desc->nb_unit_types; j++) { + if (desc->unit_types[j] == unit->type) + return desc; + } + } } - else { - for(j = 0; j < desc->nb_unit_types; j++) { - if(desc->unit_types[j] == unit->type) - return desc; - } - } - } - return NULL; + return NULL; } int ff_cbs_alloc_unit_content2(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - const CodedBitstreamUnitTypeDescriptor *desc; + 
CodedBitstreamUnit *unit) +{ + const CodedBitstreamUnitTypeDescriptor *desc; - av_assert0(!unit->content && !unit->content_ref); + av_assert0(!unit->content && !unit->content_ref); - desc = cbs_find_unit_type_desc(ctx, unit); - if(!desc) - return AVERROR(ENOSYS); + desc = cbs_find_unit_type_desc(ctx, unit); + if (!desc) + return AVERROR(ENOSYS); - unit->content = av_mallocz(desc->content_size); - if(!unit->content) - return AVERROR(ENOMEM); + unit->content = av_mallocz(desc->content_size); + if (!unit->content) + return AVERROR(ENOMEM); - unit->content_ref = - av_buffer_create(unit->content, desc->content_size, - desc->content_free ? desc->content_free : cbs_default_free_unit_content, - (void *)desc, 0); - if(!unit->content_ref) { - av_freep(&unit->content); - return AVERROR(ENOMEM); - } + unit->content_ref = + av_buffer_create(unit->content, desc->content_size, + desc->content_free ? desc->content_free + : cbs_default_free_unit_content, + (void*)desc, 0); + if (!unit->content_ref) { + av_freep(&unit->content); + return AVERROR(ENOMEM); + } - return 0; + return 0; } static int cbs_clone_unit_content(AVBufferRef **clone_ref, - CodedBitstreamUnit *unit, - const CodedBitstreamUnitTypeDescriptor *desc) { - uint8_t *src, *copy; - uint8_t **src_ptr, **copy_ptr; - AVBufferRef **src_buf, **copy_buf; - int err, i; - - av_assert0(unit->content); - src = unit->content; - - copy = av_memdup(src, desc->content_size); - if(!copy) - return AVERROR(ENOMEM); - - for(i = 0; i < desc->nb_ref_offsets; i++) { - src_ptr = (uint8_t **)(src + desc->ref_offsets[i]); - src_buf = (AVBufferRef **)(src_ptr + 1); - copy_ptr = (uint8_t **)(copy + desc->ref_offsets[i]); - copy_buf = (AVBufferRef **)(copy_ptr + 1); - - if(!*src_ptr) { - av_assert0(!*src_buf); - continue; - } - if(!*src_buf) { - // We can't handle a non-refcounted pointer here - we don't - // have enough information to handle whatever structure lies - // at the other end of it. 
- err = AVERROR(EINVAL); - goto fail; - } - - // src_ptr is required to point somewhere inside src_buf. If it - // doesn't, there is a bug somewhere. - av_assert0(*src_ptr >= (*src_buf)->data && - *src_ptr < (*src_buf)->data + (*src_buf)->size); + CodedBitstreamUnit *unit, + const CodedBitstreamUnitTypeDescriptor *desc) +{ + uint8_t *src, *copy; + uint8_t **src_ptr, **copy_ptr; + AVBufferRef **src_buf, **copy_buf; + int err, i; + + av_assert0(unit->content); + src = unit->content; + + copy = av_memdup(src, desc->content_size); + if (!copy) + return AVERROR(ENOMEM); - *copy_buf = av_buffer_ref(*src_buf); - if(!*copy_buf) { - err = AVERROR(ENOMEM); - goto fail; + for (i = 0; i < desc->nb_ref_offsets; i++) { + src_ptr = (uint8_t**)(src + desc->ref_offsets[i]); + src_buf = (AVBufferRef**)(src_ptr + 1); + copy_ptr = (uint8_t**)(copy + desc->ref_offsets[i]); + copy_buf = (AVBufferRef**)(copy_ptr + 1); + + if (!*src_ptr) { + av_assert0(!*src_buf); + continue; + } + if (!*src_buf) { + // We can't handle a non-refcounted pointer here - we don't + // have enough information to handle whatever structure lies + // at the other end of it. + err = AVERROR(EINVAL); + goto fail; + } + + // src_ptr is required to point somewhere inside src_buf. If it + // doesn't, there is a bug somewhere. + av_assert0(*src_ptr >= (*src_buf)->data && + *src_ptr < (*src_buf)->data + (*src_buf)->size); + + *copy_buf = av_buffer_ref(*src_buf); + if (!*copy_buf) { + err = AVERROR(ENOMEM); + goto fail; + } + *copy_ptr = (*copy_buf)->data + (*src_ptr - (*src_buf)->data); } - *copy_ptr = (*copy_buf)->data + (*src_ptr - (*src_buf)->data); - } - *clone_ref = av_buffer_create(copy, desc->content_size, - desc->content_free ? desc->content_free : - cbs_default_free_unit_content, - (void *)desc, 0); - if(!*clone_ref) { - err = AVERROR(ENOMEM); - goto fail; - } + *clone_ref = av_buffer_create(copy, desc->content_size, + desc->content_free ? 
desc->content_free : + cbs_default_free_unit_content, + (void*)desc, 0); + if (!*clone_ref) { + err = AVERROR(ENOMEM); + goto fail; + } - return 0; + return 0; fail: - for(--i; i >= 0; i--) - av_buffer_unref((AVBufferRef **)(copy + desc->ref_offsets[i])); - av_freep(©); - *clone_ref = NULL; - return err; + for (--i; i >= 0; i--) + av_buffer_unref((AVBufferRef**)(copy + desc->ref_offsets[i])); + av_freep(©); + *clone_ref = NULL; + return err; } int ff_cbs_make_unit_refcounted(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - const CodedBitstreamUnitTypeDescriptor *desc; - AVBufferRef *ref; - int err; - - av_assert0(unit->content); - if(unit->content_ref) { - // Already refcounted, nothing to do. - return 0; - } - - desc = cbs_find_unit_type_desc(ctx, unit); - if(!desc) - return AVERROR(ENOSYS); - - switch(desc->content_type) { - case CBS_CONTENT_TYPE_POD: - ref = av_buffer_alloc(desc->content_size); - if(!ref) - return AVERROR(ENOMEM); - memcpy(ref->data, unit->content, desc->content_size); - err = 0; - break; - - case CBS_CONTENT_TYPE_INTERNAL_REFS: - err = cbs_clone_unit_content(&ref, unit, desc); - break; - - case CBS_CONTENT_TYPE_COMPLEX: - if(!desc->content_clone) - return AVERROR_PATCHWELCOME; - err = desc->content_clone(&ref, unit); - break; - - default: - av_assert0(0 && "Invalid content type."); - } - - if(err < 0) - return err; + CodedBitstreamUnit *unit) +{ + const CodedBitstreamUnitTypeDescriptor *desc; + AVBufferRef *ref; + int err; + + av_assert0(unit->content); + if (unit->content_ref) { + // Already refcounted, nothing to do. 
+ return 0; + } - unit->content_ref = ref; - unit->content = ref->data; - return 0; -} + desc = cbs_find_unit_type_desc(ctx, unit); + if (!desc) + return AVERROR(ENOSYS); + + switch (desc->content_type) { + case CBS_CONTENT_TYPE_POD: + ref = av_buffer_alloc(desc->content_size); + if (!ref) + return AVERROR(ENOMEM); + memcpy(ref->data, unit->content, desc->content_size); + err = 0; + break; + + case CBS_CONTENT_TYPE_INTERNAL_REFS: + err = cbs_clone_unit_content(&ref, unit, desc); + break; + + case CBS_CONTENT_TYPE_COMPLEX: + if (!desc->content_clone) + return AVERROR_PATCHWELCOME; + err = desc->content_clone(&ref, unit); + break; + + default: + av_assert0(0 && "Invalid content type."); + } -int ff_cbs_make_unit_writable(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - const CodedBitstreamUnitTypeDescriptor *desc; - AVBufferRef *ref; - int err; - - // This can only be applied to refcounted units. - err = ff_cbs_make_unit_refcounted(ctx, unit); - if(err < 0) - return err; - av_assert0(unit->content && unit->content_ref); + if (err < 0) + return err; - if(av_buffer_is_writable(unit->content_ref)) + unit->content_ref = ref; + unit->content = ref->data; return 0; +} - desc = cbs_find_unit_type_desc(ctx, unit); - if(!desc) - return AVERROR(ENOSYS); - - switch(desc->content_type) { - case CBS_CONTENT_TYPE_POD: - err = av_buffer_make_writable(&unit->content_ref); - break; - - case CBS_CONTENT_TYPE_INTERNAL_REFS: - err = cbs_clone_unit_content(&ref, unit, desc); - break; - - case CBS_CONTENT_TYPE_COMPLEX: - if(!desc->content_clone) - return AVERROR_PATCHWELCOME; - err = desc->content_clone(&ref, unit); - break; - - default: - av_assert0(0 && "Invalid content type."); - } - if(err < 0) - return err; +int ff_cbs_make_unit_writable(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit) +{ + const CodedBitstreamUnitTypeDescriptor *desc; + AVBufferRef *ref; + int err; + + // This can only be applied to refcounted units. 
+ err = ff_cbs_make_unit_refcounted(ctx, unit); + if (err < 0) + return err; + av_assert0(unit->content && unit->content_ref); + + if (av_buffer_is_writable(unit->content_ref)) + return 0; + + desc = cbs_find_unit_type_desc(ctx, unit); + if (!desc) + return AVERROR(ENOSYS); + + switch (desc->content_type) { + case CBS_CONTENT_TYPE_POD: + err = av_buffer_make_writable(&unit->content_ref); + break; + + case CBS_CONTENT_TYPE_INTERNAL_REFS: + err = cbs_clone_unit_content(&ref, unit, desc); + break; + + case CBS_CONTENT_TYPE_COMPLEX: + if (!desc->content_clone) + return AVERROR_PATCHWELCOME; + err = desc->content_clone(&ref, unit); + break; + + default: + av_assert0(0 && "Invalid content type."); + } + if (err < 0) + return err; - if(desc->content_type != CBS_CONTENT_TYPE_POD) { - av_buffer_unref(&unit->content_ref); - unit->content_ref = ref; - } - unit->content = unit->content_ref->data; - return 0; + if (desc->content_type != CBS_CONTENT_TYPE_POD) { + av_buffer_unref(&unit->content_ref); + unit->content_ref = ref; + } + unit->content = unit->content_ref->data; + return 0; } - -const uint8_t ff_log2_tab[256] = { - 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, - 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, - 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, - 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7 -}; \ No newline at end of file diff --git a/third-party/cbs/cbs_av1.c 
b/third-party/cbs/cbs_av1.c index 7bb53b9342c..9035eed3521 100644 --- a/third-party/cbs/cbs_av1.c +++ b/third-party/cbs/cbs_av1.c @@ -16,637 +16,628 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include -#include -#include +#include "libavutil/avassert.h" +#include "libavutil/opt.h" +#include "libavutil/pixfmt.h" +// [manual] Changed include path +#include "avcodec.h" #include "cbs/cbs.h" -#include "cbs/cbs_av1.h" - #include "cbs_internal.h" +#include "cbs/cbs_av1.h" static int cbs_av1_read_uvlc(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, uint32_t *write_to, - uint32_t range_min, uint32_t range_max) { - uint32_t zeroes, bits_value, value; - int position; - - if(ctx->trace_enable) - position = get_bits_count(gbc); - - zeroes = 0; - while(1) { - if(get_bits_left(gbc) < 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid uvlc code at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; + const char *name, uint32_t *write_to, + uint32_t range_min, uint32_t range_max) +{ + uint32_t zeroes, bits_value, value; + int position; + + if (ctx->trace_enable) + position = get_bits_count(gbc); + + zeroes = 0; + while (1) { + if (get_bits_left(gbc) < 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid uvlc code at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + + if (get_bits1(gbc)) + break; + ++zeroes; } - if(get_bits1(gbc)) - break; - ++zeroes; - } - - if(zeroes >= 32) { - value = MAX_UINT_BITS(32); - } - else { - if(get_bits_left(gbc) < zeroes) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid uvlc code at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; + if (zeroes >= 32) { + value = MAX_UINT_BITS(32); + } else { + if (get_bits_left(gbc) < zeroes) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid uvlc code at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + + bits_value = get_bits_long(gbc, zeroes); + value = bits_value + (UINT32_C(1) 
<< zeroes) - 1; } - bits_value = get_bits_long(gbc, zeroes); - value = bits_value + (UINT32_C(1) << zeroes) - 1; - } + if (ctx->trace_enable) { + char bits[65]; + int i, j, k; + + if (zeroes >= 32) { + while (zeroes > 32) { + k = FFMIN(zeroes - 32, 32); + for (i = 0; i < k; i++) + bits[i] = '0'; + bits[i] = 0; + ff_cbs_trace_syntax_element(ctx, position, name, + NULL, bits, 0); + zeroes -= k; + position += k; + } + } + + for (i = 0; i < zeroes; i++) + bits[i] = '0'; + bits[i++] = '1'; + + if (zeroes < 32) { + for (j = 0; j < zeroes; j++) + bits[i++] = (bits_value >> (zeroes - j - 1) & 1) ? '1' : '0'; + } - if(ctx->trace_enable) { - char bits[65]; - int i, j, k; - - if(zeroes >= 32) { - while(zeroes > 32) { - k = FFMIN(zeroes - 32, 32); - for(i = 0; i < k; i++) - bits[i] = '0'; bits[i] = 0; ff_cbs_trace_syntax_element(ctx, position, name, - NULL, bits, 0); - zeroes -= k; - position += k; - } + NULL, bits, value); } - for(i = 0; i < zeroes; i++) - bits[i] = '0'; - bits[i++] = '1'; - - if(zeroes < 32) { - for(j = 0; j < zeroes; j++) - bits[i++] = (bits_value >> (zeroes - j - 1) & 1) ? 
'1' : '0'; + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; } - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, - NULL, bits, value); - } - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; + *write_to = value; + return 0; } static int cbs_av1_write_uvlc(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, uint32_t value, - uint32_t range_min, uint32_t range_max) { - uint32_t v; - int position, zeroes; - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if(ctx->trace_enable) - position = put_bits_count(pbc); - - zeroes = av_log2(value + 1); - v = value - (1U << zeroes) + 1; - put_bits(pbc, zeroes, 0); - put_bits(pbc, 1, 1); - put_bits(pbc, zeroes, v); - - if(ctx->trace_enable) { - char bits[65]; - int i, j; - i = 0; - for(j = 0; j < zeroes; j++) - bits[i++] = '0'; - bits[i++] = '1'; - for(j = 0; j < zeroes; j++) - bits[i++] = (v >> (zeroes - j - 1) & 1) ? 
'1' : '0'; - bits[i++] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, NULL, - bits, value); - } - - return 0; + const char *name, uint32_t value, + uint32_t range_min, uint32_t range_max) +{ + uint32_t v; + int position, zeroes; + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + if (ctx->trace_enable) + position = put_bits_count(pbc); + + zeroes = av_log2(value + 1); + v = value - (1U << zeroes) + 1; + put_bits(pbc, zeroes, 0); + put_bits(pbc, 1, 1); + put_bits(pbc, zeroes, v); + + if (ctx->trace_enable) { + char bits[65]; + int i, j; + i = 0; + for (j = 0; j < zeroes; j++) + bits[i++] = '0'; + bits[i++] = '1'; + for (j = 0; j < zeroes; j++) + bits[i++] = (v >> (zeroes - j - 1) & 1) ? '1' : '0'; + bits[i++] = 0; + ff_cbs_trace_syntax_element(ctx, position, name, NULL, + bits, value); + } + + return 0; } static int cbs_av1_read_leb128(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, uint64_t *write_to) { - uint64_t value; - int position, err, i; - - if(ctx->trace_enable) - position = get_bits_count(gbc); - - value = 0; - for(i = 0; i < 8; i++) { - int subscript[2] = { 1, i }; - uint32_t byte; - err = ff_cbs_read_unsigned(ctx, gbc, 8, "leb128_byte[i]", subscript, - &byte, 0x00, 0xff); - if(err < 0) - return err; - - value |= (uint64_t)(byte & 0x7f) << (i * 7); - if(!(byte & 0x80)) - break; - } - - if(value > UINT32_MAX) - return AVERROR_INVALIDDATA; - - if(ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, NULL, "", value); - - *write_to = value; - return 0; + const char *name, uint64_t *write_to) +{ + uint64_t value; + int position, err, i; + + if (ctx->trace_enable) + position = get_bits_count(gbc); + + value = 0; + for (i = 0; i < 8; i++) { + int subscript[2] = { 1, i }; + uint32_t byte; + err = ff_cbs_read_unsigned(ctx, gbc, 8, 
"leb128_byte[i]", subscript, + &byte, 0x00, 0xff); + if (err < 0) + return err; + + value |= (uint64_t)(byte & 0x7f) << (i * 7); + if (!(byte & 0x80)) + break; + } + + if (value > UINT32_MAX) + return AVERROR_INVALIDDATA; + + if (ctx->trace_enable) + ff_cbs_trace_syntax_element(ctx, position, name, NULL, "", value); + + *write_to = value; + return 0; } static int cbs_av1_write_leb128(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, uint64_t value) { - int position, err, len, i; - uint8_t byte; + const char *name, uint64_t value) +{ + int position, err, len, i; + uint8_t byte; - len = (av_log2(value) + 7) / 7; + len = (av_log2(value) + 7) / 7; - if(ctx->trace_enable) - position = put_bits_count(pbc); + if (ctx->trace_enable) + position = put_bits_count(pbc); - for(i = 0; i < len; i++) { - int subscript[2] = { 1, i }; + for (i = 0; i < len; i++) { + int subscript[2] = { 1, i }; - byte = value >> (7 * i) & 0x7f; - if(i < len - 1) - byte |= 0x80; + byte = value >> (7 * i) & 0x7f; + if (i < len - 1) + byte |= 0x80; - err = ff_cbs_write_unsigned(ctx, pbc, 8, "leb128_byte[i]", subscript, - byte, 0x00, 0xff); - if(err < 0) - return err; - } + err = ff_cbs_write_unsigned(ctx, pbc, 8, "leb128_byte[i]", subscript, + byte, 0x00, 0xff); + if (err < 0) + return err; + } - if(ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, NULL, "", value); + if (ctx->trace_enable) + ff_cbs_trace_syntax_element(ctx, position, name, NULL, "", value); - return 0; + return 0; } static int cbs_av1_read_ns(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t n, const char *name, - const int *subscripts, uint32_t *write_to) { - uint32_t m, v, extra_bit, value; - int position, w; - - av_assert0(n > 0); - - if(ctx->trace_enable) - position = get_bits_count(gbc); - - w = av_log2(n) + 1; - m = (1 << w) - n; - - if(get_bits_left(gbc) < w) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid non-symmetric value at " - "%s: bitstream ended.\n", - name); - return 
AVERROR_INVALIDDATA; - } - - if(w - 1 > 0) - v = get_bits(gbc, w - 1); - else - v = 0; - - if(v < m) { - value = v; - } - else { - extra_bit = get_bits1(gbc); - value = (v << 1) - m + extra_bit; - } - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < w - 1; i++) - bits[i] = (v >> i & 1) ? '1' : '0'; - if(v >= m) - bits[i++] = extra_bit ? '1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, bits, value); - } - - *write_to = value; - return 0; + uint32_t n, const char *name, + const int *subscripts, uint32_t *write_to) +{ + uint32_t m, v, extra_bit, value; + int position, w; + + av_assert0(n > 0); + + if (ctx->trace_enable) + position = get_bits_count(gbc); + + w = av_log2(n) + 1; + m = (1 << w) - n; + + if (get_bits_left(gbc) < w) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid non-symmetric value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + + if (w - 1 > 0) + v = get_bits(gbc, w - 1); + else + v = 0; + + if (v < m) { + value = v; + } else { + extra_bit = get_bits1(gbc); + value = (v << 1) - m + extra_bit; + } + + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < w - 1; i++) + bits[i] = (v >> i & 1) ? '1' : '0'; + if (v >= m) + bits[i++] = extra_bit ? 
'1' : '0'; + bits[i] = 0; + + ff_cbs_trace_syntax_element(ctx, position, + name, subscripts, bits, value); + } + + *write_to = value; + return 0; } static int cbs_av1_write_ns(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t n, const char *name, - const int *subscripts, uint32_t value) { - uint32_t w, m, v, extra_bit; - int position; - - if(value > n) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [0,%" PRIu32 "].\n", - name, value, n); - return AVERROR_INVALIDDATA; - } - - if(ctx->trace_enable) - position = put_bits_count(pbc); - - w = av_log2(n) + 1; - m = (1 << w) - n; - - if(put_bits_left(pbc) < w) - return AVERROR(ENOSPC); - - if(value < m) { - v = value; - put_bits(pbc, w - 1, v); - } - else { - v = m + ((value - m) >> 1); - extra_bit = (value - m) & 1; - put_bits(pbc, w - 1, v); - put_bits(pbc, 1, extra_bit); - } - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < w - 1; i++) - bits[i] = (v >> i & 1) ? '1' : '0'; - if(value >= m) - bits[i++] = extra_bit ? 
'1' : '0'; - bits[i] = 0; + uint32_t n, const char *name, + const int *subscripts, uint32_t value) +{ + uint32_t w, m, v, extra_bit; + int position; + + if (value > n) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [0,%"PRIu32"].\n", + name, value, n); + return AVERROR_INVALIDDATA; + } - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, bits, value); - } + if (ctx->trace_enable) + position = put_bits_count(pbc); - return 0; -} + w = av_log2(n) + 1; + m = (1 << w) - n; -static int cbs_av1_read_increment(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t *write_to) { - uint32_t value; - int position, i; - char bits[33]; - - av_assert0(range_min <= range_max && range_max - range_min < sizeof(bits) - 1); - if(ctx->trace_enable) - position = get_bits_count(gbc); - - for(i = 0, value = range_min; value < range_max;) { - if(get_bits_left(gbc) < 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid increment value at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; + if (put_bits_left(pbc) < w) + return AVERROR(ENOSPC); + + if (value < m) { + v = value; + put_bits(pbc, w - 1, v); + } else { + v = m + ((value - m) >> 1); + extra_bit = (value - m) & 1; + put_bits(pbc, w - 1, v); + put_bits(pbc, 1, extra_bit); } - if(get_bits1(gbc)) { - bits[i++] = '1'; - ++value; + + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < w - 1; i++) + bits[i] = (v >> i & 1) ? '1' : '0'; + if (value >= m) + bits[i++] = extra_bit ? 
'1' : '0'; + bits[i] = 0; + + ff_cbs_trace_syntax_element(ctx, position, + name, subscripts, bits, value); } - else { - bits[i++] = '0'; - break; + + return 0; +} + +static int cbs_av1_read_increment(CodedBitstreamContext *ctx, GetBitContext *gbc, + uint32_t range_min, uint32_t range_max, + const char *name, uint32_t *write_to) +{ + uint32_t value; + int position, i; + char bits[33]; + + av_assert0(range_min <= range_max && range_max - range_min < sizeof(bits) - 1); + if (ctx->trace_enable) + position = get_bits_count(gbc); + + for (i = 0, value = range_min; value < range_max;) { + if (get_bits_left(gbc) < 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid increment value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + if (get_bits1(gbc)) { + bits[i++] = '1'; + ++value; + } else { + bits[i++] = '0'; + break; + } } - } - if(ctx->trace_enable) { - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, - name, NULL, bits, value); - } + if (ctx->trace_enable) { + bits[i] = 0; + ff_cbs_trace_syntax_element(ctx, position, + name, NULL, bits, value); + } - *write_to = value; - return 0; + *write_to = value; + return 0; } static int cbs_av1_write_increment(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t value) { - int len; - - av_assert0(range_min <= range_max && range_max - range_min < 32); - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if(value == range_max) - len = range_max - range_min; - else - len = value - range_min + 1; - if(put_bits_left(pbc) < len) - return AVERROR(ENOSPC); - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < len; i++) { - if(range_min + i == value) - bits[i] = '0'; - else - bits[i] = '1'; + uint32_t range_min, uint32_t range_max, + 
const char *name, uint32_t value) +{ + int len; + + av_assert0(range_min <= range_max && range_max - range_min < 32); + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + if (value == range_max) + len = range_max - range_min; + else + len = value - range_min + 1; + if (put_bits_left(pbc) < len) + return AVERROR(ENOSPC); + + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < len; i++) { + if (range_min + i == value) + bits[i] = '0'; + else + bits[i] = '1'; + } + bits[i] = 0; + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, NULL, bits, value); } - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, NULL, bits, value); - } - if(len > 0) - put_bits(pbc, len, (1 << len) - 1 - (value != range_max)); + if (len > 0) + put_bits(pbc, len, (1 << len) - 1 - (value != range_max)); - return 0; + return 0; } static int cbs_av1_read_subexp(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t range_max, const char *name, - const int *subscripts, uint32_t *write_to) { - uint32_t value; - int position, err; - uint32_t max_len, len, range_offset, range_bits; + uint32_t range_max, const char *name, + const int *subscripts, uint32_t *write_to) +{ + uint32_t value; + int position, err; + uint32_t max_len, len, range_offset, range_bits; + + if (ctx->trace_enable) + position = get_bits_count(gbc); + + av_assert0(range_max > 0); + max_len = av_log2(range_max - 1) - 3; + + err = cbs_av1_read_increment(ctx, gbc, 0, max_len, + "subexp_more_bits", &len); + if (err < 0) + return err; + + if (len) { + range_bits = 2 + len; + range_offset = 1 << range_bits; + } else { + range_bits = 3; + range_offset = 0; + } - if(ctx->trace_enable) - position = get_bits_count(gbc); + if (len < max_len) { + err = ff_cbs_read_unsigned(ctx, gbc, range_bits, + 
"subexp_bits", NULL, &value, + 0, MAX_UINT_BITS(range_bits)); + if (err < 0) + return err; + + } else { + err = cbs_av1_read_ns(ctx, gbc, range_max - range_offset, + "subexp_final_bits", NULL, &value); + if (err < 0) + return err; + } + value += range_offset; - av_assert0(range_max > 0); - max_len = av_log2(range_max - 1) - 3; + if (ctx->trace_enable) + ff_cbs_trace_syntax_element(ctx, position, + name, subscripts, "", value); - err = cbs_av1_read_increment(ctx, gbc, 0, max_len, - "subexp_more_bits", &len); - if(err < 0) + *write_to = value; return err; - - if(len) { - range_bits = 2 + len; - range_offset = 1 << range_bits; - } - else { - range_bits = 3; - range_offset = 0; - } - - if(len < max_len) { - err = ff_cbs_read_unsigned(ctx, gbc, range_bits, - "subexp_bits", NULL, &value, - 0, MAX_UINT_BITS(range_bits)); - if(err < 0) - return err; - } - else { - err = cbs_av1_read_ns(ctx, gbc, range_max - range_offset, - "subexp_final_bits", NULL, &value); - if(err < 0) - return err; - } - value += range_offset; - - if(ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, "", value); - - *write_to = value; - return err; } static int cbs_av1_write_subexp(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t range_max, const char *name, - const int *subscripts, uint32_t value) { - int position, err; - uint32_t max_len, len, range_offset, range_bits; - - if(value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [0,%" PRIu32 "].\n", - name, value, range_max); - return AVERROR_INVALIDDATA; - } - - if(ctx->trace_enable) - position = put_bits_count(pbc); - - av_assert0(range_max > 0); - max_len = av_log2(range_max - 1) - 3; - - if(value < 8) { - range_bits = 3; - range_offset = 0; - len = 0; - } - else { - range_bits = av_log2(value); - len = range_bits - 2; - if(len > max_len) { - // The top bin is combined with the one below it. 
- av_assert0(len == max_len + 1); - --range_bits; - len = max_len; + uint32_t range_max, const char *name, + const int *subscripts, uint32_t value) +{ + int position, err; + uint32_t max_len, len, range_offset, range_bits; + + if (value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [0,%"PRIu32"].\n", + name, value, range_max); + return AVERROR_INVALIDDATA; } - range_offset = 1 << range_bits; - } - err = cbs_av1_write_increment(ctx, pbc, 0, max_len, - "subexp_more_bits", len); - if(err < 0) - return err; + if (ctx->trace_enable) + position = put_bits_count(pbc); + + av_assert0(range_max > 0); + max_len = av_log2(range_max - 1) - 3; + + if (value < 8) { + range_bits = 3; + range_offset = 0; + len = 0; + } else { + range_bits = av_log2(value); + len = range_bits - 2; + if (len > max_len) { + // The top bin is combined with the one below it. + av_assert0(len == max_len + 1); + --range_bits; + len = max_len; + } + range_offset = 1 << range_bits; + } + + err = cbs_av1_write_increment(ctx, pbc, 0, max_len, + "subexp_more_bits", len); + if (err < 0) + return err; + + if (len < max_len) { + err = ff_cbs_write_unsigned(ctx, pbc, range_bits, + "subexp_bits", NULL, + value - range_offset, + 0, MAX_UINT_BITS(range_bits)); + if (err < 0) + return err; + + } else { + err = cbs_av1_write_ns(ctx, pbc, range_max - range_offset, + "subexp_final_bits", NULL, + value - range_offset); + if (err < 0) + return err; + } + + if (ctx->trace_enable) + ff_cbs_trace_syntax_element(ctx, position, + name, subscripts, "", value); - if(len < max_len) { - err = ff_cbs_write_unsigned(ctx, pbc, range_bits, - "subexp_bits", NULL, - value - range_offset, - 0, MAX_UINT_BITS(range_bits)); - if(err < 0) - return err; - } - else { - err = cbs_av1_write_ns(ctx, pbc, range_max - range_offset, - "subexp_final_bits", NULL, - value - range_offset); - if(err < 0) - return err; - } - - if(ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, - name, 
subscripts, "", value); - - return err; + return err; } -static int cbs_av1_tile_log2(int blksize, int target) { - int k; - for(k = 0; (blksize << k) < target; k++) - ; - return k; +static int cbs_av1_tile_log2(int blksize, int target) +{ + int k; + for (k = 0; (blksize << k) < target; k++); + return k; } static int cbs_av1_get_relative_dist(const AV1RawSequenceHeader *seq, - unsigned int a, unsigned int b) { - unsigned int diff, m; - if(!seq->enable_order_hint) - return 0; - diff = a - b; - m = 1 << seq->order_hint_bits_minus_1; - diff = (diff & (m - 1)) - (diff & m); - return diff; + unsigned int a, unsigned int b) +{ + unsigned int diff, m; + if (!seq->enable_order_hint) + return 0; + diff = a - b; + m = 1 << seq->order_hint_bits_minus_1; + diff = (diff & (m - 1)) - (diff & m); + return diff; } -static size_t cbs_av1_get_payload_bytes_left(GetBitContext *gbc) { - GetBitContext tmp = *gbc; - size_t size = 0; - for(int i = 0; get_bits_left(&tmp) >= 8; i++) { - if(get_bits(&tmp, 8)) - size = i; - } - return size; +static size_t cbs_av1_get_payload_bytes_left(GetBitContext *gbc) +{ + GetBitContext tmp = *gbc; + size_t size = 0; + for (int i = 0; get_bits_left(&tmp) >= 8; i++) { + if (get_bits(&tmp, 8)) + size = i; + } + return size; } -#define HEADER(name) \ - do { \ - ff_cbs_trace_header(ctx, name); \ - } while(0) +#define HEADER(name) do { \ + ff_cbs_trace_header(ctx, name); \ + } while (0) -#define CHECK(call) \ - do { \ - err = (call); \ - if(err < 0) \ - return err; \ - } while(0) +#define CHECK(call) do { \ + err = (call); \ + if (err < 0) \ + return err; \ + } while (0) -#define FUNC_NAME(rw, codec, name) cbs_##codec##_##rw##_##name +#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name #define FUNC_AV1(rw, name) FUNC_NAME(rw, av1, name) #define FUNC(name) FUNC_AV1(READWRITE, name) -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]) { subs, __VA_ARGS__ }) : NULL) +#define SUBSCRIPTS(subs, ...) (subs > 0 ? 
((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) #define fb(width, name) \ - xf(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) + xf(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) #define fc(width, name, range_min, range_max) \ - xf(width, name, current->name, range_min, range_max, 0, ) + xf(width, name, current->name, range_min, range_max, 0, ) #define flag(name) fb(1, name) #define su(width, name) \ - xsu(width, name, current->name, 0, ) + xsu(width, name, current->name, 0, ) #define fbs(width, name, subs, ...) \ - xf(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) + xf(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) #define fcs(width, name, range_min, range_max, subs, ...) \ - xf(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) + xf(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) #define flags(name, subs, ...) \ - xf(1, name, current->name, 0, 1, subs, __VA_ARGS__) + xf(1, name, current->name, 0, 1, subs, __VA_ARGS__) #define sus(width, name, subs, ...) \ - xsu(width, name, current->name, subs, __VA_ARGS__) + xsu(width, name, current->name, subs, __VA_ARGS__) -#define fixed(width, name, value) \ - do { \ - av_unused uint32_t fixed_value = value; \ - xf(width, name, fixed_value, value, value, 0, ); \ - } while(0) +#define fixed(width, name, value) do { \ + av_unused uint32_t fixed_value = value; \ + xf(width, name, fixed_value, value, value, 0, ); \ + } while (0) #define READ #define READWRITE read #define RWContext GetBitContext -#define xf(width, name, var, range_min, range_max, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while(0) - -#define xsu(width, name, var, subs, ...) 
\ - do { \ - int32_t value; \ - CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - var = value; \ - } while(0) - -#define uvlc(name, range_min, range_max) \ - do { \ - uint32_t value; \ - CHECK(cbs_av1_read_uvlc(ctx, rw, #name, \ - &value, range_min, range_max)); \ - current->name = value; \ - } while(0) - -#define ns(max_value, name, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(cbs_av1_read_ns(ctx, rw, max_value, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ - current->name = value; \ - } while(0) - -#define increment(name, min, max) \ - do { \ - uint32_t value; \ - CHECK(cbs_av1_read_increment(ctx, rw, min, max, #name, &value)); \ - current->name = value; \ - } while(0) - -#define subexp(name, max, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(cbs_av1_read_subexp(ctx, rw, max, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ - current->name = value; \ - } while(0) - -#define delta_q(name) \ - do { \ - uint8_t delta_coded; \ - int8_t delta_q; \ - xf(1, name.delta_coded, delta_coded, 0, 1, 0, ); \ - if(delta_coded) \ - xsu(1 + 6, name.delta_q, delta_q, 0, ); \ - else \ - delta_q = 0; \ - current->name = delta_q; \ - } while(0) - -#define leb128(name) \ - do { \ - uint64_t value; \ - CHECK(cbs_av1_read_leb128(ctx, rw, #name, &value)); \ - current->name = value; \ - } while(0) - -#define infer(name, value) \ - do { \ - current->name = value; \ - } while(0) +#define xf(width, name, var, range_min, range_max, subs, ...) do { \ + uint32_t value; \ + CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + var = value; \ + } while (0) + +#define xsu(width, name, var, subs, ...) 
do { \ + int32_t value; \ + CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), &value, \ + MIN_INT_BITS(width), \ + MAX_INT_BITS(width))); \ + var = value; \ + } while (0) + +#define uvlc(name, range_min, range_max) do { \ + uint32_t value; \ + CHECK(cbs_av1_read_uvlc(ctx, rw, #name, \ + &value, range_min, range_max)); \ + current->name = value; \ + } while (0) + +#define ns(max_value, name, subs, ...) do { \ + uint32_t value; \ + CHECK(cbs_av1_read_ns(ctx, rw, max_value, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ + current->name = value; \ + } while (0) + +#define increment(name, min, max) do { \ + uint32_t value; \ + CHECK(cbs_av1_read_increment(ctx, rw, min, max, #name, &value)); \ + current->name = value; \ + } while (0) + +#define subexp(name, max, subs, ...) do { \ + uint32_t value; \ + CHECK(cbs_av1_read_subexp(ctx, rw, max, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ + current->name = value; \ + } while (0) + +#define delta_q(name) do { \ + uint8_t delta_coded; \ + int8_t delta_q; \ + xf(1, name.delta_coded, delta_coded, 0, 1, 0, ); \ + if (delta_coded) \ + xsu(1 + 6, name.delta_q, delta_q, 0, ); \ + else \ + delta_q = 0; \ + current->name = delta_q; \ + } while (0) + +#define leb128(name) do { \ + uint64_t value; \ + CHECK(cbs_av1_read_leb128(ctx, rw, #name, &value)); \ + current->name = value; \ + } while (0) + +#define infer(name, value) do { \ + current->name = value; \ + } while (0) #define byte_alignment(rw) (get_bits_count(rw) % 8) @@ -671,69 +662,60 @@ static size_t cbs_av1_get_payload_bytes_left(GetBitContext *gbc) { #define READWRITE write #define RWContext PutBitContext -#define xf(width, name, var, range_min, range_max, subs, ...) \ - do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - var, range_min, range_max)); \ - } while(0) - -#define xsu(width, name, var, subs, ...) 
\ - do { \ - CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), var, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - } while(0) - -#define uvlc(name, range_min, range_max) \ - do { \ - CHECK(cbs_av1_write_uvlc(ctx, rw, #name, current->name, \ - range_min, range_max)); \ - } while(0) - -#define ns(max_value, name, subs, ...) \ - do { \ - CHECK(cbs_av1_write_ns(ctx, rw, max_value, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - current->name)); \ - } while(0) - -#define increment(name, min, max) \ - do { \ - CHECK(cbs_av1_write_increment(ctx, rw, min, max, #name, \ - current->name)); \ - } while(0) - -#define subexp(name, max, subs, ...) \ - do { \ - CHECK(cbs_av1_write_subexp(ctx, rw, max, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - current->name)); \ - } while(0) - -#define delta_q(name) \ - do { \ - xf(1, name.delta_coded, current->name != 0, 0, 1, 0, ); \ - if(current->name) \ - xsu(1 + 6, name.delta_q, current->name, 0, ); \ - } while(0) - -#define leb128(name) \ - do { \ - CHECK(cbs_av1_write_leb128(ctx, rw, #name, current->name)); \ - } while(0) - -#define infer(name, value) \ - do { \ - if(current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, \ - "%s does not match inferred value: " \ - "%" PRId64 ", but should be %" PRId64 ".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - return AVERROR_INVALIDDATA; \ - } \ - } while(0) +#define xf(width, name, var, range_min, range_max, subs, ...) do { \ + CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + var, range_min, range_max)); \ + } while (0) + +#define xsu(width, name, var, subs, ...) 
do { \ + CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), var, \ + MIN_INT_BITS(width), \ + MAX_INT_BITS(width))); \ + } while (0) + +#define uvlc(name, range_min, range_max) do { \ + CHECK(cbs_av1_write_uvlc(ctx, rw, #name, current->name, \ + range_min, range_max)); \ + } while (0) + +#define ns(max_value, name, subs, ...) do { \ + CHECK(cbs_av1_write_ns(ctx, rw, max_value, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + current->name)); \ + } while (0) + +#define increment(name, min, max) do { \ + CHECK(cbs_av1_write_increment(ctx, rw, min, max, #name, \ + current->name)); \ + } while (0) + +#define subexp(name, max, subs, ...) do { \ + CHECK(cbs_av1_write_subexp(ctx, rw, max, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + current->name)); \ + } while (0) + +#define delta_q(name) do { \ + xf(1, name.delta_coded, current->name != 0, 0, 1, 0, ); \ + if (current->name) \ + xsu(1 + 6, name.delta_q, current->name, 0, ); \ + } while (0) + +#define leb128(name) do { \ + CHECK(cbs_av1_write_leb128(ctx, rw, #name, current->name)); \ + } while (0) + +#define infer(name, value) do { \ + if (current->name != (value)) { \ + av_log(ctx->log_ctx, AV_LOG_ERROR, \ + "%s does not match inferred value: " \ + "%"PRId64", but should be %"PRId64".\n", \ + #name, (int64_t)current->name, (int64_t)(value)); \ + return AVERROR_INVALIDDATA; \ + } \ + } while (0) #define byte_alignment(rw) (put_bits_count(rw) % 8) @@ -755,569 +737,601 @@ static size_t cbs_av1_get_payload_bytes_left(GetBitContext *gbc) { static int cbs_av1_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) { - GetBitContext gbc; - uint8_t *data; - size_t size; - uint64_t obu_length; - int pos, err, trace; - - // Don't include this parsing in trace output. 
- trace = ctx->trace_enable; - ctx->trace_enable = 0; - - data = frag->data; - size = frag->data_size; - - if(INT_MAX / 8 < size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid fragment: " - "too large (%zu bytes).\n", - size); - err = AVERROR_INVALIDDATA; - goto fail; - } - - if(header && size && data[0] & 0x80) { - // first bit is nonzero, the extradata does not consist purely of - // OBUs. Expect MP4/Matroska AV1CodecConfigurationRecord - int config_record_version = data[0] & 0x7f; - - if(config_record_version != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Unknown version %d of AV1CodecConfigurationRecord " - "found!\n", - config_record_version); - err = AVERROR_INVALIDDATA; - goto fail; - } - - if(size <= 4) { - if(size < 4) { - av_log(ctx->log_ctx, AV_LOG_WARNING, - "Undersized AV1CodecConfigurationRecord v%d found!\n", - config_record_version); + CodedBitstreamFragment *frag, + int header) +{ + GetBitContext gbc; + uint8_t *data; + size_t size; + uint64_t obu_length; + int pos, err, trace; + + // Don't include this parsing in trace output. + trace = ctx->trace_enable; + ctx->trace_enable = 0; + + data = frag->data; + size = frag->data_size; + + if (INT_MAX / 8 < size) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid fragment: " + "too large (%zu bytes).\n", size); err = AVERROR_INVALIDDATA; goto fail; - } - - goto success; } - // In AV1CodecConfigurationRecord v1, actual OBUs start after - // four bytes. Thus set the offset as required for properly - // parsing them. 
- data += 4; - size -= 4; - } - - while(size > 0) { - AV1RawOBUHeader header; - uint64_t obu_size; - - init_get_bits(&gbc, data, 8 * size); - - err = cbs_av1_read_obu_header(ctx, &gbc, &header); - if(err < 0) - goto fail; - - if(header.obu_has_size_field) { - if(get_bits_left(&gbc) < 8) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU: fragment " - "too short (%zu bytes).\n", - size); - err = AVERROR_INVALIDDATA; - goto fail; - } - err = cbs_av1_read_leb128(ctx, &gbc, "obu_size", &obu_size); - if(err < 0) - goto fail; + if (header && size && data[0] & 0x80) { + // first bit is nonzero, the extradata does not consist purely of + // OBUs. Expect MP4/Matroska AV1CodecConfigurationRecord + int config_record_version = data[0] & 0x7f; + + if (config_record_version != 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "Unknown version %d of AV1CodecConfigurationRecord " + "found!\n", + config_record_version); + err = AVERROR_INVALIDDATA; + goto fail; + } + + if (size <= 4) { + if (size < 4) { + av_log(ctx->log_ctx, AV_LOG_WARNING, + "Undersized AV1CodecConfigurationRecord v%d found!\n", + config_record_version); + err = AVERROR_INVALIDDATA; + goto fail; + } + + goto success; + } + + // In AV1CodecConfigurationRecord v1, actual OBUs start after + // four bytes. Thus set the offset as required for properly + // parsing them. 
+ data += 4; + size -= 4; } - else - obu_size = size - 1 - header.obu_extension_flag; - pos = get_bits_count(&gbc); - av_assert0(pos % 8 == 0 && pos / 8 <= size); - - obu_length = pos / 8 + obu_size; - - if(size < obu_length) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU length: " - "%" PRIu64 ", but only %zu bytes remaining in fragment.\n", - obu_length, size); - err = AVERROR_INVALIDDATA; - goto fail; + while (size > 0) { + AV1RawOBUHeader header; + uint64_t obu_size; + + init_get_bits(&gbc, data, 8 * size); + + err = cbs_av1_read_obu_header(ctx, &gbc, &header); + if (err < 0) + goto fail; + + if (header.obu_has_size_field) { + if (get_bits_left(&gbc) < 8) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU: fragment " + "too short (%zu bytes).\n", size); + err = AVERROR_INVALIDDATA; + goto fail; + } + err = cbs_av1_read_leb128(ctx, &gbc, "obu_size", &obu_size); + if (err < 0) + goto fail; + } else + obu_size = size - 1 - header.obu_extension_flag; + + pos = get_bits_count(&gbc); + av_assert0(pos % 8 == 0 && pos / 8 <= size); + + obu_length = pos / 8 + obu_size; + + if (size < obu_length) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU length: " + "%"PRIu64", but only %zu bytes remaining in fragment.\n", + obu_length, size); + err = AVERROR_INVALIDDATA; + goto fail; + } + + err = ff_cbs_append_unit_data(frag, header.obu_type, + data, obu_length, frag->data_ref); + if (err < 0) + goto fail; + + data += obu_length; + size -= obu_length; } - err = ff_cbs_insert_unit_data(frag, -1, header.obu_type, - data, obu_length, frag->data_ref); - if(err < 0) - goto fail; - - data += obu_length; - size -= obu_length; - } - success: - err = 0; + err = 0; fail: - ctx->trace_enable = trace; - return err; + ctx->trace_enable = trace; + return err; } static int cbs_av1_ref_tile_data(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - GetBitContext *gbc, - AV1RawTileData *td) { - int pos; - - pos = get_bits_count(gbc); - if(pos >= 8 * unit->data_size) { - 
av_log(ctx->log_ctx, AV_LOG_ERROR, "Bitstream ended before " - "any data in tile group (%d bits read).\n", - pos); - return AVERROR_INVALIDDATA; - } - // Must be byte-aligned at this point. - av_assert0(pos % 8 == 0); - - td->data_ref = av_buffer_ref(unit->data_ref); - if(!td->data_ref) - return AVERROR(ENOMEM); - - td->data = unit->data + pos / 8; - td->data_size = unit->data_size - pos / 8; - - return 0; -} + CodedBitstreamUnit *unit, + GetBitContext *gbc, + AV1RawTileData *td) +{ + int pos; + + pos = get_bits_count(gbc); + if (pos >= 8 * unit->data_size) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Bitstream ended before " + "any data in tile group (%d bits read).\n", pos); + return AVERROR_INVALIDDATA; + } + // Must be byte-aligned at this point. + av_assert0(pos % 8 == 0); -static int cbs_av1_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - AV1RawOBU *obu; - GetBitContext gbc; - int err, start_pos, end_pos; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if(err < 0) - return err; - obu = unit->content; + td->data_ref = av_buffer_ref(unit->data_ref); + if (!td->data_ref) + return AVERROR(ENOMEM); - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if(err < 0) - return err; + td->data = unit->data + pos / 8; + td->data_size = unit->data_size - pos / 8; - err = cbs_av1_read_obu_header(ctx, &gbc, &obu->header); - if(err < 0) - return err; - av_assert0(obu->header.obu_type == unit->type); - - if(obu->header.obu_has_size_field) { - uint64_t obu_size; - err = cbs_av1_read_leb128(ctx, &gbc, "obu_size", &obu_size); - if(err < 0) - return err; - obu->obu_size = obu_size; - } - else { - if(unit->data_size < 1 + obu->header.obu_extension_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU length: " - "unit too short (%zu).\n", - unit->data_size); - return AVERROR_INVALIDDATA; - } - obu->obu_size = unit->data_size - 1 - obu->header.obu_extension_flag; - } - - start_pos = 
get_bits_count(&gbc); - - if(obu->header.obu_extension_flag) { - if(obu->header.obu_type != AV1_OBU_SEQUENCE_HEADER && - obu->header.obu_type != AV1_OBU_TEMPORAL_DELIMITER && - priv->operating_point_idc) { - int in_temporal_layer = - (priv->operating_point_idc >> priv->temporal_id) & 1; - int in_spatial_layer = - (priv->operating_point_idc >> (priv->spatial_id + 8)) & 1; - if(!in_temporal_layer || !in_spatial_layer) { - return AVERROR(EAGAIN); // drop_obu() - } - } - } - - switch(obu->header.obu_type) { - case AV1_OBU_SEQUENCE_HEADER: { - err = cbs_av1_read_sequence_header_obu(ctx, &gbc, - &obu->obu.sequence_header); - if(err < 0) - return err; - - if(priv->operating_point >= 0) { - AV1RawSequenceHeader *sequence_header = &obu->obu.sequence_header; - - if(priv->operating_point > sequence_header->operating_points_cnt_minus_1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid Operating Point %d requested. " - "Must not be higher than %u.\n", - priv->operating_point, sequence_header->operating_points_cnt_minus_1); - return AVERROR(EINVAL); - } - priv->operating_point_idc = sequence_header->operating_point_idc[priv->operating_point]; - } + return 0; +} - av_buffer_unref(&priv->sequence_header_ref); - priv->sequence_header = NULL; +static int cbs_av1_read_unit(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + AV1RawOBU *obu; + GetBitContext gbc; + int err, start_pos, end_pos; + + err = ff_cbs_alloc_unit_content2(ctx, unit); + if (err < 0) + return err; + obu = unit->content; + + err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); + if (err < 0) + return err; + + err = cbs_av1_read_obu_header(ctx, &gbc, &obu->header); + if (err < 0) + return err; + av_assert0(obu->header.obu_type == unit->type); + + if (obu->header.obu_has_size_field) { + uint64_t obu_size; + err = cbs_av1_read_leb128(ctx, &gbc, "obu_size", &obu_size); + if (err < 0) + return err; + obu->obu_size = obu_size; + } else { + if 
(unit->data_size < 1 + obu->header.obu_extension_flag) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU length: " + "unit too short (%zu).\n", unit->data_size); + return AVERROR_INVALIDDATA; + } + obu->obu_size = unit->data_size - 1 - obu->header.obu_extension_flag; + } - priv->sequence_header_ref = av_buffer_ref(unit->content_ref); - if(!priv->sequence_header_ref) - return AVERROR(ENOMEM); - priv->sequence_header = &obu->obu.sequence_header; - } break; - case AV1_OBU_TEMPORAL_DELIMITER: { - err = cbs_av1_read_temporal_delimiter_obu(ctx, &gbc); - if(err < 0) - return err; - } break; - case AV1_OBU_FRAME_HEADER: - case AV1_OBU_REDUNDANT_FRAME_HEADER: { - err = cbs_av1_read_frame_header_obu(ctx, &gbc, - &obu->obu.frame_header, - obu->header.obu_type == - AV1_OBU_REDUNDANT_FRAME_HEADER, - unit->data_ref); - if(err < 0) - return err; - } break; - case AV1_OBU_TILE_GROUP: { - err = cbs_av1_read_tile_group_obu(ctx, &gbc, - &obu->obu.tile_group); - if(err < 0) - return err; - - err = cbs_av1_ref_tile_data(ctx, unit, &gbc, - &obu->obu.tile_group.tile_data); - if(err < 0) - return err; - } break; - case AV1_OBU_FRAME: { - err = cbs_av1_read_frame_obu(ctx, &gbc, &obu->obu.frame, - unit->data_ref); - if(err < 0) - return err; - - err = cbs_av1_ref_tile_data(ctx, unit, &gbc, - &obu->obu.frame.tile_group.tile_data); - if(err < 0) - return err; - } break; - case AV1_OBU_TILE_LIST: { - err = cbs_av1_read_tile_list_obu(ctx, &gbc, - &obu->obu.tile_list); - if(err < 0) - return err; - - err = cbs_av1_ref_tile_data(ctx, unit, &gbc, - &obu->obu.tile_list.tile_data); - if(err < 0) - return err; - } break; - case AV1_OBU_METADATA: { - err = cbs_av1_read_metadata_obu(ctx, &gbc, &obu->obu.metadata); - if(err < 0) - return err; - } break; - case AV1_OBU_PADDING: { - err = cbs_av1_read_padding_obu(ctx, &gbc, &obu->obu.padding); - if(err < 0) - return err; - } break; - default: - return AVERROR(ENOSYS); - } - - end_pos = get_bits_count(&gbc); - av_assert0(end_pos <= unit->data_size * 8); 
- - if(obu->obu_size > 0 && - obu->header.obu_type != AV1_OBU_TILE_GROUP && - obu->header.obu_type != AV1_OBU_TILE_LIST && - obu->header.obu_type != AV1_OBU_FRAME) { - int nb_bits = obu->obu_size * 8 + start_pos - end_pos; - - if(nb_bits <= 0) - return AVERROR_INVALIDDATA; - - err = cbs_av1_read_trailing_bits(ctx, &gbc, nb_bits); - if(err < 0) - return err; - } - - return 0; -} + start_pos = get_bits_count(&gbc); + + if (obu->header.obu_extension_flag) { + if (obu->header.obu_type != AV1_OBU_SEQUENCE_HEADER && + obu->header.obu_type != AV1_OBU_TEMPORAL_DELIMITER && + priv->operating_point_idc) { + int in_temporal_layer = + (priv->operating_point_idc >> priv->temporal_id ) & 1; + int in_spatial_layer = + (priv->operating_point_idc >> (priv->spatial_id + 8)) & 1; + if (!in_temporal_layer || !in_spatial_layer) { + return AVERROR(EAGAIN); // drop_obu() + } + } + } -static int cbs_av1_write_obu(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - AV1RawOBU *obu = unit->content; - PutBitContext pbc_tmp; - AV1RawTileData *td; - size_t header_size; - int err, start_pos, end_pos, data_pos; - - // OBUs in the normal bitstream format must contain a size field - // in every OBU (in annex B it is optional, but we don't support - // writing that). - obu->header.obu_has_size_field = 1; - - err = cbs_av1_write_obu_header(ctx, pbc, &obu->header); - if(err < 0) - return err; + switch (obu->header.obu_type) { + case AV1_OBU_SEQUENCE_HEADER: + { + err = cbs_av1_read_sequence_header_obu(ctx, &gbc, + &obu->obu.sequence_header); + if (err < 0) + return err; + + if (priv->operating_point >= 0) { + AV1RawSequenceHeader *sequence_header = &obu->obu.sequence_header; + + if (priv->operating_point > sequence_header->operating_points_cnt_minus_1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid Operating Point %d requested. 
" + "Must not be higher than %u.\n", + priv->operating_point, sequence_header->operating_points_cnt_minus_1); + return AVERROR(EINVAL); + } + priv->operating_point_idc = sequence_header->operating_point_idc[priv->operating_point]; + } + + av_buffer_unref(&priv->sequence_header_ref); + priv->sequence_header = NULL; + + priv->sequence_header_ref = av_buffer_ref(unit->content_ref); + if (!priv->sequence_header_ref) + return AVERROR(ENOMEM); + priv->sequence_header = &obu->obu.sequence_header; + } + break; + case AV1_OBU_TEMPORAL_DELIMITER: + { + err = cbs_av1_read_temporal_delimiter_obu(ctx, &gbc); + if (err < 0) + return err; + } + break; + case AV1_OBU_FRAME_HEADER: + case AV1_OBU_REDUNDANT_FRAME_HEADER: + { + err = cbs_av1_read_frame_header_obu(ctx, &gbc, + &obu->obu.frame_header, + obu->header.obu_type == + AV1_OBU_REDUNDANT_FRAME_HEADER, + unit->data_ref); + if (err < 0) + return err; + } + break; + case AV1_OBU_TILE_GROUP: + { + err = cbs_av1_read_tile_group_obu(ctx, &gbc, + &obu->obu.tile_group); + if (err < 0) + return err; + + err = cbs_av1_ref_tile_data(ctx, unit, &gbc, + &obu->obu.tile_group.tile_data); + if (err < 0) + return err; + } + break; + case AV1_OBU_FRAME: + { + err = cbs_av1_read_frame_obu(ctx, &gbc, &obu->obu.frame, + unit->data_ref); + if (err < 0) + return err; + + err = cbs_av1_ref_tile_data(ctx, unit, &gbc, + &obu->obu.frame.tile_group.tile_data); + if (err < 0) + return err; + } + break; + case AV1_OBU_TILE_LIST: + { + err = cbs_av1_read_tile_list_obu(ctx, &gbc, + &obu->obu.tile_list); + if (err < 0) + return err; + + err = cbs_av1_ref_tile_data(ctx, unit, &gbc, + &obu->obu.tile_list.tile_data); + if (err < 0) + return err; + } + break; + case AV1_OBU_METADATA: + { + err = cbs_av1_read_metadata_obu(ctx, &gbc, &obu->obu.metadata); + if (err < 0) + return err; + } + break; + case AV1_OBU_PADDING: + { + err = cbs_av1_read_padding_obu(ctx, &gbc, &obu->obu.padding); + if (err < 0) + return err; + } + break; + default: + return AVERROR(ENOSYS); + 
} - if(obu->header.obu_has_size_field) { - pbc_tmp = *pbc; - // Add space for the size field to fill later. - put_bits32(pbc, 0); - put_bits32(pbc, 0); - } + end_pos = get_bits_count(&gbc); + av_assert0(end_pos <= unit->data_size * 8); - td = NULL; - start_pos = put_bits_count(pbc); + if (obu->obu_size > 0 && + obu->header.obu_type != AV1_OBU_TILE_GROUP && + obu->header.obu_type != AV1_OBU_TILE_LIST && + obu->header.obu_type != AV1_OBU_FRAME) { + int nb_bits = obu->obu_size * 8 + start_pos - end_pos; - switch(obu->header.obu_type) { - case AV1_OBU_SEQUENCE_HEADER: { - err = cbs_av1_write_sequence_header_obu(ctx, pbc, - &obu->obu.sequence_header); - if(err < 0) - return err; + if (nb_bits <= 0) + return AVERROR_INVALIDDATA; - av_buffer_unref(&priv->sequence_header_ref); - priv->sequence_header = NULL; + err = cbs_av1_read_trailing_bits(ctx, &gbc, nb_bits); + if (err < 0) + return err; + } - err = ff_cbs_make_unit_refcounted(ctx, unit); - if(err < 0) - return err; - - priv->sequence_header_ref = av_buffer_ref(unit->content_ref); - if(!priv->sequence_header_ref) - return AVERROR(ENOMEM); - priv->sequence_header = &obu->obu.sequence_header; - } break; - case AV1_OBU_TEMPORAL_DELIMITER: { - err = cbs_av1_write_temporal_delimiter_obu(ctx, pbc); - if(err < 0) - return err; - } break; - case AV1_OBU_FRAME_HEADER: - case AV1_OBU_REDUNDANT_FRAME_HEADER: { - err = cbs_av1_write_frame_header_obu(ctx, pbc, - &obu->obu.frame_header, - obu->header.obu_type == - AV1_OBU_REDUNDANT_FRAME_HEADER, - NULL); - if(err < 0) - return err; - } break; - case AV1_OBU_TILE_GROUP: { - err = cbs_av1_write_tile_group_obu(ctx, pbc, - &obu->obu.tile_group); - if(err < 0) - return err; - - td = &obu->obu.tile_group.tile_data; - } break; - case AV1_OBU_FRAME: { - err = cbs_av1_write_frame_obu(ctx, pbc, &obu->obu.frame, NULL); - if(err < 0) - return err; - - td = &obu->obu.frame.tile_group.tile_data; - } break; - case AV1_OBU_TILE_LIST: { - err = cbs_av1_write_tile_list_obu(ctx, pbc, 
&obu->obu.tile_list); - if(err < 0) - return err; - - td = &obu->obu.tile_list.tile_data; - } break; - case AV1_OBU_METADATA: { - err = cbs_av1_write_metadata_obu(ctx, pbc, &obu->obu.metadata); - if(err < 0) - return err; - } break; - case AV1_OBU_PADDING: { - err = cbs_av1_write_padding_obu(ctx, pbc, &obu->obu.padding); - if(err < 0) - return err; - } break; - default: - return AVERROR(ENOSYS); - } - - end_pos = put_bits_count(pbc); - header_size = (end_pos - start_pos + 7) / 8; - if(td) { - obu->obu_size = header_size + td->data_size; - } - else if(header_size > 0) { - // Add trailing bits and recalculate. - err = cbs_av1_write_trailing_bits(ctx, pbc, 8 - end_pos % 8); - if(err < 0) - return err; - end_pos = put_bits_count(pbc); - obu->obu_size = header_size = (end_pos - start_pos + 7) / 8; - } - else { - // Empty OBU. - obu->obu_size = 0; - } - - end_pos = put_bits_count(pbc); - // Must now be byte-aligned. - av_assert0(end_pos % 8 == 0); - flush_put_bits(pbc); - start_pos /= 8; - end_pos /= 8; - - *pbc = pbc_tmp; - err = cbs_av1_write_leb128(ctx, pbc, "obu_size", obu->obu_size); - if(err < 0) - return err; + return 0; +} - data_pos = put_bits_count(pbc) / 8; - flush_put_bits(pbc); - av_assert0(data_pos <= start_pos); +static int cbs_av1_write_obu(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + AV1RawOBU *obu = unit->content; + PutBitContext pbc_tmp; + AV1RawTileData *td; + size_t header_size; + int err, start_pos, end_pos, data_pos; + + // OBUs in the normal bitstream format must contain a size field + // in every OBU (in annex B it is optional, but we don't support + // writing that). + obu->header.obu_has_size_field = 1; + + err = cbs_av1_write_obu_header(ctx, pbc, &obu->header); + if (err < 0) + return err; + + if (obu->header.obu_has_size_field) { + pbc_tmp = *pbc; + // Add space for the size field to fill later. 
+ put_bits32(pbc, 0); + put_bits32(pbc, 0); + } - if(8 * obu->obu_size > put_bits_left(pbc)) - return AVERROR(ENOSPC); + td = NULL; + start_pos = put_bits_count(pbc); + + switch (obu->header.obu_type) { + case AV1_OBU_SEQUENCE_HEADER: + { + err = cbs_av1_write_sequence_header_obu(ctx, pbc, + &obu->obu.sequence_header); + if (err < 0) + return err; + + av_buffer_unref(&priv->sequence_header_ref); + priv->sequence_header = NULL; + + err = ff_cbs_make_unit_refcounted(ctx, unit); + if (err < 0) + return err; + + priv->sequence_header_ref = av_buffer_ref(unit->content_ref); + if (!priv->sequence_header_ref) + return AVERROR(ENOMEM); + priv->sequence_header = &obu->obu.sequence_header; + } + break; + case AV1_OBU_TEMPORAL_DELIMITER: + { + err = cbs_av1_write_temporal_delimiter_obu(ctx, pbc); + if (err < 0) + return err; + } + break; + case AV1_OBU_FRAME_HEADER: + case AV1_OBU_REDUNDANT_FRAME_HEADER: + { + err = cbs_av1_write_frame_header_obu(ctx, pbc, + &obu->obu.frame_header, + obu->header.obu_type == + AV1_OBU_REDUNDANT_FRAME_HEADER, + NULL); + if (err < 0) + return err; + } + break; + case AV1_OBU_TILE_GROUP: + { + err = cbs_av1_write_tile_group_obu(ctx, pbc, + &obu->obu.tile_group); + if (err < 0) + return err; + + td = &obu->obu.tile_group.tile_data; + } + break; + case AV1_OBU_FRAME: + { + err = cbs_av1_write_frame_obu(ctx, pbc, &obu->obu.frame, NULL); + if (err < 0) + return err; + + td = &obu->obu.frame.tile_group.tile_data; + } + break; + case AV1_OBU_TILE_LIST: + { + err = cbs_av1_write_tile_list_obu(ctx, pbc, &obu->obu.tile_list); + if (err < 0) + return err; + + td = &obu->obu.tile_list.tile_data; + } + break; + case AV1_OBU_METADATA: + { + err = cbs_av1_write_metadata_obu(ctx, pbc, &obu->obu.metadata); + if (err < 0) + return err; + } + break; + case AV1_OBU_PADDING: + { + err = cbs_av1_write_padding_obu(ctx, pbc, &obu->obu.padding); + if (err < 0) + return err; + } + break; + default: + return AVERROR(ENOSYS); + } - if(obu->obu_size > 0) { - 
memmove(pbc->buf + data_pos, - pbc->buf + start_pos, header_size); - skip_put_bytes(pbc, header_size); + end_pos = put_bits_count(pbc); + header_size = (end_pos - start_pos + 7) / 8; + if (td) { + obu->obu_size = header_size + td->data_size; + } else if (header_size > 0) { + // Add trailing bits and recalculate. + err = cbs_av1_write_trailing_bits(ctx, pbc, 8 - end_pos % 8); + if (err < 0) + return err; + end_pos = put_bits_count(pbc); + obu->obu_size = header_size = (end_pos - start_pos + 7) / 8; + } else { + // Empty OBU. + obu->obu_size = 0; + } - if(td) { - memcpy(pbc->buf + data_pos + header_size, - td->data, td->data_size); - skip_put_bytes(pbc, td->data_size); + end_pos = put_bits_count(pbc); + // Must now be byte-aligned. + av_assert0(end_pos % 8 == 0); + flush_put_bits(pbc); + start_pos /= 8; + end_pos /= 8; + + *pbc = pbc_tmp; + err = cbs_av1_write_leb128(ctx, pbc, "obu_size", obu->obu_size); + if (err < 0) + return err; + + data_pos = put_bits_count(pbc) / 8; + flush_put_bits(pbc); + av_assert0(data_pos <= start_pos); + + if (8 * obu->obu_size > put_bits_left(pbc)) + return AVERROR(ENOSPC); + + if (obu->obu_size > 0) { + memmove(pbc->buf + data_pos, + pbc->buf + start_pos, header_size); + skip_put_bytes(pbc, header_size); + + if (td) { + memcpy(pbc->buf + data_pos + header_size, + td->data, td->data_size); + skip_put_bytes(pbc, td->data_size); + } } - } - // OBU data must be byte-aligned. - av_assert0(put_bits_count(pbc) % 8 == 0); + // OBU data must be byte-aligned. 
+ av_assert0(put_bits_count(pbc) % 8 == 0); - return 0; + return 0; } static int cbs_av1_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - size_t size, pos; - int i; - - size = 0; - for(i = 0; i < frag->nb_units; i++) - size += frag->units[i].data_size; - - frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!frag->data_ref) - return AVERROR(ENOMEM); - frag->data = frag->data_ref->data; - memset(frag->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - pos = 0; - for(i = 0; i < frag->nb_units; i++) { - memcpy(frag->data + pos, frag->units[i].data, - frag->units[i].data_size); - pos += frag->units[i].data_size; - } - av_assert0(pos == size); - frag->data_size = size; - - return 0; + CodedBitstreamFragment *frag) +{ + size_t size, pos; + int i; + + size = 0; + for (i = 0; i < frag->nb_units; i++) + size += frag->units[i].data_size; + + frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!frag->data_ref) + return AVERROR(ENOMEM); + frag->data = frag->data_ref->data; + memset(frag->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + + pos = 0; + for (i = 0; i < frag->nb_units; i++) { + memcpy(frag->data + pos, frag->units[i].data, + frag->units[i].data_size); + pos += frag->units[i].data_size; + } + av_assert0(pos == size); + frag->data_size = size; + + return 0; } -static void cbs_av1_flush(CodedBitstreamContext *ctx) { - CodedBitstreamAV1Context *priv = ctx->priv_data; +static void cbs_av1_flush(CodedBitstreamContext *ctx) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; - av_buffer_unref(&priv->frame_header_ref); - priv->sequence_header = NULL; - priv->frame_header = NULL; + av_buffer_unref(&priv->frame_header_ref); + priv->sequence_header = NULL; + priv->frame_header = NULL; - memset(priv->ref, 0, sizeof(priv->ref)); - priv->operating_point_idc = 0; - priv->seen_frame_header = 0; - priv->tile_num = 0; + memset(priv->ref, 0, sizeof(priv->ref)); + priv->operating_point_idc = 0; + 
priv->seen_frame_header = 0; + priv->tile_num = 0; } -static void cbs_av1_close(CodedBitstreamContext *ctx) { - CodedBitstreamAV1Context *priv = ctx->priv_data; +static void cbs_av1_close(CodedBitstreamContext *ctx) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; - av_buffer_unref(&priv->sequence_header_ref); - av_buffer_unref(&priv->frame_header_ref); + av_buffer_unref(&priv->sequence_header_ref); + av_buffer_unref(&priv->frame_header_ref); } -static void cbs_av1_free_metadata(void *unit, uint8_t *content) { - AV1RawOBU *obu = (AV1RawOBU *)content; - AV1RawMetadata *md; +static void cbs_av1_free_metadata(void *unit, uint8_t *content) +{ + AV1RawOBU *obu = (AV1RawOBU*)content; + AV1RawMetadata *md; - av_assert0(obu->header.obu_type == AV1_OBU_METADATA); - md = &obu->obu.metadata; + av_assert0(obu->header.obu_type == AV1_OBU_METADATA); + md = &obu->obu.metadata; - switch(md->metadata_type) { - case AV1_METADATA_TYPE_ITUT_T35: - av_buffer_unref(&md->metadata.itut_t35.payload_ref); - break; - } - av_free(content); + switch (md->metadata_type) { + case AV1_METADATA_TYPE_ITUT_T35: + av_buffer_unref(&md->metadata.itut_t35.payload_ref); + break; + } + av_free(content); } static const CodedBitstreamUnitTypeDescriptor cbs_av1_unit_types[] = { - CBS_UNIT_TYPE_POD(AV1_OBU_SEQUENCE_HEADER, AV1RawOBU), - CBS_UNIT_TYPE_POD(AV1_OBU_TEMPORAL_DELIMITER, AV1RawOBU), - CBS_UNIT_TYPE_POD(AV1_OBU_FRAME_HEADER, AV1RawOBU), - CBS_UNIT_TYPE_POD(AV1_OBU_REDUNDANT_FRAME_HEADER, AV1RawOBU), - - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_TILE_GROUP, AV1RawOBU, - obu.tile_group.tile_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_FRAME, AV1RawOBU, - obu.frame.tile_group.tile_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_TILE_LIST, AV1RawOBU, - obu.tile_list.tile_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_PADDING, AV1RawOBU, - obu.padding.payload), - - CBS_UNIT_TYPE_COMPLEX(AV1_OBU_METADATA, AV1RawOBU, - &cbs_av1_free_metadata), - - CBS_UNIT_TYPE_END_OF_LIST + 
CBS_UNIT_TYPE_POD(AV1_OBU_SEQUENCE_HEADER, AV1RawOBU), + CBS_UNIT_TYPE_POD(AV1_OBU_TEMPORAL_DELIMITER, AV1RawOBU), + CBS_UNIT_TYPE_POD(AV1_OBU_FRAME_HEADER, AV1RawOBU), + CBS_UNIT_TYPE_POD(AV1_OBU_REDUNDANT_FRAME_HEADER, AV1RawOBU), + + CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_TILE_GROUP, AV1RawOBU, + obu.tile_group.tile_data.data), + CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_FRAME, AV1RawOBU, + obu.frame.tile_group.tile_data.data), + CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_TILE_LIST, AV1RawOBU, + obu.tile_list.tile_data.data), + CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_PADDING, AV1RawOBU, + obu.padding.payload), + + CBS_UNIT_TYPE_COMPLEX(AV1_OBU_METADATA, AV1RawOBU, + &cbs_av1_free_metadata), + + CBS_UNIT_TYPE_END_OF_LIST }; #define OFFSET(x) offsetof(CodedBitstreamAV1Context, x) static const AVOption cbs_av1_options[] = { - { "operating_point", "Set operating point to select layers to parse from a scalable bitstream", - OFFSET(operating_point), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AV1_MAX_OPERATING_POINTS - 1, 0 }, - { NULL } + { "operating_point", "Set operating point to select layers to parse from a scalable bitstream", + OFFSET(operating_point), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AV1_MAX_OPERATING_POINTS - 1, 0 }, + { NULL } }; static const AVClass cbs_av1_class = { - .class_name = "cbs_av1", - .item_name = av_default_item_name, - .option = cbs_av1_options, - .version = LIBAVUTIL_VERSION_INT, + .class_name = "cbs_av1", + .item_name = av_default_item_name, + .option = cbs_av1_options, + .version = LIBAVUTIL_VERSION_INT, }; const CodedBitstreamType ff_cbs_type_av1 = { - .codec_id = AV_CODEC_ID_AV1, + .codec_id = AV_CODEC_ID_AV1, - .priv_class = &cbs_av1_class, - .priv_data_size = sizeof(CodedBitstreamAV1Context), + .priv_class = &cbs_av1_class, + .priv_data_size = sizeof(CodedBitstreamAV1Context), - .unit_types = cbs_av1_unit_types, + .unit_types = cbs_av1_unit_types, - .split_fragment = &cbs_av1_split_fragment, - .read_unit = &cbs_av1_read_unit, - .write_unit = &cbs_av1_write_obu, - 
.assemble_fragment = &cbs_av1_assemble_fragment, + .split_fragment = &cbs_av1_split_fragment, + .read_unit = &cbs_av1_read_unit, + .write_unit = &cbs_av1_write_obu, + .assemble_fragment = &cbs_av1_assemble_fragment, - .flush = &cbs_av1_flush, - .close = &cbs_av1_close, + .flush = &cbs_av1_flush, + .close = &cbs_av1_close, }; diff --git a/third-party/cbs/cbs_av1_syntax_template.c b/third-party/cbs/cbs_av1_syntax_template.c index 1768a6d2f6a..d98d3d42dea 100644 --- a/third-party/cbs/cbs_av1_syntax_template.c +++ b/third-party/cbs/cbs_av1_syntax_template.c @@ -17,2047 +17,2037 @@ */ static int FUNC(obu_header)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawOBUHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; + AV1RawOBUHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int err; - HEADER("OBU header"); + HEADER("OBU header"); - fc(1, obu_forbidden_bit, 0, 0); + fc(1, obu_forbidden_bit, 0, 0); - fc(4, obu_type, 0, AV1_OBU_PADDING); - flag(obu_extension_flag); - flag(obu_has_size_field); + fc(4, obu_type, 0, AV1_OBU_PADDING); + flag(obu_extension_flag); + flag(obu_has_size_field); - fc(1, obu_reserved_1bit, 0, 0); + fc(1, obu_reserved_1bit, 0, 0); - if(current->obu_extension_flag) { - fb(3, temporal_id); - fb(2, spatial_id); - fc(3, extension_header_reserved_3bits, 0, 0); - } - else { - infer(temporal_id, 0); - infer(spatial_id, 0); - } + if (current->obu_extension_flag) { + fb(3, temporal_id); + fb(2, spatial_id); + fc(3, extension_header_reserved_3bits, 0, 0); + } else { + infer(temporal_id, 0); + infer(spatial_id, 0); + } - priv->temporal_id = current->temporal_id; - priv->spatial_id = current->spatial_id; + priv->temporal_id = current->temporal_id; + priv->spatial_id = current->spatial_id; - return 0; + return 0; } -static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw, int nb_bits) { - int err; - - av_assert0(nb_bits > 0); +static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext 
*rw, int nb_bits) +{ + int err; - fixed(1, trailing_one_bit, 1); - --nb_bits; + av_assert0(nb_bits > 0); - while(nb_bits > 0) { - fixed(1, trailing_zero_bit, 0); + fixed(1, trailing_one_bit, 1); --nb_bits; - } - return 0; + while (nb_bits > 0) { + fixed(1, trailing_zero_bit, 0); + --nb_bits; + } + + return 0; } -static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw) { - int err; +static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw) +{ + int err; - while(byte_alignment(rw) != 0) - fixed(1, zero_bit, 0); + while (byte_alignment(rw) != 0) + fixed(1, zero_bit, 0); - return 0; + return 0; } static int FUNC(color_config)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawColorConfig *current, int seq_profile) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; - - flag(high_bitdepth); - - if(seq_profile == FF_PROFILE_AV1_PROFESSIONAL && - current->high_bitdepth) { - flag(twelve_bit); - priv->bit_depth = current->twelve_bit ? 12 : 10; - } - else { - priv->bit_depth = current->high_bitdepth ? 10 : 8; - } - - if(seq_profile == FF_PROFILE_AV1_HIGH) - infer(mono_chrome, 0); - else - flag(mono_chrome); - priv->num_planes = current->mono_chrome ? 
1 : 3; - - flag(color_description_present_flag); - if(current->color_description_present_flag) { - fb(8, color_primaries); - fb(8, transfer_characteristics); - fb(8, matrix_coefficients); - } - else { - infer(color_primaries, AVCOL_PRI_UNSPECIFIED); - infer(transfer_characteristics, AVCOL_TRC_UNSPECIFIED); - infer(matrix_coefficients, AVCOL_SPC_UNSPECIFIED); - } - - if(current->mono_chrome) { - flag(color_range); - - infer(subsampling_x, 1); - infer(subsampling_y, 1); - infer(chroma_sample_position, AV1_CSP_UNKNOWN); - infer(separate_uv_delta_q, 0); - } - else if(current->color_primaries == AVCOL_PRI_BT709 && - current->transfer_characteristics == AVCOL_TRC_IEC61966_2_1 && - current->matrix_coefficients == AVCOL_SPC_RGB) { - infer(color_range, 1); - infer(subsampling_x, 0); - infer(subsampling_y, 0); - flag(separate_uv_delta_q); - } - else { - flag(color_range); - - if(seq_profile == FF_PROFILE_AV1_MAIN) { - infer(subsampling_x, 1); - infer(subsampling_y, 1); - } - else if(seq_profile == FF_PROFILE_AV1_HIGH) { - infer(subsampling_x, 0); - infer(subsampling_y, 0); - } - else { - if(priv->bit_depth == 12) { - fb(1, subsampling_x); - if(current->subsampling_x) - fb(1, subsampling_y); - else - infer(subsampling_y, 0); - } - else { - infer(subsampling_x, 1); - infer(subsampling_y, 0); - } + AV1RawColorConfig *current, int seq_profile) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int err; + + flag(high_bitdepth); + + if (seq_profile == FF_PROFILE_AV1_PROFESSIONAL && + current->high_bitdepth) { + flag(twelve_bit); + priv->bit_depth = current->twelve_bit ? 12 : 10; + } else { + priv->bit_depth = current->high_bitdepth ? 10 : 8; } - if(current->subsampling_x && current->subsampling_y) { - fc(2, chroma_sample_position, AV1_CSP_UNKNOWN, - AV1_CSP_COLOCATED); + + if (seq_profile == FF_PROFILE_AV1_HIGH) + infer(mono_chrome, 0); + else + flag(mono_chrome); + priv->num_planes = current->mono_chrome ? 
1 : 3; + + flag(color_description_present_flag); + if (current->color_description_present_flag) { + fb(8, color_primaries); + fb(8, transfer_characteristics); + fb(8, matrix_coefficients); + } else { + infer(color_primaries, AVCOL_PRI_UNSPECIFIED); + infer(transfer_characteristics, AVCOL_TRC_UNSPECIFIED); + infer(matrix_coefficients, AVCOL_SPC_UNSPECIFIED); } - flag(separate_uv_delta_q); - } + if (current->mono_chrome) { + flag(color_range); - return 0; + infer(subsampling_x, 1); + infer(subsampling_y, 1); + infer(chroma_sample_position, AV1_CSP_UNKNOWN); + infer(separate_uv_delta_q, 0); + + } else if (current->color_primaries == AVCOL_PRI_BT709 && + current->transfer_characteristics == AVCOL_TRC_IEC61966_2_1 && + current->matrix_coefficients == AVCOL_SPC_RGB) { + infer(color_range, 1); + infer(subsampling_x, 0); + infer(subsampling_y, 0); + flag(separate_uv_delta_q); + + } else { + flag(color_range); + + if (seq_profile == FF_PROFILE_AV1_MAIN) { + infer(subsampling_x, 1); + infer(subsampling_y, 1); + } else if (seq_profile == FF_PROFILE_AV1_HIGH) { + infer(subsampling_x, 0); + infer(subsampling_y, 0); + } else { + if (priv->bit_depth == 12) { + fb(1, subsampling_x); + if (current->subsampling_x) + fb(1, subsampling_y); + else + infer(subsampling_y, 0); + } else { + infer(subsampling_x, 1); + infer(subsampling_y, 0); + } + } + if (current->subsampling_x && current->subsampling_y) { + fc(2, chroma_sample_position, AV1_CSP_UNKNOWN, + AV1_CSP_COLOCATED); + } + + flag(separate_uv_delta_q); + } + + return 0; } static int FUNC(timing_info)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawTimingInfo *current) { - int err; + AV1RawTimingInfo *current) +{ + int err; - fc(32, num_units_in_display_tick, 1, MAX_UINT_BITS(32)); - fc(32, time_scale, 1, MAX_UINT_BITS(32)); + fc(32, num_units_in_display_tick, 1, MAX_UINT_BITS(32)); + fc(32, time_scale, 1, MAX_UINT_BITS(32)); - flag(equal_picture_interval); - if(current->equal_picture_interval) - 
uvlc(num_ticks_per_picture_minus_1, 0, MAX_UINT_BITS(32) - 1); + flag(equal_picture_interval); + if (current->equal_picture_interval) + uvlc(num_ticks_per_picture_minus_1, 0, MAX_UINT_BITS(32) - 1); - return 0; + return 0; } static int FUNC(decoder_model_info)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawDecoderModelInfo *current) { - int err; + AV1RawDecoderModelInfo *current) +{ + int err; - fb(5, buffer_delay_length_minus_1); - fb(32, num_units_in_decoding_tick); - fb(5, buffer_removal_time_length_minus_1); - fb(5, frame_presentation_time_length_minus_1); + fb(5, buffer_delay_length_minus_1); + fb(32, num_units_in_decoding_tick); + fb(5, buffer_removal_time_length_minus_1); + fb(5, frame_presentation_time_length_minus_1); - return 0; + return 0; } static int FUNC(sequence_header_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawSequenceHeader *current) { - int i, err; - - HEADER("Sequence Header"); - - fc(3, seq_profile, FF_PROFILE_AV1_MAIN, - FF_PROFILE_AV1_PROFESSIONAL); - flag(still_picture); - flag(reduced_still_picture_header); - - if(current->reduced_still_picture_header) { - infer(timing_info_present_flag, 0); - infer(decoder_model_info_present_flag, 0); - infer(initial_display_delay_present_flag, 0); - infer(operating_points_cnt_minus_1, 0); - infer(operating_point_idc[0], 0); - - fb(5, seq_level_idx[0]); - - infer(seq_tier[0], 0); - infer(decoder_model_present_for_this_op[0], 0); - infer(initial_display_delay_present_for_this_op[0], 0); - } - else { - flag(timing_info_present_flag); - if(current->timing_info_present_flag) { - CHECK(FUNC(timing_info)(ctx, rw, ¤t->timing_info)); - - flag(decoder_model_info_present_flag); - if(current->decoder_model_info_present_flag) { - CHECK(FUNC(decoder_model_info)(ctx, rw, ¤t->decoder_model_info)); - } - } - else { - infer(decoder_model_info_present_flag, 0); - } - - flag(initial_display_delay_present_flag); - - fb(5, operating_points_cnt_minus_1); - for(i = 0; i <= current->operating_points_cnt_minus_1; 
i++) { - fbs(12, operating_point_idc[i], 1, i); - fbs(5, seq_level_idx[i], 1, i); - - if(current->seq_level_idx[i] > 7) - flags(seq_tier[i], 1, i); - else - infer(seq_tier[i], 0); - - if(current->decoder_model_info_present_flag) { - flags(decoder_model_present_for_this_op[i], 1, i); - if(current->decoder_model_present_for_this_op[i]) { - int n = current->decoder_model_info.buffer_delay_length_minus_1 + 1; - fbs(n, decoder_buffer_delay[i], 1, i); - fbs(n, encoder_buffer_delay[i], 1, i); - flags(low_delay_mode_flag[i], 1, i); + AV1RawSequenceHeader *current) +{ + int i, err; + + HEADER("Sequence Header"); + + fc(3, seq_profile, FF_PROFILE_AV1_MAIN, + FF_PROFILE_AV1_PROFESSIONAL); + flag(still_picture); + flag(reduced_still_picture_header); + + if (current->reduced_still_picture_header) { + infer(timing_info_present_flag, 0); + infer(decoder_model_info_present_flag, 0); + infer(initial_display_delay_present_flag, 0); + infer(operating_points_cnt_minus_1, 0); + infer(operating_point_idc[0], 0); + + fb(5, seq_level_idx[0]); + + infer(seq_tier[0], 0); + infer(decoder_model_present_for_this_op[0], 0); + infer(initial_display_delay_present_for_this_op[0], 0); + + } else { + flag(timing_info_present_flag); + if (current->timing_info_present_flag) { + CHECK(FUNC(timing_info)(ctx, rw, ¤t->timing_info)); + + flag(decoder_model_info_present_flag); + if (current->decoder_model_info_present_flag) { + CHECK(FUNC(decoder_model_info) + (ctx, rw, ¤t->decoder_model_info)); + } + } else { + infer(decoder_model_info_present_flag, 0); } - } - else { - infer(decoder_model_present_for_this_op[i], 0); - } - - if(current->initial_display_delay_present_flag) { - flags(initial_display_delay_present_for_this_op[i], 1, i); - if(current->initial_display_delay_present_for_this_op[i]) - fbs(4, initial_display_delay_minus_1[i], 1, i); - } - } - } - - fb(4, frame_width_bits_minus_1); - fb(4, frame_height_bits_minus_1); - - fb(current->frame_width_bits_minus_1 + 1, max_frame_width_minus_1); - 
fb(current->frame_height_bits_minus_1 + 1, max_frame_height_minus_1); - - if(current->reduced_still_picture_header) - infer(frame_id_numbers_present_flag, 0); - else - flag(frame_id_numbers_present_flag); - if(current->frame_id_numbers_present_flag) { - fb(4, delta_frame_id_length_minus_2); - fb(3, additional_frame_id_length_minus_1); - } - - flag(use_128x128_superblock); - flag(enable_filter_intra); - flag(enable_intra_edge_filter); - - if(current->reduced_still_picture_header) { - infer(enable_interintra_compound, 0); - infer(enable_masked_compound, 0); - infer(enable_warped_motion, 0); - infer(enable_dual_filter, 0); - infer(enable_order_hint, 0); - infer(enable_jnt_comp, 0); - infer(enable_ref_frame_mvs, 0); - - infer(seq_force_screen_content_tools, - AV1_SELECT_SCREEN_CONTENT_TOOLS); - infer(seq_force_integer_mv, - AV1_SELECT_INTEGER_MV); - } - else { - flag(enable_interintra_compound); - flag(enable_masked_compound); - flag(enable_warped_motion); - flag(enable_dual_filter); - - flag(enable_order_hint); - if(current->enable_order_hint) { - flag(enable_jnt_comp); - flag(enable_ref_frame_mvs); - } - else { - infer(enable_jnt_comp, 0); - infer(enable_ref_frame_mvs, 0); - } - - flag(seq_choose_screen_content_tools); - if(current->seq_choose_screen_content_tools) - infer(seq_force_screen_content_tools, - AV1_SELECT_SCREEN_CONTENT_TOOLS); + + flag(initial_display_delay_present_flag); + + fb(5, operating_points_cnt_minus_1); + for (i = 0; i <= current->operating_points_cnt_minus_1; i++) { + fbs(12, operating_point_idc[i], 1, i); + fbs(5, seq_level_idx[i], 1, i); + + if (current->seq_level_idx[i] > 7) + flags(seq_tier[i], 1, i); + else + infer(seq_tier[i], 0); + + if (current->decoder_model_info_present_flag) { + flags(decoder_model_present_for_this_op[i], 1, i); + if (current->decoder_model_present_for_this_op[i]) { + int n = current->decoder_model_info.buffer_delay_length_minus_1 + 1; + fbs(n, decoder_buffer_delay[i], 1, i); + fbs(n, encoder_buffer_delay[i], 1, i); 
+ flags(low_delay_mode_flag[i], 1, i); + } + } else { + infer(decoder_model_present_for_this_op[i], 0); + } + + if (current->initial_display_delay_present_flag) { + flags(initial_display_delay_present_for_this_op[i], 1, i); + if (current->initial_display_delay_present_for_this_op[i]) + fbs(4, initial_display_delay_minus_1[i], 1, i); + } + } + } + + fb(4, frame_width_bits_minus_1); + fb(4, frame_height_bits_minus_1); + + fb(current->frame_width_bits_minus_1 + 1, max_frame_width_minus_1); + fb(current->frame_height_bits_minus_1 + 1, max_frame_height_minus_1); + + if (current->reduced_still_picture_header) + infer(frame_id_numbers_present_flag, 0); else - fb(1, seq_force_screen_content_tools); - if(current->seq_force_screen_content_tools > 0) { - flag(seq_choose_integer_mv); - if(current->seq_choose_integer_mv) + flag(frame_id_numbers_present_flag); + if (current->frame_id_numbers_present_flag) { + fb(4, delta_frame_id_length_minus_2); + fb(3, additional_frame_id_length_minus_1); + } + + flag(use_128x128_superblock); + flag(enable_filter_intra); + flag(enable_intra_edge_filter); + + if (current->reduced_still_picture_header) { + infer(enable_interintra_compound, 0); + infer(enable_masked_compound, 0); + infer(enable_warped_motion, 0); + infer(enable_dual_filter, 0); + infer(enable_order_hint, 0); + infer(enable_jnt_comp, 0); + infer(enable_ref_frame_mvs, 0); + + infer(seq_force_screen_content_tools, + AV1_SELECT_SCREEN_CONTENT_TOOLS); infer(seq_force_integer_mv, - AV1_SELECT_INTEGER_MV); - else - fb(1, seq_force_integer_mv); - } - else { - infer(seq_force_integer_mv, AV1_SELECT_INTEGER_MV); - } + AV1_SELECT_INTEGER_MV); + } else { + flag(enable_interintra_compound); + flag(enable_masked_compound); + flag(enable_warped_motion); + flag(enable_dual_filter); + + flag(enable_order_hint); + if (current->enable_order_hint) { + flag(enable_jnt_comp); + flag(enable_ref_frame_mvs); + } else { + infer(enable_jnt_comp, 0); + infer(enable_ref_frame_mvs, 0); + } - 
if(current->enable_order_hint) - fb(3, order_hint_bits_minus_1); - } + flag(seq_choose_screen_content_tools); + if (current->seq_choose_screen_content_tools) + infer(seq_force_screen_content_tools, + AV1_SELECT_SCREEN_CONTENT_TOOLS); + else + fb(1, seq_force_screen_content_tools); + if (current->seq_force_screen_content_tools > 0) { + flag(seq_choose_integer_mv); + if (current->seq_choose_integer_mv) + infer(seq_force_integer_mv, + AV1_SELECT_INTEGER_MV); + else + fb(1, seq_force_integer_mv); + } else { + infer(seq_force_integer_mv, AV1_SELECT_INTEGER_MV); + } - flag(enable_superres); - flag(enable_cdef); - flag(enable_restoration); + if (current->enable_order_hint) + fb(3, order_hint_bits_minus_1); + } + + flag(enable_superres); + flag(enable_cdef); + flag(enable_restoration); - CHECK(FUNC(color_config)(ctx, rw, ¤t->color_config, - current->seq_profile)); + CHECK(FUNC(color_config)(ctx, rw, ¤t->color_config, + current->seq_profile)); - flag(film_grain_params_present); + flag(film_grain_params_present); - return 0; + return 0; } -static int FUNC(temporal_delimiter_obu)(CodedBitstreamContext *ctx, RWContext *rw) { - CodedBitstreamAV1Context *priv = ctx->priv_data; +static int FUNC(temporal_delimiter_obu)(CodedBitstreamContext *ctx, RWContext *rw) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; - HEADER("Temporal Delimiter"); + HEADER("Temporal Delimiter"); - priv->seen_frame_header = 0; + priv->seen_frame_header = 0; - return 0; + return 0; } static int FUNC(set_frame_refs)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - static const uint8_t ref_frame_list[AV1_NUM_REF_FRAMES - 2] = { - AV1_REF_FRAME_LAST2, AV1_REF_FRAME_LAST3, AV1_REF_FRAME_BWDREF, - AV1_REF_FRAME_ALTREF2, AV1_REF_FRAME_ALTREF - }; - int8_t ref_frame_idx[AV1_REFS_PER_FRAME], used_frame[AV1_NUM_REF_FRAMES]; - int8_t shifted_order_hints[AV1_NUM_REF_FRAMES]; - 
int cur_frame_hint, latest_order_hint, earliest_order_hint, ref; - int i, j; - - for(i = 0; i < AV1_REFS_PER_FRAME; i++) - ref_frame_idx[i] = -1; - ref_frame_idx[AV1_REF_FRAME_LAST - AV1_REF_FRAME_LAST] = current->last_frame_idx; - ref_frame_idx[AV1_REF_FRAME_GOLDEN - AV1_REF_FRAME_LAST] = current->golden_frame_idx; - - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) - used_frame[i] = 0; - used_frame[current->last_frame_idx] = 1; - used_frame[current->golden_frame_idx] = 1; - - cur_frame_hint = 1 << (seq->order_hint_bits_minus_1); - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) - shifted_order_hints[i] = cur_frame_hint + - cbs_av1_get_relative_dist(seq, priv->ref[i].order_hint, - priv->order_hint); - - latest_order_hint = shifted_order_hints[current->last_frame_idx]; - earliest_order_hint = shifted_order_hints[current->golden_frame_idx]; - - ref = -1; - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if(!used_frame[i] && hint >= cur_frame_hint && - (ref < 0 || hint >= latest_order_hint)) { - ref = i; - latest_order_hint = hint; - } - } - if(ref >= 0) { - ref_frame_idx[AV1_REF_FRAME_ALTREF - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - - ref = -1; - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if(!used_frame[i] && hint >= cur_frame_hint && - (ref < 0 || hint < earliest_order_hint)) { - ref = i; - earliest_order_hint = hint; - } - } - if(ref >= 0) { - ref_frame_idx[AV1_REF_FRAME_BWDREF - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - - ref = -1; - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if(!used_frame[i] && hint >= cur_frame_hint && - (ref < 0 || hint < earliest_order_hint)) { - ref = i; - earliest_order_hint = hint; - } - } - if(ref >= 0) { - ref_frame_idx[AV1_REF_FRAME_ALTREF2 - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - - for(i = 0; i < AV1_REFS_PER_FRAME - 2; i++) { - int ref_frame = ref_frame_list[i]; - if(ref_frame_idx[ref_frame - 
AV1_REF_FRAME_LAST] < 0) { - ref = -1; - for(j = 0; j < AV1_NUM_REF_FRAMES; j++) { - int hint = shifted_order_hints[j]; - if(!used_frame[j] && hint < cur_frame_hint && - (ref < 0 || hint >= latest_order_hint)) { - ref = j; - latest_order_hint = hint; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + static const uint8_t ref_frame_list[AV1_NUM_REF_FRAMES - 2] = { + AV1_REF_FRAME_LAST2, AV1_REF_FRAME_LAST3, AV1_REF_FRAME_BWDREF, + AV1_REF_FRAME_ALTREF2, AV1_REF_FRAME_ALTREF + }; + int8_t ref_frame_idx[AV1_REFS_PER_FRAME], used_frame[AV1_NUM_REF_FRAMES]; + int16_t shifted_order_hints[AV1_NUM_REF_FRAMES]; + int cur_frame_hint, latest_order_hint, earliest_order_hint, ref; + int i, j; + + for (i = 0; i < AV1_REFS_PER_FRAME; i++) + ref_frame_idx[i] = -1; + ref_frame_idx[AV1_REF_FRAME_LAST - AV1_REF_FRAME_LAST] = current->last_frame_idx; + ref_frame_idx[AV1_REF_FRAME_GOLDEN - AV1_REF_FRAME_LAST] = current->golden_frame_idx; + + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) + used_frame[i] = 0; + used_frame[current->last_frame_idx] = 1; + used_frame[current->golden_frame_idx] = 1; + + cur_frame_hint = 1 << (seq->order_hint_bits_minus_1); + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) + shifted_order_hints[i] = cur_frame_hint + + cbs_av1_get_relative_dist(seq, priv->ref[i].order_hint, + priv->order_hint); + + latest_order_hint = shifted_order_hints[current->last_frame_idx]; + earliest_order_hint = shifted_order_hints[current->golden_frame_idx]; + + ref = -1; + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + int hint = shifted_order_hints[i]; + if (!used_frame[i] && hint >= cur_frame_hint && + (ref < 0 || hint >= latest_order_hint)) { + ref = i; + latest_order_hint = hint; + } + } + if (ref >= 0) { + ref_frame_idx[AV1_REF_FRAME_ALTREF - AV1_REF_FRAME_LAST] = ref; + used_frame[ref] = 1; + } + + ref = -1; + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + int hint = shifted_order_hints[i]; + if 
(!used_frame[i] && hint >= cur_frame_hint && + (ref < 0 || hint < earliest_order_hint)) { + ref = i; + earliest_order_hint = hint; + } + } + if (ref >= 0) { + ref_frame_idx[AV1_REF_FRAME_BWDREF - AV1_REF_FRAME_LAST] = ref; + used_frame[ref] = 1; + } + + ref = -1; + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + int hint = shifted_order_hints[i]; + if (!used_frame[i] && hint >= cur_frame_hint && + (ref < 0 || hint < earliest_order_hint)) { + ref = i; + earliest_order_hint = hint; + } + } + if (ref >= 0) { + ref_frame_idx[AV1_REF_FRAME_ALTREF2 - AV1_REF_FRAME_LAST] = ref; + used_frame[ref] = 1; + } + + for (i = 0; i < AV1_REFS_PER_FRAME - 2; i++) { + int ref_frame = ref_frame_list[i]; + if (ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] < 0 ) { + ref = -1; + for (j = 0; j < AV1_NUM_REF_FRAMES; j++) { + int hint = shifted_order_hints[j]; + if (!used_frame[j] && hint < cur_frame_hint && + (ref < 0 || hint >= latest_order_hint)) { + ref = j; + latest_order_hint = hint; + } + } + if (ref >= 0) { + ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] = ref; + used_frame[ref] = 1; + } } - } - if(ref >= 0) { - ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - } - } - - ref = -1; - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if(ref < 0 || hint < earliest_order_hint) { - ref = i; - earliest_order_hint = hint; - } - } - for(i = 0; i < AV1_REFS_PER_FRAME; i++) { - if(ref_frame_idx[i] < 0) - ref_frame_idx[i] = ref; - infer(ref_frame_idx[i], ref_frame_idx[i]); - } - - return 0; + } + + ref = -1; + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + int hint = shifted_order_hints[i]; + if (ref < 0 || hint < earliest_order_hint) { + ref = i; + earliest_order_hint = hint; + } + } + for (i = 0; i < AV1_REFS_PER_FRAME; i++) { + if (ref_frame_idx[i] < 0) + ref_frame_idx[i] = ref; + infer(ref_frame_idx[i], ref_frame_idx[i]); + } + + return 0; } static int FUNC(superres_params)(CodedBitstreamContext *ctx, RWContext *rw, - 
AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int denom, err; - - if(seq->enable_superres) - flag(use_superres); - else - infer(use_superres, 0); - - if(current->use_superres) { - fb(3, coded_denom); - denom = current->coded_denom + AV1_SUPERRES_DENOM_MIN; - } - else { - denom = AV1_SUPERRES_NUM; - } - - priv->upscaled_width = priv->frame_width; - priv->frame_width = (priv->upscaled_width * AV1_SUPERRES_NUM + - denom / 2) / - denom; - - return 0; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int denom, err; + + if (seq->enable_superres) + flag(use_superres); + else + infer(use_superres, 0); + + if (current->use_superres) { + fb(3, coded_denom); + denom = current->coded_denom + AV1_SUPERRES_DENOM_MIN; + } else { + denom = AV1_SUPERRES_NUM; + } + + priv->upscaled_width = priv->frame_width; + priv->frame_width = (priv->upscaled_width * AV1_SUPERRES_NUM + + denom / 2) / denom; + + return 0; } static int FUNC(frame_size)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int err; - - if(current->frame_size_override_flag) { - fb(seq->frame_width_bits_minus_1 + 1, frame_width_minus_1); - fb(seq->frame_height_bits_minus_1 + 1, frame_height_minus_1); - } - else { - infer(frame_width_minus_1, seq->max_frame_width_minus_1); - infer(frame_height_minus_1, seq->max_frame_height_minus_1); - } - - priv->frame_width = current->frame_width_minus_1 + 1; - priv->frame_height = current->frame_height_minus_1 + 1; - - CHECK(FUNC(superres_params)(ctx, rw, current)); - - return 0; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int err; + + if 
(current->frame_size_override_flag) { + fb(seq->frame_width_bits_minus_1 + 1, frame_width_minus_1); + fb(seq->frame_height_bits_minus_1 + 1, frame_height_minus_1); + } else { + infer(frame_width_minus_1, seq->max_frame_width_minus_1); + infer(frame_height_minus_1, seq->max_frame_height_minus_1); + } + + priv->frame_width = current->frame_width_minus_1 + 1; + priv->frame_height = current->frame_height_minus_1 + 1; + + CHECK(FUNC(superres_params)(ctx, rw, current)); + + return 0; } static int FUNC(render_size)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int err; - flag(render_and_frame_size_different); + flag(render_and_frame_size_different); - if(current->render_and_frame_size_different) { - fb(16, render_width_minus_1); - fb(16, render_height_minus_1); - } - else { - infer(render_width_minus_1, current->frame_width_minus_1); - infer(render_height_minus_1, current->frame_height_minus_1); - } + if (current->render_and_frame_size_different) { + fb(16, render_width_minus_1); + fb(16, render_height_minus_1); + } else { + infer(render_width_minus_1, current->frame_width_minus_1); + infer(render_height_minus_1, current->frame_height_minus_1); + } - priv->render_width = current->render_width_minus_1 + 1; - priv->render_height = current->render_height_minus_1 + 1; + priv->render_width = current->render_width_minus_1 + 1; + priv->render_height = current->render_height_minus_1 + 1; - return 0; + return 0; } static int FUNC(frame_size_with_refs)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int i, err; - - for(i = 0; i < AV1_REFS_PER_FRAME; i++) { - flags(found_ref[i], 1, i); - if(current->found_ref[i]) { - AV1ReferenceFrameState *ref = - &priv->ref[current->ref_frame_idx[i]]; - - if(!ref->valid) { - av_log(ctx->log_ctx, 
AV_LOG_ERROR, - "Missing reference frame needed for frame size " - "(ref = %d, ref_frame_idx = %d).\n", - i, current->ref_frame_idx[i]); - return AVERROR_INVALIDDATA; - } - - infer(frame_width_minus_1, ref->upscaled_width - 1); - infer(frame_height_minus_1, ref->frame_height - 1); - infer(render_width_minus_1, ref->render_width - 1); - infer(render_height_minus_1, ref->render_height - 1); - - priv->upscaled_width = ref->upscaled_width; - priv->frame_width = priv->upscaled_width; - priv->frame_height = ref->frame_height; - priv->render_width = ref->render_width; - priv->render_height = ref->render_height; - break; - } - } - - if(i >= AV1_REFS_PER_FRAME) { - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - } - else { - CHECK(FUNC(superres_params)(ctx, rw, current)); - } + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int i, err; + + for (i = 0; i < AV1_REFS_PER_FRAME; i++) { + flags(found_ref[i], 1, i); + if (current->found_ref[i]) { + AV1ReferenceFrameState *ref = + &priv->ref[current->ref_frame_idx[i]]; + + if (!ref->valid) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "Missing reference frame needed for frame size " + "(ref = %d, ref_frame_idx = %d).\n", + i, current->ref_frame_idx[i]); + return AVERROR_INVALIDDATA; + } + + infer(frame_width_minus_1, ref->upscaled_width - 1); + infer(frame_height_minus_1, ref->frame_height - 1); + infer(render_width_minus_1, ref->render_width - 1); + infer(render_height_minus_1, ref->render_height - 1); + + priv->upscaled_width = ref->upscaled_width; + priv->frame_width = priv->upscaled_width; + priv->frame_height = ref->frame_height; + priv->render_width = ref->render_width; + priv->render_height = ref->render_height; + break; + } + } + + if (i >= AV1_REFS_PER_FRAME) { + CHECK(FUNC(frame_size)(ctx, rw, current)); + CHECK(FUNC(render_size)(ctx, rw, current)); + } else { + CHECK(FUNC(superres_params)(ctx, rw, current)); + } - return 0; + return 0; } static 
int FUNC(interpolation_filter)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - int err; - - flag(is_filter_switchable); - if(current->is_filter_switchable) - infer(interpolation_filter, - AV1_INTERPOLATION_FILTER_SWITCHABLE); - else - fb(2, interpolation_filter); + AV1RawFrameHeader *current) +{ + int err; + + flag(is_filter_switchable); + if (current->is_filter_switchable) + infer(interpolation_filter, + AV1_INTERPOLATION_FILTER_SWITCHABLE); + else + fb(2, interpolation_filter); - return 0; + return 0; } static int FUNC(tile_info)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int mi_cols, mi_rows, sb_cols, sb_rows, sb_shift, sb_size; - int max_tile_width_sb, max_tile_height_sb, max_tile_area_sb; - int min_log2_tile_cols, max_log2_tile_cols, max_log2_tile_rows; - int min_log2_tiles, min_log2_tile_rows; - int i, err; - - mi_cols = 2 * ((priv->frame_width + 7) >> 3); - mi_rows = 2 * ((priv->frame_height + 7) >> 3); - - sb_cols = seq->use_128x128_superblock ? ((mi_cols + 31) >> 5) : ((mi_cols + 15) >> 4); - sb_rows = seq->use_128x128_superblock ? ((mi_rows + 31) >> 5) : ((mi_rows + 15) >> 4); - - sb_shift = seq->use_128x128_superblock ? 
5 : 4; - sb_size = sb_shift + 2; - - max_tile_width_sb = AV1_MAX_TILE_WIDTH >> sb_size; - max_tile_area_sb = AV1_MAX_TILE_AREA >> (2 * sb_size); - - min_log2_tile_cols = cbs_av1_tile_log2(max_tile_width_sb, sb_cols); - max_log2_tile_cols = cbs_av1_tile_log2(1, FFMIN(sb_cols, AV1_MAX_TILE_COLS)); - max_log2_tile_rows = cbs_av1_tile_log2(1, FFMIN(sb_rows, AV1_MAX_TILE_ROWS)); - min_log2_tiles = FFMAX(min_log2_tile_cols, - cbs_av1_tile_log2(max_tile_area_sb, sb_rows * sb_cols)); - - flag(uniform_tile_spacing_flag); - - if(current->uniform_tile_spacing_flag) { - int tile_width_sb, tile_height_sb; - - increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols); - - tile_width_sb = (sb_cols + (1 << current->tile_cols_log2) - 1) >> - current->tile_cols_log2; - current->tile_cols = (sb_cols + tile_width_sb - 1) / tile_width_sb; - - min_log2_tile_rows = FFMAX(min_log2_tiles - current->tile_cols_log2, 0); - - increment(tile_rows_log2, min_log2_tile_rows, max_log2_tile_rows); - - tile_height_sb = (sb_rows + (1 << current->tile_rows_log2) - 1) >> - current->tile_rows_log2; - current->tile_rows = (sb_rows + tile_height_sb - 1) / tile_height_sb; - - for(i = 0; i < current->tile_cols - 1; i++) - infer(width_in_sbs_minus_1[i], tile_width_sb - 1); - infer(width_in_sbs_minus_1[i], - sb_cols - (current->tile_cols - 1) * tile_width_sb - 1); - for(i = 0; i < current->tile_rows - 1; i++) - infer(height_in_sbs_minus_1[i], tile_height_sb - 1); - infer(height_in_sbs_minus_1[i], - sb_rows - (current->tile_rows - 1) * tile_height_sb - 1); - } - else { - int widest_tile_sb, start_sb, size_sb, max_width, max_height; - - widest_tile_sb = 0; - - start_sb = 0; - for(i = 0; start_sb < sb_cols && i < AV1_MAX_TILE_COLS; i++) { - max_width = FFMIN(sb_cols - start_sb, max_tile_width_sb); - ns(max_width, width_in_sbs_minus_1[i], 1, i); - size_sb = current->width_in_sbs_minus_1[i] + 1; - widest_tile_sb = FFMAX(size_sb, widest_tile_sb); - start_sb += size_sb; - } - current->tile_cols_log2 = 
cbs_av1_tile_log2(1, i); - current->tile_cols = i; - - if(min_log2_tiles > 0) - max_tile_area_sb = (sb_rows * sb_cols) >> (min_log2_tiles + 1); - else - max_tile_area_sb = sb_rows * sb_cols; - max_tile_height_sb = FFMAX(max_tile_area_sb / widest_tile_sb, 1); - - start_sb = 0; - for(i = 0; start_sb < sb_rows && i < AV1_MAX_TILE_ROWS; i++) { - max_height = FFMIN(sb_rows - start_sb, max_tile_height_sb); - ns(max_height, height_in_sbs_minus_1[i], 1, i); - size_sb = current->height_in_sbs_minus_1[i] + 1; - start_sb += size_sb; - } - current->tile_rows_log2 = cbs_av1_tile_log2(1, i); - current->tile_rows = i; - } - - if(current->tile_cols_log2 > 0 || - current->tile_rows_log2 > 0) { - fb(current->tile_cols_log2 + current->tile_rows_log2, - context_update_tile_id); - fb(2, tile_size_bytes_minus1); - } - else { - infer(context_update_tile_id, 0); - } - - priv->tile_cols = current->tile_cols; - priv->tile_rows = current->tile_rows; - - return 0; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int mi_cols, mi_rows, sb_cols, sb_rows, sb_shift, sb_size; + int max_tile_width_sb, max_tile_height_sb, max_tile_area_sb; + int min_log2_tile_cols, max_log2_tile_cols, max_log2_tile_rows; + int min_log2_tiles, min_log2_tile_rows; + int i, err; + + mi_cols = 2 * ((priv->frame_width + 7) >> 3); + mi_rows = 2 * ((priv->frame_height + 7) >> 3); + + sb_cols = seq->use_128x128_superblock ? ((mi_cols + 31) >> 5) + : ((mi_cols + 15) >> 4); + sb_rows = seq->use_128x128_superblock ? ((mi_rows + 31) >> 5) + : ((mi_rows + 15) >> 4); + + sb_shift = seq->use_128x128_superblock ? 
5 : 4; + sb_size = sb_shift + 2; + + max_tile_width_sb = AV1_MAX_TILE_WIDTH >> sb_size; + max_tile_area_sb = AV1_MAX_TILE_AREA >> (2 * sb_size); + + min_log2_tile_cols = cbs_av1_tile_log2(max_tile_width_sb, sb_cols); + max_log2_tile_cols = cbs_av1_tile_log2(1, FFMIN(sb_cols, AV1_MAX_TILE_COLS)); + max_log2_tile_rows = cbs_av1_tile_log2(1, FFMIN(sb_rows, AV1_MAX_TILE_ROWS)); + min_log2_tiles = FFMAX(min_log2_tile_cols, + cbs_av1_tile_log2(max_tile_area_sb, sb_rows * sb_cols)); + + flag(uniform_tile_spacing_flag); + + if (current->uniform_tile_spacing_flag) { + int tile_width_sb, tile_height_sb; + + increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols); + + tile_width_sb = (sb_cols + (1 << current->tile_cols_log2) - 1) >> + current->tile_cols_log2; + current->tile_cols = (sb_cols + tile_width_sb - 1) / tile_width_sb; + + min_log2_tile_rows = FFMAX(min_log2_tiles - current->tile_cols_log2, 0); + + increment(tile_rows_log2, min_log2_tile_rows, max_log2_tile_rows); + + tile_height_sb = (sb_rows + (1 << current->tile_rows_log2) - 1) >> + current->tile_rows_log2; + current->tile_rows = (sb_rows + tile_height_sb - 1) / tile_height_sb; + + for (i = 0; i < current->tile_cols - 1; i++) + infer(width_in_sbs_minus_1[i], tile_width_sb - 1); + infer(width_in_sbs_minus_1[i], + sb_cols - (current->tile_cols - 1) * tile_width_sb - 1); + for (i = 0; i < current->tile_rows - 1; i++) + infer(height_in_sbs_minus_1[i], tile_height_sb - 1); + infer(height_in_sbs_minus_1[i], + sb_rows - (current->tile_rows - 1) * tile_height_sb - 1); + + } else { + int widest_tile_sb, start_sb, size_sb, max_width, max_height; + + widest_tile_sb = 0; + + start_sb = 0; + for (i = 0; start_sb < sb_cols && i < AV1_MAX_TILE_COLS; i++) { + max_width = FFMIN(sb_cols - start_sb, max_tile_width_sb); + ns(max_width, width_in_sbs_minus_1[i], 1, i); + size_sb = current->width_in_sbs_minus_1[i] + 1; + widest_tile_sb = FFMAX(size_sb, widest_tile_sb); + start_sb += size_sb; + } + current->tile_cols_log2 = 
cbs_av1_tile_log2(1, i); + current->tile_cols = i; + + if (min_log2_tiles > 0) + max_tile_area_sb = (sb_rows * sb_cols) >> (min_log2_tiles + 1); + else + max_tile_area_sb = sb_rows * sb_cols; + max_tile_height_sb = FFMAX(max_tile_area_sb / widest_tile_sb, 1); + + start_sb = 0; + for (i = 0; start_sb < sb_rows && i < AV1_MAX_TILE_ROWS; i++) { + max_height = FFMIN(sb_rows - start_sb, max_tile_height_sb); + ns(max_height, height_in_sbs_minus_1[i], 1, i); + size_sb = current->height_in_sbs_minus_1[i] + 1; + start_sb += size_sb; + } + current->tile_rows_log2 = cbs_av1_tile_log2(1, i); + current->tile_rows = i; + } + + if (current->tile_cols_log2 > 0 || + current->tile_rows_log2 > 0) { + fb(current->tile_cols_log2 + current->tile_rows_log2, + context_update_tile_id); + fb(2, tile_size_bytes_minus1); + } else { + infer(context_update_tile_id, 0); + } + + priv->tile_cols = current->tile_cols; + priv->tile_rows = current->tile_rows; + + return 0; } static int FUNC(quantization_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int err; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int err; - fb(8, base_q_idx); + fb(8, base_q_idx); - delta_q(delta_q_y_dc); + delta_q(delta_q_y_dc); - if(priv->num_planes > 1) { - if(seq->color_config.separate_uv_delta_q) - flag(diff_uv_delta); - else - infer(diff_uv_delta, 0); - - delta_q(delta_q_u_dc); - delta_q(delta_q_u_ac); - - if(current->diff_uv_delta) { - delta_q(delta_q_v_dc); - delta_q(delta_q_v_ac); - } - else { - infer(delta_q_v_dc, current->delta_q_u_dc); - infer(delta_q_v_ac, current->delta_q_u_ac); - } - } - else { - infer(delta_q_u_dc, 0); - infer(delta_q_u_ac, 0); - infer(delta_q_v_dc, 0); - infer(delta_q_v_ac, 0); - } - - flag(using_qmatrix); - if(current->using_qmatrix) { - fb(4, qm_y); 
- fb(4, qm_u); - if(seq->color_config.separate_uv_delta_q) - fb(4, qm_v); - else - infer(qm_v, current->qm_u); - } + if (priv->num_planes > 1) { + if (seq->color_config.separate_uv_delta_q) + flag(diff_uv_delta); + else + infer(diff_uv_delta, 0); + + delta_q(delta_q_u_dc); + delta_q(delta_q_u_ac); - return 0; + if (current->diff_uv_delta) { + delta_q(delta_q_v_dc); + delta_q(delta_q_v_ac); + } else { + infer(delta_q_v_dc, current->delta_q_u_dc); + infer(delta_q_v_ac, current->delta_q_u_ac); + } + } else { + infer(delta_q_u_dc, 0); + infer(delta_q_u_ac, 0); + infer(delta_q_v_dc, 0); + infer(delta_q_v_ac, 0); + } + + flag(using_qmatrix); + if (current->using_qmatrix) { + fb(4, qm_y); + fb(4, qm_u); + if (seq->color_config.separate_uv_delta_q) + fb(4, qm_v); + else + infer(qm_v, current->qm_u); + } + + return 0; } static int FUNC(segmentation_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - static const uint8_t bits[AV1_SEG_LVL_MAX] = { 8, 6, 6, 6, 6, 3, 0, 0 }; - static const uint8_t sign[AV1_SEG_LVL_MAX] = { 1, 1, 1, 1, 1, 0, 0, 0 }; - static const uint8_t default_feature_enabled[AV1_SEG_LVL_MAX] = { 0 }; - static const int16_t default_feature_value[AV1_SEG_LVL_MAX] = { 0 }; - int i, j, err; - - flag(segmentation_enabled); - - if(current->segmentation_enabled) { - if(current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - infer(segmentation_update_map, 1); - infer(segmentation_temporal_update, 0); - infer(segmentation_update_data, 1); - } - else { - flag(segmentation_update_map); - if(current->segmentation_update_map) - flag(segmentation_temporal_update); - else - infer(segmentation_temporal_update, 0); - flag(segmentation_update_data); - } - - for(i = 0; i < AV1_MAX_SEGMENTS; i++) { - const uint8_t *ref_feature_enabled; - const int16_t *ref_feature_value; - - if(current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - ref_feature_enabled = default_feature_enabled; - 
ref_feature_value = default_feature_value; - } - else { - ref_feature_enabled = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].feature_enabled[i]; - ref_feature_value = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].feature_value[i]; - } - - for(j = 0; j < AV1_SEG_LVL_MAX; j++) { - if(current->segmentation_update_data) { - flags(feature_enabled[i][j], 2, i, j); - - if(current->feature_enabled[i][j] && bits[j] > 0) { - if(sign[j]) - sus(1 + bits[j], feature_value[i][j], 2, i, j); + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + static const uint8_t bits[AV1_SEG_LVL_MAX] = { 8, 6, 6, 6, 6, 3, 0, 0 }; + static const uint8_t sign[AV1_SEG_LVL_MAX] = { 1, 1, 1, 1, 1, 0, 0, 0 }; + static const uint8_t default_feature_enabled[AV1_SEG_LVL_MAX] = { 0 }; + static const int16_t default_feature_value[AV1_SEG_LVL_MAX] = { 0 }; + int i, j, err; + + flag(segmentation_enabled); + + if (current->segmentation_enabled) { + if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { + infer(segmentation_update_map, 1); + infer(segmentation_temporal_update, 0); + infer(segmentation_update_data, 1); + } else { + flag(segmentation_update_map); + if (current->segmentation_update_map) + flag(segmentation_temporal_update); else - fbs(bits[j], feature_value[i][j], 2, i, j); - } - else { - infer(feature_value[i][j], 0); - } + infer(segmentation_temporal_update, 0); + flag(segmentation_update_data); } - else { - infer(feature_enabled[i][j], ref_feature_enabled[j]); - infer(feature_value[i][j], ref_feature_value[j]); + + for (i = 0; i < AV1_MAX_SEGMENTS; i++) { + const uint8_t *ref_feature_enabled; + const int16_t *ref_feature_value; + + if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { + ref_feature_enabled = default_feature_enabled; + ref_feature_value = default_feature_value; + } else { + ref_feature_enabled = + priv->ref[current->ref_frame_idx[current->primary_ref_frame]].feature_enabled[i]; + ref_feature_value = + 
priv->ref[current->ref_frame_idx[current->primary_ref_frame]].feature_value[i]; + } + + for (j = 0; j < AV1_SEG_LVL_MAX; j++) { + if (current->segmentation_update_data) { + flags(feature_enabled[i][j], 2, i, j); + + if (current->feature_enabled[i][j] && bits[j] > 0) { + if (sign[j]) + sus(1 + bits[j], feature_value[i][j], 2, i, j); + else + fbs(bits[j], feature_value[i][j], 2, i, j); + } else { + infer(feature_value[i][j], 0); + } + } else { + infer(feature_enabled[i][j], ref_feature_enabled[j]); + infer(feature_value[i][j], ref_feature_value[j]); + } + } + } + } else { + for (i = 0; i < AV1_MAX_SEGMENTS; i++) { + for (j = 0; j < AV1_SEG_LVL_MAX; j++) { + infer(feature_enabled[i][j], 0); + infer(feature_value[i][j], 0); + } } - } - } - } - else { - for(i = 0; i < AV1_MAX_SEGMENTS; i++) { - for(j = 0; j < AV1_SEG_LVL_MAX; j++) { - infer(feature_enabled[i][j], 0); - infer(feature_value[i][j], 0); - } } - } - return 0; + return 0; } static int FUNC(delta_q_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - int err; + AV1RawFrameHeader *current) +{ + int err; - if(current->base_q_idx > 0) - flag(delta_q_present); - else - infer(delta_q_present, 0); + if (current->base_q_idx > 0) + flag(delta_q_present); + else + infer(delta_q_present, 0); - if(current->delta_q_present) - fb(2, delta_q_res); + if (current->delta_q_present) + fb(2, delta_q_res); - return 0; + return 0; } static int FUNC(delta_lf_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - int err; + AV1RawFrameHeader *current) +{ + int err; - if(current->delta_q_present) { - if(!current->allow_intrabc) - flag(delta_lf_present); - else - infer(delta_lf_present, 0); - if(current->delta_lf_present) { - fb(2, delta_lf_res); - flag(delta_lf_multi); - } - else { - infer(delta_lf_res, 0); - infer(delta_lf_multi, 0); - } - } - else { - infer(delta_lf_present, 0); - infer(delta_lf_res, 0); - infer(delta_lf_multi, 0); - } - - return 0; -} + if 
(current->delta_q_present) { + if (!current->allow_intrabc) + flag(delta_lf_present); + else + infer(delta_lf_present, 0); + if (current->delta_lf_present) { + fb(2, delta_lf_res); + flag(delta_lf_multi); + } else { + infer(delta_lf_res, 0); + infer(delta_lf_multi, 0); + } + } else { + infer(delta_lf_present, 0); + infer(delta_lf_res, 0); + infer(delta_lf_multi, 0); + } -static int FUNC(loop_filter_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - static const int8_t default_loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME] = { 1, 0, 0, 0, -1, 0, -1, -1 }; - static const int8_t default_loop_filter_mode_deltas[2] = { 0, 0 }; - int i, err; - - if(priv->coded_lossless || current->allow_intrabc) { - infer(loop_filter_level[0], 0); - infer(loop_filter_level[1], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_INTRA], 1); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST2], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST3], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_BWDREF], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_GOLDEN], -1); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF], -1); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF2], -1); - for(i = 0; i < 2; i++) - infer(loop_filter_mode_deltas[i], 0); return 0; - } - - fb(6, loop_filter_level[0]); - fb(6, loop_filter_level[1]); - - if(priv->num_planes > 1) { - if(current->loop_filter_level[0] || - current->loop_filter_level[1]) { - fb(6, loop_filter_level[2]); - fb(6, loop_filter_level[3]); - } - } - - fb(3, loop_filter_sharpness); - - flag(loop_filter_delta_enabled); - if(current->loop_filter_delta_enabled) { - const int8_t *ref_loop_filter_ref_deltas, *ref_loop_filter_mode_deltas; - - if(current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - ref_loop_filter_ref_deltas = default_loop_filter_ref_deltas; - ref_loop_filter_mode_deltas = 
default_loop_filter_mode_deltas; - } - else { - ref_loop_filter_ref_deltas = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_ref_deltas; - ref_loop_filter_mode_deltas = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_mode_deltas; - } - - flag(loop_filter_delta_update); - for(i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++) { - if(current->loop_filter_delta_update) - flags(update_ref_delta[i], 1, i); - else - infer(update_ref_delta[i], 0); - if(current->update_ref_delta[i]) - sus(1 + 6, loop_filter_ref_deltas[i], 1, i); - else - infer(loop_filter_ref_deltas[i], ref_loop_filter_ref_deltas[i]); - } - for(i = 0; i < 2; i++) { - if(current->loop_filter_delta_update) - flags(update_mode_delta[i], 1, i); - else - infer(update_mode_delta[i], 0); - if(current->update_mode_delta[i]) - sus(1 + 6, loop_filter_mode_deltas[i], 1, i); - else - infer(loop_filter_mode_deltas[i], ref_loop_filter_mode_deltas[i]); - } - } - else { - for(i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++) - infer(loop_filter_ref_deltas[i], default_loop_filter_ref_deltas[i]); - for(i = 0; i < 2; i++) - infer(loop_filter_mode_deltas[i], default_loop_filter_mode_deltas[i]); - } - - return 0; } -static int FUNC(cdef_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int i, err; - - if(priv->coded_lossless || current->allow_intrabc || - !seq->enable_cdef) { - infer(cdef_damping_minus_3, 0); - infer(cdef_bits, 0); - infer(cdef_y_pri_strength[0], 0); - infer(cdef_y_sec_strength[0], 0); - infer(cdef_uv_pri_strength[0], 0); - infer(cdef_uv_sec_strength[0], 0); +static int FUNC(loop_filter_params)(CodedBitstreamContext *ctx, RWContext *rw, + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + static const int8_t default_loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME] = + { 1, 0, 0, 0, -1, 0, -1, -1 
}; + static const int8_t default_loop_filter_mode_deltas[2] = { 0, 0 }; + int i, err; + + if (priv->coded_lossless || current->allow_intrabc) { + infer(loop_filter_level[0], 0); + infer(loop_filter_level[1], 0); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_INTRA], 1); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST], 0); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST2], 0); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST3], 0); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_BWDREF], 0); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_GOLDEN], -1); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF], -1); + infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF2], -1); + for (i = 0; i < 2; i++) + infer(loop_filter_mode_deltas[i], 0); + return 0; + } + + fb(6, loop_filter_level[0]); + fb(6, loop_filter_level[1]); + + if (priv->num_planes > 1) { + if (current->loop_filter_level[0] || + current->loop_filter_level[1]) { + fb(6, loop_filter_level[2]); + fb(6, loop_filter_level[3]); + } + } - return 0; - } + fb(3, loop_filter_sharpness); - fb(2, cdef_damping_minus_3); - fb(2, cdef_bits); + flag(loop_filter_delta_enabled); + if (current->loop_filter_delta_enabled) { + const int8_t *ref_loop_filter_ref_deltas, *ref_loop_filter_mode_deltas; - for(i = 0; i < (1 << current->cdef_bits); i++) { - fbs(4, cdef_y_pri_strength[i], 1, i); - fbs(2, cdef_y_sec_strength[i], 1, i); + if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { + ref_loop_filter_ref_deltas = default_loop_filter_ref_deltas; + ref_loop_filter_mode_deltas = default_loop_filter_mode_deltas; + } else { + ref_loop_filter_ref_deltas = + priv->ref[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_ref_deltas; + ref_loop_filter_mode_deltas = + priv->ref[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_mode_deltas; + } - if(priv->num_planes > 1) { - fbs(4, cdef_uv_pri_strength[i], 1, i); - fbs(2, cdef_uv_sec_strength[i], 1, i); + flag(loop_filter_delta_update); + for (i = 0; i < 
AV1_TOTAL_REFS_PER_FRAME; i++) { + if (current->loop_filter_delta_update) + flags(update_ref_delta[i], 1, i); + else + infer(update_ref_delta[i], 0); + if (current->update_ref_delta[i]) + sus(1 + 6, loop_filter_ref_deltas[i], 1, i); + else + infer(loop_filter_ref_deltas[i], ref_loop_filter_ref_deltas[i]); + } + for (i = 0; i < 2; i++) { + if (current->loop_filter_delta_update) + flags(update_mode_delta[i], 1, i); + else + infer(update_mode_delta[i], 0); + if (current->update_mode_delta[i]) + sus(1 + 6, loop_filter_mode_deltas[i], 1, i); + else + infer(loop_filter_mode_deltas[i], ref_loop_filter_mode_deltas[i]); + } + } else { + for (i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++) + infer(loop_filter_ref_deltas[i], default_loop_filter_ref_deltas[i]); + for (i = 0; i < 2; i++) + infer(loop_filter_mode_deltas[i], default_loop_filter_mode_deltas[i]); } - } - return 0; + return 0; } -static int FUNC(lr_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int uses_lr, uses_chroma_lr; - int i, err; - - if(priv->all_lossless || current->allow_intrabc || - !seq->enable_restoration) { - return 0; - } +static int FUNC(cdef_params)(CodedBitstreamContext *ctx, RWContext *rw, + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int i, err; + + if (priv->coded_lossless || current->allow_intrabc || + !seq->enable_cdef) { + infer(cdef_damping_minus_3, 0); + infer(cdef_bits, 0); + infer(cdef_y_pri_strength[0], 0); + infer(cdef_y_sec_strength[0], 0); + infer(cdef_uv_pri_strength[0], 0); + infer(cdef_uv_sec_strength[0], 0); + + return 0; + } + + fb(2, cdef_damping_minus_3); + fb(2, cdef_bits); + + for (i = 0; i < (1 << current->cdef_bits); i++) { + fbs(4, cdef_y_pri_strength[i], 1, i); + fbs(2, cdef_y_sec_strength[i], 1, i); + + if (priv->num_planes > 1) { + 
fbs(4, cdef_uv_pri_strength[i], 1, i); + fbs(2, cdef_uv_sec_strength[i], 1, i); + } + } - uses_lr = uses_chroma_lr = 0; - for(i = 0; i < priv->num_planes; i++) { - fbs(2, lr_type[i], 1, i); + return 0; +} - if(current->lr_type[i] != AV1_RESTORE_NONE) { - uses_lr = 1; - if(i > 0) - uses_chroma_lr = 1; +static int FUNC(lr_params)(CodedBitstreamContext *ctx, RWContext *rw, + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int uses_lr, uses_chroma_lr; + int i, err; + + if (priv->all_lossless || current->allow_intrabc || + !seq->enable_restoration) { + return 0; + } + + uses_lr = uses_chroma_lr = 0; + for (i = 0; i < priv->num_planes; i++) { + fbs(2, lr_type[i], 1, i); + + if (current->lr_type[i] != AV1_RESTORE_NONE) { + uses_lr = 1; + if (i > 0) + uses_chroma_lr = 1; + } } - } - if(uses_lr) { - if(seq->use_128x128_superblock) - increment(lr_unit_shift, 1, 2); - else - increment(lr_unit_shift, 0, 2); + if (uses_lr) { + if (seq->use_128x128_superblock) + increment(lr_unit_shift, 1, 2); + else + increment(lr_unit_shift, 0, 2); - if(seq->color_config.subsampling_x && - seq->color_config.subsampling_y && uses_chroma_lr) { - fb(1, lr_uv_shift); - } - else { - infer(lr_uv_shift, 0); + if(seq->color_config.subsampling_x && + seq->color_config.subsampling_y && uses_chroma_lr) { + fb(1, lr_uv_shift); + } else { + infer(lr_uv_shift, 0); + } } - } - return 0; + return 0; } static int FUNC(read_tx_mode)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int err; - if(priv->coded_lossless) - infer(tx_mode, 0); - else - increment(tx_mode, 1, 2); + if (priv->coded_lossless) + infer(tx_mode, 0); + else + increment(tx_mode, 1, 2); - return 0; + return 0; } static int FUNC(frame_reference_mode)(CodedBitstreamContext *ctx, 
RWContext *rw, - AV1RawFrameHeader *current) { - int err; + AV1RawFrameHeader *current) +{ + int err; - if(current->frame_type == AV1_FRAME_INTRA_ONLY || - current->frame_type == AV1_FRAME_KEY) - infer(reference_select, 0); - else - flag(reference_select); + if (current->frame_type == AV1_FRAME_INTRA_ONLY || + current->frame_type == AV1_FRAME_KEY) + infer(reference_select, 0); + else + flag(reference_select); - return 0; + return 0; } static int FUNC(skip_mode_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int skip_mode_allowed; - int err; - - if(current->frame_type == AV1_FRAME_KEY || - current->frame_type == AV1_FRAME_INTRA_ONLY || - !current->reference_select || !seq->enable_order_hint) { - skip_mode_allowed = 0; - } - else { - int forward_idx, backward_idx; - int forward_hint, backward_hint; - int ref_hint, dist, i; - - forward_idx = -1; - backward_idx = -1; - for(i = 0; i < AV1_REFS_PER_FRAME; i++) { - ref_hint = priv->ref[current->ref_frame_idx[i]].order_hint; - dist = cbs_av1_get_relative_dist(seq, ref_hint, - priv->order_hint); - if(dist < 0) { - if(forward_idx < 0 || - cbs_av1_get_relative_dist(seq, ref_hint, - forward_hint) > 0) { - forward_idx = i; - forward_hint = ref_hint; - } - } - else if(dist > 0) { - if(backward_idx < 0 || - cbs_av1_get_relative_dist(seq, ref_hint, - backward_hint) < 0) { - backward_idx = i; - backward_hint = ref_hint; - } - } - } - - if(forward_idx < 0) { - skip_mode_allowed = 0; - } - else if(backward_idx >= 0) { - skip_mode_allowed = 1; - // Frames for skip mode are forward_idx and backward_idx. 
- } - else { - int second_forward_idx; - int second_forward_hint; - - second_forward_idx = -1; - for(i = 0; i < AV1_REFS_PER_FRAME; i++) { - ref_hint = priv->ref[current->ref_frame_idx[i]].order_hint; - if(cbs_av1_get_relative_dist(seq, ref_hint, - forward_hint) < 0) { - if(second_forward_idx < 0 || - cbs_av1_get_relative_dist(seq, ref_hint, - second_forward_hint) > 0) { - second_forward_idx = i; - second_forward_hint = ref_hint; - } + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int skip_mode_allowed; + int err; + + if (current->frame_type == AV1_FRAME_KEY || + current->frame_type == AV1_FRAME_INTRA_ONLY || + !current->reference_select || !seq->enable_order_hint) { + skip_mode_allowed = 0; + } else { + int forward_idx, backward_idx; + int forward_hint, backward_hint; + int ref_hint, dist, i; + + forward_idx = -1; + backward_idx = -1; + for (i = 0; i < AV1_REFS_PER_FRAME; i++) { + ref_hint = priv->ref[current->ref_frame_idx[i]].order_hint; + dist = cbs_av1_get_relative_dist(seq, ref_hint, + priv->order_hint); + if (dist < 0) { + if (forward_idx < 0 || + cbs_av1_get_relative_dist(seq, ref_hint, + forward_hint) > 0) { + forward_idx = i; + forward_hint = ref_hint; + } + } else if (dist > 0) { + if (backward_idx < 0 || + cbs_av1_get_relative_dist(seq, ref_hint, + backward_hint) < 0) { + backward_idx = i; + backward_hint = ref_hint; + } + } } - } - if(second_forward_idx < 0) { - skip_mode_allowed = 0; - } - else { - skip_mode_allowed = 1; - // Frames for skip mode are forward_idx and second_forward_idx. - } + if (forward_idx < 0) { + skip_mode_allowed = 0; + } else if (backward_idx >= 0) { + skip_mode_allowed = 1; + // Frames for skip mode are forward_idx and backward_idx. 
+ } else { + int second_forward_idx; + int second_forward_hint; + + second_forward_idx = -1; + for (i = 0; i < AV1_REFS_PER_FRAME; i++) { + ref_hint = priv->ref[current->ref_frame_idx[i]].order_hint; + if (cbs_av1_get_relative_dist(seq, ref_hint, + forward_hint) < 0) { + if (second_forward_idx < 0 || + cbs_av1_get_relative_dist(seq, ref_hint, + second_forward_hint) > 0) { + second_forward_idx = i; + second_forward_hint = ref_hint; + } + } + } + + if (second_forward_idx < 0) { + skip_mode_allowed = 0; + } else { + skip_mode_allowed = 1; + // Frames for skip mode are forward_idx and second_forward_idx. + } + } } - } - if(skip_mode_allowed) - flag(skip_mode_present); - else - infer(skip_mode_present, 0); + if (skip_mode_allowed) + flag(skip_mode_present); + else + infer(skip_mode_present, 0); - return 0; + return 0; } static int FUNC(global_motion_param)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current, - int type, int ref, int idx) { - uint32_t abs_bits, prec_bits, num_syms; - int err; - - if(idx < 2) { - if(type == AV1_WARP_MODEL_TRANSLATION) { - abs_bits = AV1_GM_ABS_TRANS_ONLY_BITS - !current->allow_high_precision_mv; - prec_bits = AV1_GM_TRANS_ONLY_PREC_BITS - !current->allow_high_precision_mv; - } - else { - abs_bits = AV1_GM_ABS_TRANS_BITS; - prec_bits = AV1_GM_TRANS_PREC_BITS; + AV1RawFrameHeader *current, + int type, int ref, int idx) +{ + uint32_t abs_bits, prec_bits, num_syms; + int err; + + if (idx < 2) { + if (type == AV1_WARP_MODEL_TRANSLATION) { + abs_bits = AV1_GM_ABS_TRANS_ONLY_BITS - !current->allow_high_precision_mv; + prec_bits = AV1_GM_TRANS_ONLY_PREC_BITS - !current->allow_high_precision_mv; + } else { + abs_bits = AV1_GM_ABS_TRANS_BITS; + prec_bits = AV1_GM_TRANS_PREC_BITS; + } + } else { + abs_bits = AV1_GM_ABS_ALPHA_BITS; + prec_bits = AV1_GM_ALPHA_PREC_BITS; } - } - else { - abs_bits = AV1_GM_ABS_ALPHA_BITS; - prec_bits = AV1_GM_ALPHA_PREC_BITS; - } - num_syms = 2 * (1 << abs_bits) + 1; - subexp(gm_params[ref][idx], 
num_syms, 2, ref, idx); + num_syms = 2 * (1 << abs_bits) + 1; + subexp(gm_params[ref][idx], num_syms, 2, ref, idx); - // Actual gm_params value is not reconstructed here. - (void)prec_bits; + // Actual gm_params value is not reconstructed here. + (void)prec_bits; - return 0; + return 0; } static int FUNC(global_motion_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - int ref, type; - int err; + AV1RawFrameHeader *current) +{ + int ref, type; + int err; + + if (current->frame_type == AV1_FRAME_KEY || + current->frame_type == AV1_FRAME_INTRA_ONLY) + return 0; + + for (ref = AV1_REF_FRAME_LAST; ref <= AV1_REF_FRAME_ALTREF; ref++) { + flags(is_global[ref], 1, ref); + if (current->is_global[ref]) { + flags(is_rot_zoom[ref], 1, ref); + if (current->is_rot_zoom[ref]) { + type = AV1_WARP_MODEL_ROTZOOM; + } else { + flags(is_translation[ref], 1, ref); + type = current->is_translation[ref] ? AV1_WARP_MODEL_TRANSLATION + : AV1_WARP_MODEL_AFFINE; + } + } else { + type = AV1_WARP_MODEL_IDENTITY; + } - if(current->frame_type == AV1_FRAME_KEY || - current->frame_type == AV1_FRAME_INTRA_ONLY) - return 0; + if (type >= AV1_WARP_MODEL_ROTZOOM) { + CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 2)); + CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 3)); + if (type == AV1_WARP_MODEL_AFFINE) { + CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 4)); + CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 5)); + } else { + // gm_params[ref][4] = -gm_params[ref][3] + // gm_params[ref][5] = gm_params[ref][2] + } + } + if (type >= AV1_WARP_MODEL_TRANSLATION) { + CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 0)); + CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 1)); + } + } - for(ref = AV1_REF_FRAME_LAST; ref <= AV1_REF_FRAME_ALTREF; ref++) { - flags(is_global[ref], 1, ref); - if(current->is_global[ref]) { - flags(is_rot_zoom[ref], 1, ref); - if(current->is_rot_zoom[ref]) { - type = 
AV1_WARP_MODEL_ROTZOOM; - } - else { - flags(is_translation[ref], 1, ref); - type = current->is_translation[ref] ? AV1_WARP_MODEL_TRANSLATION : AV1_WARP_MODEL_AFFINE; - } - } - else { - type = AV1_WARP_MODEL_IDENTITY; - } - - if(type >= AV1_WARP_MODEL_ROTZOOM) { - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 2)); - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 3)); - if(type == AV1_WARP_MODEL_AFFINE) { - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 4)); - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 5)); - } - else { - // gm_params[ref][4] = -gm_params[ref][3] - // gm_params[ref][5] = gm_params[ref][2] - } - } - if(type >= AV1_WARP_MODEL_TRANSLATION) { - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 0)); - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 1)); - } - } - - return 0; + return 0; } static int FUNC(film_grain_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFilmGrainParams *current, - AV1RawFrameHeader *frame_header) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int num_pos_luma, num_pos_chroma; - int i, err; - - if(!seq->film_grain_params_present || - (!frame_header->show_frame && !frame_header->showable_frame)) - return 0; + AV1RawFilmGrainParams *current, + AV1RawFrameHeader *frame_header) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq = priv->sequence_header; + int num_pos_luma, num_pos_chroma; + int i, err; - flag(apply_grain); + if (!seq->film_grain_params_present || + (!frame_header->show_frame && !frame_header->showable_frame)) + return 0; - if(!current->apply_grain) - return 0; + flag(apply_grain); - fb(16, grain_seed); + if (!current->apply_grain) + return 0; - if(frame_header->frame_type == AV1_FRAME_INTER) - flag(update_grain); - else - infer(update_grain, 1); + fb(16, grain_seed); + + if (frame_header->frame_type == AV1_FRAME_INTER) + 
flag(update_grain); + else + infer(update_grain, 1); + + if (!current->update_grain) { + fb(3, film_grain_params_ref_idx); + return 0; + } + + fc(4, num_y_points, 0, 14); + for (i = 0; i < current->num_y_points; i++) { + fcs(8, point_y_value[i], + i ? current->point_y_value[i - 1] + 1 : 0, + MAX_UINT_BITS(8) - (current->num_y_points - i - 1), + 1, i); + fbs(8, point_y_scaling[i], 1, i); + } + + if (seq->color_config.mono_chrome) + infer(chroma_scaling_from_luma, 0); + else + flag(chroma_scaling_from_luma); + + if (seq->color_config.mono_chrome || + current->chroma_scaling_from_luma || + (seq->color_config.subsampling_x == 1 && + seq->color_config.subsampling_y == 1 && + current->num_y_points == 0)) { + infer(num_cb_points, 0); + infer(num_cr_points, 0); + } else { + fc(4, num_cb_points, 0, 10); + for (i = 0; i < current->num_cb_points; i++) { + fcs(8, point_cb_value[i], + i ? current->point_cb_value[i - 1] + 1 : 0, + MAX_UINT_BITS(8) - (current->num_cb_points - i - 1), + 1, i); + fbs(8, point_cb_scaling[i], 1, i); + } + fc(4, num_cr_points, 0, 10); + for (i = 0; i < current->num_cr_points; i++) { + fcs(8, point_cr_value[i], + i ? 
current->point_cr_value[i - 1] + 1 : 0, + MAX_UINT_BITS(8) - (current->num_cr_points - i - 1), + 1, i); + fbs(8, point_cr_scaling[i], 1, i); + } + } + + fb(2, grain_scaling_minus_8); + fb(2, ar_coeff_lag); + num_pos_luma = 2 * current->ar_coeff_lag * (current->ar_coeff_lag + 1); + if (current->num_y_points) { + num_pos_chroma = num_pos_luma + 1; + for (i = 0; i < num_pos_luma; i++) + fbs(8, ar_coeffs_y_plus_128[i], 1, i); + } else { + num_pos_chroma = num_pos_luma; + } + if (current->chroma_scaling_from_luma || current->num_cb_points) { + for (i = 0; i < num_pos_chroma; i++) + fbs(8, ar_coeffs_cb_plus_128[i], 1, i); + } + if (current->chroma_scaling_from_luma || current->num_cr_points) { + for (i = 0; i < num_pos_chroma; i++) + fbs(8, ar_coeffs_cr_plus_128[i], 1, i); + } + fb(2, ar_coeff_shift_minus_6); + fb(2, grain_scale_shift); + if (current->num_cb_points) { + fb(8, cb_mult); + fb(8, cb_luma_mult); + fb(9, cb_offset); + } + if (current->num_cr_points) { + fb(8, cr_mult); + fb(8, cr_luma_mult); + fb(9, cr_offset); + } + + flag(overlap_flag); + flag(clip_to_restricted_range); - if(!current->update_grain) { - fb(3, film_grain_params_ref_idx); return 0; - } - - fc(4, num_y_points, 0, 14); - for(i = 0; i < current->num_y_points; i++) { - fcs(8, point_y_value[i], - i ? current->point_y_value[i - 1] + 1 : 0, - MAX_UINT_BITS(8) - (current->num_y_points - i - 1), - 1, i); - fbs(8, point_y_scaling[i], 1, i); - } - - if(seq->color_config.mono_chrome) - infer(chroma_scaling_from_luma, 0); - else - flag(chroma_scaling_from_luma); - - if(seq->color_config.mono_chrome || - current->chroma_scaling_from_luma || - (seq->color_config.subsampling_x == 1 && - seq->color_config.subsampling_y == 1 && - current->num_y_points == 0)) { - infer(num_cb_points, 0); - infer(num_cr_points, 0); - } - else { - fc(4, num_cb_points, 0, 10); - for(i = 0; i < current->num_cb_points; i++) { - fcs(8, point_cb_value[i], - i ? 
current->point_cb_value[i - 1] + 1 : 0, - MAX_UINT_BITS(8) - (current->num_cb_points - i - 1), - 1, i); - fbs(8, point_cb_scaling[i], 1, i); - } - fc(4, num_cr_points, 0, 10); - for(i = 0; i < current->num_cr_points; i++) { - fcs(8, point_cr_value[i], - i ? current->point_cr_value[i - 1] + 1 : 0, - MAX_UINT_BITS(8) - (current->num_cr_points - i - 1), - 1, i); - fbs(8, point_cr_scaling[i], 1, i); - } - } - - fb(2, grain_scaling_minus_8); - fb(2, ar_coeff_lag); - num_pos_luma = 2 * current->ar_coeff_lag * (current->ar_coeff_lag + 1); - if(current->num_y_points) { - num_pos_chroma = num_pos_luma + 1; - for(i = 0; i < num_pos_luma; i++) - fbs(8, ar_coeffs_y_plus_128[i], 1, i); - } - else { - num_pos_chroma = num_pos_luma; - } - if(current->chroma_scaling_from_luma || current->num_cb_points) { - for(i = 0; i < num_pos_chroma; i++) - fbs(8, ar_coeffs_cb_plus_128[i], 1, i); - } - if(current->chroma_scaling_from_luma || current->num_cr_points) { - for(i = 0; i < num_pos_chroma; i++) - fbs(8, ar_coeffs_cr_plus_128[i], 1, i); - } - fb(2, ar_coeff_shift_minus_6); - fb(2, grain_scale_shift); - if(current->num_cb_points) { - fb(8, cb_mult); - fb(8, cb_luma_mult); - fb(9, cb_offset); - } - if(current->num_cr_points) { - fb(8, cr_mult); - fb(8, cr_luma_mult); - fb(9, cr_offset); - } - - flag(overlap_flag); - flag(clip_to_restricted_range); - - return 0; } static int FUNC(uncompressed_header)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq; - int id_len, diff_len, all_frames, frame_is_intra, order_hint_bits; - int i, err; - - if(!priv->sequence_header) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "No sequence header available: " - "unable to decode frame header.\n"); - return AVERROR_INVALIDDATA; - } - seq = priv->sequence_header; - - id_len = seq->additional_frame_id_length_minus_1 + - seq->delta_frame_id_length_minus_2 + 3; - all_frames = (1 << AV1_NUM_REF_FRAMES) - 1; - - 
if(seq->reduced_still_picture_header) { - infer(show_existing_frame, 0); - infer(frame_type, AV1_FRAME_KEY); - infer(show_frame, 1); - infer(showable_frame, 0); - frame_is_intra = 1; - } - else { - flag(show_existing_frame); - - if(current->show_existing_frame) { - AV1ReferenceFrameState *ref; - - fb(3, frame_to_show_map_idx); - ref = &priv->ref[current->frame_to_show_map_idx]; - - if(!ref->valid) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Missing reference frame needed for " - "show_existing_frame (frame_to_show_map_idx = %d).\n", - current->frame_to_show_map_idx); + AV1RawFrameHeader *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq; + int id_len, diff_len, all_frames, frame_is_intra, order_hint_bits; + int i, err; + + if (!priv->sequence_header) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "No sequence header available: " + "unable to decode frame header.\n"); return AVERROR_INVALIDDATA; - } - - if(seq->decoder_model_info_present_flag && - !seq->timing_info.equal_picture_interval) { - fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1, - frame_presentation_time); - } + } + seq = priv->sequence_header; + + id_len = seq->additional_frame_id_length_minus_1 + + seq->delta_frame_id_length_minus_2 + 3; + all_frames = (1 << AV1_NUM_REF_FRAMES) - 1; + + if (seq->reduced_still_picture_header) { + infer(show_existing_frame, 0); + infer(frame_type, AV1_FRAME_KEY); + infer(show_frame, 1); + infer(showable_frame, 0); + frame_is_intra = 1; + + } else { + flag(show_existing_frame); + + if (current->show_existing_frame) { + AV1ReferenceFrameState *ref; + + fb(3, frame_to_show_map_idx); + ref = &priv->ref[current->frame_to_show_map_idx]; + + if (!ref->valid) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Missing reference frame needed for " + "show_existing_frame (frame_to_show_map_idx = %d).\n", + current->frame_to_show_map_idx); + return AVERROR_INVALIDDATA; + } + + if (seq->decoder_model_info_present_flag && + 
!seq->timing_info.equal_picture_interval) { + fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1, + frame_presentation_time); + } + + if (seq->frame_id_numbers_present_flag) + fb(id_len, display_frame_id); + + infer(frame_type, ref->frame_type); + if (current->frame_type == AV1_FRAME_KEY) { + infer(refresh_frame_flags, all_frames); + + // Section 7.21 + infer(current_frame_id, ref->frame_id); + priv->upscaled_width = ref->upscaled_width; + priv->frame_width = ref->frame_width; + priv->frame_height = ref->frame_height; + priv->render_width = ref->render_width; + priv->render_height = ref->render_height; + priv->bit_depth = ref->bit_depth; + priv->order_hint = ref->order_hint; + } else + infer(refresh_frame_flags, 0); + + infer(frame_width_minus_1, ref->upscaled_width - 1); + infer(frame_height_minus_1, ref->frame_height - 1); + infer(render_width_minus_1, ref->render_width - 1); + infer(render_height_minus_1, ref->render_height - 1); + + // Section 7.20 + goto update_refs; + } - if(seq->frame_id_numbers_present_flag) - fb(id_len, display_frame_id); + fb(2, frame_type); + frame_is_intra = (current->frame_type == AV1_FRAME_INTRA_ONLY || + current->frame_type == AV1_FRAME_KEY); - infer(frame_type, ref->frame_type); - if(current->frame_type == AV1_FRAME_KEY) { - infer(refresh_frame_flags, all_frames); + flag(show_frame); + if (current->show_frame && + seq->decoder_model_info_present_flag && + !seq->timing_info.equal_picture_interval) { + fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1, + frame_presentation_time); + } + if (current->show_frame) + infer(showable_frame, current->frame_type != AV1_FRAME_KEY); + else + flag(showable_frame); - // Section 7.21 - infer(current_frame_id, ref->frame_id); - priv->upscaled_width = ref->upscaled_width; - priv->frame_width = ref->frame_width; - priv->frame_height = ref->frame_height; - priv->render_width = ref->render_width; - priv->render_height = ref->render_height; - priv->bit_depth = 
ref->bit_depth; - priv->order_hint = ref->order_hint; - } - else - infer(refresh_frame_flags, 0); - - infer(frame_width_minus_1, ref->upscaled_width - 1); - infer(frame_height_minus_1, ref->frame_height - 1); - infer(render_width_minus_1, ref->render_width - 1); - infer(render_height_minus_1, ref->render_height - 1); - - // Section 7.20 - goto update_refs; - } - - fb(2, frame_type); - frame_is_intra = (current->frame_type == AV1_FRAME_INTRA_ONLY || - current->frame_type == AV1_FRAME_KEY); - - flag(show_frame); - if(current->show_frame && - seq->decoder_model_info_present_flag && - !seq->timing_info.equal_picture_interval) { - fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1, - frame_presentation_time); - } - if(current->show_frame) - infer(showable_frame, current->frame_type != AV1_FRAME_KEY); - else - flag(showable_frame); + if (current->frame_type == AV1_FRAME_SWITCH || + (current->frame_type == AV1_FRAME_KEY && current->show_frame)) + infer(error_resilient_mode, 1); + else + flag(error_resilient_mode); + } - if(current->frame_type == AV1_FRAME_SWITCH || - (current->frame_type == AV1_FRAME_KEY && current->show_frame)) - infer(error_resilient_mode, 1); - else - flag(error_resilient_mode); - } - - if(current->frame_type == AV1_FRAME_KEY && current->show_frame) { - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - priv->ref[i].valid = 0; - priv->ref[i].order_hint = 0; - } - } - - flag(disable_cdf_update); - - if(seq->seq_force_screen_content_tools == - AV1_SELECT_SCREEN_CONTENT_TOOLS) { - flag(allow_screen_content_tools); - } - else { - infer(allow_screen_content_tools, - seq->seq_force_screen_content_tools); - } - if(current->allow_screen_content_tools) { - if(seq->seq_force_integer_mv == AV1_SELECT_INTEGER_MV) - flag(force_integer_mv); - else - infer(force_integer_mv, seq->seq_force_integer_mv); - } - else { - infer(force_integer_mv, 0); - } - - if(seq->frame_id_numbers_present_flag) { - fb(id_len, current_frame_id); - - diff_len = 
seq->delta_frame_id_length_minus_2 + 2; - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - if(current->current_frame_id > (1 << diff_len)) { - if(priv->ref[i].frame_id > current->current_frame_id || - priv->ref[i].frame_id < (current->current_frame_id - - (1 << diff_len))) - priv->ref[i].valid = 0; - } - else { - if(priv->ref[i].frame_id > current->current_frame_id && - priv->ref[i].frame_id < ((1 << id_len) + - current->current_frame_id - - (1 << diff_len))) - priv->ref[i].valid = 0; - } - } - } - else { - infer(current_frame_id, 0); - } - - if(current->frame_type == AV1_FRAME_SWITCH) - infer(frame_size_override_flag, 1); - else if(seq->reduced_still_picture_header) - infer(frame_size_override_flag, 0); - else - flag(frame_size_override_flag); - - order_hint_bits = - seq->enable_order_hint ? seq->order_hint_bits_minus_1 + 1 : 0; - if(order_hint_bits > 0) - fb(order_hint_bits, order_hint); - else - infer(order_hint, 0); - priv->order_hint = current->order_hint; - - if(frame_is_intra || current->error_resilient_mode) - infer(primary_ref_frame, AV1_PRIMARY_REF_NONE); - else - fb(3, primary_ref_frame); - - if(seq->decoder_model_info_present_flag) { - flag(buffer_removal_time_present_flag); - if(current->buffer_removal_time_present_flag) { - for(i = 0; i <= seq->operating_points_cnt_minus_1; i++) { - if(seq->decoder_model_present_for_this_op[i]) { - int op_pt_idc = seq->operating_point_idc[i]; - int in_temporal_layer = (op_pt_idc >> priv->temporal_id) & 1; - int in_spatial_layer = (op_pt_idc >> (priv->spatial_id + 8)) & 1; - if(seq->operating_point_idc[i] == 0 || - (in_temporal_layer && in_spatial_layer)) { - fbs(seq->decoder_model_info.buffer_removal_time_length_minus_1 + 1, - buffer_removal_time[i], 1, i); - } + if (current->frame_type == AV1_FRAME_KEY && current->show_frame) { + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + priv->ref[i].valid = 0; + priv->ref[i].order_hint = 0; } - } } - } - if(current->frame_type == AV1_FRAME_SWITCH || - (current->frame_type == 
AV1_FRAME_KEY && current->show_frame)) - infer(refresh_frame_flags, all_frames); - else - fb(8, refresh_frame_flags); + flag(disable_cdf_update); - if(!frame_is_intra || current->refresh_frame_flags != all_frames) { - if(seq->enable_order_hint) { - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - if(current->error_resilient_mode) - fbs(order_hint_bits, ref_order_hint[i], 1, i); + if (seq->seq_force_screen_content_tools == + AV1_SELECT_SCREEN_CONTENT_TOOLS) { + flag(allow_screen_content_tools); + } else { + infer(allow_screen_content_tools, + seq->seq_force_screen_content_tools); + } + if (current->allow_screen_content_tools) { + if (seq->seq_force_integer_mv == AV1_SELECT_INTEGER_MV) + flag(force_integer_mv); else - infer(ref_order_hint[i], priv->ref[i].order_hint); - if(current->ref_order_hint[i] != priv->ref[i].order_hint) - priv->ref[i].valid = 0; - } + infer(force_integer_mv, seq->seq_force_integer_mv); + } else { + infer(force_integer_mv, 0); + } + + if (seq->frame_id_numbers_present_flag) { + fb(id_len, current_frame_id); + + diff_len = seq->delta_frame_id_length_minus_2 + 2; + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + if (current->current_frame_id > (1 << diff_len)) { + if (priv->ref[i].frame_id > current->current_frame_id || + priv->ref[i].frame_id < (current->current_frame_id - + (1 << diff_len))) + priv->ref[i].valid = 0; + } else { + if (priv->ref[i].frame_id > current->current_frame_id && + priv->ref[i].frame_id < ((1 << id_len) + + current->current_frame_id - + (1 << diff_len))) + priv->ref[i].valid = 0; + } + } + } else { + infer(current_frame_id, 0); } - } - if(current->frame_type == AV1_FRAME_KEY || - current->frame_type == AV1_FRAME_INTRA_ONLY) { - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); + if (current->frame_type == AV1_FRAME_SWITCH) + infer(frame_size_override_flag, 1); + else if(seq->reduced_still_picture_header) + infer(frame_size_override_flag, 0); + else + flag(frame_size_override_flag); - 
if(current->allow_screen_content_tools && - priv->upscaled_width == priv->frame_width) - flag(allow_intrabc); + order_hint_bits = + seq->enable_order_hint ? seq->order_hint_bits_minus_1 + 1 : 0; + if (order_hint_bits > 0) + fb(order_hint_bits, order_hint); else - infer(allow_intrabc, 0); - } - else { - if(!seq->enable_order_hint) { - infer(frame_refs_short_signaling, 0); - } - else { - flag(frame_refs_short_signaling); - if(current->frame_refs_short_signaling) { - fb(3, last_frame_idx); - fb(3, golden_frame_idx); - CHECK(FUNC(set_frame_refs)(ctx, rw, current)); - } - } + infer(order_hint, 0); + priv->order_hint = current->order_hint; - for(i = 0; i < AV1_REFS_PER_FRAME; i++) { - if(!current->frame_refs_short_signaling) - fbs(3, ref_frame_idx[i], 1, i); - if(seq->frame_id_numbers_present_flag) { - fbs(seq->delta_frame_id_length_minus_2 + 2, - delta_frame_id_minus1[i], 1, i); - } + if (frame_is_intra || current->error_resilient_mode) + infer(primary_ref_frame, AV1_PRIMARY_REF_NONE); + else + fb(3, primary_ref_frame); + + if (seq->decoder_model_info_present_flag) { + flag(buffer_removal_time_present_flag); + if (current->buffer_removal_time_present_flag) { + for (i = 0; i <= seq->operating_points_cnt_minus_1; i++) { + if (seq->decoder_model_present_for_this_op[i]) { + int op_pt_idc = seq->operating_point_idc[i]; + int in_temporal_layer = (op_pt_idc >> priv->temporal_id ) & 1; + int in_spatial_layer = (op_pt_idc >> (priv->spatial_id + 8)) & 1; + if (seq->operating_point_idc[i] == 0 || + (in_temporal_layer && in_spatial_layer)) { + fbs(seq->decoder_model_info.buffer_removal_time_length_minus_1 + 1, + buffer_removal_time[i], 1, i); + } + } + } + } } - if(current->frame_size_override_flag && - !current->error_resilient_mode) { - CHECK(FUNC(frame_size_with_refs)(ctx, rw, current)); - } - else { - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); + if (current->frame_type == AV1_FRAME_SWITCH || + (current->frame_type == AV1_FRAME_KEY 
&& current->show_frame)) + infer(refresh_frame_flags, all_frames); + else + fb(8, refresh_frame_flags); + + if (!frame_is_intra || current->refresh_frame_flags != all_frames) { + if (seq->enable_order_hint) { + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + if (current->error_resilient_mode) + fbs(order_hint_bits, ref_order_hint[i], 1, i); + else + infer(ref_order_hint[i], priv->ref[i].order_hint); + if (current->ref_order_hint[i] != priv->ref[i].order_hint) + priv->ref[i].valid = 0; + } + } } - if(current->force_integer_mv) - infer(allow_high_precision_mv, 0); - else - flag(allow_high_precision_mv); + if (current->frame_type == AV1_FRAME_KEY || + current->frame_type == AV1_FRAME_INTRA_ONLY) { + CHECK(FUNC(frame_size)(ctx, rw, current)); + CHECK(FUNC(render_size)(ctx, rw, current)); - CHECK(FUNC(interpolation_filter)(ctx, rw, current)); + if (current->allow_screen_content_tools && + priv->upscaled_width == priv->frame_width) + flag(allow_intrabc); + else + infer(allow_intrabc, 0); + + } else { + if (!seq->enable_order_hint) { + infer(frame_refs_short_signaling, 0); + } else { + flag(frame_refs_short_signaling); + if (current->frame_refs_short_signaling) { + fb(3, last_frame_idx); + fb(3, golden_frame_idx); + CHECK(FUNC(set_frame_refs)(ctx, rw, current)); + } + } - flag(is_motion_mode_switchable); + for (i = 0; i < AV1_REFS_PER_FRAME; i++) { + if (!current->frame_refs_short_signaling) + fbs(3, ref_frame_idx[i], 1, i); + if (seq->frame_id_numbers_present_flag) { + fbs(seq->delta_frame_id_length_minus_2 + 2, + delta_frame_id_minus1[i], 1, i); + } + } - if(current->error_resilient_mode || - !seq->enable_ref_frame_mvs) - infer(use_ref_frame_mvs, 0); - else - flag(use_ref_frame_mvs); + if (current->frame_size_override_flag && + !current->error_resilient_mode) { + CHECK(FUNC(frame_size_with_refs)(ctx, rw, current)); + } else { + CHECK(FUNC(frame_size)(ctx, rw, current)); + CHECK(FUNC(render_size)(ctx, rw, current)); + } - infer(allow_intrabc, 0); - } + if 
(current->force_integer_mv) + infer(allow_high_precision_mv, 0); + else + flag(allow_high_precision_mv); - if(!frame_is_intra) { - // Derive reference frame sign biases. - } + CHECK(FUNC(interpolation_filter)(ctx, rw, current)); - if(seq->reduced_still_picture_header || current->disable_cdf_update) - infer(disable_frame_end_update_cdf, 1); - else - flag(disable_frame_end_update_cdf); + flag(is_motion_mode_switchable); - if(current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - // Init non-coeff CDFs. - // Setup past independence. - } - else { - // Load CDF tables from previous frame. - // Load params from previous frame. - } + if (current->error_resilient_mode || + !seq->enable_ref_frame_mvs) + infer(use_ref_frame_mvs, 0); + else + flag(use_ref_frame_mvs); - if(current->use_ref_frame_mvs) { - // Perform motion field estimation process. - } + infer(allow_intrabc, 0); + } - CHECK(FUNC(tile_info)(ctx, rw, current)); + if (!frame_is_intra) { + // Derive reference frame sign biases. + } - CHECK(FUNC(quantization_params)(ctx, rw, current)); + if (seq->reduced_still_picture_header || current->disable_cdf_update) + infer(disable_frame_end_update_cdf, 1); + else + flag(disable_frame_end_update_cdf); - CHECK(FUNC(segmentation_params)(ctx, rw, current)); + if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { + // Init non-coeff CDFs. + // Setup past independence. + } else { + // Load CDF tables from previous frame. + // Load params from previous frame. + } - CHECK(FUNC(delta_q_params)(ctx, rw, current)); + if (current->use_ref_frame_mvs) { + // Perform motion field estimation process. + } - CHECK(FUNC(delta_lf_params)(ctx, rw, current)); + CHECK(FUNC(tile_info)(ctx, rw, current)); - // Init coeff CDFs / load previous segments. 
+ CHECK(FUNC(quantization_params)(ctx, rw, current)); - priv->coded_lossless = 1; - for(i = 0; i < AV1_MAX_SEGMENTS; i++) { - int qindex; - if(current->feature_enabled[i][AV1_SEG_LVL_ALT_Q]) { - qindex = (current->base_q_idx + - current->feature_value[i][AV1_SEG_LVL_ALT_Q]); - } - else { - qindex = current->base_q_idx; - } - qindex = av_clip_uintp2(qindex, 8); + CHECK(FUNC(segmentation_params)(ctx, rw, current)); - if(qindex || current->delta_q_y_dc || - current->delta_q_u_ac || current->delta_q_u_dc || - current->delta_q_v_ac || current->delta_q_v_dc) { - priv->coded_lossless = 0; + CHECK(FUNC(delta_q_params)(ctx, rw, current)); + + CHECK(FUNC(delta_lf_params)(ctx, rw, current)); + + // Init coeff CDFs / load previous segments. + + priv->coded_lossless = 1; + for (i = 0; i < AV1_MAX_SEGMENTS; i++) { + int qindex; + if (current->feature_enabled[i][AV1_SEG_LVL_ALT_Q]) { + qindex = (current->base_q_idx + + current->feature_value[i][AV1_SEG_LVL_ALT_Q]); + } else { + qindex = current->base_q_idx; + } + qindex = av_clip_uintp2(qindex, 8); + + if (qindex || current->delta_q_y_dc || + current->delta_q_u_ac || current->delta_q_u_dc || + current->delta_q_v_ac || current->delta_q_v_dc) { + priv->coded_lossless = 0; + } } - } - priv->all_lossless = priv->coded_lossless && - priv->frame_width == priv->upscaled_width; + priv->all_lossless = priv->coded_lossless && + priv->frame_width == priv->upscaled_width; - CHECK(FUNC(loop_filter_params)(ctx, rw, current)); + CHECK(FUNC(loop_filter_params)(ctx, rw, current)); - CHECK(FUNC(cdef_params)(ctx, rw, current)); + CHECK(FUNC(cdef_params)(ctx, rw, current)); - CHECK(FUNC(lr_params)(ctx, rw, current)); + CHECK(FUNC(lr_params)(ctx, rw, current)); - CHECK(FUNC(read_tx_mode)(ctx, rw, current)); + CHECK(FUNC(read_tx_mode)(ctx, rw, current)); - CHECK(FUNC(frame_reference_mode)(ctx, rw, current)); + CHECK(FUNC(frame_reference_mode)(ctx, rw, current)); - CHECK(FUNC(skip_mode_params)(ctx, rw, current)); + CHECK(FUNC(skip_mode_params)(ctx, rw, 
current)); - if(frame_is_intra || current->error_resilient_mode || - !seq->enable_warped_motion) - infer(allow_warped_motion, 0); - else - flag(allow_warped_motion); + if (frame_is_intra || current->error_resilient_mode || + !seq->enable_warped_motion) + infer(allow_warped_motion, 0); + else + flag(allow_warped_motion); - flag(reduced_tx_set); + flag(reduced_tx_set); - CHECK(FUNC(global_motion_params)(ctx, rw, current)); + CHECK(FUNC(global_motion_params)(ctx, rw, current)); - CHECK(FUNC(film_grain_params)(ctx, rw, ¤t->film_grain, current)); + CHECK(FUNC(film_grain_params)(ctx, rw, ¤t->film_grain, current)); - av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame %d: size %dx%d " - "upscaled %d render %dx%d subsample %dx%d " - "bitdepth %d tiles %dx%d.\n", - priv->order_hint, - priv->frame_width, priv->frame_height, priv->upscaled_width, - priv->render_width, priv->render_height, - seq->color_config.subsampling_x + 1, - seq->color_config.subsampling_y + 1, priv->bit_depth, - priv->tile_rows, priv->tile_cols); + av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame %d: size %dx%d " + "upscaled %d render %dx%d subsample %dx%d " + "bitdepth %d tiles %dx%d.\n", priv->order_hint, + priv->frame_width, priv->frame_height, priv->upscaled_width, + priv->render_width, priv->render_height, + seq->color_config.subsampling_x + 1, + seq->color_config.subsampling_y + 1, priv->bit_depth, + priv->tile_rows, priv->tile_cols); update_refs: - for(i = 0; i < AV1_NUM_REF_FRAMES; i++) { - if(current->refresh_frame_flags & (1 << i)) { - priv->ref[i] = (AV1ReferenceFrameState) { - .valid = 1, - .frame_id = current->current_frame_id, - .upscaled_width = priv->upscaled_width, - .frame_width = priv->frame_width, - .frame_height = priv->frame_height, - .render_width = priv->render_width, - .render_height = priv->render_height, - .frame_type = current->frame_type, - .subsampling_x = seq->color_config.subsampling_x, - .subsampling_y = seq->color_config.subsampling_y, - .bit_depth = priv->bit_depth, - .order_hint = 
priv->order_hint, - }; - memcpy(priv->ref[i].loop_filter_ref_deltas, current->loop_filter_ref_deltas, - sizeof(current->loop_filter_ref_deltas)); - memcpy(priv->ref[i].loop_filter_mode_deltas, current->loop_filter_mode_deltas, - sizeof(current->loop_filter_mode_deltas)); - memcpy(priv->ref[i].feature_enabled, current->feature_enabled, - sizeof(current->feature_enabled)); - memcpy(priv->ref[i].feature_value, current->feature_value, - sizeof(current->feature_value)); - } - } - - return 0; + for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { + if (current->refresh_frame_flags & (1 << i)) { + priv->ref[i] = (AV1ReferenceFrameState) { + .valid = 1, + .frame_id = current->current_frame_id, + .upscaled_width = priv->upscaled_width, + .frame_width = priv->frame_width, + .frame_height = priv->frame_height, + .render_width = priv->render_width, + .render_height = priv->render_height, + .frame_type = current->frame_type, + .subsampling_x = seq->color_config.subsampling_x, + .subsampling_y = seq->color_config.subsampling_y, + .bit_depth = priv->bit_depth, + .order_hint = priv->order_hint, + }; + memcpy(priv->ref[i].loop_filter_ref_deltas, current->loop_filter_ref_deltas, + sizeof(current->loop_filter_ref_deltas)); + memcpy(priv->ref[i].loop_filter_mode_deltas, current->loop_filter_mode_deltas, + sizeof(current->loop_filter_mode_deltas)); + memcpy(priv->ref[i].feature_enabled, current->feature_enabled, + sizeof(current->feature_enabled)); + memcpy(priv->ref[i].feature_value, current->feature_value, + sizeof(current->feature_value)); + } + } + + return 0; } static int FUNC(frame_header_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current, int redundant, - AVBufferRef *rw_buffer_ref) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int start_pos, fh_bits, fh_bytes, err; - uint8_t *fh_start; - - if(priv->seen_frame_header) { - if(!redundant) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid repeated " - "frame header OBU.\n"); - return AVERROR_INVALIDDATA; - } 
- else { - GetBitContext fh; - size_t i, b; - uint32_t val; - - HEADER("Redundant Frame Header"); - - av_assert0(priv->frame_header_ref && priv->frame_header); - - init_get_bits(&fh, priv->frame_header, - priv->frame_header_size); - for(i = 0; i < priv->frame_header_size; i += 8) { - b = FFMIN(priv->frame_header_size - i, 8); - val = get_bits(&fh, b); - xf(b, frame_header_copy[i], - val, val, val, 1, i / 8); - } - } - } - else { - if(redundant) - HEADER("Redundant Frame Header (used as Frame Header)"); - else - HEADER("Frame Header"); + AV1RawFrameHeader *current, int redundant, + AVBufferRef *rw_buffer_ref) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int start_pos, fh_bits, fh_bytes, err; + uint8_t *fh_start; + + if (priv->seen_frame_header) { + if (!redundant) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid repeated " + "frame header OBU.\n"); + return AVERROR_INVALIDDATA; + } else { + GetBitContext fh; + size_t i, b; + uint32_t val; + + HEADER("Redundant Frame Header"); + + av_assert0(priv->frame_header_ref && priv->frame_header); + + init_get_bits(&fh, priv->frame_header, + priv->frame_header_size); + for (i = 0; i < priv->frame_header_size; i += 8) { + b = FFMIN(priv->frame_header_size - i, 8); + val = get_bits(&fh, b); + xf(b, frame_header_copy[i], + val, val, val, 1, i / 8); + } + } + } else { + if (redundant) + HEADER("Redundant Frame Header (used as Frame Header)"); + else + HEADER("Frame Header"); #ifdef READ - start_pos = get_bits_count(rw); + start_pos = get_bits_count(rw); #else - start_pos = put_bits_count(rw); + start_pos = put_bits_count(rw); #endif - CHECK(FUNC(uncompressed_header)(ctx, rw, current)); + CHECK(FUNC(uncompressed_header)(ctx, rw, current)); - priv->tile_num = 0; + priv->tile_num = 0; - if(current->show_existing_frame) { - priv->seen_frame_header = 0; - } - else { - priv->seen_frame_header = 1; + if (current->show_existing_frame) { + priv->seen_frame_header = 0; + } else { + priv->seen_frame_header = 1; - 
av_buffer_unref(&priv->frame_header_ref); + av_buffer_unref(&priv->frame_header_ref); #ifdef READ - fh_bits = get_bits_count(rw) - start_pos; - fh_start = (uint8_t *)rw->buffer + start_pos / 8; + fh_bits = get_bits_count(rw) - start_pos; + fh_start = (uint8_t*)rw->buffer + start_pos / 8; #else - // Need to flush the bitwriter so that we can copy its output, - // but use a copy so we don't affect the caller's structure. - { - PutBitContext tmp = *rw; - flush_put_bits(&tmp); - } - - fh_bits = put_bits_count(rw) - start_pos; - fh_start = rw->buf + start_pos / 8; + // Need to flush the bitwriter so that we can copy its output, + // but use a copy so we don't affect the caller's structure. + { + PutBitContext tmp = *rw; + flush_put_bits(&tmp); + } + + fh_bits = put_bits_count(rw) - start_pos; + fh_start = rw->buf + start_pos / 8; #endif - fh_bytes = (fh_bits + 7) / 8; - - priv->frame_header_size = fh_bits; - - if(rw_buffer_ref) { - priv->frame_header_ref = av_buffer_ref(rw_buffer_ref); - if(!priv->frame_header_ref) - return AVERROR(ENOMEM); - priv->frame_header = fh_start; - } - else { - priv->frame_header_ref = - av_buffer_alloc(fh_bytes + AV_INPUT_BUFFER_PADDING_SIZE); - if(!priv->frame_header_ref) - return AVERROR(ENOMEM); - priv->frame_header = priv->frame_header_ref->data; - memcpy(priv->frame_header, fh_start, fh_bytes); - } - } - } - - return 0; + fh_bytes = (fh_bits + 7) / 8; + + priv->frame_header_size = fh_bits; + + if (rw_buffer_ref) { + priv->frame_header_ref = av_buffer_ref(rw_buffer_ref); + if (!priv->frame_header_ref) + return AVERROR(ENOMEM); + priv->frame_header = fh_start; + } else { + priv->frame_header_ref = + av_buffer_alloc(fh_bytes + AV_INPUT_BUFFER_PADDING_SIZE); + if (!priv->frame_header_ref) + return AVERROR(ENOMEM); + priv->frame_header = priv->frame_header_ref->data; + memcpy(priv->frame_header, fh_start, fh_bytes); + } + } + } + + return 0; } static int FUNC(tile_group_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawTileGroup 
*current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - int num_tiles, tile_bits; - int err; - - HEADER("Tile Group"); - - num_tiles = priv->tile_cols * priv->tile_rows; - if(num_tiles > 1) - flag(tile_start_and_end_present_flag); - else - infer(tile_start_and_end_present_flag, 0); - - if(num_tiles == 1 || !current->tile_start_and_end_present_flag) { - infer(tg_start, 0); - infer(tg_end, num_tiles - 1); - } - else { - tile_bits = cbs_av1_tile_log2(1, priv->tile_cols) + - cbs_av1_tile_log2(1, priv->tile_rows); - fc(tile_bits, tg_start, priv->tile_num, num_tiles - 1); - fc(tile_bits, tg_end, current->tg_start, num_tiles - 1); - } - - priv->tile_num = current->tg_end + 1; - - CHECK(FUNC(byte_alignment)(ctx, rw)); - - // Reset header for next frame. - if(current->tg_end == num_tiles - 1) - priv->seen_frame_header = 0; + AV1RawTileGroup *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + int num_tiles, tile_bits; + int err; + + HEADER("Tile Group"); + + num_tiles = priv->tile_cols * priv->tile_rows; + if (num_tiles > 1) + flag(tile_start_and_end_present_flag); + else + infer(tile_start_and_end_present_flag, 0); + + if (num_tiles == 1 || !current->tile_start_and_end_present_flag) { + infer(tg_start, 0); + infer(tg_end, num_tiles - 1); + } else { + tile_bits = cbs_av1_tile_log2(1, priv->tile_cols) + + cbs_av1_tile_log2(1, priv->tile_rows); + fc(tile_bits, tg_start, priv->tile_num, num_tiles - 1); + fc(tile_bits, tg_end, current->tg_start, num_tiles - 1); + } + + priv->tile_num = current->tg_end + 1; - // Tile data follows. + CHECK(FUNC(byte_alignment)(ctx, rw)); - return 0; + // Reset header for next frame. + if (current->tg_end == num_tiles - 1) + priv->seen_frame_header = 0; + + // Tile data follows. 
+ + return 0; } static int FUNC(frame_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrame *current, - AVBufferRef *rw_buffer_ref) { - int err; + AV1RawFrame *current, + AVBufferRef *rw_buffer_ref) +{ + int err; - CHECK(FUNC(frame_header_obu)(ctx, rw, ¤t->header, - 0, rw_buffer_ref)); + CHECK(FUNC(frame_header_obu)(ctx, rw, ¤t->header, + 0, rw_buffer_ref)); - CHECK(FUNC(byte_alignment)(ctx, rw)); + CHECK(FUNC(byte_alignment)(ctx, rw)); - CHECK(FUNC(tile_group_obu)(ctx, rw, ¤t->tile_group)); + CHECK(FUNC(tile_group_obu)(ctx, rw, ¤t->tile_group)); - return 0; + return 0; } static int FUNC(tile_list_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawTileList *current) { - int err; + AV1RawTileList *current) +{ + int err; - fb(8, output_frame_width_in_tiles_minus_1); - fb(8, output_frame_height_in_tiles_minus_1); + fb(8, output_frame_width_in_tiles_minus_1); + fb(8, output_frame_height_in_tiles_minus_1); - fb(16, tile_count_minus_1); + fb(16, tile_count_minus_1); - // Tile data follows. + // Tile data follows. 
- return 0; + return 0; } static int FUNC(metadata_hdr_cll)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataHDRCLL *current) { - int err; + AV1RawMetadataHDRCLL *current) +{ + int err; - fb(16, max_cll); - fb(16, max_fall); + fb(16, max_cll); + fb(16, max_fall); - return 0; + return 0; } static int FUNC(metadata_hdr_mdcv)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataHDRMDCV *current) { - int err, i; + AV1RawMetadataHDRMDCV *current) +{ + int err, i; - for(i = 0; i < 3; i++) { - fbs(16, primary_chromaticity_x[i], 1, i); - fbs(16, primary_chromaticity_y[i], 1, i); - } + for (i = 0; i < 3; i++) { + fbs(16, primary_chromaticity_x[i], 1, i); + fbs(16, primary_chromaticity_y[i], 1, i); + } - fb(16, white_point_chromaticity_x); - fb(16, white_point_chromaticity_y); + fb(16, white_point_chromaticity_x); + fb(16, white_point_chromaticity_y); - fc(32, luminance_max, 1, MAX_UINT_BITS(32)); - // luminance_min must be lower than luminance_max. Convert luminance_max from - // 24.8 fixed point to 18.14 fixed point in order to compare them. - fc(32, luminance_min, 0, FFMIN(((uint64_t)current->luminance_max << 6) - 1, MAX_UINT_BITS(32))); + fc(32, luminance_max, 1, MAX_UINT_BITS(32)); + // luminance_min must be lower than luminance_max. Convert luminance_max from + // 24.8 fixed point to 18.14 fixed point in order to compare them. 
+ fc(32, luminance_min, 0, FFMIN(((uint64_t)current->luminance_max << 6) - 1, + MAX_UINT_BITS(32))); - return 0; + return 0; } static int FUNC(scalability_structure)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataScalability *current) { - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq; - int err, i, j; - - if(!priv->sequence_header) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "No sequence header available: " - "unable to parse scalability metadata.\n"); - return AVERROR_INVALIDDATA; - } - seq = priv->sequence_header; - - fb(2, spatial_layers_cnt_minus_1); - flag(spatial_layer_dimensions_present_flag); - flag(spatial_layer_description_present_flag); - flag(temporal_group_description_present_flag); - fc(3, scalability_structure_reserved_3bits, 0, 0); - if(current->spatial_layer_dimensions_present_flag) { - for(i = 0; i <= current->spatial_layers_cnt_minus_1; i++) { - fcs(16, spatial_layer_max_width[i], - 0, seq->max_frame_width_minus_1 + 1, 1, i); - fcs(16, spatial_layer_max_height[i], - 0, seq->max_frame_height_minus_1 + 1, 1, i); - } - } - if(current->spatial_layer_description_present_flag) { - for(i = 0; i <= current->spatial_layers_cnt_minus_1; i++) - fbs(8, spatial_layer_ref_id[i], 1, i); - } - if(current->temporal_group_description_present_flag) { - fb(8, temporal_group_size); - for(i = 0; i < current->temporal_group_size; i++) { - fbs(3, temporal_group_temporal_id[i], 1, i); - flags(temporal_group_temporal_switching_up_point_flag[i], 1, i); - flags(temporal_group_spatial_switching_up_point_flag[i], 1, i); - fbs(3, temporal_group_ref_cnt[i], 1, i); - for(j = 0; j < current->temporal_group_ref_cnt[i]; j++) { - fbs(8, temporal_group_ref_pic_diff[i][j], 2, i, j); - } - } - } - - return 0; + AV1RawMetadataScalability *current) +{ + CodedBitstreamAV1Context *priv = ctx->priv_data; + const AV1RawSequenceHeader *seq; + int err, i, j; + + if (!priv->sequence_header) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "No sequence header 
available: " + "unable to parse scalability metadata.\n"); + return AVERROR_INVALIDDATA; + } + seq = priv->sequence_header; + + fb(2, spatial_layers_cnt_minus_1); + flag(spatial_layer_dimensions_present_flag); + flag(spatial_layer_description_present_flag); + flag(temporal_group_description_present_flag); + fc(3, scalability_structure_reserved_3bits, 0, 0); + if (current->spatial_layer_dimensions_present_flag) { + for (i = 0; i <= current->spatial_layers_cnt_minus_1; i++) { + fcs(16, spatial_layer_max_width[i], + 0, seq->max_frame_width_minus_1 + 1, 1, i); + fcs(16, spatial_layer_max_height[i], + 0, seq->max_frame_height_minus_1 + 1, 1, i); + } + } + if (current->spatial_layer_description_present_flag) { + for (i = 0; i <= current->spatial_layers_cnt_minus_1; i++) + fbs(8, spatial_layer_ref_id[i], 1, i); + } + if (current->temporal_group_description_present_flag) { + fb(8, temporal_group_size); + for (i = 0; i < current->temporal_group_size; i++) { + fbs(3, temporal_group_temporal_id[i], 1, i); + flags(temporal_group_temporal_switching_up_point_flag[i], 1, i); + flags(temporal_group_spatial_switching_up_point_flag[i], 1, i); + fbs(3, temporal_group_ref_cnt[i], 1, i); + for (j = 0; j < current->temporal_group_ref_cnt[i]; j++) { + fbs(8, temporal_group_ref_pic_diff[i][j], 2, i, j); + } + } + } + + return 0; } static int FUNC(metadata_scalability)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataScalability *current) { - int err; + AV1RawMetadataScalability *current) +{ + int err; - fb(8, scalability_mode_idc); + fb(8, scalability_mode_idc); - if(current->scalability_mode_idc == AV1_SCALABILITY_SS) - CHECK(FUNC(scalability_structure)(ctx, rw, current)); + if (current->scalability_mode_idc == AV1_SCALABILITY_SS) + CHECK(FUNC(scalability_structure)(ctx, rw, current)); - return 0; + return 0; } static int FUNC(metadata_itut_t35)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataITUTT35 *current) { - int err; - size_t i; + AV1RawMetadataITUTT35 
*current) +{ + int err; + size_t i; - fb(8, itu_t_t35_country_code); - if(current->itu_t_t35_country_code == 0xff) - fb(8, itu_t_t35_country_code_extension_byte); + fb(8, itu_t_t35_country_code); + if (current->itu_t_t35_country_code == 0xff) + fb(8, itu_t_t35_country_code_extension_byte); #ifdef READ - // The payload runs up to the start of the trailing bits, but there might - // be arbitrarily many trailing zeroes so we need to read through twice. - current->payload_size = cbs_av1_get_payload_bytes_left(rw); - - current->payload_ref = av_buffer_alloc(current->payload_size); - if(!current->payload_ref) - return AVERROR(ENOMEM); - current->payload = current->payload_ref->data; + // The payload runs up to the start of the trailing bits, but there might + // be arbitrarily many trailing zeroes so we need to read through twice. + current->payload_size = cbs_av1_get_payload_bytes_left(rw); + + current->payload_ref = av_buffer_alloc(current->payload_size); + if (!current->payload_ref) + return AVERROR(ENOMEM); + current->payload = current->payload_ref->data; #endif - for(i = 0; i < current->payload_size; i++) - xf(8, itu_t_t35_payload_bytes[i], current->payload[i], - 0x00, 0xff, 1, i); + for (i = 0; i < current->payload_size; i++) + xf(8, itu_t_t35_payload_bytes[i], current->payload[i], + 0x00, 0xff, 1, i); - return 0; + return 0; } static int FUNC(metadata_timecode)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataTimecode *current) { - int err; - - fb(5, counting_type); - flag(full_timestamp_flag); - flag(discontinuity_flag); - flag(cnt_dropped_flag); - fb(9, n_frames); - - if(current->full_timestamp_flag) { - fc(6, seconds_value, 0, 59); - fc(6, minutes_value, 0, 59); - fc(5, hours_value, 0, 23); - } - else { - flag(seconds_flag); - if(current->seconds_flag) { - fc(6, seconds_value, 0, 59); - flag(minutes_flag); - if(current->minutes_flag) { + AV1RawMetadataTimecode *current) +{ + int err; + + fb(5, counting_type); + flag(full_timestamp_flag); + 
flag(discontinuity_flag); + flag(cnt_dropped_flag); + fb(9, n_frames); + + if (current->full_timestamp_flag) { + fc(6, seconds_value, 0, 59); fc(6, minutes_value, 0, 59); - flag(hours_flag); - if(current->hours_flag) - fc(5, hours_value, 0, 23); - } + fc(5, hours_value, 0, 23); + } else { + flag(seconds_flag); + if (current->seconds_flag) { + fc(6, seconds_value, 0, 59); + flag(minutes_flag); + if (current->minutes_flag) { + fc(6, minutes_value, 0, 59); + flag(hours_flag); + if (current->hours_flag) + fc(5, hours_value, 0, 23); + } + } } - } - fb(5, time_offset_length); - if(current->time_offset_length > 0) - fb(current->time_offset_length, time_offset_value); - else - infer(time_offset_length, 0); + fb(5, time_offset_length); + if (current->time_offset_length > 0) + fb(current->time_offset_length, time_offset_value); + else + infer(time_offset_length, 0); - return 0; + return 0; } static int FUNC(metadata_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadata *current) { - int err; - - leb128(metadata_type); - - switch(current->metadata_type) { - case AV1_METADATA_TYPE_HDR_CLL: - CHECK(FUNC(metadata_hdr_cll)(ctx, rw, ¤t->metadata.hdr_cll)); - break; - case AV1_METADATA_TYPE_HDR_MDCV: - CHECK(FUNC(metadata_hdr_mdcv)(ctx, rw, ¤t->metadata.hdr_mdcv)); - break; - case AV1_METADATA_TYPE_SCALABILITY: - CHECK(FUNC(metadata_scalability)(ctx, rw, ¤t->metadata.scalability)); - break; - case AV1_METADATA_TYPE_ITUT_T35: - CHECK(FUNC(metadata_itut_t35)(ctx, rw, ¤t->metadata.itut_t35)); - break; - case AV1_METADATA_TYPE_TIMECODE: - CHECK(FUNC(metadata_timecode)(ctx, rw, ¤t->metadata.timecode)); - break; - default: - // Unknown metadata type. 
- return AVERROR_PATCHWELCOME; - } - - return 0; + AV1RawMetadata *current) +{ + int err; + + leb128(metadata_type); + + switch (current->metadata_type) { + case AV1_METADATA_TYPE_HDR_CLL: + CHECK(FUNC(metadata_hdr_cll)(ctx, rw, ¤t->metadata.hdr_cll)); + break; + case AV1_METADATA_TYPE_HDR_MDCV: + CHECK(FUNC(metadata_hdr_mdcv)(ctx, rw, ¤t->metadata.hdr_mdcv)); + break; + case AV1_METADATA_TYPE_SCALABILITY: + CHECK(FUNC(metadata_scalability)(ctx, rw, ¤t->metadata.scalability)); + break; + case AV1_METADATA_TYPE_ITUT_T35: + CHECK(FUNC(metadata_itut_t35)(ctx, rw, ¤t->metadata.itut_t35)); + break; + case AV1_METADATA_TYPE_TIMECODE: + CHECK(FUNC(metadata_timecode)(ctx, rw, ¤t->metadata.timecode)); + break; + default: + // Unknown metadata type. + return AVERROR_PATCHWELCOME; + } + + return 0; } static int FUNC(padding_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawPadding *current) { - int i, err; + AV1RawPadding *current) +{ + int i, err; - HEADER("Padding"); + HEADER("Padding"); #ifdef READ - // The payload runs up to the start of the trailing bits, but there might - // be arbitrarily many trailing zeroes so we need to read through twice. - current->payload_size = cbs_av1_get_payload_bytes_left(rw); - - current->payload_ref = av_buffer_alloc(current->payload_size); - if(!current->payload_ref) - return AVERROR(ENOMEM); - current->payload = current->payload_ref->data; + // The payload runs up to the start of the trailing bits, but there might + // be arbitrarily many trailing zeroes so we need to read through twice. 
+ current->payload_size = cbs_av1_get_payload_bytes_left(rw); + + current->payload_ref = av_buffer_alloc(current->payload_size); + if (!current->payload_ref) + return AVERROR(ENOMEM); + current->payload = current->payload_ref->data; #endif - for(i = 0; i < current->payload_size; i++) - xf(8, obu_padding_byte[i], current->payload[i], 0x00, 0xff, 1, i); + for (i = 0; i < current->payload_size; i++) + xf(8, obu_padding_byte[i], current->payload[i], 0x00, 0xff, 1, i); - return 0; + return 0; } diff --git a/third-party/cbs/cbs_h2645.c b/third-party/cbs/cbs_h2645.c index 4a60072d522..96a3efa2643 100644 --- a/third-party/cbs/cbs_h2645.c +++ b/third-party/cbs/cbs_h2645.c @@ -16,357 +16,350 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include -#include +#include "libavutil/attributes.h" +#include "libavutil/avassert.h" +// [manual] Changed include path +#include "bytestream.h" #include "cbs/cbs.h" +#include "cbs_internal.h" #include "cbs/cbs_h264.h" #include "cbs/cbs_h265.h" #include "cbs/h264.h" #include "cbs/h2645_parse.h" #include "cbs/hevc.h" -#include "bytestream.h" -#include "cbs_internal.h" -#include "h264_sei.h" -#include "hevc_sei.h" +// [manual] Added to resolve missing symbols #include "intmath.h" - +#include "log2_tab.c" static int cbs_read_ue_golomb(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, const int *subscripts, - uint32_t *write_to, - uint32_t range_min, uint32_t range_max) { - uint32_t value; - int position, i, j; - unsigned int k; - char bits[65]; - - position = get_bits_count(gbc); - - for(i = 0; i < 32; i++) { - if(get_bits_left(gbc) < i + 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; + const char *name, const int *subscripts, + uint32_t *write_to, + uint32_t range_min, uint32_t range_max) +{ + uint32_t value; + int position, i, j; + unsigned int k; + char bits[65]; + + position = 
get_bits_count(gbc); + + for (i = 0; i < 32; i++) { + if (get_bits_left(gbc) < i + 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + k = get_bits1(gbc); + bits[i] = k ? '1' : '0'; + if (k) + break; + } + if (i >= 32) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at " + "%s: more than 31 zeroes.\n", name); + return AVERROR_INVALIDDATA; + } + value = 1; + for (j = 0; j < i; j++) { + k = get_bits1(gbc); + bits[i + j + 1] = k ? '1' : '0'; + value = value << 1 | k; } - k = get_bits1(gbc); - bits[i] = k ? '1' : '0'; - if(k) - break; - } - if(i >= 32) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at " - "%s: more than 31 zeroes.\n", - name); - return AVERROR_INVALIDDATA; - } - value = 1; - for(j = 0; j < i; j++) { - k = get_bits1(gbc); - bits[i + j + 1] = k ? '1' : '0'; - value = value << 1 | k; - } - bits[i + j + 1] = 0; - --value; - - if(ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; + bits[i + j + 1] = 0; + --value; + + if (ctx->trace_enable) + ff_cbs_trace_syntax_element(ctx, position, name, subscripts, + bits, value); + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + *write_to = value; + return 0; } static int cbs_read_se_golomb(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, const int *subscripts, - int32_t *write_to, - int32_t range_min, int32_t range_max) { - int32_t value; - int position, i, j; - unsigned int k; - 
uint32_t v; - char bits[65]; - - position = get_bits_count(gbc); - - for(i = 0; i < 32; i++) { - if(get_bits_left(gbc) < i + 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; + const char *name, const int *subscripts, + int32_t *write_to, + int32_t range_min, int32_t range_max) +{ + int32_t value; + int position, i, j; + unsigned int k; + uint32_t v; + char bits[65]; + + position = get_bits_count(gbc); + + for (i = 0; i < 32; i++) { + if (get_bits_left(gbc) < i + 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + k = get_bits1(gbc); + bits[i] = k ? '1' : '0'; + if (k) + break; } - k = get_bits1(gbc); - bits[i] = k ? '1' : '0'; - if(k) - break; - } - if(i >= 32) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at " - "%s: more than 31 zeroes.\n", - name); - return AVERROR_INVALIDDATA; - } - v = 1; - for(j = 0; j < i; j++) { - k = get_bits1(gbc); - bits[i + j + 1] = k ? '1' : '0'; - v = v << 1 | k; - } - bits[i + j + 1] = 0; - if(v & 1) - value = -(int32_t)(v / 2); - else - value = v / 2; - - if(ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRId32 ", but must be in [%" PRId32 ",%" PRId32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; + if (i >= 32) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at " + "%s: more than 31 zeroes.\n", name); + return AVERROR_INVALIDDATA; + } + v = 1; + for (j = 0; j < i; j++) { + k = get_bits1(gbc); + bits[i + j + 1] = k ? 
'1' : '0'; + v = v << 1 | k; + } + bits[i + j + 1] = 0; + if (v & 1) + value = -(int32_t)(v / 2); + else + value = v / 2; + + if (ctx->trace_enable) + ff_cbs_trace_syntax_element(ctx, position, name, subscripts, + bits, value); + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + + *write_to = value; + return 0; } static int cbs_write_ue_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, const int *subscripts, - uint32_t value, - uint32_t range_min, uint32_t range_max) { - int len; - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - av_assert0(value != UINT32_MAX); - - len = av_log2(value + 1); - if(put_bits_left(pbc) < 2 * len + 1) - return AVERROR(ENOSPC); - - if(ctx->trace_enable) { - char bits[65]; - int i; + const char *name, const int *subscripts, + uint32_t value, + uint32_t range_min, uint32_t range_max) +{ + int len; + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + av_assert0(value != UINT32_MAX); - for(i = 0; i < len; i++) - bits[i] = '0'; - bits[len] = '1'; - for(i = 0; i < len; i++) - bits[len + i + 1] = (value + 1) >> (len - i - 1) & 1 ? 
'1' : '0'; - bits[len + len + 1] = 0; + len = av_log2(value + 1); + if (put_bits_left(pbc) < 2 * len + 1) + return AVERROR(ENOSPC); - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } + if (ctx->trace_enable) { + char bits[65]; + int i; - put_bits(pbc, len, 0); - if(len + 1 < 32) - put_bits(pbc, len + 1, value + 1); - else - put_bits32(pbc, value + 1); + for (i = 0; i < len; i++) + bits[i] = '0'; + bits[len] = '1'; + for (i = 0; i < len; i++) + bits[len + i + 1] = (value + 1) >> (len - i - 1) & 1 ? '1' : '0'; + bits[len + len + 1] = 0; - return 0; -} + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, subscripts, bits, value); + } -static int cbs_write_se_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, const int *subscripts, - int32_t value, - int32_t range_min, int32_t range_max) { - int len; - uint32_t uvalue; - - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRId32 ", but must be in [%" PRId32 ",%" PRId32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - av_assert0(value != INT32_MIN); - - if(value == 0) - uvalue = 0; - else if(value > 0) - uvalue = 2 * (uint32_t)value - 1; - else - uvalue = 2 * (uint32_t)-value; - - len = av_log2(uvalue + 1); - if(put_bits_left(pbc) < 2 * len + 1) - return AVERROR(ENOSPC); - - if(ctx->trace_enable) { - char bits[65]; - int i; + put_bits(pbc, len, 0); + if (len + 1 < 32) + put_bits(pbc, len + 1, value + 1); + else + put_bits32(pbc, value + 1); - for(i = 0; i < len; i++) - bits[i] = '0'; - bits[len] = '1'; - for(i = 0; i < len; i++) - bits[len + i + 1] = (uvalue + 1) >> (len - i - 1) & 1 ? 
'1' : '0'; - bits[len + len + 1] = 0; + return 0; +} - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } +static int cbs_write_se_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc, + const char *name, const int *subscripts, + int32_t value, + int32_t range_min, int32_t range_max) +{ + int len; + uint32_t uvalue; + + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; + } + av_assert0(value != INT32_MIN); + + if (value == 0) + uvalue = 0; + else if (value > 0) + uvalue = 2 * (uint32_t)value - 1; + else + uvalue = 2 * (uint32_t)-value; + + len = av_log2(uvalue + 1); + if (put_bits_left(pbc) < 2 * len + 1) + return AVERROR(ENOSPC); + + if (ctx->trace_enable) { + char bits[65]; + int i; + + for (i = 0; i < len; i++) + bits[i] = '0'; + bits[len] = '1'; + for (i = 0; i < len; i++) + bits[len + i + 1] = (uvalue + 1) >> (len - i - 1) & 1 ? '1' : '0'; + bits[len + len + 1] = 0; + + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, subscripts, bits, value); + } - put_bits(pbc, len, 0); - if(len + 1 < 32) - put_bits(pbc, len + 1, uvalue + 1); - else - put_bits32(pbc, uvalue + 1); + put_bits(pbc, len, 0); + if (len + 1 < 32) + put_bits(pbc, len + 1, uvalue + 1); + else + put_bits32(pbc, uvalue + 1); - return 0; + return 0; } // payload_extension_present() - true if we are before the last 1-bit // in the payload structure, which must be in the last byte. 
static int cbs_h265_payload_extension_present(GetBitContext *gbc, uint32_t payload_size, - int cur_pos) { - int bits_left = payload_size * 8 - cur_pos; - return (bits_left > 0 && - (bits_left > 7 || show_bits(gbc, bits_left) & MAX_UINT_BITS(bits_left - 1))); + int cur_pos) +{ + int bits_left = payload_size * 8 - cur_pos; + return (bits_left > 0 && + (bits_left > 7 || show_bits(gbc, bits_left) & MAX_UINT_BITS(bits_left - 1))); } -#define HEADER(name) \ - do { \ - ff_cbs_trace_header(ctx, name); \ - } while(0) +#define HEADER(name) do { \ + ff_cbs_trace_header(ctx, name); \ + } while (0) -#define CHECK(call) \ - do { \ - err = (call); \ - if(err < 0) \ - return err; \ - } while(0) +#define CHECK(call) do { \ + err = (call); \ + if (err < 0) \ + return err; \ + } while (0) -#define FUNC_NAME2(rw, codec, name) cbs_##codec##_##rw##_##name +#define FUNC_NAME2(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name #define FUNC_NAME1(rw, codec, name) FUNC_NAME2(rw, codec, name) #define FUNC_H264(name) FUNC_NAME1(READWRITE, h264, name) #define FUNC_H265(name) FUNC_NAME1(READWRITE, h265, name) -#define FUNC_SEI(name) FUNC_NAME1(READWRITE, sei, name) +#define FUNC_SEI(name) FUNC_NAME1(READWRITE, sei, name) -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]) { subs, __VA_ARGS__ }) : NULL) +#define SUBSCRIPTS(subs, ...) (subs > 0 ? 
((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) #define u(width, name, range_min, range_max) \ - xu(width, name, current->name, range_min, range_max, 0, ) + xu(width, name, current->name, range_min, range_max, 0, ) #define ub(width, name) \ - xu(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) + xu(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) #define flag(name) ub(1, name) #define ue(name, range_min, range_max) \ - xue(name, current->name, range_min, range_max, 0, ) + xue(name, current->name, range_min, range_max, 0, ) #define i(width, name, range_min, range_max) \ - xi(width, name, current->name, range_min, range_max, 0, ) + xi(width, name, current->name, range_min, range_max, 0, ) #define ib(width, name) \ - xi(width, name, current->name, MIN_INT_BITS(width), MAX_INT_BITS(width), 0, ) + xi(width, name, current->name, MIN_INT_BITS(width), MAX_INT_BITS(width), 0, ) #define se(name, range_min, range_max) \ - xse(name, current->name, range_min, range_max, 0, ) + xse(name, current->name, range_min, range_max, 0, ) #define us(width, name, range_min, range_max, subs, ...) \ - xu(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) + xu(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) #define ubs(width, name, subs, ...) \ - xu(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) + xu(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) #define flags(name, subs, ...) \ - xu(1, name, current->name, 0, 1, subs, __VA_ARGS__) + xu(1, name, current->name, 0, 1, subs, __VA_ARGS__) #define ues(name, range_min, range_max, subs, ...) \ - xue(name, current->name, range_min, range_max, subs, __VA_ARGS__) + xue(name, current->name, range_min, range_max, subs, __VA_ARGS__) #define is(width, name, range_min, range_max, subs, ...) 
\ - xi(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) + xi(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) #define ibs(width, name, subs, ...) \ - xi(width, name, current->name, MIN_INT_BITS(width), MAX_INT_BITS(width), subs, __VA_ARGS__) + xi(width, name, current->name, MIN_INT_BITS(width), MAX_INT_BITS(width), subs, __VA_ARGS__) #define ses(name, range_min, range_max, subs, ...) \ - xse(name, current->name, range_min, range_max, subs, __VA_ARGS__) + xse(name, current->name, range_min, range_max, subs, __VA_ARGS__) -#define fixed(width, name, value) \ - do { \ - av_unused uint32_t fixed_value = value; \ - xu(width, name, fixed_value, value, value, 0, ); \ - } while(0) +#define fixed(width, name, value) do { \ + av_unused uint32_t fixed_value = value; \ + xu(width, name, fixed_value, value, value, 0, ); \ + } while (0) #define READ #define READWRITE read #define RWContext GetBitContext -#define xu(width, name, var, range_min, range_max, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while(0) -#define xue(name, var, range_min, range_max, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(cbs_read_ue_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while(0) -#define xi(width, name, var, range_min, range_max, subs, ...) \ - do { \ - int32_t value; \ - CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while(0) -#define xse(name, var, range_min, range_max, subs, ...) 
\ - do { \ - int32_t value; \ - CHECK(cbs_read_se_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while(0) - - -#define infer(name, value) \ - do { \ - current->name = value; \ - } while(0) - -static int cbs_h2645_read_more_rbsp_data(GetBitContext *gbc) { - int bits_left = get_bits_left(gbc); - if(bits_left > 8) - return 1; - if(bits_left == 0) +#define xu(width, name, var, range_min, range_max, subs, ...) do { \ + uint32_t value; \ + CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + var = value; \ + } while (0) +#define xue(name, var, range_min, range_max, subs, ...) do { \ + uint32_t value; \ + CHECK(cbs_read_ue_golomb(ctx, rw, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + var = value; \ + } while (0) +#define xi(width, name, var, range_min, range_max, subs, ...) do { \ + int32_t value; \ + CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + var = value; \ + } while (0) +#define xse(name, var, range_min, range_max, subs, ...) 
do { \ + int32_t value; \ + CHECK(cbs_read_se_golomb(ctx, rw, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + var = value; \ + } while (0) + + +#define infer(name, value) do { \ + current->name = value; \ + } while (0) + +static int cbs_h2645_read_more_rbsp_data(GetBitContext *gbc) +{ + int bits_left = get_bits_left(gbc); + if (bits_left > 8) + return 1; + if (bits_left == 0) + return 0; + if (show_bits(gbc, bits_left) & MAX_UINT_BITS(bits_left - 1)) + return 1; return 0; - if(show_bits(gbc, bits_left) & MAX_UINT_BITS(bits_left - 1)) - return 1; - return 0; } #define more_rbsp_data(var) ((var) = cbs_h2645_read_more_rbsp_data(rw)) -#define bit_position(rw) (get_bits_count(rw)) +#define bit_position(rw) (get_bits_count(rw)) #define byte_alignment(rw) (get_bits_count(rw) % 8) -#define allocate(name, size) \ - do { \ - name##_ref = av_buffer_allocz(size + \ - AV_INPUT_BUFFER_PADDING_SIZE); \ - if(!name##_ref) \ - return AVERROR(ENOMEM); \ - name = name##_ref->data; \ - } while(0) +#define allocate(name, size) do { \ + name ## _ref = av_buffer_allocz(size + \ + AV_INPUT_BUFFER_PADDING_SIZE); \ + if (!name ## _ref) \ + return AVERROR(ENOMEM); \ + name = name ## _ref->data; \ + } while (0) #define FUNC(name) FUNC_SEI(name) #include "cbs_sei_syntax_template.c" @@ -398,60 +391,53 @@ static int cbs_h2645_read_more_rbsp_data(GetBitContext *gbc) { #define READWRITE write #define RWContext PutBitContext -#define xu(width, name, var, range_min, range_max, subs, ...) \ - do { \ - uint32_t value = var; \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while(0) -#define xue(name, var, range_min, range_max, subs, ...) \ - do { \ - uint32_t value = var; \ - CHECK(cbs_write_ue_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while(0) -#define xi(width, name, var, range_min, range_max, subs, ...) 
\ - do { \ - int32_t value = var; \ - CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while(0) -#define xse(name, var, range_min, range_max, subs, ...) \ - do { \ - int32_t value = var; \ - CHECK(cbs_write_se_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while(0) - -#define infer(name, value) \ - do { \ - if(current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, \ - "%s does not match inferred value: " \ - "%" PRId64 ", but should be %" PRId64 ".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - return AVERROR_INVALIDDATA; \ - } \ - } while(0) +#define xu(width, name, var, range_min, range_max, subs, ...) do { \ + uint32_t value = var; \ + CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + value, range_min, range_max)); \ + } while (0) +#define xue(name, var, range_min, range_max, subs, ...) do { \ + uint32_t value = var; \ + CHECK(cbs_write_ue_golomb(ctx, rw, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + value, range_min, range_max)); \ + } while (0) +#define xi(width, name, var, range_min, range_max, subs, ...) do { \ + int32_t value = var; \ + CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + value, range_min, range_max)); \ + } while (0) +#define xse(name, var, range_min, range_max, subs, ...) 
do { \ + int32_t value = var; \ + CHECK(cbs_write_se_golomb(ctx, rw, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + value, range_min, range_max)); \ + } while (0) + +#define infer(name, value) do { \ + if (current->name != (value)) { \ + av_log(ctx->log_ctx, AV_LOG_ERROR, \ + "%s does not match inferred value: " \ + "%"PRId64", but should be %"PRId64".\n", \ + #name, (int64_t)current->name, (int64_t)(value)); \ + return AVERROR_INVALIDDATA; \ + } \ + } while (0) #define more_rbsp_data(var) (var) -#define bit_position(rw) (put_bits_count(rw)) +#define bit_position(rw) (put_bits_count(rw)) #define byte_alignment(rw) (put_bits_count(rw) % 8) -#define allocate(name, size) \ - do { \ - if(!name) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s must be set " \ - "for writing.\n", \ - #name); \ - return AVERROR_INVALIDDATA; \ - } \ - } while(0) +#define allocate(name, size) do { \ + if (!name) { \ + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s must be set " \ + "for writing.\n", #name); \ + return AVERROR_INVALIDDATA; \ + } \ + } while (0) #define FUNC(name) FUNC_SEI(name) #include "cbs_sei_syntax_template.c" @@ -485,1148 +471,1212 @@ static int cbs_h2645_read_more_rbsp_data(GetBitContext *gbc) { static int cbs_h2645_fragment_add_nals(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const H2645Packet *packet) { - int err, i; - - for(i = 0; i < packet->nb_nals; i++) { - const H2645NAL *nal = &packet->nals[i]; - AVBufferRef *ref; - size_t size = nal->size; - - if(nal->nuh_layer_id > 0) - continue; - - // Remove trailing zeroes. - while(size > 0 && nal->data[size - 1] == 0) - --size; - if(size == 0) { - av_log(ctx->log_ctx, AV_LOG_VERBOSE, "Discarding empty 0 NAL unit\n"); - continue; - } + CodedBitstreamFragment *frag, + const H2645Packet *packet) +{ + int err, i; + + for (i = 0; i < packet->nb_nals; i++) { + const H2645NAL *nal = &packet->nals[i]; + AVBufferRef *ref; + size_t size = nal->size; + + if (nal->nuh_layer_id > 0) + continue; + + // Remove trailing zeroes. 
+ while (size > 0 && nal->data[size - 1] == 0) + --size; + if (size == 0) { + av_log(ctx->log_ctx, AV_LOG_VERBOSE, "Discarding empty 0 NAL unit\n"); + continue; + } - ref = (nal->data == nal->raw_data) ? frag->data_ref : packet->rbsp.rbsp_buffer_ref; + ref = (nal->data == nal->raw_data) ? frag->data_ref + : packet->rbsp.rbsp_buffer_ref; - err = ff_cbs_insert_unit_data(frag, -1, nal->type, - (uint8_t *)nal->data, size, ref); - if(err < 0) - return err; - } + err = ff_cbs_append_unit_data(frag, nal->type, + (uint8_t*)nal->data, size, ref); + if (err < 0) + return err; + } - return 0; + return 0; } static int cbs_h2645_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) { - enum AVCodecID codec_id = ctx->codec->codec_id; - CodedBitstreamH2645Context *priv = ctx->priv_data; - GetByteContext gbc; - int err; - - av_assert0(frag->data && frag->nb_units == 0); - if(frag->data_size == 0) - return 0; + CodedBitstreamFragment *frag, + int header) +{ + enum AVCodecID codec_id = ctx->codec->codec_id; + CodedBitstreamH2645Context *priv = ctx->priv_data; + GetByteContext gbc; + int err; + + av_assert0(frag->data && frag->nb_units == 0); + if (frag->data_size == 0) + return 0; + + if (header && frag->data[0] && codec_id == AV_CODEC_ID_H264) { + // AVCC header. + size_t size, start, end; + int i, count, version; + + priv->mp4 = 1; + + bytestream2_init(&gbc, frag->data, frag->data_size); + + if (bytestream2_get_bytes_left(&gbc) < 6) + return AVERROR_INVALIDDATA; + + version = bytestream2_get_byte(&gbc); + if (version != 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid AVCC header: " + "first byte %u.\n", version); + return AVERROR_INVALIDDATA; + } - if(header && frag->data[0] && codec_id == AV_CODEC_ID_H264) { - // AVCC header. - size_t size, start, end; - int i, count, version; + bytestream2_skip(&gbc, 3); + priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1; + + // SPS array. 
+ count = bytestream2_get_byte(&gbc) & 0x1f; + start = bytestream2_tell(&gbc); + for (i = 0; i < count; i++) { + if (bytestream2_get_bytes_left(&gbc) < 2 * (count - i)) + return AVERROR_INVALIDDATA; + size = bytestream2_get_be16(&gbc); + if (bytestream2_get_bytes_left(&gbc) < size) + return AVERROR_INVALIDDATA; + bytestream2_skip(&gbc, size); + } + end = bytestream2_tell(&gbc); + + err = ff_h2645_packet_split(&priv->read_packet, + frag->data + start, end - start, + ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1, 1); + if (err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC SPS array.\n"); + return err; + } + err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); + if (err < 0) + return err; + + // PPS array. + count = bytestream2_get_byte(&gbc); + start = bytestream2_tell(&gbc); + for (i = 0; i < count; i++) { + if (bytestream2_get_bytes_left(&gbc) < 2 * (count - i)) + return AVERROR_INVALIDDATA; + size = bytestream2_get_be16(&gbc); + if (bytestream2_get_bytes_left(&gbc) < size) + return AVERROR_INVALIDDATA; + bytestream2_skip(&gbc, size); + } + end = bytestream2_tell(&gbc); + + err = ff_h2645_packet_split(&priv->read_packet, + frag->data + start, end - start, + ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1, 1); + if (err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC PPS array.\n"); + return err; + } + err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); + if (err < 0) + return err; - priv->mp4 = 1; + if (bytestream2_get_bytes_left(&gbc) > 0) { + av_log(ctx->log_ctx, AV_LOG_WARNING, "%u bytes left at end of AVCC " + "header.\n", bytestream2_get_bytes_left(&gbc)); + } - bytestream2_init(&gbc, frag->data, frag->data_size); + } else if (header && frag->data[0] && codec_id == AV_CODEC_ID_HEVC) { + // HVCC header. 
+ size_t size, start, end; + int i, j, nb_arrays, nal_unit_type, nb_nals, version; - if(bytestream2_get_bytes_left(&gbc) < 6) - return AVERROR_INVALIDDATA; + priv->mp4 = 1; - version = bytestream2_get_byte(&gbc); - if(version != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid AVCC header: " - "first byte %u.\n", - version); - return AVERROR_INVALIDDATA; - } + bytestream2_init(&gbc, frag->data, frag->data_size); - bytestream2_skip(&gbc, 3); - priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1; + if (bytestream2_get_bytes_left(&gbc) < 23) + return AVERROR_INVALIDDATA; - // SPS array. - count = bytestream2_get_byte(&gbc) & 0x1f; - start = bytestream2_tell(&gbc); - for(i = 0; i < count; i++) { - if(bytestream2_get_bytes_left(&gbc) < 2 * (count - i)) - return AVERROR_INVALIDDATA; - size = bytestream2_get_be16(&gbc); - if(bytestream2_get_bytes_left(&gbc) < size) - return AVERROR_INVALIDDATA; - bytestream2_skip(&gbc, size); - } - end = bytestream2_tell(&gbc); - - err = ff_h2645_packet_split(&priv->read_packet, - frag->data + start, end - start, - ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1, 1); - if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC SPS array.\n"); - return err; - } - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if(err < 0) - return err; - - // PPS array. 
- count = bytestream2_get_byte(&gbc); - start = bytestream2_tell(&gbc); - for(i = 0; i < count; i++) { - if(bytestream2_get_bytes_left(&gbc) < 2 * (count - i)) - return AVERROR_INVALIDDATA; - size = bytestream2_get_be16(&gbc); - if(bytestream2_get_bytes_left(&gbc) < size) - return AVERROR_INVALIDDATA; - bytestream2_skip(&gbc, size); - } - end = bytestream2_tell(&gbc); - - err = ff_h2645_packet_split(&priv->read_packet, - frag->data + start, end - start, - ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1, 1); - if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC PPS array.\n"); - return err; - } - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if(err < 0) - return err; - - if(bytestream2_get_bytes_left(&gbc) > 0) { - av_log(ctx->log_ctx, AV_LOG_WARNING, "%u bytes left at end of AVCC " - "header.\n", - bytestream2_get_bytes_left(&gbc)); - } - } - else if(header && frag->data[0] && codec_id == AV_CODEC_ID_HEVC) { - // HVCC header. - size_t size, start, end; - int i, j, nb_arrays, nal_unit_type, nb_nals, version; + version = bytestream2_get_byte(&gbc); + if (version != 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid HVCC header: " + "first byte %u.\n", version); + return AVERROR_INVALIDDATA; + } - priv->mp4 = 1; + bytestream2_skip(&gbc, 20); + priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1; + + nb_arrays = bytestream2_get_byte(&gbc); + for (i = 0; i < nb_arrays; i++) { + nal_unit_type = bytestream2_get_byte(&gbc) & 0x3f; + nb_nals = bytestream2_get_be16(&gbc); + + start = bytestream2_tell(&gbc); + for (j = 0; j < nb_nals; j++) { + if (bytestream2_get_bytes_left(&gbc) < 2) + return AVERROR_INVALIDDATA; + size = bytestream2_get_be16(&gbc); + if (bytestream2_get_bytes_left(&gbc) < size) + return AVERROR_INVALIDDATA; + bytestream2_skip(&gbc, size); + } + end = bytestream2_tell(&gbc); + + err = ff_h2645_packet_split(&priv->read_packet, + frag->data + start, end - start, + ctx->log_ctx, 1, 2, AV_CODEC_ID_HEVC, 1, 1); + if 
(err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split " + "HVCC array %d (%d NAL units of type %d).\n", + i, nb_nals, nal_unit_type); + return err; + } + err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); + if (err < 0) + return err; + } - bytestream2_init(&gbc, frag->data, frag->data_size); + } else { + // Annex B, or later MP4 with already-known parameters. - if(bytestream2_get_bytes_left(&gbc) < 23) - return AVERROR_INVALIDDATA; + err = ff_h2645_packet_split(&priv->read_packet, + frag->data, frag->data_size, + ctx->log_ctx, + priv->mp4, priv->nal_length_size, + codec_id, 1, 1); + if (err < 0) + return err; - version = bytestream2_get_byte(&gbc); - if(version != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid HVCC header: " - "first byte %u.\n", - version); - return AVERROR_INVALIDDATA; + err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); + if (err < 0) + return err; } - bytestream2_skip(&gbc, 20); - priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1; - - nb_arrays = bytestream2_get_byte(&gbc); - for(i = 0; i < nb_arrays; i++) { - nal_unit_type = bytestream2_get_byte(&gbc) & 0x3f; - nb_nals = bytestream2_get_be16(&gbc); - - start = bytestream2_tell(&gbc); - for(j = 0; j < nb_nals; j++) { - if(bytestream2_get_bytes_left(&gbc) < 2) - return AVERROR_INVALIDDATA; - size = bytestream2_get_be16(&gbc); - if(bytestream2_get_bytes_left(&gbc) < size) - return AVERROR_INVALIDDATA; - bytestream2_skip(&gbc, size); - } - end = bytestream2_tell(&gbc); - - err = ff_h2645_packet_split(&priv->read_packet, - frag->data + start, end - start, - ctx->log_ctx, 1, 2, AV_CODEC_ID_HEVC, 1, 1); - if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split " - "HVCC array %d (%d NAL units of type %d).\n", - i, nb_nals, nal_unit_type); - return err; - } - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if(err < 0) - return err; - } - } - else { - // Annex B, or later MP4 with already-known parameters. 
- - err = ff_h2645_packet_split(&priv->read_packet, - frag->data, frag->data_size, - ctx->log_ctx, - priv->mp4, priv->nal_length_size, - codec_id, 1, 1); - if(err < 0) - return err; - - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if(err < 0) - return err; - } - - return 0; + return 0; } -#define cbs_h2645_replace_ps(h26n, ps_name, ps_var, id_element) \ - static int cbs_h26##h26n##_replace_##ps_var(CodedBitstreamContext *ctx, \ - CodedBitstreamUnit *unit) { \ - CodedBitstreamH26##h26n##Context *priv = ctx->priv_data; \ - H26##h26n##Raw##ps_name *ps_var = unit->content; \ - unsigned int id = ps_var->id_element; \ - int err; \ - if(id >= FF_ARRAY_ELEMS(priv->ps_var)) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid " #ps_name " id : %d.\n", id); \ - return AVERROR_INVALIDDATA; \ - } \ - err = ff_cbs_make_unit_refcounted(ctx, unit); \ - if(err < 0) \ - return err; \ - if(priv->ps_var[id] == priv->active_##ps_var) \ - priv->active_##ps_var = NULL; \ - av_buffer_unref(&priv->ps_var##_ref[id]); \ - av_assert0(unit->content_ref); \ - priv->ps_var##_ref[id] = av_buffer_ref(unit->content_ref); \ - if(!priv->ps_var##_ref[id]) \ - return AVERROR(ENOMEM); \ - priv->ps_var[id] = (H26##h26n##Raw##ps_name *)priv->ps_var##_ref[id]->data; \ - return 0; \ - } +#define cbs_h2645_replace_ps(h26n, ps_name, ps_var, id_element) \ +static int cbs_h26 ## h26n ## _replace_ ## ps_var(CodedBitstreamContext *ctx, \ + CodedBitstreamUnit *unit) \ +{ \ + CodedBitstreamH26 ## h26n ## Context *priv = ctx->priv_data; \ + H26 ## h26n ## Raw ## ps_name *ps_var = unit->content; \ + unsigned int id = ps_var->id_element; \ + int err; \ + if (id >= FF_ARRAY_ELEMS(priv->ps_var)) { \ + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid " #ps_name \ + " id : %d.\n", id); \ + return AVERROR_INVALIDDATA; \ + } \ + err = ff_cbs_make_unit_refcounted(ctx, unit); \ + if (err < 0) \ + return err; \ + if (priv->ps_var[id] == priv->active_ ## ps_var) \ + priv->active_ ## ps_var = NULL ; \ + 
av_buffer_unref(&priv->ps_var ## _ref[id]); \ + av_assert0(unit->content_ref); \ + priv->ps_var ## _ref[id] = av_buffer_ref(unit->content_ref); \ + if (!priv->ps_var ## _ref[id]) \ + return AVERROR(ENOMEM); \ + priv->ps_var[id] = (H26 ## h26n ## Raw ## ps_name *)priv->ps_var ## _ref[id]->data; \ + return 0; \ +} cbs_h2645_replace_ps(4, SPS, sps, seq_parameter_set_id) - cbs_h2645_replace_ps(4, PPS, pps, pic_parameter_set_id) - cbs_h2645_replace_ps(5, VPS, vps, vps_video_parameter_set_id) - cbs_h2645_replace_ps(5, SPS, sps, sps_seq_parameter_set_id) - cbs_h2645_replace_ps(5, PPS, pps, pps_pic_parameter_set_id) - - static int cbs_h264_read_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if(err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if(err < 0) - return err; - - switch(unit->type) { - case H264_NAL_SPS: { - H264RawSPS *sps = unit->content; - - err = cbs_h264_read_sps(ctx, &gbc, sps); - if(err < 0) - return err; - - err = cbs_h264_replace_sps(ctx, unit); - if(err < 0) - return err; - } break; - - case H264_NAL_SPS_EXT: { - err = cbs_h264_read_sps_extension(ctx, &gbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_PPS: { - H264RawPPS *pps = unit->content; - - err = cbs_h264_read_pps(ctx, &gbc, pps); - if(err < 0) - return err; - - err = cbs_h264_replace_pps(ctx, unit); - if(err < 0) - return err; - } break; - - case H264_NAL_SLICE: - case H264_NAL_IDR_SLICE: - case H264_NAL_AUXILIARY_SLICE: { - H264RawSlice *slice = unit->content; - int pos, len; - - err = cbs_h264_read_slice_header(ctx, &gbc, &slice->header); - if(err < 0) - return err; - - if(!cbs_h2645_read_more_rbsp_data(&gbc)) - return AVERROR_INVALIDDATA; - - pos = get_bits_count(&gbc); - len = unit->data_size; - - slice->data_size = len - pos / 8; - slice->data_ref = av_buffer_ref(unit->data_ref); - if(!slice->data_ref) - return 
AVERROR(ENOMEM); - slice->data = unit->data + pos / 8; - slice->data_bit_start = pos % 8; - } break; - - case H264_NAL_AUD: { - err = cbs_h264_read_aud(ctx, &gbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_SEI: { - err = cbs_h264_read_sei(ctx, &gbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_FILLER_DATA: { - err = cbs_h264_read_filler(ctx, &gbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_END_SEQUENCE: - case H264_NAL_END_STREAM: { - err = (unit->type == H264_NAL_END_SEQUENCE ? - cbs_h264_read_end_of_sequence : - cbs_h264_read_end_of_stream)(ctx, &gbc, unit->content); - if(err < 0) - return err; - } break; - - default: - return AVERROR(ENOSYS); - } - - return 0; +cbs_h2645_replace_ps(4, PPS, pps, pic_parameter_set_id) +cbs_h2645_replace_ps(5, VPS, vps, vps_video_parameter_set_id) +cbs_h2645_replace_ps(5, SPS, sps, sps_seq_parameter_set_id) +cbs_h2645_replace_ps(5, PPS, pps, pps_pic_parameter_set_id) + +static int cbs_h264_read_nal_unit(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit) +{ + GetBitContext gbc; + int err; + + err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); + if (err < 0) + return err; + + err = ff_cbs_alloc_unit_content2(ctx, unit); + if (err < 0) + return err; + + switch (unit->type) { + case H264_NAL_SPS: + { + H264RawSPS *sps = unit->content; + + err = cbs_h264_read_sps(ctx, &gbc, sps); + if (err < 0) + return err; + + err = cbs_h264_replace_sps(ctx, unit); + if (err < 0) + return err; + } + break; + + case H264_NAL_SPS_EXT: + { + err = cbs_h264_read_sps_extension(ctx, &gbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_PPS: + { + H264RawPPS *pps = unit->content; + + err = cbs_h264_read_pps(ctx, &gbc, pps); + if (err < 0) + return err; + + err = cbs_h264_replace_pps(ctx, unit); + if (err < 0) + return err; + } + break; + + case H264_NAL_SLICE: + case H264_NAL_IDR_SLICE: + case H264_NAL_AUXILIARY_SLICE: + { + 
H264RawSlice *slice = unit->content; + int pos, len; + + err = cbs_h264_read_slice_header(ctx, &gbc, &slice->header); + if (err < 0) + return err; + + if (!cbs_h2645_read_more_rbsp_data(&gbc)) + return AVERROR_INVALIDDATA; + + pos = get_bits_count(&gbc); + len = unit->data_size; + + slice->data_size = len - pos / 8; + slice->data_ref = av_buffer_ref(unit->data_ref); + if (!slice->data_ref) + return AVERROR(ENOMEM); + slice->data = unit->data + pos / 8; + slice->data_bit_start = pos % 8; + } + break; + + case H264_NAL_AUD: + { + err = cbs_h264_read_aud(ctx, &gbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_SEI: + { + err = cbs_h264_read_sei(ctx, &gbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_FILLER_DATA: + { + err = cbs_h264_read_filler(ctx, &gbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_END_SEQUENCE: + case H264_NAL_END_STREAM: + { + err = (unit->type == H264_NAL_END_SEQUENCE ? + cbs_h264_read_end_of_sequence : + cbs_h264_read_end_of_stream)(ctx, &gbc, unit->content); + if (err < 0) + return err; + } + break; + + default: + return AVERROR(ENOSYS); + } + + return 0; } static int cbs_h265_read_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if(err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if(err < 0) - return err; - - switch(unit->type) { - case HEVC_NAL_VPS: { - H265RawVPS *vps = unit->content; - - err = cbs_h265_read_vps(ctx, &gbc, vps); - if(err < 0) - return err; - - err = cbs_h265_replace_vps(ctx, unit); - if(err < 0) - return err; - } break; - case HEVC_NAL_SPS: { - H265RawSPS *sps = unit->content; - - err = cbs_h265_read_sps(ctx, &gbc, sps); - if(err < 0) - return err; - - err = cbs_h265_replace_sps(ctx, unit); - if(err < 0) - return err; - } break; - - case HEVC_NAL_PPS: { - H265RawPPS *pps = unit->content; - - err = 
cbs_h265_read_pps(ctx, &gbc, pps); - if(err < 0) - return err; - - err = cbs_h265_replace_pps(ctx, unit); - if(err < 0) - return err; - } break; - - case HEVC_NAL_TRAIL_N: - case HEVC_NAL_TRAIL_R: - case HEVC_NAL_TSA_N: - case HEVC_NAL_TSA_R: - case HEVC_NAL_STSA_N: - case HEVC_NAL_STSA_R: - case HEVC_NAL_RADL_N: - case HEVC_NAL_RADL_R: - case HEVC_NAL_RASL_N: - case HEVC_NAL_RASL_R: - case HEVC_NAL_BLA_W_LP: - case HEVC_NAL_BLA_W_RADL: - case HEVC_NAL_BLA_N_LP: - case HEVC_NAL_IDR_W_RADL: - case HEVC_NAL_IDR_N_LP: - case HEVC_NAL_CRA_NUT: { - H265RawSlice *slice = unit->content; - int pos, len; - - err = cbs_h265_read_slice_segment_header(ctx, &gbc, &slice->header); - if(err < 0) - return err; - - if(!cbs_h2645_read_more_rbsp_data(&gbc)) - return AVERROR_INVALIDDATA; - - pos = get_bits_count(&gbc); - len = unit->data_size; - - slice->data_size = len - pos / 8; - slice->data_ref = av_buffer_ref(unit->data_ref); - if(!slice->data_ref) - return AVERROR(ENOMEM); - slice->data = unit->data + pos / 8; - slice->data_bit_start = pos % 8; - } break; - - case HEVC_NAL_AUD: { - err = cbs_h265_read_aud(ctx, &gbc, unit->content); - if(err < 0) - return err; - } break; - - case HEVC_NAL_SEI_PREFIX: - case HEVC_NAL_SEI_SUFFIX: { - err = cbs_h265_read_sei(ctx, &gbc, unit->content, - unit->type == HEVC_NAL_SEI_PREFIX); - - if(err < 0) - return err; - } break; - - default: - return AVERROR(ENOSYS); - } - - return 0; -} + CodedBitstreamUnit *unit) +{ + GetBitContext gbc; + int err; -static int cbs_h2645_write_slice_data(CodedBitstreamContext *ctx, - PutBitContext *pbc, const uint8_t *data, - size_t data_size, int data_bit_start) { - size_t rest = data_size - (data_bit_start + 7) / 8; - const uint8_t *pos = data + data_bit_start / 8; - - av_assert0(data_bit_start >= 0 && - data_size > data_bit_start / 8); - - if(data_size * 8 + 8 > put_bits_left(pbc)) - return AVERROR(ENOSPC); - - if(!rest) - goto rbsp_stop_one_bit; - - // First copy the remaining bits of the first byte - // The 
above check ensures that we do not accidentally - // copy beyond the rbsp_stop_one_bit. - if(data_bit_start % 8) - put_bits(pbc, 8 - data_bit_start % 8, - *pos++ & MAX_UINT_BITS(8 - data_bit_start % 8)); - - if(put_bits_count(pbc) % 8 == 0) { - // If the writer is aligned at this point, - // memcpy can be used to improve performance. - // This happens normally for CABAC. - flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), pos, rest); - skip_put_bytes(pbc, rest); - } - else { - // If not, we have to copy manually. - // rbsp_stop_one_bit forces us to special-case - // the last byte. - uint8_t temp; - int i; + err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); + if (err < 0) + return err; + + err = ff_cbs_alloc_unit_content2(ctx, unit); + if (err < 0) + return err; + + switch (unit->type) { + case HEVC_NAL_VPS: + { + H265RawVPS *vps = unit->content; + + err = cbs_h265_read_vps(ctx, &gbc, vps); + if (err < 0) + return err; + + err = cbs_h265_replace_vps(ctx, unit); + if (err < 0) + return err; + } + break; + case HEVC_NAL_SPS: + { + H265RawSPS *sps = unit->content; + + err = cbs_h265_read_sps(ctx, &gbc, sps); + if (err < 0) + return err; + + err = cbs_h265_replace_sps(ctx, unit); + if (err < 0) + return err; + } + break; - for(; rest > 4; rest -= 4, pos += 4) - put_bits32(pbc, AV_RB32(pos)); + case HEVC_NAL_PPS: + { + H265RawPPS *pps = unit->content; - for(; rest > 1; rest--, pos++) - put_bits(pbc, 8, *pos); + err = cbs_h265_read_pps(ctx, &gbc, pps); + if (err < 0) + return err; - rbsp_stop_one_bit: - temp = rest ? 
*pos : *pos & MAX_UINT_BITS(8 - data_bit_start % 8); + err = cbs_h265_replace_pps(ctx, unit); + if (err < 0) + return err; + } + break; + + case HEVC_NAL_TRAIL_N: + case HEVC_NAL_TRAIL_R: + case HEVC_NAL_TSA_N: + case HEVC_NAL_TSA_R: + case HEVC_NAL_STSA_N: + case HEVC_NAL_STSA_R: + case HEVC_NAL_RADL_N: + case HEVC_NAL_RADL_R: + case HEVC_NAL_RASL_N: + case HEVC_NAL_RASL_R: + case HEVC_NAL_BLA_W_LP: + case HEVC_NAL_BLA_W_RADL: + case HEVC_NAL_BLA_N_LP: + case HEVC_NAL_IDR_W_RADL: + case HEVC_NAL_IDR_N_LP: + case HEVC_NAL_CRA_NUT: + { + H265RawSlice *slice = unit->content; + int pos, len; + + err = cbs_h265_read_slice_segment_header(ctx, &gbc, &slice->header); + if (err < 0) + return err; + + if (!cbs_h2645_read_more_rbsp_data(&gbc)) + return AVERROR_INVALIDDATA; + + pos = get_bits_count(&gbc); + len = unit->data_size; + + slice->data_size = len - pos / 8; + slice->data_ref = av_buffer_ref(unit->data_ref); + if (!slice->data_ref) + return AVERROR(ENOMEM); + slice->data = unit->data + pos / 8; + slice->data_bit_start = pos % 8; + } + break; - av_assert0(temp); - i = ff_ctz(*pos); - temp = temp >> i; - i = rest ? 
(8 - i) : (8 - i - data_bit_start % 8); - put_bits(pbc, i, temp); - if(put_bits_count(pbc) % 8) - put_bits(pbc, 8 - put_bits_count(pbc) % 8, 0); - } + case HEVC_NAL_AUD: + { + err = cbs_h265_read_aud(ctx, &gbc, unit->content); + if (err < 0) + return err; + } + break; + + case HEVC_NAL_SEI_PREFIX: + case HEVC_NAL_SEI_SUFFIX: + { + err = cbs_h265_read_sei(ctx, &gbc, unit->content, + unit->type == HEVC_NAL_SEI_PREFIX); + + if (err < 0) + return err; + } + break; - return 0; + default: + return AVERROR(ENOSYS); + } + + return 0; } -static int cbs_h264_write_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - int err; - - switch(unit->type) { - case H264_NAL_SPS: { - H264RawSPS *sps = unit->content; - - err = cbs_h264_write_sps(ctx, pbc, sps); - if(err < 0) - return err; - - err = cbs_h264_replace_sps(ctx, unit); - if(err < 0) - return err; - } break; - - case H264_NAL_SPS_EXT: { - H264RawSPSExtension *sps_ext = unit->content; - - err = cbs_h264_write_sps_extension(ctx, pbc, sps_ext); - if(err < 0) - return err; - } break; - - case H264_NAL_PPS: { - H264RawPPS *pps = unit->content; - - err = cbs_h264_write_pps(ctx, pbc, pps); - if(err < 0) - return err; - - err = cbs_h264_replace_pps(ctx, unit); - if(err < 0) - return err; - } break; - - case H264_NAL_SLICE: - case H264_NAL_IDR_SLICE: - case H264_NAL_AUXILIARY_SLICE: { - H264RawSlice *slice = unit->content; - - err = cbs_h264_write_slice_header(ctx, pbc, &slice->header); - if(err < 0) - return err; - - if(slice->data) { - err = cbs_h2645_write_slice_data(ctx, pbc, slice->data, - slice->data_size, - slice->data_bit_start); - if(err < 0) - return err; +static int cbs_h2645_write_slice_data(CodedBitstreamContext *ctx, + PutBitContext *pbc, const uint8_t *data, + size_t data_size, int data_bit_start) +{ + size_t rest = data_size - (data_bit_start + 7) / 8; + const uint8_t *pos = data + data_bit_start / 8; + + av_assert0(data_bit_start >= 0 && + data_size > data_bit_start / 8); + + if 
(data_size * 8 + 8 > put_bits_left(pbc)) + return AVERROR(ENOSPC); + + if (!rest) + goto rbsp_stop_one_bit; + + // First copy the remaining bits of the first byte + // The above check ensures that we do not accidentally + // copy beyond the rbsp_stop_one_bit. + if (data_bit_start % 8) + put_bits(pbc, 8 - data_bit_start % 8, + *pos++ & MAX_UINT_BITS(8 - data_bit_start % 8)); + + if (put_bits_count(pbc) % 8 == 0) { + // If the writer is aligned at this point, + // memcpy can be used to improve performance. + // This happens normally for CABAC. + flush_put_bits(pbc); + memcpy(put_bits_ptr(pbc), pos, rest); + skip_put_bytes(pbc, rest); + } else { + // If not, we have to copy manually. + // rbsp_stop_one_bit forces us to special-case + // the last byte. + uint8_t temp; + int i; + + for (; rest > 4; rest -= 4, pos += 4) + put_bits32(pbc, AV_RB32(pos)); + + for (; rest > 1; rest--, pos++) + put_bits(pbc, 8, *pos); + + rbsp_stop_one_bit: + temp = rest ? *pos : *pos & MAX_UINT_BITS(8 - data_bit_start % 8); + + av_assert0(temp); + i = ff_ctz(*pos); + temp = temp >> i; + i = rest ? (8 - i) : (8 - i - data_bit_start % 8); + put_bits(pbc, i, temp); + if (put_bits_count(pbc) % 8) + put_bits(pbc, 8 - put_bits_count(pbc) % 8, 0); } - else { - // No slice data - that was just the header. - // (Bitstream may be unaligned!) 
+ + return 0; +} + +static int cbs_h264_write_nal_unit(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + int err; + + switch (unit->type) { + case H264_NAL_SPS: + { + H264RawSPS *sps = unit->content; + + err = cbs_h264_write_sps(ctx, pbc, sps); + if (err < 0) + return err; + + err = cbs_h264_replace_sps(ctx, unit); + if (err < 0) + return err; + } + break; + + case H264_NAL_SPS_EXT: + { + H264RawSPSExtension *sps_ext = unit->content; + + err = cbs_h264_write_sps_extension(ctx, pbc, sps_ext); + if (err < 0) + return err; + } + break; + + case H264_NAL_PPS: + { + H264RawPPS *pps = unit->content; + + err = cbs_h264_write_pps(ctx, pbc, pps); + if (err < 0) + return err; + + err = cbs_h264_replace_pps(ctx, unit); + if (err < 0) + return err; + } + break; + + case H264_NAL_SLICE: + case H264_NAL_IDR_SLICE: + case H264_NAL_AUXILIARY_SLICE: + { + H264RawSlice *slice = unit->content; + + err = cbs_h264_write_slice_header(ctx, pbc, &slice->header); + if (err < 0) + return err; + + if (slice->data) { + err = cbs_h2645_write_slice_data(ctx, pbc, slice->data, + slice->data_size, + slice->data_bit_start); + if (err < 0) + return err; + } else { + // No slice data - that was just the header. + // (Bitstream may be unaligned!) 
+ } + } + break; + + case H264_NAL_AUD: + { + err = cbs_h264_write_aud(ctx, pbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_SEI: + { + err = cbs_h264_write_sei(ctx, pbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_FILLER_DATA: + { + err = cbs_h264_write_filler(ctx, pbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_END_SEQUENCE: + { + err = cbs_h264_write_end_of_sequence(ctx, pbc, unit->content); + if (err < 0) + return err; + } + break; + + case H264_NAL_END_STREAM: + { + err = cbs_h264_write_end_of_stream(ctx, pbc, unit->content); + if (err < 0) + return err; + } + break; + + default: + av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for " + "NAL unit type %"PRIu32".\n", unit->type); + return AVERROR_PATCHWELCOME; } - } break; - - case H264_NAL_AUD: { - err = cbs_h264_write_aud(ctx, pbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_SEI: { - err = cbs_h264_write_sei(ctx, pbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_FILLER_DATA: { - err = cbs_h264_write_filler(ctx, pbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_END_SEQUENCE: { - err = cbs_h264_write_end_of_sequence(ctx, pbc, unit->content); - if(err < 0) - return err; - } break; - - case H264_NAL_END_STREAM: { - err = cbs_h264_write_end_of_stream(ctx, pbc, unit->content); - if(err < 0) - return err; - } break; - - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for " - "NAL unit type %" PRIu32 ".\n", - unit->type); - return AVERROR_PATCHWELCOME; - } - - return 0; + + return 0; } static int cbs_h265_write_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - int err; - - switch(unit->type) { - case HEVC_NAL_VPS: { - H265RawVPS *vps = unit->content; - - err = cbs_h265_write_vps(ctx, pbc, vps); - if(err < 0) - return err; - - err = cbs_h265_replace_vps(ctx, unit); - 
if(err < 0) - return err; - } break; - - case HEVC_NAL_SPS: { - H265RawSPS *sps = unit->content; - - err = cbs_h265_write_sps(ctx, pbc, sps); - if(err < 0) - return err; - - err = cbs_h265_replace_sps(ctx, unit); - if(err < 0) - return err; - } break; - - case HEVC_NAL_PPS: { - H265RawPPS *pps = unit->content; - - err = cbs_h265_write_pps(ctx, pbc, pps); - if(err < 0) - return err; - - err = cbs_h265_replace_pps(ctx, unit); - if(err < 0) - return err; - } break; - - case HEVC_NAL_TRAIL_N: - case HEVC_NAL_TRAIL_R: - case HEVC_NAL_TSA_N: - case HEVC_NAL_TSA_R: - case HEVC_NAL_STSA_N: - case HEVC_NAL_STSA_R: - case HEVC_NAL_RADL_N: - case HEVC_NAL_RADL_R: - case HEVC_NAL_RASL_N: - case HEVC_NAL_RASL_R: - case HEVC_NAL_BLA_W_LP: - case HEVC_NAL_BLA_W_RADL: - case HEVC_NAL_BLA_N_LP: - case HEVC_NAL_IDR_W_RADL: - case HEVC_NAL_IDR_N_LP: - case HEVC_NAL_CRA_NUT: { - H265RawSlice *slice = unit->content; - - err = cbs_h265_write_slice_segment_header(ctx, pbc, &slice->header); - if(err < 0) - return err; - - if(slice->data) { - err = cbs_h2645_write_slice_data(ctx, pbc, slice->data, - slice->data_size, - slice->data_bit_start); - if(err < 0) - return err; - } - else { - // No slice data - that was just the header. 
+ CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + int err; + + switch (unit->type) { + case HEVC_NAL_VPS: + { + H265RawVPS *vps = unit->content; + + err = cbs_h265_write_vps(ctx, pbc, vps); + if (err < 0) + return err; + + err = cbs_h265_replace_vps(ctx, unit); + if (err < 0) + return err; + } + break; + + case HEVC_NAL_SPS: + { + H265RawSPS *sps = unit->content; + + err = cbs_h265_write_sps(ctx, pbc, sps); + if (err < 0) + return err; + + err = cbs_h265_replace_sps(ctx, unit); + if (err < 0) + return err; + } + break; + + case HEVC_NAL_PPS: + { + H265RawPPS *pps = unit->content; + + err = cbs_h265_write_pps(ctx, pbc, pps); + if (err < 0) + return err; + + err = cbs_h265_replace_pps(ctx, unit); + if (err < 0) + return err; + } + break; + + case HEVC_NAL_TRAIL_N: + case HEVC_NAL_TRAIL_R: + case HEVC_NAL_TSA_N: + case HEVC_NAL_TSA_R: + case HEVC_NAL_STSA_N: + case HEVC_NAL_STSA_R: + case HEVC_NAL_RADL_N: + case HEVC_NAL_RADL_R: + case HEVC_NAL_RASL_N: + case HEVC_NAL_RASL_R: + case HEVC_NAL_BLA_W_LP: + case HEVC_NAL_BLA_W_RADL: + case HEVC_NAL_BLA_N_LP: + case HEVC_NAL_IDR_W_RADL: + case HEVC_NAL_IDR_N_LP: + case HEVC_NAL_CRA_NUT: + { + H265RawSlice *slice = unit->content; + + err = cbs_h265_write_slice_segment_header(ctx, pbc, &slice->header); + if (err < 0) + return err; + + if (slice->data) { + err = cbs_h2645_write_slice_data(ctx, pbc, slice->data, + slice->data_size, + slice->data_bit_start); + if (err < 0) + return err; + } else { + // No slice data - that was just the header. 
+ } + } + break; + + case HEVC_NAL_AUD: + { + err = cbs_h265_write_aud(ctx, pbc, unit->content); + if (err < 0) + return err; + } + break; + + case HEVC_NAL_SEI_PREFIX: + case HEVC_NAL_SEI_SUFFIX: + { + err = cbs_h265_write_sei(ctx, pbc, unit->content, + unit->type == HEVC_NAL_SEI_PREFIX); + + if (err < 0) + return err; + } + break; + + default: + av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for " + "NAL unit type %"PRIu32".\n", unit->type); + return AVERROR_PATCHWELCOME; } - } break; - - case HEVC_NAL_AUD: { - err = cbs_h265_write_aud(ctx, pbc, unit->content); - if(err < 0) - return err; - } break; - - case HEVC_NAL_SEI_PREFIX: - case HEVC_NAL_SEI_SUFFIX: { - err = cbs_h265_write_sei(ctx, pbc, unit->content, - unit->type == HEVC_NAL_SEI_PREFIX); - - if(err < 0) - return err; - } break; - - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for " - "NAL unit type %" PRIu32 ".\n", - unit->type); - return AVERROR_PATCHWELCOME; - } - - return 0; + + return 0; } static int cbs_h2645_unit_requires_zero_byte(enum AVCodecID codec_id, - CodedBitstreamUnitType type, - int nal_unit_index) { - // Section B.1.2 in H.264, section B.2.2 in H.265. - if(nal_unit_index == 0) { - // Assume that this is the first NAL unit in an access unit. - return 1; - } - if(codec_id == AV_CODEC_ID_H264) - return type == H264_NAL_SPS || type == H264_NAL_PPS; - if(codec_id == AV_CODEC_ID_HEVC) - return type == HEVC_NAL_VPS || type == HEVC_NAL_SPS || type == HEVC_NAL_PPS; - return 0; + CodedBitstreamUnitType type, + int nal_unit_index) +{ + // Section B.1.2 in H.264, section B.2.2 in H.265. + if (nal_unit_index == 0) { + // Assume that this is the first NAL unit in an access unit. 
+ return 1; + } + if (codec_id == AV_CODEC_ID_H264) + return type == H264_NAL_SPS || type == H264_NAL_PPS; + if (codec_id == AV_CODEC_ID_HEVC) + return type == HEVC_NAL_VPS || type == HEVC_NAL_SPS || type == HEVC_NAL_PPS; + return 0; } static int cbs_h2645_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - uint8_t *data; - size_t max_size, dp, sp; - int err, i, zero_run; - - for(i = 0; i < frag->nb_units; i++) { - // Data should already all have been written when we get here. - av_assert0(frag->units[i].data); - } - - max_size = 0; - for(i = 0; i < frag->nb_units; i++) { - // Start code + content with worst-case emulation prevention. - max_size += 4 + frag->units[i].data_size * 3 / 2; - } - - data = av_realloc(NULL, max_size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!data) - return AVERROR(ENOMEM); - - dp = 0; - for(i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - if(unit->data_bit_padding > 0) { - if(i < frag->nb_units - 1) - av_log(ctx->log_ctx, AV_LOG_WARNING, "Probably invalid " - "unaligned padding on non-final NAL unit.\n"); - else - frag->data_bit_padding = unit->data_bit_padding; + CodedBitstreamFragment *frag) +{ + uint8_t *data; + size_t max_size, dp, sp; + int err, i, zero_run; + + for (i = 0; i < frag->nb_units; i++) { + // Data should already all have been written when we get here. + av_assert0(frag->units[i].data); } - if(cbs_h2645_unit_requires_zero_byte(ctx->codec->codec_id, unit->type, i)) { - // zero_byte - data[dp++] = 0; + max_size = 0; + for (i = 0; i < frag->nb_units; i++) { + // Start code + content with worst-case emulation prevention. 
+ max_size += 4 + frag->units[i].data_size * 3 / 2; } - // start_code_prefix_one_3bytes - data[dp++] = 0; - data[dp++] = 0; - data[dp++] = 1; - - zero_run = 0; - for(sp = 0; sp < unit->data_size; sp++) { - if(zero_run < 2) { - if(unit->data[sp] == 0) - ++zero_run; - else - zero_run = 0; - } - else { - if((unit->data[sp] & ~3) == 0) { - // emulation_prevention_three_byte - data[dp++] = 3; + + data = av_realloc(NULL, max_size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!data) + return AVERROR(ENOMEM); + + dp = 0; + for (i = 0; i < frag->nb_units; i++) { + CodedBitstreamUnit *unit = &frag->units[i]; + + if (unit->data_bit_padding > 0) { + if (i < frag->nb_units - 1) + av_log(ctx->log_ctx, AV_LOG_WARNING, "Probably invalid " + "unaligned padding on non-final NAL unit.\n"); + else + frag->data_bit_padding = unit->data_bit_padding; + } + + if (cbs_h2645_unit_requires_zero_byte(ctx->codec->codec_id, unit->type, i)) { + // zero_byte + data[dp++] = 0; + } + // start_code_prefix_one_3bytes + data[dp++] = 0; + data[dp++] = 0; + data[dp++] = 1; + + zero_run = 0; + for (sp = 0; sp < unit->data_size; sp++) { + if (zero_run < 2) { + if (unit->data[sp] == 0) + ++zero_run; + else + zero_run = 0; + } else { + if ((unit->data[sp] & ~3) == 0) { + // emulation_prevention_three_byte + data[dp++] = 3; + } + zero_run = unit->data[sp] == 0; + } + data[dp++] = unit->data[sp]; } - zero_run = unit->data[sp] == 0; - } - data[dp++] = unit->data[sp]; } - } - av_assert0(dp <= max_size); - err = av_reallocp(&data, dp + AV_INPUT_BUFFER_PADDING_SIZE); - if(err) - return err; - memset(data + dp, 0, AV_INPUT_BUFFER_PADDING_SIZE); + av_assert0(dp <= max_size); + err = av_reallocp(&data, dp + AV_INPUT_BUFFER_PADDING_SIZE); + if (err) + return err; + memset(data + dp, 0, AV_INPUT_BUFFER_PADDING_SIZE); - frag->data_ref = av_buffer_create(data, dp + AV_INPUT_BUFFER_PADDING_SIZE, - NULL, NULL, 0); - if(!frag->data_ref) { - av_freep(&data); - return AVERROR(ENOMEM); - } + frag->data_ref = av_buffer_create(data, 
dp + AV_INPUT_BUFFER_PADDING_SIZE, + NULL, NULL, 0); + if (!frag->data_ref) { + av_freep(&data); + return AVERROR(ENOMEM); + } - frag->data = data; - frag->data_size = dp; + frag->data = data; + frag->data_size = dp; - return 0; + return 0; } -static void cbs_h264_flush(CodedBitstreamContext *ctx) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - - for(int i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++) { - av_buffer_unref(&h264->sps_ref[i]); - h264->sps[i] = NULL; - } - for(int i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++) { - av_buffer_unref(&h264->pps_ref[i]); - h264->pps[i] = NULL; - } - - h264->active_sps = NULL; - h264->active_pps = NULL; - h264->last_slice_nal_unit_type = 0; +static void cbs_h264_flush(CodedBitstreamContext *ctx) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + + for (int i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++) { + av_buffer_unref(&h264->sps_ref[i]); + h264->sps[i] = NULL; + } + for (int i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++) { + av_buffer_unref(&h264->pps_ref[i]); + h264->pps[i] = NULL; + } + + h264->active_sps = NULL; + h264->active_pps = NULL; + h264->last_slice_nal_unit_type = 0; } -static void cbs_h264_close(CodedBitstreamContext *ctx) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - int i; +static void cbs_h264_close(CodedBitstreamContext *ctx) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + int i; - ff_h2645_packet_uninit(&h264->common.read_packet); + ff_h2645_packet_uninit(&h264->common.read_packet); - for(i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++) - av_buffer_unref(&h264->sps_ref[i]); - for(i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++) - av_buffer_unref(&h264->pps_ref[i]); + for (i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++) + av_buffer_unref(&h264->sps_ref[i]); + for (i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++) + av_buffer_unref(&h264->pps_ref[i]); } -static void cbs_h265_flush(CodedBitstreamContext *ctx) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - - for(int i = 0; i < FF_ARRAY_ELEMS(h265->vps); 
i++) { - av_buffer_unref(&h265->vps_ref[i]); - h265->vps[i] = NULL; - } - for(int i = 0; i < FF_ARRAY_ELEMS(h265->sps); i++) { - av_buffer_unref(&h265->sps_ref[i]); - h265->sps[i] = NULL; - } - for(int i = 0; i < FF_ARRAY_ELEMS(h265->pps); i++) { - av_buffer_unref(&h265->pps_ref[i]); - h265->pps[i] = NULL; - } - - h265->active_vps = NULL; - h265->active_sps = NULL; - h265->active_pps = NULL; +static void cbs_h265_flush(CodedBitstreamContext *ctx) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + + for (int i = 0; i < FF_ARRAY_ELEMS(h265->vps); i++) { + av_buffer_unref(&h265->vps_ref[i]); + h265->vps[i] = NULL; + } + for (int i = 0; i < FF_ARRAY_ELEMS(h265->sps); i++) { + av_buffer_unref(&h265->sps_ref[i]); + h265->sps[i] = NULL; + } + for (int i = 0; i < FF_ARRAY_ELEMS(h265->pps); i++) { + av_buffer_unref(&h265->pps_ref[i]); + h265->pps[i] = NULL; + } + + h265->active_vps = NULL; + h265->active_sps = NULL; + h265->active_pps = NULL; } -static void cbs_h265_close(CodedBitstreamContext *ctx) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - int i; +static void cbs_h265_close(CodedBitstreamContext *ctx) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + int i; - ff_h2645_packet_uninit(&h265->common.read_packet); + ff_h2645_packet_uninit(&h265->common.read_packet); - for(i = 0; i < FF_ARRAY_ELEMS(h265->vps); i++) - av_buffer_unref(&h265->vps_ref[i]); - for(i = 0; i < FF_ARRAY_ELEMS(h265->sps); i++) - av_buffer_unref(&h265->sps_ref[i]); - for(i = 0; i < FF_ARRAY_ELEMS(h265->pps); i++) - av_buffer_unref(&h265->pps_ref[i]); + for (i = 0; i < FF_ARRAY_ELEMS(h265->vps); i++) + av_buffer_unref(&h265->vps_ref[i]); + for (i = 0; i < FF_ARRAY_ELEMS(h265->sps); i++) + av_buffer_unref(&h265->sps_ref[i]); + for (i = 0; i < FF_ARRAY_ELEMS(h265->pps); i++) + av_buffer_unref(&h265->pps_ref[i]); } -static void cbs_h264_free_sei(void *opaque, uint8_t *content) { - H264RawSEI *sei = (H264RawSEI *)content; - ff_cbs_sei_free_message_list(&sei->message_list); - 
av_free(content); +static void cbs_h264_free_sei(void *opaque, uint8_t *content) +{ + H264RawSEI *sei = (H264RawSEI*)content; + ff_cbs_sei_free_message_list(&sei->message_list); + av_free(content); } static const CodedBitstreamUnitTypeDescriptor cbs_h264_unit_types[] = { - CBS_UNIT_TYPE_POD(H264_NAL_SPS, H264RawSPS), - CBS_UNIT_TYPE_POD(H264_NAL_SPS_EXT, H264RawSPSExtension), - - CBS_UNIT_TYPE_INTERNAL_REF(H264_NAL_PPS, H264RawPPS, slice_group_id), - - { - .nb_unit_types = 3, - .unit_types = { - H264_NAL_IDR_SLICE, - H264_NAL_SLICE, - H264_NAL_AUXILIARY_SLICE, + CBS_UNIT_TYPE_POD(H264_NAL_SPS, H264RawSPS), + CBS_UNIT_TYPE_POD(H264_NAL_SPS_EXT, H264RawSPSExtension), + + CBS_UNIT_TYPE_INTERNAL_REF(H264_NAL_PPS, H264RawPPS, slice_group_id), + + { + .nb_unit_types = 3, + .unit_types = { + H264_NAL_IDR_SLICE, + H264_NAL_SLICE, + H264_NAL_AUXILIARY_SLICE, + }, + .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, + .content_size = sizeof(H264RawSlice), + .nb_ref_offsets = 1, + .ref_offsets = { offsetof(H264RawSlice, data) }, }, - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(H264RawSlice), - .nb_ref_offsets = 1, - .ref_offsets = { offsetof(H264RawSlice, data) }, - }, - CBS_UNIT_TYPE_POD(H264_NAL_AUD, H264RawAUD), CBS_UNIT_TYPE_POD(H264_NAL_FILLER_DATA, H264RawFiller), CBS_UNIT_TYPE_POD(H264_NAL_END_SEQUENCE, H264RawNALUnitHeader), CBS_UNIT_TYPE_POD(H264_NAL_END_STREAM, H264RawNALUnitHeader), + CBS_UNIT_TYPE_POD(H264_NAL_AUD, H264RawAUD), + CBS_UNIT_TYPE_POD(H264_NAL_FILLER_DATA, H264RawFiller), + CBS_UNIT_TYPE_POD(H264_NAL_END_SEQUENCE, H264RawNALUnitHeader), + CBS_UNIT_TYPE_POD(H264_NAL_END_STREAM, H264RawNALUnitHeader), - CBS_UNIT_TYPE_COMPLEX(H264_NAL_SEI, H264RawSEI, &cbs_h264_free_sei), + CBS_UNIT_TYPE_COMPLEX(H264_NAL_SEI, H264RawSEI, &cbs_h264_free_sei), - CBS_UNIT_TYPE_END_OF_LIST + CBS_UNIT_TYPE_END_OF_LIST }; -static void cbs_h265_free_sei(void *opaque, uint8_t *content) { - H265RawSEI *sei = (H265RawSEI *)content; - 
ff_cbs_sei_free_message_list(&sei->message_list); - av_free(content); +static void cbs_h265_free_sei(void *opaque, uint8_t *content) +{ + H265RawSEI *sei = (H265RawSEI*)content; + ff_cbs_sei_free_message_list(&sei->message_list); + av_free(content); } static const CodedBitstreamUnitTypeDescriptor cbs_h265_unit_types[] = { - CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_VPS, H265RawVPS, extension_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_SPS, H265RawSPS, extension_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_PPS, H265RawPPS, extension_data.data), - - CBS_UNIT_TYPE_POD(HEVC_NAL_AUD, H265RawAUD), - - { - // Slices of non-IRAP pictures. - .nb_unit_types = CBS_UNIT_TYPE_RANGE, - .unit_type_range_start = HEVC_NAL_TRAIL_N, - .unit_type_range_end = HEVC_NAL_RASL_R, - - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(H265RawSlice), - .nb_ref_offsets = 1, - .ref_offsets = { offsetof(H265RawSlice, data) }, - }, - - { - // Slices of IRAP pictures. - .nb_unit_types = CBS_UNIT_TYPE_RANGE, - .unit_type_range_start = HEVC_NAL_BLA_W_LP, - .unit_type_range_end = HEVC_NAL_CRA_NUT, - - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(H265RawSlice), - .nb_ref_offsets = 1, - .ref_offsets = { offsetof(H265RawSlice, data) }, - }, - - { - .nb_unit_types = 2, - .unit_types = { - HEVC_NAL_SEI_PREFIX, - HEVC_NAL_SEI_SUFFIX }, - .content_type = CBS_CONTENT_TYPE_COMPLEX, - .content_size = sizeof(H265RawSEI), - .content_free = &cbs_h265_free_sei, - }, - - CBS_UNIT_TYPE_END_OF_LIST + CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_VPS, H265RawVPS, extension_data.data), + CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_SPS, H265RawSPS, extension_data.data), + CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_PPS, H265RawPPS, extension_data.data), + + CBS_UNIT_TYPE_POD(HEVC_NAL_AUD, H265RawAUD), + + { + // Slices of non-IRAP pictures. 
+ .nb_unit_types = CBS_UNIT_TYPE_RANGE, + .unit_type_range_start = HEVC_NAL_TRAIL_N, + .unit_type_range_end = HEVC_NAL_RASL_R, + + .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, + .content_size = sizeof(H265RawSlice), + .nb_ref_offsets = 1, + .ref_offsets = { offsetof(H265RawSlice, data) }, + }, + + { + // Slices of IRAP pictures. + .nb_unit_types = CBS_UNIT_TYPE_RANGE, + .unit_type_range_start = HEVC_NAL_BLA_W_LP, + .unit_type_range_end = HEVC_NAL_CRA_NUT, + + .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, + .content_size = sizeof(H265RawSlice), + .nb_ref_offsets = 1, + .ref_offsets = { offsetof(H265RawSlice, data) }, + }, + + { + .nb_unit_types = 2, + .unit_types = { + HEVC_NAL_SEI_PREFIX, + HEVC_NAL_SEI_SUFFIX + }, + .content_type = CBS_CONTENT_TYPE_COMPLEX, + .content_size = sizeof(H265RawSEI), + .content_free = &cbs_h265_free_sei, + }, + + CBS_UNIT_TYPE_END_OF_LIST }; const CodedBitstreamType ff_cbs_type_h264 = { - .codec_id = AV_CODEC_ID_H264, + .codec_id = AV_CODEC_ID_H264, - .priv_data_size = sizeof(CodedBitstreamH264Context), + .priv_data_size = sizeof(CodedBitstreamH264Context), - .unit_types = cbs_h264_unit_types, + .unit_types = cbs_h264_unit_types, - .split_fragment = &cbs_h2645_split_fragment, - .read_unit = &cbs_h264_read_nal_unit, - .write_unit = &cbs_h264_write_nal_unit, - .assemble_fragment = &cbs_h2645_assemble_fragment, + .split_fragment = &cbs_h2645_split_fragment, + .read_unit = &cbs_h264_read_nal_unit, + .write_unit = &cbs_h264_write_nal_unit, + .assemble_fragment = &cbs_h2645_assemble_fragment, - .flush = &cbs_h264_flush, - .close = &cbs_h264_close, + .flush = &cbs_h264_flush, + .close = &cbs_h264_close, }; const CodedBitstreamType ff_cbs_type_h265 = { - .codec_id = AV_CODEC_ID_HEVC, + .codec_id = AV_CODEC_ID_HEVC, - .priv_data_size = sizeof(CodedBitstreamH265Context), + .priv_data_size = sizeof(CodedBitstreamH265Context), - .unit_types = cbs_h265_unit_types, + .unit_types = cbs_h265_unit_types, - .split_fragment = 
&cbs_h2645_split_fragment, - .read_unit = &cbs_h265_read_nal_unit, - .write_unit = &cbs_h265_write_nal_unit, - .assemble_fragment = &cbs_h2645_assemble_fragment, + .split_fragment = &cbs_h2645_split_fragment, + .read_unit = &cbs_h265_read_nal_unit, + .write_unit = &cbs_h265_write_nal_unit, + .assemble_fragment = &cbs_h2645_assemble_fragment, - .flush = &cbs_h265_flush, - .close = &cbs_h265_close, + .flush = &cbs_h265_flush, + .close = &cbs_h265_close, }; static const SEIMessageTypeDescriptor cbs_sei_common_types[] = { - { - SEI_TYPE_FILLER_PAYLOAD, - 1, - 1, - sizeof(SEIRawFillerPayload), - SEI_MESSAGE_RW(sei, filler_payload), - }, - { - SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35, - 1, - 1, - sizeof(SEIRawUserDataRegistered), - SEI_MESSAGE_RW(sei, user_data_registered), - }, - { - SEI_TYPE_USER_DATA_UNREGISTERED, - 1, - 1, - sizeof(SEIRawUserDataUnregistered), - SEI_MESSAGE_RW(sei, user_data_unregistered), - }, - { - SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME, - 1, - 0, - sizeof(SEIRawMasteringDisplayColourVolume), - SEI_MESSAGE_RW(sei, mastering_display_colour_volume), - }, - { - SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO, - 1, - 0, - sizeof(SEIRawContentLightLevelInfo), - SEI_MESSAGE_RW(sei, content_light_level_info), - }, - { - SEI_TYPE_ALTERNATIVE_TRANSFER_CHARACTERISTICS, - 1, - 0, - sizeof(SEIRawAlternativeTransferCharacteristics), - SEI_MESSAGE_RW(sei, alternative_transfer_characteristics), - }, - SEI_MESSAGE_TYPE_END, + { + SEI_TYPE_FILLER_PAYLOAD, + 1, 1, + sizeof(SEIRawFillerPayload), + SEI_MESSAGE_RW(sei, filler_payload), + }, + { + SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35, + 1, 1, + sizeof(SEIRawUserDataRegistered), + SEI_MESSAGE_RW(sei, user_data_registered), + }, + { + SEI_TYPE_USER_DATA_UNREGISTERED, + 1, 1, + sizeof(SEIRawUserDataUnregistered), + SEI_MESSAGE_RW(sei, user_data_unregistered), + }, + { + SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME, + 1, 0, + sizeof(SEIRawMasteringDisplayColourVolume), + SEI_MESSAGE_RW(sei, mastering_display_colour_volume), + }, + { + 
SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO, + 1, 0, + sizeof(SEIRawContentLightLevelInfo), + SEI_MESSAGE_RW(sei, content_light_level_info), + }, + { + SEI_TYPE_ALTERNATIVE_TRANSFER_CHARACTERISTICS, + 1, 0, + sizeof(SEIRawAlternativeTransferCharacteristics), + SEI_MESSAGE_RW(sei, alternative_transfer_characteristics), + }, + SEI_MESSAGE_TYPE_END, }; static const SEIMessageTypeDescriptor cbs_sei_h264_types[] = { - { - SEI_TYPE_BUFFERING_PERIOD, - 1, - 0, - sizeof(H264RawSEIBufferingPeriod), - SEI_MESSAGE_RW(h264, sei_buffering_period), - }, - { - SEI_TYPE_PIC_TIMING, - 1, - 0, - sizeof(H264RawSEIPicTiming), - SEI_MESSAGE_RW(h264, sei_pic_timing), - }, - { - SEI_TYPE_PAN_SCAN_RECT, - 1, - 0, - sizeof(H264RawSEIPanScanRect), - SEI_MESSAGE_RW(h264, sei_pan_scan_rect), - }, - { - SEI_TYPE_RECOVERY_POINT, - 1, - 0, - sizeof(H264RawSEIRecoveryPoint), - SEI_MESSAGE_RW(h264, sei_recovery_point), - }, - { - SEI_TYPE_DISPLAY_ORIENTATION, - 1, - 0, - sizeof(H264RawSEIDisplayOrientation), - SEI_MESSAGE_RW(h264, sei_display_orientation), - }, - SEI_MESSAGE_TYPE_END + { + SEI_TYPE_BUFFERING_PERIOD, + 1, 0, + sizeof(H264RawSEIBufferingPeriod), + SEI_MESSAGE_RW(h264, sei_buffering_period), + }, + { + SEI_TYPE_PIC_TIMING, + 1, 0, + sizeof(H264RawSEIPicTiming), + SEI_MESSAGE_RW(h264, sei_pic_timing), + }, + { + SEI_TYPE_PAN_SCAN_RECT, + 1, 0, + sizeof(H264RawSEIPanScanRect), + SEI_MESSAGE_RW(h264, sei_pan_scan_rect), + }, + { + SEI_TYPE_RECOVERY_POINT, + 1, 0, + sizeof(H264RawSEIRecoveryPoint), + SEI_MESSAGE_RW(h264, sei_recovery_point), + }, + { + SEI_TYPE_FILM_GRAIN_CHARACTERISTICS, + 1, 0, + sizeof(H264RawFilmGrainCharacteristics), + SEI_MESSAGE_RW(h264, film_grain_characteristics), + }, + { + SEI_TYPE_DISPLAY_ORIENTATION, + 1, 0, + sizeof(H264RawSEIDisplayOrientation), + SEI_MESSAGE_RW(h264, sei_display_orientation), + }, + SEI_MESSAGE_TYPE_END }; static const SEIMessageTypeDescriptor cbs_sei_h265_types[] = { - { - SEI_TYPE_BUFFERING_PERIOD, - 1, - 0, - sizeof(H265RawSEIBufferingPeriod), - 
SEI_MESSAGE_RW(h265, sei_buffering_period), - }, - { - SEI_TYPE_PIC_TIMING, - 1, - 0, - sizeof(H265RawSEIPicTiming), - SEI_MESSAGE_RW(h265, sei_pic_timing), - }, - { - SEI_TYPE_PAN_SCAN_RECT, - 1, - 0, - sizeof(H265RawSEIPanScanRect), - SEI_MESSAGE_RW(h265, sei_pan_scan_rect), - }, - { - SEI_TYPE_RECOVERY_POINT, - 1, - 0, - sizeof(H265RawSEIRecoveryPoint), - SEI_MESSAGE_RW(h265, sei_recovery_point), - }, - { - SEI_TYPE_DISPLAY_ORIENTATION, - 1, - 0, - sizeof(H265RawSEIDisplayOrientation), - SEI_MESSAGE_RW(h265, sei_display_orientation), - }, - { - SEI_TYPE_ACTIVE_PARAMETER_SETS, - 1, - 0, - sizeof(H265RawSEIActiveParameterSets), - SEI_MESSAGE_RW(h265, sei_active_parameter_sets), - }, - { - SEI_TYPE_DECODED_PICTURE_HASH, - 0, - 1, - sizeof(H265RawSEIDecodedPictureHash), - SEI_MESSAGE_RW(h265, sei_decoded_picture_hash), - }, - { - SEI_TYPE_TIME_CODE, - 1, - 0, - sizeof(H265RawSEITimeCode), - SEI_MESSAGE_RW(h265, sei_time_code), - }, - { - SEI_TYPE_ALPHA_CHANNEL_INFO, - 1, - 0, - sizeof(H265RawSEIAlphaChannelInfo), - SEI_MESSAGE_RW(h265, sei_alpha_channel_info), - }, - SEI_MESSAGE_TYPE_END + { + SEI_TYPE_BUFFERING_PERIOD, + 1, 0, + sizeof(H265RawSEIBufferingPeriod), + SEI_MESSAGE_RW(h265, sei_buffering_period), + }, + { + SEI_TYPE_PIC_TIMING, + 1, 0, + sizeof(H265RawSEIPicTiming), + SEI_MESSAGE_RW(h265, sei_pic_timing), + }, + { + SEI_TYPE_PAN_SCAN_RECT, + 1, 0, + sizeof(H265RawSEIPanScanRect), + SEI_MESSAGE_RW(h265, sei_pan_scan_rect), + }, + { + SEI_TYPE_RECOVERY_POINT, + 1, 0, + sizeof(H265RawSEIRecoveryPoint), + SEI_MESSAGE_RW(h265, sei_recovery_point), + }, + { + SEI_TYPE_FILM_GRAIN_CHARACTERISTICS, + 1, 0, + sizeof(H265RawFilmGrainCharacteristics), + SEI_MESSAGE_RW(h265, film_grain_characteristics), + }, + { + SEI_TYPE_DISPLAY_ORIENTATION, + 1, 0, + sizeof(H265RawSEIDisplayOrientation), + SEI_MESSAGE_RW(h265, sei_display_orientation), + }, + { + SEI_TYPE_ACTIVE_PARAMETER_SETS, + 1, 0, + sizeof(H265RawSEIActiveParameterSets), + SEI_MESSAGE_RW(h265, 
sei_active_parameter_sets), + }, + { + SEI_TYPE_DECODED_PICTURE_HASH, + 0, 1, + sizeof(H265RawSEIDecodedPictureHash), + SEI_MESSAGE_RW(h265, sei_decoded_picture_hash), + }, + { + SEI_TYPE_TIME_CODE, + 1, 0, + sizeof(H265RawSEITimeCode), + SEI_MESSAGE_RW(h265, sei_time_code), + }, + { + SEI_TYPE_ALPHA_CHANNEL_INFO, + 1, 0, + sizeof(H265RawSEIAlphaChannelInfo), + SEI_MESSAGE_RW(h265, sei_alpha_channel_info), + }, + SEI_MESSAGE_TYPE_END }; const SEIMessageTypeDescriptor *ff_cbs_sei_find_type(CodedBitstreamContext *ctx, - int payload_type) { - const SEIMessageTypeDescriptor *codec_list; - int i; - - for(i = 0; cbs_sei_common_types[i].type >= 0; i++) { - if(cbs_sei_common_types[i].type == payload_type) - return &cbs_sei_common_types[i]; - } - - switch(ctx->codec->codec_id) { - case AV_CODEC_ID_H264: - codec_list = cbs_sei_h264_types; - break; - case AV_CODEC_ID_H265: - codec_list = cbs_sei_h265_types; - break; - default: - return NULL; - } + int payload_type) +{ + const SEIMessageTypeDescriptor *codec_list; + int i; + + for (i = 0; cbs_sei_common_types[i].type >= 0; i++) { + if (cbs_sei_common_types[i].type == payload_type) + return &cbs_sei_common_types[i]; + } - for(i = 0; codec_list[i].type >= 0; i++) { - if(codec_list[i].type == payload_type) - return &codec_list[i]; - } + switch (ctx->codec->codec_id) { + case AV_CODEC_ID_H264: + codec_list = cbs_sei_h264_types; + break; + case AV_CODEC_ID_H265: + codec_list = cbs_sei_h265_types; + break; + default: + return NULL; + } - return NULL; + for (i = 0; codec_list[i].type >= 0; i++) { + if (codec_list[i].type == payload_type) + return &codec_list[i]; + } + + return NULL; } diff --git a/third-party/cbs/cbs_h264_syntax_template.c b/third-party/cbs/cbs_h264_syntax_template.c index a2d3735b424..0f8bba4a0da 100644 --- a/third-party/cbs/cbs_h264_syntax_template.c +++ b/third-party/cbs/cbs_h264_syntax_template.c @@ -16,1160 +16,1247 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -static int 
FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) { - int err; +static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) +{ + int err; - fixed(1, rbsp_stop_one_bit, 1); - while(byte_alignment(rw) != 0) - fixed(1, rbsp_alignment_zero_bit, 0); + fixed(1, rbsp_stop_one_bit, 1); + while (byte_alignment(rw) != 0) + fixed(1, rbsp_alignment_zero_bit, 0); - return 0; + return 0; } static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawNALUnitHeader *current, - uint32_t valid_type_mask) { - int err; - - fixed(1, forbidden_zero_bit, 0); - ub(2, nal_ref_idc); - ub(5, nal_unit_type); - - if(!(1 << current->nal_unit_type & valid_type_mask)) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n", - current->nal_unit_type); - return AVERROR_INVALIDDATA; - } - - if(current->nal_unit_type == 14 || - current->nal_unit_type == 20 || - current->nal_unit_type == 21) { - if(current->nal_unit_type != 21) - flag(svc_extension_flag); - else - flag(avc_3d_extension_flag); - - if(current->svc_extension_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n"); - return AVERROR_PATCHWELCOME; - } - else if(current->avc_3d_extension_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n"); - return AVERROR_PATCHWELCOME; + H264RawNALUnitHeader *current, + uint32_t valid_type_mask) +{ + int err; + + fixed(1, forbidden_zero_bit, 0); + ub(2, nal_ref_idc); + ub(5, nal_unit_type); + + if (!(1 << current->nal_unit_type & valid_type_mask)) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n", + current->nal_unit_type); + return AVERROR_INVALIDDATA; } - else { - av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n"); - return AVERROR_PATCHWELCOME; + + if (current->nal_unit_type == 14 || + current->nal_unit_type == 20 || + current->nal_unit_type == 21) { + if (current->nal_unit_type != 21) + flag(svc_extension_flag); + else + flag(avc_3d_extension_flag); + + if 
(current->svc_extension_flag) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n"); + return AVERROR_PATCHWELCOME; + + } else if (current->avc_3d_extension_flag) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n"); + return AVERROR_PATCHWELCOME; + + } else { + av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n"); + return AVERROR_PATCHWELCOME; + } } - } - return 0; + return 0; } static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawScalingList *current, - int size_of_scaling_list) { - int err, i, scale; - - scale = 8; - for(i = 0; i < size_of_scaling_list; i++) { - ses(delta_scale[i], -128, +127, 1, i); - scale = (scale + current->delta_scale[i] + 256) % 256; - if(scale == 0) - break; - } - - return 0; + H264RawScalingList *current, + int size_of_scaling_list) +{ + int err, i, scale; + + scale = 8; + for (i = 0; i < size_of_scaling_list; i++) { + ses(delta_scale[i], -128, +127, 1, i); + scale = (scale + current->delta_scale[i] + 256) % 256; + if (scale == 0) + break; + } + + return 0; } static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawHRD *current) { - int err, i; - - ue(cpb_cnt_minus1, 0, 31); - ub(4, bit_rate_scale); - ub(4, cpb_size_scale); - - for(i = 0; i <= current->cpb_cnt_minus1; i++) { - ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - flags(cbr_flag[i], 1, i); - } + H264RawHRD *current) +{ + int err, i; + + ue(cpb_cnt_minus1, 0, 31); + ub(4, bit_rate_scale); + ub(4, cpb_size_scale); + + for (i = 0; i <= current->cpb_cnt_minus1; i++) { + ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i); + ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i); + flags(cbr_flag[i], 1, i); + } - ub(5, initial_cpb_removal_delay_length_minus1); - ub(5, cpb_removal_delay_length_minus1); - ub(5, dpb_output_delay_length_minus1); - ub(5, time_offset_length); + ub(5, initial_cpb_removal_delay_length_minus1); + ub(5, 
cpb_removal_delay_length_minus1); + ub(5, dpb_output_delay_length_minus1); + ub(5, time_offset_length); - return 0; + return 0; } static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawVUI *current, H264RawSPS *sps) { - int err; - - flag(aspect_ratio_info_present_flag); - if(current->aspect_ratio_info_present_flag) { - ub(8, aspect_ratio_idc); - if(current->aspect_ratio_idc == 255) { - ub(16, sar_width); - ub(16, sar_height); + H264RawVUI *current, H264RawSPS *sps) +{ + int err; + + flag(aspect_ratio_info_present_flag); + if (current->aspect_ratio_info_present_flag) { + ub(8, aspect_ratio_idc); + if (current->aspect_ratio_idc == 255) { + ub(16, sar_width); + ub(16, sar_height); + } + } else { + infer(aspect_ratio_idc, 0); } - } - else { - infer(aspect_ratio_idc, 0); - } - - flag(overscan_info_present_flag); - if(current->overscan_info_present_flag) - flag(overscan_appropriate_flag); - - flag(video_signal_type_present_flag); - if(current->video_signal_type_present_flag) { - ub(3, video_format); - flag(video_full_range_flag); - flag(colour_description_present_flag); - if(current->colour_description_present_flag) { - ub(8, colour_primaries); - ub(8, transfer_characteristics); - ub(8, matrix_coefficients); + + flag(overscan_info_present_flag); + if (current->overscan_info_present_flag) + flag(overscan_appropriate_flag); + + flag(video_signal_type_present_flag); + if (current->video_signal_type_present_flag) { + ub(3, video_format); + flag(video_full_range_flag); + flag(colour_description_present_flag); + if (current->colour_description_present_flag) { + ub(8, colour_primaries); + ub(8, transfer_characteristics); + ub(8, matrix_coefficients); + } else { + infer(colour_primaries, 2); + infer(transfer_characteristics, 2); + infer(matrix_coefficients, 2); + } + } else { + infer(video_format, 5); + infer(video_full_range_flag, 0); + infer(colour_primaries, 2); + infer(transfer_characteristics, 2); + infer(matrix_coefficients, 2); + } + + 
flag(chroma_loc_info_present_flag); + if (current->chroma_loc_info_present_flag) { + ue(chroma_sample_loc_type_top_field, 0, 5); + ue(chroma_sample_loc_type_bottom_field, 0, 5); + } else { + infer(chroma_sample_loc_type_top_field, 0); + infer(chroma_sample_loc_type_bottom_field, 0); } - else { - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); + + flag(timing_info_present_flag); + if (current->timing_info_present_flag) { + u(32, num_units_in_tick, 1, UINT32_MAX); + u(32, time_scale, 1, UINT32_MAX); + flag(fixed_frame_rate_flag); + } else { + infer(fixed_frame_rate_flag, 0); } - } - else { - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); + + flag(nal_hrd_parameters_present_flag); + if (current->nal_hrd_parameters_present_flag) + CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->nal_hrd_parameters)); + + flag(vcl_hrd_parameters_present_flag); + if (current->vcl_hrd_parameters_present_flag) + CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->vcl_hrd_parameters)); + + if (current->nal_hrd_parameters_present_flag || + current->vcl_hrd_parameters_present_flag) + flag(low_delay_hrd_flag); + else + infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag); + + flag(pic_struct_present_flag); + + flag(bitstream_restriction_flag); + if (current->bitstream_restriction_flag) { + flag(motion_vectors_over_pic_boundaries_flag); + ue(max_bytes_per_pic_denom, 0, 16); + ue(max_bits_per_mb_denom, 0, 16); + // The current version of the standard constrains this to be in + // [0,15], but older versions allow 16. 
+ ue(log2_max_mv_length_horizontal, 0, 16); + ue(log2_max_mv_length_vertical, 0, 16); + ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES); + ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES); + } else { + infer(motion_vectors_over_pic_boundaries_flag, 1); + infer(max_bytes_per_pic_denom, 2); + infer(max_bits_per_mb_denom, 1); + infer(log2_max_mv_length_horizontal, 15); + infer(log2_max_mv_length_vertical, 15); + + if ((sps->profile_idc == 44 || sps->profile_idc == 86 || + sps->profile_idc == 100 || sps->profile_idc == 110 || + sps->profile_idc == 122 || sps->profile_idc == 244) && + sps->constraint_set3_flag) { + infer(max_num_reorder_frames, 0); + infer(max_dec_frame_buffering, 0); + } else { + infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES); + infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES); + } + } + + return 0; +} + +static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx, + RWContext *rw, H264RawVUI *current, + H264RawSPS *sps) +{ + infer(aspect_ratio_idc, 0); + + infer(video_format, 5); + infer(video_full_range_flag, 0); + infer(colour_primaries, 2); infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - - flag(chroma_loc_info_present_flag); - if(current->chroma_loc_info_present_flag) { - ue(chroma_sample_loc_type_top_field, 0, 5); - ue(chroma_sample_loc_type_bottom_field, 0, 5); - } - else { - infer(chroma_sample_loc_type_top_field, 0); + infer(matrix_coefficients, 2); + + infer(chroma_sample_loc_type_top_field, 0); infer(chroma_sample_loc_type_bottom_field, 0); - } - - flag(timing_info_present_flag); - if(current->timing_info_present_flag) { - u(32, num_units_in_tick, 1, UINT32_MAX); - u(32, time_scale, 1, UINT32_MAX); - flag(fixed_frame_rate_flag); - } - else { + infer(fixed_frame_rate_flag, 0); - } - - flag(nal_hrd_parameters_present_flag); - if(current->nal_hrd_parameters_present_flag) - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->nal_hrd_parameters)); - - flag(vcl_hrd_parameters_present_flag); - 
if(current->vcl_hrd_parameters_present_flag) - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->vcl_hrd_parameters)); - - if(current->nal_hrd_parameters_present_flag || - current->vcl_hrd_parameters_present_flag) - flag(low_delay_hrd_flag); - else - infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag); - - flag(pic_struct_present_flag); - - flag(bitstream_restriction_flag); - if(current->bitstream_restriction_flag) { - flag(motion_vectors_over_pic_boundaries_flag); - ue(max_bytes_per_pic_denom, 0, 16); - ue(max_bits_per_mb_denom, 0, 16); - // The current version of the standard constrains this to be in - // [0,15], but older versions allow 16. - ue(log2_max_mv_length_horizontal, 0, 16); - ue(log2_max_mv_length_vertical, 0, 16); - ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES); - ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES); - } - else { + infer(low_delay_hrd_flag, 1); + + infer(pic_struct_present_flag, 0); + infer(motion_vectors_over_pic_boundaries_flag, 1); infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_mb_denom, 1); + infer(max_bits_per_mb_denom, 1); infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); + infer(log2_max_mv_length_vertical, 15); - if((sps->profile_idc == 44 || sps->profile_idc == 86 || + if ((sps->profile_idc == 44 || sps->profile_idc == 86 || sps->profile_idc == 100 || sps->profile_idc == 110 || sps->profile_idc == 122 || sps->profile_idc == 244) && - sps->constraint_set3_flag) { - infer(max_num_reorder_frames, 0); - infer(max_dec_frame_buffering, 0); - } - else { - infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES); - infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES); + sps->constraint_set3_flag) { + infer(max_num_reorder_frames, 0); + infer(max_dec_frame_buffering, 0); + } else { + infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES); + infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES); } - } - return 0; -} - -static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx, - RWContext *rw, 
H264RawVUI *current, - H264RawSPS *sps) { - infer(aspect_ratio_idc, 0); - - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - - infer(fixed_frame_rate_flag, 0); - infer(low_delay_hrd_flag, 1); - - infer(pic_struct_present_flag, 0); - - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_mb_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - - if((sps->profile_idc == 44 || sps->profile_idc == 86 || - sps->profile_idc == 100 || sps->profile_idc == 110 || - sps->profile_idc == 122 || sps->profile_idc == 244) && - sps->constraint_set3_flag) { - infer(max_num_reorder_frames, 0); - infer(max_dec_frame_buffering, 0); - } - else { - infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES); - infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES); - } - - return 0; + return 0; } static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSPS *current) { - int err, i; + H264RawSPS *current) +{ + int err, i; + + HEADER("Sequence Parameter Set"); + + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_SPS)); + + ub(8, profile_idc); + + flag(constraint_set0_flag); + flag(constraint_set1_flag); + flag(constraint_set2_flag); + flag(constraint_set3_flag); + flag(constraint_set4_flag); + flag(constraint_set5_flag); + + u(2, reserved_zero_2bits, 0, 0); + + ub(8, level_idc); + + ue(seq_parameter_set_id, 0, 31); + + if (current->profile_idc == 100 || current->profile_idc == 110 || + current->profile_idc == 122 || current->profile_idc == 244 || + current->profile_idc == 44 || current->profile_idc == 83 || + current->profile_idc == 86 || current->profile_idc == 118 || + current->profile_idc == 128 || current->profile_idc == 138) { + ue(chroma_format_idc, 
0, 3); + + if (current->chroma_format_idc == 3) + flag(separate_colour_plane_flag); + else + infer(separate_colour_plane_flag, 0); + + ue(bit_depth_luma_minus8, 0, 6); + ue(bit_depth_chroma_minus8, 0, 6); + + flag(qpprime_y_zero_transform_bypass_flag); + + flag(seq_scaling_matrix_present_flag); + if (current->seq_scaling_matrix_present_flag) { + for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) { + flags(seq_scaling_list_present_flag[i], 1, i); + if (current->seq_scaling_list_present_flag[i]) { + if (i < 6) + CHECK(FUNC(scaling_list)(ctx, rw, + ¤t->scaling_list_4x4[i], + 16)); + else + CHECK(FUNC(scaling_list)(ctx, rw, + ¤t->scaling_list_8x8[i - 6], + 64)); + } + } + } + } else { + infer(chroma_format_idc, current->profile_idc == 183 ? 0 : 1); - HEADER("Sequence Parameter Set"); + infer(separate_colour_plane_flag, 0); + infer(bit_depth_luma_minus8, 0); + infer(bit_depth_chroma_minus8, 0); + } - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SPS)); + ue(log2_max_frame_num_minus4, 0, 12); + ue(pic_order_cnt_type, 0, 2); - ub(8, profile_idc); + if (current->pic_order_cnt_type == 0) { + ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12); + } else if (current->pic_order_cnt_type == 1) { + flag(delta_pic_order_always_zero_flag); + se(offset_for_non_ref_pic, INT32_MIN + 1, INT32_MAX); + se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX); + ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255); - flag(constraint_set0_flag); - flag(constraint_set1_flag); - flag(constraint_set2_flag); - flag(constraint_set3_flag); - flag(constraint_set4_flag); - flag(constraint_set5_flag); + for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++) + ses(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX, 1, i); + } - u(2, reserved_zero_2bits, 0, 0); + ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES); + flag(gaps_in_frame_num_allowed_flag); - ub(8, level_idc); + ue(pic_width_in_mbs_minus1, 0, H264_MAX_MB_WIDTH); + 
ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT); - ue(seq_parameter_set_id, 0, 31); + flag(frame_mbs_only_flag); + if (!current->frame_mbs_only_flag) + flag(mb_adaptive_frame_field_flag); - if(current->profile_idc == 100 || current->profile_idc == 110 || - current->profile_idc == 122 || current->profile_idc == 244 || - current->profile_idc == 44 || current->profile_idc == 83 || - current->profile_idc == 86 || current->profile_idc == 118 || - current->profile_idc == 128 || current->profile_idc == 138) { - ue(chroma_format_idc, 0, 3); + flag(direct_8x8_inference_flag); - if(current->chroma_format_idc == 3) - flag(separate_colour_plane_flag); - else - infer(separate_colour_plane_flag, 0); - - ue(bit_depth_luma_minus8, 0, 6); - ue(bit_depth_chroma_minus8, 0, 6); - - flag(qpprime_y_zero_transform_bypass_flag); - - flag(seq_scaling_matrix_present_flag); - if(current->seq_scaling_matrix_present_flag) { - for(i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) { - flags(seq_scaling_list_present_flag[i], 1, i); - if(current->seq_scaling_list_present_flag[i]) { - if(i < 6) - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_4x4[i], - 16)); - else - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_8x8[i - 6], - 64)); - } - } + flag(frame_cropping_flag); + if (current->frame_cropping_flag) { + ue(frame_crop_left_offset, 0, H264_MAX_WIDTH); + ue(frame_crop_right_offset, 0, H264_MAX_WIDTH); + ue(frame_crop_top_offset, 0, H264_MAX_HEIGHT); + ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT); } - } - else { - infer(chroma_format_idc, current->profile_idc == 183 ? 
0 : 1); - - infer(separate_colour_plane_flag, 0); - infer(bit_depth_luma_minus8, 0); - infer(bit_depth_chroma_minus8, 0); - } - - ue(log2_max_frame_num_minus4, 0, 12); - ue(pic_order_cnt_type, 0, 2); - - if(current->pic_order_cnt_type == 0) { - ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12); - } - else if(current->pic_order_cnt_type == 1) { - flag(delta_pic_order_always_zero_flag); - se(offset_for_non_ref_pic, INT32_MIN + 1, INT32_MAX); - se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX); - ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255); - - for(i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++) - ses(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX, 1, i); - } - - ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES); - flag(gaps_in_frame_num_allowed_flag); - - ue(pic_width_in_mbs_minus1, 0, H264_MAX_MB_WIDTH); - ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT); - - flag(frame_mbs_only_flag); - if(!current->frame_mbs_only_flag) - flag(mb_adaptive_frame_field_flag); - - flag(direct_8x8_inference_flag); - - flag(frame_cropping_flag); - if(current->frame_cropping_flag) { - ue(frame_crop_left_offset, 0, H264_MAX_WIDTH); - ue(frame_crop_right_offset, 0, H264_MAX_WIDTH); - ue(frame_crop_top_offset, 0, H264_MAX_HEIGHT); - ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT); - } - - flag(vui_parameters_present_flag); - if(current->vui_parameters_present_flag) - CHECK(FUNC(vui_parameters)(ctx, rw, ¤t->vui, current)); - else - CHECK(FUNC(vui_parameters_default)(ctx, rw, ¤t->vui, current)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; + + flag(vui_parameters_present_flag); + if (current->vui_parameters_present_flag) + CHECK(FUNC(vui_parameters)(ctx, rw, ¤t->vui, current)); + else + CHECK(FUNC(vui_parameters_default)(ctx, rw, ¤t->vui, current)); + + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + + return 0; } static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSPSExtension *current) { - int err; + 
H264RawSPSExtension *current) +{ + int err; - HEADER("Sequence Parameter Set Extension"); + HEADER("Sequence Parameter Set Extension"); - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SPS_EXT)); + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_SPS_EXT)); - ue(seq_parameter_set_id, 0, 31); + ue(seq_parameter_set_id, 0, 31); - ue(aux_format_idc, 0, 3); + ue(aux_format_idc, 0, 3); - if(current->aux_format_idc != 0) { - int bits; + if (current->aux_format_idc != 0) { + int bits; - ue(bit_depth_aux_minus8, 0, 4); - flag(alpha_incr_flag); + ue(bit_depth_aux_minus8, 0, 4); + flag(alpha_incr_flag); - bits = current->bit_depth_aux_minus8 + 9; - ub(bits, alpha_opaque_value); - ub(bits, alpha_transparent_value); - } + bits = current->bit_depth_aux_minus8 + 9; + ub(bits, alpha_opaque_value); + ub(bits, alpha_transparent_value); + } - flag(additional_extension_flag); + flag(additional_extension_flag); - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawPPS *current) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err, i; - - HEADER("Picture Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_PPS)); - - ue(pic_parameter_set_id, 0, 255); - ue(seq_parameter_set_id, 0, 31); - - sps = h264->sps[current->seq_parameter_set_id]; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - current->seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } + H264RawPPS *current) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps; + int err, i; - flag(entropy_coding_mode_flag); - flag(bottom_field_pic_order_in_frame_present_flag); + HEADER("Picture Parameter Set"); - ue(num_slice_groups_minus1, 0, 7); - if(current->num_slice_groups_minus1 > 0) { - unsigned int pic_size; - int 
iGroup; + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_PPS)); - pic_size = (sps->pic_width_in_mbs_minus1 + 1) * - (sps->pic_height_in_map_units_minus1 + 1); + ue(pic_parameter_set_id, 0, 255); + ue(seq_parameter_set_id, 0, 31); - ue(slice_group_map_type, 0, 6); - - if(current->slice_group_map_type == 0) { - for(iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++) - ues(run_length_minus1[iGroup], 0, pic_size - 1, 1, iGroup); - } - else if(current->slice_group_map_type == 2) { - for(iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) { - ues(top_left[iGroup], 0, pic_size - 1, 1, iGroup); - ues(bottom_right[iGroup], - current->top_left[iGroup], pic_size - 1, 1, iGroup); - } - } - else if(current->slice_group_map_type == 3 || - current->slice_group_map_type == 4 || - current->slice_group_map_type == 5) { - flag(slice_group_change_direction_flag); - ue(slice_group_change_rate_minus1, 0, pic_size - 1); - } - else if(current->slice_group_map_type == 6) { - ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1); - - allocate(current->slice_group_id, - current->pic_size_in_map_units_minus1 + 1); - for(i = 0; i <= current->pic_size_in_map_units_minus1; i++) - us(av_log2(2 * current->num_slice_groups_minus1 + 1), - slice_group_id[i], 0, current->num_slice_groups_minus1, 1, i); + sps = h264->sps[current->seq_parameter_set_id]; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", + current->seq_parameter_set_id); + return AVERROR_INVALIDDATA; } - } - - ue(num_ref_idx_l0_default_active_minus1, 0, 31); - ue(num_ref_idx_l1_default_active_minus1, 0, 31); - - flag(weighted_pred_flag); - u(2, weighted_bipred_idc, 0, 2); - - se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25); - se(pic_init_qs_minus26, -26, +25); - se(chroma_qp_index_offset, -12, +12); - - flag(deblocking_filter_control_present_flag); - flag(constrained_intra_pred_flag); - flag(redundant_pic_cnt_present_flag); - - 
if(more_rbsp_data(current->more_rbsp_data)) { - flag(transform_8x8_mode_flag); - - flag(pic_scaling_matrix_present_flag); - if(current->pic_scaling_matrix_present_flag) { - for(i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) * - current->transform_8x8_mode_flag); - i++) { - flags(pic_scaling_list_present_flag[i], 1, i); - if(current->pic_scaling_list_present_flag[i]) { - if(i < 6) - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_4x4[i], - 16)); - else - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_8x8[i - 6], - 64)); + + flag(entropy_coding_mode_flag); + flag(bottom_field_pic_order_in_frame_present_flag); + + ue(num_slice_groups_minus1, 0, 7); + if (current->num_slice_groups_minus1 > 0) { + unsigned int pic_size; + int iGroup; + + pic_size = (sps->pic_width_in_mbs_minus1 + 1) * + (sps->pic_height_in_map_units_minus1 + 1); + + ue(slice_group_map_type, 0, 6); + + if (current->slice_group_map_type == 0) { + for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++) + ues(run_length_minus1[iGroup], 0, pic_size - 1, 1, iGroup); + + } else if (current->slice_group_map_type == 2) { + for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) { + ues(top_left[iGroup], 0, pic_size - 1, 1, iGroup); + ues(bottom_right[iGroup], + current->top_left[iGroup], pic_size - 1, 1, iGroup); + } + } else if (current->slice_group_map_type == 3 || + current->slice_group_map_type == 4 || + current->slice_group_map_type == 5) { + flag(slice_group_change_direction_flag); + ue(slice_group_change_rate_minus1, 0, pic_size - 1); + } else if (current->slice_group_map_type == 6) { + ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1); + + allocate(current->slice_group_id, + current->pic_size_in_map_units_minus1 + 1); + for (i = 0; i <= current->pic_size_in_map_units_minus1; i++) + us(av_log2(2 * current->num_slice_groups_minus1 + 1), + slice_group_id[i], 0, current->num_slice_groups_minus1, 1, i); } - } } - se(second_chroma_qp_index_offset, 
-12, +12); - } - else { - infer(transform_8x8_mode_flag, 0); - infer(pic_scaling_matrix_present_flag, 0); - infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset); - } + ue(num_ref_idx_l0_default_active_minus1, 0, 31); + ue(num_ref_idx_l1_default_active_minus1, 0, 31); + + flag(weighted_pred_flag); + u(2, weighted_bipred_idc, 0, 2); + + se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25); + se(pic_init_qs_minus26, -26, +25); + se(chroma_qp_index_offset, -12, +12); + + flag(deblocking_filter_control_present_flag); + flag(constrained_intra_pred_flag); + flag(redundant_pic_cnt_present_flag); + + if (more_rbsp_data(current->more_rbsp_data)) + { + flag(transform_8x8_mode_flag); + + flag(pic_scaling_matrix_present_flag); + if (current->pic_scaling_matrix_present_flag) { + for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) * + current->transform_8x8_mode_flag); i++) { + flags(pic_scaling_list_present_flag[i], 1, i); + if (current->pic_scaling_list_present_flag[i]) { + if (i < 6) + CHECK(FUNC(scaling_list)(ctx, rw, + ¤t->scaling_list_4x4[i], + 16)); + else + CHECK(FUNC(scaling_list)(ctx, rw, + ¤t->scaling_list_8x8[i - 6], + 64)); + } + } + } + + se(second_chroma_qp_index_offset, -12, +12); + } else { + infer(transform_8x8_mode_flag, 0); + infer(pic_scaling_matrix_present_flag, 0); + infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset); + } - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIBufferingPeriod *current, - SEIMessageState *sei) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err, i, length; - - HEADER("Buffering Period"); - - ue(seq_parameter_set_id, 0, 31); - - sps = h264->sps[current->seq_parameter_set_id]; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - 
current->seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h264->active_sps = sps; - - if(sps->vui.nal_hrd_parameters_present_flag) { - for(i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) { - length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1; - xu(length, initial_cpb_removal_delay[SchedSelIdx], - current->nal.initial_cpb_removal_delay[i], - 1, MAX_UINT_BITS(length), 1, i); - xu(length, initial_cpb_removal_delay_offset[SchedSelIdx], - current->nal.initial_cpb_removal_delay_offset[i], - 0, MAX_UINT_BITS(length), 1, i); + H264RawSEIBufferingPeriod *current, + SEIMessageState *sei) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps; + int err, i, length; + + HEADER("Buffering Period"); + + ue(seq_parameter_set_id, 0, 31); + + sps = h264->sps[current->seq_parameter_set_id]; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", + current->seq_parameter_set_id); + return AVERROR_INVALIDDATA; } - } - - if(sps->vui.vcl_hrd_parameters_present_flag) { - for(i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) { - length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1; - xu(length, initial_cpb_removal_delay[SchedSelIdx], - current->vcl.initial_cpb_removal_delay[i], - 1, MAX_UINT_BITS(length), 1, i); - xu(length, initial_cpb_removal_delay_offset[SchedSelIdx], - current->vcl.initial_cpb_removal_delay_offset[i], - 0, MAX_UINT_BITS(length), 1, i); + h264->active_sps = sps; + + if (sps->vui.nal_hrd_parameters_present_flag) { + for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) { + length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1; + xu(length, initial_cpb_removal_delay[SchedSelIdx], + current->nal.initial_cpb_removal_delay[i], + 1, MAX_UINT_BITS(length), 1, i); + xu(length, initial_cpb_removal_delay_offset[SchedSelIdx], + current->nal.initial_cpb_removal_delay_offset[i], + 0, 
MAX_UINT_BITS(length), 1, i); + } + } + + if (sps->vui.vcl_hrd_parameters_present_flag) { + for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) { + length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1; + xu(length, initial_cpb_removal_delay[SchedSelIdx], + current->vcl.initial_cpb_removal_delay[i], + 1, MAX_UINT_BITS(length), 1, i); + xu(length, initial_cpb_removal_delay_offset[SchedSelIdx], + current->vcl.initial_cpb_removal_delay_offset[i], + 0, MAX_UINT_BITS(length), 1, i); + } } - } - return 0; + return 0; } static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIPicTimestamp *current, - const H264RawSPS *sps) { - uint8_t time_offset_length; - int err; - - u(2, ct_type, 0, 2); - flag(nuit_field_based_flag); - u(5, counting_type, 0, 6); - flag(full_timestamp_flag); - flag(discontinuity_flag); - flag(cnt_dropped_flag); - ub(8, n_frames); - if(current->full_timestamp_flag) { - u(6, seconds_value, 0, 59); - u(6, minutes_value, 0, 59); - u(5, hours_value, 0, 23); - } - else { - flag(seconds_flag); - if(current->seconds_flag) { - u(6, seconds_value, 0, 59); - flag(minutes_flag); - if(current->minutes_flag) { - u(6, minutes_value, 0, 59); - flag(hours_flag); - if(current->hours_flag) - u(5, hours_value, 0, 23); - } + H264RawSEIPicTimestamp *current, + const H264RawSPS *sps) +{ + uint8_t time_offset_length; + int err; + + u(2, ct_type, 0, 2); + flag(nuit_field_based_flag); + u(5, counting_type, 0, 6); + flag(full_timestamp_flag); + flag(discontinuity_flag); + flag(cnt_dropped_flag); + ub(8, n_frames); + if (current->full_timestamp_flag) { + u(6, seconds_value, 0, 59); + u(6, minutes_value, 0, 59); + u(5, hours_value, 0, 23); + } else { + flag(seconds_flag); + if (current->seconds_flag) { + u(6, seconds_value, 0, 59); + flag(minutes_flag); + if (current->minutes_flag) { + u(6, minutes_value, 0, 59); + flag(hours_flag); + if (current->hours_flag) + u(5, hours_value, 0, 23); + } + } } - } - 
if(sps->vui.nal_hrd_parameters_present_flag) - time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length; - else if(sps->vui.vcl_hrd_parameters_present_flag) - time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length; - else - time_offset_length = 24; + if (sps->vui.nal_hrd_parameters_present_flag) + time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length; + else if (sps->vui.vcl_hrd_parameters_present_flag) + time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length; + else + time_offset_length = 24; - if(time_offset_length > 0) - ib(time_offset_length, time_offset); - else - infer(time_offset, 0); + if (time_offset_length > 0) + ib(time_offset_length, time_offset); + else + infer(time_offset, 0); - return 0; + return 0; } static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIPicTiming *current, - SEIMessageState *sei) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err; - - HEADER("Picture Timing"); - - sps = h264->active_sps; - if(!sps) { - // If there is exactly one possible SPS but it is not yet active - // then just assume that it should be the active one. - int i, k = -1; - for(i = 0; i < H264_MAX_SPS_COUNT; i++) { - if(h264->sps[i]) { - if(k >= 0) { - k = -1; - break; + H264RawSEIPicTiming *current, + SEIMessageState *sei) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps; + int err; + + HEADER("Picture Timing"); + + sps = h264->active_sps; + if (!sps) { + // If there is exactly one possible SPS but it is not yet active + // then just assume that it should be the active one. 
+ int i, k = -1; + for (i = 0; i < H264_MAX_SPS_COUNT; i++) { + if (h264->sps[i]) { + if (k >= 0) { + k = -1; + break; + } + k = i; + } } - k = i; - } + if (k >= 0) + sps = h264->sps[k]; } - if(k >= 0) - sps = h264->sps[k]; - } - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for pic_timing.\n"); - return AVERROR_INVALIDDATA; - } - - if(sps->vui.nal_hrd_parameters_present_flag || - sps->vui.vcl_hrd_parameters_present_flag) { - const H264RawHRD *hrd; - - if(sps->vui.nal_hrd_parameters_present_flag) - hrd = &sps->vui.nal_hrd_parameters; - else if(sps->vui.vcl_hrd_parameters_present_flag) - hrd = &sps->vui.vcl_hrd_parameters; - else { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No HRD parameters for pic_timing.\n"); - return AVERROR_INVALIDDATA; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "No active SPS for pic_timing.\n"); + return AVERROR_INVALIDDATA; } - ub(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay); - ub(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay); - } - - if(sps->vui.pic_struct_present_flag) { - static const uint8_t num_clock_ts[9] = { - 1, 1, 1, 2, 2, 3, 3, 2, 3 - }; - int i; - - u(4, pic_struct, 0, 8); - if(current->pic_struct > 8) - return AVERROR_INVALIDDATA; - - for(i = 0; i < num_clock_ts[current->pic_struct]; i++) { - flags(clock_timestamp_flag[i], 1, i); - if(current->clock_timestamp_flag[i]) - CHECK(FUNC(sei_pic_timestamp)(ctx, rw, - ¤t->timestamp[i], sps)); + if (sps->vui.nal_hrd_parameters_present_flag || + sps->vui.vcl_hrd_parameters_present_flag) { + const H264RawHRD *hrd; + + if (sps->vui.nal_hrd_parameters_present_flag) + hrd = &sps->vui.nal_hrd_parameters; + else if (sps->vui.vcl_hrd_parameters_present_flag) + hrd = &sps->vui.vcl_hrd_parameters; + else { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "No HRD parameters for pic_timing.\n"); + return AVERROR_INVALIDDATA; + } + + ub(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay); + ub(hrd->dpb_output_delay_length_minus1 + 1, 
dpb_output_delay); + } + + if (sps->vui.pic_struct_present_flag) { + static const uint8_t num_clock_ts[9] = { + 1, 1, 1, 2, 2, 3, 3, 2, 3 + }; + int i; + + u(4, pic_struct, 0, 8); + if (current->pic_struct > 8) + return AVERROR_INVALIDDATA; + + for (i = 0; i < num_clock_ts[current->pic_struct]; i++) { + flags(clock_timestamp_flag[i], 1, i); + if (current->clock_timestamp_flag[i]) + CHECK(FUNC(sei_pic_timestamp)(ctx, rw, + ¤t->timestamp[i], sps)); + } } - } - return 0; + return 0; } static int FUNC(sei_pan_scan_rect)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIPanScanRect *current, - SEIMessageState *sei) { - int err, i; + H264RawSEIPanScanRect *current, + SEIMessageState *sei) +{ + int err, i; - HEADER("Pan-Scan Rectangle"); + HEADER("Pan-Scan Rectangle"); - ue(pan_scan_rect_id, 0, UINT32_MAX - 1); - flag(pan_scan_rect_cancel_flag); + ue(pan_scan_rect_id, 0, UINT32_MAX - 1); + flag(pan_scan_rect_cancel_flag); - if(!current->pan_scan_rect_cancel_flag) { - ue(pan_scan_cnt_minus1, 0, 2); + if (!current->pan_scan_rect_cancel_flag) { + ue(pan_scan_cnt_minus1, 0, 2); - for(i = 0; i <= current->pan_scan_cnt_minus1; i++) { - ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - } + for (i = 0; i <= current->pan_scan_cnt_minus1; i++) { + ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + } - ue(pan_scan_rect_repetition_period, 0, 16384); - } + ue(pan_scan_rect_repetition_period, 0, 16384); + } - return 0; + return 0; } static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw, - 
H264RawSEIRecoveryPoint *current, - SEIMessageState *sei) { - int err; + H264RawSEIRecoveryPoint *current, + SEIMessageState *sei) +{ + int err; + + HEADER("Recovery Point"); + + ue(recovery_frame_cnt, 0, 65535); + flag(exact_match_flag); + flag(broken_link_flag); + u(2, changing_slice_group_idc, 0, 2); + + return 0; +} + +static int FUNC(film_grain_characteristics)(CodedBitstreamContext *ctx, RWContext *rw, + H264RawFilmGrainCharacteristics *current, + SEIMessageState *state) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps; + int err, c, i, j; + + HEADER("Film Grain Characteristics"); + + sps = h264->active_sps; + if (!sps) { + // If there is exactly one possible SPS but it is not yet active + // then just assume that it should be the active one. + int i, k = -1; + for (i = 0; i < H264_MAX_SPS_COUNT; i++) { + if (h264->sps[i]) { + if (k >= 0) { + k = -1; + break; + } + k = i; + } + } + if (k >= 0) + sps = h264->sps[k]; + } - HEADER("Recovery Point"); + flag(film_grain_characteristics_cancel_flag); + if (!current->film_grain_characteristics_cancel_flag) { + int filmGrainBitDepth[3]; + + u(2, film_grain_model_id, 0, 1); + flag(separate_colour_description_present_flag); + if (current->separate_colour_description_present_flag) { + ub(3, film_grain_bit_depth_luma_minus8); + ub(3, film_grain_bit_depth_chroma_minus8); + flag(film_grain_full_range_flag); + ub(8, film_grain_colour_primaries); + ub(8, film_grain_transfer_characteristics); + ub(8, film_grain_matrix_coefficients); + } else { + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "No active SPS for film_grain_characteristics.\n"); + return AVERROR_INVALIDDATA; + } + infer(film_grain_bit_depth_luma_minus8, sps->bit_depth_luma_minus8); + infer(film_grain_bit_depth_chroma_minus8, sps->bit_depth_chroma_minus8); + infer(film_grain_full_range_flag, sps->vui.video_full_range_flag); + infer(film_grain_colour_primaries, sps->vui.colour_primaries); + 
infer(film_grain_transfer_characteristics, sps->vui.transfer_characteristics); + infer(film_grain_matrix_coefficients, sps->vui.matrix_coefficients); + } - ue(recovery_frame_cnt, 0, 65535); - flag(exact_match_flag); - flag(broken_link_flag); - u(2, changing_slice_group_idc, 0, 2); + filmGrainBitDepth[0] = current->film_grain_bit_depth_luma_minus8 + 8; + filmGrainBitDepth[1] = + filmGrainBitDepth[2] = current->film_grain_bit_depth_chroma_minus8 + 8; + + u(2, blending_mode_id, 0, 1); + ub(4, log2_scale_factor); + for (c = 0; c < 3; c++) + flags(comp_model_present_flag[c], 1, c); + for (c = 0; c < 3; c++) { + if (current->comp_model_present_flag[c]) { + ubs(8, num_intensity_intervals_minus1[c], 1, c); + us(3, num_model_values_minus1[c], 0, 5, 1, c); + for (i = 0; i <= current->num_intensity_intervals_minus1[c]; i++) { + ubs(8, intensity_interval_lower_bound[c][i], 2, c, i); + ubs(8, intensity_interval_upper_bound[c][i], 2, c, i); + for (j = 0; j <= current->num_model_values_minus1[c]; j++) + ses(comp_model_value[c][i][j], 0 - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), + ((1 << filmGrainBitDepth[c]) - 1) - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), + 3, c, i, j); + } + } + } + ue(film_grain_characteristics_repetition_period, 0, 16384); + } - return 0; + return 0; } static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIDisplayOrientation *current, - SEIMessageState *sei) { - int err; - - HEADER("Display Orientation"); - - flag(display_orientation_cancel_flag); - if(!current->display_orientation_cancel_flag) { - flag(hor_flip); - flag(ver_flip); - ub(16, anticlockwise_rotation); - ue(display_orientation_repetition_period, 0, 16384); - flag(display_orientation_extension_flag); - } - - return 0; + H264RawSEIDisplayOrientation *current, + SEIMessageState *sei) +{ + int err; + + HEADER("Display Orientation"); + + flag(display_orientation_cancel_flag); + if 
(!current->display_orientation_cancel_flag) { + flag(hor_flip); + flag(ver_flip); + ub(16, anticlockwise_rotation); + ue(display_orientation_repetition_period, 0, 16384); + flag(display_orientation_extension_flag); + } + + return 0; } static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEI *current) { - int err; + H264RawSEI *current) +{ + int err; - HEADER("Supplemental Enhancement Information"); + HEADER("Supplemental Enhancement Information"); - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SEI)); + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_SEI)); - CHECK(FUNC_SEI(message_list)(ctx, rw, ¤t->message_list, 1)); + CHECK(FUNC_SEI(message_list)(ctx, rw, ¤t->message_list, 1)); - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawAUD *current) { - int err; + H264RawAUD *current) +{ + int err; - HEADER("Access Unit Delimiter"); + HEADER("Access Unit Delimiter"); - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_AUD)); + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_AUD)); - ub(3, primary_pic_type); + ub(3, primary_pic_type); - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps = h264->active_sps; - int err, i, mopn; - - if(current->slice_type % 5 != 2 && - current->slice_type % 5 != 4) { - flag(ref_pic_list_modification_flag_l0); - if(current->ref_pic_list_modification_flag_l0) { - for(i = 0; i < H264_MAX_RPLM_COUNT; i++) { - xue(modification_of_pic_nums_idc, - current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3, 0); - - mopn = 
current->rplm_l0[i].modification_of_pic_nums_idc; - if(mopn == 3) - break; - - if(mopn == 0 || mopn == 1) - xue(abs_diff_pic_num_minus1, - current->rplm_l0[i].abs_diff_pic_num_minus1, - 0, (1 + current->field_pic_flag) * (1 << (sps->log2_max_frame_num_minus4 + 4)), 0); - else if(mopn == 2) - xue(long_term_pic_num, - current->rplm_l0[i].long_term_pic_num, - 0, sps->max_num_ref_frames - 1, 0); - } + H264RawSliceHeader *current) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps = h264->active_sps; + int err, i, mopn; + + if (current->slice_type % 5 != 2 && + current->slice_type % 5 != 4) { + flag(ref_pic_list_modification_flag_l0); + if (current->ref_pic_list_modification_flag_l0) { + for (i = 0; i < H264_MAX_RPLM_COUNT; i++) { + xue(modification_of_pic_nums_idc, + current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3, 0); + + mopn = current->rplm_l0[i].modification_of_pic_nums_idc; + if (mopn == 3) + break; + + if (mopn == 0 || mopn == 1) + xue(abs_diff_pic_num_minus1, + current->rplm_l0[i].abs_diff_pic_num_minus1, + 0, (1 + current->field_pic_flag) * + (1 << (sps->log2_max_frame_num_minus4 + 4)), 0); + else if (mopn == 2) + xue(long_term_pic_num, + current->rplm_l0[i].long_term_pic_num, + 0, sps->max_num_ref_frames - 1, 0); + } + } } - } - - if(current->slice_type % 5 == 1) { - flag(ref_pic_list_modification_flag_l1); - if(current->ref_pic_list_modification_flag_l1) { - for(i = 0; i < H264_MAX_RPLM_COUNT; i++) { - xue(modification_of_pic_nums_idc, - current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3, 0); - - mopn = current->rplm_l1[i].modification_of_pic_nums_idc; - if(mopn == 3) - break; - - if(mopn == 0 || mopn == 1) - xue(abs_diff_pic_num_minus1, - current->rplm_l1[i].abs_diff_pic_num_minus1, - 0, (1 + current->field_pic_flag) * (1 << (sps->log2_max_frame_num_minus4 + 4)), 0); - else if(mopn == 2) - xue(long_term_pic_num, - current->rplm_l1[i].long_term_pic_num, - 0, sps->max_num_ref_frames - 1, 0); - } + + if 
(current->slice_type % 5 == 1) { + flag(ref_pic_list_modification_flag_l1); + if (current->ref_pic_list_modification_flag_l1) { + for (i = 0; i < H264_MAX_RPLM_COUNT; i++) { + xue(modification_of_pic_nums_idc, + current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3, 0); + + mopn = current->rplm_l1[i].modification_of_pic_nums_idc; + if (mopn == 3) + break; + + if (mopn == 0 || mopn == 1) + xue(abs_diff_pic_num_minus1, + current->rplm_l1[i].abs_diff_pic_num_minus1, + 0, (1 + current->field_pic_flag) * + (1 << (sps->log2_max_frame_num_minus4 + 4)), 0); + else if (mopn == 2) + xue(long_term_pic_num, + current->rplm_l1[i].long_term_pic_num, + 0, sps->max_num_ref_frames - 1, 0); + } + } } - } - return 0; + return 0; } static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps = h264->active_sps; - int chroma; - int err, i, j; - - ue(luma_log2_weight_denom, 0, 7); - - chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0; - if(chroma) - ue(chroma_log2_weight_denom, 0, 7); - - for(i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - flags(luma_weight_l0_flag[i], 1, i); - if(current->luma_weight_l0_flag[i]) { - ses(luma_weight_l0[i], -128, +127, 1, i); - ses(luma_offset_l0[i], -128, +127, 1, i); - } - if(chroma) { - flags(chroma_weight_l0_flag[i], 1, i); - if(current->chroma_weight_l0_flag[i]) { - for(j = 0; j < 2; j++) { - ses(chroma_weight_l0[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l0[i][j], -128, +127, 2, i, j); + H264RawSliceHeader *current) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps = h264->active_sps; + int chroma; + int err, i, j; + + ue(luma_log2_weight_denom, 0, 7); + + chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0; + if (chroma) + ue(chroma_log2_weight_denom, 0, 7); + + for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { + 
flags(luma_weight_l0_flag[i], 1, i); + if (current->luma_weight_l0_flag[i]) { + ses(luma_weight_l0[i], -128, +127, 1, i); + ses(luma_offset_l0[i], -128, +127, 1, i); + } + if (chroma) { + flags(chroma_weight_l0_flag[i], 1, i); + if (current->chroma_weight_l0_flag[i]) { + for (j = 0; j < 2; j++) { + ses(chroma_weight_l0[i][j], -128, +127, 2, i, j); + ses(chroma_offset_l0[i][j], -128, +127, 2, i, j); + } + } } - } } - } - - if(current->slice_type % 5 == 1) { - for(i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - flags(luma_weight_l1_flag[i], 1, i); - if(current->luma_weight_l1_flag[i]) { - ses(luma_weight_l1[i], -128, +127, 1, i); - ses(luma_offset_l1[i], -128, +127, 1, i); - } - if(chroma) { - flags(chroma_weight_l1_flag[i], 1, i); - if(current->chroma_weight_l1_flag[i]) { - for(j = 0; j < 2; j++) { - ses(chroma_weight_l1[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l1[i][j], -128, +127, 2, i, j); - } + + if (current->slice_type % 5 == 1) { + for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { + flags(luma_weight_l1_flag[i], 1, i); + if (current->luma_weight_l1_flag[i]) { + ses(luma_weight_l1[i], -128, +127, 1, i); + ses(luma_offset_l1[i], -128, +127, 1, i); + } + if (chroma) { + flags(chroma_weight_l1_flag[i], 1, i); + if (current->chroma_weight_l1_flag[i]) { + for (j = 0; j < 2; j++) { + ses(chroma_weight_l1[i][j], -128, +127, 2, i, j); + ses(chroma_offset_l1[i][j], -128, +127, 2, i, j); + } + } + } } - } } - } - return 0; + return 0; } static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current, int idr_pic_flag) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps = h264->active_sps; - int err, i; - uint32_t mmco; - - if(idr_pic_flag) { - flag(no_output_of_prior_pics_flag); - flag(long_term_reference_flag); - } - else { - flag(adaptive_ref_pic_marking_mode_flag); - if(current->adaptive_ref_pic_marking_mode_flag) { - for(i = 0; i < H264_MAX_MMCO_COUNT; i++) { - 
xue(memory_management_control_operation, - current->mmco[i].memory_management_control_operation, - 0, 6, 0); - - mmco = current->mmco[i].memory_management_control_operation; - if(mmco == 0) - break; - - if(mmco == 1 || mmco == 3) - xue(difference_of_pic_nums_minus1, - current->mmco[i].difference_of_pic_nums_minus1, - 0, INT32_MAX, 0); - if(mmco == 2) - xue(long_term_pic_num, - current->mmco[i].long_term_pic_num, - 0, sps->max_num_ref_frames - 1, 0); - if(mmco == 3 || mmco == 6) - xue(long_term_frame_idx, - current->mmco[i].long_term_frame_idx, - 0, sps->max_num_ref_frames - 1, 0); - if(mmco == 4) - xue(max_long_term_frame_idx_plus1, - current->mmco[i].max_long_term_frame_idx_plus1, - 0, sps->max_num_ref_frames, 0); - } - if(i == H264_MAX_MMCO_COUNT) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many " - "memory management control operations.\n"); - return AVERROR_INVALIDDATA; - } + H264RawSliceHeader *current, int idr_pic_flag) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps = h264->active_sps; + int err, i; + uint32_t mmco; + + if (idr_pic_flag) { + flag(no_output_of_prior_pics_flag); + flag(long_term_reference_flag); + } else { + flag(adaptive_ref_pic_marking_mode_flag); + if (current->adaptive_ref_pic_marking_mode_flag) { + for (i = 0; i < H264_MAX_MMCO_COUNT; i++) { + xue(memory_management_control_operation, + current->mmco[i].memory_management_control_operation, + 0, 6, 0); + + mmco = current->mmco[i].memory_management_control_operation; + if (mmco == 0) + break; + + if (mmco == 1 || mmco == 3) + xue(difference_of_pic_nums_minus1, + current->mmco[i].difference_of_pic_nums_minus1, + 0, INT32_MAX, 0); + if (mmco == 2) + xue(long_term_pic_num, + current->mmco[i].long_term_pic_num, + 0, sps->max_num_ref_frames - 1, 0); + if (mmco == 3 || mmco == 6) + xue(long_term_frame_idx, + current->mmco[i].long_term_frame_idx, + 0, sps->max_num_ref_frames - 1, 0); + if (mmco == 4) + xue(max_long_term_frame_idx_plus1, + 
current->mmco[i].max_long_term_frame_idx_plus1, + 0, sps->max_num_ref_frames, 0); + } + if (i == H264_MAX_MMCO_COUNT) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many " + "memory management control operations.\n"); + return AVERROR_INVALIDDATA; + } + } } - } - return 0; + return 0; } static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current) { - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - const H264RawPPS *pps; - int err; - int idr_pic_flag; - int slice_type_i, slice_type_p, slice_type_b; - int slice_type_si, slice_type_sp; - - HEADER("Slice Header"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SLICE | - 1 << H264_NAL_IDR_SLICE | - 1 << H264_NAL_AUXILIARY_SLICE)); - - if(current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) { - if(!h264->last_slice_nal_unit_type) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice " - "is not decodable without the main picture " - "in the same access unit.\n"); - return AVERROR_INVALIDDATA; + H264RawSliceHeader *current) +{ + CodedBitstreamH264Context *h264 = ctx->priv_data; + const H264RawSPS *sps; + const H264RawPPS *pps; + int err; + int idr_pic_flag; + int slice_type_i, slice_type_p, slice_type_b; + int slice_type_si, slice_type_sp; + + HEADER("Slice Header"); + + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_SLICE | + 1 << H264_NAL_IDR_SLICE | + 1 << H264_NAL_AUXILIARY_SLICE)); + + if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) { + if (!h264->last_slice_nal_unit_type) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice " + "is not decodable without the main picture " + "in the same access unit.\n"); + return AVERROR_INVALIDDATA; + } + idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE; + } else { + idr_pic_flag = current->nal_unit_header.nal_unit_type == H264_NAL_IDR_SLICE; + } + + ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE 
- 1); + ue(slice_type, 0, 9); + + slice_type_i = current->slice_type % 5 == 2; + slice_type_p = current->slice_type % 5 == 0; + slice_type_b = current->slice_type % 5 == 1; + slice_type_si = current->slice_type % 5 == 4; + slice_type_sp = current->slice_type % 5 == 3; + + if (idr_pic_flag && !(slice_type_i || slice_type_si)) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d " + "for IDR picture.\n", current->slice_type); + return AVERROR_INVALIDDATA; } - idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE; - } - else { - idr_pic_flag = current->nal_unit_header.nal_unit_type == H264_NAL_IDR_SLICE; - } - - ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1); - ue(slice_type, 0, 9); - - slice_type_i = current->slice_type % 5 == 2; - slice_type_p = current->slice_type % 5 == 0; - slice_type_b = current->slice_type % 5 == 1; - slice_type_si = current->slice_type % 5 == 4; - slice_type_sp = current->slice_type % 5 == 3; - - if(idr_pic_flag && !(slice_type_i || slice_type_si)) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d " - "for IDR picture.\n", - current->slice_type); - return AVERROR_INVALIDDATA; - } - - ue(pic_parameter_set_id, 0, 255); - - pps = h264->pps[current->pic_parameter_set_id]; - if(!pps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n", - current->pic_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h264->active_pps = pps; - - sps = h264->sps[pps->seq_parameter_set_id]; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - pps->seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h264->active_sps = sps; - - if(sps->separate_colour_plane_flag) - u(2, colour_plane_id, 0, 2); - - ub(sps->log2_max_frame_num_minus4 + 4, frame_num); - - if(!sps->frame_mbs_only_flag) { - flag(field_pic_flag); - if(current->field_pic_flag) - flag(bottom_field_flag); + + ue(pic_parameter_set_id, 0, 255); + + pps = h264->pps[current->pic_parameter_set_id]; + if (!pps) { + 
av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n", + current->pic_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h264->active_pps = pps; + + sps = h264->sps[pps->seq_parameter_set_id]; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", + pps->seq_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h264->active_sps = sps; + + if (sps->separate_colour_plane_flag) + u(2, colour_plane_id, 0, 2); + + ub(sps->log2_max_frame_num_minus4 + 4, frame_num); + + if (!sps->frame_mbs_only_flag) { + flag(field_pic_flag); + if (current->field_pic_flag) + flag(bottom_field_flag); + else + infer(bottom_field_flag, 0); + } else { + infer(field_pic_flag, 0); + infer(bottom_field_flag, 0); + } + + if (idr_pic_flag) + ue(idr_pic_id, 0, 65535); + + if (sps->pic_order_cnt_type == 0) { + ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb); + if (pps->bottom_field_pic_order_in_frame_present_flag && + !current->field_pic_flag) + se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX); + + } else if (sps->pic_order_cnt_type == 1) { + if (!sps->delta_pic_order_always_zero_flag) { + se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX); + if (pps->bottom_field_pic_order_in_frame_present_flag && + !current->field_pic_flag) + se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX); + else + infer(delta_pic_order_cnt[1], 0); + } else { + infer(delta_pic_order_cnt[0], 0); + infer(delta_pic_order_cnt[1], 0); + } + } + + if (pps->redundant_pic_cnt_present_flag) + ue(redundant_pic_cnt, 0, 127); else - infer(bottom_field_flag, 0); - } - else { - infer(field_pic_flag, 0); - infer(bottom_field_flag, 0); - } - - if(idr_pic_flag) - ue(idr_pic_id, 0, 65535); - - if(sps->pic_order_cnt_type == 0) { - ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb); - if(pps->bottom_field_pic_order_in_frame_present_flag && - !current->field_pic_flag) - se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX); - } - else 
if(sps->pic_order_cnt_type == 1) { - if(!sps->delta_pic_order_always_zero_flag) { - se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX); - if(pps->bottom_field_pic_order_in_frame_present_flag && - !current->field_pic_flag) - se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX); - else - infer(delta_pic_order_cnt[1], 0); + infer(redundant_pic_cnt, 0); + + if (current->nal_unit_header.nal_unit_type != H264_NAL_AUXILIARY_SLICE + && !current->redundant_pic_cnt) + h264->last_slice_nal_unit_type = + current->nal_unit_header.nal_unit_type; + + if (slice_type_b) + flag(direct_spatial_mv_pred_flag); + + if (slice_type_p || slice_type_sp || slice_type_b) { + flag(num_ref_idx_active_override_flag); + if (current->num_ref_idx_active_override_flag) { + ue(num_ref_idx_l0_active_minus1, 0, 31); + if (slice_type_b) + ue(num_ref_idx_l1_active_minus1, 0, 31); + } else { + infer(num_ref_idx_l0_active_minus1, + pps->num_ref_idx_l0_default_active_minus1); + infer(num_ref_idx_l1_active_minus1, + pps->num_ref_idx_l1_default_active_minus1); + } } - else { - infer(delta_pic_order_cnt[0], 0); - infer(delta_pic_order_cnt[1], 0); + + if (current->nal_unit_header.nal_unit_type == 20 || + current->nal_unit_header.nal_unit_type == 21) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n"); + return AVERROR_PATCHWELCOME; + } else { + CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current)); } - } - - if(pps->redundant_pic_cnt_present_flag) - ue(redundant_pic_cnt, 0, 127); - else - infer(redundant_pic_cnt, 0); - - if(current->nal_unit_header.nal_unit_type != H264_NAL_AUXILIARY_SLICE && !current->redundant_pic_cnt) - h264->last_slice_nal_unit_type = - current->nal_unit_header.nal_unit_type; - - if(slice_type_b) - flag(direct_spatial_mv_pred_flag); - - if(slice_type_p || slice_type_sp || slice_type_b) { - flag(num_ref_idx_active_override_flag); - if(current->num_ref_idx_active_override_flag) { - ue(num_ref_idx_l0_active_minus1, 0, 31); - if(slice_type_b) - 
ue(num_ref_idx_l1_active_minus1, 0, 31); + + if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) || + (pps->weighted_bipred_idc == 1 && slice_type_b)) { + CHECK(FUNC(pred_weight_table)(ctx, rw, current)); } - else { - infer(num_ref_idx_l0_active_minus1, - pps->num_ref_idx_l0_default_active_minus1); - infer(num_ref_idx_l1_active_minus1, - pps->num_ref_idx_l1_default_active_minus1); + + if (current->nal_unit_header.nal_ref_idc != 0) { + CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag)); } - } - - if(current->nal_unit_header.nal_unit_type == 20 || - current->nal_unit_header.nal_unit_type == 21) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n"); - return AVERROR_PATCHWELCOME; - } - else { - CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current)); - } - - if((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) || - (pps->weighted_bipred_idc == 1 && slice_type_b)) { - CHECK(FUNC(pred_weight_table)(ctx, rw, current)); - } - - if(current->nal_unit_header.nal_ref_idc != 0) { - CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag)); - } - - if(pps->entropy_coding_mode_flag && - !slice_type_i && !slice_type_si) { - ue(cabac_init_idc, 0, 2); - } - - se(slice_qp_delta, -51 - 6 * sps->bit_depth_luma_minus8, - +51 + 6 * sps->bit_depth_luma_minus8); - if(slice_type_sp || slice_type_si) { - if(slice_type_sp) - flag(sp_for_switch_flag); - se(slice_qs_delta, -51, +51); - } - - if(pps->deblocking_filter_control_present_flag) { - ue(disable_deblocking_filter_idc, 0, 2); - if(current->disable_deblocking_filter_idc != 1) { - se(slice_alpha_c0_offset_div2, -6, +6); - se(slice_beta_offset_div2, -6, +6); + + if (pps->entropy_coding_mode_flag && + !slice_type_i && !slice_type_si) { + ue(cabac_init_idc, 0, 2); } - else { - infer(slice_alpha_c0_offset_div2, 0); - infer(slice_beta_offset_div2, 0); + + se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8, + + 51 + 6 * sps->bit_depth_luma_minus8); + if (slice_type_sp || 
slice_type_si) { + if (slice_type_sp) + flag(sp_for_switch_flag); + se(slice_qs_delta, -51, +51); } - } - else { - infer(disable_deblocking_filter_idc, 0); - infer(slice_alpha_c0_offset_div2, 0); - infer(slice_beta_offset_div2, 0); - } - - if(pps->num_slice_groups_minus1 > 0 && - pps->slice_group_map_type >= 3 && - pps->slice_group_map_type <= 5) { - unsigned int pic_size, max, bits; - - pic_size = (sps->pic_width_in_mbs_minus1 + 1) * - (sps->pic_height_in_map_units_minus1 + 1); - max = (pic_size + pps->slice_group_change_rate_minus1) / - (pps->slice_group_change_rate_minus1 + 1); - bits = av_ceil_log2(max + 1); - - u(bits, slice_group_change_cycle, 0, max); - } - - if(pps->entropy_coding_mode_flag) { - while(byte_alignment(rw)) - fixed(1, cabac_alignment_one_bit, 1); - } - - return 0; + + if (pps->deblocking_filter_control_present_flag) { + ue(disable_deblocking_filter_idc, 0, 2); + if (current->disable_deblocking_filter_idc != 1) { + se(slice_alpha_c0_offset_div2, -6, +6); + se(slice_beta_offset_div2, -6, +6); + } else { + infer(slice_alpha_c0_offset_div2, 0); + infer(slice_beta_offset_div2, 0); + } + } else { + infer(disable_deblocking_filter_idc, 0); + infer(slice_alpha_c0_offset_div2, 0); + infer(slice_beta_offset_div2, 0); + } + + if (pps->num_slice_groups_minus1 > 0 && + pps->slice_group_map_type >= 3 && + pps->slice_group_map_type <= 5) { + unsigned int pic_size, max, bits; + + pic_size = (sps->pic_width_in_mbs_minus1 + 1) * + (sps->pic_height_in_map_units_minus1 + 1); + max = (pic_size + pps->slice_group_change_rate_minus1) / + (pps->slice_group_change_rate_minus1 + 1); + bits = av_ceil_log2(max + 1); + + u(bits, slice_group_change_cycle, 0, max); + } + + if (pps->entropy_coding_mode_flag) { + while (byte_alignment(rw)) + fixed(1, cabac_alignment_one_bit, 1); + } + + return 0; } static int FUNC(filler)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawFiller *current) { - int err; + H264RawFiller *current) +{ + int err; - HEADER("Filler Data"); + 
HEADER("Filler Data"); - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_FILLER_DATA)); + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + 1 << H264_NAL_FILLER_DATA)); #ifdef READ - while(show_bits(rw, 8) == 0xff) { - fixed(8, ff_byte, 0xff); - ++current->filler_size; - } + while (show_bits(rw, 8) == 0xff) { + fixed(8, ff_byte, 0xff); + ++current->filler_size; + } #else - { - uint32_t i; - for(i = 0; i < current->filler_size; i++) - fixed(8, ff_byte, 0xff); - } + { + uint32_t i; + for (i = 0; i < current->filler_size; i++) + fixed(8, ff_byte, 0xff); + } #endif - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(end_of_sequence)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawNALUnitHeader *current) { - HEADER("End of Sequence"); + H264RawNALUnitHeader *current) +{ + HEADER("End of Sequence"); - return FUNC(nal_unit_header)(ctx, rw, current, - 1 << H264_NAL_END_SEQUENCE); + return FUNC(nal_unit_header)(ctx, rw, current, + 1 << H264_NAL_END_SEQUENCE); } static int FUNC(end_of_stream)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawNALUnitHeader *current) { - HEADER("End of Stream"); + H264RawNALUnitHeader *current) +{ + HEADER("End of Stream"); - return FUNC(nal_unit_header)(ctx, rw, current, - 1 << H264_NAL_END_STREAM); + return FUNC(nal_unit_header)(ctx, rw, current, + 1 << H264_NAL_END_STREAM); } diff --git a/third-party/cbs/cbs_h265_syntax_template.c b/third-party/cbs/cbs_h265_syntax_template.c index 7ceefc6f314..2d4b9547185 100644 --- a/third-party/cbs/cbs_h265_syntax_template.c +++ b/third-party/cbs/cbs_h265_syntax_template.c @@ -16,2023 +16,2086 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) { - int err; +static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) +{ + int err; - fixed(1, 
rbsp_stop_one_bit, 1); - while(byte_alignment(rw) != 0) - fixed(1, rbsp_alignment_zero_bit, 0); + fixed(1, rbsp_stop_one_bit, 1); + while (byte_alignment(rw) != 0) + fixed(1, rbsp_alignment_zero_bit, 0); - return 0; + return 0; } static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawNALUnitHeader *current, - int expected_nal_unit_type) { - int err; + H265RawNALUnitHeader *current, + int expected_nal_unit_type) +{ + int err; - fixed(1, forbidden_zero_bit, 0); + fixed(1, forbidden_zero_bit, 0); - if(expected_nal_unit_type >= 0) - u(6, nal_unit_type, expected_nal_unit_type, - expected_nal_unit_type); - else - ub(6, nal_unit_type); + if (expected_nal_unit_type >= 0) + u(6, nal_unit_type, expected_nal_unit_type, + expected_nal_unit_type); + else + ub(6, nal_unit_type); - u(6, nuh_layer_id, 0, 62); - u(3, nuh_temporal_id_plus1, 1, 7); + u(6, nuh_layer_id, 0, 62); + u(3, nuh_temporal_id_plus1, 1, 7); - return 0; + return 0; } -static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw) { - int err; +static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw) +{ + int err; - fixed(1, alignment_bit_equal_to_one, 1); - while(byte_alignment(rw) != 0) - fixed(1, alignment_bit_equal_to_zero, 0); + fixed(1, alignment_bit_equal_to_one, 1); + while (byte_alignment(rw) != 0) + fixed(1, alignment_bit_equal_to_zero, 0); - return 0; + return 0; } static int FUNC(extension_data)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawExtensionData *current) { - int err; - size_t k; + H265RawExtensionData *current) +{ + int err; + size_t k; #ifdef READ - GetBitContext start; - uint8_t bit; - start = *rw; - for(k = 0; cbs_h2645_read_more_rbsp_data(rw); k++) - skip_bits(rw, 1); - current->bit_length = k; - if(k > 0) { - *rw = start; - allocate(current->data, (current->bit_length + 7) / 8); - for(k = 0; k < current->bit_length; k++) { - xu(1, extension_data, bit, 0, 1, 0); - current->data[k / 8] |= bit << (7 - k % 8); - } - } + 
GetBitContext start; + uint8_t bit; + start = *rw; + for (k = 0; cbs_h2645_read_more_rbsp_data(rw); k++) + skip_bits(rw, 1); + current->bit_length = k; + if (k > 0) { + *rw = start; + allocate(current->data, (current->bit_length + 7) / 8); + for (k = 0; k < current->bit_length; k++) { + xu(1, extension_data, bit, 0, 1, 0); + current->data[k / 8] |= bit << (7 - k % 8); + } + } #else - for(k = 0; k < current->bit_length; k++) - xu(1, extension_data, current->data[k / 8] >> (7 - k % 8) & 1, 0, 1, 0); + for (k = 0; k < current->bit_length; k++) + xu(1, extension_data, current->data[k / 8] >> (7 - k % 8) & 1, 0, 1, 0); #endif - return 0; + return 0; } static int FUNC(profile_tier_level)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawProfileTierLevel *current, - int profile_present_flag, - int max_num_sub_layers_minus1) { - int err, i, j; + H265RawProfileTierLevel *current, + int profile_present_flag, + int max_num_sub_layers_minus1) +{ + int err, i, j; - if(profile_present_flag) { - u(2, general_profile_space, 0, 0); - flag(general_tier_flag); - ub(5, general_profile_idc); + if (profile_present_flag) { + u(2, general_profile_space, 0, 0); + flag(general_tier_flag); + ub(5, general_profile_idc); - for(j = 0; j < 32; j++) - flags(general_profile_compatibility_flag[j], 1, j); + for (j = 0; j < 32; j++) + flags(general_profile_compatibility_flag[j], 1, j); - flag(general_progressive_source_flag); - flag(general_interlaced_source_flag); - flag(general_non_packed_constraint_flag); - flag(general_frame_only_constraint_flag); + flag(general_progressive_source_flag); + flag(general_interlaced_source_flag); + flag(general_non_packed_constraint_flag); + flag(general_frame_only_constraint_flag); #define profile_compatible(x) (current->general_profile_idc == (x) || \ current->general_profile_compatibility_flag[x]) - if(profile_compatible(4) || profile_compatible(5) || - profile_compatible(6) || profile_compatible(7) || - profile_compatible(8) || profile_compatible(9) || - 
profile_compatible(10)) { - flag(general_max_12bit_constraint_flag); - flag(general_max_10bit_constraint_flag); - flag(general_max_8bit_constraint_flag); - flag(general_max_422chroma_constraint_flag); - flag(general_max_420chroma_constraint_flag); - flag(general_max_monochrome_constraint_flag); - flag(general_intra_constraint_flag); - flag(general_one_picture_only_constraint_flag); - flag(general_lower_bit_rate_constraint_flag); - - if(profile_compatible(5) || profile_compatible(9) || - profile_compatible(10)) { - flag(general_max_14bit_constraint_flag); - fixed(24, general_reserved_zero_33bits, 0); - fixed(9, general_reserved_zero_33bits, 0); - } - else { - fixed(24, general_reserved_zero_34bits, 0); - fixed(10, general_reserved_zero_34bits, 0); - } - } - else if(profile_compatible(2)) { - fixed(7, general_reserved_zero_7bits, 0); - flag(general_one_picture_only_constraint_flag); - fixed(24, general_reserved_zero_35bits, 0); - fixed(11, general_reserved_zero_35bits, 0); - } - else { - fixed(24, general_reserved_zero_43bits, 0); - fixed(19, general_reserved_zero_43bits, 0); - } - - if(profile_compatible(1) || profile_compatible(2) || - profile_compatible(3) || profile_compatible(4) || - profile_compatible(5) || profile_compatible(9)) { - flag(general_inbld_flag); - } - else { - fixed(1, general_reserved_zero_bit, 0); - } + if (profile_compatible(4) || profile_compatible(5) || + profile_compatible(6) || profile_compatible(7) || + profile_compatible(8) || profile_compatible(9) || + profile_compatible(10) || profile_compatible(11)) { + flag(general_max_12bit_constraint_flag); + flag(general_max_10bit_constraint_flag); + flag(general_max_8bit_constraint_flag); + flag(general_max_422chroma_constraint_flag); + flag(general_max_420chroma_constraint_flag); + flag(general_max_monochrome_constraint_flag); + flag(general_intra_constraint_flag); + flag(general_one_picture_only_constraint_flag); + flag(general_lower_bit_rate_constraint_flag); + + if (profile_compatible(5) || 
profile_compatible(9) || + profile_compatible(10) || profile_compatible(11)) { + flag(general_max_14bit_constraint_flag); + fixed(24, general_reserved_zero_33bits, 0); + fixed( 9, general_reserved_zero_33bits, 0); + } else { + fixed(24, general_reserved_zero_34bits, 0); + fixed(10, general_reserved_zero_34bits, 0); + } + } else if (profile_compatible(2)) { + fixed(7, general_reserved_zero_7bits, 0); + flag(general_one_picture_only_constraint_flag); + fixed(24, general_reserved_zero_35bits, 0); + fixed(11, general_reserved_zero_35bits, 0); + } else { + fixed(24, general_reserved_zero_43bits, 0); + fixed(19, general_reserved_zero_43bits, 0); + } + + if (profile_compatible(1) || profile_compatible(2) || + profile_compatible(3) || profile_compatible(4) || + profile_compatible(5) || profile_compatible(9) || + profile_compatible(11)) { + flag(general_inbld_flag); + } else { + fixed(1, general_reserved_zero_bit, 0); + } #undef profile_compatible - } + } - ub(8, general_level_idc); + ub(8, general_level_idc); - for(i = 0; i < max_num_sub_layers_minus1; i++) { - flags(sub_layer_profile_present_flag[i], 1, i); - flags(sub_layer_level_present_flag[i], 1, i); - } + for (i = 0; i < max_num_sub_layers_minus1; i++) { + flags(sub_layer_profile_present_flag[i], 1, i); + flags(sub_layer_level_present_flag[i], 1, i); + } - if(max_num_sub_layers_minus1 > 0) { - for(i = max_num_sub_layers_minus1; i < 8; i++) - fixed(2, reserved_zero_2bits, 0); - } + if (max_num_sub_layers_minus1 > 0) { + for (i = max_num_sub_layers_minus1; i < 8; i++) + fixed(2, reserved_zero_2bits, 0); + } - for(i = 0; i < max_num_sub_layers_minus1; i++) { - if(current->sub_layer_profile_present_flag[i]) { - us(2, sub_layer_profile_space[i], 0, 0, 1, i); - flags(sub_layer_tier_flag[i], 1, i); - ubs(5, sub_layer_profile_idc[i], 1, i); + for (i = 0; i < max_num_sub_layers_minus1; i++) { + if (current->sub_layer_profile_present_flag[i]) { + us(2, sub_layer_profile_space[i], 0, 0, 1, i); + flags(sub_layer_tier_flag[i], 1, 
i); + ubs(5, sub_layer_profile_idc[i], 1, i); - for(j = 0; j < 32; j++) - flags(sub_layer_profile_compatibility_flag[i][j], 2, i, j); + for (j = 0; j < 32; j++) + flags(sub_layer_profile_compatibility_flag[i][j], 2, i, j); - flags(sub_layer_progressive_source_flag[i], 1, i); - flags(sub_layer_interlaced_source_flag[i], 1, i); - flags(sub_layer_non_packed_constraint_flag[i], 1, i); - flags(sub_layer_frame_only_constraint_flag[i], 1, i); + flags(sub_layer_progressive_source_flag[i], 1, i); + flags(sub_layer_interlaced_source_flag[i], 1, i); + flags(sub_layer_non_packed_constraint_flag[i], 1, i); + flags(sub_layer_frame_only_constraint_flag[i], 1, i); -#define profile_compatible(x) (current->sub_layer_profile_idc[i] == (x) || \ +#define profile_compatible(x) (current->sub_layer_profile_idc[i] == (x) || \ current->sub_layer_profile_compatibility_flag[i][x]) - if(profile_compatible(4) || profile_compatible(5) || - profile_compatible(6) || profile_compatible(7) || - profile_compatible(8) || profile_compatible(9) || - profile_compatible(10)) { - flags(sub_layer_max_12bit_constraint_flag[i], 1, i); - flags(sub_layer_max_10bit_constraint_flag[i], 1, i); - flags(sub_layer_max_8bit_constraint_flag[i], 1, i); - flags(sub_layer_max_422chroma_constraint_flag[i], 1, i); - flags(sub_layer_max_420chroma_constraint_flag[i], 1, i); - flags(sub_layer_max_monochrome_constraint_flag[i], 1, i); - flags(sub_layer_intra_constraint_flag[i], 1, i); - flags(sub_layer_one_picture_only_constraint_flag[i], 1, i); - flags(sub_layer_lower_bit_rate_constraint_flag[i], 1, i); - - if(profile_compatible(5)) { - flags(sub_layer_max_14bit_constraint_flag[i], 1, i); - fixed(24, sub_layer_reserved_zero_33bits, 0); - fixed(9, sub_layer_reserved_zero_33bits, 0); - } - else { - fixed(24, sub_layer_reserved_zero_34bits, 0); - fixed(10, sub_layer_reserved_zero_34bits, 0); - } - } - else if(profile_compatible(2)) { - fixed(7, sub_layer_reserved_zero_7bits, 0); - 
flags(sub_layer_one_picture_only_constraint_flag[i], 1, i); - fixed(24, sub_layer_reserved_zero_43bits, 0); - fixed(11, sub_layer_reserved_zero_43bits, 0); - } - else { - fixed(24, sub_layer_reserved_zero_43bits, 0); - fixed(19, sub_layer_reserved_zero_43bits, 0); - } - - if(profile_compatible(1) || profile_compatible(2) || - profile_compatible(3) || profile_compatible(4) || - profile_compatible(5) || profile_compatible(9)) { - flags(sub_layer_inbld_flag[i], 1, i); - } - else { - fixed(1, sub_layer_reserved_zero_bit, 0); - } + if (profile_compatible(4) || profile_compatible(5) || + profile_compatible(6) || profile_compatible(7) || + profile_compatible(8) || profile_compatible(9) || + profile_compatible(10) || profile_compatible(11)) { + flags(sub_layer_max_12bit_constraint_flag[i], 1, i); + flags(sub_layer_max_10bit_constraint_flag[i], 1, i); + flags(sub_layer_max_8bit_constraint_flag[i], 1, i); + flags(sub_layer_max_422chroma_constraint_flag[i], 1, i); + flags(sub_layer_max_420chroma_constraint_flag[i], 1, i); + flags(sub_layer_max_monochrome_constraint_flag[i], 1, i); + flags(sub_layer_intra_constraint_flag[i], 1, i); + flags(sub_layer_one_picture_only_constraint_flag[i], 1, i); + flags(sub_layer_lower_bit_rate_constraint_flag[i], 1, i); + + if (profile_compatible(5) || profile_compatible(9) || + profile_compatible(10) || profile_compatible(11)) { + flags(sub_layer_max_14bit_constraint_flag[i], 1, i); + fixed(24, sub_layer_reserved_zero_33bits, 0); + fixed( 9, sub_layer_reserved_zero_33bits, 0); + } else { + fixed(24, sub_layer_reserved_zero_34bits, 0); + fixed(10, sub_layer_reserved_zero_34bits, 0); + } + } else if (profile_compatible(2)) { + fixed(7, sub_layer_reserved_zero_7bits, 0); + flags(sub_layer_one_picture_only_constraint_flag[i], 1, i); + fixed(24, sub_layer_reserved_zero_43bits, 0); + fixed(11, sub_layer_reserved_zero_43bits, 0); + } else { + fixed(24, sub_layer_reserved_zero_43bits, 0); + fixed(19, sub_layer_reserved_zero_43bits, 0); + } + + if 
(profile_compatible(1) || profile_compatible(2) || + profile_compatible(3) || profile_compatible(4) || + profile_compatible(5) || profile_compatible(9) || + profile_compatible(11)) { + flags(sub_layer_inbld_flag[i], 1, i); + } else { + fixed(1, sub_layer_reserved_zero_bit, 0); + } #undef profile_compatible + } + if (current->sub_layer_level_present_flag[i]) + ubs(8, sub_layer_level_idc[i], 1, i); } - if(current->sub_layer_level_present_flag[i]) - ubs(8, sub_layer_level_idc[i], 1, i); - } - return 0; + return 0; } static int FUNC(sub_layer_hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawHRDParameters *hrd, - int nal, int sub_layer_id) { - H265RawSubLayerHRDParameters *current; - int err, i; - - if(nal) - current = &hrd->nal_sub_layer_hrd_parameters[sub_layer_id]; - else - current = &hrd->vcl_sub_layer_hrd_parameters[sub_layer_id]; - - for(i = 0; i <= hrd->cpb_cnt_minus1[sub_layer_id]; i++) { - ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - if(hrd->sub_pic_hrd_params_present_flag) { - ues(cpb_size_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - ues(bit_rate_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - } - flags(cbr_flag[i], 1, i); - } - - return 0; + H265RawHRDParameters *hrd, + int nal, int sub_layer_id) +{ + H265RawSubLayerHRDParameters *current; + int err, i; + + if (nal) + current = &hrd->nal_sub_layer_hrd_parameters[sub_layer_id]; + else + current = &hrd->vcl_sub_layer_hrd_parameters[sub_layer_id]; + + for (i = 0; i <= hrd->cpb_cnt_minus1[sub_layer_id]; i++) { + ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i); + ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i); + if (hrd->sub_pic_hrd_params_present_flag) { + ues(cpb_size_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i); + ues(bit_rate_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i); + } + flags(cbr_flag[i], 1, i); + } + + return 0; } static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - 
H265RawHRDParameters *current, int common_inf_present_flag, - int max_num_sub_layers_minus1) { - int err, i; - - if(common_inf_present_flag) { - flag(nal_hrd_parameters_present_flag); - flag(vcl_hrd_parameters_present_flag); - - if(current->nal_hrd_parameters_present_flag || - current->vcl_hrd_parameters_present_flag) { - flag(sub_pic_hrd_params_present_flag); - if(current->sub_pic_hrd_params_present_flag) { - ub(8, tick_divisor_minus2); - ub(5, du_cpb_removal_delay_increment_length_minus1); - flag(sub_pic_cpb_params_in_pic_timing_sei_flag); - ub(5, dpb_output_delay_du_length_minus1); - } - - ub(4, bit_rate_scale); - ub(4, cpb_size_scale); - if(current->sub_pic_hrd_params_present_flag) - ub(4, cpb_size_du_scale); - - ub(5, initial_cpb_removal_delay_length_minus1); - ub(5, au_cpb_removal_delay_length_minus1); - ub(5, dpb_output_delay_length_minus1); - } - else { - infer(sub_pic_hrd_params_present_flag, 0); - - infer(initial_cpb_removal_delay_length_minus1, 23); - infer(au_cpb_removal_delay_length_minus1, 23); - infer(dpb_output_delay_length_minus1, 23); - } - } - - for(i = 0; i <= max_num_sub_layers_minus1; i++) { - flags(fixed_pic_rate_general_flag[i], 1, i); - - if(!current->fixed_pic_rate_general_flag[i]) - flags(fixed_pic_rate_within_cvs_flag[i], 1, i); - else - infer(fixed_pic_rate_within_cvs_flag[i], 1); - - if(current->fixed_pic_rate_within_cvs_flag[i]) { - ues(elemental_duration_in_tc_minus1[i], 0, 2047, 1, i); - infer(low_delay_hrd_flag[i], 0); + H265RawHRDParameters *current, int common_inf_present_flag, + int max_num_sub_layers_minus1) +{ + int err, i; + + if (common_inf_present_flag) { + flag(nal_hrd_parameters_present_flag); + flag(vcl_hrd_parameters_present_flag); + + if (current->nal_hrd_parameters_present_flag || + current->vcl_hrd_parameters_present_flag) { + flag(sub_pic_hrd_params_present_flag); + if (current->sub_pic_hrd_params_present_flag) { + ub(8, tick_divisor_minus2); + ub(5, du_cpb_removal_delay_increment_length_minus1); + 
flag(sub_pic_cpb_params_in_pic_timing_sei_flag); + ub(5, dpb_output_delay_du_length_minus1); + } + + ub(4, bit_rate_scale); + ub(4, cpb_size_scale); + if (current->sub_pic_hrd_params_present_flag) + ub(4, cpb_size_du_scale); + + ub(5, initial_cpb_removal_delay_length_minus1); + ub(5, au_cpb_removal_delay_length_minus1); + ub(5, dpb_output_delay_length_minus1); + } else { + infer(sub_pic_hrd_params_present_flag, 0); + + infer(initial_cpb_removal_delay_length_minus1, 23); + infer(au_cpb_removal_delay_length_minus1, 23); + infer(dpb_output_delay_length_minus1, 23); + } } - else - flags(low_delay_hrd_flag[i], 1, i); - if(!current->low_delay_hrd_flag[i]) - ues(cpb_cnt_minus1[i], 0, 31, 1, i); - else - infer(cpb_cnt_minus1[i], 0); + for (i = 0; i <= max_num_sub_layers_minus1; i++) { + flags(fixed_pic_rate_general_flag[i], 1, i); + + if (!current->fixed_pic_rate_general_flag[i]) + flags(fixed_pic_rate_within_cvs_flag[i], 1, i); + else + infer(fixed_pic_rate_within_cvs_flag[i], 1); + + if (current->fixed_pic_rate_within_cvs_flag[i]) { + ues(elemental_duration_in_tc_minus1[i], 0, 2047, 1, i); + infer(low_delay_hrd_flag[i], 0); + } else + flags(low_delay_hrd_flag[i], 1, i); - if(current->nal_hrd_parameters_present_flag) - CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 0, i)); - if(current->vcl_hrd_parameters_present_flag) - CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 1, i)); - } + if (!current->low_delay_hrd_flag[i]) + ues(cpb_cnt_minus1[i], 0, 31, 1, i); + else + infer(cpb_cnt_minus1[i], 0); - return 0; + if (current->nal_hrd_parameters_present_flag) + CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 0, i)); + if (current->vcl_hrd_parameters_present_flag) + CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 1, i)); + } + + return 0; } static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawVUI *current, const H265RawSPS *sps) { - int err; - - flag(aspect_ratio_info_present_flag); - 
if(current->aspect_ratio_info_present_flag) { - ub(8, aspect_ratio_idc); - if(current->aspect_ratio_idc == 255) { - ub(16, sar_width); - ub(16, sar_height); - } - } - else { - infer(aspect_ratio_idc, 0); - } - - flag(overscan_info_present_flag); - if(current->overscan_info_present_flag) - flag(overscan_appropriate_flag); - - flag(video_signal_type_present_flag); - if(current->video_signal_type_present_flag) { - ub(3, video_format); - flag(video_full_range_flag); - flag(colour_description_present_flag); - if(current->colour_description_present_flag) { - ub(8, colour_primaries); - ub(8, transfer_characteristics); - ub(8, matrix_coefficients); - } - else { - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - } - else { - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - - flag(chroma_loc_info_present_flag); - if(current->chroma_loc_info_present_flag) { - ue(chroma_sample_loc_type_top_field, 0, 5); - ue(chroma_sample_loc_type_bottom_field, 0, 5); - } - else { - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - } - - flag(neutral_chroma_indication_flag); - flag(field_seq_flag); - flag(frame_field_info_present_flag); - - flag(default_display_window_flag); - if(current->default_display_window_flag) { - ue(def_disp_win_left_offset, 0, 16384); - ue(def_disp_win_right_offset, 0, 16384); - ue(def_disp_win_top_offset, 0, 16384); - ue(def_disp_win_bottom_offset, 0, 16384); - } - - flag(vui_timing_info_present_flag); - if(current->vui_timing_info_present_flag) { - u(32, vui_num_units_in_tick, 1, UINT32_MAX); - u(32, vui_time_scale, 1, UINT32_MAX); - flag(vui_poc_proportional_to_timing_flag); - if(current->vui_poc_proportional_to_timing_flag) - ue(vui_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1); - - flag(vui_hrd_parameters_present_flag); - 
if(current->vui_hrd_parameters_present_flag) { - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->hrd_parameters, - 1, sps->sps_max_sub_layers_minus1)); - } - } - - flag(bitstream_restriction_flag); - if(current->bitstream_restriction_flag) { - flag(tiles_fixed_structure_flag); - flag(motion_vectors_over_pic_boundaries_flag); - flag(restricted_ref_pic_lists_flag); - ue(min_spatial_segmentation_idc, 0, 4095); - ue(max_bytes_per_pic_denom, 0, 16); - ue(max_bits_per_min_cu_denom, 0, 16); - ue(log2_max_mv_length_horizontal, 0, 16); - ue(log2_max_mv_length_vertical, 0, 16); - } - else { - infer(tiles_fixed_structure_flag, 0); - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(min_spatial_segmentation_idc, 0); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_min_cu_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - } + H265RawVUI *current, const H265RawSPS *sps) +{ + int err; + + flag(aspect_ratio_info_present_flag); + if (current->aspect_ratio_info_present_flag) { + ub(8, aspect_ratio_idc); + if (current->aspect_ratio_idc == 255) { + ub(16, sar_width); + ub(16, sar_height); + } + } else { + infer(aspect_ratio_idc, 0); + } + + flag(overscan_info_present_flag); + if (current->overscan_info_present_flag) + flag(overscan_appropriate_flag); + + flag(video_signal_type_present_flag); + if (current->video_signal_type_present_flag) { + ub(3, video_format); + flag(video_full_range_flag); + flag(colour_description_present_flag); + if (current->colour_description_present_flag) { + ub(8, colour_primaries); + ub(8, transfer_characteristics); + ub(8, matrix_coefficients); + } else { + infer(colour_primaries, 2); + infer(transfer_characteristics, 2); + infer(matrix_coefficients, 2); + } + } else { + infer(video_format, 5); + infer(video_full_range_flag, 0); + infer(colour_primaries, 2); + infer(transfer_characteristics, 2); + infer(matrix_coefficients, 2); + } + + flag(chroma_loc_info_present_flag); + if 
(current->chroma_loc_info_present_flag) { + ue(chroma_sample_loc_type_top_field, 0, 5); + ue(chroma_sample_loc_type_bottom_field, 0, 5); + } else { + infer(chroma_sample_loc_type_top_field, 0); + infer(chroma_sample_loc_type_bottom_field, 0); + } + + flag(neutral_chroma_indication_flag); + flag(field_seq_flag); + flag(frame_field_info_present_flag); + + flag(default_display_window_flag); + if (current->default_display_window_flag) { + ue(def_disp_win_left_offset, 0, 16384); + ue(def_disp_win_right_offset, 0, 16384); + ue(def_disp_win_top_offset, 0, 16384); + ue(def_disp_win_bottom_offset, 0, 16384); + } + + flag(vui_timing_info_present_flag); + if (current->vui_timing_info_present_flag) { + u(32, vui_num_units_in_tick, 1, UINT32_MAX); + u(32, vui_time_scale, 1, UINT32_MAX); + flag(vui_poc_proportional_to_timing_flag); + if (current->vui_poc_proportional_to_timing_flag) + ue(vui_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1); + + flag(vui_hrd_parameters_present_flag); + if (current->vui_hrd_parameters_present_flag) { + CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->hrd_parameters, + 1, sps->sps_max_sub_layers_minus1)); + } + } - return 0; + flag(bitstream_restriction_flag); + if (current->bitstream_restriction_flag) { + flag(tiles_fixed_structure_flag); + flag(motion_vectors_over_pic_boundaries_flag); + flag(restricted_ref_pic_lists_flag); + ue(min_spatial_segmentation_idc, 0, 4095); + ue(max_bytes_per_pic_denom, 0, 16); + ue(max_bits_per_min_cu_denom, 0, 16); + ue(log2_max_mv_length_horizontal, 0, 16); + ue(log2_max_mv_length_vertical, 0, 16); + } else { + infer(tiles_fixed_structure_flag, 0); + infer(motion_vectors_over_pic_boundaries_flag, 1); + infer(min_spatial_segmentation_idc, 0); + infer(max_bytes_per_pic_denom, 2); + infer(max_bits_per_min_cu_denom, 1); + infer(log2_max_mv_length_horizontal, 15); + infer(log2_max_mv_length_vertical, 15); + } + + return 0; } static int FUNC(vps)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawVPS *current) { - int err, i, j; 
- - HEADER("Video Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_VPS)); - - ub(4, vps_video_parameter_set_id); - - flag(vps_base_layer_internal_flag); - flag(vps_base_layer_available_flag); - u(6, vps_max_layers_minus1, 0, HEVC_MAX_LAYERS - 1); - u(3, vps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1); - flag(vps_temporal_id_nesting_flag); - - if(current->vps_max_sub_layers_minus1 == 0 && - current->vps_temporal_id_nesting_flag != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "vps_temporal_id_nesting_flag must be 1 if " - "vps_max_sub_layers_minus1 is 0.\n"); - return AVERROR_INVALIDDATA; - } - - fixed(16, vps_reserved_0xffff_16bits, 0xffff); - - CHECK(FUNC(profile_tier_level)(ctx, rw, ¤t->profile_tier_level, - 1, current->vps_max_sub_layers_minus1)); - - flag(vps_sub_layer_ordering_info_present_flag); - for(i = (current->vps_sub_layer_ordering_info_present_flag ? - 0 : - current->vps_max_sub_layers_minus1); - i <= current->vps_max_sub_layers_minus1; i++) { - ues(vps_max_dec_pic_buffering_minus1[i], - 0, HEVC_MAX_DPB_SIZE - 1, 1, i); - ues(vps_max_num_reorder_pics[i], - 0, current->vps_max_dec_pic_buffering_minus1[i], 1, i); - ues(vps_max_latency_increase_plus1[i], - 0, UINT32_MAX - 1, 1, i); - } - if(!current->vps_sub_layer_ordering_info_present_flag) { - for(i = 0; i < current->vps_max_sub_layers_minus1; i++) { - infer(vps_max_dec_pic_buffering_minus1[i], - current->vps_max_dec_pic_buffering_minus1[current->vps_max_sub_layers_minus1]); - infer(vps_max_num_reorder_pics[i], - current->vps_max_num_reorder_pics[current->vps_max_sub_layers_minus1]); - infer(vps_max_latency_increase_plus1[i], - current->vps_max_latency_increase_plus1[current->vps_max_sub_layers_minus1]); - } - } - - u(6, vps_max_layer_id, 0, HEVC_MAX_LAYERS - 1); - ue(vps_num_layer_sets_minus1, 0, HEVC_MAX_LAYER_SETS - 1); - for(i = 1; i <= current->vps_num_layer_sets_minus1; i++) { - for(j = 0; j <= current->vps_max_layer_id; j++) - 
flags(layer_id_included_flag[i][j], 2, i, j); - } - for(j = 0; j <= current->vps_max_layer_id; j++) - infer(layer_id_included_flag[0][j], j == 0); - - flag(vps_timing_info_present_flag); - if(current->vps_timing_info_present_flag) { - u(32, vps_num_units_in_tick, 1, UINT32_MAX); - u(32, vps_time_scale, 1, UINT32_MAX); - flag(vps_poc_proportional_to_timing_flag); - if(current->vps_poc_proportional_to_timing_flag) - ue(vps_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1); - ue(vps_num_hrd_parameters, 0, current->vps_num_layer_sets_minus1 + 1); - for(i = 0; i < current->vps_num_hrd_parameters; i++) { - ues(hrd_layer_set_idx[i], - current->vps_base_layer_internal_flag ? 0 : 1, - current->vps_num_layer_sets_minus1, 1, i); - if(i > 0) - flags(cprms_present_flag[i], 1, i); - else - infer(cprms_present_flag[0], 1); - - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->hrd_parameters[i], - current->cprms_present_flag[i], - current->vps_max_sub_layers_minus1)); - } - } - - flag(vps_extension_flag); - if(current->vps_extension_flag) - CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; + H265RawVPS *current) +{ + int err, i, j; + + HEADER("Video Parameter Set"); + + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_VPS)); + + ub(4, vps_video_parameter_set_id); + + flag(vps_base_layer_internal_flag); + flag(vps_base_layer_available_flag); + u(6, vps_max_layers_minus1, 0, HEVC_MAX_LAYERS - 1); + u(3, vps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1); + flag(vps_temporal_id_nesting_flag); + + if (current->vps_max_sub_layers_minus1 == 0 && + current->vps_temporal_id_nesting_flag != 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " + "vps_temporal_id_nesting_flag must be 1 if " + "vps_max_sub_layers_minus1 is 0.\n"); + return AVERROR_INVALIDDATA; + } + + fixed(16, vps_reserved_0xffff_16bits, 0xffff); + + CHECK(FUNC(profile_tier_level)(ctx, rw, ¤t->profile_tier_level, + 1, 
current->vps_max_sub_layers_minus1)); + + flag(vps_sub_layer_ordering_info_present_flag); + for (i = (current->vps_sub_layer_ordering_info_present_flag ? + 0 : current->vps_max_sub_layers_minus1); + i <= current->vps_max_sub_layers_minus1; i++) { + ues(vps_max_dec_pic_buffering_minus1[i], + 0, HEVC_MAX_DPB_SIZE - 1, 1, i); + ues(vps_max_num_reorder_pics[i], + 0, current->vps_max_dec_pic_buffering_minus1[i], 1, i); + ues(vps_max_latency_increase_plus1[i], + 0, UINT32_MAX - 1, 1, i); + } + if (!current->vps_sub_layer_ordering_info_present_flag) { + for (i = 0; i < current->vps_max_sub_layers_minus1; i++) { + infer(vps_max_dec_pic_buffering_minus1[i], + current->vps_max_dec_pic_buffering_minus1[current->vps_max_sub_layers_minus1]); + infer(vps_max_num_reorder_pics[i], + current->vps_max_num_reorder_pics[current->vps_max_sub_layers_minus1]); + infer(vps_max_latency_increase_plus1[i], + current->vps_max_latency_increase_plus1[current->vps_max_sub_layers_minus1]); + } + } + + u(6, vps_max_layer_id, 0, HEVC_MAX_LAYERS - 1); + ue(vps_num_layer_sets_minus1, 0, HEVC_MAX_LAYER_SETS - 1); + for (i = 1; i <= current->vps_num_layer_sets_minus1; i++) { + for (j = 0; j <= current->vps_max_layer_id; j++) + flags(layer_id_included_flag[i][j], 2, i, j); + } + for (j = 0; j <= current->vps_max_layer_id; j++) + infer(layer_id_included_flag[0][j], j == 0); + + flag(vps_timing_info_present_flag); + if (current->vps_timing_info_present_flag) { + u(32, vps_num_units_in_tick, 1, UINT32_MAX); + u(32, vps_time_scale, 1, UINT32_MAX); + flag(vps_poc_proportional_to_timing_flag); + if (current->vps_poc_proportional_to_timing_flag) + ue(vps_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1); + ue(vps_num_hrd_parameters, 0, current->vps_num_layer_sets_minus1 + 1); + for (i = 0; i < current->vps_num_hrd_parameters; i++) { + ues(hrd_layer_set_idx[i], + current->vps_base_layer_internal_flag ? 
0 : 1, + current->vps_num_layer_sets_minus1, 1, i); + if (i > 0) + flags(cprms_present_flag[i], 1, i); + else + infer(cprms_present_flag[0], 1); + + CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->hrd_parameters[i], + current->cprms_present_flag[i], + current->vps_max_sub_layers_minus1)); + } + } + + flag(vps_extension_flag); + if (current->vps_extension_flag) + CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); + + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + + return 0; } static int FUNC(st_ref_pic_set)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSTRefPicSet *current, int st_rps_idx, - const H265RawSPS *sps) { - int err, i, j; - - if(st_rps_idx != 0) - flag(inter_ref_pic_set_prediction_flag); - else - infer(inter_ref_pic_set_prediction_flag, 0); - - if(current->inter_ref_pic_set_prediction_flag) { - unsigned int ref_rps_idx, num_delta_pocs, num_ref_pics; - const H265RawSTRefPicSet *ref; - int delta_rps, d_poc; - int ref_delta_poc_s0[HEVC_MAX_REFS], ref_delta_poc_s1[HEVC_MAX_REFS]; - int delta_poc_s0[HEVC_MAX_REFS], delta_poc_s1[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_s0[HEVC_MAX_REFS], - used_by_curr_pic_s1[HEVC_MAX_REFS]; - - if(st_rps_idx == sps->num_short_term_ref_pic_sets) - ue(delta_idx_minus1, 0, st_rps_idx - 1); + H265RawSTRefPicSet *current, int st_rps_idx, + const H265RawSPS *sps) +{ + int err, i, j; + + if (st_rps_idx != 0) + flag(inter_ref_pic_set_prediction_flag); else - infer(delta_idx_minus1, 0); - - ref_rps_idx = st_rps_idx - (current->delta_idx_minus1 + 1); - ref = &sps->st_ref_pic_set[ref_rps_idx]; - num_delta_pocs = ref->num_negative_pics + ref->num_positive_pics; - av_assert0(num_delta_pocs < HEVC_MAX_DPB_SIZE); - - flag(delta_rps_sign); - ue(abs_delta_rps_minus1, 0, INT16_MAX); - delta_rps = (1 - 2 * current->delta_rps_sign) * - (current->abs_delta_rps_minus1 + 1); - - num_ref_pics = 0; - for(j = 0; j <= num_delta_pocs; j++) { - flags(used_by_curr_pic_flag[j], 1, j); - if(!current->used_by_curr_pic_flag[j]) - flags(use_delta_flag[j], 
1, j); - else - infer(use_delta_flag[j], 1); - if(current->use_delta_flag[j]) - ++num_ref_pics; - } - if(num_ref_pics >= HEVC_MAX_DPB_SIZE) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "short-term ref pic set %d " - "contains too many pictures.\n", - st_rps_idx); - return AVERROR_INVALIDDATA; - } - - // Since the stored form of an RPS here is actually the delta-step - // form used when inter_ref_pic_set_prediction_flag is not set, we - // need to reconstruct that here in order to be able to refer to - // the RPS later (which is required for parsing, because we don't - // even know what syntax elements appear without it). Therefore, - // this code takes the delta-step form of the reference set, turns - // it into the delta-array form, applies the prediction process of - // 7.4.8, converts the result back to the delta-step form, and - // stores that as the current set for future use. Note that the - // inferences here mean that writers using prediction will need - // to fill in the delta-step values correctly as well - since the - // whole RPS prediction process is somewhat overly sophisticated, - // this hopefully forms a useful check for them to ensure their - // predicted form actually matches what was intended rather than - // an onerous additional requirement. 
- - d_poc = 0; - for(i = 0; i < ref->num_negative_pics; i++) { - d_poc -= ref->delta_poc_s0_minus1[i] + 1; - ref_delta_poc_s0[i] = d_poc; - } - d_poc = 0; - for(i = 0; i < ref->num_positive_pics; i++) { - d_poc += ref->delta_poc_s1_minus1[i] + 1; - ref_delta_poc_s1[i] = d_poc; - } - - i = 0; - for(j = ref->num_positive_pics - 1; j >= 0; j--) { - d_poc = ref_delta_poc_s1[j] + delta_rps; - if(d_poc < 0 && current->use_delta_flag[ref->num_negative_pics + j]) { - delta_poc_s0[i] = d_poc; - used_by_curr_pic_s0[i++] = - current->used_by_curr_pic_flag[ref->num_negative_pics + j]; - } - } - if(delta_rps < 0 && current->use_delta_flag[num_delta_pocs]) { - delta_poc_s0[i] = delta_rps; - used_by_curr_pic_s0[i++] = - current->used_by_curr_pic_flag[num_delta_pocs]; - } - for(j = 0; j < ref->num_negative_pics; j++) { - d_poc = ref_delta_poc_s0[j] + delta_rps; - if(d_poc < 0 && current->use_delta_flag[j]) { - delta_poc_s0[i] = d_poc; - used_by_curr_pic_s0[i++] = current->used_by_curr_pic_flag[j]; - } - } - - infer(num_negative_pics, i); - for(i = 0; i < current->num_negative_pics; i++) { - infer(delta_poc_s0_minus1[i], - -(delta_poc_s0[i] - (i == 0 ? 
0 : delta_poc_s0[i - 1])) - 1); - infer(used_by_curr_pic_s0_flag[i], used_by_curr_pic_s0[i]); - } - - i = 0; - for(j = ref->num_negative_pics - 1; j >= 0; j--) { - d_poc = ref_delta_poc_s0[j] + delta_rps; - if(d_poc > 0 && current->use_delta_flag[j]) { - delta_poc_s1[i] = d_poc; - used_by_curr_pic_s1[i++] = current->used_by_curr_pic_flag[j]; - } - } - if(delta_rps > 0 && current->use_delta_flag[num_delta_pocs]) { - delta_poc_s1[i] = delta_rps; - used_by_curr_pic_s1[i++] = - current->used_by_curr_pic_flag[num_delta_pocs]; - } - for(j = 0; j < ref->num_positive_pics; j++) { - d_poc = ref_delta_poc_s1[j] + delta_rps; - if(d_poc > 0 && current->use_delta_flag[ref->num_negative_pics + j]) { - delta_poc_s1[i] = d_poc; - used_by_curr_pic_s1[i++] = - current->used_by_curr_pic_flag[ref->num_negative_pics + j]; - } - } - - infer(num_positive_pics, i); - for(i = 0; i < current->num_positive_pics; i++) { - infer(delta_poc_s1_minus1[i], - delta_poc_s1[i] - (i == 0 ? 0 : delta_poc_s1[i - 1]) - 1); - infer(used_by_curr_pic_s1_flag[i], used_by_curr_pic_s1[i]); - } - } - else { - ue(num_negative_pics, 0, 15); - ue(num_positive_pics, 0, 15 - current->num_negative_pics); - - for(i = 0; i < current->num_negative_pics; i++) { - ues(delta_poc_s0_minus1[i], 0, INT16_MAX, 1, i); - flags(used_by_curr_pic_s0_flag[i], 1, i); - } - - for(i = 0; i < current->num_positive_pics; i++) { - ues(delta_poc_s1_minus1[i], 0, INT16_MAX, 1, i); - flags(used_by_curr_pic_s1_flag[i], 1, i); - } - } - - return 0; + infer(inter_ref_pic_set_prediction_flag, 0); + + if (current->inter_ref_pic_set_prediction_flag) { + unsigned int ref_rps_idx, num_delta_pocs, num_ref_pics; + const H265RawSTRefPicSet *ref; + int delta_rps, d_poc; + int ref_delta_poc_s0[HEVC_MAX_REFS], ref_delta_poc_s1[HEVC_MAX_REFS]; + int delta_poc_s0[HEVC_MAX_REFS], delta_poc_s1[HEVC_MAX_REFS]; + uint8_t used_by_curr_pic_s0[HEVC_MAX_REFS], + used_by_curr_pic_s1[HEVC_MAX_REFS]; + + if (st_rps_idx == sps->num_short_term_ref_pic_sets) + 
ue(delta_idx_minus1, 0, st_rps_idx - 1); + else + infer(delta_idx_minus1, 0); + + ref_rps_idx = st_rps_idx - (current->delta_idx_minus1 + 1); + ref = &sps->st_ref_pic_set[ref_rps_idx]; + num_delta_pocs = ref->num_negative_pics + ref->num_positive_pics; + av_assert0(num_delta_pocs < HEVC_MAX_DPB_SIZE); + + flag(delta_rps_sign); + ue(abs_delta_rps_minus1, 0, INT16_MAX); + delta_rps = (1 - 2 * current->delta_rps_sign) * + (current->abs_delta_rps_minus1 + 1); + + num_ref_pics = 0; + for (j = 0; j <= num_delta_pocs; j++) { + flags(used_by_curr_pic_flag[j], 1, j); + if (!current->used_by_curr_pic_flag[j]) + flags(use_delta_flag[j], 1, j); + else + infer(use_delta_flag[j], 1); + if (current->use_delta_flag[j]) + ++num_ref_pics; + } + if (num_ref_pics >= HEVC_MAX_DPB_SIZE) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " + "short-term ref pic set %d " + "contains too many pictures.\n", st_rps_idx); + return AVERROR_INVALIDDATA; + } + + // Since the stored form of an RPS here is actually the delta-step + // form used when inter_ref_pic_set_prediction_flag is not set, we + // need to reconstruct that here in order to be able to refer to + // the RPS later (which is required for parsing, because we don't + // even know what syntax elements appear without it). Therefore, + // this code takes the delta-step form of the reference set, turns + // it into the delta-array form, applies the prediction process of + // 7.4.8, converts the result back to the delta-step form, and + // stores that as the current set for future use. Note that the + // inferences here mean that writers using prediction will need + // to fill in the delta-step values correctly as well - since the + // whole RPS prediction process is somewhat overly sophisticated, + // this hopefully forms a useful check for them to ensure their + // predicted form actually matches what was intended rather than + // an onerous additional requirement. 
+ + d_poc = 0; + for (i = 0; i < ref->num_negative_pics; i++) { + d_poc -= ref->delta_poc_s0_minus1[i] + 1; + ref_delta_poc_s0[i] = d_poc; + } + d_poc = 0; + for (i = 0; i < ref->num_positive_pics; i++) { + d_poc += ref->delta_poc_s1_minus1[i] + 1; + ref_delta_poc_s1[i] = d_poc; + } + + i = 0; + for (j = ref->num_positive_pics - 1; j >= 0; j--) { + d_poc = ref_delta_poc_s1[j] + delta_rps; + if (d_poc < 0 && current->use_delta_flag[ref->num_negative_pics + j]) { + delta_poc_s0[i] = d_poc; + used_by_curr_pic_s0[i++] = + current->used_by_curr_pic_flag[ref->num_negative_pics + j]; + } + } + if (delta_rps < 0 && current->use_delta_flag[num_delta_pocs]) { + delta_poc_s0[i] = delta_rps; + used_by_curr_pic_s0[i++] = + current->used_by_curr_pic_flag[num_delta_pocs]; + } + for (j = 0; j < ref->num_negative_pics; j++) { + d_poc = ref_delta_poc_s0[j] + delta_rps; + if (d_poc < 0 && current->use_delta_flag[j]) { + delta_poc_s0[i] = d_poc; + used_by_curr_pic_s0[i++] = current->used_by_curr_pic_flag[j]; + } + } + + infer(num_negative_pics, i); + for (i = 0; i < current->num_negative_pics; i++) { + infer(delta_poc_s0_minus1[i], + -(delta_poc_s0[i] - (i == 0 ? 
0 : delta_poc_s0[i - 1])) - 1); + infer(used_by_curr_pic_s0_flag[i], used_by_curr_pic_s0[i]); + } + + i = 0; + for (j = ref->num_negative_pics - 1; j >= 0; j--) { + d_poc = ref_delta_poc_s0[j] + delta_rps; + if (d_poc > 0 && current->use_delta_flag[j]) { + delta_poc_s1[i] = d_poc; + used_by_curr_pic_s1[i++] = current->used_by_curr_pic_flag[j]; + } + } + if (delta_rps > 0 && current->use_delta_flag[num_delta_pocs]) { + delta_poc_s1[i] = delta_rps; + used_by_curr_pic_s1[i++] = + current->used_by_curr_pic_flag[num_delta_pocs]; + } + for (j = 0; j < ref->num_positive_pics; j++) { + d_poc = ref_delta_poc_s1[j] + delta_rps; + if (d_poc > 0 && current->use_delta_flag[ref->num_negative_pics + j]) { + delta_poc_s1[i] = d_poc; + used_by_curr_pic_s1[i++] = + current->used_by_curr_pic_flag[ref->num_negative_pics + j]; + } + } + + infer(num_positive_pics, i); + for (i = 0; i < current->num_positive_pics; i++) { + infer(delta_poc_s1_minus1[i], + delta_poc_s1[i] - (i == 0 ? 0 : delta_poc_s1[i - 1]) - 1); + infer(used_by_curr_pic_s1_flag[i], used_by_curr_pic_s1[i]); + } + + } else { + ue(num_negative_pics, 0, 15); + ue(num_positive_pics, 0, 15 - current->num_negative_pics); + + for (i = 0; i < current->num_negative_pics; i++) { + ues(delta_poc_s0_minus1[i], 0, INT16_MAX, 1, i); + flags(used_by_curr_pic_s0_flag[i], 1, i); + } + + for (i = 0; i < current->num_positive_pics; i++) { + ues(delta_poc_s1_minus1[i], 0, INT16_MAX, 1, i); + flags(used_by_curr_pic_s1_flag[i], 1, i); + } + } + + return 0; } static int FUNC(scaling_list_data)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawScalingList *current) { - int sizeId, matrixId; - int err, n, i; - - for(sizeId = 0; sizeId < 4; sizeId++) { - for(matrixId = 0; matrixId < 6; matrixId += (sizeId == 3 ? 3 : 1)) { - flags(scaling_list_pred_mode_flag[sizeId][matrixId], - 2, sizeId, matrixId); - if(!current->scaling_list_pred_mode_flag[sizeId][matrixId]) { - ues(scaling_list_pred_matrix_id_delta[sizeId][matrixId], - 0, sizeId == 3 ? 
matrixId / 3 : matrixId, - 2, sizeId, matrixId); - } - else { - n = FFMIN(64, 1 << (4 + (sizeId << 1))); - if(sizeId > 1) { - ses(scaling_list_dc_coef_minus8[sizeId - 2][matrixId], -7, +247, - 2, sizeId - 2, matrixId); + H265RawScalingList *current) +{ + int sizeId, matrixId; + int err, n, i; + + for (sizeId = 0; sizeId < 4; sizeId++) { + for (matrixId = 0; matrixId < 6; matrixId += (sizeId == 3 ? 3 : 1)) { + flags(scaling_list_pred_mode_flag[sizeId][matrixId], + 2, sizeId, matrixId); + if (!current->scaling_list_pred_mode_flag[sizeId][matrixId]) { + ues(scaling_list_pred_matrix_id_delta[sizeId][matrixId], + 0, sizeId == 3 ? matrixId / 3 : matrixId, + 2, sizeId, matrixId); + } else { + n = FFMIN(64, 1 << (4 + (sizeId << 1))); + if (sizeId > 1) { + ses(scaling_list_dc_coef_minus8[sizeId - 2][matrixId], -7, +247, + 2, sizeId - 2, matrixId); + } + for (i = 0; i < n; i++) { + ses(scaling_list_delta_coeff[sizeId][matrixId][i], + -128, +127, 3, sizeId, matrixId, i); + } + } } - for(i = 0; i < n; i++) { - ses(scaling_list_delta_coeff[sizeId][matrixId][i], - -128, +127, 3, sizeId, matrixId, i); - } - } } - } - return 0; + return 0; } static int FUNC(sps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSPS *current) { - int err; - - flag(transform_skip_rotation_enabled_flag); - flag(transform_skip_context_enabled_flag); - flag(implicit_rdpcm_enabled_flag); - flag(explicit_rdpcm_enabled_flag); - flag(extended_precision_processing_flag); - flag(intra_smoothing_disabled_flag); - flag(high_precision_offsets_enabled_flag); - flag(persistent_rice_adaptation_enabled_flag); - flag(cabac_bypass_alignment_enabled_flag); - - return 0; + H265RawSPS *current) +{ + int err; + + flag(transform_skip_rotation_enabled_flag); + flag(transform_skip_context_enabled_flag); + flag(implicit_rdpcm_enabled_flag); + flag(explicit_rdpcm_enabled_flag); + flag(extended_precision_processing_flag); + flag(intra_smoothing_disabled_flag); + flag(high_precision_offsets_enabled_flag); + 
flag(persistent_rice_adaptation_enabled_flag); + flag(cabac_bypass_alignment_enabled_flag); + + return 0; } static int FUNC(sps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSPS *current) { - int err, comp, i; - - flag(sps_curr_pic_ref_enabled_flag); - - flag(palette_mode_enabled_flag); - if(current->palette_mode_enabled_flag) { - ue(palette_max_size, 0, 64); - ue(delta_palette_max_predictor_size, 0, 128); - - flag(sps_palette_predictor_initializer_present_flag); - if(current->sps_palette_predictor_initializer_present_flag) { - ue(sps_num_palette_predictor_initializer_minus1, 0, 128); - for(comp = 0; comp < (current->chroma_format_idc ? 3 : 1); comp++) { - int bit_depth = comp == 0 ? current->bit_depth_luma_minus8 + 8 : current->bit_depth_chroma_minus8 + 8; - for(i = 0; i <= current->sps_num_palette_predictor_initializer_minus1; i++) - ubs(bit_depth, sps_palette_predictor_initializers[comp][i], 2, comp, i); - } + H265RawSPS *current) +{ + int err, comp, i; + + flag(sps_curr_pic_ref_enabled_flag); + + flag(palette_mode_enabled_flag); + if (current->palette_mode_enabled_flag) { + ue(palette_max_size, 0, 64); + ue(delta_palette_max_predictor_size, 0, 128); + + flag(sps_palette_predictor_initializer_present_flag); + if (current->sps_palette_predictor_initializer_present_flag) { + ue(sps_num_palette_predictor_initializer_minus1, 0, 127); + for (comp = 0; comp < (current->chroma_format_idc ? 3 : 1); comp++) { + int bit_depth = comp == 0 ? 
current->bit_depth_luma_minus8 + 8 + : current->bit_depth_chroma_minus8 + 8; + for (i = 0; i <= current->sps_num_palette_predictor_initializer_minus1; i++) + ubs(bit_depth, sps_palette_predictor_initializers[comp][i], 2, comp, i); + } + } } - } - u(2, motion_vector_resolution_control_idc, 0, 2); - flag(intra_boundary_filtering_disable_flag); + u(2, motion_vector_resolution_control_idc, 0, 2); + flag(intra_boundary_filtering_disable_flag); - return 0; + return 0; } static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx, - RWContext *rw, H265RawVUI *current, - H265RawSPS *sps) { - infer(aspect_ratio_idc, 0); - - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - - infer(tiles_fixed_structure_flag, 0); - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(min_spatial_segmentation_idc, 0); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_min_cu_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - - return 0; + RWContext *rw, H265RawVUI *current, + H265RawSPS *sps) +{ + infer(aspect_ratio_idc, 0); + + infer(video_format, 5); + infer(video_full_range_flag, 0); + infer(colour_primaries, 2); + infer(transfer_characteristics, 2); + infer(matrix_coefficients, 2); + + infer(chroma_sample_loc_type_top_field, 0); + infer(chroma_sample_loc_type_bottom_field, 0); + + infer(tiles_fixed_structure_flag, 0); + infer(motion_vectors_over_pic_boundaries_flag, 1); + infer(min_spatial_segmentation_idc, 0); + infer(max_bytes_per_pic_denom, 2); + infer(max_bits_per_min_cu_denom, 1); + infer(log2_max_mv_length_horizontal, 15); + infer(log2_max_mv_length_vertical, 15); + + return 0; } static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSPS *current) { - CodedBitstreamH265Context *h265 = 
ctx->priv_data; - const H265RawVPS *vps; - int err, i; - unsigned int min_cb_log2_size_y, ctb_log2_size_y, - min_cb_size_y, min_tb_log2_size_y; - - HEADER("Sequence Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_SPS)); - - ub(4, sps_video_parameter_set_id); - h265->active_vps = vps = h265->vps[current->sps_video_parameter_set_id]; - - u(3, sps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1); - flag(sps_temporal_id_nesting_flag); - if(vps) { - if(vps->vps_max_sub_layers_minus1 > current->sps_max_sub_layers_minus1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "sps_max_sub_layers_minus1 (%d) must be less than or equal to " - "vps_max_sub_layers_minus1 (%d).\n", - vps->vps_max_sub_layers_minus1, - current->sps_max_sub_layers_minus1); - return AVERROR_INVALIDDATA; - } - if(vps->vps_temporal_id_nesting_flag && - !current->sps_temporal_id_nesting_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "sps_temporal_id_nesting_flag must be 1 if " - "vps_temporal_id_nesting_flag is 1.\n"); - return AVERROR_INVALIDDATA; - } - } - - CHECK(FUNC(profile_tier_level)(ctx, rw, ¤t->profile_tier_level, - 1, current->sps_max_sub_layers_minus1)); - - ue(sps_seq_parameter_set_id, 0, 15); - - ue(chroma_format_idc, 0, 3); - if(current->chroma_format_idc == 3) - flag(separate_colour_plane_flag); - else - infer(separate_colour_plane_flag, 0); - - ue(pic_width_in_luma_samples, 1, HEVC_MAX_WIDTH); - ue(pic_height_in_luma_samples, 1, HEVC_MAX_HEIGHT); - - flag(conformance_window_flag); - if(current->conformance_window_flag) { - ue(conf_win_left_offset, 0, current->pic_width_in_luma_samples); - ue(conf_win_right_offset, 0, current->pic_width_in_luma_samples); - ue(conf_win_top_offset, 0, current->pic_height_in_luma_samples); - ue(conf_win_bottom_offset, 0, current->pic_height_in_luma_samples); - } - else { - infer(conf_win_left_offset, 0); - infer(conf_win_right_offset, 0); - infer(conf_win_top_offset, 0); - 
infer(conf_win_bottom_offset, 0); - } - - ue(bit_depth_luma_minus8, 0, 8); - ue(bit_depth_chroma_minus8, 0, 8); - - ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12); - - flag(sps_sub_layer_ordering_info_present_flag); - for(i = (current->sps_sub_layer_ordering_info_present_flag ? - 0 : - current->sps_max_sub_layers_minus1); - i <= current->sps_max_sub_layers_minus1; i++) { - ues(sps_max_dec_pic_buffering_minus1[i], - 0, HEVC_MAX_DPB_SIZE - 1, 1, i); - ues(sps_max_num_reorder_pics[i], - 0, current->sps_max_dec_pic_buffering_minus1[i], 1, i); - ues(sps_max_latency_increase_plus1[i], - 0, UINT32_MAX - 1, 1, i); - } - if(!current->sps_sub_layer_ordering_info_present_flag) { - for(i = 0; i < current->sps_max_sub_layers_minus1; i++) { - infer(sps_max_dec_pic_buffering_minus1[i], - current->sps_max_dec_pic_buffering_minus1[current->sps_max_sub_layers_minus1]); - infer(sps_max_num_reorder_pics[i], - current->sps_max_num_reorder_pics[current->sps_max_sub_layers_minus1]); - infer(sps_max_latency_increase_plus1[i], - current->sps_max_latency_increase_plus1[current->sps_max_sub_layers_minus1]); - } - } - - ue(log2_min_luma_coding_block_size_minus3, 0, 3); - min_cb_log2_size_y = current->log2_min_luma_coding_block_size_minus3 + 3; - - ue(log2_diff_max_min_luma_coding_block_size, 0, 3); - ctb_log2_size_y = min_cb_log2_size_y + - current->log2_diff_max_min_luma_coding_block_size; - - min_cb_size_y = 1 << min_cb_log2_size_y; - if(current->pic_width_in_luma_samples % min_cb_size_y || - current->pic_height_in_luma_samples % min_cb_size_y) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid dimensions: %ux%u not divisible " - "by MinCbSizeY = %u.\n", - current->pic_width_in_luma_samples, - current->pic_height_in_luma_samples, min_cb_size_y); - return AVERROR_INVALIDDATA; - } - - ue(log2_min_luma_transform_block_size_minus2, 0, min_cb_log2_size_y - 3); - min_tb_log2_size_y = current->log2_min_luma_transform_block_size_minus2 + 2; - - ue(log2_diff_max_min_luma_transform_block_size, - 0, 
FFMIN(ctb_log2_size_y, 5) - min_tb_log2_size_y); - - ue(max_transform_hierarchy_depth_inter, - 0, ctb_log2_size_y - min_tb_log2_size_y); - ue(max_transform_hierarchy_depth_intra, - 0, ctb_log2_size_y - min_tb_log2_size_y); - - flag(scaling_list_enabled_flag); - if(current->scaling_list_enabled_flag) { - flag(sps_scaling_list_data_present_flag); - if(current->sps_scaling_list_data_present_flag) - CHECK(FUNC(scaling_list_data)(ctx, rw, ¤t->scaling_list)); - } - else { - infer(sps_scaling_list_data_present_flag, 0); - } - - flag(amp_enabled_flag); - flag(sample_adaptive_offset_enabled_flag); - - flag(pcm_enabled_flag); - if(current->pcm_enabled_flag) { - u(4, pcm_sample_bit_depth_luma_minus1, - 0, current->bit_depth_luma_minus8 + 8 - 1); - u(4, pcm_sample_bit_depth_chroma_minus1, - 0, current->bit_depth_chroma_minus8 + 8 - 1); - - ue(log2_min_pcm_luma_coding_block_size_minus3, - FFMIN(min_cb_log2_size_y, 5) - 3, FFMIN(ctb_log2_size_y, 5) - 3); - ue(log2_diff_max_min_pcm_luma_coding_block_size, - 0, FFMIN(ctb_log2_size_y, 5) - (current->log2_min_pcm_luma_coding_block_size_minus3 + 3)); - - flag(pcm_loop_filter_disabled_flag); - } - - ue(num_short_term_ref_pic_sets, 0, HEVC_MAX_SHORT_TERM_REF_PIC_SETS); - for(i = 0; i < current->num_short_term_ref_pic_sets; i++) - CHECK(FUNC(st_ref_pic_set)(ctx, rw, ¤t->st_ref_pic_set[i], i, current)); - - flag(long_term_ref_pics_present_flag); - if(current->long_term_ref_pics_present_flag) { - ue(num_long_term_ref_pics_sps, 0, HEVC_MAX_LONG_TERM_REF_PICS); - for(i = 0; i < current->num_long_term_ref_pics_sps; i++) { - ubs(current->log2_max_pic_order_cnt_lsb_minus4 + 4, - lt_ref_pic_poc_lsb_sps[i], 1, i); - flags(used_by_curr_pic_lt_sps_flag[i], 1, i); - } - } - - flag(sps_temporal_mvp_enabled_flag); - flag(strong_intra_smoothing_enabled_flag); - - flag(vui_parameters_present_flag); - if(current->vui_parameters_present_flag) - CHECK(FUNC(vui_parameters)(ctx, rw, ¤t->vui, current)); - else - CHECK(FUNC(vui_parameters_default)(ctx, rw, 
¤t->vui, current)); - - flag(sps_extension_present_flag); - if(current->sps_extension_present_flag) { - flag(sps_range_extension_flag); - flag(sps_multilayer_extension_flag); - flag(sps_3d_extension_flag); - flag(sps_scc_extension_flag); - ub(4, sps_extension_4bits); - } - - if(current->sps_range_extension_flag) - CHECK(FUNC(sps_range_extension)(ctx, rw, current)); - if(current->sps_multilayer_extension_flag) - return AVERROR_PATCHWELCOME; - if(current->sps_3d_extension_flag) - return AVERROR_PATCHWELCOME; - if(current->sps_scc_extension_flag) - CHECK(FUNC(sps_scc_extension)(ctx, rw, current)); - if(current->sps_extension_4bits) - CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; + H265RawSPS *current) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawVPS *vps; + int err, i; + unsigned int min_cb_log2_size_y, ctb_log2_size_y, + min_cb_size_y, min_tb_log2_size_y; + + HEADER("Sequence Parameter Set"); + + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_SPS)); + + ub(4, sps_video_parameter_set_id); + h265->active_vps = vps = h265->vps[current->sps_video_parameter_set_id]; + + u(3, sps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1); + flag(sps_temporal_id_nesting_flag); + if (vps) { + if (vps->vps_max_sub_layers_minus1 > current->sps_max_sub_layers_minus1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " + "sps_max_sub_layers_minus1 (%d) must be less than or equal to " + "vps_max_sub_layers_minus1 (%d).\n", + vps->vps_max_sub_layers_minus1, + current->sps_max_sub_layers_minus1); + return AVERROR_INVALIDDATA; + } + if (vps->vps_temporal_id_nesting_flag && + !current->sps_temporal_id_nesting_flag) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " + "sps_temporal_id_nesting_flag must be 1 if " + "vps_temporal_id_nesting_flag is 1.\n"); + return AVERROR_INVALIDDATA; + } + } + + CHECK(FUNC(profile_tier_level)(ctx, rw, ¤t->profile_tier_level, + 1, 
current->sps_max_sub_layers_minus1)); + + ue(sps_seq_parameter_set_id, 0, 15); + + ue(chroma_format_idc, 0, 3); + if (current->chroma_format_idc == 3) + flag(separate_colour_plane_flag); + else + infer(separate_colour_plane_flag, 0); + + ue(pic_width_in_luma_samples, 1, HEVC_MAX_WIDTH); + ue(pic_height_in_luma_samples, 1, HEVC_MAX_HEIGHT); + + flag(conformance_window_flag); + if (current->conformance_window_flag) { + ue(conf_win_left_offset, 0, current->pic_width_in_luma_samples); + ue(conf_win_right_offset, 0, current->pic_width_in_luma_samples); + ue(conf_win_top_offset, 0, current->pic_height_in_luma_samples); + ue(conf_win_bottom_offset, 0, current->pic_height_in_luma_samples); + } else { + infer(conf_win_left_offset, 0); + infer(conf_win_right_offset, 0); + infer(conf_win_top_offset, 0); + infer(conf_win_bottom_offset, 0); + } + + ue(bit_depth_luma_minus8, 0, 8); + ue(bit_depth_chroma_minus8, 0, 8); + + ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12); + + flag(sps_sub_layer_ordering_info_present_flag); + for (i = (current->sps_sub_layer_ordering_info_present_flag ? 
+ 0 : current->sps_max_sub_layers_minus1); + i <= current->sps_max_sub_layers_minus1; i++) { + ues(sps_max_dec_pic_buffering_minus1[i], + 0, HEVC_MAX_DPB_SIZE - 1, 1, i); + ues(sps_max_num_reorder_pics[i], + 0, current->sps_max_dec_pic_buffering_minus1[i], 1, i); + ues(sps_max_latency_increase_plus1[i], + 0, UINT32_MAX - 1, 1, i); + } + if (!current->sps_sub_layer_ordering_info_present_flag) { + for (i = 0; i < current->sps_max_sub_layers_minus1; i++) { + infer(sps_max_dec_pic_buffering_minus1[i], + current->sps_max_dec_pic_buffering_minus1[current->sps_max_sub_layers_minus1]); + infer(sps_max_num_reorder_pics[i], + current->sps_max_num_reorder_pics[current->sps_max_sub_layers_minus1]); + infer(sps_max_latency_increase_plus1[i], + current->sps_max_latency_increase_plus1[current->sps_max_sub_layers_minus1]); + } + } + + ue(log2_min_luma_coding_block_size_minus3, 0, 3); + min_cb_log2_size_y = current->log2_min_luma_coding_block_size_minus3 + 3; + + ue(log2_diff_max_min_luma_coding_block_size, 0, 3); + ctb_log2_size_y = min_cb_log2_size_y + + current->log2_diff_max_min_luma_coding_block_size; + + min_cb_size_y = 1 << min_cb_log2_size_y; + if (current->pic_width_in_luma_samples % min_cb_size_y || + current->pic_height_in_luma_samples % min_cb_size_y) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid dimensions: %ux%u not divisible " + "by MinCbSizeY = %u.\n", current->pic_width_in_luma_samples, + current->pic_height_in_luma_samples, min_cb_size_y); + return AVERROR_INVALIDDATA; + } + + ue(log2_min_luma_transform_block_size_minus2, 0, min_cb_log2_size_y - 3); + min_tb_log2_size_y = current->log2_min_luma_transform_block_size_minus2 + 2; + + ue(log2_diff_max_min_luma_transform_block_size, + 0, FFMIN(ctb_log2_size_y, 5) - min_tb_log2_size_y); + + ue(max_transform_hierarchy_depth_inter, + 0, ctb_log2_size_y - min_tb_log2_size_y); + ue(max_transform_hierarchy_depth_intra, + 0, ctb_log2_size_y - min_tb_log2_size_y); + + flag(scaling_list_enabled_flag); + if 
(current->scaling_list_enabled_flag) { + flag(sps_scaling_list_data_present_flag); + if (current->sps_scaling_list_data_present_flag) + CHECK(FUNC(scaling_list_data)(ctx, rw, ¤t->scaling_list)); + } else { + infer(sps_scaling_list_data_present_flag, 0); + } + + flag(amp_enabled_flag); + flag(sample_adaptive_offset_enabled_flag); + + flag(pcm_enabled_flag); + if (current->pcm_enabled_flag) { + u(4, pcm_sample_bit_depth_luma_minus1, + 0, current->bit_depth_luma_minus8 + 8 - 1); + u(4, pcm_sample_bit_depth_chroma_minus1, + 0, current->bit_depth_chroma_minus8 + 8 - 1); + + ue(log2_min_pcm_luma_coding_block_size_minus3, + FFMIN(min_cb_log2_size_y, 5) - 3, FFMIN(ctb_log2_size_y, 5) - 3); + ue(log2_diff_max_min_pcm_luma_coding_block_size, + 0, FFMIN(ctb_log2_size_y, 5) - (current->log2_min_pcm_luma_coding_block_size_minus3 + 3)); + + flag(pcm_loop_filter_disabled_flag); + } + + ue(num_short_term_ref_pic_sets, 0, HEVC_MAX_SHORT_TERM_REF_PIC_SETS); + for (i = 0; i < current->num_short_term_ref_pic_sets; i++) + CHECK(FUNC(st_ref_pic_set)(ctx, rw, ¤t->st_ref_pic_set[i], i, current)); + + flag(long_term_ref_pics_present_flag); + if (current->long_term_ref_pics_present_flag) { + ue(num_long_term_ref_pics_sps, 0, HEVC_MAX_LONG_TERM_REF_PICS); + for (i = 0; i < current->num_long_term_ref_pics_sps; i++) { + ubs(current->log2_max_pic_order_cnt_lsb_minus4 + 4, + lt_ref_pic_poc_lsb_sps[i], 1, i); + flags(used_by_curr_pic_lt_sps_flag[i], 1, i); + } + } + + flag(sps_temporal_mvp_enabled_flag); + flag(strong_intra_smoothing_enabled_flag); + + flag(vui_parameters_present_flag); + if (current->vui_parameters_present_flag) + CHECK(FUNC(vui_parameters)(ctx, rw, ¤t->vui, current)); + else + CHECK(FUNC(vui_parameters_default)(ctx, rw, ¤t->vui, current)); + + flag(sps_extension_present_flag); + if (current->sps_extension_present_flag) { + flag(sps_range_extension_flag); + flag(sps_multilayer_extension_flag); + flag(sps_3d_extension_flag); + flag(sps_scc_extension_flag); + ub(4, 
sps_extension_4bits); + } + + if (current->sps_range_extension_flag) + CHECK(FUNC(sps_range_extension)(ctx, rw, current)); + if (current->sps_multilayer_extension_flag) + return AVERROR_PATCHWELCOME; + if (current->sps_3d_extension_flag) + return AVERROR_PATCHWELCOME; + if (current->sps_scc_extension_flag) + CHECK(FUNC(sps_scc_extension)(ctx, rw, current)); + if (current->sps_extension_4bits) + CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); + + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + + return 0; } static int FUNC(pps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawPPS *current) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int err, i; - - if(current->transform_skip_enabled_flag) - ue(log2_max_transform_skip_block_size_minus2, 0, 3); - flag(cross_component_prediction_enabled_flag); - - flag(chroma_qp_offset_list_enabled_flag); - if(current->chroma_qp_offset_list_enabled_flag) { - ue(diff_cu_chroma_qp_offset_depth, - 0, sps->log2_diff_max_min_luma_coding_block_size); - ue(chroma_qp_offset_list_len_minus1, 0, 5); - for(i = 0; i <= current->chroma_qp_offset_list_len_minus1; i++) { - ses(cb_qp_offset_list[i], -12, +12, 1, i); - ses(cr_qp_offset_list[i], -12, +12, 1, i); - } - } - - ue(log2_sao_offset_scale_luma, 0, FFMAX(0, sps->bit_depth_luma_minus8 - 2)); - ue(log2_sao_offset_scale_chroma, 0, FFMAX(0, sps->bit_depth_chroma_minus8 - 2)); - - return 0; + H265RawPPS *current) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps = h265->active_sps; + int err, i; + + if (current->transform_skip_enabled_flag) + ue(log2_max_transform_skip_block_size_minus2, 0, 3); + flag(cross_component_prediction_enabled_flag); + + flag(chroma_qp_offset_list_enabled_flag); + if (current->chroma_qp_offset_list_enabled_flag) { + ue(diff_cu_chroma_qp_offset_depth, + 0, sps->log2_diff_max_min_luma_coding_block_size); + ue(chroma_qp_offset_list_len_minus1, 0, 5); + for (i = 0; i <= 
current->chroma_qp_offset_list_len_minus1; i++) { + ses(cb_qp_offset_list[i], -12, +12, 1, i); + ses(cr_qp_offset_list[i], -12, +12, 1, i); + } + } + + ue(log2_sao_offset_scale_luma, 0, FFMAX(0, sps->bit_depth_luma_minus8 - 2)); + ue(log2_sao_offset_scale_chroma, 0, FFMAX(0, sps->bit_depth_chroma_minus8 - 2)); + + return 0; } static int FUNC(pps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawPPS *current) { - int err, comp, i; - - flag(pps_curr_pic_ref_enabled_flag); - - flag(residual_adaptive_colour_transform_enabled_flag); - if(current->residual_adaptive_colour_transform_enabled_flag) { - flag(pps_slice_act_qp_offsets_present_flag); - se(pps_act_y_qp_offset_plus5, -7, +17); - se(pps_act_cb_qp_offset_plus5, -7, +17); - se(pps_act_cr_qp_offset_plus3, -9, +15); - } - else { - infer(pps_slice_act_qp_offsets_present_flag, 0); - infer(pps_act_y_qp_offset_plus5, 0); - infer(pps_act_cb_qp_offset_plus5, 0); - infer(pps_act_cr_qp_offset_plus3, 0); - } - - flag(pps_palette_predictor_initializer_present_flag); - if(current->pps_palette_predictor_initializer_present_flag) { - ue(pps_num_palette_predictor_initializer, 0, 128); - if(current->pps_num_palette_predictor_initializer > 0) { - flag(monochrome_palette_flag); - ue(luma_bit_depth_entry_minus8, 0, 8); - if(!current->monochrome_palette_flag) - ue(chroma_bit_depth_entry_minus8, 0, 8); - for(comp = 0; comp < (current->monochrome_palette_flag ? 1 : 3); comp++) { - int bit_depth = comp == 0 ? 
current->luma_bit_depth_entry_minus8 + 8 : current->chroma_bit_depth_entry_minus8 + 8; - for(i = 0; i < current->pps_num_palette_predictor_initializer; i++) - ubs(bit_depth, pps_palette_predictor_initializers[comp][i], 2, comp, i); - } - } - } - - return 0; + H265RawPPS *current) +{ + int err, comp, i; + + flag(pps_curr_pic_ref_enabled_flag); + + flag(residual_adaptive_colour_transform_enabled_flag); + if (current->residual_adaptive_colour_transform_enabled_flag) { + flag(pps_slice_act_qp_offsets_present_flag); + se(pps_act_y_qp_offset_plus5, -7, +17); + se(pps_act_cb_qp_offset_plus5, -7, +17); + se(pps_act_cr_qp_offset_plus3, -9, +15); + } else { + infer(pps_slice_act_qp_offsets_present_flag, 0); + infer(pps_act_y_qp_offset_plus5, 0); + infer(pps_act_cb_qp_offset_plus5, 0); + infer(pps_act_cr_qp_offset_plus3, 0); + } + + flag(pps_palette_predictor_initializer_present_flag); + if (current->pps_palette_predictor_initializer_present_flag) { + ue(pps_num_palette_predictor_initializer, 0, 128); + if (current->pps_num_palette_predictor_initializer > 0) { + flag(monochrome_palette_flag); + ue(luma_bit_depth_entry_minus8, 0, 8); + if (!current->monochrome_palette_flag) + ue(chroma_bit_depth_entry_minus8, 0, 8); + for (comp = 0; comp < (current->monochrome_palette_flag ? 1 : 3); comp++) { + int bit_depth = comp == 0 ? 
current->luma_bit_depth_entry_minus8 + 8 + : current->chroma_bit_depth_entry_minus8 + 8; + for (i = 0; i < current->pps_num_palette_predictor_initializer; i++) + ubs(bit_depth, pps_palette_predictor_initializers[comp][i], 2, comp, i); + } + } + } + + return 0; } static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawPPS *current) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - int err, i; - - HEADER("Picture Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_PPS)); - - ue(pps_pic_parameter_set_id, 0, 63); - ue(pps_seq_parameter_set_id, 0, 15); - sps = h265->sps[current->pps_seq_parameter_set_id]; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - current->pps_seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_sps = sps; - - flag(dependent_slice_segments_enabled_flag); - flag(output_flag_present_flag); - ub(3, num_extra_slice_header_bits); - flag(sign_data_hiding_enabled_flag); - flag(cabac_init_present_flag); - - ue(num_ref_idx_l0_default_active_minus1, 0, 14); - ue(num_ref_idx_l1_default_active_minus1, 0, 14); - - se(init_qp_minus26, -(26 + 6 * sps->bit_depth_luma_minus8), +25); - - flag(constrained_intra_pred_flag); - flag(transform_skip_enabled_flag); - flag(cu_qp_delta_enabled_flag); - if(current->cu_qp_delta_enabled_flag) - ue(diff_cu_qp_delta_depth, - 0, sps->log2_diff_max_min_luma_coding_block_size); - else - infer(diff_cu_qp_delta_depth, 0); - - se(pps_cb_qp_offset, -12, +12); - se(pps_cr_qp_offset, -12, +12); - flag(pps_slice_chroma_qp_offsets_present_flag); - - flag(weighted_pred_flag); - flag(weighted_bipred_flag); - - flag(transquant_bypass_enabled_flag); - flag(tiles_enabled_flag); - flag(entropy_coding_sync_enabled_flag); - - if(current->tiles_enabled_flag) { - ue(num_tile_columns_minus1, 0, HEVC_MAX_TILE_COLUMNS); - ue(num_tile_rows_minus1, 0, HEVC_MAX_TILE_ROWS); - flag(uniform_spacing_flag); - 
if(!current->uniform_spacing_flag) { - for(i = 0; i < current->num_tile_columns_minus1; i++) - ues(column_width_minus1[i], 0, sps->pic_width_in_luma_samples, 1, i); - for(i = 0; i < current->num_tile_rows_minus1; i++) - ues(row_height_minus1[i], 0, sps->pic_height_in_luma_samples, 1, i); - } - flag(loop_filter_across_tiles_enabled_flag); - } - else { - infer(num_tile_columns_minus1, 0); - infer(num_tile_rows_minus1, 0); - } - - flag(pps_loop_filter_across_slices_enabled_flag); - flag(deblocking_filter_control_present_flag); - if(current->deblocking_filter_control_present_flag) { - flag(deblocking_filter_override_enabled_flag); - flag(pps_deblocking_filter_disabled_flag); - if(!current->pps_deblocking_filter_disabled_flag) { - se(pps_beta_offset_div2, -6, +6); - se(pps_tc_offset_div2, -6, +6); - } - else { - infer(pps_beta_offset_div2, 0); - infer(pps_tc_offset_div2, 0); - } - } - else { - infer(deblocking_filter_override_enabled_flag, 0); - infer(pps_deblocking_filter_disabled_flag, 0); - infer(pps_beta_offset_div2, 0); - infer(pps_tc_offset_div2, 0); - } - - flag(pps_scaling_list_data_present_flag); - if(current->pps_scaling_list_data_present_flag) - CHECK(FUNC(scaling_list_data)(ctx, rw, ¤t->scaling_list)); - - flag(lists_modification_present_flag); - - ue(log2_parallel_merge_level_minus2, - 0, (sps->log2_min_luma_coding_block_size_minus3 + 3 + sps->log2_diff_max_min_luma_coding_block_size - 2)); - - flag(slice_segment_header_extension_present_flag); - - flag(pps_extension_present_flag); - if(current->pps_extension_present_flag) { - flag(pps_range_extension_flag); - flag(pps_multilayer_extension_flag); - flag(pps_3d_extension_flag); - flag(pps_scc_extension_flag); - ub(4, pps_extension_4bits); - } - if(current->pps_range_extension_flag) - CHECK(FUNC(pps_range_extension)(ctx, rw, current)); - if(current->pps_multilayer_extension_flag) - return AVERROR_PATCHWELCOME; - if(current->pps_3d_extension_flag) - return AVERROR_PATCHWELCOME; - 
if(current->pps_scc_extension_flag) - CHECK(FUNC(pps_scc_extension)(ctx, rw, current)); - if(current->pps_extension_4bits) - CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; + H265RawPPS *current) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps; + int err, i; + + HEADER("Picture Parameter Set"); + + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_PPS)); + + ue(pps_pic_parameter_set_id, 0, 63); + ue(pps_seq_parameter_set_id, 0, 15); + sps = h265->sps[current->pps_seq_parameter_set_id]; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", + current->pps_seq_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h265->active_sps = sps; + + flag(dependent_slice_segments_enabled_flag); + flag(output_flag_present_flag); + ub(3, num_extra_slice_header_bits); + flag(sign_data_hiding_enabled_flag); + flag(cabac_init_present_flag); + + ue(num_ref_idx_l0_default_active_minus1, 0, 14); + ue(num_ref_idx_l1_default_active_minus1, 0, 14); + + se(init_qp_minus26, -(26 + 6 * sps->bit_depth_luma_minus8), +25); + + flag(constrained_intra_pred_flag); + flag(transform_skip_enabled_flag); + flag(cu_qp_delta_enabled_flag); + if (current->cu_qp_delta_enabled_flag) + ue(diff_cu_qp_delta_depth, + 0, sps->log2_diff_max_min_luma_coding_block_size); + else + infer(diff_cu_qp_delta_depth, 0); + + se(pps_cb_qp_offset, -12, +12); + se(pps_cr_qp_offset, -12, +12); + flag(pps_slice_chroma_qp_offsets_present_flag); + + flag(weighted_pred_flag); + flag(weighted_bipred_flag); + + flag(transquant_bypass_enabled_flag); + flag(tiles_enabled_flag); + flag(entropy_coding_sync_enabled_flag); + + if (current->tiles_enabled_flag) { + ue(num_tile_columns_minus1, 0, HEVC_MAX_TILE_COLUMNS); + ue(num_tile_rows_minus1, 0, HEVC_MAX_TILE_ROWS); + flag(uniform_spacing_flag); + if (!current->uniform_spacing_flag) { + for (i = 0; i < current->num_tile_columns_minus1; i++) + 
ues(column_width_minus1[i], 0, sps->pic_width_in_luma_samples, 1, i); + for (i = 0; i < current->num_tile_rows_minus1; i++) + ues(row_height_minus1[i], 0, sps->pic_height_in_luma_samples, 1, i); + } + flag(loop_filter_across_tiles_enabled_flag); + } else { + infer(num_tile_columns_minus1, 0); + infer(num_tile_rows_minus1, 0); + } + + flag(pps_loop_filter_across_slices_enabled_flag); + flag(deblocking_filter_control_present_flag); + if (current->deblocking_filter_control_present_flag) { + flag(deblocking_filter_override_enabled_flag); + flag(pps_deblocking_filter_disabled_flag); + if (!current->pps_deblocking_filter_disabled_flag) { + se(pps_beta_offset_div2, -6, +6); + se(pps_tc_offset_div2, -6, +6); + } else { + infer(pps_beta_offset_div2, 0); + infer(pps_tc_offset_div2, 0); + } + } else { + infer(deblocking_filter_override_enabled_flag, 0); + infer(pps_deblocking_filter_disabled_flag, 0); + infer(pps_beta_offset_div2, 0); + infer(pps_tc_offset_div2, 0); + } + + flag(pps_scaling_list_data_present_flag); + if (current->pps_scaling_list_data_present_flag) + CHECK(FUNC(scaling_list_data)(ctx, rw, ¤t->scaling_list)); + + flag(lists_modification_present_flag); + + ue(log2_parallel_merge_level_minus2, + 0, (sps->log2_min_luma_coding_block_size_minus3 + 3 + + sps->log2_diff_max_min_luma_coding_block_size - 2)); + + flag(slice_segment_header_extension_present_flag); + + flag(pps_extension_present_flag); + if (current->pps_extension_present_flag) { + flag(pps_range_extension_flag); + flag(pps_multilayer_extension_flag); + flag(pps_3d_extension_flag); + flag(pps_scc_extension_flag); + ub(4, pps_extension_4bits); + } + if (current->pps_range_extension_flag) + CHECK(FUNC(pps_range_extension)(ctx, rw, current)); + if (current->pps_multilayer_extension_flag) + return AVERROR_PATCHWELCOME; + if (current->pps_3d_extension_flag) + return AVERROR_PATCHWELCOME; + if (current->pps_scc_extension_flag) + CHECK(FUNC(pps_scc_extension)(ctx, rw, current)); + if 
(current->pps_extension_4bits) + CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); + + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + + return 0; } static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawAUD *current) { - int err; + H265RawAUD *current) +{ + int err; - HEADER("Access Unit Delimiter"); + HEADER("Access Unit Delimiter"); - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_AUD)); + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_AUD)); - u(3, pic_type, 0, 2); + u(3, pic_type, 0, 2); - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(ref_pic_lists_modification)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSliceHeader *current, - unsigned int num_pic_total_curr) { - unsigned int entry_size; - int err, i; + H265RawSliceHeader *current, + unsigned int num_pic_total_curr) +{ + unsigned int entry_size; + int err, i; - entry_size = av_log2(num_pic_total_curr - 1) + 1; + entry_size = av_log2(num_pic_total_curr - 1) + 1; - flag(ref_pic_list_modification_flag_l0); - if(current->ref_pic_list_modification_flag_l0) { - for(i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) - us(entry_size, list_entry_l0[i], 0, num_pic_total_curr - 1, 1, i); - } + flag(ref_pic_list_modification_flag_l0); + if (current->ref_pic_list_modification_flag_l0) { + for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) + us(entry_size, list_entry_l0[i], 0, num_pic_total_curr - 1, 1, i); + } - if(current->slice_type == HEVC_SLICE_B) { - flag(ref_pic_list_modification_flag_l1); - if(current->ref_pic_list_modification_flag_l1) { - for(i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) - us(entry_size, list_entry_l1[i], 0, num_pic_total_curr - 1, 1, i); + if (current->slice_type == HEVC_SLICE_B) { + flag(ref_pic_list_modification_flag_l1); + if (current->ref_pic_list_modification_flag_l1) { + for (i = 0; i <= 
current->num_ref_idx_l1_active_minus1; i++) + us(entry_size, list_entry_l1[i], 0, num_pic_total_curr - 1, 1, i); + } } - } - return 0; + return 0; } static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSliceHeader *current) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int err, i, j; - int chroma = !sps->separate_colour_plane_flag && - sps->chroma_format_idc != 0; - - ue(luma_log2_weight_denom, 0, 7); - if(chroma) - se(delta_chroma_log2_weight_denom, -7, 7); - else - infer(delta_chroma_log2_weight_denom, 0); - - for(i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - if(1 /* is not same POC and same layer_id */) - flags(luma_weight_l0_flag[i], 1, i); + H265RawSliceHeader *current) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps = h265->active_sps; + int err, i, j; + int chroma = !sps->separate_colour_plane_flag && + sps->chroma_format_idc != 0; + + ue(luma_log2_weight_denom, 0, 7); + if (chroma) + se(delta_chroma_log2_weight_denom, -7, 7); else - infer(luma_weight_l0_flag[i], 0); - } - if(chroma) { - for(i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - if(1 /* is not same POC and same layer_id */) - flags(chroma_weight_l0_flag[i], 1, i); - else - infer(chroma_weight_l0_flag[i], 0); - } - } - - for(i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - if(current->luma_weight_l0_flag[i]) { - ses(delta_luma_weight_l0[i], -128, +127, 1, i); - ses(luma_offset_l0[i], - -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)), - ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i); - } - else { - infer(delta_luma_weight_l0[i], 0); - infer(luma_offset_l0[i], 0); - } - if(current->chroma_weight_l0_flag[i]) { - for(j = 0; j < 2; j++) { - ses(delta_chroma_weight_l0[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l0[i][j], - -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)), - ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j); - } - 
} - else { - for(j = 0; j < 2; j++) { - infer(delta_chroma_weight_l0[i][j], 0); - infer(chroma_offset_l0[i][j], 0); - } - } - } - - if(current->slice_type == HEVC_SLICE_B) { - for(i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - if(1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */) - flags(luma_weight_l1_flag[i], 1, i); - else - infer(luma_weight_l1_flag[i], 0); - } - if(chroma) { - for(i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - if(1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */) - flags(chroma_weight_l1_flag[i], 1, i); + infer(delta_chroma_log2_weight_denom, 0); + + for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { + if (1 /* is not same POC and same layer_id */) + flags(luma_weight_l0_flag[i], 1, i); else - infer(chroma_weight_l1_flag[i], 0); - } - } - - for(i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - if(current->luma_weight_l1_flag[i]) { - ses(delta_luma_weight_l1[i], -128, +127, 1, i); - ses(luma_offset_l1[i], - -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)), - ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i); - } - else { - infer(delta_luma_weight_l1[i], 0); - infer(luma_offset_l1[i], 0); - } - if(current->chroma_weight_l1_flag[i]) { - for(j = 0; j < 2; j++) { - ses(delta_chroma_weight_l1[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l1[i][j], - -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)), - ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j); + infer(luma_weight_l0_flag[i], 0); + } + if (chroma) { + for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { + if (1 /* is not same POC and same layer_id */) + flags(chroma_weight_l0_flag[i], 1, i); + else + infer(chroma_weight_l0_flag[i], 0); } - } - else { - for(j = 0; j < 2; j++) { - infer(delta_chroma_weight_l1[i][j], 0); - infer(chroma_offset_l1[i][j], 0); - } - } } - } - - return 0; -} -static int FUNC(slice_segment_header)(CodedBitstreamContext *ctx, RWContext *rw, - 
H265RawSliceHeader *current) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - const H265RawPPS *pps; - unsigned int min_cb_log2_size_y, ctb_log2_size_y, ctb_size_y; - unsigned int pic_width_in_ctbs_y, pic_height_in_ctbs_y, pic_size_in_ctbs_y; - unsigned int num_pic_total_curr = 0; - int err, i; - - HEADER("Slice Segment Header"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, -1)); - - flag(first_slice_segment_in_pic_flag); - - if(current->nal_unit_header.nal_unit_type >= HEVC_NAL_BLA_W_LP && - current->nal_unit_header.nal_unit_type <= HEVC_NAL_RSV_IRAP_VCL23) - flag(no_output_of_prior_pics_flag); - - ue(slice_pic_parameter_set_id, 0, 63); - - pps = h265->pps[current->slice_pic_parameter_set_id]; - if(!pps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n", - current->slice_pic_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_pps = pps; - - sps = h265->sps[pps->pps_seq_parameter_set_id]; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - pps->pps_seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_sps = sps; - - min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3; - ctb_log2_size_y = min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size; - ctb_size_y = 1 << ctb_log2_size_y; - pic_width_in_ctbs_y = - (sps->pic_width_in_luma_samples + ctb_size_y - 1) / ctb_size_y; - pic_height_in_ctbs_y = - (sps->pic_height_in_luma_samples + ctb_size_y - 1) / ctb_size_y; - pic_size_in_ctbs_y = pic_width_in_ctbs_y * pic_height_in_ctbs_y; - - if(!current->first_slice_segment_in_pic_flag) { - unsigned int address_size = av_log2(pic_size_in_ctbs_y - 1) + 1; - if(pps->dependent_slice_segments_enabled_flag) - flag(dependent_slice_segment_flag); - else - infer(dependent_slice_segment_flag, 0); - u(address_size, slice_segment_address, 0, pic_size_in_ctbs_y - 1); - } - else { - infer(dependent_slice_segment_flag, 0); - } - - 
if(!current->dependent_slice_segment_flag) { - for(i = 0; i < pps->num_extra_slice_header_bits; i++) - flags(slice_reserved_flag[i], 1, i); - - ue(slice_type, 0, 2); - - if(pps->output_flag_present_flag) - flag(pic_output_flag); - - if(sps->separate_colour_plane_flag) - u(2, colour_plane_id, 0, 2); - - if(current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_W_RADL && - current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_N_LP) { - const H265RawSTRefPicSet *rps; - int dpb_slots_remaining; - - ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, slice_pic_order_cnt_lsb); - - flag(short_term_ref_pic_set_sps_flag); - if(!current->short_term_ref_pic_set_sps_flag) { - CHECK(FUNC(st_ref_pic_set)(ctx, rw, ¤t->short_term_ref_pic_set, - sps->num_short_term_ref_pic_sets, sps)); - rps = ¤t->short_term_ref_pic_set; - } - else if(sps->num_short_term_ref_pic_sets > 1) { - unsigned int idx_size = av_log2(sps->num_short_term_ref_pic_sets - 1) + 1; - u(idx_size, short_term_ref_pic_set_idx, - 0, sps->num_short_term_ref_pic_sets - 1); - rps = &sps->st_ref_pic_set[current->short_term_ref_pic_set_idx]; - } - else { - infer(short_term_ref_pic_set_idx, 0); - rps = &sps->st_ref_pic_set[0]; - } - - dpb_slots_remaining = HEVC_MAX_DPB_SIZE - 1 - - rps->num_negative_pics - rps->num_positive_pics; - if(pps->pps_curr_pic_ref_enabled_flag && - (sps->sample_adaptive_offset_enabled_flag || - !pps->pps_deblocking_filter_disabled_flag || - pps->deblocking_filter_override_enabled_flag)) { - // This picture will occupy two DPB slots. 
- if(dpb_slots_remaining == 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "short-term ref pic set contains too many pictures " - "to use with current picture reference enabled.\n"); - return AVERROR_INVALIDDATA; + for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { + if (current->luma_weight_l0_flag[i]) { + ses(delta_luma_weight_l0[i], -128, +127, 1, i); + ses(luma_offset_l0[i], + -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)), + ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i); + } else { + infer(delta_luma_weight_l0[i], 0); + infer(luma_offset_l0[i], 0); } - --dpb_slots_remaining; - } - - num_pic_total_curr = 0; - for(i = 0; i < rps->num_negative_pics; i++) - if(rps->used_by_curr_pic_s0_flag[i]) - ++num_pic_total_curr; - for(i = 0; i < rps->num_positive_pics; i++) - if(rps->used_by_curr_pic_s1_flag[i]) - ++num_pic_total_curr; - - if(sps->long_term_ref_pics_present_flag) { - unsigned int idx_size; - - if(sps->num_long_term_ref_pics_sps > 0) { - ue(num_long_term_sps, 0, FFMIN(sps->num_long_term_ref_pics_sps, dpb_slots_remaining)); - idx_size = av_log2(sps->num_long_term_ref_pics_sps - 1) + 1; - dpb_slots_remaining -= current->num_long_term_sps; + if (current->chroma_weight_l0_flag[i]) { + for (j = 0; j < 2; j++) { + ses(delta_chroma_weight_l0[i][j], -128, +127, 2, i, j); + ses(chroma_offset_l0[i][j], + -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)), + ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j); + } + } else { + for (j = 0; j < 2; j++) { + infer(delta_chroma_weight_l0[i][j], 0); + infer(chroma_offset_l0[i][j], 0); + } } - else { - infer(num_long_term_sps, 0); - idx_size = 0; + } + + if (current->slice_type == HEVC_SLICE_B) { + for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { + if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */) + flags(luma_weight_l1_flag[i], 1, i); + else + infer(luma_weight_l1_flag[i], 0); } - ue(num_long_term_pics, 0, dpb_slots_remaining); - - for(i = 0; i < 
current->num_long_term_sps + - current->num_long_term_pics; - i++) { - if(i < current->num_long_term_sps) { - if(sps->num_long_term_ref_pics_sps > 1) - us(idx_size, lt_idx_sps[i], - 0, sps->num_long_term_ref_pics_sps - 1, 1, i); - if(sps->used_by_curr_pic_lt_sps_flag[current->lt_idx_sps[i]]) - ++num_pic_total_curr; - } - else { - ubs(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, poc_lsb_lt[i], 1, i); - flags(used_by_curr_pic_lt_flag[i], 1, i); - if(current->used_by_curr_pic_lt_flag[i]) - ++num_pic_total_curr; - } - flags(delta_poc_msb_present_flag[i], 1, i); - if(current->delta_poc_msb_present_flag[i]) - ues(delta_poc_msb_cycle_lt[i], 0, UINT32_MAX - 1, 1, i); - else - infer(delta_poc_msb_cycle_lt[i], 0); + if (chroma) { + for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { + if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */) + flags(chroma_weight_l1_flag[i], 1, i); + else + infer(chroma_weight_l1_flag[i], 0); + } } - } - if(sps->sps_temporal_mvp_enabled_flag) - flag(slice_temporal_mvp_enabled_flag); - else - infer(slice_temporal_mvp_enabled_flag, 0); - - if(pps->pps_curr_pic_ref_enabled_flag) - ++num_pic_total_curr; + for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { + if (current->luma_weight_l1_flag[i]) { + ses(delta_luma_weight_l1[i], -128, +127, 1, i); + ses(luma_offset_l1[i], + -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)), + ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i); + } else { + infer(delta_luma_weight_l1[i], 0); + infer(luma_offset_l1[i], 0); + } + if (current->chroma_weight_l1_flag[i]) { + for (j = 0; j < 2; j++) { + ses(delta_chroma_weight_l1[i][j], -128, +127, 2, i, j); + ses(chroma_offset_l1[i][j], + -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)), + ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j); + } + } else { + for (j = 0; j < 2; j++) { + infer(delta_chroma_weight_l1[i][j], 0); + infer(chroma_offset_l1[i][j], 0); + } + } + } } - if(sps->sample_adaptive_offset_enabled_flag) { - 
flag(slice_sao_luma_flag); - if(!sps->separate_colour_plane_flag && sps->chroma_format_idc != 0) - flag(slice_sao_chroma_flag); - else - infer(slice_sao_chroma_flag, 0); - } - else { - infer(slice_sao_luma_flag, 0); - infer(slice_sao_chroma_flag, 0); - } + return 0; +} - if(current->slice_type == HEVC_SLICE_P || - current->slice_type == HEVC_SLICE_B) { - flag(num_ref_idx_active_override_flag); - if(current->num_ref_idx_active_override_flag) { - ue(num_ref_idx_l0_active_minus1, 0, 14); - if(current->slice_type == HEVC_SLICE_B) - ue(num_ref_idx_l1_active_minus1, 0, 14); - else - infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1); - } - else { - infer(num_ref_idx_l0_active_minus1, pps->num_ref_idx_l0_default_active_minus1); - infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1); - } - - if(pps->lists_modification_present_flag && num_pic_total_curr > 1) - CHECK(FUNC(ref_pic_lists_modification)(ctx, rw, current, - num_pic_total_curr)); - - if(current->slice_type == HEVC_SLICE_B) - flag(mvd_l1_zero_flag); - if(pps->cabac_init_present_flag) - flag(cabac_init_flag); - else - infer(cabac_init_flag, 0); - if(current->slice_temporal_mvp_enabled_flag) { - if(current->slice_type == HEVC_SLICE_B) - flag(collocated_from_l0_flag); +static int FUNC(slice_segment_header)(CodedBitstreamContext *ctx, RWContext *rw, + H265RawSliceHeader *current) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps; + const H265RawPPS *pps; + unsigned int min_cb_log2_size_y, ctb_log2_size_y, ctb_size_y; + unsigned int pic_width_in_ctbs_y, pic_height_in_ctbs_y, pic_size_in_ctbs_y; + unsigned int num_pic_total_curr = 0; + int err, i; + + HEADER("Slice Segment Header"); + + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, -1)); + + flag(first_slice_segment_in_pic_flag); + + if (current->nal_unit_header.nal_unit_type >= HEVC_NAL_BLA_W_LP && + current->nal_unit_header.nal_unit_type <= HEVC_NAL_RSV_IRAP_VCL23) + 
flag(no_output_of_prior_pics_flag); + + ue(slice_pic_parameter_set_id, 0, 63); + + pps = h265->pps[current->slice_pic_parameter_set_id]; + if (!pps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n", + current->slice_pic_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h265->active_pps = pps; + + sps = h265->sps[pps->pps_seq_parameter_set_id]; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", + pps->pps_seq_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h265->active_sps = sps; + + min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3; + ctb_log2_size_y = min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size; + ctb_size_y = 1 << ctb_log2_size_y; + pic_width_in_ctbs_y = + (sps->pic_width_in_luma_samples + ctb_size_y - 1) / ctb_size_y; + pic_height_in_ctbs_y = + (sps->pic_height_in_luma_samples + ctb_size_y - 1) / ctb_size_y; + pic_size_in_ctbs_y = pic_width_in_ctbs_y * pic_height_in_ctbs_y; + + if (!current->first_slice_segment_in_pic_flag) { + unsigned int address_size = av_log2(pic_size_in_ctbs_y - 1) + 1; + if (pps->dependent_slice_segments_enabled_flag) + flag(dependent_slice_segment_flag); else - infer(collocated_from_l0_flag, 1); - if(current->collocated_from_l0_flag) { - if(current->num_ref_idx_l0_active_minus1 > 0) - ue(collocated_ref_idx, 0, current->num_ref_idx_l0_active_minus1); - else - infer(collocated_ref_idx, 0); + infer(dependent_slice_segment_flag, 0); + u(address_size, slice_segment_address, 0, pic_size_in_ctbs_y - 1); + } else { + infer(dependent_slice_segment_flag, 0); + } + + if (!current->dependent_slice_segment_flag) { + for (i = 0; i < pps->num_extra_slice_header_bits; i++) + flags(slice_reserved_flag[i], 1, i); + + ue(slice_type, 0, 2); + + if (pps->output_flag_present_flag) + flag(pic_output_flag); + + if (sps->separate_colour_plane_flag) + u(2, colour_plane_id, 0, 2); + + if (current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_W_RADL && + 
current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_N_LP) { + const H265RawSTRefPicSet *rps; + int dpb_slots_remaining; + + ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, slice_pic_order_cnt_lsb); + + flag(short_term_ref_pic_set_sps_flag); + if (!current->short_term_ref_pic_set_sps_flag) { + CHECK(FUNC(st_ref_pic_set)(ctx, rw, ¤t->short_term_ref_pic_set, + sps->num_short_term_ref_pic_sets, sps)); + rps = ¤t->short_term_ref_pic_set; + } else if (sps->num_short_term_ref_pic_sets > 1) { + unsigned int idx_size = av_log2(sps->num_short_term_ref_pic_sets - 1) + 1; + u(idx_size, short_term_ref_pic_set_idx, + 0, sps->num_short_term_ref_pic_sets - 1); + rps = &sps->st_ref_pic_set[current->short_term_ref_pic_set_idx]; + } else { + infer(short_term_ref_pic_set_idx, 0); + rps = &sps->st_ref_pic_set[0]; + } + + dpb_slots_remaining = HEVC_MAX_DPB_SIZE - 1 - + rps->num_negative_pics - rps->num_positive_pics; + if (pps->pps_curr_pic_ref_enabled_flag && + (sps->sample_adaptive_offset_enabled_flag || + !pps->pps_deblocking_filter_disabled_flag || + pps->deblocking_filter_override_enabled_flag)) { + // This picture will occupy two DPB slots. 
+ if (dpb_slots_remaining == 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " + "short-term ref pic set contains too many pictures " + "to use with current picture reference enabled.\n"); + return AVERROR_INVALIDDATA; + } + --dpb_slots_remaining; + } + + num_pic_total_curr = 0; + for (i = 0; i < rps->num_negative_pics; i++) + if (rps->used_by_curr_pic_s0_flag[i]) + ++num_pic_total_curr; + for (i = 0; i < rps->num_positive_pics; i++) + if (rps->used_by_curr_pic_s1_flag[i]) + ++num_pic_total_curr; + + if (sps->long_term_ref_pics_present_flag) { + unsigned int idx_size; + + if (sps->num_long_term_ref_pics_sps > 0) { + ue(num_long_term_sps, 0, FFMIN(sps->num_long_term_ref_pics_sps, + dpb_slots_remaining)); + idx_size = av_log2(sps->num_long_term_ref_pics_sps - 1) + 1; + dpb_slots_remaining -= current->num_long_term_sps; + } else { + infer(num_long_term_sps, 0); + idx_size = 0; + } + ue(num_long_term_pics, 0, dpb_slots_remaining); + + for (i = 0; i < current->num_long_term_sps + + current->num_long_term_pics; i++) { + if (i < current->num_long_term_sps) { + if (sps->num_long_term_ref_pics_sps > 1) + us(idx_size, lt_idx_sps[i], + 0, sps->num_long_term_ref_pics_sps - 1, 1, i); + if (sps->used_by_curr_pic_lt_sps_flag[current->lt_idx_sps[i]]) + ++num_pic_total_curr; + } else { + ubs(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, poc_lsb_lt[i], 1, i); + flags(used_by_curr_pic_lt_flag[i], 1, i); + if (current->used_by_curr_pic_lt_flag[i]) + ++num_pic_total_curr; + } + flags(delta_poc_msb_present_flag[i], 1, i); + if (current->delta_poc_msb_present_flag[i]) + ues(delta_poc_msb_cycle_lt[i], 0, UINT32_MAX - 1, 1, i); + else + infer(delta_poc_msb_cycle_lt[i], 0); + } + } + + if (sps->sps_temporal_mvp_enabled_flag) + flag(slice_temporal_mvp_enabled_flag); + else + infer(slice_temporal_mvp_enabled_flag, 0); + + if (pps->pps_curr_pic_ref_enabled_flag) + ++num_pic_total_curr; } - else { - if(current->num_ref_idx_l1_active_minus1 > 0) - ue(collocated_ref_idx, 0, 
current->num_ref_idx_l1_active_minus1); - else - infer(collocated_ref_idx, 0); + + if (sps->sample_adaptive_offset_enabled_flag) { + flag(slice_sao_luma_flag); + if (!sps->separate_colour_plane_flag && sps->chroma_format_idc != 0) + flag(slice_sao_chroma_flag); + else + infer(slice_sao_chroma_flag, 0); + } else { + infer(slice_sao_luma_flag, 0); + infer(slice_sao_chroma_flag, 0); } - } - - if((pps->weighted_pred_flag && current->slice_type == HEVC_SLICE_P) || - (pps->weighted_bipred_flag && current->slice_type == HEVC_SLICE_B)) - CHECK(FUNC(pred_weight_table)(ctx, rw, current)); - - ue(five_minus_max_num_merge_cand, 0, 4); - if(sps->motion_vector_resolution_control_idc == 2) - flag(use_integer_mv_flag); - else - infer(use_integer_mv_flag, sps->motion_vector_resolution_control_idc); - } - - se(slice_qp_delta, - -6 * sps->bit_depth_luma_minus8 - (pps->init_qp_minus26 + 26), - +51 - (pps->init_qp_minus26 + 26)); - if(pps->pps_slice_chroma_qp_offsets_present_flag) { - se(slice_cb_qp_offset, -12, +12); - se(slice_cr_qp_offset, -12, +12); - } - else { - infer(slice_cb_qp_offset, 0); - infer(slice_cr_qp_offset, 0); - } - if(pps->pps_slice_act_qp_offsets_present_flag) { - se(slice_act_y_qp_offset, - -12 - (pps->pps_act_y_qp_offset_plus5 - 5), - +12 - (pps->pps_act_y_qp_offset_plus5 - 5)); - se(slice_act_cb_qp_offset, - -12 - (pps->pps_act_cb_qp_offset_plus5 - 5), - +12 - (pps->pps_act_cb_qp_offset_plus5 - 5)); - se(slice_act_cr_qp_offset, - -12 - (pps->pps_act_cr_qp_offset_plus3 - 3), - +12 - (pps->pps_act_cr_qp_offset_plus3 - 3)); - } - else { - infer(slice_act_y_qp_offset, 0); - infer(slice_act_cb_qp_offset, 0); - infer(slice_act_cr_qp_offset, 0); - } - if(pps->chroma_qp_offset_list_enabled_flag) - flag(cu_chroma_qp_offset_enabled_flag); - else - infer(cu_chroma_qp_offset_enabled_flag, 0); - if(pps->deblocking_filter_override_enabled_flag) - flag(deblocking_filter_override_flag); - else - infer(deblocking_filter_override_flag, 0); - 
if(current->deblocking_filter_override_flag) { - flag(slice_deblocking_filter_disabled_flag); - if(!current->slice_deblocking_filter_disabled_flag) { - se(slice_beta_offset_div2, -6, +6); - se(slice_tc_offset_div2, -6, +6); - } - else { - infer(slice_beta_offset_div2, pps->pps_beta_offset_div2); - infer(slice_tc_offset_div2, pps->pps_tc_offset_div2); - } - } - else { - infer(slice_deblocking_filter_disabled_flag, - pps->pps_deblocking_filter_disabled_flag); - infer(slice_beta_offset_div2, pps->pps_beta_offset_div2); - infer(slice_tc_offset_div2, pps->pps_tc_offset_div2); - } - if(pps->pps_loop_filter_across_slices_enabled_flag && - (current->slice_sao_luma_flag || current->slice_sao_chroma_flag || - !current->slice_deblocking_filter_disabled_flag)) - flag(slice_loop_filter_across_slices_enabled_flag); - else - infer(slice_loop_filter_across_slices_enabled_flag, - pps->pps_loop_filter_across_slices_enabled_flag); - } - - if(pps->tiles_enabled_flag || pps->entropy_coding_sync_enabled_flag) { - unsigned int num_entry_point_offsets_limit; - if(!pps->tiles_enabled_flag && pps->entropy_coding_sync_enabled_flag) - num_entry_point_offsets_limit = pic_height_in_ctbs_y - 1; - else if(pps->tiles_enabled_flag && !pps->entropy_coding_sync_enabled_flag) - num_entry_point_offsets_limit = - (pps->num_tile_columns_minus1 + 1) * (pps->num_tile_rows_minus1 + 1); - else - num_entry_point_offsets_limit = - (pps->num_tile_columns_minus1 + 1) * pic_height_in_ctbs_y - 1; - ue(num_entry_point_offsets, 0, num_entry_point_offsets_limit); + if (current->slice_type == HEVC_SLICE_P || + current->slice_type == HEVC_SLICE_B) { + flag(num_ref_idx_active_override_flag); + if (current->num_ref_idx_active_override_flag) { + ue(num_ref_idx_l0_active_minus1, 0, 14); + if (current->slice_type == HEVC_SLICE_B) + ue(num_ref_idx_l1_active_minus1, 0, 14); + else + infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1); + } else { + infer(num_ref_idx_l0_active_minus1, 
pps->num_ref_idx_l0_default_active_minus1); + infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1); + } + + if (pps->lists_modification_present_flag && num_pic_total_curr > 1) + CHECK(FUNC(ref_pic_lists_modification)(ctx, rw, current, + num_pic_total_curr)); + + if (current->slice_type == HEVC_SLICE_B) + flag(mvd_l1_zero_flag); + if (pps->cabac_init_present_flag) + flag(cabac_init_flag); + else + infer(cabac_init_flag, 0); + if (current->slice_temporal_mvp_enabled_flag) { + if (current->slice_type == HEVC_SLICE_B) + flag(collocated_from_l0_flag); + else + infer(collocated_from_l0_flag, 1); + if (current->collocated_from_l0_flag) { + if (current->num_ref_idx_l0_active_minus1 > 0) + ue(collocated_ref_idx, 0, current->num_ref_idx_l0_active_minus1); + else + infer(collocated_ref_idx, 0); + } else { + if (current->num_ref_idx_l1_active_minus1 > 0) + ue(collocated_ref_idx, 0, current->num_ref_idx_l1_active_minus1); + else + infer(collocated_ref_idx, 0); + } + } + + if ((pps->weighted_pred_flag && current->slice_type == HEVC_SLICE_P) || + (pps->weighted_bipred_flag && current->slice_type == HEVC_SLICE_B)) + CHECK(FUNC(pred_weight_table)(ctx, rw, current)); + + ue(five_minus_max_num_merge_cand, 0, 4); + if (sps->motion_vector_resolution_control_idc == 2) + flag(use_integer_mv_flag); + else + infer(use_integer_mv_flag, sps->motion_vector_resolution_control_idc); + } + + se(slice_qp_delta, + - 6 * sps->bit_depth_luma_minus8 - (pps->init_qp_minus26 + 26), + + 51 - (pps->init_qp_minus26 + 26)); + if (pps->pps_slice_chroma_qp_offsets_present_flag) { + se(slice_cb_qp_offset, -12, +12); + se(slice_cr_qp_offset, -12, +12); + } else { + infer(slice_cb_qp_offset, 0); + infer(slice_cr_qp_offset, 0); + } + if (pps->pps_slice_act_qp_offsets_present_flag) { + se(slice_act_y_qp_offset, + -12 - (pps->pps_act_y_qp_offset_plus5 - 5), + +12 - (pps->pps_act_y_qp_offset_plus5 - 5)); + se(slice_act_cb_qp_offset, + -12 - (pps->pps_act_cb_qp_offset_plus5 - 5), + +12 - 
(pps->pps_act_cb_qp_offset_plus5 - 5)); + se(slice_act_cr_qp_offset, + -12 - (pps->pps_act_cr_qp_offset_plus3 - 3), + +12 - (pps->pps_act_cr_qp_offset_plus3 - 3)); + } else { + infer(slice_act_y_qp_offset, 0); + infer(slice_act_cb_qp_offset, 0); + infer(slice_act_cr_qp_offset, 0); + } + if (pps->chroma_qp_offset_list_enabled_flag) + flag(cu_chroma_qp_offset_enabled_flag); + else + infer(cu_chroma_qp_offset_enabled_flag, 0); - if(current->num_entry_point_offsets > HEVC_MAX_ENTRY_POINT_OFFSETS) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many entry points: " - "%" PRIu16 ".\n", - current->num_entry_point_offsets); - return AVERROR_PATCHWELCOME; + if (pps->deblocking_filter_override_enabled_flag) + flag(deblocking_filter_override_flag); + else + infer(deblocking_filter_override_flag, 0); + if (current->deblocking_filter_override_flag) { + flag(slice_deblocking_filter_disabled_flag); + if (!current->slice_deblocking_filter_disabled_flag) { + se(slice_beta_offset_div2, -6, +6); + se(slice_tc_offset_div2, -6, +6); + } else { + infer(slice_beta_offset_div2, pps->pps_beta_offset_div2); + infer(slice_tc_offset_div2, pps->pps_tc_offset_div2); + } + } else { + infer(slice_deblocking_filter_disabled_flag, + pps->pps_deblocking_filter_disabled_flag); + infer(slice_beta_offset_div2, pps->pps_beta_offset_div2); + infer(slice_tc_offset_div2, pps->pps_tc_offset_div2); + } + if (pps->pps_loop_filter_across_slices_enabled_flag && + (current->slice_sao_luma_flag || current->slice_sao_chroma_flag || + !current->slice_deblocking_filter_disabled_flag)) + flag(slice_loop_filter_across_slices_enabled_flag); + else + infer(slice_loop_filter_across_slices_enabled_flag, + pps->pps_loop_filter_across_slices_enabled_flag); } - if(current->num_entry_point_offsets > 0) { - ue(offset_len_minus1, 0, 31); - for(i = 0; i < current->num_entry_point_offsets; i++) - ubs(current->offset_len_minus1 + 1, entry_point_offset_minus1[i], 1, i); + if (pps->tiles_enabled_flag || 
pps->entropy_coding_sync_enabled_flag) { + unsigned int num_entry_point_offsets_limit; + if (!pps->tiles_enabled_flag && pps->entropy_coding_sync_enabled_flag) + num_entry_point_offsets_limit = pic_height_in_ctbs_y - 1; + else if (pps->tiles_enabled_flag && !pps->entropy_coding_sync_enabled_flag) + num_entry_point_offsets_limit = + (pps->num_tile_columns_minus1 + 1) * (pps->num_tile_rows_minus1 + 1); + else + num_entry_point_offsets_limit = + (pps->num_tile_columns_minus1 + 1) * pic_height_in_ctbs_y - 1; + ue(num_entry_point_offsets, 0, num_entry_point_offsets_limit); + + if (current->num_entry_point_offsets > HEVC_MAX_ENTRY_POINT_OFFSETS) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many entry points: " + "%"PRIu16".\n", current->num_entry_point_offsets); + return AVERROR_PATCHWELCOME; + } + + if (current->num_entry_point_offsets > 0) { + ue(offset_len_minus1, 0, 31); + for (i = 0; i < current->num_entry_point_offsets; i++) + ubs(current->offset_len_minus1 + 1, entry_point_offset_minus1[i], 1, i); + } } - } - if(pps->slice_segment_header_extension_present_flag) { - ue(slice_segment_header_extension_length, 0, 256); - for(i = 0; i < current->slice_segment_header_extension_length; i++) - us(8, slice_segment_header_extension_data_byte[i], 0x00, 0xff, 1, i); - } + if (pps->slice_segment_header_extension_present_flag) { + ue(slice_segment_header_extension_length, 0, 256); + for (i = 0; i < current->slice_segment_header_extension_length; i++) + us(8, slice_segment_header_extension_data_byte[i], 0x00, 0xff, 1, i); + } - CHECK(FUNC(byte_alignment)(ctx, rw)); + CHECK(FUNC(byte_alignment)(ctx, rw)); - return 0; + return 0; } -static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIBufferingPeriod *current, SEIMessageState *sei) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - const H265RawHRDParameters *hrd; - int err, i, length; +static int FUNC(sei_buffering_period) + (CodedBitstreamContext *ctx, 
RWContext *rw, + H265RawSEIBufferingPeriod *current, SEIMessageState *sei) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps; + const H265RawHRDParameters *hrd; + int err, i, length; #ifdef READ - int start_pos, end_pos; - start_pos = get_bits_count(rw); + int start_pos, end_pos; + start_pos = get_bits_count(rw); #endif - HEADER("Buffering Period"); - - ue(bp_seq_parameter_set_id, 0, HEVC_MAX_SPS_COUNT - 1); - - sps = h265->sps[current->bp_seq_parameter_set_id]; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - current->bp_seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_sps = sps; - - if(!sps->vui_parameters_present_flag || - !sps->vui.vui_hrd_parameters_present_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires " - "HRD parameters to be present in SPS.\n"); - return AVERROR_INVALIDDATA; - } - hrd = &sps->vui.hrd_parameters; - if(!hrd->nal_hrd_parameters_present_flag && - !hrd->vcl_hrd_parameters_present_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires " - "NAL or VCL HRD parameters to be present.\n"); - return AVERROR_INVALIDDATA; - } - - if(!hrd->sub_pic_hrd_params_present_flag) - flag(irap_cpb_params_present_flag); - else - infer(irap_cpb_params_present_flag, 0); - if(current->irap_cpb_params_present_flag) { - length = hrd->au_cpb_removal_delay_length_minus1 + 1; - ub(length, cpb_delay_offset); - length = hrd->dpb_output_delay_length_minus1 + 1; - ub(length, dpb_delay_offset); - } - else { - infer(cpb_delay_offset, 0); - infer(dpb_delay_offset, 0); - } + HEADER("Buffering Period"); - flag(concatenation_flag); + ue(bp_seq_parameter_set_id, 0, HEVC_MAX_SPS_COUNT - 1); - length = hrd->au_cpb_removal_delay_length_minus1 + 1; - ub(length, au_cpb_removal_delay_delta_minus1); + sps = h265->sps[current->bp_seq_parameter_set_id]; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", + 
current->bp_seq_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h265->active_sps = sps; - if(hrd->nal_hrd_parameters_present_flag) { - for(i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) { - length = hrd->initial_cpb_removal_delay_length_minus1 + 1; + if (!sps->vui_parameters_present_flag || + !sps->vui.vui_hrd_parameters_present_flag) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires " + "HRD parameters to be present in SPS.\n"); + return AVERROR_INVALIDDATA; + } + hrd = &sps->vui.hrd_parameters; + if (!hrd->nal_hrd_parameters_present_flag && + !hrd->vcl_hrd_parameters_present_flag) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires " + "NAL or VCL HRD parameters to be present.\n"); + return AVERROR_INVALIDDATA; + } - ubs(length, nal_initial_cpb_removal_delay[i], 1, i); - ubs(length, nal_initial_cpb_removal_offset[i], 1, i); + if (!hrd->sub_pic_hrd_params_present_flag) + flag(irap_cpb_params_present_flag); + else + infer(irap_cpb_params_present_flag, 0); + if (current->irap_cpb_params_present_flag) { + length = hrd->au_cpb_removal_delay_length_minus1 + 1; + ub(length, cpb_delay_offset); + length = hrd->dpb_output_delay_length_minus1 + 1; + ub(length, dpb_delay_offset); + } else { + infer(cpb_delay_offset, 0); + infer(dpb_delay_offset, 0); + } + + flag(concatenation_flag); + + length = hrd->au_cpb_removal_delay_length_minus1 + 1; + ub(length, au_cpb_removal_delay_delta_minus1); + + if (hrd->nal_hrd_parameters_present_flag) { + for (i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) { + length = hrd->initial_cpb_removal_delay_length_minus1 + 1; + + ubs(length, nal_initial_cpb_removal_delay[i], 1, i); + ubs(length, nal_initial_cpb_removal_offset[i], 1, i); - if(hrd->sub_pic_hrd_params_present_flag || - current->irap_cpb_params_present_flag) { - ubs(length, nal_initial_alt_cpb_removal_delay[i], 1, i); - ubs(length, nal_initial_alt_cpb_removal_offset[i], 1, i); - } + if (hrd->sub_pic_hrd_params_present_flag || + 
current->irap_cpb_params_present_flag) { + ubs(length, nal_initial_alt_cpb_removal_delay[i], 1, i); + ubs(length, nal_initial_alt_cpb_removal_offset[i], 1, i); + } + } } - } - if(hrd->vcl_hrd_parameters_present_flag) { - for(i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) { - length = hrd->initial_cpb_removal_delay_length_minus1 + 1; + if (hrd->vcl_hrd_parameters_present_flag) { + for (i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) { + length = hrd->initial_cpb_removal_delay_length_minus1 + 1; - ubs(length, vcl_initial_cpb_removal_delay[i], 1, i); - ubs(length, vcl_initial_cpb_removal_offset[i], 1, i); + ubs(length, vcl_initial_cpb_removal_delay[i], 1, i); + ubs(length, vcl_initial_cpb_removal_offset[i], 1, i); - if(hrd->sub_pic_hrd_params_present_flag || - current->irap_cpb_params_present_flag) { - ubs(length, vcl_initial_alt_cpb_removal_delay[i], 1, i); - ubs(length, vcl_initial_alt_cpb_removal_offset[i], 1, i); - } + if (hrd->sub_pic_hrd_params_present_flag || + current->irap_cpb_params_present_flag) { + ubs(length, vcl_initial_alt_cpb_removal_delay[i], 1, i); + ubs(length, vcl_initial_alt_cpb_removal_offset[i], 1, i); + } + } } - } #ifdef READ - end_pos = get_bits_count(rw); - if(cbs_h265_payload_extension_present(rw, sei->payload_size, - end_pos - start_pos)) - flag(use_alt_cpb_params_flag); - else - infer(use_alt_cpb_params_flag, 0); + end_pos = get_bits_count(rw); + if (cbs_h265_payload_extension_present(rw, sei->payload_size, + end_pos - start_pos)) + flag(use_alt_cpb_params_flag); + else + infer(use_alt_cpb_params_flag, 0); #else - // If unknown extension data exists, then use_alt_cpb_params_flag is - // coded in the bitstream and must be written even if it's 0. - if(current->use_alt_cpb_params_flag || sei->extension_present) { - flag(use_alt_cpb_params_flag); - // Ensure this bit is not the last in the payload by making the - // more_data_in_payload() check evaluate to true, so it may not - // be mistaken as something else by decoders. 
- sei->extension_present = 1; - } + // If unknown extension data exists, then use_alt_cpb_params_flag is + // coded in the bitstream and must be written even if it's 0. + if (current->use_alt_cpb_params_flag || sei->extension_present) { + flag(use_alt_cpb_params_flag); + // Ensure this bit is not the last in the payload by making the + // more_data_in_payload() check evaluate to true, so it may not + // be mistaken as something else by decoders. + sei->extension_present = 1; + } #endif - return 0; + return 0; } -static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIPicTiming *current, SEIMessageState *sei) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - const H265RawHRDParameters *hrd; - int err, expected_source_scan_type, i, length; - - HEADER("Picture Timing"); - - sps = h265->active_sps; - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for pic_timing.\n"); - return AVERROR_INVALIDDATA; - } - - expected_source_scan_type = 2 - - 2 * sps->profile_tier_level.general_interlaced_source_flag - - sps->profile_tier_level.general_progressive_source_flag; - - if(sps->vui.frame_field_info_present_flag) { - u(4, pic_struct, 0, 12); - u(2, source_scan_type, - expected_source_scan_type >= 0 ? expected_source_scan_type : 0, - expected_source_scan_type >= 0 ? expected_source_scan_type : 2); - flag(duplicate_flag); - } - else { - infer(pic_struct, 0); - infer(source_scan_type, - expected_source_scan_type >= 0 ? 
expected_source_scan_type : 2); - infer(duplicate_flag, 0); - } - - if(sps->vui_parameters_present_flag && - sps->vui.vui_hrd_parameters_present_flag) - hrd = &sps->vui.hrd_parameters; - else - hrd = NULL; - if(hrd && (hrd->nal_hrd_parameters_present_flag || - hrd->vcl_hrd_parameters_present_flag)) { - length = hrd->au_cpb_removal_delay_length_minus1 + 1; - ub(length, au_cpb_removal_delay_minus1); - - length = hrd->dpb_output_delay_length_minus1 + 1; - ub(length, pic_dpb_output_delay); +static int FUNC(sei_pic_timing) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIPicTiming *current, SEIMessageState *sei) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps; + const H265RawHRDParameters *hrd; + int err, expected_source_scan_type, i, length; + + HEADER("Picture Timing"); + + sps = h265->active_sps; + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "No active SPS for pic_timing.\n"); + return AVERROR_INVALIDDATA; + } + + expected_source_scan_type = 2 - + 2 * sps->profile_tier_level.general_interlaced_source_flag - + sps->profile_tier_level.general_progressive_source_flag; + + if (sps->vui.frame_field_info_present_flag) { + u(4, pic_struct, 0, 12); + u(2, source_scan_type, + expected_source_scan_type >= 0 ? expected_source_scan_type : 0, + expected_source_scan_type >= 0 ? expected_source_scan_type : 2); + flag(duplicate_flag); + } else { + infer(pic_struct, 0); + infer(source_scan_type, + expected_source_scan_type >= 0 ? 
expected_source_scan_type : 2); + infer(duplicate_flag, 0); + } + + if (sps->vui_parameters_present_flag && + sps->vui.vui_hrd_parameters_present_flag) + hrd = &sps->vui.hrd_parameters; + else + hrd = NULL; + if (hrd && (hrd->nal_hrd_parameters_present_flag || + hrd->vcl_hrd_parameters_present_flag)) { + length = hrd->au_cpb_removal_delay_length_minus1 + 1; + ub(length, au_cpb_removal_delay_minus1); + + length = hrd->dpb_output_delay_length_minus1 + 1; + ub(length, pic_dpb_output_delay); + + if (hrd->sub_pic_hrd_params_present_flag) { + length = hrd->dpb_output_delay_du_length_minus1 + 1; + ub(length, pic_dpb_output_du_delay); + } - if(hrd->sub_pic_hrd_params_present_flag) { - length = hrd->dpb_output_delay_du_length_minus1 + 1; - ub(length, pic_dpb_output_du_delay); + if (hrd->sub_pic_hrd_params_present_flag && + hrd->sub_pic_cpb_params_in_pic_timing_sei_flag) { + // Each decoding unit must contain at least one slice segment. + ue(num_decoding_units_minus1, 0, HEVC_MAX_SLICE_SEGMENTS); + flag(du_common_cpb_removal_delay_flag); + + length = hrd->du_cpb_removal_delay_increment_length_minus1 + 1; + if (current->du_common_cpb_removal_delay_flag) + ub(length, du_common_cpb_removal_delay_increment_minus1); + + for (i = 0; i <= current->num_decoding_units_minus1; i++) { + ues(num_nalus_in_du_minus1[i], + 0, HEVC_MAX_SLICE_SEGMENTS, 1, i); + if (!current->du_common_cpb_removal_delay_flag && + i < current->num_decoding_units_minus1) + ubs(length, du_cpb_removal_delay_increment_minus1[i], 1, i); + } + } } - if(hrd->sub_pic_hrd_params_present_flag && - hrd->sub_pic_cpb_params_in_pic_timing_sei_flag) { - // Each decoding unit must contain at least one slice segment. 
- ue(num_decoding_units_minus1, 0, HEVC_MAX_SLICE_SEGMENTS); - flag(du_common_cpb_removal_delay_flag); + return 0; +} - length = hrd->du_cpb_removal_delay_increment_length_minus1 + 1; - if(current->du_common_cpb_removal_delay_flag) - ub(length, du_common_cpb_removal_delay_increment_minus1); +static int FUNC(sei_pan_scan_rect) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIPanScanRect *current, SEIMessageState *sei) +{ + int err, i; - for(i = 0; i <= current->num_decoding_units_minus1; i++) { - ues(num_nalus_in_du_minus1[i], - 0, HEVC_MAX_SLICE_SEGMENTS, 1, i); - if(!current->du_common_cpb_removal_delay_flag && - i < current->num_decoding_units_minus1) - ubs(length, du_cpb_removal_delay_increment_minus1[i], 1, i); - } - } - } + HEADER("Pan-Scan Rectangle"); - return 0; -} + ue(pan_scan_rect_id, 0, UINT32_MAX - 1); + flag(pan_scan_rect_cancel_flag); -static int FUNC(sei_pan_scan_rect)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIPanScanRect *current, SEIMessageState *sei) { - int err, i; + if (!current->pan_scan_rect_cancel_flag) { + ue(pan_scan_cnt_minus1, 0, 2); - HEADER("Pan-Scan Rectangle"); + for (i = 0; i <= current->pan_scan_cnt_minus1; i++) { + ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); + } - ue(pan_scan_rect_id, 0, UINT32_MAX - 1); - flag(pan_scan_rect_cancel_flag); + flag(pan_scan_rect_persistence_flag); + } - if(!current->pan_scan_rect_cancel_flag) { - ue(pan_scan_cnt_minus1, 0, 2); + return 0; +} - for(i = 0; i <= current->pan_scan_cnt_minus1; i++) { - ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, 
INT32_MAX, 1, i); - } +static int FUNC(sei_recovery_point) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIRecoveryPoint *current, SEIMessageState *sei) +{ + int err; - flag(pan_scan_rect_persistence_flag); - } + HEADER("Recovery Point"); - return 0; -} + se(recovery_poc_cnt, -32768, 32767); -static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIRecoveryPoint *current, SEIMessageState *sei) { - int err; + flag(exact_match_flag); + flag(broken_link_flag); - HEADER("Recovery Point"); + return 0; +} - se(recovery_poc_cnt, -32768, 32767); +static int FUNC(film_grain_characteristics)(CodedBitstreamContext *ctx, RWContext *rw, + H265RawFilmGrainCharacteristics *current, + SEIMessageState *state) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps = h265->active_sps; + int err, c, i, j; + + HEADER("Film Grain Characteristics"); + + flag(film_grain_characteristics_cancel_flag); + if (!current->film_grain_characteristics_cancel_flag) { + int filmGrainBitDepth[3]; + + u(2, film_grain_model_id, 0, 1); + flag(separate_colour_description_present_flag); + if (current->separate_colour_description_present_flag) { + ub(3, film_grain_bit_depth_luma_minus8); + ub(3, film_grain_bit_depth_chroma_minus8); + flag(film_grain_full_range_flag); + ub(8, film_grain_colour_primaries); + ub(8, film_grain_transfer_characteristics); + ub(8, film_grain_matrix_coeffs); + } else { + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "No active SPS for film_grain_characteristics.\n"); + return AVERROR_INVALIDDATA; + } + infer(film_grain_bit_depth_luma_minus8, sps->bit_depth_luma_minus8); + infer(film_grain_bit_depth_chroma_minus8, sps->bit_depth_chroma_minus8); + infer(film_grain_full_range_flag, sps->vui.video_full_range_flag); + infer(film_grain_colour_primaries, sps->vui.colour_primaries); + infer(film_grain_transfer_characteristics, sps->vui.transfer_characteristics); + infer(film_grain_matrix_coeffs, 
sps->vui.matrix_coefficients); + } - flag(exact_match_flag); - flag(broken_link_flag); + filmGrainBitDepth[0] = current->film_grain_bit_depth_luma_minus8 + 8; + filmGrainBitDepth[1] = + filmGrainBitDepth[2] = current->film_grain_bit_depth_chroma_minus8 + 8; + + u(2, blending_mode_id, 0, 1); + ub(4, log2_scale_factor); + for (c = 0; c < 3; c++) + flags(comp_model_present_flag[c], 1, c); + for (c = 0; c < 3; c++) { + if (current->comp_model_present_flag[c]) { + ubs(8, num_intensity_intervals_minus1[c], 1, c); + us(3, num_model_values_minus1[c], 0, 5, 1, c); + for (i = 0; i <= current->num_intensity_intervals_minus1[c]; i++) { + ubs(8, intensity_interval_lower_bound[c][i], 2, c, i); + ubs(8, intensity_interval_upper_bound[c][i], 2, c, i); + for (j = 0; j <= current->num_model_values_minus1[c]; j++) + ses(comp_model_value[c][i][j], 0 - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), + ((1 << filmGrainBitDepth[c]) - 1) - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), + 3, c, i, j); + } + } + } + flag(film_grain_characteristics_persistence_flag); + } - return 0; + return 0; } -static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIDisplayOrientation *current, SEIMessageState *sei) { - int err; +static int FUNC(sei_display_orientation) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIDisplayOrientation *current, SEIMessageState *sei) +{ + int err; - HEADER("Display Orientation"); + HEADER("Display Orientation"); - flag(display_orientation_cancel_flag); - if(!current->display_orientation_cancel_flag) { - flag(hor_flip); - flag(ver_flip); - ub(16, anticlockwise_rotation); - flag(display_orientation_persistence_flag); - } + flag(display_orientation_cancel_flag); + if (!current->display_orientation_cancel_flag) { + flag(hor_flip); + flag(ver_flip); + ub(16, anticlockwise_rotation); + flag(display_orientation_persistence_flag); + } - return 0; + return 0; } -static int 
FUNC(sei_active_parameter_sets)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIActiveParameterSets *current, SEIMessageState *sei) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawVPS *vps; - int err, i; +static int FUNC(sei_active_parameter_sets) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIActiveParameterSets *current, SEIMessageState *sei) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawVPS *vps; + int err, i; - HEADER("Active Parameter Sets"); + HEADER("Active Parameter Sets"); - u(4, active_video_parameter_set_id, 0, HEVC_MAX_VPS_COUNT); - vps = h265->vps[current->active_video_parameter_set_id]; - if(!vps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "VPS id %d not available for active " - "parameter sets.\n", - current->active_video_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_vps = vps; + u(4, active_video_parameter_set_id, 0, HEVC_MAX_VPS_COUNT); + vps = h265->vps[current->active_video_parameter_set_id]; + if (!vps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "VPS id %d not available for active " + "parameter sets.\n", current->active_video_parameter_set_id); + return AVERROR_INVALIDDATA; + } + h265->active_vps = vps; - flag(self_contained_cvs_flag); - flag(no_parameter_set_update_flag); + flag(self_contained_cvs_flag); + flag(no_parameter_set_update_flag); - ue(num_sps_ids_minus1, 0, HEVC_MAX_SPS_COUNT - 1); - for(i = 0; i <= current->num_sps_ids_minus1; i++) - ues(active_seq_parameter_set_id[i], 0, HEVC_MAX_SPS_COUNT - 1, 1, i); + ue(num_sps_ids_minus1, 0, HEVC_MAX_SPS_COUNT - 1); + for (i = 0; i <= current->num_sps_ids_minus1; i++) + ues(active_seq_parameter_set_id[i], 0, HEVC_MAX_SPS_COUNT - 1, 1, i); - for(i = vps->vps_base_layer_internal_flag; - i <= FFMIN(62, vps->vps_max_layers_minus1); i++) { - ues(layer_sps_idx[i], 0, current->num_sps_ids_minus1, 1, i); + for (i = vps->vps_base_layer_internal_flag; + i <= FFMIN(62, vps->vps_max_layers_minus1); i++) { + ues(layer_sps_idx[i], 
0, current->num_sps_ids_minus1, 1, i); - if(i == 0) - h265->active_sps = h265->sps[current->active_seq_parameter_set_id[current->layer_sps_idx[0]]]; - } + if (i == 0) + h265->active_sps = h265->sps[current->active_seq_parameter_set_id[current->layer_sps_idx[0]]]; + } - return 0; + return 0; } -static int FUNC(sei_decoded_picture_hash)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIDecodedPictureHash *current, SEIMessageState *sei) { - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int err, c, i; +static int FUNC(sei_decoded_picture_hash) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIDecodedPictureHash *current, SEIMessageState *sei) +{ + CodedBitstreamH265Context *h265 = ctx->priv_data; + const H265RawSPS *sps = h265->active_sps; + int err, c, i; - HEADER("Decoded Picture Hash"); + HEADER("Decoded Picture Hash"); - if(!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for decoded picture hash.\n"); - return AVERROR_INVALIDDATA; - } + if (!sps) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "No active SPS for decoded picture hash.\n"); + return AVERROR_INVALIDDATA; + } - u(8, hash_type, 0, 2); + u(8, hash_type, 0, 2); - for(c = 0; c < (sps->chroma_format_idc == 0 ? 1 : 3); c++) { - if(current->hash_type == 0) { - for(i = 0; i < 16; i++) - us(8, picture_md5[c][i], 0x00, 0xff, 2, c, i); - } - else if(current->hash_type == 1) { - us(16, picture_crc[c], 0x0000, 0xffff, 1, c); - } - else if(current->hash_type == 2) { - us(32, picture_checksum[c], 0x00000000, 0xffffffff, 1, c); + for (c = 0; c < (sps->chroma_format_idc == 0 ? 
1 : 3); c++) { + if (current->hash_type == 0) { + for (i = 0; i < 16; i++) + us(8, picture_md5[c][i], 0x00, 0xff, 2, c, i); + } else if (current->hash_type == 1) { + us(16, picture_crc[c], 0x0000, 0xffff, 1, c); + } else if (current->hash_type == 2) { + us(32, picture_checksum[c], 0x00000000, 0xffffffff, 1, c); + } } - } - return 0; + return 0; } -static int FUNC(sei_time_code)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEITimeCode *current, SEIMessageState *sei) { - int err, i; - - HEADER("Time Code"); - - u(2, num_clock_ts, 1, 3); - - for(i = 0; i < current->num_clock_ts; i++) { - flags(clock_timestamp_flag[i], 1, i); - - if(current->clock_timestamp_flag[i]) { - flags(units_field_based_flag[i], 1, i); - us(5, counting_type[i], 0, 6, 1, i); - flags(full_timestamp_flag[i], 1, i); - flags(discontinuity_flag[i], 1, i); - flags(cnt_dropped_flag[i], 1, i); - - ubs(9, n_frames[i], 1, i); - - if(current->full_timestamp_flag[i]) { - us(6, seconds_value[i], 0, 59, 1, i); - us(6, minutes_value[i], 0, 59, 1, i); - us(5, hours_value[i], 0, 23, 1, i); - } - else { - flags(seconds_flag[i], 1, i); - if(current->seconds_flag[i]) { - us(6, seconds_value[i], 0, 59, 1, i); - flags(minutes_flag[i], 1, i); - if(current->minutes_flag[i]) { - us(6, minutes_value[i], 0, 59, 1, i); - flags(hours_flag[i], 1, i); - if(current->hours_flag[i]) - us(5, hours_value[i], 0, 23, 1, i); - } +static int FUNC(sei_time_code) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEITimeCode *current, SEIMessageState *sei) +{ + int err, i; + + HEADER("Time Code"); + + u(2, num_clock_ts, 1, 3); + + for (i = 0; i < current->num_clock_ts; i++) { + flags(clock_timestamp_flag[i], 1, i); + + if (current->clock_timestamp_flag[i]) { + flags(units_field_based_flag[i], 1, i); + us(5, counting_type[i], 0, 6, 1, i); + flags(full_timestamp_flag[i], 1, i); + flags(discontinuity_flag[i], 1, i); + flags(cnt_dropped_flag[i], 1, i); + + ubs(9, n_frames[i], 1, i); + + if (current->full_timestamp_flag[i]) { + 
us(6, seconds_value[i], 0, 59, 1, i); + us(6, minutes_value[i], 0, 59, 1, i); + us(5, hours_value[i], 0, 23, 1, i); + } else { + flags(seconds_flag[i], 1, i); + if (current->seconds_flag[i]) { + us(6, seconds_value[i], 0, 59, 1, i); + flags(minutes_flag[i], 1, i); + if (current->minutes_flag[i]) { + us(6, minutes_value[i], 0, 59, 1, i); + flags(hours_flag[i], 1, i); + if (current->hours_flag[i]) + us(5, hours_value[i], 0, 23, 1, i); + } + } + } + + ubs(5, time_offset_length[i], 1, i); + if (current->time_offset_length[i] > 0) + ibs(current->time_offset_length[i], time_offset_value[i], 1, i); + else + infer(time_offset_value[i], 0); } - } - - ubs(5, time_offset_length[i], 1, i); - if(current->time_offset_length[i] > 0) - ibs(current->time_offset_length[i], time_offset_value[i], 1, i); - else - infer(time_offset_value[i], 0); } - } - return 0; + return 0; } -static int FUNC(sei_alpha_channel_info)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIAlphaChannelInfo *current, SEIMessageState *sei) { - int err, length; - - HEADER("Alpha Channel Information"); - - flag(alpha_channel_cancel_flag); - if(!current->alpha_channel_cancel_flag) { - ub(3, alpha_channel_use_idc); - ub(3, alpha_channel_bit_depth_minus8); - length = current->alpha_channel_bit_depth_minus8 + 9; - ub(length, alpha_transparent_value); - ub(length, alpha_opaque_value); - flag(alpha_channel_incr_flag); - flag(alpha_channel_clip_flag); - if(current->alpha_channel_clip_flag) - flag(alpha_channel_clip_type_flag); - } - else { - infer(alpha_channel_use_idc, 2); - infer(alpha_channel_incr_flag, 0); - infer(alpha_channel_clip_flag, 0); - } - - return 0; +static int FUNC(sei_alpha_channel_info) + (CodedBitstreamContext *ctx, RWContext *rw, + H265RawSEIAlphaChannelInfo *current, SEIMessageState *sei) +{ + int err, length; + + HEADER("Alpha Channel Information"); + + flag(alpha_channel_cancel_flag); + if (!current->alpha_channel_cancel_flag) { + ub(3, alpha_channel_use_idc); + ub(3, 
alpha_channel_bit_depth_minus8); + length = current->alpha_channel_bit_depth_minus8 + 9; + ub(length, alpha_transparent_value); + ub(length, alpha_opaque_value); + flag(alpha_channel_incr_flag); + flag(alpha_channel_clip_flag); + if (current->alpha_channel_clip_flag) + flag(alpha_channel_clip_type_flag); + } else { + infer(alpha_channel_use_idc, 2); + infer(alpha_channel_incr_flag, 0); + infer(alpha_channel_clip_flag, 0); + } + + return 0; } static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEI *current, int prefix) { - int err; + H265RawSEI *current, int prefix) +{ + int err; - if(prefix) - HEADER("Prefix Supplemental Enhancement Information"); - else - HEADER("Suffix Supplemental Enhancement Information"); + if (prefix) + HEADER("Prefix Supplemental Enhancement Information"); + else + HEADER("Suffix Supplemental Enhancement Information"); - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - prefix ? HEVC_NAL_SEI_PREFIX : HEVC_NAL_SEI_SUFFIX)); + CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, + prefix ? HEVC_NAL_SEI_PREFIX + : HEVC_NAL_SEI_SUFFIX)); - CHECK(FUNC_SEI(message_list)(ctx, rw, ¤t->message_list, prefix)); + CHECK(FUNC_SEI(message_list)(ctx, rw, ¤t->message_list, prefix)); - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); + CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - return 0; + return 0; } diff --git a/third-party/cbs/cbs_internal.h b/third-party/cbs/cbs_internal.h index 4fb6e7c4d40..039d5184421 100644 --- a/third-party/cbs/cbs_internal.h +++ b/third-party/cbs/cbs_internal.h @@ -21,152 +21,154 @@ #include -#include -#include +#include "libavutil/buffer.h" +#include "libavutil/log.h" +// [manual] Changed include path #include "cbs/cbs.h" -#include "get_bits.h" +#include "cbs/codec_id.h" +#include "cbs/get_bits.h" #include "put_bits.h" enum CBSContentType { - // Unit content is a simple structure. 
- CBS_CONTENT_TYPE_POD, - // Unit content contains some references to other structures, but all - // managed via buffer reference counting. The descriptor defines the - // structure offsets of every buffer reference. - CBS_CONTENT_TYPE_INTERNAL_REFS, - // Unit content is something more complex. The descriptor defines - // special functions to manage the content. - CBS_CONTENT_TYPE_COMPLEX, + // Unit content is a simple structure. + CBS_CONTENT_TYPE_POD, + // Unit content contains some references to other structures, but all + // managed via buffer reference counting. The descriptor defines the + // structure offsets of every buffer reference. + CBS_CONTENT_TYPE_INTERNAL_REFS, + // Unit content is something more complex. The descriptor defines + // special functions to manage the content. + CBS_CONTENT_TYPE_COMPLEX, }; enum { - // Maximum number of unit types described by the same unit type - // descriptor. - CBS_MAX_UNIT_TYPES = 3, - // Maximum number of reference buffer offsets in any one unit. - CBS_MAX_REF_OFFSETS = 2, - // Special value used in a unit type descriptor to indicate that it - // applies to a large range of types rather than a set of discrete - // values. - CBS_UNIT_TYPE_RANGE = -1, + // Maximum number of unit types described by the same unit type + // descriptor. + CBS_MAX_UNIT_TYPES = 3, + // Maximum number of reference buffer offsets in any one unit. + CBS_MAX_REF_OFFSETS = 2, + // Special value used in a unit type descriptor to indicate that it + // applies to a large range of types rather than a set of discrete + // values. + CBS_UNIT_TYPE_RANGE = -1, }; typedef const struct CodedBitstreamUnitTypeDescriptor { - // Number of entries in the unit_types array, or the special value - // CBS_UNIT_TYPE_RANGE to indicate that the range fields should be - // used instead. - int nb_unit_types; - - // Array of unit types that this entry describes. 
- const CodedBitstreamUnitType unit_types[CBS_MAX_UNIT_TYPES]; - - // Start and end of unit type range, used if nb_unit_types is - // CBS_UNIT_TYPE_RANGE. - const CodedBitstreamUnitType unit_type_range_start; - const CodedBitstreamUnitType unit_type_range_end; - - // The type of content described. - enum CBSContentType content_type; - // The size of the structure which should be allocated to contain - // the decomposed content of this type of unit. - size_t content_size; - - // Number of entries in the ref_offsets array. Only used if the - // content_type is CBS_CONTENT_TYPE_INTERNAL_REFS. - int nb_ref_offsets; - // The structure must contain two adjacent elements: - // type *field; - // AVBufferRef *field_ref; - // where field points to something in the buffer referred to by - // field_ref. This offset is then set to offsetof(struct, field). - size_t ref_offsets[CBS_MAX_REF_OFFSETS]; - - void (*content_free)(void *opaque, uint8_t *data); - int (*content_clone)(AVBufferRef **ref, CodedBitstreamUnit *unit); + // Number of entries in the unit_types array, or the special value + // CBS_UNIT_TYPE_RANGE to indicate that the range fields should be + // used instead. + int nb_unit_types; + + // Array of unit types that this entry describes. + const CodedBitstreamUnitType unit_types[CBS_MAX_UNIT_TYPES]; + + // Start and end of unit type range, used if nb_unit_types is + // CBS_UNIT_TYPE_RANGE. + const CodedBitstreamUnitType unit_type_range_start; + const CodedBitstreamUnitType unit_type_range_end; + + // The type of content described. + enum CBSContentType content_type; + // The size of the structure which should be allocated to contain + // the decomposed content of this type of unit. + size_t content_size; + + // Number of entries in the ref_offsets array. Only used if the + // content_type is CBS_CONTENT_TYPE_INTERNAL_REFS. 
+ int nb_ref_offsets; + // The structure must contain two adjacent elements: + // type *field; + // AVBufferRef *field_ref; + // where field points to something in the buffer referred to by + // field_ref. This offset is then set to offsetof(struct, field). + size_t ref_offsets[CBS_MAX_REF_OFFSETS]; + + void (*content_free)(void *opaque, uint8_t *data); + int (*content_clone)(AVBufferRef **ref, CodedBitstreamUnit *unit); } CodedBitstreamUnitTypeDescriptor; typedef struct CodedBitstreamType { - enum AVCodecID codec_id; - - // A class for the private data, used to declare private AVOptions. - // This field is NULL for types that do not declare any options. - // If this field is non-NULL, the first member of the filter private data - // must be a pointer to AVClass. - const AVClass *priv_class; - - size_t priv_data_size; - - // List of unit type descriptors for this codec. - // Terminated by a descriptor with nb_unit_types equal to zero. - const CodedBitstreamUnitTypeDescriptor *unit_types; - - // Split frag->data into coded bitstream units, creating the - // frag->units array. Fill data but not content on each unit. - // The header argument should be set if the fragment came from - // a header block, which may require different parsing for some - // codecs (e.g. the AVCC header in H.264). - int (*split_fragment)(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header); - - // Read the unit->data bitstream and decompose it, creating - // unit->content. - int (*read_unit)(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); - - // Write the data bitstream from unit->content into pbc. - // Return value AVERROR(ENOSPC) indicates that pbc was too small. - int (*write_unit)(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc); - - // Read the data from all of frag->units and assemble it into - // a bitstream for the whole fragment. 
- int (*assemble_fragment)(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag); - - // Reset the codec internal state. - void (*flush)(CodedBitstreamContext *ctx); - - // Free the codec internal state. - void (*close)(CodedBitstreamContext *ctx); + enum AVCodecID codec_id; + + // A class for the private data, used to declare private AVOptions. + // This field is NULL for types that do not declare any options. + // If this field is non-NULL, the first member of the filter private data + // must be a pointer to AVClass. + const AVClass *priv_class; + + size_t priv_data_size; + + // List of unit type descriptors for this codec. + // Terminated by a descriptor with nb_unit_types equal to zero. + const CodedBitstreamUnitTypeDescriptor *unit_types; + + // Split frag->data into coded bitstream units, creating the + // frag->units array. Fill data but not content on each unit. + // The header argument should be set if the fragment came from + // a header block, which may require different parsing for some + // codecs (e.g. the AVCC header in H.264). + int (*split_fragment)(CodedBitstreamContext *ctx, + CodedBitstreamFragment *frag, + int header); + + // Read the unit->data bitstream and decompose it, creating + // unit->content. + int (*read_unit)(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit); + + // Write the data bitstream from unit->content into pbc. + // Return value AVERROR(ENOSPC) indicates that pbc was too small. + int (*write_unit)(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit, + PutBitContext *pbc); + + // Read the data from all of frag->units and assemble it into + // a bitstream for the whole fragment. + int (*assemble_fragment)(CodedBitstreamContext *ctx, + CodedBitstreamFragment *frag); + + // Reset the codec internal state. + void (*flush)(CodedBitstreamContext *ctx); + + // Free the codec internal state. + void (*close)(CodedBitstreamContext *ctx); } CodedBitstreamType; // Helper functions for trace output. 
void ff_cbs_trace_header(CodedBitstreamContext *ctx, - const char *name); + const char *name); void ff_cbs_trace_syntax_element(CodedBitstreamContext *ctx, int position, - const char *name, const int *subscripts, - const char *bitstring, int64_t value); + const char *name, const int *subscripts, + const char *bitstring, int64_t value); // Helper functions for read/write of common bitstream elements, including // generation of trace output. int ff_cbs_read_unsigned(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, uint32_t *write_to, - uint32_t range_min, uint32_t range_max); + int width, const char *name, + const int *subscripts, uint32_t *write_to, + uint32_t range_min, uint32_t range_max); int ff_cbs_write_unsigned(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, uint32_t value, - uint32_t range_min, uint32_t range_max); + int width, const char *name, + const int *subscripts, uint32_t value, + uint32_t range_min, uint32_t range_max); int ff_cbs_read_signed(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, int32_t *write_to, - int32_t range_min, int32_t range_max); + int width, const char *name, + const int *subscripts, int32_t *write_to, + int32_t range_min, int32_t range_max); int ff_cbs_write_signed(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, int32_t value, - int32_t range_min, int32_t range_max); + int width, const char *name, + const int *subscripts, int32_t value, + int32_t range_min, int32_t range_max); // The largest unsigned value representable in N bits, suitable for use as // range_max in the above functions. @@ -174,39 +176,35 @@ int ff_cbs_write_signed(CodedBitstreamContext *ctx, PutBitContext *pbc, // The largest signed value representable in N bits, suitable for use as // range_max in the above functions. 
-#define MAX_INT_BITS(length) ((INT64_C(1) << ((length)-1)) - 1) +#define MAX_INT_BITS(length) ((INT64_C(1) << ((length) - 1)) - 1) // The smallest signed value representable in N bits, suitable for use as // range_min in the above functions. -#define MIN_INT_BITS(length) (-(INT64_C(1) << ((length)-1))) - - -#define CBS_UNIT_TYPE_POD(type, structure) \ - { \ - .nb_unit_types = 1, \ - .unit_types = { type }, \ - .content_type = CBS_CONTENT_TYPE_POD, \ - .content_size = sizeof(structure), \ - } -#define CBS_UNIT_TYPE_INTERNAL_REF(type, structure, ref_field) \ - { \ - .nb_unit_types = 1, \ - .unit_types = { type }, \ - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, \ - .content_size = sizeof(structure), \ - .nb_ref_offsets = 1, \ - .ref_offsets = { offsetof(structure, ref_field) }, \ - } -#define CBS_UNIT_TYPE_COMPLEX(type, structure, free_func) \ - { \ - .nb_unit_types = 1, \ - .unit_types = { type }, \ - .content_type = CBS_CONTENT_TYPE_COMPLEX, \ - .content_size = sizeof(structure), \ - .content_free = free_func, \ - } -#define CBS_UNIT_TYPE_END_OF_LIST \ - { .nb_unit_types = 0 } +#define MIN_INT_BITS(length) (-(INT64_C(1) << ((length) - 1))) + + +#define CBS_UNIT_TYPE_POD(type, structure) { \ + .nb_unit_types = 1, \ + .unit_types = { type }, \ + .content_type = CBS_CONTENT_TYPE_POD, \ + .content_size = sizeof(structure), \ + } +#define CBS_UNIT_TYPE_INTERNAL_REF(type, structure, ref_field) { \ + .nb_unit_types = 1, \ + .unit_types = { type }, \ + .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, \ + .content_size = sizeof(structure), \ + .nb_ref_offsets = 1, \ + .ref_offsets = { offsetof(structure, ref_field) }, \ + } +#define CBS_UNIT_TYPE_COMPLEX(type, structure, free_func) { \ + .nb_unit_types = 1, \ + .unit_types = { type }, \ + .content_type = CBS_CONTENT_TYPE_COMPLEX, \ + .content_size = sizeof(structure), \ + .content_free = free_func, \ + } +#define CBS_UNIT_TYPE_END_OF_LIST { .nb_unit_types = 0 } extern const CodedBitstreamType ff_cbs_type_av1; diff --git 
a/third-party/cbs/cbs_jpeg.c b/third-party/cbs/cbs_jpeg.c index bb4f08a14d9..be8fd3dcee5 100644 --- a/third-party/cbs/cbs_jpeg.c +++ b/third-party/cbs/cbs_jpeg.c @@ -16,45 +16,42 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include "cbs/cbs_jpeg.h" +// [manual] Changed include path #include "cbs/cbs.h" - #include "cbs_internal.h" +#include "cbs/cbs_jpeg.h" -#define HEADER(name) \ - do { \ - ff_cbs_trace_header(ctx, name); \ - } while(0) +#define HEADER(name) do { \ + ff_cbs_trace_header(ctx, name); \ + } while (0) -#define CHECK(call) \ - do { \ - err = (call); \ - if(err < 0) \ - return err; \ - } while(0) +#define CHECK(call) do { \ + err = (call); \ + if (err < 0) \ + return err; \ + } while (0) -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]) { subs, __VA_ARGS__ }) : NULL) +#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) #define u(width, name, range_min, range_max) \ - xu(width, name, range_min, range_max, 0, ) + xu(width, name, range_min, range_max, 0, ) #define us(width, name, sub, range_min, range_max) \ - xu(width, name, range_min, range_max, 1, sub) + xu(width, name, range_min, range_max, 1, sub) #define READ #define READWRITE read #define RWContext GetBitContext -#define FUNC(name) cbs_jpeg_read_##name +#define FUNC(name) cbs_jpeg_read_ ## name -#define xu(width, name, range_min, range_max, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - current->name = value; \ - } while(0) +#define xu(width, name, range_min, range_max, subs, ...) 
do { \ + uint32_t value; \ + CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + current->name = value; \ + } while (0) #include "cbs_jpeg_syntax_template.c" @@ -67,15 +64,14 @@ #define WRITE #define READWRITE write #define RWContext PutBitContext -#define FUNC(name) cbs_jpeg_write_##name +#define FUNC(name) cbs_jpeg_write_ ## name -#define xu(width, name, range_min, range_max, subs, ...) \ - do { \ - uint32_t value = current->name; \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while(0) +#define xu(width, name, range_min, range_max, subs, ...) do { \ + uint32_t value = current->name; \ + CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + value, range_min, range_max)); \ + } while (0) #include "cbs_jpeg_syntax_template.c" @@ -87,396 +83,385 @@ #undef xu -static void cbs_jpeg_free_application_data(void *opaque, uint8_t *content) { - JPEGRawApplicationData *ad = (JPEGRawApplicationData *)content; - av_buffer_unref(&ad->Ap_ref); - av_freep(&content); +static void cbs_jpeg_free_application_data(void *opaque, uint8_t *content) +{ + JPEGRawApplicationData *ad = (JPEGRawApplicationData*)content; + av_buffer_unref(&ad->Ap_ref); + av_freep(&content); } -static void cbs_jpeg_free_comment(void *opaque, uint8_t *content) { - JPEGRawComment *comment = (JPEGRawComment *)content; - av_buffer_unref(&comment->Cm_ref); - av_freep(&content); +static void cbs_jpeg_free_comment(void *opaque, uint8_t *content) +{ + JPEGRawComment *comment = (JPEGRawComment*)content; + av_buffer_unref(&comment->Cm_ref); + av_freep(&content); } -static void cbs_jpeg_free_scan(void *opaque, uint8_t *content) { - JPEGRawScan *scan = (JPEGRawScan *)content; - av_buffer_unref(&scan->data_ref); - av_freep(&content); +static void cbs_jpeg_free_scan(void *opaque, uint8_t *content) +{ + JPEGRawScan *scan = 
(JPEGRawScan*)content; + av_buffer_unref(&scan->data_ref); + av_freep(&content); } static int cbs_jpeg_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) { - AVBufferRef *data_ref; - uint8_t *data; - size_t data_size; - int unit, start, end, marker, next_start, next_marker; - int err, i, j, length; - - if(frag->data_size < 4) { - // Definitely too short to be meaningful. - return AVERROR_INVALIDDATA; - } - - for(i = 0; i + 1 < frag->data_size && frag->data[i] != 0xff; i++) - ; - if(i > 0) { - av_log(ctx->log_ctx, AV_LOG_WARNING, "Discarding %d bytes at " - "beginning of image.\n", - i); - } - for(++i; i + 1 < frag->data_size && frag->data[i] == 0xff; i++) - ; - if(i + 1 >= frag->data_size && frag->data[i]) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "no SOI marker found.\n"); - return AVERROR_INVALIDDATA; - } - marker = frag->data[i]; - if(marker != JPEG_MARKER_SOI) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: first " - "marker is %02x, should be SOI.\n", - marker); - return AVERROR_INVALIDDATA; - } - for(++i; i + 1 < frag->data_size && frag->data[i] == 0xff; i++) - ; - if(i + 1 >= frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "no image content found.\n"); - return AVERROR_INVALIDDATA; - } - marker = frag->data[i]; - start = i + 1; - - for(unit = 0;; unit++) { - if(marker == JPEG_MARKER_EOI) { - break; + CodedBitstreamFragment *frag, + int header) +{ + AVBufferRef *data_ref; + uint8_t *data; + size_t data_size; + int start, end, marker, next_start, next_marker; + int err, i, j, length; + + if (frag->data_size < 4) { + // Definitely too short to be meaningful. 
+ return AVERROR_INVALIDDATA; } - else if(marker == JPEG_MARKER_SOS) { - next_marker = -1; - end = start; - for(i = start; i + 1 < frag->data_size; i++) { - if(frag->data[i] != 0xff) - continue; - end = i; - for(++i; i + 1 < frag->data_size && - frag->data[i] == 0xff; - i++) - ; - if(i + 1 < frag->data_size) { - if(frag->data[i] == 0x00) - continue; - next_marker = frag->data[i]; - next_start = i + 1; - } - break; - } + + for (i = 0; i + 1 < frag->data_size && frag->data[i] != 0xff; i++); + if (i > 0) { + av_log(ctx->log_ctx, AV_LOG_WARNING, "Discarding %d bytes at " + "beginning of image.\n", i); } - else { - i = start; - if(i + 2 > frag->data_size) { + for (++i; i + 1 < frag->data_size && frag->data[i] == 0xff; i++); + if (i + 1 >= frag->data_size && frag->data[i]) { av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "truncated at %02x marker.\n", - marker); + "no SOI marker found.\n"); return AVERROR_INVALIDDATA; - } - length = AV_RB16(frag->data + i); - if(i + length > frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "truncated at %02x marker segment.\n", - marker); + } + marker = frag->data[i]; + if (marker != JPEG_MARKER_SOI) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: first " + "marker is %02x, should be SOI.\n", marker); return AVERROR_INVALIDDATA; - } - end = start + length; - - i = end; - if(frag->data[i] != 0xff) { - next_marker = -1; - } - else { - for(++i; i + 1 < frag->data_size && - frag->data[i] == 0xff; - i++) - ; - if(i + 1 >= frag->data_size) { - next_marker = -1; - } - else { - next_marker = frag->data[i]; - next_start = i + 1; - } - } } - - if(marker == JPEG_MARKER_SOS) { - length = AV_RB16(frag->data + start); - - if(length > end - start) + for (++i; i + 1 < frag->data_size && frag->data[i] == 0xff; i++); + if (i + 1 >= frag->data_size) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " + "no image content found.\n"); return AVERROR_INVALIDDATA; - - data_ref = NULL; - data = 
av_malloc(end - start + - AV_INPUT_BUFFER_PADDING_SIZE); - if(!data) - return AVERROR(ENOMEM); - - memcpy(data, frag->data + start, length); - for(i = start + length, j = length; i < end; i++, j++) { - if(frag->data[i] == 0xff) { - while(frag->data[i] == 0xff) - ++i; - data[j] = 0xff; - } - else { - data[j] = frag->data[i]; + } + marker = frag->data[i]; + start = i + 1; + + do { + if (marker == JPEG_MARKER_EOI) { + break; + } else if (marker == JPEG_MARKER_SOS) { + next_marker = -1; + end = start; + for (i = start; i + 1 < frag->data_size; i++) { + if (frag->data[i] != 0xff) + continue; + end = i; + for (++i; i + 1 < frag->data_size && + frag->data[i] == 0xff; i++); + if (i + 1 < frag->data_size) { + if (frag->data[i] == 0x00) + continue; + next_marker = frag->data[i]; + next_start = i + 1; + } + break; + } + } else { + i = start; + if (i + 2 > frag->data_size) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " + "truncated at %02x marker.\n", marker); + return AVERROR_INVALIDDATA; + } + length = AV_RB16(frag->data + i); + if (i + length > frag->data_size) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " + "truncated at %02x marker segment.\n", marker); + return AVERROR_INVALIDDATA; + } + end = start + length; + + i = end; + if (frag->data[i] != 0xff) { + next_marker = -1; + } else { + for (++i; i + 1 < frag->data_size && + frag->data[i] == 0xff; i++); + if (i + 1 >= frag->data_size) { + next_marker = -1; + } else { + next_marker = frag->data[i]; + next_start = i + 1; + } + } } - } - data_size = j; - memset(data + data_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - } - else { - data = frag->data + start; - data_size = end - start; - data_ref = frag->data_ref; - } + if (marker == JPEG_MARKER_SOS) { + length = AV_RB16(frag->data + start); + + if (length > end - start) + return AVERROR_INVALIDDATA; + + data_ref = NULL; + data = av_malloc(end - start + + AV_INPUT_BUFFER_PADDING_SIZE); + if (!data) + return AVERROR(ENOMEM); + + memcpy(data, frag->data 
+ start, length); + for (i = start + length, j = length; i < end; i++, j++) { + if (frag->data[i] == 0xff) { + while (frag->data[i] == 0xff) + ++i; + data[j] = 0xff; + } else { + data[j] = frag->data[i]; + } + } + data_size = j; + + memset(data + data_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + + } else { + data = frag->data + start; + data_size = end - start; + data_ref = frag->data_ref; + } - err = ff_cbs_insert_unit_data(frag, unit, marker, - data, data_size, data_ref); - if(err < 0) - return err; + err = ff_cbs_append_unit_data(frag, marker, + data, data_size, data_ref); + if (err < 0) + return err; - if(next_marker == -1) - break; - marker = next_marker; - start = next_start; - } + marker = next_marker; + start = next_start; + } while (next_marker != -1); - return 0; + return 0; } static int cbs_jpeg_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if(err < 0) - return err; + CodedBitstreamUnit *unit) +{ + GetBitContext gbc; + int err; + + err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); + if (err < 0) + return err; + + if (unit->type >= JPEG_MARKER_SOF0 && + unit->type <= JPEG_MARKER_SOF3) { + err = ff_cbs_alloc_unit_content(unit, + sizeof(JPEGRawFrameHeader), + NULL); + if (err < 0) + return err; + + err = cbs_jpeg_read_frame_header(ctx, &gbc, unit->content); + if (err < 0) + return err; + + } else if (unit->type >= JPEG_MARKER_APPN && + unit->type <= JPEG_MARKER_APPN + 15) { + err = ff_cbs_alloc_unit_content(unit, + sizeof(JPEGRawApplicationData), + &cbs_jpeg_free_application_data); + if (err < 0) + return err; + + err = cbs_jpeg_read_application_data(ctx, &gbc, unit->content); + if (err < 0) + return err; + + } else if (unit->type == JPEG_MARKER_SOS) { + JPEGRawScan *scan; + int pos; + + err = ff_cbs_alloc_unit_content(unit, + sizeof(JPEGRawScan), + &cbs_jpeg_free_scan); + if (err < 0) + return err; + scan = unit->content; + + err = 
cbs_jpeg_read_scan_header(ctx, &gbc, &scan->header); + if (err < 0) + return err; + + pos = get_bits_count(&gbc); + av_assert0(pos % 8 == 0); + if (pos > 0) { + scan->data_size = unit->data_size - pos / 8; + scan->data_ref = av_buffer_ref(unit->data_ref); + if (!scan->data_ref) + return AVERROR(ENOMEM); + scan->data = unit->data + pos / 8; + } - if(unit->type >= JPEG_MARKER_SOF0 && - unit->type <= JPEG_MARKER_SOF3) { - err = ff_cbs_alloc_unit_content(unit, - sizeof(JPEGRawFrameHeader), - NULL); - if(err < 0) - return err; - - err = cbs_jpeg_read_frame_header(ctx, &gbc, unit->content); - if(err < 0) - return err; - } - else if(unit->type >= JPEG_MARKER_APPN && - unit->type <= JPEG_MARKER_APPN + 15) { - err = ff_cbs_alloc_unit_content(unit, - sizeof(JPEGRawApplicationData), - &cbs_jpeg_free_application_data); - if(err < 0) - return err; - - err = cbs_jpeg_read_application_data(ctx, &gbc, unit->content); - if(err < 0) - return err; - } - else if(unit->type == JPEG_MARKER_SOS) { - JPEGRawScan *scan; - int pos; - - err = ff_cbs_alloc_unit_content(unit, - sizeof(JPEGRawScan), - &cbs_jpeg_free_scan); - if(err < 0) - return err; - scan = unit->content; - - err = cbs_jpeg_read_scan_header(ctx, &gbc, &scan->header); - if(err < 0) - return err; - - pos = get_bits_count(&gbc); - av_assert0(pos % 8 == 0); - if(pos > 0) { - scan->data_size = unit->data_size - pos / 8; - scan->data_ref = av_buffer_ref(unit->data_ref); - if(!scan->data_ref) - return AVERROR(ENOMEM); - scan->data = unit->data + pos / 8; - } - } - else { - switch(unit->type) { -#define SEGMENT(marker, type, func, free) \ - case JPEG_MARKER_##marker: { \ - err = ff_cbs_alloc_unit_content(unit, \ - sizeof(type), free); \ - if(err < 0) \ - return err; \ - err = cbs_jpeg_read_##func(ctx, &gbc, unit->content); \ - if(err < 0) \ - return err; \ - } break - SEGMENT(DQT, JPEGRawQuantisationTableSpecification, dqt, NULL); - SEGMENT(DHT, JPEGRawHuffmanTableSpecification, dht, NULL); - SEGMENT(COM, JPEGRawComment, comment, 
&cbs_jpeg_free_comment); + } else { + switch (unit->type) { +#define SEGMENT(marker, type, func, free) \ + case JPEG_MARKER_ ## marker: \ + { \ + err = ff_cbs_alloc_unit_content(unit, \ + sizeof(type), free); \ + if (err < 0) \ + return err; \ + err = cbs_jpeg_read_ ## func(ctx, &gbc, unit->content); \ + if (err < 0) \ + return err; \ + } \ + break + SEGMENT(DQT, JPEGRawQuantisationTableSpecification, dqt, NULL); + SEGMENT(DHT, JPEGRawHuffmanTableSpecification, dht, NULL); + SEGMENT(COM, JPEGRawComment, comment, &cbs_jpeg_free_comment); #undef SEGMENT - default: - return AVERROR(ENOSYS); + default: + return AVERROR(ENOSYS); + } } - } - return 0; + return 0; } static int cbs_jpeg_write_scan(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - JPEGRawScan *scan = unit->content; - int err; + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + JPEGRawScan *scan = unit->content; + int err; - err = cbs_jpeg_write_scan_header(ctx, pbc, &scan->header); - if(err < 0) - return err; + err = cbs_jpeg_write_scan_header(ctx, pbc, &scan->header); + if (err < 0) + return err; - if(scan->data) { - if(scan->data_size * 8 > put_bits_left(pbc)) - return AVERROR(ENOSPC); + if (scan->data) { + if (scan->data_size * 8 > put_bits_left(pbc)) + return AVERROR(ENOSPC); - av_assert0(put_bits_count(pbc) % 8 == 0); + av_assert0(put_bits_count(pbc) % 8 == 0); - flush_put_bits(pbc); + flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), scan->data, scan->data_size); - skip_put_bytes(pbc, scan->data_size); - } + memcpy(put_bits_ptr(pbc), scan->data, scan->data_size); + skip_put_bytes(pbc, scan->data_size); + } - return 0; + return 0; } static int cbs_jpeg_write_segment(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - int err; - - if(unit->type >= JPEG_MARKER_SOF0 && - unit->type <= JPEG_MARKER_SOF3) { - err = cbs_jpeg_write_frame_header(ctx, pbc, unit->content); - } - else if(unit->type >= JPEG_MARKER_APPN && - unit->type <= 
JPEG_MARKER_APPN + 15) { - err = cbs_jpeg_write_application_data(ctx, pbc, unit->content); - } - else { - switch(unit->type) { -#define SEGMENT(marker, func) \ - case JPEG_MARKER_##marker: \ - err = cbs_jpeg_write_##func(ctx, pbc, unit->content); \ - break; - SEGMENT(DQT, dqt); - SEGMENT(DHT, dht); - SEGMENT(COM, comment); - default: - return AVERROR_PATCHWELCOME; + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + int err; + + if (unit->type >= JPEG_MARKER_SOF0 && + unit->type <= JPEG_MARKER_SOF3) { + err = cbs_jpeg_write_frame_header(ctx, pbc, unit->content); + } else if (unit->type >= JPEG_MARKER_APPN && + unit->type <= JPEG_MARKER_APPN + 15) { + err = cbs_jpeg_write_application_data(ctx, pbc, unit->content); + } else { + switch (unit->type) { +#define SEGMENT(marker, func) \ + case JPEG_MARKER_ ## marker: \ + err = cbs_jpeg_write_ ## func(ctx, pbc, unit->content); \ + break; + SEGMENT(DQT, dqt); + SEGMENT(DHT, dht); + SEGMENT(COM, comment); + default: + return AVERROR_PATCHWELCOME; + } } - } - return err; + return err; } static int cbs_jpeg_write_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - if(unit->type == JPEG_MARKER_SOS) - return cbs_jpeg_write_scan(ctx, unit, pbc); - else - return cbs_jpeg_write_segment(ctx, unit, pbc); + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + if (unit->type == JPEG_MARKER_SOS) + return cbs_jpeg_write_scan (ctx, unit, pbc); + else + return cbs_jpeg_write_segment(ctx, unit, pbc); } static int cbs_jpeg_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - const CodedBitstreamUnit *unit; - uint8_t *data; - size_t size, dp, sp; - int i; - - size = 4; // SOI + EOI. 
- for(i = 0; i < frag->nb_units; i++) { - unit = &frag->units[i]; - size += 2 + unit->data_size; - if(unit->type == JPEG_MARKER_SOS) { - for(sp = 0; sp < unit->data_size; sp++) { - if(unit->data[sp] == 0xff) - ++size; - } + CodedBitstreamFragment *frag) +{ + const CodedBitstreamUnit *unit; + uint8_t *data; + size_t size, dp, sp; + int i; + + size = 4; // SOI + EOI. + for (i = 0; i < frag->nb_units; i++) { + unit = &frag->units[i]; + size += 2 + unit->data_size; + if (unit->type == JPEG_MARKER_SOS) { + for (sp = 0; sp < unit->data_size; sp++) { + if (unit->data[sp] == 0xff) + ++size; + } + } } - } - - frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!frag->data_ref) - return AVERROR(ENOMEM); - data = frag->data_ref->data; - dp = 0; - - data[dp++] = 0xff; - data[dp++] = JPEG_MARKER_SOI; + frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!frag->data_ref) + return AVERROR(ENOMEM); + data = frag->data_ref->data; - for(i = 0; i < frag->nb_units; i++) { - unit = &frag->units[i]; + dp = 0; data[dp++] = 0xff; - data[dp++] = unit->type; - - if(unit->type != JPEG_MARKER_SOS) { - memcpy(data + dp, unit->data, unit->data_size); - dp += unit->data_size; - } - else { - sp = AV_RB16(unit->data); - av_assert0(sp <= unit->data_size); - memcpy(data + dp, unit->data, sp); - dp += sp; - - for(; sp < unit->data_size; sp++) { - if(unit->data[sp] == 0xff) { - data[dp++] = 0xff; - data[dp++] = 0x00; + data[dp++] = JPEG_MARKER_SOI; + + for (i = 0; i < frag->nb_units; i++) { + unit = &frag->units[i]; + + data[dp++] = 0xff; + data[dp++] = unit->type; + + if (unit->type != JPEG_MARKER_SOS) { + memcpy(data + dp, unit->data, unit->data_size); + dp += unit->data_size; + } else { + sp = AV_RB16(unit->data); + av_assert0(sp <= unit->data_size); + memcpy(data + dp, unit->data, sp); + dp += sp; + + for (; sp < unit->data_size; sp++) { + if (unit->data[sp] == 0xff) { + data[dp++] = 0xff; + data[dp++] = 0x00; + } else { + data[dp++] = 
unit->data[sp]; + } + } } - else { - data[dp++] = unit->data[sp]; - } - } } - } - data[dp++] = 0xff; - data[dp++] = JPEG_MARKER_EOI; + data[dp++] = 0xff; + data[dp++] = JPEG_MARKER_EOI; - av_assert0(dp == size); + av_assert0(dp == size); - memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - frag->data = data; - frag->data_size = size; + memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + frag->data = data; + frag->data_size = size; - return 0; + return 0; } const CodedBitstreamType ff_cbs_type_jpeg = { - .codec_id = AV_CODEC_ID_MJPEG, + .codec_id = AV_CODEC_ID_MJPEG, - .split_fragment = &cbs_jpeg_split_fragment, - .read_unit = &cbs_jpeg_read_unit, - .write_unit = &cbs_jpeg_write_unit, - .assemble_fragment = &cbs_jpeg_assemble_fragment, + .split_fragment = &cbs_jpeg_split_fragment, + .read_unit = &cbs_jpeg_read_unit, + .write_unit = &cbs_jpeg_write_unit, + .assemble_fragment = &cbs_jpeg_assemble_fragment, }; diff --git a/third-party/cbs/cbs_jpeg_syntax_template.c b/third-party/cbs/cbs_jpeg_syntax_template.c index 613a68359c6..e06abdc674b 100644 --- a/third-party/cbs/cbs_jpeg_syntax_template.c +++ b/third-party/cbs/cbs_jpeg_syntax_template.c @@ -17,173 +17,180 @@ */ static int FUNC(frame_header)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawFrameHeader *current) { - int err, i; + JPEGRawFrameHeader *current) +{ + int err, i; - HEADER("Frame Header"); + HEADER("Frame Header"); - u(16, Lf, 8, 8 + 3 * JPEG_MAX_COMPONENTS); + u(16, Lf, 8, 8 + 3 * JPEG_MAX_COMPONENTS); - u(8, P, 2, 16); - u(16, Y, 0, JPEG_MAX_HEIGHT); - u(16, X, 1, JPEG_MAX_WIDTH); - u(8, Nf, 1, JPEG_MAX_COMPONENTS); + u(8, P, 2, 16); + u(16, Y, 0, JPEG_MAX_HEIGHT); + u(16, X, 1, JPEG_MAX_WIDTH); + u(8, Nf, 1, JPEG_MAX_COMPONENTS); - for(i = 0; i < current->Nf; i++) { - us(8, C[i], i, 0, JPEG_MAX_COMPONENTS); - us(4, H[i], i, 1, 4); - us(4, V[i], i, 1, 4); - us(8, Tq[i], i, 0, 3); - } + for (i = 0; i < current->Nf; i++) { + us(8, C[i], i, 0, JPEG_MAX_COMPONENTS); + us(4, H[i], i, 1, 4); + 
us(4, V[i], i, 1, 4); + us(8, Tq[i], i, 0, 3); + } - return 0; + return 0; } static int FUNC(quantisation_table)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawQuantisationTable *current) { - int err, i; - - u(4, Pq, 0, 1); - u(4, Tq, 0, 3); - - if(current->Pq) { - for(i = 0; i < 64; i++) - us(16, Q[i], i, 1, 255); - } - else { - for(i = 0; i < 64; i++) - us(8, Q[i], i, 1, 255); - } - - return 0; + JPEGRawQuantisationTable *current) +{ + int err, i; + + u(4, Pq, 0, 1); + u(4, Tq, 0, 3); + + if (current->Pq) { + for (i = 0; i < 64; i++) + us(16, Q[i], i, 1, 255); + } else { + for (i = 0; i < 64; i++) + us(8, Q[i], i, 1, 255); + } + + return 0; } static int FUNC(dqt)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawQuantisationTableSpecification *current) { - int err, i, n; + JPEGRawQuantisationTableSpecification *current) +{ + int err, i, n; - HEADER("Quantisation Tables"); + HEADER("Quantisation Tables"); - u(16, Lq, 2, 2 + 4 * 65); - n = current->Lq / 65; + u(16, Lq, 2, 2 + 4 * 65); + n = current->Lq / 65; - for(i = 0; i < n; i++) - CHECK(FUNC(quantisation_table)(ctx, rw, ¤t->table[i])); + for (i = 0; i < n; i++) + CHECK(FUNC(quantisation_table)(ctx, rw, ¤t->table[i])); - return 0; + return 0; } static int FUNC(huffman_table)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawHuffmanTable *current) { - int err, i, j, ij; - - u(4, Tc, 0, 1); - u(4, Th, 0, 3); - - for(i = 0; i < 16; i++) - us(8, L[i], i, 0, 224); - - ij = 0; - for(i = 0; i < 16; i++) { - for(j = 0; j < current->L[i]; j++) { - if(ij >= 224) - return AVERROR_INVALIDDATA; - us(8, V[ij], ij, 0, 255); - ++ij; + JPEGRawHuffmanTable *current) +{ + int err, i, j, ij; + + u(4, Tc, 0, 1); + u(4, Th, 0, 3); + + for (i = 0; i < 16; i++) + us(8, L[i], i, 0, 255); + + ij = 0; + for (i = 0; i < 16; i++) { + for (j = 0; j < current->L[i]; j++) { + if (ij >= FF_ARRAY_ELEMS(current->V)) + return AVERROR_INVALIDDATA; + us(8, V[ij], ij, 0, 255); + ++ij; + } } - } - return 0; + return 0; } static int 
FUNC(dht)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawHuffmanTableSpecification *current) { - int err, i, j, n; + JPEGRawHuffmanTableSpecification *current) +{ + int err, i, j, n; - HEADER("Huffman Tables"); + HEADER("Huffman Tables"); - u(16, Lh, 2, 2 + 8 * (1 + 16 + 256)); + u(16, Lh, 2, 2 + 8 * (1 + 16 + 256)); - n = 2; - for(i = 0; n < current->Lh; i++) { - if(i >= 8) - return AVERROR_INVALIDDATA; + n = 2; + for (i = 0; n < current->Lh; i++) { + if (i >= 8) + return AVERROR_INVALIDDATA; - CHECK(FUNC(huffman_table)(ctx, rw, ¤t->table[i])); + CHECK(FUNC(huffman_table)(ctx, rw, ¤t->table[i])); - ++n; - for(j = 0; j < 16; j++) - n += 1 + current->table[i].L[j]; - } + ++n; + for (j = 0; j < 16; j++) + n += 1 + current->table[i].L[j]; + } - return 0; + return 0; } static int FUNC(scan_header)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawScanHeader *current) { - int err, j; + JPEGRawScanHeader *current) +{ + int err, j; - HEADER("Scan"); + HEADER("Scan"); - u(16, Ls, 6, 6 + 2 * JPEG_MAX_COMPONENTS); + u(16, Ls, 6, 6 + 2 * JPEG_MAX_COMPONENTS); - u(8, Ns, 1, 4); - for(j = 0; j < current->Ns; j++) { - us(8, Cs[j], j, 0, JPEG_MAX_COMPONENTS); - us(4, Td[j], j, 0, 3); - us(4, Ta[j], j, 0, 3); - } + u(8, Ns, 1, 4); + for (j = 0; j < current->Ns; j++) { + us(8, Cs[j], j, 0, JPEG_MAX_COMPONENTS); + us(4, Td[j], j, 0, 3); + us(4, Ta[j], j, 0, 3); + } - u(8, Ss, 0, 63); - u(8, Se, 0, 63); - u(4, Ah, 0, 13); - u(4, Al, 0, 15); + u(8, Ss, 0, 63); + u(8, Se, 0, 63); + u(4, Ah, 0, 13); + u(4, Al, 0, 15); - return 0; + return 0; } static int FUNC(application_data)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawApplicationData *current) { - int err, i; + JPEGRawApplicationData *current) +{ + int err, i; - HEADER("Application Data"); + HEADER("Application Data"); - u(16, Lp, 2, 65535); + u(16, Lp, 2, 65535); - if(current->Lp > 2) { + if (current->Lp > 2) { #ifdef READ - current->Ap_ref = av_buffer_alloc(current->Lp - 2); - if(!current->Ap_ref) - return 
AVERROR(ENOMEM); - current->Ap = current->Ap_ref->data; + current->Ap_ref = av_buffer_alloc(current->Lp - 2); + if (!current->Ap_ref) + return AVERROR(ENOMEM); + current->Ap = current->Ap_ref->data; #endif - for(i = 0; i < current->Lp - 2; i++) - us(8, Ap[i], i, 0, 255); - } + for (i = 0; i < current->Lp - 2; i++) + us(8, Ap[i], i, 0, 255); + } - return 0; + return 0; } static int FUNC(comment)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawComment *current) { - int err, i; + JPEGRawComment *current) +{ + int err, i; - HEADER("Comment"); + HEADER("Comment"); - u(16, Lc, 2, 65535); + u(16, Lc, 2, 65535); - if(current->Lc > 2) { + if (current->Lc > 2) { #ifdef READ - current->Cm_ref = av_buffer_alloc(current->Lc - 2); - if(!current->Cm_ref) - return AVERROR(ENOMEM); - current->Cm = current->Cm_ref->data; + current->Cm_ref = av_buffer_alloc(current->Lc - 2); + if (!current->Cm_ref) + return AVERROR(ENOMEM); + current->Cm = current->Cm_ref->data; #endif - for(i = 0; i < current->Lc - 2; i++) - us(8, Cm[i], i, 0, 255); - } + for (i = 0; i < current->Lc - 2; i++) + us(8, Cm[i], i, 0, 255); + } - return 0; + return 0; } diff --git a/third-party/cbs/cbs_mpeg2.c b/third-party/cbs/cbs_mpeg2.c index 98a975f8f08..2b6a3fcc7bc 100644 --- a/third-party/cbs/cbs_mpeg2.c +++ b/third-party/cbs/cbs_mpeg2.c @@ -16,85 +16,80 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include +#include "libavutil/avassert.h" +// [manual] Changed include path #include "cbs/cbs.h" -#include "cbs/cbs_mpeg2.h" - #include "cbs_internal.h" +#include "cbs/cbs_mpeg2.h" +#include "startcode.h" -#define HEADER(name) \ - do { \ - ff_cbs_trace_header(ctx, name); \ - } while(0) +#define HEADER(name) do { \ + ff_cbs_trace_header(ctx, name); \ + } while (0) -#define CHECK(call) \ - do { \ - err = (call); \ - if(err < 0) \ - return err; \ - } while(0) +#define CHECK(call) do { \ + err = (call); \ + if (err < 0) \ + return err; \ + } while (0) -#define FUNC_NAME(rw, codec, 
name) cbs_##codec##_##rw##_##name +#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name #define FUNC_MPEG2(rw, name) FUNC_NAME(rw, mpeg2, name) #define FUNC(name) FUNC_MPEG2(READWRITE, name) -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]) { subs, __VA_ARGS__ }) : NULL) +#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) #define ui(width, name) \ - xui(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) + xui(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) #define uir(width, name) \ - xui(width, name, current->name, 1, MAX_UINT_BITS(width), 0, ) + xui(width, name, current->name, 1, MAX_UINT_BITS(width), 0, ) #define uis(width, name, subs, ...) \ - xui(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) + xui(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) #define uirs(width, name, subs, ...) \ - xui(width, name, current->name, 1, MAX_UINT_BITS(width), subs, __VA_ARGS__) + xui(width, name, current->name, 1, MAX_UINT_BITS(width), subs, __VA_ARGS__) #define xui(width, name, var, range_min, range_max, subs, ...) \ - xuia(width, #name, var, range_min, range_max, subs, __VA_ARGS__) + xuia(width, #name, var, range_min, range_max, subs, __VA_ARGS__) #define sis(width, name, subs, ...) \ - xsi(width, name, current->name, subs, __VA_ARGS__) + xsi(width, name, current->name, subs, __VA_ARGS__) #define marker_bit() \ - bit("marker_bit", 1) -#define bit(string, value) \ - do { \ - av_unused uint32_t bit = value; \ - xuia(1, string, bit, value, value, 0, ); \ - } while(0) + bit("marker_bit", 1) +#define bit(string, value) do { \ + av_unused uint32_t bit = value; \ + xuia(1, string, bit, value, value, 0, ); \ + } while (0) #define READ #define READWRITE read #define RWContext GetBitContext -#define xuia(width, string, var, range_min, range_max, subs, ...) 
\ - do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, string, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while(0) - -#define xsi(width, name, var, subs, ...) \ - do { \ - int32_t value; \ - CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - var = value; \ - } while(0) +#define xuia(width, string, var, range_min, range_max, subs, ...) do { \ + uint32_t value; \ + CHECK(ff_cbs_read_unsigned(ctx, rw, width, string, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, range_min, range_max)); \ + var = value; \ + } while (0) + +#define xsi(width, name, var, subs, ...) do { \ + int32_t value; \ + CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), &value, \ + MIN_INT_BITS(width), \ + MAX_INT_BITS(width))); \ + var = value; \ + } while (0) #define nextbits(width, compare, var) \ - (get_bits_left(rw) >= width && \ - (var = show_bits(rw, width)) == (compare)) + (get_bits_left(rw) >= width && \ + (var = show_bits(rw, width)) == (compare)) -#define infer(name, value) \ - do { \ - current->name = value; \ - } while(0) +#define infer(name, value) do { \ + current->name = value; \ + } while (0) #include "cbs_mpeg2_syntax_template.c" @@ -111,32 +106,29 @@ #define READWRITE write #define RWContext PutBitContext -#define xuia(width, string, var, range_min, range_max, subs, ...) \ - do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, string, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - var, range_min, range_max)); \ - } while(0) - -#define xsi(width, name, var, subs, ...) \ - do { \ - CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), var, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - } while(0) +#define xuia(width, string, var, range_min, range_max, subs, ...) 
do { \ + CHECK(ff_cbs_write_unsigned(ctx, rw, width, string, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + var, range_min, range_max)); \ + } while (0) + +#define xsi(width, name, var, subs, ...) do { \ + CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), var, \ + MIN_INT_BITS(width), \ + MAX_INT_BITS(width))); \ + } while (0) #define nextbits(width, compare, var) (var) -#define infer(name, value) \ - do { \ - if(current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \ - "%s does not match inferred value: " \ - "%" PRId64 ", but should be %" PRId64 ".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - } \ - } while(0) +#define infer(name, value) do { \ + if (current->name != (value)) { \ + av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \ + "%s does not match inferred value: " \ + "%"PRId64", but should be %"PRId64".\n", \ + #name, (int64_t)current->name, (int64_t)(value)); \ + } \ + } while (0) #include "cbs_mpeg2_syntax_template.c" @@ -148,322 +140,289 @@ #undef nextbits #undef infer -static const uint8_t *avpriv_find_start_code(const uint8_t *restrict p, - const uint8_t *end, - uint32_t *restrict state) { - int i; - - av_assert0(p <= end); - if(p >= end) - return end; - - for(i = 0; i < 3; i++) { - uint32_t tmp = *state << 8; - *state = tmp + *(p++); - if(tmp == 0x100 || p == end) - return p; - } - - while(p < end) { - if(p[-1] > 1) p += 3; - else if(p[-2]) - p += 2; - else if(p[-3] | (p[-1] - 1)) - p++; - else { - p++; - break; - } - } - - p = FFMIN(p, end) - 4; - *state = AV_RB32(p); - - return p + 4; -} static int cbs_mpeg2_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) { - const uint8_t *start, *end; - CodedBitstreamUnitType unit_type; - uint32_t start_code = -1; - size_t unit_size; - int err, i, final = 0; - - start = avpriv_find_start_code(frag->data, frag->data + frag->data_size, - &start_code); - if(start_code >> 8 != 0x000001) { - // No start code 
found. - return AVERROR_INVALIDDATA; - } - - for(i = 0;; i++) { - unit_type = start_code & 0xff; - - if(start == frag->data + frag->data_size) { - // The last four bytes form a start code which constitutes - // a unit of its own. In this situation avpriv_find_start_code - // won't modify start_code at all so modify start_code so that - // the next unit will be treated as the last unit. - start_code = 0; - } - - end = avpriv_find_start_code(start--, frag->data + frag->data_size, - &start_code); - - // start points to the byte containing the start_code_identifier - // (may be the last byte of fragment->data); end points to the byte - // following the byte containing the start code identifier (or to - // the end of fragment->data). - if(start_code >> 8 == 0x000001) { - // Unit runs from start to the beginning of the start code - // pointed to by end (including any padding zeroes). - unit_size = (end - 4) - start; - } - else { - // We didn't find a start code, so this is the final unit. - unit_size = end - start; - final = 1; + CodedBitstreamFragment *frag, + int header) +{ + const uint8_t *start; + uint32_t start_code = -1; + int err; + + start = avpriv_find_start_code(frag->data, frag->data + frag->data_size, + &start_code); + if (start_code >> 8 != 0x000001) { + // No start code found. + return AVERROR_INVALIDDATA; } - err = ff_cbs_insert_unit_data(frag, i, unit_type, (uint8_t *)start, - unit_size, frag->data_ref); - if(err < 0) - return err; - - if(final) - break; - - start = end; - } - - return 0; + do { + CodedBitstreamUnitType unit_type = start_code & 0xff; + const uint8_t *end; + size_t unit_size; + + // Reset start_code to ensure that avpriv_find_start_code() + // really reads a new start code and does not reuse the old + // start code in any way (as e.g. happens when there is a + // Sequence End unit at the very end of a packet). 
+ start_code = UINT32_MAX; + end = avpriv_find_start_code(start--, frag->data + frag->data_size, + &start_code); + + // start points to the byte containing the start_code_identifier + // (may be the last byte of fragment->data); end points to the byte + // following the byte containing the start code identifier (or to + // the end of fragment->data). + if (start_code >> 8 == 0x000001) { + // Unit runs from start to the beginning of the start code + // pointed to by end (including any padding zeroes). + unit_size = (end - 4) - start; + } else { + // We didn't find a start code, so this is the final unit. + unit_size = end - start; + } + + err = ff_cbs_append_unit_data(frag, unit_type, (uint8_t*)start, + unit_size, frag->data_ref); + if (err < 0) + return err; + + start = end; + + // Do we have a further unit to add to the fragment? + } while ((start_code >> 8) == 0x000001); + + return 0; } static int cbs_mpeg2_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if(err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if(err < 0) - return err; - - if(MPEG2_START_IS_SLICE(unit->type)) { - MPEG2RawSlice *slice = unit->content; - int pos, len; - - err = cbs_mpeg2_read_slice_header(ctx, &gbc, &slice->header); - if(err < 0) - return err; - - if(!get_bits_left(&gbc)) - return AVERROR_INVALIDDATA; - - pos = get_bits_count(&gbc); - len = unit->data_size; - - slice->data_size = len - pos / 8; - slice->data_ref = av_buffer_ref(unit->data_ref); - if(!slice->data_ref) - return AVERROR(ENOMEM); - slice->data = unit->data + pos / 8; - - slice->data_bit_start = pos % 8; - } - else { - switch(unit->type) { -#define START(start_code, type, read_func, free_func) \ - case start_code: { \ - type *header = unit->content; \ - err = cbs_mpeg2_read_##read_func(ctx, &gbc, header); \ - if(err < 0) \ - return err; \ - } break; - START(MPEG2_START_PICTURE, 
MPEG2RawPictureHeader, - picture_header, &cbs_mpeg2_free_picture_header); - START(MPEG2_START_USER_DATA, MPEG2RawUserData, - user_data, &cbs_mpeg2_free_user_data); - START(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader, - sequence_header, NULL); - START(MPEG2_START_EXTENSION, MPEG2RawExtensionData, - extension_data, NULL); - START(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader, - group_of_pictures_header, NULL); - START(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd, - sequence_end, NULL); + CodedBitstreamUnit *unit) +{ + GetBitContext gbc; + int err; + + err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); + if (err < 0) + return err; + + err = ff_cbs_alloc_unit_content2(ctx, unit); + if (err < 0) + return err; + + if (MPEG2_START_IS_SLICE(unit->type)) { + MPEG2RawSlice *slice = unit->content; + int pos, len; + + err = cbs_mpeg2_read_slice_header(ctx, &gbc, &slice->header); + if (err < 0) + return err; + + if (!get_bits_left(&gbc)) + return AVERROR_INVALIDDATA; + + pos = get_bits_count(&gbc); + len = unit->data_size; + + slice->data_size = len - pos / 8; + slice->data_ref = av_buffer_ref(unit->data_ref); + if (!slice->data_ref) + return AVERROR(ENOMEM); + slice->data = unit->data + pos / 8; + + slice->data_bit_start = pos % 8; + + } else { + switch (unit->type) { +#define START(start_code, type, read_func, free_func) \ + case start_code: \ + { \ + type *header = unit->content; \ + err = cbs_mpeg2_read_ ## read_func(ctx, &gbc, header); \ + if (err < 0) \ + return err; \ + } \ + break; + START(MPEG2_START_PICTURE, MPEG2RawPictureHeader, + picture_header, &cbs_mpeg2_free_picture_header); + START(MPEG2_START_USER_DATA, MPEG2RawUserData, + user_data, &cbs_mpeg2_free_user_data); + START(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader, + sequence_header, NULL); + START(MPEG2_START_EXTENSION, MPEG2RawExtensionData, + extension_data, NULL); + START(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader, + group_of_pictures_header, NULL); + 
START(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd, + sequence_end, NULL); #undef START - default: - return AVERROR(ENOSYS); + default: + return AVERROR(ENOSYS); + } } - } - return 0; + return 0; } static int cbs_mpeg2_write_header(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - int err; - - switch(unit->type) { -#define START(start_code, type, func) \ - case start_code: \ - err = cbs_mpeg2_write_##func(ctx, pbc, unit->content); \ - break; - START(MPEG2_START_PICTURE, MPEG2RawPictureHeader, picture_header); - START(MPEG2_START_USER_DATA, MPEG2RawUserData, user_data); - START(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader, sequence_header); - START(MPEG2_START_EXTENSION, MPEG2RawExtensionData, extension_data); - START(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader, - group_of_pictures_header); - START(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd, sequence_end); + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + int err; + + switch (unit->type) { +#define START(start_code, type, func) \ + case start_code: \ + err = cbs_mpeg2_write_ ## func(ctx, pbc, unit->content); \ + break; + START(MPEG2_START_PICTURE, MPEG2RawPictureHeader, picture_header); + START(MPEG2_START_USER_DATA, MPEG2RawUserData, user_data); + START(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader, sequence_header); + START(MPEG2_START_EXTENSION, MPEG2RawExtensionData, extension_data); + START(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader, + group_of_pictures_header); + START(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd, sequence_end); #undef START - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for start " - "code %02" PRIx32 ".\n", - unit->type); - return AVERROR_PATCHWELCOME; - } - - return err; -} - -static int cbs_mpeg2_write_slice(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - MPEG2RawSlice *slice = unit->content; - int err; - - err = cbs_mpeg2_write_slice_header(ctx, pbc, &slice->header); - 
if(err < 0) - return err; - - if(slice->data) { - size_t rest = slice->data_size - (slice->data_bit_start + 7) / 8; - uint8_t *pos = slice->data + slice->data_bit_start / 8; - - av_assert0(slice->data_bit_start >= 0 && - slice->data_size > slice->data_bit_start / 8); - - if(slice->data_size * 8 + 8 > put_bits_left(pbc)) - return AVERROR(ENOSPC); - - // First copy the remaining bits of the first byte - if(slice->data_bit_start % 8) - put_bits(pbc, 8 - slice->data_bit_start % 8, - *pos++ & MAX_UINT_BITS(8 - slice->data_bit_start % 8)); - - if(put_bits_count(pbc) % 8 == 0) { - // If the writer is aligned at this point, - // memcpy can be used to improve performance. - // This is the normal case. - flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), pos, rest); - skip_put_bytes(pbc, rest); + default: + av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for start " + "code %02"PRIx32".\n", unit->type); + return AVERROR_PATCHWELCOME; } - else { - // If not, we have to copy manually: - for(; rest > 3; rest -= 4, pos += 4) - put_bits32(pbc, AV_RB32(pos)); - for(; rest; rest--, pos++) - put_bits(pbc, 8, *pos); + return err; +} - // Align with zeros - put_bits(pbc, 8 - put_bits_count(pbc) % 8, 0); +static int cbs_mpeg2_write_slice(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + MPEG2RawSlice *slice = unit->content; + int err; + + err = cbs_mpeg2_write_slice_header(ctx, pbc, &slice->header); + if (err < 0) + return err; + + if (slice->data) { + size_t rest = slice->data_size - (slice->data_bit_start + 7) / 8; + uint8_t *pos = slice->data + slice->data_bit_start / 8; + + av_assert0(slice->data_bit_start >= 0 && + slice->data_size > slice->data_bit_start / 8); + + if (slice->data_size * 8 + 8 > put_bits_left(pbc)) + return AVERROR(ENOSPC); + + // First copy the remaining bits of the first byte + if (slice->data_bit_start % 8) + put_bits(pbc, 8 - slice->data_bit_start % 8, + *pos++ & MAX_UINT_BITS(8 - slice->data_bit_start % 8)); + + if 
(put_bits_count(pbc) % 8 == 0) { + // If the writer is aligned at this point, + // memcpy can be used to improve performance. + // This is the normal case. + flush_put_bits(pbc); + memcpy(put_bits_ptr(pbc), pos, rest); + skip_put_bytes(pbc, rest); + } else { + // If not, we have to copy manually: + for (; rest > 3; rest -= 4, pos += 4) + put_bits32(pbc, AV_RB32(pos)); + + for (; rest; rest--, pos++) + put_bits(pbc, 8, *pos); + + // Align with zeros + put_bits(pbc, 8 - put_bits_count(pbc) % 8, 0); + } } - } - return 0; + return 0; } static int cbs_mpeg2_write_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - if(MPEG2_START_IS_SLICE(unit->type)) - return cbs_mpeg2_write_slice(ctx, unit, pbc); - else - return cbs_mpeg2_write_header(ctx, unit, pbc); + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + if (MPEG2_START_IS_SLICE(unit->type)) + return cbs_mpeg2_write_slice (ctx, unit, pbc); + else + return cbs_mpeg2_write_header(ctx, unit, pbc); } static int cbs_mpeg2_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - uint8_t *data; - size_t size, dp; - int i; - - size = 0; - for(i = 0; i < frag->nb_units; i++) - size += 3 + frag->units[i].data_size; - - frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!frag->data_ref) - return AVERROR(ENOMEM); - data = frag->data_ref->data; - - dp = 0; - for(i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - data[dp++] = 0; - data[dp++] = 0; - data[dp++] = 1; - - memcpy(data + dp, unit->data, unit->data_size); - dp += unit->data_size; - } + CodedBitstreamFragment *frag) +{ + uint8_t *data; + size_t size, dp; + int i; + + size = 0; + for (i = 0; i < frag->nb_units; i++) + size += 3 + frag->units[i].data_size; + + frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!frag->data_ref) + return AVERROR(ENOMEM); + data = frag->data_ref->data; + + dp = 0; + for (i = 0; i < frag->nb_units; i++) 
{ + CodedBitstreamUnit *unit = &frag->units[i]; + + data[dp++] = 0; + data[dp++] = 0; + data[dp++] = 1; + + memcpy(data + dp, unit->data, unit->data_size); + dp += unit->data_size; + } - av_assert0(dp == size); + av_assert0(dp == size); - memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - frag->data = data; - frag->data_size = size; + memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + frag->data = data; + frag->data_size = size; - return 0; + return 0; } static const CodedBitstreamUnitTypeDescriptor cbs_mpeg2_unit_types[] = { - CBS_UNIT_TYPE_INTERNAL_REF(MPEG2_START_PICTURE, MPEG2RawPictureHeader, - extra_information_picture.extra_information), - - { - .nb_unit_types = CBS_UNIT_TYPE_RANGE, - .unit_type_range_start = 0x01, - .unit_type_range_end = 0xaf, - - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(MPEG2RawSlice), - .nb_ref_offsets = 2, - .ref_offsets = { offsetof(MPEG2RawSlice, header.extra_information_slice.extra_information), - offsetof(MPEG2RawSlice, data) }, - }, - - CBS_UNIT_TYPE_INTERNAL_REF(MPEG2_START_USER_DATA, MPEG2RawUserData, - user_data), - - CBS_UNIT_TYPE_POD(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader), - CBS_UNIT_TYPE_POD(MPEG2_START_EXTENSION, MPEG2RawExtensionData), - CBS_UNIT_TYPE_POD(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd), - CBS_UNIT_TYPE_POD(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader), - - CBS_UNIT_TYPE_END_OF_LIST + CBS_UNIT_TYPE_INTERNAL_REF(MPEG2_START_PICTURE, MPEG2RawPictureHeader, + extra_information_picture.extra_information), + + { + .nb_unit_types = CBS_UNIT_TYPE_RANGE, + .unit_type_range_start = 0x01, + .unit_type_range_end = 0xaf, + + .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, + .content_size = sizeof(MPEG2RawSlice), + .nb_ref_offsets = 2, + .ref_offsets = { offsetof(MPEG2RawSlice, header.extra_information_slice.extra_information), + offsetof(MPEG2RawSlice, data) }, + }, + + CBS_UNIT_TYPE_INTERNAL_REF(MPEG2_START_USER_DATA, MPEG2RawUserData, + user_data), + + 
CBS_UNIT_TYPE_POD(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader), + CBS_UNIT_TYPE_POD(MPEG2_START_EXTENSION, MPEG2RawExtensionData), + CBS_UNIT_TYPE_POD(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd), + CBS_UNIT_TYPE_POD(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader), + + CBS_UNIT_TYPE_END_OF_LIST }; const CodedBitstreamType ff_cbs_type_mpeg2 = { - .codec_id = AV_CODEC_ID_MPEG2VIDEO, + .codec_id = AV_CODEC_ID_MPEG2VIDEO, - .priv_data_size = sizeof(CodedBitstreamMPEG2Context), + .priv_data_size = sizeof(CodedBitstreamMPEG2Context), - .unit_types = cbs_mpeg2_unit_types, + .unit_types = cbs_mpeg2_unit_types, - .split_fragment = &cbs_mpeg2_split_fragment, - .read_unit = &cbs_mpeg2_read_unit, - .write_unit = &cbs_mpeg2_write_unit, - .assemble_fragment = &cbs_mpeg2_assemble_fragment, + .split_fragment = &cbs_mpeg2_split_fragment, + .read_unit = &cbs_mpeg2_read_unit, + .write_unit = &cbs_mpeg2_write_unit, + .assemble_fragment = &cbs_mpeg2_assemble_fragment, }; diff --git a/third-party/cbs/cbs_mpeg2_syntax_template.c b/third-party/cbs/cbs_mpeg2_syntax_template.c index 7fc1e80aa6f..5165a14cd50 100644 --- a/third-party/cbs/cbs_mpeg2_syntax_template.c +++ b/third-party/cbs/cbs_mpeg2_syntax_template.c @@ -17,397 +17,409 @@ */ static int FUNC(sequence_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceHeader *current) { - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err, i; + MPEG2RawSequenceHeader *current) +{ + CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; + int err, i; - HEADER("Sequence Header"); + HEADER("Sequence Header"); - ui(8, sequence_header_code); + ui(8, sequence_header_code); - uir(12, horizontal_size_value); - uir(12, vertical_size_value); + uir(12, horizontal_size_value); + uir(12, vertical_size_value); - mpeg2->horizontal_size = current->horizontal_size_value; - mpeg2->vertical_size = current->vertical_size_value; + mpeg2->horizontal_size = current->horizontal_size_value; + mpeg2->vertical_size = 
current->vertical_size_value; - uir(4, aspect_ratio_information); - uir(4, frame_rate_code); - ui(18, bit_rate_value); + uir(4, aspect_ratio_information); + uir(4, frame_rate_code); + ui(18, bit_rate_value); - marker_bit(); + marker_bit(); - ui(10, vbv_buffer_size_value); - ui(1, constrained_parameters_flag); + ui(10, vbv_buffer_size_value); + ui(1, constrained_parameters_flag); - ui(1, load_intra_quantiser_matrix); - if(current->load_intra_quantiser_matrix) { - for(i = 0; i < 64; i++) - uirs(8, intra_quantiser_matrix[i], 1, i); - } + ui(1, load_intra_quantiser_matrix); + if (current->load_intra_quantiser_matrix) { + for (i = 0; i < 64; i++) + uirs(8, intra_quantiser_matrix[i], 1, i); + } - ui(1, load_non_intra_quantiser_matrix); - if(current->load_non_intra_quantiser_matrix) { - for(i = 0; i < 64; i++) - uirs(8, non_intra_quantiser_matrix[i], 1, i); - } + ui(1, load_non_intra_quantiser_matrix); + if (current->load_non_intra_quantiser_matrix) { + for (i = 0; i < 64; i++) + uirs(8, non_intra_quantiser_matrix[i], 1, i); + } - return 0; + return 0; } static int FUNC(user_data)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawUserData *current) { - size_t k; - int err; + MPEG2RawUserData *current) +{ + size_t k; + int err; - HEADER("User Data"); + HEADER("User Data"); - ui(8, user_data_start_code); + ui(8, user_data_start_code); #ifdef READ - k = get_bits_left(rw); - av_assert0(k % 8 == 0); - current->user_data_length = k /= 8; - if(k > 0) { - current->user_data_ref = av_buffer_allocz(k + AV_INPUT_BUFFER_PADDING_SIZE); - if(!current->user_data_ref) - return AVERROR(ENOMEM); - current->user_data = current->user_data_ref->data; - } + k = get_bits_left(rw); + av_assert0(k % 8 == 0); + current->user_data_length = k /= 8; + if (k > 0) { + current->user_data_ref = av_buffer_allocz(k + AV_INPUT_BUFFER_PADDING_SIZE); + if (!current->user_data_ref) + return AVERROR(ENOMEM); + current->user_data = current->user_data_ref->data; + } #endif - for(k = 0; k < 
current->user_data_length; k++) - uis(8, user_data[k], 1, k); + for (k = 0; k < current->user_data_length; k++) + uis(8, user_data[k], 1, k); - return 0; + return 0; } static int FUNC(sequence_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceExtension *current) { - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err; - - HEADER("Sequence Extension"); - - ui(8, profile_and_level_indication); - ui(1, progressive_sequence); - ui(2, chroma_format); - ui(2, horizontal_size_extension); - ui(2, vertical_size_extension); - - mpeg2->horizontal_size = (mpeg2->horizontal_size & 0xfff) | - current->horizontal_size_extension << 12; - mpeg2->vertical_size = (mpeg2->vertical_size & 0xfff) | - current->vertical_size_extension << 12; - mpeg2->progressive_sequence = current->progressive_sequence; - - ui(12, bit_rate_extension); - marker_bit(); - ui(8, vbv_buffer_size_extension); - ui(1, low_delay); - ui(2, frame_rate_extension_n); - ui(5, frame_rate_extension_d); - - return 0; + MPEG2RawSequenceExtension *current) +{ + CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; + int err; + + HEADER("Sequence Extension"); + + ui(8, profile_and_level_indication); + ui(1, progressive_sequence); + ui(2, chroma_format); + ui(2, horizontal_size_extension); + ui(2, vertical_size_extension); + + mpeg2->horizontal_size = (mpeg2->horizontal_size & 0xfff) | + current->horizontal_size_extension << 12; + mpeg2->vertical_size = (mpeg2->vertical_size & 0xfff) | + current->vertical_size_extension << 12; + mpeg2->progressive_sequence = current->progressive_sequence; + + ui(12, bit_rate_extension); + marker_bit(); + ui(8, vbv_buffer_size_extension); + ui(1, low_delay); + ui(2, frame_rate_extension_n); + ui(5, frame_rate_extension_d); + + return 0; } static int FUNC(sequence_display_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceDisplayExtension *current) { - int err; + MPEG2RawSequenceDisplayExtension *current) +{ + int err; - HEADER("Sequence 
Display Extension"); + HEADER("Sequence Display Extension"); - ui(3, video_format); + ui(3, video_format); - ui(1, colour_description); - if(current->colour_description) { + ui(1, colour_description); + if (current->colour_description) { #ifdef READ -#define READ_AND_PATCH(name) \ - do { \ - ui(8, name); \ - if(current->name == 0) { \ - current->name = 2; \ - av_log(ctx->log_ctx, AV_LOG_WARNING, "%s in a sequence display " \ - "extension had the invalid value 0. Setting it to 2 " \ - "(meaning unknown) instead.\n", \ - #name); \ - } \ - } while(0) - READ_AND_PATCH(colour_primaries); - READ_AND_PATCH(transfer_characteristics); - READ_AND_PATCH(matrix_coefficients); +#define READ_AND_PATCH(name) do { \ + ui(8, name); \ + if (current->name == 0) { \ + current->name = 2; \ + av_log(ctx->log_ctx, AV_LOG_WARNING, "%s in a sequence display " \ + "extension had the invalid value 0. Setting it to 2 " \ + "(meaning unknown) instead.\n", #name); \ + } \ + } while (0) + READ_AND_PATCH(colour_primaries); + READ_AND_PATCH(transfer_characteristics); + READ_AND_PATCH(matrix_coefficients); #undef READ_AND_PATCH #else - uir(8, colour_primaries); - uir(8, transfer_characteristics); - uir(8, matrix_coefficients); + uir(8, colour_primaries); + uir(8, transfer_characteristics); + uir(8, matrix_coefficients); #endif - } - else { - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - - ui(14, display_horizontal_size); - marker_bit(); - ui(14, display_vertical_size); - - return 0; + } else { + infer(colour_primaries, 2); + infer(transfer_characteristics, 2); + infer(matrix_coefficients, 2); + } + + ui(14, display_horizontal_size); + marker_bit(); + ui(14, display_vertical_size); + + return 0; } static int FUNC(group_of_pictures_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawGroupOfPicturesHeader *current) { - int err; + MPEG2RawGroupOfPicturesHeader *current) +{ + int err; - HEADER("Group of Pictures Header"); + 
HEADER("Group of Pictures Header"); - ui(8, group_start_code); + ui(8, group_start_code); - ui(25, time_code); - ui(1, closed_gop); - ui(1, broken_link); + ui(25, time_code); + ui(1, closed_gop); + ui(1, broken_link); - return 0; + return 0; } static int FUNC(extra_information)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawExtraInformation *current, - const char *element_name, const char *marker_name) { - int err; - size_t k; + MPEG2RawExtraInformation *current, + const char *element_name, const char *marker_name) +{ + int err; + size_t k; #ifdef READ - GetBitContext start = *rw; - uint8_t bit; - - for(k = 0; nextbits(1, 1, bit); k++) - skip_bits(rw, 1 + 8); - current->extra_information_length = k; - if(k > 0) { - *rw = start; - current->extra_information_ref = - av_buffer_allocz(k + AV_INPUT_BUFFER_PADDING_SIZE); - if(!current->extra_information_ref) - return AVERROR(ENOMEM); - current->extra_information = current->extra_information_ref->data; - } + GetBitContext start = *rw; + uint8_t bit; + + for (k = 0; nextbits(1, 1, bit); k++) + skip_bits(rw, 1 + 8); + current->extra_information_length = k; + if (k > 0) { + *rw = start; + current->extra_information_ref = + av_buffer_allocz(k + AV_INPUT_BUFFER_PADDING_SIZE); + if (!current->extra_information_ref) + return AVERROR(ENOMEM); + current->extra_information = current->extra_information_ref->data; + } #endif - for(k = 0; k < current->extra_information_length; k++) { - bit(marker_name, 1); - xuia(8, element_name, - current->extra_information[k], 0, 255, 1, k); - } + for (k = 0; k < current->extra_information_length; k++) { + bit(marker_name, 1); + xuia(8, element_name, + current->extra_information[k], 0, 255, 1, k); + } - bit(marker_name, 0); + bit(marker_name, 0); - return 0; + return 0; } static int FUNC(picture_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawPictureHeader *current) { - int err; + MPEG2RawPictureHeader *current) +{ + int err; - HEADER("Picture Header"); + HEADER("Picture Header"); 
- ui(8, picture_start_code); + ui(8, picture_start_code); - ui(10, temporal_reference); - uir(3, picture_coding_type); - ui(16, vbv_delay); + ui(10, temporal_reference); + uir(3, picture_coding_type); + ui(16, vbv_delay); - if(current->picture_coding_type == 2 || - current->picture_coding_type == 3) { - ui(1, full_pel_forward_vector); - ui(3, forward_f_code); - } + if (current->picture_coding_type == 2 || + current->picture_coding_type == 3) { + ui(1, full_pel_forward_vector); + ui(3, forward_f_code); + } - if(current->picture_coding_type == 3) { - ui(1, full_pel_backward_vector); - ui(3, backward_f_code); - } + if (current->picture_coding_type == 3) { + ui(1, full_pel_backward_vector); + ui(3, backward_f_code); + } - CHECK(FUNC(extra_information)(ctx, rw, ¤t->extra_information_picture, - "extra_information_picture[k]", "extra_bit_picture")); + CHECK(FUNC(extra_information)(ctx, rw, ¤t->extra_information_picture, + "extra_information_picture[k]", "extra_bit_picture")); - return 0; + return 0; } static int FUNC(picture_coding_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawPictureCodingExtension *current) { - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err; - - HEADER("Picture Coding Extension"); - - uir(4, f_code[0][0]); - uir(4, f_code[0][1]); - uir(4, f_code[1][0]); - uir(4, f_code[1][1]); - - ui(2, intra_dc_precision); - ui(2, picture_structure); - ui(1, top_field_first); - ui(1, frame_pred_frame_dct); - ui(1, concealment_motion_vectors); - ui(1, q_scale_type); - ui(1, intra_vlc_format); - ui(1, alternate_scan); - ui(1, repeat_first_field); - ui(1, chroma_420_type); - ui(1, progressive_frame); - - if(mpeg2->progressive_sequence) { - if(current->repeat_first_field) { - if(current->top_field_first) - mpeg2->number_of_frame_centre_offsets = 3; - else - mpeg2->number_of_frame_centre_offsets = 2; + MPEG2RawPictureCodingExtension *current) +{ + CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; + int err; + + HEADER("Picture Coding 
Extension"); + + uir(4, f_code[0][0]); + uir(4, f_code[0][1]); + uir(4, f_code[1][0]); + uir(4, f_code[1][1]); + + ui(2, intra_dc_precision); + ui(2, picture_structure); + ui(1, top_field_first); + ui(1, frame_pred_frame_dct); + ui(1, concealment_motion_vectors); + ui(1, q_scale_type); + ui(1, intra_vlc_format); + ui(1, alternate_scan); + ui(1, repeat_first_field); + ui(1, chroma_420_type); + ui(1, progressive_frame); + + if (mpeg2->progressive_sequence) { + if (current->repeat_first_field) { + if (current->top_field_first) + mpeg2->number_of_frame_centre_offsets = 3; + else + mpeg2->number_of_frame_centre_offsets = 2; + } else { + mpeg2->number_of_frame_centre_offsets = 1; + } + } else { + if (current->picture_structure == 1 || // Top field. + current->picture_structure == 2) { // Bottom field. + mpeg2->number_of_frame_centre_offsets = 1; + } else { + if (current->repeat_first_field) + mpeg2->number_of_frame_centre_offsets = 3; + else + mpeg2->number_of_frame_centre_offsets = 2; + } } - else { - mpeg2->number_of_frame_centre_offsets = 1; - } - } - else { - if(current->picture_structure == 1 || // Top field. - current->picture_structure == 2) { // Bottom field. 
- mpeg2->number_of_frame_centre_offsets = 1; - } - else { - if(current->repeat_first_field) - mpeg2->number_of_frame_centre_offsets = 3; - else - mpeg2->number_of_frame_centre_offsets = 2; + + ui(1, composite_display_flag); + if (current->composite_display_flag) { + ui(1, v_axis); + ui(3, field_sequence); + ui(1, sub_carrier); + ui(7, burst_amplitude); + ui(8, sub_carrier_phase); } - } - - ui(1, composite_display_flag); - if(current->composite_display_flag) { - ui(1, v_axis); - ui(3, field_sequence); - ui(1, sub_carrier); - ui(7, burst_amplitude); - ui(8, sub_carrier_phase); - } - - return 0; + + return 0; } static int FUNC(quant_matrix_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawQuantMatrixExtension *current) { - int err, i; - - HEADER("Quant Matrix Extension"); - - ui(1, load_intra_quantiser_matrix); - if(current->load_intra_quantiser_matrix) { - for(i = 0; i < 64; i++) - uirs(8, intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_non_intra_quantiser_matrix); - if(current->load_non_intra_quantiser_matrix) { - for(i = 0; i < 64; i++) - uirs(8, non_intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_chroma_intra_quantiser_matrix); - if(current->load_chroma_intra_quantiser_matrix) { - for(i = 0; i < 64; i++) - uirs(8, intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_chroma_non_intra_quantiser_matrix); - if(current->load_chroma_non_intra_quantiser_matrix) { - for(i = 0; i < 64; i++) - uirs(8, chroma_non_intra_quantiser_matrix[i], 1, i); - } - - return 0; -} + MPEG2RawQuantMatrixExtension *current) +{ + int err, i; -static int FUNC(picture_display_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawPictureDisplayExtension *current) { - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err, i; + HEADER("Quant Matrix Extension"); - HEADER("Picture Display Extension"); + ui(1, load_intra_quantiser_matrix); + if (current->load_intra_quantiser_matrix) { + for (i = 0; i < 64; i++) + uirs(8, intra_quantiser_matrix[i], 1, i); + } 
- for(i = 0; i < mpeg2->number_of_frame_centre_offsets; i++) { - sis(16, frame_centre_horizontal_offset[i], 1, i); - marker_bit(); - sis(16, frame_centre_vertical_offset[i], 1, i); - marker_bit(); - } + ui(1, load_non_intra_quantiser_matrix); + if (current->load_non_intra_quantiser_matrix) { + for (i = 0; i < 64; i++) + uirs(8, non_intra_quantiser_matrix[i], 1, i); + } + + ui(1, load_chroma_intra_quantiser_matrix); + if (current->load_chroma_intra_quantiser_matrix) { + for (i = 0; i < 64; i++) + uirs(8, intra_quantiser_matrix[i], 1, i); + } + + ui(1, load_chroma_non_intra_quantiser_matrix); + if (current->load_chroma_non_intra_quantiser_matrix) { + for (i = 0; i < 64; i++) + uirs(8, chroma_non_intra_quantiser_matrix[i], 1, i); + } - return 0; + return 0; +} + +static int FUNC(picture_display_extension)(CodedBitstreamContext *ctx, RWContext *rw, + MPEG2RawPictureDisplayExtension *current) +{ + CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; + int err, i; + + HEADER("Picture Display Extension"); + + for (i = 0; i < mpeg2->number_of_frame_centre_offsets; i++) { + sis(16, frame_centre_horizontal_offset[i], 1, i); + marker_bit(); + sis(16, frame_centre_vertical_offset[i], 1, i); + marker_bit(); + } + + return 0; } static int FUNC(extension_data)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawExtensionData *current) { - int err; - - HEADER("Extension Data"); - - ui(8, extension_start_code); - ui(4, extension_start_code_identifier); - - switch(current->extension_start_code_identifier) { - case MPEG2_EXTENSION_SEQUENCE: - return FUNC(sequence_extension)(ctx, rw, &current->data.sequence); - case MPEG2_EXTENSION_SEQUENCE_DISPLAY: - return FUNC(sequence_display_extension)(ctx, rw, &current->data.sequence_display); - case MPEG2_EXTENSION_QUANT_MATRIX: - return FUNC(quant_matrix_extension)(ctx, rw, &current->data.quant_matrix); - case MPEG2_EXTENSION_PICTURE_DISPLAY: - return FUNC(picture_display_extension)(ctx, rw, &current->data.picture_display); - case MPEG2_EXTENSION_PICTURE_CODING: - 
return FUNC(picture_coding_extension)(ctx, rw, &current->data.picture_coding); - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Extension ID %d not supported.\n", - current->extension_start_code_identifier); - return AVERROR_PATCHWELCOME; - } + MPEG2RawExtensionData *current) +{ + int err; + + HEADER("Extension Data"); + + ui(8, extension_start_code); + ui(4, extension_start_code_identifier); + + switch (current->extension_start_code_identifier) { + case MPEG2_EXTENSION_SEQUENCE: + return FUNC(sequence_extension) + (ctx, rw, &current->data.sequence); + case MPEG2_EXTENSION_SEQUENCE_DISPLAY: + return FUNC(sequence_display_extension) + (ctx, rw, &current->data.sequence_display); + case MPEG2_EXTENSION_QUANT_MATRIX: + return FUNC(quant_matrix_extension) + (ctx, rw, &current->data.quant_matrix); + case MPEG2_EXTENSION_PICTURE_DISPLAY: + return FUNC(picture_display_extension) + (ctx, rw, &current->data.picture_display); + case MPEG2_EXTENSION_PICTURE_CODING: + return FUNC(picture_coding_extension) + (ctx, rw, &current->data.picture_coding); + default: + av_log(ctx->log_ctx, AV_LOG_ERROR, "Extension ID %d not supported.\n", + current->extension_start_code_identifier); + return AVERROR_PATCHWELCOME; + } } static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSliceHeader *current) { - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err; + MPEG2RawSliceHeader *current) +{ + CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; + int err; - HEADER("Slice Header"); + HEADER("Slice Header"); - ui(8, slice_vertical_position); + ui(8, slice_vertical_position); - if(mpeg2->vertical_size > 2800) - ui(3, slice_vertical_position_extension); - if(mpeg2->scalable) { - if(mpeg2->scalable_mode == 0) - ui(7, priority_breakpoint); - } + if (mpeg2->vertical_size > 2800) + ui(3, slice_vertical_position_extension); + if (mpeg2->scalable) { + if (mpeg2->scalable_mode == 0) + ui(7, priority_breakpoint); + } - uir(5, quantiser_scale_code); + uir(5, quantiser_scale_code); - if(nextbits(1, 1, 
current->slice_extension_flag)) { - ui(1, slice_extension_flag); - ui(1, intra_slice); - ui(1, slice_picture_id_enable); - ui(6, slice_picture_id); - } + if (nextbits(1, 1, current->slice_extension_flag)) { + ui(1, slice_extension_flag); + ui(1, intra_slice); + ui(1, slice_picture_id_enable); + ui(6, slice_picture_id); + } - CHECK(FUNC(extra_information)(ctx, rw, &current->extra_information_slice, - "extra_information_slice[k]", "extra_bit_slice")); + CHECK(FUNC(extra_information)(ctx, rw, &current->extra_information_slice, + "extra_information_slice[k]", "extra_bit_slice")); - return 0; + return 0; } static int FUNC(sequence_end)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceEnd *current) { - int err; + MPEG2RawSequenceEnd *current) +{ + int err; - HEADER("Sequence End"); + HEADER("Sequence End"); - ui(8, sequence_end_code); + ui(8, sequence_end_code); - return 0; + return 0; } diff --git a/third-party/cbs/cbs_sei.c b/third-party/cbs/cbs_sei.c index c184d67d41d..ee89e79ace7 100644 --- a/third-party/cbs/cbs_sei.c +++ b/third-party/cbs/cbs_sei.c @@ -16,340 +16,355 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include "cbs/cbs_sei.h" +// [manual] Changed include path #include "cbs/cbs.h" +#include "cbs_internal.h" #include "cbs/cbs_h264.h" #include "cbs/cbs_h265.h" +#include "cbs/cbs_sei.h" -#include "cbs_internal.h" - -static void cbs_free_user_data_registered(void *opaque, uint8_t *data) { - SEIRawUserDataRegistered *udr = (SEIRawUserDataRegistered *)data; - av_buffer_unref(&udr->data_ref); - av_free(udr); +static void cbs_free_user_data_registered(void *opaque, uint8_t *data) +{ + SEIRawUserDataRegistered *udr = (SEIRawUserDataRegistered*)data; + av_buffer_unref(&udr->data_ref); + av_free(udr); } -static void cbs_free_user_data_unregistered(void *opaque, uint8_t *data) { - SEIRawUserDataUnregistered *udu = (SEIRawUserDataUnregistered *)data; - av_buffer_unref(&udu->data_ref); - av_free(udu); +static void 
cbs_free_user_data_unregistered(void *opaque, uint8_t *data) +{ + SEIRawUserDataUnregistered *udu = (SEIRawUserDataUnregistered*)data; + av_buffer_unref(&udu->data_ref); + av_free(udu); } int ff_cbs_sei_alloc_message_payload(SEIRawMessage *message, - const SEIMessageTypeDescriptor *desc) { - void (*free_func)(void *, uint8_t *); - - av_assert0(message->payload == NULL && - message->payload_ref == NULL); - message->payload_type = desc->type; - - if(desc->type == SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35) - free_func = &cbs_free_user_data_registered; - else if(desc->type == SEI_TYPE_USER_DATA_UNREGISTERED) - free_func = &cbs_free_user_data_unregistered; - else - free_func = NULL; - - if(free_func) { - message->payload = av_mallocz(desc->size); - if(!message->payload) - return AVERROR(ENOMEM); - message->payload_ref = - av_buffer_create(message->payload, desc->size, - free_func, NULL, 0); - } - else { - message->payload_ref = av_buffer_alloc(desc->size); - } - if(!message->payload_ref) { - av_freep(&message->payload); - return AVERROR(ENOMEM); - } - message->payload = message->payload_ref->data; - - return 0; + const SEIMessageTypeDescriptor *desc) +{ + void (*free_func)(void*, uint8_t*); + + av_assert0(message->payload == NULL && + message->payload_ref == NULL); + message->payload_type = desc->type; + + if (desc->type == SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35) + free_func = &cbs_free_user_data_registered; + else if (desc->type == SEI_TYPE_USER_DATA_UNREGISTERED) + free_func = &cbs_free_user_data_unregistered; + else + free_func = NULL; + + if (free_func) { + message->payload = av_mallocz(desc->size); + if (!message->payload) + return AVERROR(ENOMEM); + message->payload_ref = + av_buffer_create(message->payload, desc->size, + free_func, NULL, 0); + } else { + message->payload_ref = av_buffer_alloc(desc->size); + } + if (!message->payload_ref) { + av_freep(&message->payload); + return AVERROR(ENOMEM); + } + message->payload = message->payload_ref->data; + + return 0; } 
-int ff_cbs_sei_list_add(SEIRawMessageList *list) { - void *ptr; - int old_count = list->nb_messages_allocated; +int ff_cbs_sei_list_add(SEIRawMessageList *list) +{ + void *ptr; + int old_count = list->nb_messages_allocated; - av_assert0(list->nb_messages <= old_count); - if(list->nb_messages + 1 > old_count) { - int new_count = 2 * old_count + 1; + av_assert0(list->nb_messages <= old_count); + if (list->nb_messages + 1 > old_count) { + int new_count = 2 * old_count + 1; - ptr = av_realloc_array(list->messages, - new_count, sizeof(*list->messages)); - if(!ptr) - return AVERROR(ENOMEM); + ptr = av_realloc_array(list->messages, + new_count, sizeof(*list->messages)); + if (!ptr) + return AVERROR(ENOMEM); - list->messages = ptr; - list->nb_messages_allocated = new_count; + list->messages = ptr; + list->nb_messages_allocated = new_count; - // Zero the newly-added entries. - memset(list->messages + old_count, 0, - (new_count - old_count) * sizeof(*list->messages)); - } - ++list->nb_messages; - return 0; + // Zero the newly-added entries. 
+ memset(list->messages + old_count, 0, + (new_count - old_count) * sizeof(*list->messages)); + } + ++list->nb_messages; + return 0; } -void ff_cbs_sei_free_message_list(SEIRawMessageList *list) { - for(int i = 0; i < list->nb_messages; i++) { - SEIRawMessage *message = &list->messages[i]; - av_buffer_unref(&message->payload_ref); - av_buffer_unref(&message->extension_data_ref); - } - av_free(list->messages); +void ff_cbs_sei_free_message_list(SEIRawMessageList *list) +{ + for (int i = 0; i < list->nb_messages; i++) { + SEIRawMessage *message = &list->messages[i]; + av_buffer_unref(&message->payload_ref); + av_buffer_unref(&message->extension_data_ref); + } + av_free(list->messages); } static int cbs_sei_get_unit(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - int prefix, - CodedBitstreamUnit **sei_unit) { - CodedBitstreamUnit *unit; - int sei_type, highest_vcl_type, err, i, position; - - switch(ctx->codec->codec_id) { - case AV_CODEC_ID_H264: - // (We can ignore auxiliary slices because we only have prefix - // SEI in H.264 and an auxiliary picture must always follow a - // primary picture.) - highest_vcl_type = H264_NAL_IDR_SLICE; - if(prefix) - sei_type = H264_NAL_SEI; - else - return AVERROR(EINVAL); - break; - case AV_CODEC_ID_H265: - highest_vcl_type = HEVC_NAL_RSV_VCL31; - if(prefix) - sei_type = HEVC_NAL_SEI_PREFIX; - else - sei_type = HEVC_NAL_SEI_SUFFIX; - break; - default: - return AVERROR(EINVAL); - } - - // Find an existing SEI NAL unit of the right type. 
- unit = NULL; - for(i = 0; i < au->nb_units; i++) { - if(au->units[i].type == sei_type) { - unit = &au->units[i]; - break; + CodedBitstreamFragment *au, + int prefix, + CodedBitstreamUnit **sei_unit) +{ + CodedBitstreamUnit *unit; + int sei_type, highest_vcl_type, err, i, position; + + switch (ctx->codec->codec_id) { + case AV_CODEC_ID_H264: + // (We can ignore auxiliary slices because we only have prefix + // SEI in H.264 and an auxiliary picture must always follow a + // primary picture.) + highest_vcl_type = H264_NAL_IDR_SLICE; + if (prefix) + sei_type = H264_NAL_SEI; + else + return AVERROR(EINVAL); + break; + case AV_CODEC_ID_H265: + highest_vcl_type = HEVC_NAL_RSV_VCL31; + if (prefix) + sei_type = HEVC_NAL_SEI_PREFIX; + else + sei_type = HEVC_NAL_SEI_SUFFIX; + break; + default: + return AVERROR(EINVAL); } - } - - if(unit) { - *sei_unit = unit; - return 0; - } - // Need to add a new SEI NAL unit ... - if(prefix) { - // ... before the first VCL NAL unit. - for(i = 0; i < au->nb_units; i++) { - if(au->units[i].type < highest_vcl_type) - break; + // Find an existing SEI NAL unit of the right type. + unit = NULL; + for (i = 0; i < au->nb_units; i++) { + if (au->units[i].type == sei_type) { + unit = &au->units[i]; + break; + } } - position = i; - } - else { - // ... after the last VCL NAL unit. - for(i = au->nb_units - 1; i >= 0; i--) { - if(au->units[i].type < highest_vcl_type) - break; + + if (unit) { + *sei_unit = unit; + return 0; } - if(i < 0) { - // No VCL units; just put it at the end. - position = au->nb_units; + + // Need to add a new SEI NAL unit ... + if (prefix) { + // ... before the first VCL NAL unit. + for (i = 0; i < au->nb_units; i++) { + if (au->units[i].type < highest_vcl_type) + break; + } + position = i; + } else { + // ... after the last VCL NAL unit. + for (i = au->nb_units - 1; i >= 0; i--) { + if (au->units[i].type < highest_vcl_type) + break; + } + if (i < 0) { + // No VCL units; just put it at the end. 
+ position = au->nb_units; + } else { + position = i + 1; + } } - else { - position = i + 1; + + err = ff_cbs_insert_unit_content(au, position, sei_type, + NULL, NULL); + if (err < 0) + return err; + unit = &au->units[position]; + unit->type = sei_type; + + err = ff_cbs_alloc_unit_content2(ctx, unit); + if (err < 0) + return err; + + switch (ctx->codec->codec_id) { + case AV_CODEC_ID_H264: + { + H264RawSEI sei = { + .nal_unit_header = { + .nal_ref_idc = 0, + .nal_unit_type = sei_type, + }, + }; + memcpy(unit->content, &sei, sizeof(sei)); + } + break; + case AV_CODEC_ID_H265: + { + H265RawSEI sei = { + .nal_unit_header = { + .nal_unit_type = sei_type, + .nuh_layer_id = 0, + .nuh_temporal_id_plus1 = 1, + }, + }; + memcpy(unit->content, &sei, sizeof(sei)); + } + break; + default: + av_assert0(0); } - } - - err = ff_cbs_insert_unit_content(au, position, sei_type, - NULL, NULL); - if(err < 0) - return err; - unit = &au->units[position]; - unit->type = sei_type; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if(err < 0) - return err; - - switch(ctx->codec->codec_id) { - case AV_CODEC_ID_H264: { - H264RawSEI sei = { - .nal_unit_header = { - .nal_ref_idc = 0, - .nal_unit_type = sei_type, - }, - }; - memcpy(unit->content, &sei, sizeof(sei)); - } break; - case AV_CODEC_ID_H265: { - H265RawSEI sei = { - .nal_unit_header = { - .nal_unit_type = sei_type, - .nuh_layer_id = 0, - .nuh_temporal_id_plus1 = 1, - }, - }; - memcpy(unit->content, &sei, sizeof(sei)); - } break; - default: - av_assert0(0); - } - - *sei_unit = unit; - return 0; + + *sei_unit = unit; + return 0; } static int cbs_sei_get_message_list(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - SEIRawMessageList **list) { - switch(ctx->codec->codec_id) { - case AV_CODEC_ID_H264: { - H264RawSEI *sei = unit->content; - if(unit->type != H264_NAL_SEI) - return AVERROR(EINVAL); - *list = &sei->message_list; - } break; - case AV_CODEC_ID_H265: { - H265RawSEI *sei = unit->content; - if(unit->type != 
HEVC_NAL_SEI_PREFIX && - unit->type != HEVC_NAL_SEI_SUFFIX) - return AVERROR(EINVAL); - *list = &sei->message_list; - } break; - default: - return AVERROR(EINVAL); - } - - return 0; -} + CodedBitstreamUnit *unit, + SEIRawMessageList **list) +{ + switch (ctx->codec->codec_id) { + case AV_CODEC_ID_H264: + { + H264RawSEI *sei = unit->content; + if (unit->type != H264_NAL_SEI) + return AVERROR(EINVAL); + *list = &sei->message_list; + } + break; + case AV_CODEC_ID_H265: + { + H265RawSEI *sei = unit->content; + if (unit->type != HEVC_NAL_SEI_PREFIX && + unit->type != HEVC_NAL_SEI_SUFFIX) + return AVERROR(EINVAL); + *list = &sei->message_list; + } + break; + default: + return AVERROR(EINVAL); + } -int ff_cbs_sei_add_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - int prefix, - uint32_t payload_type, - void *payload_data, - AVBufferRef *payload_buf) { - const SEIMessageTypeDescriptor *desc; - CodedBitstreamUnit *unit; - SEIRawMessageList *list; - SEIRawMessage *message; - AVBufferRef *payload_ref; - int err; - - desc = ff_cbs_sei_find_type(ctx, payload_type); - if(!desc) - return AVERROR(EINVAL); - - // Find an existing SEI unit or make a new one to add to. - err = cbs_sei_get_unit(ctx, au, prefix, &unit); - if(err < 0) - return err; - - // Find the message list inside the codec-dependent unit. - err = cbs_sei_get_message_list(ctx, unit, &list); - if(err < 0) - return err; - - // Add a new message to the message list. 
- err = ff_cbs_sei_list_add(list); - if(err < 0) - return err; - - if(payload_buf) { - payload_ref = av_buffer_ref(payload_buf); - if(!payload_ref) - return AVERROR(ENOMEM); - } - else { - payload_ref = NULL; - } - - message = &list->messages[list->nb_messages - 1]; - - message->payload_type = payload_type; - message->payload = payload_data; - message->payload_ref = payload_ref; - - return 0; + return 0; } -int ff_cbs_sei_find_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type, - SEIRawMessage **iter) { - int err, i, j, found; - - found = 0; - for(i = 0; i < au->nb_units; i++) { - CodedBitstreamUnit *unit = &au->units[i]; +int ff_cbs_sei_add_message(CodedBitstreamContext *ctx, + CodedBitstreamFragment *au, + int prefix, + uint32_t payload_type, + void *payload_data, + AVBufferRef *payload_buf) +{ + const SEIMessageTypeDescriptor *desc; + CodedBitstreamUnit *unit; SEIRawMessageList *list; + SEIRawMessage *message; + AVBufferRef *payload_ref; + int err; + + desc = ff_cbs_sei_find_type(ctx, payload_type); + if (!desc) + return AVERROR(EINVAL); + // Find an existing SEI unit or make a new one to add to. + err = cbs_sei_get_unit(ctx, au, prefix, &unit); + if (err < 0) + return err; + + // Find the message list inside the codec-dependent unit. err = cbs_sei_get_message_list(ctx, unit, &list); - if(err < 0) - continue; + if (err < 0) + return err; + + // Add a new message to the message list. 
+ err = ff_cbs_sei_list_add(list); + if (err < 0) + return err; + + if (payload_buf) { + payload_ref = av_buffer_ref(payload_buf); + if (!payload_ref) + return AVERROR(ENOMEM); + } else { + payload_ref = NULL; + } + + message = &list->messages[list->nb_messages - 1]; + + message->payload_type = payload_type; + message->payload = payload_data; + message->payload_ref = payload_ref; - for(j = 0; j < list->nb_messages; j++) { - SEIRawMessage *message = &list->messages[j]; + return 0; +} - if(message->payload_type == payload_type) { - if(!*iter || found) { - *iter = message; - return 0; +int ff_cbs_sei_find_message(CodedBitstreamContext *ctx, + CodedBitstreamFragment *au, + uint32_t payload_type, + SEIRawMessage **iter) +{ + int err, i, j, found; + + found = 0; + for (i = 0; i < au->nb_units; i++) { + CodedBitstreamUnit *unit = &au->units[i]; + SEIRawMessageList *list; + + err = cbs_sei_get_message_list(ctx, unit, &list); + if (err < 0) + continue; + + for (j = 0; j < list->nb_messages; j++) { + SEIRawMessage *message = &list->messages[j]; + + if (message->payload_type == payload_type) { + if (!*iter || found) { + *iter = message; + return 0; + } + if (message == *iter) + found = 1; + } } - if(message == *iter) - found = 1; - } } - } - return AVERROR(ENOENT); + return AVERROR(ENOENT); } static void cbs_sei_delete_message(SEIRawMessageList *list, - int position) { - SEIRawMessage *message; + int position) +{ + SEIRawMessage *message; - av_assert0(0 <= position && position < list->nb_messages); + av_assert0(0 <= position && position < list->nb_messages); - message = &list->messages[position]; - av_buffer_unref(&message->payload_ref); - av_buffer_unref(&message->extension_data_ref); + message = &list->messages[position]; + av_buffer_unref(&message->payload_ref); + av_buffer_unref(&message->extension_data_ref); - --list->nb_messages; + --list->nb_messages; - if(list->nb_messages > 0) { - memmove(list->messages + position, - list->messages + position + 1, - 
(list->nb_messages - position) * sizeof(*list->messages)); - } + if (list->nb_messages > 0) { + memmove(list->messages + position, + list->messages + position + 1, + (list->nb_messages - position) * sizeof(*list->messages)); + } } void ff_cbs_sei_delete_message_type(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type) { - int err, i, j; - - for(i = 0; i < au->nb_units; i++) { - CodedBitstreamUnit *unit = &au->units[i]; - SEIRawMessageList *list; - - err = cbs_sei_get_message_list(ctx, unit, &list); - if(err < 0) - continue; - - for(j = list->nb_messages - 1; j >= 0; j--) { - if(list->messages[j].payload_type == payload_type) - cbs_sei_delete_message(list, j); + CodedBitstreamFragment *au, + uint32_t payload_type) +{ + int err, i, j; + + for (i = 0; i < au->nb_units; i++) { + CodedBitstreamUnit *unit = &au->units[i]; + SEIRawMessageList *list; + + err = cbs_sei_get_message_list(ctx, unit, &list); + if (err < 0) + continue; + + for (j = list->nb_messages - 1; j >= 0; j--) { + if (list->messages[j].payload_type == payload_type) + cbs_sei_delete_message(list, j); + } } - } } diff --git a/third-party/cbs/cbs_sei_syntax_template.c b/third-party/cbs/cbs_sei_syntax_template.c index e9688013adf..0ef7b42ed9c 100644 --- a/third-party/cbs/cbs_sei_syntax_template.c +++ b/third-party/cbs/cbs_sei_syntax_template.c @@ -16,295 +16,307 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -static int FUNC(filler_payload)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawFillerPayload *current, SEIMessageState *state) { - int err, i; +static int FUNC(filler_payload) + (CodedBitstreamContext *ctx, RWContext *rw, + SEIRawFillerPayload *current, SEIMessageState *state) +{ + int err, i; - HEADER("Filler Payload"); + HEADER("Filler Payload"); #ifdef READ - current->payload_size = state->payload_size; + current->payload_size = state->payload_size; #endif - for(i = 0; i < current->payload_size; i++) - fixed(8, ff_byte, 0xff); + 
for (i = 0; i < current->payload_size; i++) + fixed(8, ff_byte, 0xff); - return 0; + return 0; } -static int FUNC(user_data_registered)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawUserDataRegistered *current, SEIMessageState *state) { - int err, i, j; +static int FUNC(user_data_registered) + (CodedBitstreamContext *ctx, RWContext *rw, + SEIRawUserDataRegistered *current, SEIMessageState *state) +{ + int err, i, j; - HEADER("User Data Registered ITU-T T.35"); + HEADER("User Data Registered ITU-T T.35"); - u(8, itu_t_t35_country_code, 0x00, 0xff); - if(current->itu_t_t35_country_code != 0xff) - i = 1; - else { - u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff); - i = 2; - } + u(8, itu_t_t35_country_code, 0x00, 0xff); + if (current->itu_t_t35_country_code != 0xff) + i = 1; + else { + u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff); + i = 2; + } #ifdef READ - if(state->payload_size < i) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Invalid SEI user data registered payload.\n"); - return AVERROR_INVALIDDATA; - } - current->data_length = state->payload_size - i; + if (state->payload_size < i) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "Invalid SEI user data registered payload.\n"); + return AVERROR_INVALIDDATA; + } + current->data_length = state->payload_size - i; #endif - allocate(current->data, current->data_length); - for(j = 0; j < current->data_length; j++) - xu(8, itu_t_t35_payload_byte[], current->data[j], 0x00, 0xff, 1, i + j); + allocate(current->data, current->data_length); + for (j = 0; j < current->data_length; j++) + xu(8, itu_t_t35_payload_byte[], current->data[j], 0x00, 0xff, 1, i + j); - return 0; + return 0; } -static int FUNC(user_data_unregistered)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawUserDataUnregistered *current, SEIMessageState *state) { - int err, i; +static int FUNC(user_data_unregistered) + (CodedBitstreamContext *ctx, RWContext *rw, + SEIRawUserDataUnregistered *current, SEIMessageState *state) +{ + int err, i; - 
HEADER("User Data Unregistered"); + HEADER("User Data Unregistered"); #ifdef READ - if(state->payload_size < 16) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Invalid SEI user data unregistered payload.\n"); - return AVERROR_INVALIDDATA; - } - current->data_length = state->payload_size - 16; + if (state->payload_size < 16) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "Invalid SEI user data unregistered payload.\n"); + return AVERROR_INVALIDDATA; + } + current->data_length = state->payload_size - 16; #endif - for(i = 0; i < 16; i++) - us(8, uuid_iso_iec_11578[i], 0x00, 0xff, 1, i); + for (i = 0; i < 16; i++) + us(8, uuid_iso_iec_11578[i], 0x00, 0xff, 1, i); - allocate(current->data, current->data_length); + allocate(current->data, current->data_length); - for(i = 0; i < current->data_length; i++) - xu(8, user_data_payload_byte[i], current->data[i], 0x00, 0xff, 1, i); + for (i = 0; i < current->data_length; i++) + xu(8, user_data_payload_byte[i], current->data[i], 0x00, 0xff, 1, i); - return 0; + return 0; } -static int FUNC(mastering_display_colour_volume)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawMasteringDisplayColourVolume *current, SEIMessageState *state) { - int err, c; +static int FUNC(mastering_display_colour_volume) + (CodedBitstreamContext *ctx, RWContext *rw, + SEIRawMasteringDisplayColourVolume *current, SEIMessageState *state) +{ + int err, c; - HEADER("Mastering Display Colour Volume"); + HEADER("Mastering Display Colour Volume"); - for(c = 0; c < 3; c++) { - ubs(16, display_primaries_x[c], 1, c); - ubs(16, display_primaries_y[c], 1, c); - } + for (c = 0; c < 3; c++) { + ubs(16, display_primaries_x[c], 1, c); + ubs(16, display_primaries_y[c], 1, c); + } - ub(16, white_point_x); - ub(16, white_point_y); + ub(16, white_point_x); + ub(16, white_point_y); - ub(32, max_display_mastering_luminance); - ub(32, min_display_mastering_luminance); + ub(32, max_display_mastering_luminance); + ub(32, min_display_mastering_luminance); - return 0; + return 0; } 
-static int FUNC(content_light_level_info)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawContentLightLevelInfo *current, SEIMessageState *state) { - int err; +static int FUNC(content_light_level_info) + (CodedBitstreamContext *ctx, RWContext *rw, + SEIRawContentLightLevelInfo *current, SEIMessageState *state) +{ + int err; - HEADER("Content Light Level Information"); + HEADER("Content Light Level Information"); - ub(16, max_content_light_level); - ub(16, max_pic_average_light_level); + ub(16, max_content_light_level); + ub(16, max_pic_average_light_level); - return 0; + return 0; } -static int FUNC(alternative_transfer_characteristics)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawAlternativeTransferCharacteristics *current, - SEIMessageState *state) { - int err; +static int FUNC(alternative_transfer_characteristics) + (CodedBitstreamContext *ctx, RWContext *rw, + SEIRawAlternativeTransferCharacteristics *current, + SEIMessageState *state) +{ + int err; - HEADER("Alternative Transfer Characteristics"); + HEADER("Alternative Transfer Characteristics"); - ub(8, preferred_transfer_characteristics); + ub(8, preferred_transfer_characteristics); - return 0; + return 0; } static int FUNC(message)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawMessage *current) { - const SEIMessageTypeDescriptor *desc; - int err, i; - - desc = ff_cbs_sei_find_type(ctx, current->payload_type); - if(desc) { - SEIMessageState state = { - .payload_type = current->payload_type, - .payload_size = current->payload_size, - .extension_present = current->extension_bit_length > 0, - }; - int start_position, current_position, bits_written; + SEIRawMessage *current) +{ + const SEIMessageTypeDescriptor *desc; + int err, i; + + desc = ff_cbs_sei_find_type(ctx, current->payload_type); + if (desc) { + SEIMessageState state = { + .payload_type = current->payload_type, + .payload_size = current->payload_size, + .extension_present = current->extension_bit_length > 0, + }; + int start_position, 
current_position, bits_written; #ifdef READ - CHECK(ff_cbs_sei_alloc_message_payload(current, desc)); + CHECK(ff_cbs_sei_alloc_message_payload(current, desc)); #endif - start_position = bit_position(rw); + start_position = bit_position(rw); - CHECK(desc->READWRITE(ctx, rw, current->payload, &state)); + CHECK(desc->READWRITE(ctx, rw, current->payload, &state)); - current_position = bit_position(rw); - bits_written = current_position - start_position; + current_position = bit_position(rw); + bits_written = current_position - start_position; - if(byte_alignment(rw) || state.extension_present || - bits_written < 8 * current->payload_size) { - size_t bits_left; + if (byte_alignment(rw) || state.extension_present || + bits_written < 8 * current->payload_size) { + size_t bits_left; #ifdef READ - GetBitContext tmp = *rw; - int trailing_bits, trailing_zero_bits; - - bits_left = 8 * current->payload_size - bits_written; - if(bits_left > 8) - skip_bits_long(&tmp, bits_left - 8); - trailing_bits = get_bits(&tmp, FFMIN(bits_left, 8)); - if(trailing_bits == 0) { - // The trailing bits must contain a bit_equal_to_one, so - // they can't all be zero. - return AVERROR_INVALIDDATA; - } - trailing_zero_bits = ff_ctz(trailing_bits); - current->extension_bit_length = - bits_left - 1 - trailing_zero_bits; + GetBitContext tmp = *rw; + int trailing_bits, trailing_zero_bits; + + bits_left = 8 * current->payload_size - bits_written; + if (bits_left > 8) + skip_bits_long(&tmp, bits_left - 8); + trailing_bits = get_bits(&tmp, FFMIN(bits_left, 8)); + if (trailing_bits == 0) { + // The trailing bits must contain a bit_equal_to_one, so + // they can't all be zero. 
+ return AVERROR_INVALIDDATA; + } + trailing_zero_bits = ff_ctz(trailing_bits); + current->extension_bit_length = + bits_left - 1 - trailing_zero_bits; #endif - if(current->extension_bit_length > 0) { - allocate(current->extension_data, - (current->extension_bit_length + 7) / 8); - - bits_left = current->extension_bit_length; - for(i = 0; bits_left > 0; i++) { - int length = FFMIN(bits_left, 8); - xu(length, reserved_payload_extension_data, - current->extension_data[i], - 0, MAX_UINT_BITS(length), 0); - bits_left -= length; + if (current->extension_bit_length > 0) { + allocate(current->extension_data, + (current->extension_bit_length + 7) / 8); + + bits_left = current->extension_bit_length; + for (i = 0; bits_left > 0; i++) { + int length = FFMIN(bits_left, 8); + xu(length, reserved_payload_extension_data, + current->extension_data[i], + 0, MAX_UINT_BITS(length), 0); + bits_left -= length; + } + } + + fixed(1, bit_equal_to_one, 1); + while (byte_alignment(rw)) + fixed(1, bit_equal_to_zero, 0); } - } - - fixed(1, bit_equal_to_one, 1); - while(byte_alignment(rw)) - fixed(1, bit_equal_to_zero, 0); - } #ifdef WRITE - current->payload_size = (put_bits_count(rw) - start_position) / 8; + current->payload_size = (put_bits_count(rw) - start_position) / 8; #endif - } - else { - uint8_t *data; + } else { + uint8_t *data; - allocate(current->payload, current->payload_size); - data = current->payload; + allocate(current->payload, current->payload_size); + data = current->payload; - for(i = 0; i < current->payload_size; i++) - xu(8, payload_byte[i], data[i], 0, 255, 1, i); - } + for (i = 0; i < current->payload_size; i++) + xu(8, payload_byte[i], data[i], 0, 255, 1, i); + } - return 0; + return 0; } static int FUNC(message_list)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawMessageList *current, int prefix) { - SEIRawMessage *message; - int err, k; + SEIRawMessageList *current, int prefix) +{ + SEIRawMessage *message; + int err, k; #ifdef READ - for(k = 0;; k++) { - 
uint32_t payload_type = 0; - uint32_t payload_size = 0; - uint32_t tmp; - GetBitContext payload_gbc; - - while(show_bits(rw, 8) == 0xff) { - fixed(8, ff_byte, 0xff); - payload_type += 255; - } - xu(8, last_payload_type_byte, tmp, 0, 254, 0); - payload_type += tmp; + for (k = 0;; k++) { + uint32_t payload_type = 0; + uint32_t payload_size = 0; + uint32_t tmp; + GetBitContext payload_gbc; + + while (show_bits(rw, 8) == 0xff) { + fixed(8, ff_byte, 0xff); + payload_type += 255; + } + xu(8, last_payload_type_byte, tmp, 0, 254, 0); + payload_type += tmp; - while(show_bits(rw, 8) == 0xff) { - fixed(8, ff_byte, 0xff); - payload_size += 255; - } - xu(8, last_payload_size_byte, tmp, 0, 254, 0); - payload_size += tmp; - - // There must be space remaining for both the payload and - // the trailing bits on the SEI NAL unit. - if(payload_size + 1 > get_bits_left(rw) / 8) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Invalid SEI message: payload_size too large " - "(%" PRIu32 " bytes).\n", - payload_size); - return AVERROR_INVALIDDATA; - } - CHECK(init_get_bits(&payload_gbc, rw->buffer, - get_bits_count(rw) + 8 * payload_size)); - skip_bits_long(&payload_gbc, get_bits_count(rw)); + while (show_bits(rw, 8) == 0xff) { + fixed(8, ff_byte, 0xff); + payload_size += 255; + } + xu(8, last_payload_size_byte, tmp, 0, 254, 0); + payload_size += tmp; + + // There must be space remaining for both the payload and + // the trailing bits on the SEI NAL unit. 
+ if (payload_size + 1 > get_bits_left(rw) / 8) { + av_log(ctx->log_ctx, AV_LOG_ERROR, + "Invalid SEI message: payload_size too large " + "(%"PRIu32" bytes).\n", payload_size); + return AVERROR_INVALIDDATA; + } + CHECK(init_get_bits(&payload_gbc, rw->buffer, + get_bits_count(rw) + 8 * payload_size)); + skip_bits_long(&payload_gbc, get_bits_count(rw)); - CHECK(ff_cbs_sei_list_add(current)); - message = ¤t->messages[k]; + CHECK(ff_cbs_sei_list_add(current)); + message = ¤t->messages[k]; - message->payload_type = payload_type; - message->payload_size = payload_size; + message->payload_type = payload_type; + message->payload_size = payload_size; - CHECK(FUNC(message)(ctx, &payload_gbc, message)); + CHECK(FUNC(message)(ctx, &payload_gbc, message)); - skip_bits_long(rw, 8 * payload_size); + skip_bits_long(rw, 8 * payload_size); - if(!cbs_h2645_read_more_rbsp_data(rw)) - break; - } + if (!cbs_h2645_read_more_rbsp_data(rw)) + break; + } #else - for(k = 0; k < current->nb_messages; k++) { - PutBitContext start_state; - uint32_t tmp; - int trace, i; - - message = ¤t->messages[k]; - - // We write the payload twice in order to find the size. Trace - // output is switched off for the first write. - trace = ctx->trace_enable; - ctx->trace_enable = 0; - - start_state = *rw; - for(i = 0; i < 2; i++) { - *rw = start_state; - - tmp = message->payload_type; - while(tmp >= 255) { - fixed(8, ff_byte, 0xff); - tmp -= 255; - } - xu(8, last_payload_type_byte, tmp, 0, 254, 0); - - tmp = message->payload_size; - while(tmp >= 255) { - fixed(8, ff_byte, 0xff); - tmp -= 255; - } - xu(8, last_payload_size_byte, tmp, 0, 254, 0); - - err = FUNC(message)(ctx, rw, message); - ctx->trace_enable = trace; - if(err < 0) - return err; + for (k = 0; k < current->nb_messages; k++) { + PutBitContext start_state; + uint32_t tmp; + int trace, i; + + message = ¤t->messages[k]; + + // We write the payload twice in order to find the size. Trace + // output is switched off for the first write. 
+ trace = ctx->trace_enable; + ctx->trace_enable = 0; + + start_state = *rw; + for (i = 0; i < 2; i++) { + *rw = start_state; + + tmp = message->payload_type; + while (tmp >= 255) { + fixed(8, ff_byte, 0xff); + tmp -= 255; + } + xu(8, last_payload_type_byte, tmp, 0, 254, 0); + + tmp = message->payload_size; + while (tmp >= 255) { + fixed(8, ff_byte, 0xff); + tmp -= 255; + } + xu(8, last_payload_size_byte, tmp, 0, 254, 0); + + err = FUNC(message)(ctx, rw, message); + ctx->trace_enable = trace; + if (err < 0) + return err; + } } - } #endif - return 0; + return 0; } diff --git a/third-party/cbs/cbs_vp9.c b/third-party/cbs/cbs_vp9.c index 74da5b03875..77f7aad5b80 100644 --- a/third-party/cbs/cbs_vp9.c +++ b/third-party/cbs/cbs_vp9.c @@ -16,319 +16,311 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -#include +#include "libavutil/avassert.h" #include "cbs/cbs.h" -#include "cbs/cbs_vp9.h" #include "cbs_internal.h" +#include "cbs/cbs_vp9.h" static int cbs_vp9_read_s(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, int32_t *write_to) { - uint32_t magnitude; - int position, sign; - int32_t value; - - if(ctx->trace_enable) - position = get_bits_count(gbc); - - if(get_bits_left(gbc) < width + 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid signed value at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; - } - - magnitude = get_bits(gbc, width); - sign = get_bits1(gbc); - value = sign ? -(int32_t)magnitude : magnitude; - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < width; i++) - bits[i] = magnitude >> (width - i - 1) & 1 ? '1' : '0'; - bits[i] = sign ? 
'1' : '0'; - bits[i + 1] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - *write_to = value; - return 0; -} + int width, const char *name, + const int *subscripts, int32_t *write_to) +{ + uint32_t magnitude; + int position, sign; + int32_t value; + + if (ctx->trace_enable) + position = get_bits_count(gbc); + + if (get_bits_left(gbc) < width + 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid signed value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } -static int cbs_vp9_write_s(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, int32_t value) { - uint32_t magnitude; - int sign; + magnitude = get_bits(gbc, width); + sign = get_bits1(gbc); + value = sign ? -(int32_t)magnitude : magnitude; - if(put_bits_left(pbc) < width + 1) - return AVERROR(ENOSPC); + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < width; i++) + bits[i] = magnitude >> (width - i - 1) & 1 ? '1' : '0'; + bits[i] = sign ? '1' : '0'; + bits[i + 1] = 0; - sign = value < 0; - magnitude = sign ? -value : value; + ff_cbs_trace_syntax_element(ctx, position, name, subscripts, + bits, value); + } - if(ctx->trace_enable) { - char bits[33]; - int i; - for(i = 0; i < width; i++) - bits[i] = magnitude >> (width - i - 1) & 1 ? '1' : '0'; - bits[i] = sign ? '1' : '0'; - bits[i + 1] = 0; + *write_to = value; + return 0; +} - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } +static int cbs_vp9_write_s(CodedBitstreamContext *ctx, PutBitContext *pbc, + int width, const char *name, + const int *subscripts, int32_t value) +{ + uint32_t magnitude; + int sign; + + if (put_bits_left(pbc) < width + 1) + return AVERROR(ENOSPC); + + sign = value < 0; + magnitude = sign ? -value : value; + + if (ctx->trace_enable) { + char bits[33]; + int i; + for (i = 0; i < width; i++) + bits[i] = magnitude >> (width - i - 1) & 1 ? 
'1' : '0'; + bits[i] = sign ? '1' : '0'; + bits[i + 1] = 0; + + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, subscripts, bits, value); + } - put_bits(pbc, width, magnitude); - put_bits(pbc, 1, sign); + put_bits(pbc, width, magnitude); + put_bits(pbc, 1, sign); - return 0; + return 0; } static int cbs_vp9_read_increment(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t *write_to) { - uint32_t value; - int position, i; - char bits[8]; - - av_assert0(range_min <= range_max && range_max - range_min < sizeof(bits) - 1); - if(ctx->trace_enable) - position = get_bits_count(gbc); - - for(i = 0, value = range_min; value < range_max;) { - if(get_bits_left(gbc) < 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid increment value at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; - } - if(get_bits1(gbc)) { - bits[i++] = '1'; - ++value; - } - else { - bits[i++] = '0'; - break; + uint32_t range_min, uint32_t range_max, + const char *name, uint32_t *write_to) +{ + uint32_t value; + int position, i; + char bits[8]; + + av_assert0(range_min <= range_max && range_max - range_min < sizeof(bits) - 1); + if (ctx->trace_enable) + position = get_bits_count(gbc); + + for (i = 0, value = range_min; value < range_max;) { + if (get_bits_left(gbc) < 1) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid increment value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } + if (get_bits1(gbc)) { + bits[i++] = '1'; + ++value; + } else { + bits[i++] = '0'; + break; + } } - } - if(ctx->trace_enable) { - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, NULL, bits, value); - } + if (ctx->trace_enable) { + bits[i] = 0; + ff_cbs_trace_syntax_element(ctx, position, name, NULL, bits, value); + } - *write_to = value; - return 0; + *write_to = value; + return 0; } static int cbs_vp9_write_increment(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t range_min, 
uint32_t range_max, - const char *name, uint32_t value) { - int len; - - av_assert0(range_min <= range_max && range_max - range_min < 8); - if(value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%" PRIu32 ", but must be in [%" PRIu32 ",%" PRIu32 "].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if(value == range_max) - len = range_max - range_min; - else - len = value - range_min + 1; - if(put_bits_left(pbc) < len) - return AVERROR(ENOSPC); - - if(ctx->trace_enable) { - char bits[8]; - int i; - for(i = 0; i < len; i++) { - if(range_min + i == value) - bits[i] = '0'; - else - bits[i] = '1'; + uint32_t range_min, uint32_t range_max, + const char *name, uint32_t value) +{ + int len; + + av_assert0(range_min <= range_max && range_max - range_min < 8); + if (value < range_min || value > range_max) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " + "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", + name, value, range_min, range_max); + return AVERROR_INVALIDDATA; } - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, NULL, bits, value); - } - if(len > 0) - put_bits(pbc, len, (1 << len) - 1 - (value != range_max)); + if (value == range_max) + len = range_max - range_min; + else + len = value - range_min + 1; + if (put_bits_left(pbc) < len) + return AVERROR(ENOSPC); + + if (ctx->trace_enable) { + char bits[8]; + int i; + for (i = 0; i < len; i++) { + if (range_min + i == value) + bits[i] = '0'; + else + bits[i] = '1'; + } + bits[i] = 0; + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, NULL, bits, value); + } - return 0; + if (len > 0) + put_bits(pbc, len, (1 << len) - 1 - (value != range_max)); + + return 0; } static int cbs_vp9_read_le(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, uint32_t *write_to) { - uint32_t value; - int position, b; - - av_assert0(width % 8 == 0); - - 
if(ctx->trace_enable) - position = get_bits_count(gbc); - - if(get_bits_left(gbc) < width) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid le value at " - "%s: bitstream ended.\n", - name); - return AVERROR_INVALIDDATA; - } - - value = 0; - for(b = 0; b < width; b += 8) - value |= get_bits(gbc, 8) << b; - - if(ctx->trace_enable) { - char bits[33]; - int i; - for(b = 0; b < width; b += 8) - for(i = 0; i < 8; i++) - bits[b + i] = value >> (b + i) & 1 ? '1' : '0'; - bits[b] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - *write_to = value; - return 0; -} + int width, const char *name, + const int *subscripts, uint32_t *write_to) +{ + uint32_t value; + int position, b; -static int cbs_vp9_write_le(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, uint32_t value) { - int b; + av_assert0(width % 8 == 0); + + if (ctx->trace_enable) + position = get_bits_count(gbc); + + if (get_bits_left(gbc) < width) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid le value at " + "%s: bitstream ended.\n", name); + return AVERROR_INVALIDDATA; + } - av_assert0(width % 8 == 0); + value = 0; + for (b = 0; b < width; b += 8) + value |= get_bits(gbc, 8) << b; - if(put_bits_left(pbc) < width) - return AVERROR(ENOSPC); + if (ctx->trace_enable) { + char bits[33]; + int i; + for (b = 0; b < width; b += 8) + for (i = 0; i < 8; i++) + bits[b + i] = value >> (b + i) & 1 ? '1' : '0'; + bits[b] = 0; + + ff_cbs_trace_syntax_element(ctx, position, name, subscripts, + bits, value); + } - if(ctx->trace_enable) { - char bits[33]; - int i; - for(b = 0; b < width; b += 8) - for(i = 0; i < 8; i++) - bits[b + i] = value >> (b + i) & 1 ? 
'1' : '0'; - bits[b] = 0; + *write_to = value; + return 0; +} - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } +static int cbs_vp9_write_le(CodedBitstreamContext *ctx, PutBitContext *pbc, + int width, const char *name, + const int *subscripts, uint32_t value) +{ + int b; + + av_assert0(width % 8 == 0); + + if (put_bits_left(pbc) < width) + return AVERROR(ENOSPC); + + if (ctx->trace_enable) { + char bits[33]; + int i; + for (b = 0; b < width; b += 8) + for (i = 0; i < 8; i++) + bits[b + i] = value >> (b + i) & 1 ? '1' : '0'; + bits[b] = 0; + + ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), + name, subscripts, bits, value); + } - for(b = 0; b < width; b += 8) - put_bits(pbc, 8, value >> b & 0xff); + for (b = 0; b < width; b += 8) + put_bits(pbc, 8, value >> b & 0xff); - return 0; + return 0; } -#define HEADER(name) \ - do { \ - ff_cbs_trace_header(ctx, name); \ - } while(0) +#define HEADER(name) do { \ + ff_cbs_trace_header(ctx, name); \ + } while (0) -#define CHECK(call) \ - do { \ - err = (call); \ - if(err < 0) \ - return err; \ - } while(0) +#define CHECK(call) do { \ + err = (call); \ + if (err < 0) \ + return err; \ + } while (0) -#define FUNC_NAME(rw, codec, name) cbs_##codec##_##rw##_##name +#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name #define FUNC_VP9(rw, name) FUNC_NAME(rw, vp9, name) #define FUNC(name) FUNC_VP9(READWRITE, name) -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]) { subs, __VA_ARGS__ }) : NULL) +#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) #define f(width, name) \ - xf(width, name, current->name, 0, ) + xf(width, name, current->name, 0, ) #define s(width, name) \ - xs(width, name, current->name, 0, ) + xs(width, name, current->name, 0, ) #define fs(width, name, subs, ...) \ - xf(width, name, current->name, subs, __VA_ARGS__) + xf(width, name, current->name, subs, __VA_ARGS__) #define ss(width, name, subs, ...) 
\ - xs(width, name, current->name, subs, __VA_ARGS__) + xs(width, name, current->name, subs, __VA_ARGS__) #define READ #define READWRITE read #define RWContext GetBitContext -#define xf(width, name, var, subs, ...) \ - do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, 0, (1 << width) - 1)); \ - var = value; \ - } while(0) -#define xs(width, name, var, subs, ...) \ - do { \ - int32_t value; \ - CHECK(cbs_vp9_read_s(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ - var = value; \ - } while(0) - - -#define increment(name, min, max) \ - do { \ - uint32_t value; \ - CHECK(cbs_vp9_read_increment(ctx, rw, min, max, #name, &value)); \ - current->name = value; \ - } while(0) - -#define fle(width, name, subs, ...) \ - do { \ - CHECK(cbs_vp9_read_le(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), ¤t->name)); \ - } while(0) - -#define delta_q(name) \ - do { \ - uint8_t delta_coded; \ - int8_t delta_q; \ - xf(1, name.delta_coded, delta_coded, 0, ); \ - if(delta_coded) \ - xs(4, name.delta_q, delta_q, 0, ); \ - else \ - delta_q = 0; \ - current->name = delta_q; \ - } while(0) - -#define prob(name, subs, ...) \ - do { \ - uint8_t prob_coded; \ - uint8_t prob; \ - xf(1, name.prob_coded, prob_coded, subs, __VA_ARGS__); \ - if(prob_coded) \ - xf(8, name.prob, prob, subs, __VA_ARGS__); \ - else \ - prob = 255; \ - current->name = prob; \ - } while(0) - -#define fixed(width, name, value) \ - do { \ - av_unused uint32_t fixed_value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - 0, &fixed_value, value, value)); \ - } while(0) - -#define infer(name, value) \ - do { \ - current->name = value; \ - } while(0) +#define xf(width, name, var, subs, ...) do { \ + uint32_t value; \ + CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + &value, 0, (1 << width) - 1)); \ + var = value; \ + } while (0) +#define xs(width, name, var, subs, ...) 
do { \ + int32_t value; \ + CHECK(cbs_vp9_read_s(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ + var = value; \ + } while (0) + + +#define increment(name, min, max) do { \ + uint32_t value; \ + CHECK(cbs_vp9_read_increment(ctx, rw, min, max, #name, &value)); \ + current->name = value; \ + } while (0) + +#define fle(width, name, subs, ...) do { \ + CHECK(cbs_vp9_read_le(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), ¤t->name)); \ + } while (0) + +#define delta_q(name) do { \ + uint8_t delta_coded; \ + int8_t delta_q; \ + xf(1, name.delta_coded, delta_coded, 0, ); \ + if (delta_coded) \ + xs(4, name.delta_q, delta_q, 0, ); \ + else \ + delta_q = 0; \ + current->name = delta_q; \ + } while (0) + +#define prob(name, subs, ...) do { \ + uint8_t prob_coded; \ + uint8_t prob; \ + xf(1, name.prob_coded, prob_coded, subs, __VA_ARGS__); \ + if (prob_coded) \ + xf(8, name.prob, prob, subs, __VA_ARGS__); \ + else \ + prob = 255; \ + current->name = prob; \ + } while (0) + +#define fixed(width, name, value) do { \ + av_unused uint32_t fixed_value; \ + CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ + 0, &fixed_value, value, value)); \ + } while (0) + +#define infer(name, value) do { \ + current->name = value; \ + } while (0) #define byte_alignment(rw) (get_bits_count(rw) % 8) @@ -352,58 +344,50 @@ static int cbs_vp9_write_le(CodedBitstreamContext *ctx, PutBitContext *pbc, #define READWRITE write #define RWContext PutBitContext -#define xf(width, name, var, subs, ...) \ - do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - var, 0, (1 << width) - 1)); \ - } while(0) -#define xs(width, name, var, subs, ...) \ - do { \ - CHECK(cbs_vp9_write_s(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), var)); \ - } while(0) - -#define increment(name, min, max) \ - do { \ - CHECK(cbs_vp9_write_increment(ctx, rw, min, max, #name, current->name)); \ - } while(0) - -#define fle(width, name, subs, ...) 
\ - do { \ - CHECK(cbs_vp9_write_le(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), current->name)); \ - } while(0) - -#define delta_q(name) \ - do { \ - xf(1, name.delta_coded, !!current->name, 0, ); \ - if(current->name) \ - xs(4, name.delta_q, current->name, 0, ); \ - } while(0) - -#define prob(name, subs, ...) \ - do { \ - xf(1, name.prob_coded, current->name != 255, subs, __VA_ARGS__); \ - if(current->name != 255) \ - xf(8, name.prob, current->name, subs, __VA_ARGS__); \ - } while(0) - -#define fixed(width, name, value) \ - do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - 0, value, value, value)); \ - } while(0) - -#define infer(name, value) \ - do { \ - if(current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \ - "%s does not match inferred value: " \ - "%" PRId64 ", but should be %" PRId64 ".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - } \ - } while(0) +#define xf(width, name, var, subs, ...) do { \ + CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), \ + var, 0, (1 << width) - 1)); \ + } while (0) +#define xs(width, name, var, subs, ...) do { \ + CHECK(cbs_vp9_write_s(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), var)); \ + } while (0) + +#define increment(name, min, max) do { \ + CHECK(cbs_vp9_write_increment(ctx, rw, min, max, #name, current->name)); \ + } while (0) + +#define fle(width, name, subs, ...) do { \ + CHECK(cbs_vp9_write_le(ctx, rw, width, #name, \ + SUBSCRIPTS(subs, __VA_ARGS__), current->name)); \ + } while (0) + +#define delta_q(name) do { \ + xf(1, name.delta_coded, !!current->name, 0, ); \ + if (current->name) \ + xs(4, name.delta_q, current->name, 0, ); \ + } while (0) + +#define prob(name, subs, ...) 
do { \ + xf(1, name.prob_coded, current->name != 255, subs, __VA_ARGS__); \ + if (current->name != 255) \ + xf(8, name.prob, current->name, subs, __VA_ARGS__); \ + } while (0) + +#define fixed(width, name, value) do { \ + CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ + 0, value, value, value)); \ + } while (0) + +#define infer(name, value) do { \ + if (current->name != (value)) { \ + av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \ + "%s does not match inferred value: " \ + "%"PRId64", but should be %"PRId64".\n", \ + #name, (int64_t)current->name, (int64_t)(value)); \ + } \ + } while (0) #define byte_alignment(rw) (put_bits_count(rw) % 8) @@ -424,252 +408,255 @@ static int cbs_vp9_write_le(CodedBitstreamContext *ctx, PutBitContext *pbc, static int cbs_vp9_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) { - uint8_t superframe_header; - int err; - - if(frag->data_size == 0) - return AVERROR_INVALIDDATA; + CodedBitstreamFragment *frag, + int header) +{ + uint8_t superframe_header; + int err; - // Last byte in the packet. 
- superframe_header = frag->data[frag->data_size - 1]; - - if((superframe_header & 0xe0) == 0xc0) { - VP9RawSuperframeIndex sfi; - GetBitContext gbc; - size_t index_size, pos; - int i; - - index_size = 2 + (((superframe_header & 0x18) >> 3) + 1) * - ((superframe_header & 0x07) + 1); - - if(index_size > frag->data_size) - return AVERROR_INVALIDDATA; - - err = init_get_bits(&gbc, frag->data + frag->data_size - index_size, - 8 * index_size); - if(err < 0) - return err; - - err = cbs_vp9_read_superframe_index(ctx, &gbc, &sfi); - if(err < 0) - return err; - - pos = 0; - for(i = 0; i <= sfi.frames_in_superframe_minus_1; i++) { - if(pos + sfi.frame_sizes[i] + index_size > frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Frame %d too large " - "in superframe: %" PRIu32 " bytes.\n", - i, sfi.frame_sizes[i]); + if (frag->data_size == 0) return AVERROR_INVALIDDATA; - } - err = ff_cbs_insert_unit_data(frag, -1, 0, - frag->data + pos, - sfi.frame_sizes[i], - frag->data_ref); - if(err < 0) - return err; - - pos += sfi.frame_sizes[i]; - } - if(pos + index_size != frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_WARNING, "Extra padding at " - "end of superframe: %zu bytes.\n", - frag->data_size - (pos + index_size)); + // Last byte in the packet. 
+ superframe_header = frag->data[frag->data_size - 1]; + + if ((superframe_header & 0xe0) == 0xc0) { + VP9RawSuperframeIndex sfi; + GetBitContext gbc; + size_t index_size, pos; + int i; + + index_size = 2 + (((superframe_header & 0x18) >> 3) + 1) * + ((superframe_header & 0x07) + 1); + + if (index_size > frag->data_size) + return AVERROR_INVALIDDATA; + + err = init_get_bits(&gbc, frag->data + frag->data_size - index_size, + 8 * index_size); + if (err < 0) + return err; + + err = cbs_vp9_read_superframe_index(ctx, &gbc, &sfi); + if (err < 0) + return err; + + pos = 0; + for (i = 0; i <= sfi.frames_in_superframe_minus_1; i++) { + if (pos + sfi.frame_sizes[i] + index_size > frag->data_size) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Frame %d too large " + "in superframe: %"PRIu32" bytes.\n", + i, sfi.frame_sizes[i]); + return AVERROR_INVALIDDATA; + } + + err = ff_cbs_append_unit_data(frag, 0, + frag->data + pos, + sfi.frame_sizes[i], + frag->data_ref); + if (err < 0) + return err; + + pos += sfi.frame_sizes[i]; + } + if (pos + index_size != frag->data_size) { + av_log(ctx->log_ctx, AV_LOG_WARNING, "Extra padding at " + "end of superframe: %zu bytes.\n", + frag->data_size - (pos + index_size)); + } + + return 0; + + } else { + err = ff_cbs_append_unit_data(frag, 0, + frag->data, frag->data_size, + frag->data_ref); + if (err < 0) + return err; } return 0; - } - else { - err = ff_cbs_insert_unit_data(frag, -1, 0, - frag->data, frag->data_size, - frag->data_ref); - if(err < 0) - return err; - } - - return 0; } static int cbs_vp9_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) { - VP9RawFrame *frame; - GetBitContext gbc; - int err, pos; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if(err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if(err < 0) - return err; - frame = unit->content; - - err = cbs_vp9_read_frame(ctx, &gbc, frame); - if(err < 0) - return err; - - pos = get_bits_count(&gbc); - av_assert0(pos % 8 == 
0); - pos /= 8; - av_assert0(pos <= unit->data_size); - - if(pos == unit->data_size) { - // No data (e.g. a show-existing-frame frame). - } - else { - frame->data_ref = av_buffer_ref(unit->data_ref); - if(!frame->data_ref) - return AVERROR(ENOMEM); - - frame->data = unit->data + pos; - frame->data_size = unit->data_size - pos; - } - - return 0; -} - -static int cbs_vp9_write_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) { - VP9RawFrame *frame = unit->content; - int err; + CodedBitstreamUnit *unit) +{ + VP9RawFrame *frame; + GetBitContext gbc; + int err, pos; - err = cbs_vp9_write_frame(ctx, pbc, frame); - if(err < 0) - return err; + err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); + if (err < 0) + return err; - // Frame must be byte-aligned. - av_assert0(put_bits_count(pbc) % 8 == 0); + err = ff_cbs_alloc_unit_content2(ctx, unit); + if (err < 0) + return err; + frame = unit->content; - if(frame->data) { - if(frame->data_size > put_bits_left(pbc) / 8) - return AVERROR(ENOSPC); + err = cbs_vp9_read_frame(ctx, &gbc, frame); + if (err < 0) + return err; - flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), frame->data, frame->data_size); - skip_put_bytes(pbc, frame->data_size); - } + pos = get_bits_count(&gbc); + av_assert0(pos % 8 == 0); + pos /= 8; + av_assert0(pos <= unit->data_size); - return 0; -} + if (pos == unit->data_size) { + // No data (e.g. a show-existing-frame frame). + } else { + frame->data_ref = av_buffer_ref(unit->data_ref); + if (!frame->data_ref) + return AVERROR(ENOMEM); -static int cbs_vp9_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) { - int err; - - if(frag->nb_units == 1) { - // Output is just the content of the single frame. 
- - CodedBitstreamUnit *frame = &frag->units[0]; - - frag->data_ref = av_buffer_ref(frame->data_ref); - if(!frag->data_ref) - return AVERROR(ENOMEM); - - frag->data = frame->data; - frag->data_size = frame->data_size; - } - else { - // Build superframe out of frames. - - VP9RawSuperframeIndex sfi; - PutBitContext pbc; - AVBufferRef *ref; - uint8_t *data; - size_t size, max, pos; - int i, size_len; - - if(frag->nb_units > 8) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many frames to " - "make superframe: %d.\n", - frag->nb_units); - return AVERROR(EINVAL); + frame->data = unit->data + pos; + frame->data_size = unit->data_size - pos; } - max = 0; - for(i = 0; i < frag->nb_units; i++) - if(max < frag->units[i].data_size) - max = frag->units[i].data_size; + return 0; +} - if(max < 2) - size_len = 1; - else - size_len = av_log2(max) / 8 + 1; - av_assert0(size_len <= 4); +static int cbs_vp9_write_unit(CodedBitstreamContext *ctx, + CodedBitstreamUnit *unit, + PutBitContext *pbc) +{ + VP9RawFrame *frame = unit->content; + int err; + + err = cbs_vp9_write_frame(ctx, pbc, frame); + if (err < 0) + return err; - sfi.superframe_marker = VP9_SUPERFRAME_MARKER; - sfi.bytes_per_framesize_minus_1 = size_len - 1; - sfi.frames_in_superframe_minus_1 = frag->nb_units - 1; + // Frame must be byte-aligned. 
+ av_assert0(put_bits_count(pbc) % 8 == 0); - size = 2; - for(i = 0; i < frag->nb_units; i++) { - size += size_len + frag->units[i].data_size; - sfi.frame_sizes[i] = frag->units[i].data_size; - } + if (frame->data) { + if (frame->data_size > put_bits_left(pbc) / 8) + return AVERROR(ENOSPC); - ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if(!ref) - return AVERROR(ENOMEM); - data = ref->data; - memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - pos = 0; - for(i = 0; i < frag->nb_units; i++) { - av_assert0(size - pos > frag->units[i].data_size); - memcpy(data + pos, frag->units[i].data, - frag->units[i].data_size); - pos += frag->units[i].data_size; + flush_put_bits(pbc); + memcpy(put_bits_ptr(pbc), frame->data, frame->data_size); + skip_put_bytes(pbc, frame->data_size); } - av_assert0(size - pos == 2 + frag->nb_units * size_len); - init_put_bits(&pbc, data + pos, size - pos); + return 0; +} - err = cbs_vp9_write_superframe_index(ctx, &pbc, &sfi); - if(err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to write " - "superframe index.\n"); - av_buffer_unref(&ref); - return err; +static int cbs_vp9_assemble_fragment(CodedBitstreamContext *ctx, + CodedBitstreamFragment *frag) +{ + int err; + + if (frag->nb_units == 1) { + // Output is just the content of the single frame. + + CodedBitstreamUnit *frame = &frag->units[0]; + + frag->data_ref = av_buffer_ref(frame->data_ref); + if (!frag->data_ref) + return AVERROR(ENOMEM); + + frag->data = frame->data; + frag->data_size = frame->data_size; + + } else { + // Build superframe out of frames. 
+ + VP9RawSuperframeIndex sfi; + PutBitContext pbc; + AVBufferRef *ref; + uint8_t *data; + size_t size, max, pos; + int i, size_len; + + if (frag->nb_units > 8) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many frames to " + "make superframe: %d.\n", frag->nb_units); + return AVERROR(EINVAL); + } + + max = 0; + for (i = 0; i < frag->nb_units; i++) + if (max < frag->units[i].data_size) + max = frag->units[i].data_size; + + if (max < 2) + size_len = 1; + else + size_len = av_log2(max) / 8 + 1; + av_assert0(size_len <= 4); + + sfi.superframe_marker = VP9_SUPERFRAME_MARKER; + sfi.bytes_per_framesize_minus_1 = size_len - 1; + sfi.frames_in_superframe_minus_1 = frag->nb_units - 1; + + size = 2; + for (i = 0; i < frag->nb_units; i++) { + size += size_len + frag->units[i].data_size; + sfi.frame_sizes[i] = frag->units[i].data_size; + } + + ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); + if (!ref) + return AVERROR(ENOMEM); + data = ref->data; + memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + + pos = 0; + for (i = 0; i < frag->nb_units; i++) { + av_assert0(size - pos > frag->units[i].data_size); + memcpy(data + pos, frag->units[i].data, + frag->units[i].data_size); + pos += frag->units[i].data_size; + } + av_assert0(size - pos == 2 + frag->nb_units * size_len); + + init_put_bits(&pbc, data + pos, size - pos); + + err = cbs_vp9_write_superframe_index(ctx, &pbc, &sfi); + if (err < 0) { + av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to write " + "superframe index.\n"); + av_buffer_unref(&ref); + return err; + } + + av_assert0(put_bits_left(&pbc) == 0); + flush_put_bits(&pbc); + + frag->data_ref = ref; + frag->data = data; + frag->data_size = size; } - av_assert0(put_bits_left(&pbc) == 0); - flush_put_bits(&pbc); - - frag->data_ref = ref; - frag->data = data; - frag->data_size = size; - } - - return 0; + return 0; } -static void cbs_vp9_flush(CodedBitstreamContext *ctx) { - CodedBitstreamVP9Context *vp9 = ctx->priv_data; +static void 
cbs_vp9_flush(CodedBitstreamContext *ctx) +{ + CodedBitstreamVP9Context *vp9 = ctx->priv_data; - memset(vp9->ref, 0, sizeof(vp9->ref)); + memset(vp9->ref, 0, sizeof(vp9->ref)); } static const CodedBitstreamUnitTypeDescriptor cbs_vp9_unit_types[] = { - CBS_UNIT_TYPE_INTERNAL_REF(0, VP9RawFrame, data), - CBS_UNIT_TYPE_END_OF_LIST + CBS_UNIT_TYPE_INTERNAL_REF(0, VP9RawFrame, data), + CBS_UNIT_TYPE_END_OF_LIST }; const CodedBitstreamType ff_cbs_type_vp9 = { - .codec_id = AV_CODEC_ID_VP9, + .codec_id = AV_CODEC_ID_VP9, - .priv_data_size = sizeof(CodedBitstreamVP9Context), + .priv_data_size = sizeof(CodedBitstreamVP9Context), - .unit_types = cbs_vp9_unit_types, + .unit_types = cbs_vp9_unit_types, - .split_fragment = &cbs_vp9_split_fragment, - .read_unit = &cbs_vp9_read_unit, - .write_unit = &cbs_vp9_write_unit, + .split_fragment = &cbs_vp9_split_fragment, + .read_unit = &cbs_vp9_read_unit, + .write_unit = &cbs_vp9_write_unit, - .flush = &cbs_vp9_flush, + .flush = &cbs_vp9_flush, - .assemble_fragment = &cbs_vp9_assemble_fragment, + .assemble_fragment = &cbs_vp9_assemble_fragment, }; diff --git a/third-party/cbs/cbs_vp9_syntax_template.c b/third-party/cbs/cbs_vp9_syntax_template.c index a0e15c538de..2f08eccf180 100644 --- a/third-party/cbs/cbs_vp9_syntax_template.c +++ b/third-party/cbs/cbs_vp9_syntax_template.c @@ -17,406 +17,413 @@ */ static int FUNC(frame_sync_code)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - int err; + VP9RawFrameHeader *current) +{ + int err; - fixed(8, frame_sync_byte_0, VP9_FRAME_SYNC_0); - fixed(8, frame_sync_byte_1, VP9_FRAME_SYNC_1); - fixed(8, frame_sync_byte_2, VP9_FRAME_SYNC_2); + fixed(8, frame_sync_byte_0, VP9_FRAME_SYNC_0); + fixed(8, frame_sync_byte_1, VP9_FRAME_SYNC_1); + fixed(8, frame_sync_byte_2, VP9_FRAME_SYNC_2); - return 0; + return 0; } static int FUNC(color_config)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current, int profile) { - CodedBitstreamVP9Context *vp9 = 
ctx->priv_data; - int err; - - if(profile >= 2) { - f(1, ten_or_twelve_bit); - vp9->bit_depth = current->ten_or_twelve_bit ? 12 : 10; - } - else - vp9->bit_depth = 8; - - f(3, color_space); - - if(current->color_space != VP9_CS_RGB) { - f(1, color_range); - if(profile == 1 || profile == 3) { - f(1, subsampling_x); - f(1, subsampling_y); - fixed(1, reserved_zero, 0); - } - else { - infer(subsampling_x, 1); - infer(subsampling_y, 1); - } - } - else { - infer(color_range, 1); - if(profile == 1 || profile == 3) { - infer(subsampling_x, 0); - infer(subsampling_y, 0); - fixed(1, reserved_zero, 0); + VP9RawFrameHeader *current, int profile) +{ + CodedBitstreamVP9Context *vp9 = ctx->priv_data; + int err; + + if (profile >= 2) { + f(1, ten_or_twelve_bit); + vp9->bit_depth = current->ten_or_twelve_bit ? 12 : 10; + } else + vp9->bit_depth = 8; + + f(3, color_space); + + if (current->color_space != VP9_CS_RGB) { + f(1, color_range); + if (profile == 1 || profile == 3) { + f(1, subsampling_x); + f(1, subsampling_y); + fixed(1, reserved_zero, 0); + } else { + infer(subsampling_x, 1); + infer(subsampling_y, 1); + } + } else { + infer(color_range, 1); + if (profile == 1 || profile == 3) { + infer(subsampling_x, 0); + infer(subsampling_y, 0); + fixed(1, reserved_zero, 0); + } } - } - vp9->subsampling_x = current->subsampling_x; - vp9->subsampling_y = current->subsampling_y; + vp9->subsampling_x = current->subsampling_x; + vp9->subsampling_y = current->subsampling_y; - return 0; + return 0; } static int FUNC(frame_size)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err; + VP9RawFrameHeader *current) +{ + CodedBitstreamVP9Context *vp9 = ctx->priv_data; + int err; - f(16, frame_width_minus_1); - f(16, frame_height_minus_1); + f(16, frame_width_minus_1); + f(16, frame_height_minus_1); - vp9->frame_width = current->frame_width_minus_1 + 1; - vp9->frame_height = current->frame_height_minus_1 + 1; + 
vp9->frame_width = current->frame_width_minus_1 + 1; + vp9->frame_height = current->frame_height_minus_1 + 1; - vp9->mi_cols = (vp9->frame_width + 7) >> 3; - vp9->mi_rows = (vp9->frame_height + 7) >> 3; - vp9->sb64_cols = (vp9->mi_cols + 7) >> 3; - vp9->sb64_rows = (vp9->mi_rows + 7) >> 3; + vp9->mi_cols = (vp9->frame_width + 7) >> 3; + vp9->mi_rows = (vp9->frame_height + 7) >> 3; + vp9->sb64_cols = (vp9->mi_cols + 7) >> 3; + vp9->sb64_rows = (vp9->mi_rows + 7) >> 3; - return 0; + return 0; } static int FUNC(render_size)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - int err; + VP9RawFrameHeader *current) +{ + int err; - f(1, render_and_frame_size_different); + f(1, render_and_frame_size_different); - if(current->render_and_frame_size_different) { - f(16, render_width_minus_1); - f(16, render_height_minus_1); - } + if (current->render_and_frame_size_different) { + f(16, render_width_minus_1); + f(16, render_height_minus_1); + } - return 0; + return 0; } static int FUNC(frame_size_with_refs)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err, i; + VP9RawFrameHeader *current) +{ + CodedBitstreamVP9Context *vp9 = ctx->priv_data; + int err, i; - for(i = 0; i < VP9_REFS_PER_FRAME; i++) { - fs(1, found_ref[i], 1, i); - if(current->found_ref[i]) { - VP9ReferenceFrameState *ref = - &vp9->ref[current->ref_frame_idx[i]]; + for (i = 0; i < VP9_REFS_PER_FRAME; i++) { + fs(1, found_ref[i], 1, i); + if (current->found_ref[i]) { + VP9ReferenceFrameState *ref = + &vp9->ref[current->ref_frame_idx[i]]; - vp9->frame_width = ref->frame_width; - vp9->frame_height = ref->frame_height; + vp9->frame_width = ref->frame_width; + vp9->frame_height = ref->frame_height; - vp9->subsampling_x = ref->subsampling_x; - vp9->subsampling_y = ref->subsampling_y; - vp9->bit_depth = ref->bit_depth; + vp9->subsampling_x = ref->subsampling_x; + vp9->subsampling_y = ref->subsampling_y; + 
vp9->bit_depth = ref->bit_depth; - break; + break; + } } - } - if(i >= VP9_REFS_PER_FRAME) - CHECK(FUNC(frame_size)(ctx, rw, current)); - else { - vp9->mi_cols = (vp9->frame_width + 7) >> 3; - vp9->mi_rows = (vp9->frame_height + 7) >> 3; - vp9->sb64_cols = (vp9->mi_cols + 7) >> 3; - vp9->sb64_rows = (vp9->mi_rows + 7) >> 3; - } - CHECK(FUNC(render_size)(ctx, rw, current)); + if (i >= VP9_REFS_PER_FRAME) + CHECK(FUNC(frame_size)(ctx, rw, current)); + else { + vp9->mi_cols = (vp9->frame_width + 7) >> 3; + vp9->mi_rows = (vp9->frame_height + 7) >> 3; + vp9->sb64_cols = (vp9->mi_cols + 7) >> 3; + vp9->sb64_rows = (vp9->mi_rows + 7) >> 3; + } + CHECK(FUNC(render_size)(ctx, rw, current)); - return 0; + return 0; } static int FUNC(interpolation_filter)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - int err; + VP9RawFrameHeader *current) +{ + int err; - f(1, is_filter_switchable); - if(!current->is_filter_switchable) - f(2, raw_interpolation_filter_type); + f(1, is_filter_switchable); + if (!current->is_filter_switchable) + f(2, raw_interpolation_filter_type); - return 0; + return 0; } static int FUNC(loop_filter_params)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - int err, i; - - f(6, loop_filter_level); - f(3, loop_filter_sharpness); - - f(1, loop_filter_delta_enabled); - if(current->loop_filter_delta_enabled) { - f(1, loop_filter_delta_update); - if(current->loop_filter_delta_update) { - for(i = 0; i < VP9_MAX_REF_FRAMES; i++) { - fs(1, update_ref_delta[i], 1, i); - if(current->update_ref_delta[i]) - ss(6, loop_filter_ref_deltas[i], 1, i); - } - for(i = 0; i < 2; i++) { - fs(1, update_mode_delta[i], 1, i); - if(current->update_mode_delta[i]) - ss(6, loop_filter_mode_deltas[i], 1, i); - } + VP9RawFrameHeader *current) +{ + int err, i; + + f(6, loop_filter_level); + f(3, loop_filter_sharpness); + + f(1, loop_filter_delta_enabled); + if (current->loop_filter_delta_enabled) { + f(1, loop_filter_delta_update); + 
if (current->loop_filter_delta_update) { + for (i = 0; i < VP9_MAX_REF_FRAMES; i++) { + fs(1, update_ref_delta[i], 1, i); + if (current->update_ref_delta[i]) + ss(6, loop_filter_ref_deltas[i], 1, i); + } + for (i = 0; i < 2; i++) { + fs(1, update_mode_delta[i], 1, i); + if (current->update_mode_delta[i]) + ss(6, loop_filter_mode_deltas[i], 1, i); + } + } } - } - return 0; + return 0; } static int FUNC(quantization_params)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - int err; + VP9RawFrameHeader *current) +{ + int err; - f(8, base_q_idx); + f(8, base_q_idx); - delta_q(delta_q_y_dc); - delta_q(delta_q_uv_dc); - delta_q(delta_q_uv_ac); + delta_q(delta_q_y_dc); + delta_q(delta_q_uv_dc); + delta_q(delta_q_uv_ac); - return 0; + return 0; } static int FUNC(segmentation_params)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - static const uint8_t segmentation_feature_bits[VP9_SEG_LVL_MAX] = { 8, 6, 2, 0 }; - static const uint8_t segmentation_feature_signed[VP9_SEG_LVL_MAX] = { 1, 1, 0, 0 }; - - int err, i, j; - - f(1, segmentation_enabled); - - if(current->segmentation_enabled) { - f(1, segmentation_update_map); - if(current->segmentation_update_map) { - for(i = 0; i < 7; i++) - prob(segmentation_tree_probs[i], 1, i); - f(1, segmentation_temporal_update); - for(i = 0; i < 3; i++) { - if(current->segmentation_temporal_update) - prob(segmentation_pred_prob[i], 1, i); - else - infer(segmentation_pred_prob[i], 255); - } - } + VP9RawFrameHeader *current) +{ + static const uint8_t segmentation_feature_bits[VP9_SEG_LVL_MAX] = { 8, 6, 2, 0 }; + static const uint8_t segmentation_feature_signed[VP9_SEG_LVL_MAX] = { 1, 1, 0, 0 }; + + int err, i, j; + + f(1, segmentation_enabled); + + if (current->segmentation_enabled) { + f(1, segmentation_update_map); + if (current->segmentation_update_map) { + for (i = 0; i < 7; i++) + prob(segmentation_tree_probs[i], 1, i); + f(1, segmentation_temporal_update); + for (i = 0; i < 3; i++) { 
+ if (current->segmentation_temporal_update) + prob(segmentation_pred_prob[i], 1, i); + else + infer(segmentation_pred_prob[i], 255); + } + } - f(1, segmentation_update_data); - if(current->segmentation_update_data) { - f(1, segmentation_abs_or_delta_update); - for(i = 0; i < VP9_MAX_SEGMENTS; i++) { - for(j = 0; j < VP9_SEG_LVL_MAX; j++) { - fs(1, feature_enabled[i][j], 2, i, j); - if(current->feature_enabled[i][j] && - segmentation_feature_bits[j]) { - fs(segmentation_feature_bits[j], - feature_value[i][j], 2, i, j); - if(segmentation_feature_signed[j]) - fs(1, feature_sign[i][j], 2, i, j); - else - infer(feature_sign[i][j], 0); - } - else { - infer(feature_value[i][j], 0); - infer(feature_sign[i][j], 0); - } + f(1, segmentation_update_data); + if (current->segmentation_update_data) { + f(1, segmentation_abs_or_delta_update); + for (i = 0; i < VP9_MAX_SEGMENTS; i++) { + for (j = 0; j < VP9_SEG_LVL_MAX; j++) { + fs(1, feature_enabled[i][j], 2, i, j); + if (current->feature_enabled[i][j] && + segmentation_feature_bits[j]) { + fs(segmentation_feature_bits[j], + feature_value[i][j], 2, i, j); + if (segmentation_feature_signed[j]) + fs(1, feature_sign[i][j], 2, i, j); + else + infer(feature_sign[i][j], 0); + } else { + infer(feature_value[i][j], 0); + infer(feature_sign[i][j], 0); + } + } + } } - } } - } - return 0; + return 0; } static int FUNC(tile_info)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int min_log2_tile_cols, max_log2_tile_cols; - int err; + VP9RawFrameHeader *current) +{ + CodedBitstreamVP9Context *vp9 = ctx->priv_data; + int min_log2_tile_cols, max_log2_tile_cols; + int err; - min_log2_tile_cols = 0; - while((VP9_MAX_TILE_WIDTH_B64 << min_log2_tile_cols) < vp9->sb64_cols) - ++min_log2_tile_cols; - max_log2_tile_cols = 0; - while((vp9->sb64_cols >> (max_log2_tile_cols + 1)) >= VP9_MIN_TILE_WIDTH_B64) - ++max_log2_tile_cols; + min_log2_tile_cols = 0; + while 
((VP9_MAX_TILE_WIDTH_B64 << min_log2_tile_cols) < vp9->sb64_cols) + ++min_log2_tile_cols; + max_log2_tile_cols = 0; + while ((vp9->sb64_cols >> (max_log2_tile_cols + 1)) >= VP9_MIN_TILE_WIDTH_B64) + ++max_log2_tile_cols; - increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols); + increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols); - increment(tile_rows_log2, 0, 2); + increment(tile_rows_log2, 0, 2); - return 0; + return 0; } static int FUNC(uncompressed_header)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) { - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err, i; - - f(2, frame_marker); - - f(1, profile_low_bit); - f(1, profile_high_bit); - vp9->profile = (current->profile_high_bit << 1) + current->profile_low_bit; - if(vp9->profile == 3) - fixed(1, reserved_zero, 0); - - f(1, show_existing_frame); - if(current->show_existing_frame) { - f(3, frame_to_show_map_idx); - infer(header_size_in_bytes, 0); - infer(refresh_frame_flags, 0x00); - infer(loop_filter_level, 0); - return 0; - } - - f(1, frame_type); - f(1, show_frame); - f(1, error_resilient_mode); - - if(current->frame_type == VP9_KEY_FRAME) { - CHECK(FUNC(frame_sync_code)(ctx, rw, current)); - CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile)); - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - - infer(refresh_frame_flags, 0xff); - } - else { - if(current->show_frame == 0) - f(1, intra_only); - else - infer(intra_only, 0); - - if(current->error_resilient_mode == 0) - f(2, reset_frame_context); - else - infer(reset_frame_context, 0); + VP9RawFrameHeader *current) +{ + CodedBitstreamVP9Context *vp9 = ctx->priv_data; + int err, i; + + f(2, frame_marker); + + f(1, profile_low_bit); + f(1, profile_high_bit); + vp9->profile = (current->profile_high_bit << 1) + current->profile_low_bit; + if (vp9->profile == 3) + fixed(1, reserved_zero, 0); + + f(1, show_existing_frame); + if (current->show_existing_frame) { + 
f(3, frame_to_show_map_idx); + infer(header_size_in_bytes, 0); + infer(refresh_frame_flags, 0x00); + infer(loop_filter_level, 0); + return 0; + } - if(current->intra_only == 1) { - CHECK(FUNC(frame_sync_code)(ctx, rw, current)); + f(1, frame_type); + f(1, show_frame); + f(1, error_resilient_mode); - if(vp9->profile > 0) { + if (current->frame_type == VP9_KEY_FRAME) { + CHECK(FUNC(frame_sync_code)(ctx, rw, current)); CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile)); - } - else { - infer(color_space, 1); - infer(subsampling_x, 1); - infer(subsampling_y, 1); - vp9->bit_depth = 8; + CHECK(FUNC(frame_size)(ctx, rw, current)); + CHECK(FUNC(render_size)(ctx, rw, current)); + + infer(refresh_frame_flags, 0xff); + + } else { + if (current->show_frame == 0) + f(1, intra_only); + else + infer(intra_only, 0); + + if (current->error_resilient_mode == 0) + f(2, reset_frame_context); + else + infer(reset_frame_context, 0); + + if (current->intra_only == 1) { + CHECK(FUNC(frame_sync_code)(ctx, rw, current)); + + if (vp9->profile > 0) { + CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile)); + } else { + infer(color_space, 1); + infer(subsampling_x, 1); + infer(subsampling_y, 1); + vp9->bit_depth = 8; + + vp9->subsampling_x = current->subsampling_x; + vp9->subsampling_y = current->subsampling_y; + } + + f(8, refresh_frame_flags); + + CHECK(FUNC(frame_size)(ctx, rw, current)); + CHECK(FUNC(render_size)(ctx, rw, current)); + } else { + f(8, refresh_frame_flags); + + for (i = 0; i < VP9_REFS_PER_FRAME; i++) { + fs(3, ref_frame_idx[i], 1, i); + fs(1, ref_frame_sign_bias[VP9_LAST_FRAME + i], + 1, VP9_LAST_FRAME + i); + } + + CHECK(FUNC(frame_size_with_refs)(ctx, rw, current)); + f(1, allow_high_precision_mv); + CHECK(FUNC(interpolation_filter)(ctx, rw, current)); + } + } - vp9->subsampling_x = current->subsampling_x; - vp9->subsampling_y = current->subsampling_y; - } + if (current->error_resilient_mode == 0) { + f(1, refresh_frame_context); + f(1, 
frame_parallel_decoding_mode); + } else { + infer(refresh_frame_context, 0); + infer(frame_parallel_decoding_mode, 1); + } - f(8, refresh_frame_flags); + f(2, frame_context_idx); - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - } - else { - f(8, refresh_frame_flags); + CHECK(FUNC(loop_filter_params)(ctx, rw, current)); + CHECK(FUNC(quantization_params)(ctx, rw, current)); + CHECK(FUNC(segmentation_params)(ctx, rw, current)); + CHECK(FUNC(tile_info)(ctx, rw, current)); - for(i = 0; i < VP9_REFS_PER_FRAME; i++) { - fs(3, ref_frame_idx[i], 1, i); - fs(1, ref_frame_sign_bias[VP9_LAST_FRAME + i], - 1, VP9_LAST_FRAME + i); - } + f(16, header_size_in_bytes); - CHECK(FUNC(frame_size_with_refs)(ctx, rw, current)); - f(1, allow_high_precision_mv); - CHECK(FUNC(interpolation_filter)(ctx, rw, current)); - } - } - - if(current->error_resilient_mode == 0) { - f(1, refresh_frame_context); - f(1, frame_parallel_decoding_mode); - } - else { - infer(refresh_frame_context, 0); - infer(frame_parallel_decoding_mode, 1); - } - - f(2, frame_context_idx); - - CHECK(FUNC(loop_filter_params)(ctx, rw, current)); - CHECK(FUNC(quantization_params)(ctx, rw, current)); - CHECK(FUNC(segmentation_params)(ctx, rw, current)); - CHECK(FUNC(tile_info)(ctx, rw, current)); - - f(16, header_size_in_bytes); - - for(i = 0; i < VP9_NUM_REF_FRAMES; i++) { - if(current->refresh_frame_flags & (1 << i)) { - vp9->ref[i] = (VP9ReferenceFrameState) { - .frame_width = vp9->frame_width, - .frame_height = vp9->frame_height, - .subsampling_x = vp9->subsampling_x, - .subsampling_y = vp9->subsampling_y, - .bit_depth = vp9->bit_depth, - }; + for (i = 0; i < VP9_NUM_REF_FRAMES; i++) { + if (current->refresh_frame_flags & (1 << i)) { + vp9->ref[i] = (VP9ReferenceFrameState) { + .frame_width = vp9->frame_width, + .frame_height = vp9->frame_height, + .subsampling_x = vp9->subsampling_x, + .subsampling_y = vp9->subsampling_y, + .bit_depth = vp9->bit_depth, + }; + } } - } - 
av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame: size %dx%d " - "subsample %dx%d bit_depth %d tiles %dx%d.\n", - vp9->frame_width, vp9->frame_height, - vp9->subsampling_x, vp9->subsampling_y, - vp9->bit_depth, 1 << current->tile_cols_log2, - 1 << current->tile_rows_log2); + av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame: size %dx%d " + "subsample %dx%d bit_depth %d tiles %dx%d.\n", + vp9->frame_width, vp9->frame_height, + vp9->subsampling_x, vp9->subsampling_y, + vp9->bit_depth, 1 << current->tile_cols_log2, + 1 << current->tile_rows_log2); - return 0; + return 0; } -static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) { - int err; - while(byte_alignment(rw) != 0) - fixed(1, zero_bit, 0); +static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) +{ + int err; + while (byte_alignment(rw) != 0) + fixed(1, zero_bit, 0); - return 0; + return 0; } static int FUNC(frame)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrame *current) { - int err; + VP9RawFrame *current) +{ + int err; - HEADER("Frame"); + HEADER("Frame"); - CHECK(FUNC(uncompressed_header)(ctx, rw, ¤t->header)); + CHECK(FUNC(uncompressed_header)(ctx, rw, ¤t->header)); - CHECK(FUNC(trailing_bits)(ctx, rw)); + CHECK(FUNC(trailing_bits)(ctx, rw)); - return 0; + return 0; } static int FUNC(superframe_index)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawSuperframeIndex *current) { - int err, i; + VP9RawSuperframeIndex *current) +{ + int err, i; - HEADER("Superframe Index"); + HEADER("Superframe Index"); - f(3, superframe_marker); - f(2, bytes_per_framesize_minus_1); - f(3, frames_in_superframe_minus_1); + f(3, superframe_marker); + f(2, bytes_per_framesize_minus_1); + f(3, frames_in_superframe_minus_1); - for(i = 0; i <= current->frames_in_superframe_minus_1; i++) { - // Surprise little-endian! - fle(8 * (current->bytes_per_framesize_minus_1 + 1), - frame_sizes[i], 1, i); - } + for (i = 0; i <= current->frames_in_superframe_minus_1; i++) { + // Surprise little-endian! 
+ fle(8 * (current->bytes_per_framesize_minus_1 + 1), + frame_sizes[i], 1, i); + } - f(3, superframe_marker); - f(2, bytes_per_framesize_minus_1); - f(3, frames_in_superframe_minus_1); + f(3, superframe_marker); + f(2, bytes_per_framesize_minus_1); + f(3, frames_in_superframe_minus_1); - return 0; + return 0; } diff --git a/third-party/cbs/codec.h b/third-party/cbs/codec.h new file mode 100644 index 00000000000..03e8be90a2f --- /dev/null +++ b/third-party/cbs/codec.h @@ -0,0 +1,387 @@ +/* + * AVCodec public API + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_CODEC_H +#define AVCODEC_CODEC_H + +#include + +#include "libavutil/avutil.h" +#include "libavutil/hwcontext.h" +#include "libavutil/log.h" +#include "libavutil/pixfmt.h" +#include "libavutil/rational.h" +#include "libavutil/samplefmt.h" + +#include "libavcodec/codec_id.h" +#include "libavcodec/version_major.h" + +/** + * @addtogroup lavc_core + * @{ + */ + +/** + * Decoder can use draw_horiz_band callback. + */ +#define AV_CODEC_CAP_DRAW_HORIZ_BAND (1 << 0) +/** + * Codec uses get_buffer() or get_encode_buffer() for allocating buffers and + * supports custom allocators. 
+ * If not set, it might not use get_buffer() or get_encode_buffer() at all, or + * use operations that assume the buffer was allocated by + * avcodec_default_get_buffer2 or avcodec_default_get_encode_buffer. + */ +#define AV_CODEC_CAP_DR1 (1 << 1) +#if FF_API_FLAG_TRUNCATED +/** + * @deprecated Use parsers to always send proper frames. + */ +#define AV_CODEC_CAP_TRUNCATED (1 << 3) +#endif +/** + * Encoder or decoder requires flushing with NULL input at the end in order to + * give the complete and correct output. + * + * NOTE: If this flag is not set, the codec is guaranteed to never be fed with + * with NULL data. The user can still send NULL data to the public encode + * or decode function, but libavcodec will not pass it along to the codec + * unless this flag is set. + * + * Decoders: + * The decoder has a non-zero delay and needs to be fed with avpkt->data=NULL, + * avpkt->size=0 at the end to get the delayed data until the decoder no longer + * returns frames. + * + * Encoders: + * The encoder needs to be fed with NULL data at the end of encoding until the + * encoder no longer returns data. + * + * NOTE: For encoders implementing the AVCodec.encode2() function, setting this + * flag also means that the encoder must set the pts and duration for + * each output packet. If this flag is not set, the pts and duration will + * be determined by libavcodec from the input frame. + */ +#define AV_CODEC_CAP_DELAY (1 << 5) +/** + * Codec can be fed a final frame with a smaller size. + * This can be used to prevent truncation of the last audio samples. + */ +#define AV_CODEC_CAP_SMALL_LAST_FRAME (1 << 6) + +/** + * Codec can output multiple frames per AVPacket + * Normally demuxers return one frame at a time, demuxers which do not do + * are connected to a parser to split what they return into proper frames. 
+ * This flag is reserved to the very rare category of codecs which have a + * bitstream that cannot be split into frames without timeconsuming + * operations like full decoding. Demuxers carrying such bitstreams thus + * may return multiple frames in a packet. This has many disadvantages like + * prohibiting stream copy in many cases thus it should only be considered + * as a last resort. + */ +#define AV_CODEC_CAP_SUBFRAMES (1 << 8) +/** + * Codec is experimental and is thus avoided in favor of non experimental + * encoders + */ +#define AV_CODEC_CAP_EXPERIMENTAL (1 << 9) +/** + * Codec should fill in channel configuration and samplerate instead of container + */ +#define AV_CODEC_CAP_CHANNEL_CONF (1 << 10) +/** + * Codec supports frame-level multithreading. + */ +#define AV_CODEC_CAP_FRAME_THREADS (1 << 12) +/** + * Codec supports slice-based (or partition-based) multithreading. + */ +#define AV_CODEC_CAP_SLICE_THREADS (1 << 13) +/** + * Codec supports changed parameters at any point. + */ +#define AV_CODEC_CAP_PARAM_CHANGE (1 << 14) +/** + * Codec supports multithreading through a method other than slice- or + * frame-level multithreading. Typically this marks wrappers around + * multithreading-capable external libraries. + */ +#define AV_CODEC_CAP_OTHER_THREADS (1 << 15) +#if FF_API_AUTO_THREADS +#define AV_CODEC_CAP_AUTO_THREADS AV_CODEC_CAP_OTHER_THREADS +#endif +/** + * Audio encoder supports receiving a different number of samples in each call. + */ +#define AV_CODEC_CAP_VARIABLE_FRAME_SIZE (1 << 16) +/** + * Decoder is not a preferred choice for probing. + * This indicates that the decoder is not a good choice for probing. + * It could for example be an expensive to spin up hardware decoder, + * or it could simply not provide a lot of useful information about + * the stream. + * A decoder marked with this flag should only be used as last resort + * choice for probing. 
+ */ +#define AV_CODEC_CAP_AVOID_PROBING (1 << 17) + +#if FF_API_UNUSED_CODEC_CAPS +/** + * Deprecated and unused. Use AVCodecDescriptor.props instead + */ +#define AV_CODEC_CAP_INTRA_ONLY 0x40000000 +/** + * Deprecated and unused. Use AVCodecDescriptor.props instead + */ +#define AV_CODEC_CAP_LOSSLESS 0x80000000 +#endif + +/** + * Codec is backed by a hardware implementation. Typically used to + * identify a non-hwaccel hardware decoder. For information about hwaccels, use + * avcodec_get_hw_config() instead. + */ +#define AV_CODEC_CAP_HARDWARE (1 << 18) + +/** + * Codec is potentially backed by a hardware implementation, but not + * necessarily. This is used instead of AV_CODEC_CAP_HARDWARE, if the + * implementation provides some sort of internal fallback. + */ +#define AV_CODEC_CAP_HYBRID (1 << 19) + +/** + * This codec takes the reordered_opaque field from input AVFrames + * and returns it in the corresponding field in AVCodecContext after + * encoding. + */ +#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE (1 << 20) + +/** + * This encoder can be flushed using avcodec_flush_buffers(). If this flag is + * not set, the encoder must be closed and reopened to ensure that no frames + * remain pending. + */ +#define AV_CODEC_CAP_ENCODER_FLUSH (1 << 21) + +/** + * AVProfile. + */ +typedef struct AVProfile { + int profile; + const char *name; ///< short name for the profile +} AVProfile; + +/** + * AVCodec. + */ +typedef struct AVCodec { + /** + * Name of the codec implementation. + * The name is globally unique among encoders and among decoders (but an + * encoder and a decoder can share the same name). + * This is the primary way to find a codec from the user perspective. + */ + const char *name; + /** + * Descriptive name for the codec, meant to be more human readable than name. + * You should use the NULL_IF_CONFIG_SMALL() macro to define it. + */ + const char *long_name; + enum AVMediaType type; + enum AVCodecID id; + /** + * Codec capabilities. 
+ * see AV_CODEC_CAP_* + */ + int capabilities; + uint8_t max_lowres; ///< maximum value for lowres supported by the decoder + const AVRational *supported_framerates; ///< array of supported framerates, or NULL if any, array is terminated by {0,0} + const enum AVPixelFormat *pix_fmts; ///< array of supported pixel formats, or NULL if unknown, array is terminated by -1 + const int *supported_samplerates; ///< array of supported audio samplerates, or NULL if unknown, array is terminated by 0 + const enum AVSampleFormat *sample_fmts; ///< array of supported sample formats, or NULL if unknown, array is terminated by -1 +#if FF_API_OLD_CHANNEL_LAYOUT + /** + * @deprecated use ch_layouts instead + */ + attribute_deprecated + const uint64_t *channel_layouts; ///< array of support channel layouts, or NULL if unknown. array is terminated by 0 +#endif + const AVClass *priv_class; ///< AVClass for the private context + const AVProfile *profiles; ///< array of recognized profiles, or NULL if unknown, array is terminated by {FF_PROFILE_UNKNOWN} + + /** + * Group name of the codec implementation. + * This is a short symbolic name of the wrapper backing this codec. A + * wrapper uses some kind of external implementation for the codec, such + * as an external library, or a codec implementation provided by the OS or + * the hardware. + * If this field is NULL, this is a builtin, libavcodec native codec. + * If non-NULL, this will be the suffix in AVCodec.name in most cases + * (usually AVCodec.name will be of the form "_"). + */ + const char *wrapper_name; + + /** + * Array of supported channel layouts, terminated with a zeroed layout. + */ + const AVChannelLayout *ch_layouts; +} AVCodec; + +/** + * Iterate over all registered codecs. + * + * @param opaque a pointer where libavcodec will store the iteration state. Must + * point to NULL to start the iteration. 
+ * + * @return the next registered codec or NULL when the iteration is + * finished + */ +const AVCodec *av_codec_iterate(void **opaque); + +/** + * Find a registered decoder with a matching codec ID. + * + * @param id AVCodecID of the requested decoder + * @return A decoder if one was found, NULL otherwise. + */ +const AVCodec *avcodec_find_decoder(enum AVCodecID id); + +/** + * Find a registered decoder with the specified name. + * + * @param name name of the requested decoder + * @return A decoder if one was found, NULL otherwise. + */ +const AVCodec *avcodec_find_decoder_by_name(const char *name); + +/** + * Find a registered encoder with a matching codec ID. + * + * @param id AVCodecID of the requested encoder + * @return An encoder if one was found, NULL otherwise. + */ +const AVCodec *avcodec_find_encoder(enum AVCodecID id); + +/** + * Find a registered encoder with the specified name. + * + * @param name name of the requested encoder + * @return An encoder if one was found, NULL otherwise. + */ +const AVCodec *avcodec_find_encoder_by_name(const char *name); +/** + * @return a non-zero number if codec is an encoder, zero otherwise + */ +int av_codec_is_encoder(const AVCodec *codec); + +/** + * @return a non-zero number if codec is a decoder, zero otherwise + */ +int av_codec_is_decoder(const AVCodec *codec); + +/** + * Return a name for the specified profile, if available. + * + * @param codec the codec that is searched for the given profile + * @param profile the profile value for which a name is requested + * @return A name for the profile if found, NULL otherwise. + */ +const char *av_get_profile_name(const AVCodec *codec, int profile); + +enum { + /** + * The codec supports this format via the hw_device_ctx interface. + * + * When selecting this format, AVCodecContext.hw_device_ctx should + * have been set to a device of the specified type before calling + * avcodec_open2(). 
+ */ + AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX = 0x01, + /** + * The codec supports this format via the hw_frames_ctx interface. + * + * When selecting this format for a decoder, + * AVCodecContext.hw_frames_ctx should be set to a suitable frames + * context inside the get_format() callback. The frames context + * must have been created on a device of the specified type. + * + * When selecting this format for an encoder, + * AVCodecContext.hw_frames_ctx should be set to the context which + * will be used for the input frames before calling avcodec_open2(). + */ + AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX = 0x02, + /** + * The codec supports this format by some internal method. + * + * This format can be selected without any additional configuration - + * no device or frames context is required. + */ + AV_CODEC_HW_CONFIG_METHOD_INTERNAL = 0x04, + /** + * The codec supports this format by some ad-hoc method. + * + * Additional settings and/or function calls are required. See the + * codec-specific documentation for details. (Methods requiring + * this sort of configuration are deprecated and others should be + * used in preference.) + */ + AV_CODEC_HW_CONFIG_METHOD_AD_HOC = 0x08, +}; + +typedef struct AVCodecHWConfig { + /** + * For decoders, a hardware pixel format which that decoder may be + * able to decode to if suitable hardware is available. + * + * For encoders, a pixel format which the encoder may be able to + * accept. If set to AV_PIX_FMT_NONE, this applies to all pixel + * formats supported by the codec. + */ + enum AVPixelFormat pix_fmt; + /** + * Bit set of AV_CODEC_HW_CONFIG_METHOD_* flags, describing the possible + * setup methods which can be used with this configuration. + */ + int methods; + /** + * The device type associated with the configuration. + * + * Must be set for AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX and + * AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX, otherwise unused. 
+ */ + enum AVHWDeviceType device_type; +} AVCodecHWConfig; + +/** + * Retrieve supported hardware configurations for a codec. + * + * Values of index from zero to some maximum return the indexed configuration + * descriptor; all other values return NULL. If the codec does not support + * any hardware configurations then it will always return NULL. + */ +const AVCodecHWConfig *avcodec_get_hw_config(const AVCodec *codec, int index); + +/** + * @} + */ + +#endif /* AVCODEC_CODEC_H */ diff --git a/third-party/cbs/defs.h b/third-party/cbs/defs.h deleted file mode 100644 index 1c5f0ce82c1..00000000000 --- a/third-party/cbs/defs.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_DEFS_H -#define AVCODEC_DEFS_H - -/** - * @file - * @ingroup libavc - * Misc types and constants that do not belong anywhere else. - */ - -#include -#include - -/** - * @ingroup lavc_decoding - * Required number of additionally allocated bytes at the end of the input bitstream for decoding. - * This is mainly needed because some optimized bitstream readers read - * 32 or 64 bit at once and could read over the end.
- * Note: If the first 23 bits of the additional bytes are not 0, then damaged - * MPEG bitstreams could cause overread and segfault. - */ -#define AV_INPUT_BUFFER_PADDING_SIZE 64 - -/** - * Encode extradata length to a buffer. Used by xiph codecs. - * - * @param s buffer to write to; must be at least (v/255+1) bytes long - * @param v size of extradata in bytes - * @return number of bytes written to the buffer. - */ -unsigned int av_xiphlacing(unsigned char *s, unsigned int v); - -#endif // AVCODEC_DEFS_H diff --git a/third-party/cbs/get_bits.h b/third-party/cbs/get_bits.h deleted file mode 100644 index 3c34b0408d5..00000000000 --- a/third-party/cbs/get_bits.h +++ /dev/null @@ -1,831 +0,0 @@ -/* - * Copyright (c) 2004 Michael Niedermayer - * Copyright (c) 2016 Alexandra Hájková - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * bitstream reader API header. - */ - -#ifndef AVCODEC_GET_BITS_H -#define AVCODEC_GET_BITS_H - -#include - -#include -#include -#include -#include - -#include "defs.h" -#include "mathops.h" -#include "vlc.h" - -/* - * Safe bitstream reading: - * optionally, the get_bits API can check to ensure that we - * don't read past input buffer boundaries. 
This is protected - * with CONFIG_SAFE_BITSTREAM_READER at the global level, and - * then below that with UNCHECKED_BITSTREAM_READER at the per- - * decoder level. This means that decoders that check internally - * can "#define UNCHECKED_BITSTREAM_READER 1" to disable - * overread checks. - * Boundary checking causes a minor performance penalty so for - * applications that won't want/need this, it can be disabled - * globally using "#define CONFIG_SAFE_BITSTREAM_READER 0". - */ -#ifndef UNCHECKED_BITSTREAM_READER -#define UNCHECKED_BITSTREAM_READER 0 -#endif - -#ifndef CACHED_BITSTREAM_READER -#define CACHED_BITSTREAM_READER 0 -#endif - -#include "cbs/h2645_parse.h" - -static inline unsigned int get_bits(GetBitContext *s, int n); -static inline void skip_bits(GetBitContext *s, int n); -static inline unsigned int show_bits(GetBitContext *s, int n); - -/* Bitstream reader API docs: - * name - * arbitrary name which is used as prefix for the internal variables - * - * gb - * getbitcontext - * - * OPEN_READER(name, gb) - * load gb into local variables - * - * CLOSE_READER(name, gb) - * store local vars in gb - * - * UPDATE_CACHE(name, gb) - * Refill the internal cache from the bitstream. - * After this call at least MIN_CACHE_BITS will be available. - * - * GET_CACHE(name, gb) - * Will output the contents of the internal cache, - * next bit is MSB of 32 or 64 bits (FIXME 64 bits). - * - * SHOW_UBITS(name, gb, num) - * Will return the next num bits. - * - * SHOW_SBITS(name, gb, num) - * Will return the next num bits and do sign extension. - * - * SKIP_BITS(name, gb, num) - * Will skip over the next num bits. - * Note, this is equivalent to SKIP_CACHE; SKIP_COUNTER. - * - * SKIP_CACHE(name, gb, num) - * Will remove the next num bits from the cache (note SKIP_COUNTER - * MUST be called before UPDATE_CACHE / CLOSE_READER). - * - * SKIP_COUNTER(name, gb, num) - * Will increment the internal bit counter (see SKIP_CACHE & SKIP_BITS). 
- * - * LAST_SKIP_BITS(name, gb, num) - * Like SKIP_BITS, to be used if next call is UPDATE_CACHE or CLOSE_READER. - * - * BITS_LEFT(name, gb) - * Return the number of bits left - * - * For examples see get_bits, show_bits, skip_bits, get_vlc. - */ - -#if CACHED_BITSTREAM_READER -#define MIN_CACHE_BITS 64 -#elif defined LONG_BITSTREAM_READER -#define MIN_CACHE_BITS 32 -#else -#define MIN_CACHE_BITS 25 -#endif - -#if !CACHED_BITSTREAM_READER - -#define OPEN_READER_NOSIZE(name, gb) \ - unsigned int name##_index = (gb)->index; \ - unsigned int av_unused name##_cache - -#if UNCHECKED_BITSTREAM_READER -#define OPEN_READER(name, gb) OPEN_READER_NOSIZE(name, gb) - -#define BITS_AVAILABLE(name, gb) 1 -#else -#define OPEN_READER(name, gb) \ - OPEN_READER_NOSIZE(name, gb); \ - unsigned int name##_size_plus8 = (gb)->size_in_bits_plus8 - -#define BITS_AVAILABLE(name, gb) name##_index < name##_size_plus8 -#endif - -#define CLOSE_READER(name, gb) (gb)->index = name##_index - -#ifdef LONG_BITSTREAM_READER - -#define UPDATE_CACHE_LE(name, gb) name##_cache = \ - AV_RL64((gb)->buffer + (name##_index >> 3)) >> (name##_index & 7) - -#define UPDATE_CACHE_BE(name, gb) name##_cache = \ - AV_RB64((gb)->buffer + (name##_index >> 3)) >> (32 - (name##_index & 7)) - -#else - -#define UPDATE_CACHE_LE(name, gb) name##_cache = \ - AV_RL32((gb)->buffer + (name##_index >> 3)) >> (name##_index & 7) - -#define UPDATE_CACHE_BE(name, gb) name##_cache = \ - AV_RB32((gb)->buffer + (name##_index >> 3)) << (name##_index & 7) - -#endif - - -#ifdef BITSTREAM_READER_LE - -#define UPDATE_CACHE(name, gb) UPDATE_CACHE_LE(name, gb) - -#define SKIP_CACHE(name, gb, num) name##_cache >>= (num) - -#else - -#define UPDATE_CACHE(name, gb) UPDATE_CACHE_BE(name, gb) - -#define SKIP_CACHE(name, gb, num) name##_cache <<= (num) - -#endif - -#if UNCHECKED_BITSTREAM_READER -#define SKIP_COUNTER(name, gb, num) name##_index += (num) -#else -#define SKIP_COUNTER(name, gb, num) \ - name##_index = FFMIN(name##_size_plus8, 
name##_index + (num)) -#endif - -#define BITS_LEFT(name, gb) ((int)((gb)->size_in_bits - name##_index)) - -#define SKIP_BITS(name, gb, num) \ - do { \ - SKIP_CACHE(name, gb, num); \ - SKIP_COUNTER(name, gb, num); \ - } while(0) - -#define LAST_SKIP_BITS(name, gb, num) SKIP_COUNTER(name, gb, num) - -#define SHOW_UBITS_LE(name, gb, num) zero_extend(name##_cache, num) -#define SHOW_SBITS_LE(name, gb, num) sign_extend(name##_cache, num) - -#define SHOW_UBITS_BE(name, gb, num) NEG_USR32(name##_cache, num) -#define SHOW_SBITS_BE(name, gb, num) NEG_SSR32(name##_cache, num) - -#ifdef BITSTREAM_READER_LE -#define SHOW_UBITS(name, gb, num) SHOW_UBITS_LE(name, gb, num) -#define SHOW_SBITS(name, gb, num) SHOW_SBITS_LE(name, gb, num) -#else -#define SHOW_UBITS(name, gb, num) SHOW_UBITS_BE(name, gb, num) -#define SHOW_SBITS(name, gb, num) SHOW_SBITS_BE(name, gb, num) -#endif - -#define GET_CACHE(name, gb) ((uint32_t)name##_cache) - -#endif - -static inline int get_bits_count(const GetBitContext *s) { -#if CACHED_BITSTREAM_READER - return s->index - s->bits_left; -#else - return s->index; -#endif -} - -#if CACHED_BITSTREAM_READER -static inline void refill_32(GetBitContext *s, int is_le) { -#if !UNCHECKED_BITSTREAM_READER - if(s->index >> 3 >= s->buffer_end - s->buffer) - return; -#endif - - if(is_le) - s->cache = (uint64_t)AV_RL32(s->buffer + (s->index >> 3)) << s->bits_left | s->cache; - else - s->cache = s->cache | (uint64_t)AV_RB32(s->buffer + (s->index >> 3)) << (32 - s->bits_left); - s->index += 32; - s->bits_left += 32; -} - -static inline void refill_64(GetBitContext *s, int is_le) { -#if !UNCHECKED_BITSTREAM_READER - if(s->index >> 3 >= s->buffer_end - s->buffer) - return; -#endif - - if(is_le) - s->cache = AV_RL64(s->buffer + (s->index >> 3)); - else - s->cache = AV_RB64(s->buffer + (s->index >> 3)); - s->index += 64; - s->bits_left = 64; -} - -static inline uint64_t get_val(GetBitContext *s, unsigned n, int is_le) { - uint64_t ret; - av_assert2(n > 0 && n <= 63); - 
if(is_le) { - ret = s->cache & ((UINT64_C(1) << n) - 1); - s->cache >>= n; - } - else { - ret = s->cache >> (64 - n); - s->cache <<= n; - } - s->bits_left -= n; - return ret; -} - -static inline unsigned show_val(const GetBitContext *s, unsigned n) { -#ifdef BITSTREAM_READER_LE - return s->cache & ((UINT64_C(1) << n) - 1); -#else - return s->cache >> (64 - n); -#endif -} -#endif - -/** - * Skips the specified number of bits. - * @param n the number of bits to skip, - * For the UNCHECKED_BITSTREAM_READER this must not cause the distance - * from the start to overflow int32_t. Staying within the bitstream + padding - * is sufficient, too. - */ -static inline void skip_bits_long(GetBitContext *s, int n) { -#if CACHED_BITSTREAM_READER - skip_bits(s, n); -#else -#if UNCHECKED_BITSTREAM_READER - s->index += n; -#else - s->index += av_clip(n, -s->index, s->size_in_bits_plus8 - s->index); -#endif -#endif -} - -#if CACHED_BITSTREAM_READER -static inline void skip_remaining(GetBitContext *s, unsigned n) { -#ifdef BITSTREAM_READER_LE - s->cache >>= n; -#else - s->cache <<= n; -#endif - s->bits_left -= n; -} -#endif - -/** - * Read MPEG-1 dc-style VLC (sign bit + mantissa with no MSB). 
- * if MSB not set it is negative - * @param n length in bits - */ -static inline int get_xbits(GetBitContext *s, int n) { -#if CACHED_BITSTREAM_READER - int32_t cache = show_bits(s, 32); - int sign = ~cache >> 31; - skip_remaining(s, n); - - return ((((uint32_t)(sign ^ cache)) >> (32 - n)) ^ sign) - sign; -#else - register int sign; - register int32_t cache; - OPEN_READER(re, s); - av_assert2(n > 0 && n <= 25); - UPDATE_CACHE(re, s); - cache = GET_CACHE(re, s); - sign = ~cache >> 31; - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); - return (NEG_USR32(sign ^ cache, n) ^ sign) - sign; -#endif -} - -#if !CACHED_BITSTREAM_READER -static inline int get_xbits_le(GetBitContext *s, int n) { - register int sign; - register int32_t cache; - OPEN_READER(re, s); - av_assert2(n > 0 && n <= 25); - UPDATE_CACHE_LE(re, s); - cache = GET_CACHE(re, s); - sign = sign_extend(~cache, n) >> 31; - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); - return (zero_extend(sign ^ cache, n) ^ sign) - sign; -} -#endif - -static inline int get_sbits(GetBitContext *s, int n) { - register int tmp; -#if CACHED_BITSTREAM_READER - av_assert2(n > 0 && n <= 25); - tmp = sign_extend(get_bits(s, n), n); -#else - OPEN_READER(re, s); - av_assert2(n > 0 && n <= 25); - UPDATE_CACHE(re, s); - tmp = SHOW_SBITS(re, s, n); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); -#endif - return tmp; -} - -/** - * Read 1-25 bits. 
- */ -static inline unsigned int get_bits(GetBitContext *s, int n) { - register unsigned int tmp; -#if CACHED_BITSTREAM_READER - - av_assert2(n > 0 && n <= 32); - if(n > s->bits_left) { -#ifdef BITSTREAM_READER_LE - refill_32(s, 1); -#else - refill_32(s, 0); -#endif - if(s->bits_left < 32) - s->bits_left = n; - } - -#ifdef BITSTREAM_READER_LE - tmp = get_val(s, n, 1); -#else - tmp = get_val(s, n, 0); -#endif -#else - OPEN_READER(re, s); - av_assert2(n > 0 && n <= 25); - UPDATE_CACHE(re, s); - tmp = SHOW_UBITS(re, s, n); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); -#endif - av_assert2(tmp < UINT64_C(1) << n); - return tmp; -} - -/** - * Read 0-25 bits. - */ -static av_always_inline int get_bitsz(GetBitContext *s, int n) { - return n ? get_bits(s, n) : 0; -} - -static inline unsigned int get_bits_le(GetBitContext *s, int n) { -#if CACHED_BITSTREAM_READER - av_assert2(n > 0 && n <= 32); - if(n > s->bits_left) { - refill_32(s, 1); - if(s->bits_left < 32) - s->bits_left = n; - } - - return get_val(s, n, 1); -#else - register int tmp; - OPEN_READER(re, s); - av_assert2(n > 0 && n <= 25); - UPDATE_CACHE_LE(re, s); - tmp = SHOW_UBITS_LE(re, s, n); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); - return tmp; -#endif -} - -/** - * Show 1-25 bits. 
- */ -static inline unsigned int show_bits(GetBitContext *s, int n) { - register unsigned int tmp; -#if CACHED_BITSTREAM_READER - if(n > s->bits_left) -#ifdef BITSTREAM_READER_LE - refill_32(s, 1); -#else - refill_32(s, 0); -#endif - - tmp = show_val(s, n); -#else - OPEN_READER_NOSIZE(re, s); - av_assert2(n > 0 && n <= 25); - UPDATE_CACHE(re, s); - tmp = SHOW_UBITS(re, s, n); -#endif - return tmp; -} - -static inline void skip_bits(GetBitContext *s, int n) { -#if CACHED_BITSTREAM_READER - if(n < s->bits_left) - skip_remaining(s, n); - else { - n -= s->bits_left; - s->cache = 0; - s->bits_left = 0; - - if(n >= 64) { - unsigned skip = (n / 8) * 8; - - n -= skip; - s->index += skip; - } -#ifdef BITSTREAM_READER_LE - refill_64(s, 1); -#else - refill_64(s, 0); -#endif - if(n) - skip_remaining(s, n); - } -#else - OPEN_READER(re, s); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); -#endif -} - -static inline unsigned int get_bits1(GetBitContext *s) { -#if CACHED_BITSTREAM_READER - if(!s->bits_left) -#ifdef BITSTREAM_READER_LE - refill_64(s, 1); -#else - refill_64(s, 0); -#endif - -#ifdef BITSTREAM_READER_LE - return get_val(s, 1, 1); -#else - return get_val(s, 1, 0); -#endif -#else - unsigned int index = s->index; - uint8_t result = s->buffer[index >> 3]; -#ifdef BITSTREAM_READER_LE - result >>= index & 7; - result &= 1; -#else - result <<= index & 7; - result >>= 8 - 1; -#endif -#if !UNCHECKED_BITSTREAM_READER - if(s->index < s->size_in_bits_plus8) -#endif - index++; - s->index = index; - - return result; -#endif -} - -static inline unsigned int show_bits1(GetBitContext *s) { - return show_bits(s, 1); -} - -static inline void skip_bits1(GetBitContext *s) { - skip_bits(s, 1); -} - -/** - * Read 0-32 bits. 
- */ -static inline unsigned int get_bits_long(GetBitContext *s, int n) { - av_assert2(n >= 0 && n <= 32); - if(!n) { - return 0; -#if CACHED_BITSTREAM_READER - } - return get_bits(s, n); -#else - } - else if(n <= MIN_CACHE_BITS) { - return get_bits(s, n); - } - else { -#ifdef BITSTREAM_READER_LE - unsigned ret = get_bits(s, 16); - return ret | (get_bits(s, n - 16) << 16); -#else - unsigned ret = get_bits(s, 16) << (n - 16); - return ret | get_bits(s, n - 16); -#endif - } -#endif -} - -/** - * Read 0-64 bits. - */ -static inline uint64_t get_bits64(GetBitContext *s, int n) { - if(n <= 32) { - return get_bits_long(s, n); - } - else { -#ifdef BITSTREAM_READER_LE - uint64_t ret = get_bits_long(s, 32); - return ret | (uint64_t)get_bits_long(s, n - 32) << 32; -#else - uint64_t ret = (uint64_t)get_bits_long(s, n - 32) << 32; - return ret | get_bits_long(s, 32); -#endif - } -} - -/** - * Read 0-32 bits as a signed integer. - */ -static inline int get_sbits_long(GetBitContext *s, int n) { - // sign_extend(x, 0) is undefined - if(!n) - return 0; - - return sign_extend(get_bits_long(s, n), n); -} - -/** - * Show 0-32 bits. 
- */ -static inline unsigned int show_bits_long(GetBitContext *s, int n) { - if(n <= MIN_CACHE_BITS) { - return show_bits(s, n); - } - else { - GetBitContext gb = *s; - return get_bits_long(&gb, n); - } -} - -static inline int check_marker(void *logctx, GetBitContext *s, const char *msg) { - int bit = get_bits1(s); - if(!bit) - av_log(logctx, AV_LOG_INFO, "Marker bit missing at %d of %d %s\n", - get_bits_count(s) - 1, s->size_in_bits, msg); - - return bit; -} - -static inline int init_get_bits_xe(GetBitContext *s, const uint8_t *buffer, - int bit_size, int is_le) { - int buffer_size; - int ret = 0; - - if(bit_size >= INT_MAX - FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE * 8) || bit_size < 0 || !buffer) { - bit_size = 0; - buffer = NULL; - ret = AVERROR_INVALIDDATA; - } - - buffer_size = (bit_size + 7) >> 3; - - s->buffer = buffer; - s->size_in_bits = bit_size; - s->size_in_bits_plus8 = bit_size + 8; - s->buffer_end = buffer + buffer_size; - s->index = 0; - -#if CACHED_BITSTREAM_READER - s->cache = 0; - s->bits_left = 0; - refill_64(s, is_le); -#endif - - return ret; -} - -/** - * Initialize GetBitContext. - * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes - * larger than the actual read bits because some optimized bitstream - * readers read 32 or 64 bit at once and could read over the end - * @param bit_size the size of the buffer in bits - * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. - */ -static inline int init_get_bits(GetBitContext *s, const uint8_t *buffer, - int bit_size) { -#ifdef BITSTREAM_READER_LE - return init_get_bits_xe(s, buffer, bit_size, 1); -#else - return init_get_bits_xe(s, buffer, bit_size, 0); -#endif -} - -/** - * Initialize GetBitContext. 
- * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes - * larger than the actual read bits because some optimized bitstream - * readers read 32 or 64 bit at once and could read over the end - * @param byte_size the size of the buffer in bytes - * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. - */ -static inline int init_get_bits8(GetBitContext *s, const uint8_t *buffer, - int byte_size) { - if(byte_size > INT_MAX / 8 || byte_size < 0) - byte_size = -1; - return init_get_bits(s, buffer, byte_size * 8); -} - -static inline int init_get_bits8_le(GetBitContext *s, const uint8_t *buffer, - int byte_size) { - if(byte_size > INT_MAX / 8 || byte_size < 0) - byte_size = -1; - return init_get_bits_xe(s, buffer, byte_size * 8, 1); -} - -static inline const uint8_t *align_get_bits(GetBitContext *s) { - int n = -get_bits_count(s) & 7; - if(n) - skip_bits(s, n); - return s->buffer + (s->index >> 3); -} - -/** - * If the vlc code is invalid and max_depth=1, then no bits will be removed. - * If the vlc code is invalid and max_depth>1, then the number of bits removed - * is undefined. 
- */ -#define GET_VLC(code, name, gb, table, bits, max_depth) \ - do { \ - int n, nb_bits; \ - unsigned int index; \ - \ - index = SHOW_UBITS(name, gb, bits); \ - code = table[index][0]; \ - n = table[index][1]; \ - \ - if(max_depth > 1 && n < 0) { \ - LAST_SKIP_BITS(name, gb, bits); \ - UPDATE_CACHE(name, gb); \ - \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + code; \ - code = table[index][0]; \ - n = table[index][1]; \ - if(max_depth > 2 && n < 0) { \ - LAST_SKIP_BITS(name, gb, nb_bits); \ - UPDATE_CACHE(name, gb); \ - \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + code; \ - code = table[index][0]; \ - n = table[index][1]; \ - } \ - } \ - SKIP_BITS(name, gb, n); \ - } while(0) - -#define GET_RL_VLC(level, run, name, gb, table, bits, \ - max_depth, need_update) \ - do { \ - int n, nb_bits; \ - unsigned int index; \ - \ - index = SHOW_UBITS(name, gb, bits); \ - level = table[index].level; \ - n = table[index].len; \ - \ - if(max_depth > 1 && n < 0) { \ - SKIP_BITS(name, gb, bits); \ - if(need_update) { \ - UPDATE_CACHE(name, gb); \ - } \ - \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + level; \ - level = table[index].level; \ - n = table[index].len; \ - if(max_depth > 2 && n < 0) { \ - LAST_SKIP_BITS(name, gb, nb_bits); \ - if(need_update) { \ - UPDATE_CACHE(name, gb); \ - } \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + level; \ - level = table[index].level; \ - n = table[index].len; \ - } \ - } \ - run = table[index].run; \ - SKIP_BITS(name, gb, n); \ - } while(0) - -/* Return the LUT element for the given bitstream configuration. */ -static inline int set_idx(GetBitContext *s, int code, int *n, int *nb_bits, - VLC_TYPE (*table)[2]) { - unsigned idx; - - *nb_bits = -*n; - idx = show_bits(s, *nb_bits) + code; - *n = table[idx][1]; - - return table[idx][0]; -} - -/** - * Parse a vlc code. 
- * @param bits is the number of bits which will be read at once, must be - * identical to nb_bits in init_vlc() - * @param max_depth is the number of times bits bits must be read to completely - * read the longest vlc code - * = (max_vlc_length + bits - 1) / bits - * @returns the code parsed or -1 if no vlc matches - */ -static av_always_inline int get_vlc2(GetBitContext *s, VLC_TYPE (*table)[2], - int bits, int max_depth) { -#if CACHED_BITSTREAM_READER - int nb_bits; - unsigned idx = show_bits(s, bits); - int code = table[idx][0]; - int n = table[idx][1]; - - if(max_depth > 1 && n < 0) { - skip_remaining(s, bits); - code = set_idx(s, code, &n, &nb_bits, table); - if(max_depth > 2 && n < 0) { - skip_remaining(s, nb_bits); - code = set_idx(s, code, &n, &nb_bits, table); - } - } - skip_remaining(s, n); - - return code; -#else - int code; - - OPEN_READER(re, s); - UPDATE_CACHE(re, s); - - GET_VLC(code, re, s, table, bits, max_depth); - - CLOSE_READER(re, s); - - return code; -#endif -} - -static inline int decode012(GetBitContext *gb) { - int n; - n = get_bits1(gb); - if(n == 0) - return 0; - else - return get_bits1(gb) + 1; -} - -static inline int decode210(GetBitContext *gb) { - if(get_bits1(gb)) - return 0; - else - return 2 - get_bits1(gb); -} - -static inline int get_bits_left(GetBitContext *gb) { - return gb->size_in_bits - get_bits_count(gb); -} - -static inline int skip_1stop_8data_bits(GetBitContext *gb) { - if(get_bits_left(gb) <= 0) - return AVERROR_INVALIDDATA; - - while(get_bits1(gb)) { - skip_bits(gb, 8); - if(get_bits_left(gb) <= 0) - return AVERROR_INVALIDDATA; - } - - return 0; -} - -#endif /* AVCODEC_GET_BITS_H */ diff --git a/third-party/cbs/h2645_parse.c b/third-party/cbs/h2645_parse.c index a401ca577e6..ff65a59f0c1 100644 --- a/third-party/cbs/h2645_parse.c +++ b/third-party/cbs/h2645_parse.c @@ -20,516 +20,525 @@ #include -#include -#include +// [manual] Changed include path +#include "cbs/config.h" -#include "cbs/h264.h" -#include 
"cbs/h2645_parse.h" -#include "cbs/hevc.h" +#include "intmath.h" +#include "libavutil/intreadwrite.h" +#include "libavutil/mem.h" +// [manual] Changed include path #include "bytestream.h" -#include "config.h" -#include "get_bits.h" -#include "intmath.h" +#include "cbs/hevc.h" +#include "cbs/h264.h" +#include "cbs/h2645_parse.h" int ff_h2645_extract_rbsp(const uint8_t *src, int length, - H2645RBSP *rbsp, H2645NAL *nal, int small_padding) { - int i, si, di; - uint8_t *dst; - - nal->skipped_bytes = 0; -#define STARTCODE_TEST \ - if(i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) { \ - if(src[i + 2] != 3 && src[i + 2] != 0) { \ - /* startcode, so we must be past the end */ \ - length = i; \ - } \ - break; \ - } + H2645RBSP *rbsp, H2645NAL *nal, int small_padding) +{ + int i, si, di; + uint8_t *dst; + + nal->skipped_bytes = 0; +#define STARTCODE_TEST \ + if (i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) { \ + if (src[i + 2] != 3 && src[i + 2] != 0) { \ + /* startcode, so we must be past the end */ \ + length = i; \ + } \ + break; \ + } #if HAVE_FAST_UNALIGNED -#define FIND_FIRST_ZERO \ - if(i > 0 && !src[i]) \ - i--; \ - while(src[i]) \ - i++ +#define FIND_FIRST_ZERO \ + if (i > 0 && !src[i]) \ + i--; \ + while (src[i]) \ + i++ #if HAVE_FAST_64BIT - for(i = 0; i + 1 < length; i += 9) { - if(!((~AV_RN64(src + i) & - (AV_RN64(src + i) - 0x0100010001000101ULL)) & - 0x8000800080008080ULL)) - continue; - FIND_FIRST_ZERO; - STARTCODE_TEST; - i -= 7; - } + for (i = 0; i + 1 < length; i += 9) { + if (!((~AV_RN64(src + i) & + (AV_RN64(src + i) - 0x0100010001000101ULL)) & + 0x8000800080008080ULL)) + continue; + FIND_FIRST_ZERO; + STARTCODE_TEST; + i -= 7; + } #else - for(i = 0; i + 1 < length; i += 5) { - if(!((~AV_RN32(src + i) & - (AV_RN32(src + i) - 0x01000101U)) & - 0x80008080U)) - continue; - FIND_FIRST_ZERO; - STARTCODE_TEST; - i -= 3; - } + for (i = 0; i + 1 < length; i += 5) { + if (!((~AV_RN32(src + i) & + (AV_RN32(src + i) - 0x01000101U)) & + 0x80008080U)) 
+ continue; + FIND_FIRST_ZERO; + STARTCODE_TEST; + i -= 3; + } #endif /* HAVE_FAST_64BIT */ #else - for(i = 0; i + 1 < length; i += 2) { - if(src[i]) - continue; - if(i > 0 && src[i - 1] == 0) - i--; - STARTCODE_TEST; - } + for (i = 0; i + 1 < length; i += 2) { + if (src[i]) + continue; + if (i > 0 && src[i - 1] == 0) + i--; + STARTCODE_TEST; + } #endif /* HAVE_FAST_UNALIGNED */ - if(i >= length - 1 && small_padding) { // no escaped 0 - nal->data = - nal->raw_data = src; - nal->size = - nal->raw_size = length; - return length; - } - else if(i > length) - i = length; - - nal->rbsp_buffer = &rbsp->rbsp_buffer[rbsp->rbsp_buffer_size]; - dst = nal->rbsp_buffer; - - memcpy(dst, src, i); - si = di = i; - while(si + 2 < length) { - // remove escapes (very rare 1:2^22) - if(src[si + 2] > 3) { - dst[di++] = src[si++]; - dst[di++] = src[si++]; - } - else if(src[si] == 0 && src[si + 1] == 0 && src[si + 2] != 0) { - if(src[si + 2] == 3) { // escape - dst[di++] = 0; - dst[di++] = 0; - si += 3; - - if(nal->skipped_bytes_pos) { - nal->skipped_bytes++; - if(nal->skipped_bytes_pos_size < nal->skipped_bytes) { - nal->skipped_bytes_pos_size *= 2; - av_assert0(nal->skipped_bytes_pos_size >= nal->skipped_bytes); - av_reallocp_array(&nal->skipped_bytes_pos, - nal->skipped_bytes_pos_size, - sizeof(*nal->skipped_bytes_pos)); - if(!nal->skipped_bytes_pos) { - nal->skipped_bytes_pos_size = 0; - return AVERROR(ENOMEM); - } - } - if(nal->skipped_bytes_pos) - nal->skipped_bytes_pos[nal->skipped_bytes - 1] = di - 1; + if (i >= length - 1 && small_padding) { // no escaped 0 + nal->data = + nal->raw_data = src; + nal->size = + nal->raw_size = length; + return length; + } else if (i > length) + i = length; + + dst = &rbsp->rbsp_buffer[rbsp->rbsp_buffer_size]; + + memcpy(dst, src, i); + si = di = i; + while (si + 2 < length) { + // remove escapes (very rare 1:2^22) + if (src[si + 2] > 3) { + dst[di++] = src[si++]; + dst[di++] = src[si++]; + } else if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] 
!= 0) { + if (src[si + 2] == 3) { // escape + dst[di++] = 0; + dst[di++] = 0; + si += 3; + + if (nal->skipped_bytes_pos) { + nal->skipped_bytes++; + if (nal->skipped_bytes_pos_size < nal->skipped_bytes) { + nal->skipped_bytes_pos_size *= 2; + av_assert0(nal->skipped_bytes_pos_size >= nal->skipped_bytes); + av_reallocp_array(&nal->skipped_bytes_pos, + nal->skipped_bytes_pos_size, + sizeof(*nal->skipped_bytes_pos)); + if (!nal->skipped_bytes_pos) { + nal->skipped_bytes_pos_size = 0; + return AVERROR(ENOMEM); + } + } + if (nal->skipped_bytes_pos) + nal->skipped_bytes_pos[nal->skipped_bytes-1] = di - 1; + } + continue; + } else // next start code + goto nsc; } - continue; - } - else // next start code - goto nsc; - } - dst[di++] = src[si++]; - } - while(si < length) - dst[di++] = src[si++]; + dst[di++] = src[si++]; + } + while (si < length) + dst[di++] = src[si++]; nsc: - memset(dst + di, 0, AV_INPUT_BUFFER_PADDING_SIZE); + memset(dst + di, 0, AV_INPUT_BUFFER_PADDING_SIZE); - nal->data = dst; - nal->size = di; - nal->raw_data = src; - nal->raw_size = si; - rbsp->rbsp_buffer_size += si; + nal->data = dst; + nal->size = di; + nal->raw_data = src; + nal->raw_size = si; + rbsp->rbsp_buffer_size += si; - return si; + return si; } static const char *const hevc_nal_type_name[64] = { - "TRAIL_N", // HEVC_NAL_TRAIL_N - "TRAIL_R", // HEVC_NAL_TRAIL_R - "TSA_N", // HEVC_NAL_TSA_N - "TSA_R", // HEVC_NAL_TSA_R - "STSA_N", // HEVC_NAL_STSA_N - "STSA_R", // HEVC_NAL_STSA_R - "RADL_N", // HEVC_NAL_RADL_N - "RADL_R", // HEVC_NAL_RADL_R - "RASL_N", // HEVC_NAL_RASL_N - "RASL_R", // HEVC_NAL_RASL_R - "RSV_VCL_N10", // HEVC_NAL_VCL_N10 - "RSV_VCL_R11", // HEVC_NAL_VCL_R11 - "RSV_VCL_N12", // HEVC_NAL_VCL_N12 - "RSV_VLC_R13", // HEVC_NAL_VCL_R13 - "RSV_VCL_N14", // HEVC_NAL_VCL_N14 - "RSV_VCL_R15", // HEVC_NAL_VCL_R15 - "BLA_W_LP", // HEVC_NAL_BLA_W_LP - "BLA_W_RADL", // HEVC_NAL_BLA_W_RADL - "BLA_N_LP", // HEVC_NAL_BLA_N_LP - "IDR_W_RADL", // HEVC_NAL_IDR_W_RADL - "IDR_N_LP", // 
HEVC_NAL_IDR_N_LP - "CRA_NUT", // HEVC_NAL_CRA_NUT - "RSV_IRAP_VCL22", // HEVC_NAL_RSV_IRAP_VCL22 - "RSV_IRAP_VCL23", // HEVC_NAL_RSV_IRAP_VCL23 - "RSV_VCL24", // HEVC_NAL_RSV_VCL24 - "RSV_VCL25", // HEVC_NAL_RSV_VCL25 - "RSV_VCL26", // HEVC_NAL_RSV_VCL26 - "RSV_VCL27", // HEVC_NAL_RSV_VCL27 - "RSV_VCL28", // HEVC_NAL_RSV_VCL28 - "RSV_VCL29", // HEVC_NAL_RSV_VCL29 - "RSV_VCL30", // HEVC_NAL_RSV_VCL30 - "RSV_VCL31", // HEVC_NAL_RSV_VCL31 - "VPS", // HEVC_NAL_VPS - "SPS", // HEVC_NAL_SPS - "PPS", // HEVC_NAL_PPS - "AUD", // HEVC_NAL_AUD - "EOS_NUT", // HEVC_NAL_EOS_NUT - "EOB_NUT", // HEVC_NAL_EOB_NUT - "FD_NUT", // HEVC_NAL_FD_NUT - "SEI_PREFIX", // HEVC_NAL_SEI_PREFIX - "SEI_SUFFIX", // HEVC_NAL_SEI_SUFFIX - "RSV_NVCL41", // HEVC_NAL_RSV_NVCL41 - "RSV_NVCL42", // HEVC_NAL_RSV_NVCL42 - "RSV_NVCL43", // HEVC_NAL_RSV_NVCL43 - "RSV_NVCL44", // HEVC_NAL_RSV_NVCL44 - "RSV_NVCL45", // HEVC_NAL_RSV_NVCL45 - "RSV_NVCL46", // HEVC_NAL_RSV_NVCL46 - "RSV_NVCL47", // HEVC_NAL_RSV_NVCL47 - "UNSPEC48", // HEVC_NAL_UNSPEC48 - "UNSPEC49", // HEVC_NAL_UNSPEC49 - "UNSPEC50", // HEVC_NAL_UNSPEC50 - "UNSPEC51", // HEVC_NAL_UNSPEC51 - "UNSPEC52", // HEVC_NAL_UNSPEC52 - "UNSPEC53", // HEVC_NAL_UNSPEC53 - "UNSPEC54", // HEVC_NAL_UNSPEC54 - "UNSPEC55", // HEVC_NAL_UNSPEC55 - "UNSPEC56", // HEVC_NAL_UNSPEC56 - "UNSPEC57", // HEVC_NAL_UNSPEC57 - "UNSPEC58", // HEVC_NAL_UNSPEC58 - "UNSPEC59", // HEVC_NAL_UNSPEC59 - "UNSPEC60", // HEVC_NAL_UNSPEC60 - "UNSPEC61", // HEVC_NAL_UNSPEC61 - "UNSPEC62", // HEVC_NAL_UNSPEC62 - "UNSPEC63", // HEVC_NAL_UNSPEC63 + "TRAIL_N", // HEVC_NAL_TRAIL_N + "TRAIL_R", // HEVC_NAL_TRAIL_R + "TSA_N", // HEVC_NAL_TSA_N + "TSA_R", // HEVC_NAL_TSA_R + "STSA_N", // HEVC_NAL_STSA_N + "STSA_R", // HEVC_NAL_STSA_R + "RADL_N", // HEVC_NAL_RADL_N + "RADL_R", // HEVC_NAL_RADL_R + "RASL_N", // HEVC_NAL_RASL_N + "RASL_R", // HEVC_NAL_RASL_R + "RSV_VCL_N10", // HEVC_NAL_VCL_N10 + "RSV_VCL_R11", // HEVC_NAL_VCL_R11 + "RSV_VCL_N12", // HEVC_NAL_VCL_N12 + "RSV_VLC_R13", // 
HEVC_NAL_VCL_R13 + "RSV_VCL_N14", // HEVC_NAL_VCL_N14 + "RSV_VCL_R15", // HEVC_NAL_VCL_R15 + "BLA_W_LP", // HEVC_NAL_BLA_W_LP + "BLA_W_RADL", // HEVC_NAL_BLA_W_RADL + "BLA_N_LP", // HEVC_NAL_BLA_N_LP + "IDR_W_RADL", // HEVC_NAL_IDR_W_RADL + "IDR_N_LP", // HEVC_NAL_IDR_N_LP + "CRA_NUT", // HEVC_NAL_CRA_NUT + "RSV_IRAP_VCL22", // HEVC_NAL_RSV_IRAP_VCL22 + "RSV_IRAP_VCL23", // HEVC_NAL_RSV_IRAP_VCL23 + "RSV_VCL24", // HEVC_NAL_RSV_VCL24 + "RSV_VCL25", // HEVC_NAL_RSV_VCL25 + "RSV_VCL26", // HEVC_NAL_RSV_VCL26 + "RSV_VCL27", // HEVC_NAL_RSV_VCL27 + "RSV_VCL28", // HEVC_NAL_RSV_VCL28 + "RSV_VCL29", // HEVC_NAL_RSV_VCL29 + "RSV_VCL30", // HEVC_NAL_RSV_VCL30 + "RSV_VCL31", // HEVC_NAL_RSV_VCL31 + "VPS", // HEVC_NAL_VPS + "SPS", // HEVC_NAL_SPS + "PPS", // HEVC_NAL_PPS + "AUD", // HEVC_NAL_AUD + "EOS_NUT", // HEVC_NAL_EOS_NUT + "EOB_NUT", // HEVC_NAL_EOB_NUT + "FD_NUT", // HEVC_NAL_FD_NUT + "SEI_PREFIX", // HEVC_NAL_SEI_PREFIX + "SEI_SUFFIX", // HEVC_NAL_SEI_SUFFIX + "RSV_NVCL41", // HEVC_NAL_RSV_NVCL41 + "RSV_NVCL42", // HEVC_NAL_RSV_NVCL42 + "RSV_NVCL43", // HEVC_NAL_RSV_NVCL43 + "RSV_NVCL44", // HEVC_NAL_RSV_NVCL44 + "RSV_NVCL45", // HEVC_NAL_RSV_NVCL45 + "RSV_NVCL46", // HEVC_NAL_RSV_NVCL46 + "RSV_NVCL47", // HEVC_NAL_RSV_NVCL47 + "UNSPEC48", // HEVC_NAL_UNSPEC48 + "UNSPEC49", // HEVC_NAL_UNSPEC49 + "UNSPEC50", // HEVC_NAL_UNSPEC50 + "UNSPEC51", // HEVC_NAL_UNSPEC51 + "UNSPEC52", // HEVC_NAL_UNSPEC52 + "UNSPEC53", // HEVC_NAL_UNSPEC53 + "UNSPEC54", // HEVC_NAL_UNSPEC54 + "UNSPEC55", // HEVC_NAL_UNSPEC55 + "UNSPEC56", // HEVC_NAL_UNSPEC56 + "UNSPEC57", // HEVC_NAL_UNSPEC57 + "UNSPEC58", // HEVC_NAL_UNSPEC58 + "UNSPEC59", // HEVC_NAL_UNSPEC59 + "UNSPEC60", // HEVC_NAL_UNSPEC60 + "UNSPEC61", // HEVC_NAL_UNSPEC61 + "UNSPEC62", // HEVC_NAL_UNSPEC62 + "UNSPEC63", // HEVC_NAL_UNSPEC63 }; -static const char *hevc_nal_unit_name(int nal_type) { - av_assert0(nal_type >= 0 && nal_type < 64); - return hevc_nal_type_name[nal_type]; +static const char *hevc_nal_unit_name(int 
nal_type) +{ + av_assert0(nal_type >= 0 && nal_type < 64); + return hevc_nal_type_name[nal_type]; } static const char *const h264_nal_type_name[32] = { - "Unspecified 0", //H264_NAL_UNSPECIFIED - "Coded slice of a non-IDR picture", // H264_NAL_SLICE - "Coded slice data partition A", // H264_NAL_DPA - "Coded slice data partition B", // H264_NAL_DPB - "Coded slice data partition C", // H264_NAL_DPC - "IDR", // H264_NAL_IDR_SLICE - "SEI", // H264_NAL_SEI - "SPS", // H264_NAL_SPS - "PPS", // H264_NAL_PPS - "AUD", // H264_NAL_AUD - "End of sequence", // H264_NAL_END_SEQUENCE - "End of stream", // H264_NAL_END_STREAM - "Filler data", // H264_NAL_FILLER_DATA - "SPS extension", // H264_NAL_SPS_EXT - "Prefix", // H264_NAL_PREFIX - "Subset SPS", // H264_NAL_SUB_SPS - "Depth parameter set", // H264_NAL_DPS - "Reserved 17", // H264_NAL_RESERVED17 - "Reserved 18", // H264_NAL_RESERVED18 - "Auxiliary coded picture without partitioning", // H264_NAL_AUXILIARY_SLICE - "Slice extension", // H264_NAL_EXTEN_SLICE - "Slice extension for a depth view or a 3D-AVC texture view", // H264_NAL_DEPTH_EXTEN_SLICE - "Reserved 22", // H264_NAL_RESERVED22 - "Reserved 23", // H264_NAL_RESERVED23 - "Unspecified 24", // H264_NAL_UNSPECIFIED24 - "Unspecified 25", // H264_NAL_UNSPECIFIED25 - "Unspecified 26", // H264_NAL_UNSPECIFIED26 - "Unspecified 27", // H264_NAL_UNSPECIFIED27 - "Unspecified 28", // H264_NAL_UNSPECIFIED28 - "Unspecified 29", // H264_NAL_UNSPECIFIED29 - "Unspecified 30", // H264_NAL_UNSPECIFIED30 - "Unspecified 31", // H264_NAL_UNSPECIFIED31 + "Unspecified 0", //H264_NAL_UNSPECIFIED + "Coded slice of a non-IDR picture", // H264_NAL_SLICE + "Coded slice data partition A", // H264_NAL_DPA + "Coded slice data partition B", // H264_NAL_DPB + "Coded slice data partition C", // H264_NAL_DPC + "IDR", // H264_NAL_IDR_SLICE + "SEI", // H264_NAL_SEI + "SPS", // H264_NAL_SPS + "PPS", // H264_NAL_PPS + "AUD", // H264_NAL_AUD + "End of sequence", // H264_NAL_END_SEQUENCE + "End of stream", // 
H264_NAL_END_STREAM + "Filler data", // H264_NAL_FILLER_DATA + "SPS extension", // H264_NAL_SPS_EXT + "Prefix", // H264_NAL_PREFIX + "Subset SPS", // H264_NAL_SUB_SPS + "Depth parameter set", // H264_NAL_DPS + "Reserved 17", // H264_NAL_RESERVED17 + "Reserved 18", // H264_NAL_RESERVED18 + "Auxiliary coded picture without partitioning", // H264_NAL_AUXILIARY_SLICE + "Slice extension", // H264_NAL_EXTEN_SLICE + "Slice extension for a depth view or a 3D-AVC texture view", // H264_NAL_DEPTH_EXTEN_SLICE + "Reserved 22", // H264_NAL_RESERVED22 + "Reserved 23", // H264_NAL_RESERVED23 + "Unspecified 24", // H264_NAL_UNSPECIFIED24 + "Unspecified 25", // H264_NAL_UNSPECIFIED25 + "Unspecified 26", // H264_NAL_UNSPECIFIED26 + "Unspecified 27", // H264_NAL_UNSPECIFIED27 + "Unspecified 28", // H264_NAL_UNSPECIFIED28 + "Unspecified 29", // H264_NAL_UNSPECIFIED29 + "Unspecified 30", // H264_NAL_UNSPECIFIED30 + "Unspecified 31", // H264_NAL_UNSPECIFIED31 }; -static const char *h264_nal_unit_name(int nal_type) { - av_assert0(nal_type >= 0 && nal_type < 32); - return h264_nal_type_name[nal_type]; +static const char *h264_nal_unit_name(int nal_type) +{ + av_assert0(nal_type >= 0 && nal_type < 32); + return h264_nal_type_name[nal_type]; } -static int get_bit_length(H2645NAL *nal, int skip_trailing_zeros) { - int size = nal->size; - int v; - - while(skip_trailing_zeros && size > 0 && nal->data[size - 1] == 0) - size--; - - if(!size) - return 0; - - v = nal->data[size - 1]; - - if(size > INT_MAX / 8) - return AVERROR(ERANGE); - size *= 8; +static int get_bit_length(H2645NAL *nal, int min_size, int skip_trailing_zeros) +{ + int size = nal->size; + int trailing_padding = 0; + + while (skip_trailing_zeros && size > 0 && nal->data[size - 1] == 0) + size--; + + if (!size) + return 0; + + if (size <= min_size) { + if (nal->size < min_size) + return AVERROR_INVALIDDATA; + size = min_size; + } else { + int v = nal->data[size - 1]; + /* remove the stop bit and following trailing zeros, + * or 
nothing for damaged bitstreams */ + if (v) + trailing_padding = ff_ctz(v) + 1; + } - /* remove the stop bit and following trailing zeros, - * or nothing for damaged bitstreams */ - if(v) - size -= ff_ctz(v) + 1; + if (size > INT_MAX / 8) + return AVERROR(ERANGE); + size *= 8; - return size; + return size - trailing_padding; } /** * @return AVERROR_INVALIDDATA if the packet is not a valid NAL unit, * 0 otherwise */ -static int hevc_parse_nal_header(H2645NAL *nal, void *logctx) { - GetBitContext *gb = &nal->gb; +static int hevc_parse_nal_header(H2645NAL *nal, void *logctx) +{ + GetBitContext *gb = &nal->gb; - if(get_bits1(gb) != 0) - return AVERROR_INVALIDDATA; + if (get_bits1(gb) != 0) + return AVERROR_INVALIDDATA; - nal->type = get_bits(gb, 6); + nal->type = get_bits(gb, 6); - nal->nuh_layer_id = get_bits(gb, 6); - nal->temporal_id = get_bits(gb, 3) - 1; - if(nal->temporal_id < 0) - return AVERROR_INVALIDDATA; + nal->nuh_layer_id = get_bits(gb, 6); + nal->temporal_id = get_bits(gb, 3) - 1; + if (nal->temporal_id < 0) + return AVERROR_INVALIDDATA; - av_log(logctx, AV_LOG_DEBUG, - "nal_unit_type: %d(%s), nuh_layer_id: %d, temporal_id: %d\n", - nal->type, hevc_nal_unit_name(nal->type), nal->nuh_layer_id, nal->temporal_id); + av_log(logctx, AV_LOG_DEBUG, + "nal_unit_type: %d(%s), nuh_layer_id: %d, temporal_id: %d\n", + nal->type, hevc_nal_unit_name(nal->type), nal->nuh_layer_id, nal->temporal_id); - return 0; + return 0; } -static int h264_parse_nal_header(H2645NAL *nal, void *logctx) { - GetBitContext *gb = &nal->gb; +static int h264_parse_nal_header(H2645NAL *nal, void *logctx) +{ + GetBitContext *gb = &nal->gb; - if(get_bits1(gb) != 0) - return AVERROR_INVALIDDATA; + if (get_bits1(gb) != 0) + return AVERROR_INVALIDDATA; - nal->ref_idc = get_bits(gb, 2); - nal->type = get_bits(gb, 5); + nal->ref_idc = get_bits(gb, 2); + nal->type = get_bits(gb, 5); - av_log(logctx, AV_LOG_DEBUG, - "nal_unit_type: %d(%s), nal_ref_idc: %d\n", - nal->type, h264_nal_unit_name(nal->type), 
nal->ref_idc); + av_log(logctx, AV_LOG_DEBUG, + "nal_unit_type: %d(%s), nal_ref_idc: %d\n", + nal->type, h264_nal_unit_name(nal->type), nal->ref_idc); - return 0; + return 0; } -static int find_next_start_code(const uint8_t *buf, const uint8_t *next_avc) { - int i = 0; +static int find_next_start_code(const uint8_t *buf, const uint8_t *next_avc) +{ + int i = 0; - if(buf + 3 >= next_avc) - return next_avc - buf; + if (buf + 3 >= next_avc) + return next_avc - buf; - while(buf + i + 3 < next_avc) { - if(buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1) - break; - i++; - } - return i + 3; + while (buf + i + 3 < next_avc) { + if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1) + break; + i++; + } + return i + 3; } -static void alloc_rbsp_buffer(H2645RBSP *rbsp, unsigned int size, int use_ref) { - int min_size = size; - - if(size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE) - goto fail; - size += AV_INPUT_BUFFER_PADDING_SIZE; +static void alloc_rbsp_buffer(H2645RBSP *rbsp, unsigned int size, int use_ref) +{ + int min_size = size; - if(rbsp->rbsp_buffer_alloc_size >= size && - (!rbsp->rbsp_buffer_ref || av_buffer_is_writable(rbsp->rbsp_buffer_ref))) { - av_assert0(rbsp->rbsp_buffer); - memset(rbsp->rbsp_buffer + min_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - return; - } - - size = FFMIN(size + size / 16 + 32, INT_MAX); + if (size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE) + goto fail; + size += AV_INPUT_BUFFER_PADDING_SIZE; - if(rbsp->rbsp_buffer_ref) - av_buffer_unref(&rbsp->rbsp_buffer_ref); - else - av_free(rbsp->rbsp_buffer); + if (rbsp->rbsp_buffer_alloc_size >= size && + (!rbsp->rbsp_buffer_ref || av_buffer_is_writable(rbsp->rbsp_buffer_ref))) { + av_assert0(rbsp->rbsp_buffer); + memset(rbsp->rbsp_buffer + min_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); + return; + } - rbsp->rbsp_buffer = av_mallocz(size); - if(!rbsp->rbsp_buffer) - goto fail; - rbsp->rbsp_buffer_alloc_size = size; + size = FFMIN(size + size / 16 + 32, INT_MAX); - if(use_ref) { - rbsp->rbsp_buffer_ref = 
av_buffer_create(rbsp->rbsp_buffer, size, - NULL, NULL, 0); - if(!rbsp->rbsp_buffer_ref) - goto fail; - } + if (rbsp->rbsp_buffer_ref) + av_buffer_unref(&rbsp->rbsp_buffer_ref); + else + av_free(rbsp->rbsp_buffer); + + rbsp->rbsp_buffer = av_mallocz(size); + if (!rbsp->rbsp_buffer) + goto fail; + rbsp->rbsp_buffer_alloc_size = size; + + if (use_ref) { + rbsp->rbsp_buffer_ref = av_buffer_create(rbsp->rbsp_buffer, size, + NULL, NULL, 0); + if (!rbsp->rbsp_buffer_ref) + goto fail; + } - return; + return; fail: - rbsp->rbsp_buffer_alloc_size = 0; - if(rbsp->rbsp_buffer_ref) { - av_buffer_unref(&rbsp->rbsp_buffer_ref); - rbsp->rbsp_buffer = NULL; - } - else - av_freep(&rbsp->rbsp_buffer); - - return; + rbsp->rbsp_buffer_alloc_size = 0; + if (rbsp->rbsp_buffer_ref) { + av_buffer_unref(&rbsp->rbsp_buffer_ref); + rbsp->rbsp_buffer = NULL; + } else + av_freep(&rbsp->rbsp_buffer); + + return; } int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, - void *logctx, int is_nalff, int nal_length_size, - enum AVCodecID codec_id, int small_padding, int use_ref) { - GetByteContext bc; - int consumed, ret = 0; - int next_avc = is_nalff ? 0 : length; - int64_t padding = small_padding ? 
0 : MAX_MBPAIR_SIZE; - - bytestream2_init(&bc, buf, length); - alloc_rbsp_buffer(&pkt->rbsp, length + padding, use_ref); - - if(!pkt->rbsp.rbsp_buffer) - return AVERROR(ENOMEM); - - pkt->rbsp.rbsp_buffer_size = 0; - pkt->nb_nals = 0; - while(bytestream2_get_bytes_left(&bc) >= 4) { - H2645NAL *nal; - int extract_length = 0; - int skip_trailing_zeros = 1; - - if(bytestream2_tell(&bc) == next_avc) { - int i = 0; - extract_length = get_nalsize(nal_length_size, - bc.buffer, bytestream2_get_bytes_left(&bc), &i, logctx); - if(extract_length < 0) - return extract_length; - - bytestream2_skip(&bc, nal_length_size); - - next_avc = bytestream2_tell(&bc) + extract_length; - } - else { - int buf_index; - - if(bytestream2_tell(&bc) > next_avc) - av_log(logctx, AV_LOG_WARNING, "Exceeded next NALFF position, re-syncing.\n"); + void *logctx, int is_nalff, int nal_length_size, + enum AVCodecID codec_id, int small_padding, int use_ref) +{ + GetByteContext bc; + int consumed, ret = 0; + int next_avc = is_nalff ? 0 : length; + int64_t padding = small_padding ? 
0 : MAX_MBPAIR_SIZE; + + bytestream2_init(&bc, buf, length); + alloc_rbsp_buffer(&pkt->rbsp, length + padding, use_ref); + + if (!pkt->rbsp.rbsp_buffer) + return AVERROR(ENOMEM); - /* search start code */ - buf_index = find_next_start_code(bc.buffer, buf + next_avc); + pkt->rbsp.rbsp_buffer_size = 0; + pkt->nb_nals = 0; + while (bytestream2_get_bytes_left(&bc) >= 4) { + H2645NAL *nal; + int extract_length = 0; + int skip_trailing_zeros = 1; + + if (bytestream2_tell(&bc) == next_avc) { + int i = 0; + extract_length = get_nalsize(nal_length_size, + bc.buffer, bytestream2_get_bytes_left(&bc), &i, logctx); + if (extract_length < 0) + return extract_length; + + bytestream2_skip(&bc, nal_length_size); + + next_avc = bytestream2_tell(&bc) + extract_length; + } else { + int buf_index; + + if (bytestream2_tell(&bc) > next_avc) + av_log(logctx, AV_LOG_WARNING, "Exceeded next NALFF position, re-syncing.\n"); + + /* search start code */ + buf_index = find_next_start_code(bc.buffer, buf + next_avc); + + bytestream2_skip(&bc, buf_index); + + if (!bytestream2_get_bytes_left(&bc)) { + if (pkt->nb_nals > 0) { + // No more start codes: we discarded some irrelevant + // bytes at the end of the packet. + return 0; + } else { + av_log(logctx, AV_LOG_ERROR, "No start code is found.\n"); + return AVERROR_INVALIDDATA; + } + } - bytestream2_skip(&bc, buf_index); + extract_length = FFMIN(bytestream2_get_bytes_left(&bc), next_avc - bytestream2_tell(&bc)); - if(!bytestream2_get_bytes_left(&bc)) { - if(pkt->nb_nals > 0) { - // No more start codes: we discarded some irrelevant - // bytes at the end of the packet. 
- return 0; - } - else { - av_log(logctx, AV_LOG_ERROR, "No start code is found.\n"); - return AVERROR_INVALIDDATA; + if (bytestream2_tell(&bc) >= next_avc) { + /* skip to the start of the next NAL */ + bytestream2_skip(&bc, next_avc - bytestream2_tell(&bc)); + continue; + } } - } - extract_length = FFMIN(bytestream2_get_bytes_left(&bc), next_avc - bytestream2_tell(&bc)); + if (pkt->nals_allocated < pkt->nb_nals + 1) { + int new_size = pkt->nals_allocated + 1; + void *tmp; - if(bytestream2_tell(&bc) >= next_avc) { - /* skip to the start of the next NAL */ - bytestream2_skip(&bc, next_avc - bytestream2_tell(&bc)); - continue; - } - } + if (new_size >= INT_MAX / sizeof(*pkt->nals)) + return AVERROR(ENOMEM); - if(pkt->nals_allocated < pkt->nb_nals + 1) { - int new_size = pkt->nals_allocated + 1; - void *tmp; + tmp = av_fast_realloc(pkt->nals, &pkt->nal_buffer_size, new_size * sizeof(*pkt->nals)); + if (!tmp) + return AVERROR(ENOMEM); - if(new_size >= INT_MAX / sizeof(*pkt->nals)) - return AVERROR(ENOMEM); + pkt->nals = tmp; + memset(pkt->nals + pkt->nals_allocated, 0, sizeof(*pkt->nals)); - tmp = av_fast_realloc(pkt->nals, &pkt->nal_buffer_size, new_size * sizeof(*pkt->nals)); - if(!tmp) - return AVERROR(ENOMEM); + nal = &pkt->nals[pkt->nb_nals]; + nal->skipped_bytes_pos_size = FFMIN(1024, extract_length/3+1); // initial buffer size + nal->skipped_bytes_pos = av_malloc_array(nal->skipped_bytes_pos_size, sizeof(*nal->skipped_bytes_pos)); + if (!nal->skipped_bytes_pos) + return AVERROR(ENOMEM); - pkt->nals = tmp; - memset(pkt->nals + pkt->nals_allocated, 0, sizeof(*pkt->nals)); + pkt->nals_allocated = new_size; + } + nal = &pkt->nals[pkt->nb_nals]; - nal = &pkt->nals[pkt->nb_nals]; - nal->skipped_bytes_pos_size = FFMIN(1024, extract_length / 3 + 1); // initial buffer size - nal->skipped_bytes_pos = av_malloc_array(nal->skipped_bytes_pos_size, sizeof(*nal->skipped_bytes_pos)); - if(!nal->skipped_bytes_pos) - return AVERROR(ENOMEM); + consumed = 
ff_h2645_extract_rbsp(bc.buffer, extract_length, &pkt->rbsp, nal, small_padding); + if (consumed < 0) + return consumed; - pkt->nals_allocated = new_size; - } - nal = &pkt->nals[pkt->nb_nals]; + if (is_nalff && (extract_length != consumed) && extract_length) + av_log(logctx, AV_LOG_DEBUG, + "NALFF: Consumed only %d bytes instead of %d\n", + consumed, extract_length); - consumed = ff_h2645_extract_rbsp(bc.buffer, extract_length, &pkt->rbsp, nal, small_padding); - if(consumed < 0) - return consumed; + bytestream2_skip(&bc, consumed); - if(is_nalff && (extract_length != consumed) && extract_length) - av_log(logctx, AV_LOG_DEBUG, - "NALFF: Consumed only %d bytes instead of %d\n", - consumed, extract_length); + /* see commit 3566042a0 */ + if (bytestream2_get_bytes_left(&bc) >= 4 && + bytestream2_peek_be32(&bc) == 0x000001E0) + skip_trailing_zeros = 0; - bytestream2_skip(&bc, consumed); + nal->size_bits = get_bit_length(nal, 1 + (codec_id == AV_CODEC_ID_HEVC), + skip_trailing_zeros); - /* see commit 3566042a0 */ - if(bytestream2_get_bytes_left(&bc) >= 4 && - bytestream2_peek_be32(&bc) == 0x000001E0) - skip_trailing_zeros = 0; + if (nal->size <= 0 || nal->size_bits <= 0) + continue; - nal->size_bits = get_bit_length(nal, skip_trailing_zeros); + ret = init_get_bits(&nal->gb, nal->data, nal->size_bits); + if (ret < 0) + return ret; - if(nal->size <= 0 || nal->size_bits <= 0) - continue; + /* Reset type in case it contains a stale value from a previously parsed NAL */ + nal->type = 0; - ret = init_get_bits(&nal->gb, nal->data, nal->size_bits); - if(ret < 0) - return ret; - - /* Reset type in case it contains a stale value from a previously parsed NAL */ - nal->type = 0; + if (codec_id == AV_CODEC_ID_HEVC) + ret = hevc_parse_nal_header(nal, logctx); + else + ret = h264_parse_nal_header(nal, logctx); + if (ret < 0) { + av_log(logctx, AV_LOG_WARNING, "Invalid NAL unit %d, skipping.\n", + nal->type); + continue; + } - if(codec_id == AV_CODEC_ID_HEVC) - ret = 
hevc_parse_nal_header(nal, logctx); - else - ret = h264_parse_nal_header(nal, logctx); - if(ret < 0) { - av_log(logctx, AV_LOG_WARNING, "Invalid NAL unit %d, skipping.\n", - nal->type); - continue; + pkt->nb_nals++; } - pkt->nb_nals++; - } - - return 0; + return 0; } -void ff_h2645_packet_uninit(H2645Packet *pkt) { - int i; - for(i = 0; i < pkt->nals_allocated; i++) { - av_freep(&pkt->nals[i].skipped_bytes_pos); - } - av_freep(&pkt->nals); - pkt->nals_allocated = pkt->nal_buffer_size = 0; - if(pkt->rbsp.rbsp_buffer_ref) { - av_buffer_unref(&pkt->rbsp.rbsp_buffer_ref); - pkt->rbsp.rbsp_buffer = NULL; - } - else - av_freep(&pkt->rbsp.rbsp_buffer); - pkt->rbsp.rbsp_buffer_alloc_size = pkt->rbsp.rbsp_buffer_size = 0; +void ff_h2645_packet_uninit(H2645Packet *pkt) +{ + int i; + for (i = 0; i < pkt->nals_allocated; i++) { + av_freep(&pkt->nals[i].skipped_bytes_pos); + } + av_freep(&pkt->nals); + pkt->nals_allocated = pkt->nal_buffer_size = 0; + if (pkt->rbsp.rbsp_buffer_ref) { + av_buffer_unref(&pkt->rbsp.rbsp_buffer_ref); + pkt->rbsp.rbsp_buffer = NULL; + } else + av_freep(&pkt->rbsp.rbsp_buffer); + pkt->rbsp.rbsp_buffer_alloc_size = pkt->rbsp.rbsp_buffer_size = 0; } diff --git a/third-party/cbs/h264_levels.c b/third-party/cbs/h264_levels.c new file mode 100644 index 00000000000..2d65bc30eef --- /dev/null +++ b/third-party/cbs/h264_levels.c @@ -0,0 +1,124 @@ +/* + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include +#include "libavutil/macros.h" +// [manual] Changed include path +#include "cbs/h264_levels.h" + +// H.264 table A-1. +static const H264LevelDescriptor h264_levels[] = { + // Name MaxMBPS MaxBR MinCR + // | level_idc | MaxFS | MaxCPB | MaxMvsPer2Mb + // | | cs3f | | MaxDpbMbs | | MaxVmvR | | + { "1", 10, 0, 1485, 99, 396, 64, 175, 64, 2, 0 }, + { "1b", 11, 1, 1485, 99, 396, 128, 350, 64, 2, 0 }, + { "1b", 9, 0, 1485, 99, 396, 128, 350, 64, 2, 0 }, + { "1.1", 11, 0, 3000, 396, 900, 192, 500, 128, 2, 0 }, + { "1.2", 12, 0, 6000, 396, 2376, 384, 1000, 128, 2, 0 }, + { "1.3", 13, 0, 11880, 396, 2376, 768, 2000, 128, 2, 0 }, + { "2", 20, 0, 11880, 396, 2376, 2000, 2000, 128, 2, 0 }, + { "2.1", 21, 0, 19800, 792, 4752, 4000, 4000, 256, 2, 0 }, + { "2.2", 22, 0, 20250, 1620, 8100, 4000, 4000, 256, 2, 0 }, + { "3", 30, 0, 40500, 1620, 8100, 10000, 10000, 256, 2, 32 }, + { "3.1", 31, 0, 108000, 3600, 18000, 14000, 14000, 512, 4, 16 }, + { "3.2", 32, 0, 216000, 5120, 20480, 20000, 20000, 512, 4, 16 }, + { "4", 40, 0, 245760, 8192, 32768, 20000, 25000, 512, 4, 16 }, + { "4.1", 41, 0, 245760, 8192, 32768, 50000, 62500, 512, 2, 16 }, + { "4.2", 42, 0, 522240, 8704, 34816, 50000, 62500, 512, 2, 16 }, + { "5", 50, 0, 589824, 22080, 110400, 135000, 135000, 512, 2, 16 }, + { "5.1", 51, 0, 983040, 36864, 184320, 240000, 240000, 512, 2, 16 }, + { "5.2", 52, 0, 2073600, 36864, 184320, 240000, 240000, 512, 2, 16 }, + { "6", 60, 0, 4177920, 139264, 696320, 240000, 240000, 8192, 2, 16 }, + { "6.1", 61, 0, 8355840, 139264, 696320, 480000, 480000, 8192, 2, 16 }, + { "6.2", 62, 0, 16711680, 139264, 696320, 800000, 800000, 8192, 2, 16 }, +}; + +// H.264 table A-2 plus values from A-1. 
+static const struct { + int profile_idc; + int cpb_br_vcl_factor; + int cpb_br_nal_factor; +} h264_br_factors[] = { + { 66, 1000, 1200 }, + { 77, 1000, 1200 }, + { 88, 1000, 1200 }, + { 100, 1250, 1500 }, + { 110, 3000, 3600 }, + { 122, 4000, 4800 }, + { 244, 4000, 4800 }, + { 44, 4000, 4800 }, +}; + +// We are only ever interested in the NAL bitrate factor. +static int h264_get_br_factor(int profile_idc) +{ + int i; + for (i = 0; i < FF_ARRAY_ELEMS(h264_br_factors); i++) { + if (h264_br_factors[i].profile_idc == profile_idc) + return h264_br_factors[i].cpb_br_nal_factor; + } + // Default to the non-high profile value if not specified. + return 1200; +} + +const H264LevelDescriptor *ff_h264_guess_level(int profile_idc, + int64_t bitrate, + int framerate, + int width, int height, + int max_dec_frame_buffering) +{ + int width_mbs = (width + 15) / 16; + int height_mbs = (height + 15) / 16; + int no_cs3f = !(profile_idc == 66 || + profile_idc == 77 || + profile_idc == 88); + int i; + + for (i = 0; i < FF_ARRAY_ELEMS(h264_levels); i++) { + const H264LevelDescriptor *level = &h264_levels[i]; + + if (level->constraint_set3_flag && no_cs3f) + continue; + + if (bitrate > (int64_t)level->max_br * h264_get_br_factor(profile_idc)) + continue; + + if (width_mbs * height_mbs > level->max_fs) + continue; + if (width_mbs * width_mbs > 8 * level->max_fs) + continue; + if (height_mbs * height_mbs > 8 * level->max_fs) + continue; + + if (width_mbs && height_mbs) { + int max_dpb_frames = + FFMIN(level->max_dpb_mbs / (width_mbs * height_mbs), 16); + if (max_dec_frame_buffering > max_dpb_frames) + continue; + + if (framerate > (level->max_mbps / (width_mbs * height_mbs))) + continue; + } + + return level; + } + + // No usable levels found - frame is too big or bitrate is too high. 
+ return NULL; +} diff --git a/third-party/cbs/h264_ps.h b/third-party/cbs/h264_ps.h index fbd0cb7a423..dc52835ed4c 100644 --- a/third-party/cbs/h264_ps.h +++ b/third-party/cbs/h264_ps.h @@ -26,144 +26,148 @@ #include -#include -#include -#include -#include - -#include "cbs/h264.h" +#include "libavutil/buffer.h" +#include "libavutil/pixfmt.h" +#include "libavutil/rational.h" +#include "avcodec.h" #include "get_bits.h" +#include "h264.h" -#define MAX_SPS_COUNT 32 -#define MAX_PPS_COUNT 256 -#define MAX_LOG2_MAX_FRAME_NUM (12 + 4) +#define MAX_SPS_COUNT 32 +#define MAX_PPS_COUNT 256 +#define MAX_LOG2_MAX_FRAME_NUM (12 + 4) /** * Sequence parameter set */ typedef struct SPS { - unsigned int sps_id; - int profile_idc; - int level_idc; - int chroma_format_idc; - int transform_bypass; ///< qpprime_y_zero_transform_bypass_flag - int log2_max_frame_num; ///< log2_max_frame_num_minus4 + 4 - int poc_type; ///< pic_order_cnt_type - int log2_max_poc_lsb; ///< log2_max_pic_order_cnt_lsb_minus4 - int delta_pic_order_always_zero_flag; - int offset_for_non_ref_pic; - int offset_for_top_to_bottom_field; - int poc_cycle_length; ///< num_ref_frames_in_pic_order_cnt_cycle - int ref_frame_count; ///< num_ref_frames - int gaps_in_frame_num_allowed_flag; - int mb_width; ///< pic_width_in_mbs_minus1 + 1 - ///< (pic_height_in_map_units_minus1 + 1) * (2 - frame_mbs_only_flag) - int mb_height; - int frame_mbs_only_flag; - int mb_aff; ///< mb_adaptive_frame_field_flag - int direct_8x8_inference_flag; - int crop; ///< frame_cropping_flag - - /* those 4 are already in luma samples */ - unsigned int crop_left; ///< frame_cropping_rect_left_offset - unsigned int crop_right; ///< frame_cropping_rect_right_offset - unsigned int crop_top; ///< frame_cropping_rect_top_offset - unsigned int crop_bottom; ///< frame_cropping_rect_bottom_offset - int vui_parameters_present_flag; - AVRational sar; - int video_signal_type_present_flag; - int full_range; - int colour_description_present_flag; - enum 
AVColorPrimaries color_primaries; - enum AVColorTransferCharacteristic color_trc; - enum AVColorSpace colorspace; - enum AVChromaLocation chroma_location; - - int timing_info_present_flag; - uint32_t num_units_in_tick; - uint32_t time_scale; - int fixed_frame_rate_flag; - int32_t offset_for_ref_frame[256]; - int bitstream_restriction_flag; - int num_reorder_frames; - int scaling_matrix_present; - uint8_t scaling_matrix4[6][16]; - uint8_t scaling_matrix8[6][64]; - int nal_hrd_parameters_present_flag; - int vcl_hrd_parameters_present_flag; - int pic_struct_present_flag; - int time_offset_length; - int cpb_cnt; ///< See H.264 E.1.2 - int initial_cpb_removal_delay_length; ///< initial_cpb_removal_delay_length_minus1 + 1 - int cpb_removal_delay_length; ///< cpb_removal_delay_length_minus1 + 1 - int dpb_output_delay_length; ///< dpb_output_delay_length_minus1 + 1 - int bit_depth_luma; ///< bit_depth_luma_minus8 + 8 - int bit_depth_chroma; ///< bit_depth_chroma_minus8 + 8 - int residual_color_transform_flag; ///< residual_colour_transform_flag - int constraint_set_flags; ///< constraint_set[0-3]_flag - uint8_t data[4096]; - size_t data_size; + unsigned int sps_id; + int profile_idc; + int level_idc; + int chroma_format_idc; + int transform_bypass; ///< qpprime_y_zero_transform_bypass_flag + int log2_max_frame_num; ///< log2_max_frame_num_minus4 + 4 + int poc_type; ///< pic_order_cnt_type + int log2_max_poc_lsb; ///< log2_max_pic_order_cnt_lsb_minus4 + int delta_pic_order_always_zero_flag; + int offset_for_non_ref_pic; + int offset_for_top_to_bottom_field; + int poc_cycle_length; ///< num_ref_frames_in_pic_order_cnt_cycle + int ref_frame_count; ///< num_ref_frames + int gaps_in_frame_num_allowed_flag; + int mb_width; ///< pic_width_in_mbs_minus1 + 1 + ///< (pic_height_in_map_units_minus1 + 1) * (2 - frame_mbs_only_flag) + int mb_height; + int frame_mbs_only_flag; + int mb_aff; ///< mb_adaptive_frame_field_flag + int direct_8x8_inference_flag; + int crop; ///< 
frame_cropping_flag + + /* those 4 are already in luma samples */ + unsigned int crop_left; ///< frame_cropping_rect_left_offset + unsigned int crop_right; ///< frame_cropping_rect_right_offset + unsigned int crop_top; ///< frame_cropping_rect_top_offset + unsigned int crop_bottom; ///< frame_cropping_rect_bottom_offset + int vui_parameters_present_flag; + AVRational sar; + int video_signal_type_present_flag; + int full_range; + int colour_description_present_flag; + enum AVColorPrimaries color_primaries; + enum AVColorTransferCharacteristic color_trc; + enum AVColorSpace colorspace; + enum AVChromaLocation chroma_location; + + int timing_info_present_flag; + uint32_t num_units_in_tick; + uint32_t time_scale; + int fixed_frame_rate_flag; + int32_t offset_for_ref_frame[256]; + int bitstream_restriction_flag; + int num_reorder_frames; + int scaling_matrix_present; + uint8_t scaling_matrix4[6][16]; + uint8_t scaling_matrix8[6][64]; + int nal_hrd_parameters_present_flag; + int vcl_hrd_parameters_present_flag; + int pic_struct_present_flag; + int time_offset_length; + int cpb_cnt; ///< See H.264 E.1.2 + int initial_cpb_removal_delay_length; ///< initial_cpb_removal_delay_length_minus1 + 1 + int cpb_removal_delay_length; ///< cpb_removal_delay_length_minus1 + 1 + int dpb_output_delay_length; ///< dpb_output_delay_length_minus1 + 1 + int bit_depth_luma; ///< bit_depth_luma_minus8 + 8 + int bit_depth_chroma; ///< bit_depth_chroma_minus8 + 8 + int residual_color_transform_flag; ///< residual_colour_transform_flag + int constraint_set_flags; ///< constraint_set[0-3]_flag + uint8_t data[4096]; + size_t data_size; } SPS; /** * Picture parameter set */ typedef struct PPS { - unsigned int sps_id; - int cabac; ///< entropy_coding_mode_flag - int pic_order_present; ///< pic_order_present_flag - int slice_group_count; ///< num_slice_groups_minus1 + 1 - int mb_slice_group_map_type; - unsigned int ref_count[2]; ///< num_ref_idx_l0/1_active_minus1 + 1 - int weighted_pred; ///< 
weighted_pred_flag - int weighted_bipred_idc; - int init_qp; ///< pic_init_qp_minus26 + 26 - int init_qs; ///< pic_init_qs_minus26 + 26 - int chroma_qp_index_offset[2]; - int deblocking_filter_parameters_present; ///< deblocking_filter_parameters_present_flag - int constrained_intra_pred; ///< constrained_intra_pred_flag - int redundant_pic_cnt_present; ///< redundant_pic_cnt_present_flag - int transform_8x8_mode; ///< transform_8x8_mode_flag - uint8_t scaling_matrix4[6][16]; - uint8_t scaling_matrix8[6][64]; - uint8_t chroma_qp_table[2][QP_MAX_NUM + 1]; ///< pre-scaled (with chroma_qp_index_offset) version of qp_table - int chroma_qp_diff; - uint8_t data[4096]; - size_t data_size; - - uint32_t dequant4_buffer[6][QP_MAX_NUM + 1][16]; - uint32_t dequant8_buffer[6][QP_MAX_NUM + 1][64]; - uint32_t (*dequant4_coeff[6])[16]; - uint32_t (*dequant8_coeff[6])[64]; - - AVBufferRef *sps_ref; - const SPS *sps; + unsigned int sps_id; + int cabac; ///< entropy_coding_mode_flag + int pic_order_present; ///< pic_order_present_flag + int slice_group_count; ///< num_slice_groups_minus1 + 1 + int mb_slice_group_map_type; + unsigned int ref_count[2]; ///< num_ref_idx_l0/1_active_minus1 + 1 + int weighted_pred; ///< weighted_pred_flag + int weighted_bipred_idc; + int init_qp; ///< pic_init_qp_minus26 + 26 + int init_qs; ///< pic_init_qs_minus26 + 26 + int chroma_qp_index_offset[2]; + int deblocking_filter_parameters_present; ///< deblocking_filter_parameters_present_flag + int constrained_intra_pred; ///< constrained_intra_pred_flag + int redundant_pic_cnt_present; ///< redundant_pic_cnt_present_flag + int transform_8x8_mode; ///< transform_8x8_mode_flag + uint8_t scaling_matrix4[6][16]; + uint8_t scaling_matrix8[6][64]; + uint8_t chroma_qp_table[2][QP_MAX_NUM+1]; ///< pre-scaled (with chroma_qp_index_offset) version of qp_table + int chroma_qp_diff; + uint8_t data[4096]; + size_t data_size; + + uint32_t dequant4_buffer[6][QP_MAX_NUM + 1][16]; + uint32_t dequant8_buffer[6][QP_MAX_NUM 
+ 1][64]; + uint32_t(*dequant4_coeff[6])[16]; + uint32_t(*dequant8_coeff[6])[64]; + + AVBufferRef *sps_ref; + const SPS *sps; } PPS; typedef struct H264ParamSets { - AVBufferRef *sps_list[MAX_SPS_COUNT]; - AVBufferRef *pps_list[MAX_PPS_COUNT]; + AVBufferRef *sps_list[MAX_SPS_COUNT]; + AVBufferRef *pps_list[MAX_PPS_COUNT]; - AVBufferRef *pps_ref; - /* currently active parameters sets */ - const PPS *pps; - const SPS *sps; + AVBufferRef *pps_ref; + /* currently active parameters sets */ + const PPS *pps; + const SPS *sps; - int overread_warning_printed[2]; + int overread_warning_printed[2]; } H264ParamSets; +/** + * compute profile from sps + */ +int ff_h264_get_profile(const SPS *sps); + /** * Decode SPS */ int ff_h264_decode_seq_parameter_set(GetBitContext *gb, AVCodecContext *avctx, - H264ParamSets *ps, int ignore_truncation); + H264ParamSets *ps, int ignore_truncation); /** * Decode PPS */ int ff_h264_decode_picture_parameter_set(GetBitContext *gb, AVCodecContext *avctx, - H264ParamSets *ps, int bit_length); + H264ParamSets *ps, int bit_length); /** * Uninit H264 param sets structure. 
diff --git a/third-party/cbs/h264_sei.h b/third-party/cbs/h264_sei.h index 64dbb86e777..f9166b45dff 100644 --- a/third-party/cbs/h264_sei.h +++ b/third-party/cbs/h264_sei.h @@ -19,169 +19,191 @@ #ifndef AVCODEC_H264_SEI_H #define AVCODEC_H264_SEI_H -#include "cbs/sei.h" #include "get_bits.h" #include "h264_ps.h" +#include "sei.h" /** * pic_struct in picture timing SEI message */ typedef enum { - H264_SEI_PIC_STRUCT_FRAME = 0, ///< 0: %frame - H264_SEI_PIC_STRUCT_TOP_FIELD = 1, ///< 1: top field - H264_SEI_PIC_STRUCT_BOTTOM_FIELD = 2, ///< 2: bottom field - H264_SEI_PIC_STRUCT_TOP_BOTTOM = 3, ///< 3: top field, bottom field, in that order - H264_SEI_PIC_STRUCT_BOTTOM_TOP = 4, ///< 4: bottom field, top field, in that order - H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP = 5, ///< 5: top field, bottom field, top field repeated, in that order - H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM = 6, ///< 6: bottom field, top field, bottom field repeated, in that order - H264_SEI_PIC_STRUCT_FRAME_DOUBLING = 7, ///< 7: %frame doubling - H264_SEI_PIC_STRUCT_FRAME_TRIPLING = 8 ///< 8: %frame tripling + H264_SEI_PIC_STRUCT_FRAME = 0, ///< 0: %frame + H264_SEI_PIC_STRUCT_TOP_FIELD = 1, ///< 1: top field + H264_SEI_PIC_STRUCT_BOTTOM_FIELD = 2, ///< 2: bottom field + H264_SEI_PIC_STRUCT_TOP_BOTTOM = 3, ///< 3: top field, bottom field, in that order + H264_SEI_PIC_STRUCT_BOTTOM_TOP = 4, ///< 4: bottom field, top field, in that order + H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP = 5, ///< 5: top field, bottom field, top field repeated, in that order + H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM = 6, ///< 6: bottom field, top field, bottom field repeated, in that order + H264_SEI_PIC_STRUCT_FRAME_DOUBLING = 7, ///< 7: %frame doubling + H264_SEI_PIC_STRUCT_FRAME_TRIPLING = 8 ///< 8: %frame tripling } H264_SEI_PicStructType; /** * frame_packing_arrangement types */ typedef enum { - H264_SEI_FPA_TYPE_CHECKERBOARD = 0, - H264_SEI_FPA_TYPE_INTERLEAVE_COLUMN = 1, - H264_SEI_FPA_TYPE_INTERLEAVE_ROW = 2, - 
H264_SEI_FPA_TYPE_SIDE_BY_SIDE = 3, - H264_SEI_FPA_TYPE_TOP_BOTTOM = 4, - H264_SEI_FPA_TYPE_INTERLEAVE_TEMPORAL = 5, - H264_SEI_FPA_TYPE_2D = 6, + H264_SEI_FPA_TYPE_CHECKERBOARD = 0, + H264_SEI_FPA_TYPE_INTERLEAVE_COLUMN = 1, + H264_SEI_FPA_TYPE_INTERLEAVE_ROW = 2, + H264_SEI_FPA_TYPE_SIDE_BY_SIDE = 3, + H264_SEI_FPA_TYPE_TOP_BOTTOM = 4, + H264_SEI_FPA_TYPE_INTERLEAVE_TEMPORAL = 5, + H264_SEI_FPA_TYPE_2D = 6, } H264_SEI_FpaType; typedef struct H264SEITimeCode { - /* When not continuously receiving full timecodes, we have to reference + /* When not continuously receiving full timecodes, we have to reference the previous timecode received */ - int full; - int frame; - int seconds; - int minutes; - int hours; - int dropframe; + int full; + int frame; + int seconds; + int minutes; + int hours; + int dropframe; } H264SEITimeCode; typedef struct H264SEIPictureTiming { - // maximum size of pic_timing according to the spec should be 274 bits - uint8_t payload[40]; - int payload_size_bits; + // maximum size of pic_timing according to the spec should be 274 bits + uint8_t payload[40]; + int payload_size_bits; - int present; - H264_SEI_PicStructType pic_struct; + int present; + H264_SEI_PicStructType pic_struct; - /** + /** * Bit set of clock types for fields/frames in picture timing SEI message. * For each found ct_type, appropriate bit is set (e.g., bit 1 for * interlaced). */ - int ct_type; + int ct_type; - /** + /** * dpb_output_delay in picture timing SEI message, see H.264 C.2.2 */ - int dpb_output_delay; + int dpb_output_delay; - /** + /** * cpb_removal_delay in picture timing SEI message, see H.264 C.1.2 */ - int cpb_removal_delay; + int cpb_removal_delay; - /** + /** * Maximum three timecodes in a pic_timing SEI. 
*/ - H264SEITimeCode timecode[3]; + H264SEITimeCode timecode[3]; - /** + /** * Number of timecode in use */ - int timecode_cnt; + int timecode_cnt; } H264SEIPictureTiming; typedef struct H264SEIAFD { - int present; - uint8_t active_format_description; + int present; + uint8_t active_format_description; } H264SEIAFD; typedef struct H264SEIA53Caption { - AVBufferRef *buf_ref; + AVBufferRef *buf_ref; } H264SEIA53Caption; typedef struct H264SEIUnregistered { - int x264_build; - AVBufferRef **buf_ref; - int nb_buf_ref; + int x264_build; + AVBufferRef **buf_ref; + int nb_buf_ref; } H264SEIUnregistered; typedef struct H264SEIRecoveryPoint { - /** + /** * recovery_frame_cnt * * Set to -1 if no recovery point SEI message found or to number of frames * before playback synchronizes. Frames having recovery point are key * frames. */ - int recovery_frame_cnt; + int recovery_frame_cnt; } H264SEIRecoveryPoint; typedef struct H264SEIBufferingPeriod { - int present; ///< Buffering period SEI flag - int initial_cpb_removal_delay[32]; ///< Initial timestamps for CPBs + int present; ///< Buffering period SEI flag + int initial_cpb_removal_delay[32]; ///< Initial timestamps for CPBs } H264SEIBufferingPeriod; typedef struct H264SEIFramePacking { - int present; - int arrangement_id; - int arrangement_cancel_flag; ///< is previous arrangement canceled, -1 if never received - H264_SEI_FpaType arrangement_type; - int arrangement_repetition_period; - int content_interpretation_type; - int quincunx_sampling_flag; - int current_frame_is_frame0_flag; + int present; + int arrangement_id; + int arrangement_cancel_flag; ///< is previous arrangement canceled, -1 if never received + H264_SEI_FpaType arrangement_type; + int arrangement_repetition_period; + int content_interpretation_type; + int quincunx_sampling_flag; + int current_frame_is_frame0_flag; } H264SEIFramePacking; typedef struct H264SEIDisplayOrientation { - int present; - int anticlockwise_rotation; - int hflip, vflip; + int present; + 
int anticlockwise_rotation; + int hflip, vflip; } H264SEIDisplayOrientation; typedef struct H264SEIGreenMetaData { - uint8_t green_metadata_type; - uint8_t period_type; - uint16_t num_seconds; - uint16_t num_pictures; - uint8_t percent_non_zero_macroblocks; - uint8_t percent_intra_coded_macroblocks; - uint8_t percent_six_tap_filtering; - uint8_t percent_alpha_point_deblocking_instance; - uint8_t xsd_metric_type; - uint16_t xsd_metric_value; + uint8_t green_metadata_type; + uint8_t period_type; + uint16_t num_seconds; + uint16_t num_pictures; + uint8_t percent_non_zero_macroblocks; + uint8_t percent_intra_coded_macroblocks; + uint8_t percent_six_tap_filtering; + uint8_t percent_alpha_point_deblocking_instance; + uint8_t xsd_metric_type; + uint16_t xsd_metric_value; } H264SEIGreenMetaData; typedef struct H264SEIAlternativeTransfer { - int present; - int preferred_transfer_characteristics; + int present; + int preferred_transfer_characteristics; } H264SEIAlternativeTransfer; +typedef struct H264SEIFilmGrainCharacteristics { + int present; + int model_id; + int separate_colour_description_present_flag; + int bit_depth_luma; + int bit_depth_chroma; + int full_range; + int color_primaries; + int transfer_characteristics; + int matrix_coeffs; + int blending_mode_id; + int log2_scale_factor; + int comp_model_present_flag[3]; + uint16_t num_intensity_intervals[3]; + uint8_t num_model_values[3]; + uint8_t intensity_interval_lower_bound[3][256]; + uint8_t intensity_interval_upper_bound[3][256]; + int16_t comp_model_value[3][256][6]; + int repetition_period; +} H264SEIFilmGrainCharacteristics; + typedef struct H264SEIContext { - H264SEIPictureTiming picture_timing; - H264SEIAFD afd; - H264SEIA53Caption a53_caption; - H264SEIUnregistered unregistered; - H264SEIRecoveryPoint recovery_point; - H264SEIBufferingPeriod buffering_period; - H264SEIFramePacking frame_packing; - H264SEIDisplayOrientation display_orientation; - H264SEIGreenMetaData green_metadata; - 
H264SEIAlternativeTransfer alternative_transfer; + H264SEIPictureTiming picture_timing; + H264SEIAFD afd; + H264SEIA53Caption a53_caption; + H264SEIUnregistered unregistered; + H264SEIRecoveryPoint recovery_point; + H264SEIBufferingPeriod buffering_period; + H264SEIFramePacking frame_packing; + H264SEIDisplayOrientation display_orientation; + H264SEIGreenMetaData green_metadata; + H264SEIAlternativeTransfer alternative_transfer; + H264SEIFilmGrainCharacteristics film_grain_characteristics; } H264SEIContext; struct H264ParamSets; int ff_h264_sei_decode(H264SEIContext *h, GetBitContext *gb, - const struct H264ParamSets *ps, void *logctx); + const struct H264ParamSets *ps, void *logctx); /** * Reset SEI values at the beginning of the frame. @@ -197,6 +219,6 @@ const char *ff_h264_sei_stereo_mode(const H264SEIFramePacking *h); * Parse the contents of a picture timing message given an active SPS. */ int ff_h264_sei_process_picture_timing(H264SEIPictureTiming *h, const SPS *sps, - void *logctx); + void *logctx); #endif /* AVCODEC_H264_SEI_H */ diff --git a/third-party/cbs/hevc_sei.h b/third-party/cbs/hevc_sei.h index 8dfffcc45d9..ef987f67818 100644 --- a/third-party/cbs/hevc_sei.h +++ b/third-party/cbs/hevc_sei.h @@ -23,112 +23,139 @@ #include -#include - -#include "cbs/sei.h" +#include "libavutil/buffer.h" #include "get_bits.h" +#include "hevc.h" +#include "sei.h" typedef enum { - HEVC_SEI_PIC_STRUCT_FRAME_DOUBLING = 7, - HEVC_SEI_PIC_STRUCT_FRAME_TRIPLING = 8 + HEVC_SEI_PIC_STRUCT_FRAME_DOUBLING = 7, + HEVC_SEI_PIC_STRUCT_FRAME_TRIPLING = 8 } HEVC_SEI_PicStructType; typedef struct HEVCSEIPictureHash { - uint8_t md5[3][16]; - uint8_t is_md5; + uint8_t md5[3][16]; + uint8_t is_md5; } HEVCSEIPictureHash; typedef struct HEVCSEIFramePacking { - int present; - int arrangement_type; - int content_interpretation_type; - int quincunx_subsampling; - int current_frame_is_frame0_flag; + int present; + int arrangement_type; + int content_interpretation_type; + int 
quincunx_subsampling; + int current_frame_is_frame0_flag; } HEVCSEIFramePacking; typedef struct HEVCSEIDisplayOrientation { - int present; - int anticlockwise_rotation; - int hflip, vflip; + int present; + int anticlockwise_rotation; + int hflip, vflip; } HEVCSEIDisplayOrientation; typedef struct HEVCSEIPictureTiming { - int picture_struct; + int picture_struct; } HEVCSEIPictureTiming; typedef struct HEVCSEIA53Caption { - AVBufferRef *buf_ref; + AVBufferRef *buf_ref; } HEVCSEIA53Caption; typedef struct HEVCSEIUnregistered { - AVBufferRef **buf_ref; - int nb_buf_ref; + AVBufferRef **buf_ref; + int nb_buf_ref; } HEVCSEIUnregistered; typedef struct HEVCSEIMasteringDisplay { - int present; - uint16_t display_primaries[3][2]; - uint16_t white_point[2]; - uint32_t max_luminance; - uint32_t min_luminance; + int present; + uint16_t display_primaries[3][2]; + uint16_t white_point[2]; + uint32_t max_luminance; + uint32_t min_luminance; } HEVCSEIMasteringDisplay; typedef struct HEVCSEIDynamicHDRPlus { - AVBufferRef *info; + AVBufferRef *info; } HEVCSEIDynamicHDRPlus; +typedef struct HEVCSEIDynamicHDRVivid { + AVBufferRef *info; +} HEVCSEIDynamicHDRVivid; + typedef struct HEVCSEIContentLight { - int present; - uint16_t max_content_light_level; - uint16_t max_pic_average_light_level; + int present; + uint16_t max_content_light_level; + uint16_t max_pic_average_light_level; } HEVCSEIContentLight; typedef struct HEVCSEIAlternativeTransfer { - int present; - int preferred_transfer_characteristics; + int present; + int preferred_transfer_characteristics; } HEVCSEIAlternativeTransfer; typedef struct HEVCSEITimeCode { - int present; - uint8_t num_clock_ts; - uint8_t clock_timestamp_flag[3]; - uint8_t units_field_based_flag[3]; - uint8_t counting_type[3]; - uint8_t full_timestamp_flag[3]; - uint8_t discontinuity_flag[3]; - uint8_t cnt_dropped_flag[3]; - uint16_t n_frames[3]; - uint8_t seconds_value[3]; - uint8_t minutes_value[3]; - uint8_t hours_value[3]; - uint8_t seconds_flag[3]; - 
uint8_t minutes_flag[3]; - uint8_t hours_flag[3]; - uint8_t time_offset_length[3]; - int32_t time_offset_value[3]; + int present; + uint8_t num_clock_ts; + uint8_t clock_timestamp_flag[3]; + uint8_t units_field_based_flag[3]; + uint8_t counting_type[3]; + uint8_t full_timestamp_flag[3]; + uint8_t discontinuity_flag[3]; + uint8_t cnt_dropped_flag[3]; + uint16_t n_frames[3]; + uint8_t seconds_value[3]; + uint8_t minutes_value[3]; + uint8_t hours_value[3]; + uint8_t seconds_flag[3]; + uint8_t minutes_flag[3]; + uint8_t hours_flag[3]; + uint8_t time_offset_length[3]; + int32_t time_offset_value[3]; } HEVCSEITimeCode; +typedef struct HEVCSEIFilmGrainCharacteristics { + int present; + int model_id; + int separate_colour_description_present_flag; + int bit_depth_luma; + int bit_depth_chroma; + int full_range; + int color_primaries; + int transfer_characteristics; + int matrix_coeffs; + int blending_mode_id; + int log2_scale_factor; + int comp_model_present_flag[3]; + uint16_t num_intensity_intervals[3]; + uint8_t num_model_values[3]; + uint8_t intensity_interval_lower_bound[3][256]; + uint8_t intensity_interval_upper_bound[3][256]; + int16_t comp_model_value[3][256][6]; + int persistence_flag; +} HEVCSEIFilmGrainCharacteristics; + typedef struct HEVCSEI { - HEVCSEIPictureHash picture_hash; - HEVCSEIFramePacking frame_packing; - HEVCSEIDisplayOrientation display_orientation; - HEVCSEIPictureTiming picture_timing; - HEVCSEIA53Caption a53_caption; - HEVCSEIUnregistered unregistered; - HEVCSEIMasteringDisplay mastering_display; - HEVCSEIDynamicHDRPlus dynamic_hdr_plus; - HEVCSEIContentLight content_light; - int active_seq_parameter_set_id; - HEVCSEIAlternativeTransfer alternative_transfer; - HEVCSEITimeCode timecode; + HEVCSEIPictureHash picture_hash; + HEVCSEIFramePacking frame_packing; + HEVCSEIDisplayOrientation display_orientation; + HEVCSEIPictureTiming picture_timing; + HEVCSEIA53Caption a53_caption; + HEVCSEIUnregistered unregistered; + HEVCSEIMasteringDisplay 
mastering_display; + HEVCSEIDynamicHDRPlus dynamic_hdr_plus; + HEVCSEIDynamicHDRVivid dynamic_hdr_vivid; + HEVCSEIContentLight content_light; + int active_seq_parameter_set_id; + HEVCSEIAlternativeTransfer alternative_transfer; + HEVCSEITimeCode timecode; + HEVCSEIFilmGrainCharacteristics film_grain_characteristics; } HEVCSEI; struct HEVCParamSets; int ff_hevc_decode_nal_sei(GetBitContext *gb, void *logctx, HEVCSEI *s, - const struct HEVCParamSets *ps, int type); + const struct HEVCParamSets *ps, enum HEVCNALUnitType type); /** * Reset SEI values that are stored on the Context. diff --git a/third-party/cbs/include/cbs/attributes.h b/third-party/cbs/include/cbs/attributes.h new file mode 100644 index 00000000000..04c615c952c --- /dev/null +++ b/third-party/cbs/include/cbs/attributes.h @@ -0,0 +1,173 @@ +/* + * copyright (c) 2006 Michael Niedermayer + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * Macro definitions for various function/variable attributes + */ + +#ifndef AVUTIL_ATTRIBUTES_H +#define AVUTIL_ATTRIBUTES_H + +#ifdef __GNUC__ +# define AV_GCC_VERSION_AT_LEAST(x,y) (__GNUC__ > (x) || __GNUC__ == (x) && __GNUC_MINOR__ >= (y)) +# define AV_GCC_VERSION_AT_MOST(x,y) (__GNUC__ < (x) || __GNUC__ == (x) && __GNUC_MINOR__ <= (y)) +#else +# define AV_GCC_VERSION_AT_LEAST(x,y) 0 +# define AV_GCC_VERSION_AT_MOST(x,y) 0 +#endif + +#ifdef __has_builtin +# define AV_HAS_BUILTIN(x) __has_builtin(x) +#else +# define AV_HAS_BUILTIN(x) 0 +#endif + +#ifndef av_always_inline +#if AV_GCC_VERSION_AT_LEAST(3,1) +# define av_always_inline __attribute__((always_inline)) inline +#elif defined(_MSC_VER) +# define av_always_inline __forceinline +#else +# define av_always_inline inline +#endif +#endif + +#ifndef av_extern_inline +#if defined(__ICL) && __ICL >= 1210 || defined(__GNUC_STDC_INLINE__) +# define av_extern_inline extern inline +#else +# define av_extern_inline inline +#endif +#endif + +#if AV_GCC_VERSION_AT_LEAST(3,4) +# define av_warn_unused_result __attribute__((warn_unused_result)) +#else +# define av_warn_unused_result +#endif + +#if AV_GCC_VERSION_AT_LEAST(3,1) +# define av_noinline __attribute__((noinline)) +#elif defined(_MSC_VER) +# define av_noinline __declspec(noinline) +#else +# define av_noinline +#endif + +#if AV_GCC_VERSION_AT_LEAST(3,1) || defined(__clang__) +# define av_pure __attribute__((pure)) +#else +# define av_pure +#endif + +#if AV_GCC_VERSION_AT_LEAST(2,6) || defined(__clang__) +# define av_const __attribute__((const)) +#else +# define av_const +#endif + +#if AV_GCC_VERSION_AT_LEAST(4,3) || defined(__clang__) +# define av_cold __attribute__((cold)) +#else +# define av_cold +#endif + +#if 
AV_GCC_VERSION_AT_LEAST(4,1) && !defined(__llvm__) +# define av_flatten __attribute__((flatten)) +#else +# define av_flatten +#endif + +#if AV_GCC_VERSION_AT_LEAST(3,1) +# define attribute_deprecated __attribute__((deprecated)) +#elif defined(_MSC_VER) +# define attribute_deprecated __declspec(deprecated) +#else +# define attribute_deprecated +#endif + +/** + * Disable warnings about deprecated features + * This is useful for sections of code kept for backward compatibility and + * scheduled for removal. + */ +#ifndef AV_NOWARN_DEPRECATED +#if AV_GCC_VERSION_AT_LEAST(4,6) || defined(__clang__) +# define AV_NOWARN_DEPRECATED(code) \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") \ + code \ + _Pragma("GCC diagnostic pop") +#elif defined(_MSC_VER) +# define AV_NOWARN_DEPRECATED(code) \ + __pragma(warning(push)) \ + __pragma(warning(disable : 4996)) \ + code; \ + __pragma(warning(pop)) +#else +# define AV_NOWARN_DEPRECATED(code) code +#endif +#endif + +#if defined(__GNUC__) || defined(__clang__) +# define av_unused __attribute__((unused)) +#else +# define av_unused +#endif + +/** + * Mark a variable as used and prevent the compiler from optimizing it + * away. This is useful for variables accessed only from inline + * assembler without the compiler being aware. 
+ */ +#if AV_GCC_VERSION_AT_LEAST(3,1) || defined(__clang__) +# define av_used __attribute__((used)) +#else +# define av_used +#endif + +#if AV_GCC_VERSION_AT_LEAST(3,3) || defined(__clang__) +# define av_alias __attribute__((may_alias)) +#else +# define av_alias +#endif + +#if (defined(__GNUC__) || defined(__clang__)) && !defined(__INTEL_COMPILER) +# define av_uninit(x) x=x +#else +# define av_uninit(x) x +#endif + +#if defined(__GNUC__) || defined(__clang__) +# define av_builtin_constant_p __builtin_constant_p +# define av_printf_format(fmtpos, attrpos) __attribute__((__format__(__printf__, fmtpos, attrpos))) +#else +# define av_builtin_constant_p(x) 0 +# define av_printf_format(fmtpos, attrpos) +#endif + +#if AV_GCC_VERSION_AT_LEAST(2,5) || defined(__clang__) +# define av_noreturn __attribute__((noreturn)) +#else +# define av_noreturn +#endif + +#endif /* AVUTIL_ATTRIBUTES_H */ diff --git a/third-party/cbs/include/cbs/av1.h b/third-party/cbs/include/cbs/av1.h index d8c2b9e7e1a..384f7cddc7e 100644 --- a/third-party/cbs/include/cbs/av1.h +++ b/third-party/cbs/include/cbs/av1.h @@ -26,94 +26,101 @@ // OBU types (section 6.2.2). typedef enum { - // 0 reserved. - AV1_OBU_SEQUENCE_HEADER = 1, - AV1_OBU_TEMPORAL_DELIMITER = 2, - AV1_OBU_FRAME_HEADER = 3, - AV1_OBU_TILE_GROUP = 4, - AV1_OBU_METADATA = 5, - AV1_OBU_FRAME = 6, - AV1_OBU_REDUNDANT_FRAME_HEADER = 7, - AV1_OBU_TILE_LIST = 8, - // 9-14 reserved. - AV1_OBU_PADDING = 15, + // 0 reserved. + AV1_OBU_SEQUENCE_HEADER = 1, + AV1_OBU_TEMPORAL_DELIMITER = 2, + AV1_OBU_FRAME_HEADER = 3, + AV1_OBU_TILE_GROUP = 4, + AV1_OBU_METADATA = 5, + AV1_OBU_FRAME = 6, + AV1_OBU_REDUNDANT_FRAME_HEADER = 7, + AV1_OBU_TILE_LIST = 8, + // 9-14 reserved. + AV1_OBU_PADDING = 15, } AV1_OBU_Type; // Metadata types (section 6.7.1). 
enum { - AV1_METADATA_TYPE_HDR_CLL = 1, - AV1_METADATA_TYPE_HDR_MDCV = 2, - AV1_METADATA_TYPE_SCALABILITY = 3, - AV1_METADATA_TYPE_ITUT_T35 = 4, - AV1_METADATA_TYPE_TIMECODE = 5, + AV1_METADATA_TYPE_HDR_CLL = 1, + AV1_METADATA_TYPE_HDR_MDCV = 2, + AV1_METADATA_TYPE_SCALABILITY = 3, + AV1_METADATA_TYPE_ITUT_T35 = 4, + AV1_METADATA_TYPE_TIMECODE = 5, }; // Frame types (section 6.8.2). enum { - AV1_FRAME_KEY = 0, - AV1_FRAME_INTER = 1, - AV1_FRAME_INTRA_ONLY = 2, - AV1_FRAME_SWITCH = 3, + AV1_FRAME_KEY = 0, + AV1_FRAME_INTER = 1, + AV1_FRAME_INTRA_ONLY = 2, + AV1_FRAME_SWITCH = 3, }; // Reference frames (section 6.10.24). enum { - AV1_REF_FRAME_INTRA = 0, - AV1_REF_FRAME_LAST = 1, - AV1_REF_FRAME_LAST2 = 2, - AV1_REF_FRAME_LAST3 = 3, - AV1_REF_FRAME_GOLDEN = 4, - AV1_REF_FRAME_BWDREF = 5, - AV1_REF_FRAME_ALTREF2 = 6, - AV1_REF_FRAME_ALTREF = 7, + AV1_REF_FRAME_INTRA = 0, + AV1_REF_FRAME_LAST = 1, + AV1_REF_FRAME_LAST2 = 2, + AV1_REF_FRAME_LAST3 = 3, + AV1_REF_FRAME_GOLDEN = 4, + AV1_REF_FRAME_BWDREF = 5, + AV1_REF_FRAME_ALTREF2 = 6, + AV1_REF_FRAME_ALTREF = 7, }; // Constants (section 3). 
enum { - AV1_MAX_OPERATING_POINTS = 32, - - AV1_MAX_SB_SIZE = 128, - AV1_MI_SIZE = 4, - - AV1_MAX_TILE_WIDTH = 4096, - AV1_MAX_TILE_AREA = 4096 * 2304, - AV1_MAX_TILE_ROWS = 64, - AV1_MAX_TILE_COLS = 64, - - AV1_NUM_REF_FRAMES = 8, - AV1_REFS_PER_FRAME = 7, - AV1_TOTAL_REFS_PER_FRAME = 8, - AV1_PRIMARY_REF_NONE = 7, - - AV1_MAX_SEGMENTS = 8, - AV1_SEG_LVL_MAX = 8, - - AV1_SEG_LVL_ALT_Q = 0, - AV1_SEG_LVL_ALT_LF_Y_V = 1, - AV1_SEG_LVL_REF_FRAME = 5, - AV1_SEG_LVL_SKIP = 6, - AV1_SEG_LVL_GLOBAL_MV = 7, - - AV1_SELECT_SCREEN_CONTENT_TOOLS = 2, - AV1_SELECT_INTEGER_MV = 2, - - AV1_SUPERRES_NUM = 8, - AV1_SUPERRES_DENOM_MIN = 9, - - AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, - - AV1_GM_ABS_ALPHA_BITS = 12, - AV1_GM_ALPHA_PREC_BITS = 15, - AV1_GM_ABS_TRANS_ONLY_BITS = 9, - AV1_GM_TRANS_ONLY_PREC_BITS = 3, - AV1_GM_ABS_TRANS_BITS = 12, - AV1_GM_TRANS_PREC_BITS = 6, - AV1_WARPEDMODEL_PREC_BITS = 16, - - AV1_WARP_MODEL_IDENTITY = 0, - AV1_WARP_MODEL_TRANSLATION = 1, - AV1_WARP_MODEL_ROTZOOM = 2, - AV1_WARP_MODEL_AFFINE = 3, + AV1_MAX_OPERATING_POINTS = 32, + + AV1_MAX_SB_SIZE = 128, + AV1_MI_SIZE = 4, + + AV1_MAX_TILE_WIDTH = 4096, + AV1_MAX_TILE_AREA = 4096 * 2304, + AV1_MAX_TILE_ROWS = 64, + AV1_MAX_TILE_COLS = 64, + + AV1_NUM_REF_FRAMES = 8, + AV1_REFS_PER_FRAME = 7, + AV1_TOTAL_REFS_PER_FRAME = 8, + AV1_PRIMARY_REF_NONE = 7, + + AV1_MAX_SEGMENTS = 8, + AV1_SEG_LVL_MAX = 8, + + AV1_SEG_LVL_ALT_Q = 0, + AV1_SEG_LVL_ALT_LF_Y_V = 1, + AV1_SEG_LVL_REF_FRAME = 5, + AV1_SEG_LVL_SKIP = 6, + AV1_SEG_LVL_GLOBAL_MV = 7, + + AV1_SELECT_SCREEN_CONTENT_TOOLS = 2, + AV1_SELECT_INTEGER_MV = 2, + + AV1_SUPERRES_NUM = 8, + AV1_SUPERRES_DENOM_MIN = 9, + + AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, + + AV1_GM_ABS_ALPHA_BITS = 12, + AV1_GM_ALPHA_PREC_BITS = 15, + AV1_GM_ABS_TRANS_ONLY_BITS = 9, + AV1_GM_TRANS_ONLY_PREC_BITS = 3, + AV1_GM_ABS_TRANS_BITS = 12, + AV1_GM_TRANS_PREC_BITS = 6, + AV1_WARPEDMODEL_PREC_BITS = 16, + + AV1_WARP_MODEL_IDENTITY = 0, + AV1_WARP_MODEL_TRANSLATION = 1, + 
AV1_WARP_MODEL_ROTZOOM = 2, + AV1_WARP_MODEL_AFFINE = 3, + AV1_WARP_PARAM_REDUCE_BITS = 6, + + AV1_DIV_LUT_BITS = 8, + AV1_DIV_LUT_PREC_BITS = 14, + AV1_DIV_LUT_NUM = 257, + + AV1_MAX_LOOP_FILTER = 63, }; @@ -122,50 +129,56 @@ enum { // Chroma sample position. enum { - AV1_CSP_UNKNOWN = 0, - AV1_CSP_VERTICAL = 1, // -> AVCHROMA_LOC_LEFT. - AV1_CSP_COLOCATED = 2, // -> AVCHROMA_LOC_TOPLEFT. + AV1_CSP_UNKNOWN = 0, + AV1_CSP_VERTICAL = 1, // -> AVCHROMA_LOC_LEFT. + AV1_CSP_COLOCATED = 2, // -> AVCHROMA_LOC_TOPLEFT. }; // Scalability modes (section 6.7.5) enum { - AV1_SCALABILITY_L1T2 = 0, - AV1_SCALABILITY_L1T3 = 1, - AV1_SCALABILITY_L2T1 = 2, - AV1_SCALABILITY_L2T2 = 3, - AV1_SCALABILITY_L2T3 = 4, - AV1_SCALABILITY_S2T1 = 5, - AV1_SCALABILITY_S2T2 = 6, - AV1_SCALABILITY_S2T3 = 7, - AV1_SCALABILITY_L2T1h = 8, - AV1_SCALABILITY_L2T2h = 9, - AV1_SCALABILITY_L2T3h = 10, - AV1_SCALABILITY_S2T1h = 11, - AV1_SCALABILITY_S2T2h = 12, - AV1_SCALABILITY_S2T3h = 13, - AV1_SCALABILITY_SS = 14, - AV1_SCALABILITY_L3T1 = 15, - AV1_SCALABILITY_L3T2 = 16, - AV1_SCALABILITY_L3T3 = 17, - AV1_SCALABILITY_S3T1 = 18, - AV1_SCALABILITY_S3T2 = 19, - AV1_SCALABILITY_S3T3 = 20, - AV1_SCALABILITY_L3T2_KEY = 21, - AV1_SCALABILITY_L3T3_KEY = 22, - AV1_SCALABILITY_L4T5_KEY = 23, - AV1_SCALABILITY_L4T7_KEY = 24, - AV1_SCALABILITY_L3T2_KEY_SHIFT = 25, - AV1_SCALABILITY_L3T3_KEY_SHIFT = 26, - AV1_SCALABILITY_L4T5_KEY_SHIFT = 27, - AV1_SCALABILITY_L4T7_KEY_SHIFT = 28, + AV1_SCALABILITY_L1T2 = 0, + AV1_SCALABILITY_L1T3 = 1, + AV1_SCALABILITY_L2T1 = 2, + AV1_SCALABILITY_L2T2 = 3, + AV1_SCALABILITY_L2T3 = 4, + AV1_SCALABILITY_S2T1 = 5, + AV1_SCALABILITY_S2T2 = 6, + AV1_SCALABILITY_S2T3 = 7, + AV1_SCALABILITY_L2T1h = 8, + AV1_SCALABILITY_L2T2h = 9, + AV1_SCALABILITY_L2T3h = 10, + AV1_SCALABILITY_S2T1h = 11, + AV1_SCALABILITY_S2T2h = 12, + AV1_SCALABILITY_S2T3h = 13, + AV1_SCALABILITY_SS = 14, + AV1_SCALABILITY_L3T1 = 15, + AV1_SCALABILITY_L3T2 = 16, + AV1_SCALABILITY_L3T3 = 17, + AV1_SCALABILITY_S3T1 = 
18, + AV1_SCALABILITY_S3T2 = 19, + AV1_SCALABILITY_S3T3 = 20, + AV1_SCALABILITY_L3T2_KEY = 21, + AV1_SCALABILITY_L3T3_KEY = 22, + AV1_SCALABILITY_L4T5_KEY = 23, + AV1_SCALABILITY_L4T7_KEY = 24, + AV1_SCALABILITY_L3T2_KEY_SHIFT = 25, + AV1_SCALABILITY_L3T3_KEY_SHIFT = 26, + AV1_SCALABILITY_L4T5_KEY_SHIFT = 27, + AV1_SCALABILITY_L4T7_KEY_SHIFT = 28, }; // Frame Restoration types (section 6.10.15) enum { - AV1_RESTORE_NONE = 0, - AV1_RESTORE_WIENER = 1, - AV1_RESTORE_SGRPROJ = 2, - AV1_RESTORE_SWITCHABLE = 3, + AV1_RESTORE_NONE = 0, + AV1_RESTORE_WIENER = 1, + AV1_RESTORE_SGRPROJ = 2, + AV1_RESTORE_SWITCHABLE = 3, }; +// Sequence Headers are actually unbounded because one can use +// an arbitrary number of leading zeroes when encoding via uvlc. +// The following estimate is based around using the lowest number +// of bits for uvlc encoding. +#define AV1_SANE_SEQUENCE_HEADER_MAX_BITS 3138 + #endif /* AVCODEC_AV1_H */ diff --git a/third-party/cbs/include/cbs/cbs.h b/third-party/cbs/include/cbs/cbs.h index 5502a16627f..5583063b5e9 100644 --- a/third-party/cbs/include/cbs/cbs.h +++ b/third-party/cbs/include/cbs/cbs.h @@ -22,9 +22,11 @@ #include #include -#include +#include "libavutil/buffer.h" -#include +#include "codec_id.h" +#include "codec_par.h" +#include "packet.h" /* @@ -40,6 +42,7 @@ * bitstream. */ +struct AVCodecContext; struct CodedBitstreamType; /** @@ -64,48 +67,48 @@ typedef uint32_t CodedBitstreamUnitType; * particular codec. */ typedef struct CodedBitstreamUnit { - /** + /** * Codec-specific type of this unit. */ - CodedBitstreamUnitType type; + CodedBitstreamUnitType type; - /** + /** * Pointer to the directly-parsable bitstream form of this unit. * * May be NULL if the unit currently only exists in decomposed form. */ - uint8_t *data; - /** + uint8_t *data; + /** * The number of bytes in the bitstream (including any padding bits * in the final byte). 
*/ - size_t data_size; - /** + size_t data_size; + /** * The number of bits which should be ignored in the final byte. * * This supports non-byte-aligned bitstreams. */ - size_t data_bit_padding; - /** + size_t data_bit_padding; + /** * A reference to the buffer containing data. * * Must be set if data is not NULL. */ - AVBufferRef *data_ref; + AVBufferRef *data_ref; - /** + /** * Pointer to the decomposed form of this unit. * * The type of this structure depends on both the codec and the * type of this unit. May be NULL if the unit only exists in * bitstream form. */ - void *content; - /** + void *content; + /** * If content is reference counted, a reference to the buffer containing * content. Null if content is not reference counted. */ - AVBufferRef *content_ref; + AVBufferRef *content_ref; } CodedBitstreamUnit; /** @@ -116,70 +119,70 @@ typedef struct CodedBitstreamUnit { * which is composed of a sequence of H.264 NAL units. */ typedef struct CodedBitstreamFragment { - /** + /** * Pointer to the bitstream form of this fragment. * * May be NULL if the fragment only exists as component units. */ - uint8_t *data; - /** + uint8_t *data; + /** * The number of bytes in the bitstream. * * The number of bytes in the bitstream (including any padding bits * in the final byte). */ - size_t data_size; - /** + size_t data_size; + /** * The number of bits which should be ignored in the final byte. */ - size_t data_bit_padding; - /** + size_t data_bit_padding; + /** * A reference to the buffer containing data. * * Must be set if data is not NULL. */ - AVBufferRef *data_ref; + AVBufferRef *data_ref; - /** + /** * Number of units in this fragment. * * This may be zero if the fragment only exists in bitstream form * and has not been decomposed. */ - int nb_units; + int nb_units; - /** + /** * Number of allocated units. * * Must always be >= nb_units; designed for internal use by cbs. 
*/ - int nb_units_allocated; + int nb_units_allocated; - /** + /** * Pointer to an array of units of length nb_units_allocated. * Only the first nb_units are valid. * * Must be NULL if nb_units_allocated is zero. */ - CodedBitstreamUnit *units; + CodedBitstreamUnit *units; } CodedBitstreamFragment; /** * Context structure for coded bitstream operations. */ typedef struct CodedBitstreamContext { - /** + /** * Logging context to be passed to all av_log() calls associated * with this context. */ - void *log_ctx; + void *log_ctx; - /** + /** * Internal codec-specific hooks. */ - const struct CodedBitstreamType *codec; + const struct CodedBitstreamType *codec; - /** + /** * Internal codec-specific data. * * This contains any information needed when reading/writing @@ -188,37 +191,37 @@ typedef struct CodedBitstreamContext { * parameter sets - they are required to determine the bitstream * syntax but need not be present in every access unit. */ - void *priv_data; + void *priv_data; - /** + /** * Array of unit types which should be decomposed when reading. * * Types not in this list will be available in bitstream form only. * If NULL, all supported types will be decomposed. */ - const CodedBitstreamUnitType *decompose_unit_types; - /** + const CodedBitstreamUnitType *decompose_unit_types; + /** * Length of the decompose_unit_types array. */ - int nb_decompose_unit_types; + int nb_decompose_unit_types; - /** + /** * Enable trace output during read/write operations. */ - int trace_enable; - /** + int trace_enable; + /** * Log level to use for trace output. * * From AV_LOG_*; defaults to AV_LOG_TRACE. */ - int trace_level; + int trace_level; - /** + /** * Write buffer. Used as intermediate buffer when writing units. * For internal use of cbs only. 
*/ - uint8_t *write_buffer; - size_t write_buffer_size; + uint8_t *write_buffer; + size_t write_buffer_size; } CodedBitstreamContext; @@ -234,7 +237,7 @@ extern const enum AVCodecID ff_cbs_all_codec_ids[]; * Create and initialise a new context for the given codec. */ int ff_cbs_init(CodedBitstreamContext **ctx, - enum AVCodecID codec_id, void *log_ctx); + enum AVCodecID codec_id, void *log_ctx); /** * Reset all internal state in a context. @@ -259,8 +262,8 @@ void ff_cbs_close(CodedBitstreamContext **ctx); * before use. */ int ff_cbs_read_extradata(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecParameters *par); + CodedBitstreamFragment *frag, + const AVCodecParameters *par); /** * Read the extradata bitstream found in a codec context into a @@ -270,8 +273,12 @@ int ff_cbs_read_extradata(CodedBitstreamContext *ctx, * you already have a codec context. */ int ff_cbs_read_extradata_from_codec(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecContext *avctx); + CodedBitstreamFragment *frag, + const struct AVCodecContext *avctx); + +int ff_cbs_read_packet_side_data(CodedBitstreamContext *ctx, + CodedBitstreamFragment *frag, + const AVPacket *pkt); /** * Read the data bitstream from a packet into a fragment, then @@ -285,8 +292,8 @@ int ff_cbs_read_extradata_from_codec(CodedBitstreamContext *ctx, * before use. */ int ff_cbs_read_packet(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVPacket *pkt); + CodedBitstreamFragment *frag, + const AVPacket *pkt); /** * Read a bitstream from a memory region into a fragment, then @@ -300,8 +307,8 @@ int ff_cbs_read_packet(CodedBitstreamContext *ctx, * before use. */ int ff_cbs_read(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const uint8_t *data, size_t size); + CodedBitstreamFragment *frag, + const uint8_t *data, size_t size); /** @@ -317,7 +324,7 @@ int ff_cbs_read(CodedBitstreamContext *ctx, * write following fragments (e.g. parameter sets). 
*/ int ff_cbs_write_fragment_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag); + CodedBitstreamFragment *frag); /** * Write the bitstream of a fragment to the extradata in codec parameters. @@ -326,8 +333,8 @@ int ff_cbs_write_fragment_data(CodedBitstreamContext *ctx, * replaces any existing extradata in the structure. */ int ff_cbs_write_extradata(CodedBitstreamContext *ctx, - AVCodecParameters *par, - CodedBitstreamFragment *frag); + AVCodecParameters *par, + CodedBitstreamFragment *frag); /** * Write the bitstream of a fragment to a packet. @@ -340,8 +347,8 @@ int ff_cbs_write_extradata(CodedBitstreamContext *ctx, * touched at all. */ int ff_cbs_write_packet(CodedBitstreamContext *ctx, - AVPacket *pkt, - CodedBitstreamFragment *frag); + AVPacket *pkt, + CodedBitstreamFragment *frag); /** @@ -362,8 +369,8 @@ void ff_cbs_fragment_free(CodedBitstreamFragment *frag); * The content will be zeroed. */ int ff_cbs_alloc_unit_content(CodedBitstreamUnit *unit, - size_t size, - void (*free)(void *opaque, uint8_t *content)); + size_t size, + void (*free)(void *opaque, uint8_t *content)); /** * Allocate a new internal content buffer matching the type of the unit. @@ -371,16 +378,7 @@ int ff_cbs_alloc_unit_content(CodedBitstreamUnit *unit, * The content will be zeroed. */ int ff_cbs_alloc_unit_content2(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); - - -/** - * Allocate a new internal data buffer of the given size in the unit. - * - * The data buffer will have input padding. - */ -int ff_cbs_alloc_unit_data(CodedBitstreamUnit *unit, - size_t size); + CodedBitstreamUnit *unit); /** * Insert a new unit into a fragment with the given content. @@ -389,23 +387,22 @@ int ff_cbs_alloc_unit_data(CodedBitstreamUnit *unit, * content_buf is not supplied. 
*/ int ff_cbs_insert_unit_content(CodedBitstreamFragment *frag, - int position, - CodedBitstreamUnitType type, - void *content, - AVBufferRef *content_buf); + int position, + CodedBitstreamUnitType type, + void *content, + AVBufferRef *content_buf); /** - * Insert a new unit into a fragment with the given data bitstream. + * Add a new unit to a fragment with the given data bitstream. * * If data_buf is not supplied then data must have been allocated with * av_malloc() and will on success become owned by the unit after this * call or freed on error. */ -int ff_cbs_insert_unit_data(CodedBitstreamFragment *frag, - int position, - CodedBitstreamUnitType type, - uint8_t *data, size_t data_size, - AVBufferRef *data_buf); +int ff_cbs_append_unit_data(CodedBitstreamFragment *frag, + CodedBitstreamUnitType type, + uint8_t *data, size_t data_size, + AVBufferRef *data_buf); /** * Delete a unit from a fragment and free all memory it uses. @@ -413,7 +410,7 @@ int ff_cbs_insert_unit_data(CodedBitstreamFragment *frag, * Requires position to be >= 0 and < frag->nb_units. */ void ff_cbs_delete_unit(CodedBitstreamFragment *frag, - int position); + int position); /** @@ -426,7 +423,7 @@ void ff_cbs_delete_unit(CodedBitstreamFragment *frag, * decomposed content. */ int ff_cbs_make_unit_refcounted(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); + CodedBitstreamUnit *unit); /** * Make the content of a unit writable so that internal fields can be @@ -442,7 +439,7 @@ int ff_cbs_make_unit_refcounted(CodedBitstreamContext *ctx, * decomposed content. 
*/ int ff_cbs_make_unit_writable(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); + CodedBitstreamUnit *unit); #endif /* AVCODEC_CBS_H */ diff --git a/third-party/cbs/include/cbs/cbs_av1.h b/third-party/cbs/include/cbs/cbs_av1.h index 760f9a5dc67..1fc80dcfa05 100644 --- a/third-party/cbs/include/cbs/cbs_av1.h +++ b/third-party/cbs/include/cbs/cbs_av1.h @@ -27,437 +27,437 @@ typedef struct AV1RawOBUHeader { - uint8_t obu_forbidden_bit; - uint8_t obu_type; - uint8_t obu_extension_flag; - uint8_t obu_has_size_field; - uint8_t obu_reserved_1bit; - - uint8_t temporal_id; - uint8_t spatial_id; - uint8_t extension_header_reserved_3bits; + uint8_t obu_forbidden_bit; + uint8_t obu_type; + uint8_t obu_extension_flag; + uint8_t obu_has_size_field; + uint8_t obu_reserved_1bit; + + uint8_t temporal_id; + uint8_t spatial_id; + uint8_t extension_header_reserved_3bits; } AV1RawOBUHeader; typedef struct AV1RawColorConfig { - uint8_t high_bitdepth; - uint8_t twelve_bit; - uint8_t mono_chrome; - - uint8_t color_description_present_flag; - uint8_t color_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint8_t color_range; - uint8_t subsampling_x; - uint8_t subsampling_y; - uint8_t chroma_sample_position; - uint8_t separate_uv_delta_q; + uint8_t high_bitdepth; + uint8_t twelve_bit; + uint8_t mono_chrome; + + uint8_t color_description_present_flag; + uint8_t color_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + + uint8_t color_range; + uint8_t subsampling_x; + uint8_t subsampling_y; + uint8_t chroma_sample_position; + uint8_t separate_uv_delta_q; } AV1RawColorConfig; typedef struct AV1RawTimingInfo { - uint32_t num_units_in_display_tick; - uint32_t time_scale; + uint32_t num_units_in_display_tick; + uint32_t time_scale; - uint8_t equal_picture_interval; - uint32_t num_ticks_per_picture_minus_1; + uint8_t equal_picture_interval; + uint32_t num_ticks_per_picture_minus_1; } AV1RawTimingInfo; typedef struct 
AV1RawDecoderModelInfo { - uint8_t buffer_delay_length_minus_1; - uint32_t num_units_in_decoding_tick; - uint8_t buffer_removal_time_length_minus_1; - uint8_t frame_presentation_time_length_minus_1; + uint8_t buffer_delay_length_minus_1; + uint32_t num_units_in_decoding_tick; + uint8_t buffer_removal_time_length_minus_1; + uint8_t frame_presentation_time_length_minus_1; } AV1RawDecoderModelInfo; typedef struct AV1RawSequenceHeader { - uint8_t seq_profile; - uint8_t still_picture; - uint8_t reduced_still_picture_header; - - uint8_t timing_info_present_flag; - uint8_t decoder_model_info_present_flag; - uint8_t initial_display_delay_present_flag; - uint8_t operating_points_cnt_minus_1; - - AV1RawTimingInfo timing_info; - AV1RawDecoderModelInfo decoder_model_info; - - uint16_t operating_point_idc[AV1_MAX_OPERATING_POINTS]; - uint8_t seq_level_idx[AV1_MAX_OPERATING_POINTS]; - uint8_t seq_tier[AV1_MAX_OPERATING_POINTS]; - uint8_t decoder_model_present_for_this_op[AV1_MAX_OPERATING_POINTS]; - uint32_t decoder_buffer_delay[AV1_MAX_OPERATING_POINTS]; - uint32_t encoder_buffer_delay[AV1_MAX_OPERATING_POINTS]; - uint8_t low_delay_mode_flag[AV1_MAX_OPERATING_POINTS]; - uint8_t initial_display_delay_present_for_this_op[AV1_MAX_OPERATING_POINTS]; - uint8_t initial_display_delay_minus_1[AV1_MAX_OPERATING_POINTS]; - - uint8_t frame_width_bits_minus_1; - uint8_t frame_height_bits_minus_1; - uint16_t max_frame_width_minus_1; - uint16_t max_frame_height_minus_1; - - uint8_t frame_id_numbers_present_flag; - uint8_t delta_frame_id_length_minus_2; - uint8_t additional_frame_id_length_minus_1; - - uint8_t use_128x128_superblock; - uint8_t enable_filter_intra; - uint8_t enable_intra_edge_filter; - uint8_t enable_interintra_compound; - uint8_t enable_masked_compound; - uint8_t enable_warped_motion; - uint8_t enable_dual_filter; - - uint8_t enable_order_hint; - uint8_t enable_jnt_comp; - uint8_t enable_ref_frame_mvs; - - uint8_t seq_choose_screen_content_tools; - uint8_t 
seq_force_screen_content_tools; - uint8_t seq_choose_integer_mv; - uint8_t seq_force_integer_mv; - - uint8_t order_hint_bits_minus_1; - - uint8_t enable_superres; - uint8_t enable_cdef; - uint8_t enable_restoration; - - AV1RawColorConfig color_config; - - uint8_t film_grain_params_present; + uint8_t seq_profile; + uint8_t still_picture; + uint8_t reduced_still_picture_header; + + uint8_t timing_info_present_flag; + uint8_t decoder_model_info_present_flag; + uint8_t initial_display_delay_present_flag; + uint8_t operating_points_cnt_minus_1; + + AV1RawTimingInfo timing_info; + AV1RawDecoderModelInfo decoder_model_info; + + uint16_t operating_point_idc[AV1_MAX_OPERATING_POINTS]; + uint8_t seq_level_idx[AV1_MAX_OPERATING_POINTS]; + uint8_t seq_tier[AV1_MAX_OPERATING_POINTS]; + uint8_t decoder_model_present_for_this_op[AV1_MAX_OPERATING_POINTS]; + uint32_t decoder_buffer_delay[AV1_MAX_OPERATING_POINTS]; + uint32_t encoder_buffer_delay[AV1_MAX_OPERATING_POINTS]; + uint8_t low_delay_mode_flag[AV1_MAX_OPERATING_POINTS]; + uint8_t initial_display_delay_present_for_this_op[AV1_MAX_OPERATING_POINTS]; + uint8_t initial_display_delay_minus_1[AV1_MAX_OPERATING_POINTS]; + + uint8_t frame_width_bits_minus_1; + uint8_t frame_height_bits_minus_1; + uint16_t max_frame_width_minus_1; + uint16_t max_frame_height_minus_1; + + uint8_t frame_id_numbers_present_flag; + uint8_t delta_frame_id_length_minus_2; + uint8_t additional_frame_id_length_minus_1; + + uint8_t use_128x128_superblock; + uint8_t enable_filter_intra; + uint8_t enable_intra_edge_filter; + uint8_t enable_interintra_compound; + uint8_t enable_masked_compound; + uint8_t enable_warped_motion; + uint8_t enable_dual_filter; + + uint8_t enable_order_hint; + uint8_t enable_jnt_comp; + uint8_t enable_ref_frame_mvs; + + uint8_t seq_choose_screen_content_tools; + uint8_t seq_force_screen_content_tools; + uint8_t seq_choose_integer_mv; + uint8_t seq_force_integer_mv; + + uint8_t order_hint_bits_minus_1; + + uint8_t enable_superres; + 
uint8_t enable_cdef; + uint8_t enable_restoration; + + AV1RawColorConfig color_config; + + uint8_t film_grain_params_present; } AV1RawSequenceHeader; typedef struct AV1RawFilmGrainParams { - uint8_t apply_grain; - uint16_t grain_seed; - uint8_t update_grain; - uint8_t film_grain_params_ref_idx; - uint8_t num_y_points; - uint8_t point_y_value[14]; - uint8_t point_y_scaling[14]; - uint8_t chroma_scaling_from_luma; - uint8_t num_cb_points; - uint8_t point_cb_value[10]; - uint8_t point_cb_scaling[10]; - uint8_t num_cr_points; - uint8_t point_cr_value[10]; - uint8_t point_cr_scaling[10]; - uint8_t grain_scaling_minus_8; - uint8_t ar_coeff_lag; - uint8_t ar_coeffs_y_plus_128[24]; - uint8_t ar_coeffs_cb_plus_128[25]; - uint8_t ar_coeffs_cr_plus_128[25]; - uint8_t ar_coeff_shift_minus_6; - uint8_t grain_scale_shift; - uint8_t cb_mult; - uint8_t cb_luma_mult; - uint16_t cb_offset; - uint8_t cr_mult; - uint8_t cr_luma_mult; - uint16_t cr_offset; - uint8_t overlap_flag; - uint8_t clip_to_restricted_range; + uint8_t apply_grain; + uint16_t grain_seed; + uint8_t update_grain; + uint8_t film_grain_params_ref_idx; + uint8_t num_y_points; + uint8_t point_y_value[14]; + uint8_t point_y_scaling[14]; + uint8_t chroma_scaling_from_luma; + uint8_t num_cb_points; + uint8_t point_cb_value[10]; + uint8_t point_cb_scaling[10]; + uint8_t num_cr_points; + uint8_t point_cr_value[10]; + uint8_t point_cr_scaling[10]; + uint8_t grain_scaling_minus_8; + uint8_t ar_coeff_lag; + uint8_t ar_coeffs_y_plus_128[24]; + uint8_t ar_coeffs_cb_plus_128[25]; + uint8_t ar_coeffs_cr_plus_128[25]; + uint8_t ar_coeff_shift_minus_6; + uint8_t grain_scale_shift; + uint8_t cb_mult; + uint8_t cb_luma_mult; + uint16_t cb_offset; + uint8_t cr_mult; + uint8_t cr_luma_mult; + uint16_t cr_offset; + uint8_t overlap_flag; + uint8_t clip_to_restricted_range; } AV1RawFilmGrainParams; typedef struct AV1RawFrameHeader { - uint8_t show_existing_frame; - uint8_t frame_to_show_map_idx; - uint32_t frame_presentation_time; - 
uint32_t display_frame_id; - - uint8_t frame_type; - uint8_t show_frame; - uint8_t showable_frame; - - uint8_t error_resilient_mode; - uint8_t disable_cdf_update; - uint8_t allow_screen_content_tools; - uint8_t force_integer_mv; - - uint32_t current_frame_id; - uint8_t frame_size_override_flag; - uint8_t order_hint; - - uint8_t buffer_removal_time_present_flag; - uint32_t buffer_removal_time[AV1_MAX_OPERATING_POINTS]; - - uint8_t primary_ref_frame; - uint16_t frame_width_minus_1; - uint16_t frame_height_minus_1; - uint8_t use_superres; - uint8_t coded_denom; - uint8_t render_and_frame_size_different; - uint16_t render_width_minus_1; - uint16_t render_height_minus_1; - - uint8_t found_ref[AV1_REFS_PER_FRAME]; - - uint8_t refresh_frame_flags; - uint8_t allow_intrabc; - uint8_t ref_order_hint[AV1_NUM_REF_FRAMES]; - uint8_t frame_refs_short_signaling; - uint8_t last_frame_idx; - uint8_t golden_frame_idx; - int8_t ref_frame_idx[AV1_REFS_PER_FRAME]; - uint32_t delta_frame_id_minus1[AV1_REFS_PER_FRAME]; - - uint8_t allow_high_precision_mv; - uint8_t is_filter_switchable; - uint8_t interpolation_filter; - uint8_t is_motion_mode_switchable; - uint8_t use_ref_frame_mvs; - - uint8_t disable_frame_end_update_cdf; - - uint8_t uniform_tile_spacing_flag; - uint8_t tile_cols_log2; - uint8_t tile_rows_log2; - uint8_t width_in_sbs_minus_1[AV1_MAX_TILE_COLS]; - uint8_t height_in_sbs_minus_1[AV1_MAX_TILE_ROWS]; - uint16_t context_update_tile_id; - uint8_t tile_size_bytes_minus1; - - // These are derived values, but it's very unhelpful to have to - // recalculate them all the time so we store them here. 
- uint16_t tile_cols; - uint16_t tile_rows; - - uint8_t base_q_idx; - int8_t delta_q_y_dc; - uint8_t diff_uv_delta; - int8_t delta_q_u_dc; - int8_t delta_q_u_ac; - int8_t delta_q_v_dc; - int8_t delta_q_v_ac; - uint8_t using_qmatrix; - uint8_t qm_y; - uint8_t qm_u; - uint8_t qm_v; - - uint8_t segmentation_enabled; - uint8_t segmentation_update_map; - uint8_t segmentation_temporal_update; - uint8_t segmentation_update_data; - uint8_t feature_enabled[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; - int16_t feature_value[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; - - uint8_t delta_q_present; - uint8_t delta_q_res; - uint8_t delta_lf_present; - uint8_t delta_lf_res; - uint8_t delta_lf_multi; - - uint8_t loop_filter_level[4]; - uint8_t loop_filter_sharpness; - uint8_t loop_filter_delta_enabled; - uint8_t loop_filter_delta_update; - uint8_t update_ref_delta[AV1_TOTAL_REFS_PER_FRAME]; - int8_t loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME]; - uint8_t update_mode_delta[2]; - int8_t loop_filter_mode_deltas[2]; - - uint8_t cdef_damping_minus_3; - uint8_t cdef_bits; - uint8_t cdef_y_pri_strength[8]; - uint8_t cdef_y_sec_strength[8]; - uint8_t cdef_uv_pri_strength[8]; - uint8_t cdef_uv_sec_strength[8]; - - uint8_t lr_type[3]; - uint8_t lr_unit_shift; - uint8_t lr_uv_shift; - - uint8_t tx_mode; - uint8_t reference_select; - uint8_t skip_mode_present; - - uint8_t allow_warped_motion; - uint8_t reduced_tx_set; - - uint8_t is_global[AV1_TOTAL_REFS_PER_FRAME]; - uint8_t is_rot_zoom[AV1_TOTAL_REFS_PER_FRAME]; - uint8_t is_translation[AV1_TOTAL_REFS_PER_FRAME]; - //AV1RawSubexp gm_params[AV1_TOTAL_REFS_PER_FRAME][6]; - uint32_t gm_params[AV1_TOTAL_REFS_PER_FRAME][6]; - - AV1RawFilmGrainParams film_grain; + uint8_t show_existing_frame; + uint8_t frame_to_show_map_idx; + uint32_t frame_presentation_time; + uint32_t display_frame_id; + + uint8_t frame_type; + uint8_t show_frame; + uint8_t showable_frame; + + uint8_t error_resilient_mode; + uint8_t disable_cdf_update; + uint8_t 
allow_screen_content_tools; + uint8_t force_integer_mv; + + uint32_t current_frame_id; + uint8_t frame_size_override_flag; + uint8_t order_hint; + + uint8_t buffer_removal_time_present_flag; + uint32_t buffer_removal_time[AV1_MAX_OPERATING_POINTS]; + + uint8_t primary_ref_frame; + uint16_t frame_width_minus_1; + uint16_t frame_height_minus_1; + uint8_t use_superres; + uint8_t coded_denom; + uint8_t render_and_frame_size_different; + uint16_t render_width_minus_1; + uint16_t render_height_minus_1; + + uint8_t found_ref[AV1_REFS_PER_FRAME]; + + uint8_t refresh_frame_flags; + uint8_t allow_intrabc; + uint8_t ref_order_hint[AV1_NUM_REF_FRAMES]; + uint8_t frame_refs_short_signaling; + uint8_t last_frame_idx; + uint8_t golden_frame_idx; + int8_t ref_frame_idx[AV1_REFS_PER_FRAME]; + uint32_t delta_frame_id_minus1[AV1_REFS_PER_FRAME]; + + uint8_t allow_high_precision_mv; + uint8_t is_filter_switchable; + uint8_t interpolation_filter; + uint8_t is_motion_mode_switchable; + uint8_t use_ref_frame_mvs; + + uint8_t disable_frame_end_update_cdf; + + uint8_t uniform_tile_spacing_flag; + uint8_t tile_cols_log2; + uint8_t tile_rows_log2; + uint8_t width_in_sbs_minus_1[AV1_MAX_TILE_COLS]; + uint8_t height_in_sbs_minus_1[AV1_MAX_TILE_ROWS]; + uint16_t context_update_tile_id; + uint8_t tile_size_bytes_minus1; + + // These are derived values, but it's very unhelpful to have to + // recalculate them all the time so we store them here. 
+ uint16_t tile_cols; + uint16_t tile_rows; + + uint8_t base_q_idx; + int8_t delta_q_y_dc; + uint8_t diff_uv_delta; + int8_t delta_q_u_dc; + int8_t delta_q_u_ac; + int8_t delta_q_v_dc; + int8_t delta_q_v_ac; + uint8_t using_qmatrix; + uint8_t qm_y; + uint8_t qm_u; + uint8_t qm_v; + + uint8_t segmentation_enabled; + uint8_t segmentation_update_map; + uint8_t segmentation_temporal_update; + uint8_t segmentation_update_data; + uint8_t feature_enabled[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; + int16_t feature_value[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; + + uint8_t delta_q_present; + uint8_t delta_q_res; + uint8_t delta_lf_present; + uint8_t delta_lf_res; + uint8_t delta_lf_multi; + + uint8_t loop_filter_level[4]; + uint8_t loop_filter_sharpness; + uint8_t loop_filter_delta_enabled; + uint8_t loop_filter_delta_update; + uint8_t update_ref_delta[AV1_TOTAL_REFS_PER_FRAME]; + int8_t loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME]; + uint8_t update_mode_delta[2]; + int8_t loop_filter_mode_deltas[2]; + + uint8_t cdef_damping_minus_3; + uint8_t cdef_bits; + uint8_t cdef_y_pri_strength[8]; + uint8_t cdef_y_sec_strength[8]; + uint8_t cdef_uv_pri_strength[8]; + uint8_t cdef_uv_sec_strength[8]; + + uint8_t lr_type[3]; + uint8_t lr_unit_shift; + uint8_t lr_uv_shift; + + uint8_t tx_mode; + uint8_t reference_select; + uint8_t skip_mode_present; + + uint8_t allow_warped_motion; + uint8_t reduced_tx_set; + + uint8_t is_global[AV1_TOTAL_REFS_PER_FRAME]; + uint8_t is_rot_zoom[AV1_TOTAL_REFS_PER_FRAME]; + uint8_t is_translation[AV1_TOTAL_REFS_PER_FRAME]; + //AV1RawSubexp gm_params[AV1_TOTAL_REFS_PER_FRAME][6]; + uint32_t gm_params[AV1_TOTAL_REFS_PER_FRAME][6]; + + AV1RawFilmGrainParams film_grain; } AV1RawFrameHeader; typedef struct AV1RawTileData { - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_size; } AV1RawTileData; typedef struct AV1RawTileGroup { - uint8_t tile_start_and_end_present_flag; - uint16_t tg_start; - 
uint16_t tg_end; + uint8_t tile_start_and_end_present_flag; + uint16_t tg_start; + uint16_t tg_end; - AV1RawTileData tile_data; + AV1RawTileData tile_data; } AV1RawTileGroup; typedef struct AV1RawFrame { - AV1RawFrameHeader header; - AV1RawTileGroup tile_group; + AV1RawFrameHeader header; + AV1RawTileGroup tile_group; } AV1RawFrame; typedef struct AV1RawTileList { - uint8_t output_frame_width_in_tiles_minus_1; - uint8_t output_frame_height_in_tiles_minus_1; - uint16_t tile_count_minus_1; + uint8_t output_frame_width_in_tiles_minus_1; + uint8_t output_frame_height_in_tiles_minus_1; + uint16_t tile_count_minus_1; - AV1RawTileData tile_data; + AV1RawTileData tile_data; } AV1RawTileList; typedef struct AV1RawMetadataHDRCLL { - uint16_t max_cll; - uint16_t max_fall; + uint16_t max_cll; + uint16_t max_fall; } AV1RawMetadataHDRCLL; typedef struct AV1RawMetadataHDRMDCV { - uint16_t primary_chromaticity_x[3]; - uint16_t primary_chromaticity_y[3]; - uint16_t white_point_chromaticity_x; - uint16_t white_point_chromaticity_y; - uint32_t luminance_max; - uint32_t luminance_min; + uint16_t primary_chromaticity_x[3]; + uint16_t primary_chromaticity_y[3]; + uint16_t white_point_chromaticity_x; + uint16_t white_point_chromaticity_y; + uint32_t luminance_max; + uint32_t luminance_min; } AV1RawMetadataHDRMDCV; typedef struct AV1RawMetadataScalability { - uint8_t scalability_mode_idc; - uint8_t spatial_layers_cnt_minus_1; - uint8_t spatial_layer_dimensions_present_flag; - uint8_t spatial_layer_description_present_flag; - uint8_t temporal_group_description_present_flag; - uint8_t scalability_structure_reserved_3bits; - uint16_t spatial_layer_max_width[4]; - uint16_t spatial_layer_max_height[4]; - uint8_t spatial_layer_ref_id[4]; - uint8_t temporal_group_size; - uint8_t temporal_group_temporal_id[255]; - uint8_t temporal_group_temporal_switching_up_point_flag[255]; - uint8_t temporal_group_spatial_switching_up_point_flag[255]; - uint8_t temporal_group_ref_cnt[255]; - uint8_t 
temporal_group_ref_pic_diff[255][7]; + uint8_t scalability_mode_idc; + uint8_t spatial_layers_cnt_minus_1; + uint8_t spatial_layer_dimensions_present_flag; + uint8_t spatial_layer_description_present_flag; + uint8_t temporal_group_description_present_flag; + uint8_t scalability_structure_reserved_3bits; + uint16_t spatial_layer_max_width[4]; + uint16_t spatial_layer_max_height[4]; + uint8_t spatial_layer_ref_id[4]; + uint8_t temporal_group_size; + uint8_t temporal_group_temporal_id[255]; + uint8_t temporal_group_temporal_switching_up_point_flag[255]; + uint8_t temporal_group_spatial_switching_up_point_flag[255]; + uint8_t temporal_group_ref_cnt[255]; + uint8_t temporal_group_ref_pic_diff[255][7]; } AV1RawMetadataScalability; typedef struct AV1RawMetadataITUTT35 { - uint8_t itu_t_t35_country_code; - uint8_t itu_t_t35_country_code_extension_byte; + uint8_t itu_t_t35_country_code; + uint8_t itu_t_t35_country_code_extension_byte; - uint8_t *payload; - AVBufferRef *payload_ref; - size_t payload_size; + uint8_t *payload; + AVBufferRef *payload_ref; + size_t payload_size; } AV1RawMetadataITUTT35; typedef struct AV1RawMetadataTimecode { - uint8_t counting_type; - uint8_t full_timestamp_flag; - uint8_t discontinuity_flag; - uint8_t cnt_dropped_flag; - uint16_t n_frames; - uint8_t seconds_value; - uint8_t minutes_value; - uint8_t hours_value; - uint8_t seconds_flag; - uint8_t minutes_flag; - uint8_t hours_flag; - uint8_t time_offset_length; - uint32_t time_offset_value; + uint8_t counting_type; + uint8_t full_timestamp_flag; + uint8_t discontinuity_flag; + uint8_t cnt_dropped_flag; + uint16_t n_frames; + uint8_t seconds_value; + uint8_t minutes_value; + uint8_t hours_value; + uint8_t seconds_flag; + uint8_t minutes_flag; + uint8_t hours_flag; + uint8_t time_offset_length; + uint32_t time_offset_value; } AV1RawMetadataTimecode; typedef struct AV1RawMetadata { - uint64_t metadata_type; - union { - AV1RawMetadataHDRCLL hdr_cll; - AV1RawMetadataHDRMDCV hdr_mdcv; - 
AV1RawMetadataScalability scalability; - AV1RawMetadataITUTT35 itut_t35; - AV1RawMetadataTimecode timecode; - } metadata; + uint64_t metadata_type; + union { + AV1RawMetadataHDRCLL hdr_cll; + AV1RawMetadataHDRMDCV hdr_mdcv; + AV1RawMetadataScalability scalability; + AV1RawMetadataITUTT35 itut_t35; + AV1RawMetadataTimecode timecode; + } metadata; } AV1RawMetadata; typedef struct AV1RawPadding { - uint8_t *payload; - AVBufferRef *payload_ref; - size_t payload_size; + uint8_t *payload; + AVBufferRef *payload_ref; + size_t payload_size; } AV1RawPadding; typedef struct AV1RawOBU { - AV1RawOBUHeader header; - - size_t obu_size; - - union { - AV1RawSequenceHeader sequence_header; - AV1RawFrameHeader frame_header; - AV1RawFrame frame; - AV1RawTileGroup tile_group; - AV1RawTileList tile_list; - AV1RawMetadata metadata; - AV1RawPadding padding; - } obu; + AV1RawOBUHeader header; + + size_t obu_size; + + union { + AV1RawSequenceHeader sequence_header; + AV1RawFrameHeader frame_header; + AV1RawFrame frame; + AV1RawTileGroup tile_group; + AV1RawTileList tile_list; + AV1RawMetadata metadata; + AV1RawPadding padding; + } obu; } AV1RawOBU; typedef struct AV1ReferenceFrameState { - int valid; // RefValid - int frame_id; // RefFrameId - int upscaled_width; // RefUpscaledWidth - int frame_width; // RefFrameWidth - int frame_height; // RefFrameHeight - int render_width; // RefRenderWidth - int render_height; // RefRenderHeight - int frame_type; // RefFrameType - int subsampling_x; // RefSubsamplingX - int subsampling_y; // RefSubsamplingY - int bit_depth; // RefBitDepth - int order_hint; // RefOrderHint - - int8_t loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME]; - int8_t loop_filter_mode_deltas[2]; - uint8_t feature_enabled[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; - int16_t feature_value[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; + int valid; // RefValid + int frame_id; // RefFrameId + int upscaled_width; // RefUpscaledWidth + int frame_width; // RefFrameWidth + int frame_height; // 
RefFrameHeight + int render_width; // RefRenderWidth + int render_height; // RefRenderHeight + int frame_type; // RefFrameType + int subsampling_x; // RefSubsamplingX + int subsampling_y; // RefSubsamplingY + int bit_depth; // RefBitDepth + int order_hint; // RefOrderHint + + int8_t loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME]; + int8_t loop_filter_mode_deltas[2]; + uint8_t feature_enabled[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; + int16_t feature_value[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; } AV1ReferenceFrameState; typedef struct CodedBitstreamAV1Context { - const AVClass *class; - - AV1RawSequenceHeader *sequence_header; - AVBufferRef *sequence_header_ref; - - int seen_frame_header; - AVBufferRef *frame_header_ref; - uint8_t *frame_header; - size_t frame_header_size; - - int temporal_id; - int spatial_id; - int operating_point_idc; - - int bit_depth; - int order_hint; - int frame_width; - int frame_height; - int upscaled_width; - int render_width; - int render_height; - - int num_planes; - int coded_lossless; - int all_lossless; - int tile_cols; - int tile_rows; - int tile_num; - - AV1ReferenceFrameState ref[AV1_NUM_REF_FRAMES]; - - // AVOptions - int operating_point; + const AVClass *class; + + AV1RawSequenceHeader *sequence_header; + AVBufferRef *sequence_header_ref; + + int seen_frame_header; + AVBufferRef *frame_header_ref; + uint8_t *frame_header; + size_t frame_header_size; + + int temporal_id; + int spatial_id; + int operating_point_idc; + + int bit_depth; + int order_hint; + int frame_width; + int frame_height; + int upscaled_width; + int render_width; + int render_height; + + int num_planes; + int coded_lossless; + int all_lossless; + int tile_cols; + int tile_rows; + int tile_num; + + AV1ReferenceFrameState ref[AV1_NUM_REF_FRAMES]; + + // AVOptions + int operating_point; } CodedBitstreamAV1Context; diff --git a/third-party/cbs/include/cbs/cbs_bsf.h b/third-party/cbs/include/cbs/cbs_bsf.h index 0cfb64b4c60..aa7385c8f22 100644 --- 
a/third-party/cbs/include/cbs/cbs_bsf.h +++ b/third-party/cbs/include/cbs/cbs_bsf.h @@ -19,38 +19,44 @@ #ifndef AVCODEC_CBS_BSF_H #define AVCODEC_CBS_BSF_H +#include "libavutil/log.h" +#include "libavutil/opt.h" + +#include "bsf.h" +#include "codec_id.h" #include "cbs.h" +#include "packet.h" typedef struct CBSBSFType { - enum AVCodecID codec_id; - - // Name of a frame fragment in this codec (e.g. "access unit", - // "temporal unit"). - const char *fragment_name; - - // Name of a unit for this BSF, for use in error messages (e.g. - // "NAL unit", "OBU"). - const char *unit_name; - - // Update the content of a fragment with whatever metadata changes - // are desired. The associated AVPacket is provided so that any side - // data associated with the fragment can be inspected or edited. If - // pkt is NULL, then an extradata header fragment is being updated. - int (*update_fragment)(AVBSFContext *bsf, AVPacket *pkt, - CodedBitstreamFragment *frag); + enum AVCodecID codec_id; + + // Name of a frame fragment in this codec (e.g. "access unit", + // "temporal unit"). + const char *fragment_name; + + // Name of a unit for this BSF, for use in error messages (e.g. + // "NAL unit", "OBU"). + const char *unit_name; + + // Update the content of a fragment with whatever metadata changes + // are desired. The associated AVPacket is provided so that any side + // data associated with the fragment can be inspected or edited. If + // pkt is NULL, then an extradata header fragment is being updated. + int (*update_fragment)(AVBSFContext *bsf, AVPacket *pkt, + CodedBitstreamFragment *frag); } CBSBSFType; // Common structure for all generic CBS BSF users. An instance of this // structure must be the first member of the BSF private context (to be // pointed to by AVBSFContext.priv_data). 
typedef struct CBSBSFContext { - const AVClass *class; - const CBSBSFType *type; + const AVClass *class; + const CBSBSFType *type; - CodedBitstreamContext *input; - CodedBitstreamContext *output; - CodedBitstreamFragment fragment; + CodedBitstreamContext *input; + CodedBitstreamContext *output; + CodedBitstreamFragment fragment; } CBSBSFContext; /** @@ -61,7 +67,7 @@ typedef struct CBSBSFContext { * * Since it calls the update_fragment() function immediately to deal with * extradata, this should be called after any codec-specific setup is done - * (probably at the end of the AVBitStreamFilter.init function). + * (probably at the end of the FFBitStreamFilter.init function). */ int ff_cbs_bsf_generic_init(AVBSFContext *bsf, const CBSBSFType *type); @@ -69,7 +75,7 @@ int ff_cbs_bsf_generic_init(AVBSFContext *bsf, const CBSBSFType *type); * Close a generic CBS BSF instance. * * If no other deinitialisation is required then this function can be used - * directly as AVBitStreamFilter.close. + * directly as FFBitStreamFilter.close. */ void ff_cbs_bsf_generic_close(AVBSFContext *bsf); @@ -82,50 +88,50 @@ void ff_cbs_bsf_generic_close(AVBSFContext *bsf); * the same thing to that new extradata to form the output side-data first. * * If the BSF does not do anything else then this function can be used - * directly as AVBitStreamFilter.filter. + * directly as FFBitStreamFilter.filter. */ int ff_cbs_bsf_generic_filter(AVBSFContext *bsf, AVPacket *pkt); // Options for element manipulation. enum { - // Pass this element through unchanged. - BSF_ELEMENT_PASS, - // Insert this element, replacing any existing instances of it. - // Associated values may be provided explicitly (as addtional options) - // or implicitly (either as side data or deduced from other parts of - // the stream). - BSF_ELEMENT_INSERT, - // Remove this element if it appears in the stream. - BSF_ELEMENT_REMOVE, - // Extract this element to side data, so that further manipulation - // can happen elsewhere. 
- BSF_ELEMENT_EXTRACT, + // Pass this element through unchanged. + BSF_ELEMENT_PASS, + // Insert this element, replacing any existing instances of it. + // Associated values may be provided explicitly (as additional options) + // or implicitly (either as side data or deduced from other parts of + // the stream). + BSF_ELEMENT_INSERT, + // Remove this element if it appears in the stream. + BSF_ELEMENT_REMOVE, + // Extract this element to side data, so that further manipulation + // can happen elsewhere. + BSF_ELEMENT_EXTRACT, }; -#define BSF_ELEMENT_OPTIONS_PIR(name, help, field, opt_flags) \ - { name, help, OFFSET(field), AV_OPT_TYPE_INT, \ - { .i64 = BSF_ELEMENT_PASS }, \ - BSF_ELEMENT_PASS, BSF_ELEMENT_REMOVE, opt_flags, name }, \ - { "pass", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_PASS }, .flags = opt_flags, .unit = name }, \ - { "insert", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_INSERT }, .flags = opt_flags, .unit = name }, \ - { "remove", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_REMOVE }, .flags = opt_flags, .unit = name } - -#define BSF_ELEMENT_OPTIONS_PIRE(name, help, field, opt_flags) \ - { name, help, OFFSET(field), AV_OPT_TYPE_INT, \ - { .i64 = BSF_ELEMENT_PASS }, \ - BSF_ELEMENT_PASS, BSF_ELEMENT_EXTRACT, opt_flags, name }, \ - { "pass", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_PASS }, .flags = opt_flags, .unit = name }, \ - { "insert", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_INSERT }, .flags = opt_flags, .unit = name }, \ - { "remove", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_REMOVE }, .flags = opt_flags, .unit = name }, \ - { "extract", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_EXTRACT }, .flags = opt_flags, .unit = name } +#define BSF_ELEMENT_OPTIONS_PIR(name, help, field, opt_flags) \ + { name, help, OFFSET(field), AV_OPT_TYPE_INT, \ + { .i64 = BSF_ELEMENT_PASS }, \ + BSF_ELEMENT_PASS, BSF_ELEMENT_REMOVE, opt_flags, name }, \ + { "pass", NULL, 0, AV_OPT_TYPE_CONST, \
+ { .i64 = BSF_ELEMENT_PASS }, .flags = opt_flags, .unit = name }, \ + { "insert", NULL, 0, AV_OPT_TYPE_CONST, \ + { .i64 = BSF_ELEMENT_INSERT }, .flags = opt_flags, .unit = name }, \ + { "remove", NULL, 0, AV_OPT_TYPE_CONST, \ + { .i64 = BSF_ELEMENT_REMOVE }, .flags = opt_flags, .unit = name } + +#define BSF_ELEMENT_OPTIONS_PIRE(name, help, field, opt_flags) \ + { name, help, OFFSET(field), AV_OPT_TYPE_INT, \ + { .i64 = BSF_ELEMENT_PASS }, \ + BSF_ELEMENT_PASS, BSF_ELEMENT_EXTRACT, opt_flags, name }, \ + { "pass", NULL, 0, AV_OPT_TYPE_CONST, \ + { .i64 = BSF_ELEMENT_PASS }, .flags = opt_flags, .unit = name }, \ + { "insert", NULL, 0, AV_OPT_TYPE_CONST, \ + { .i64 = BSF_ELEMENT_INSERT }, .flags = opt_flags, .unit = name }, \ + { "remove", NULL, 0, AV_OPT_TYPE_CONST, \ + { .i64 = BSF_ELEMENT_REMOVE }, .flags = opt_flags, .unit = name }, \ + { "extract", NULL, 0, AV_OPT_TYPE_CONST, \ + { .i64 = BSF_ELEMENT_EXTRACT }, .flags = opt_flags, .unit = name } #endif /* AVCODEC_CBS_BSF_H */ diff --git a/third-party/cbs/include/cbs/cbs_h264.h b/third-party/cbs/include/cbs/cbs_h264.h index 713975341d5..ca9b688c057 100644 --- a/third-party/cbs/include/cbs/cbs_h264.h +++ b/third-party/cbs/include/cbs/cbs_h264.h @@ -29,378 +29,399 @@ typedef struct H264RawNALUnitHeader { - uint8_t nal_ref_idc; - uint8_t nal_unit_type; + uint8_t nal_ref_idc; + uint8_t nal_unit_type; - uint8_t svc_extension_flag; - uint8_t avc_3d_extension_flag; + uint8_t svc_extension_flag; + uint8_t avc_3d_extension_flag; } H264RawNALUnitHeader; typedef struct H264RawScalingList { - int8_t delta_scale[64]; + int8_t delta_scale[64]; } H264RawScalingList; typedef struct H264RawHRD { - uint8_t cpb_cnt_minus1; - uint8_t bit_rate_scale; - uint8_t cpb_size_scale; - - uint32_t bit_rate_value_minus1[H264_MAX_CPB_CNT]; - uint32_t cpb_size_value_minus1[H264_MAX_CPB_CNT]; - uint8_t cbr_flag[H264_MAX_CPB_CNT]; - - uint8_t initial_cpb_removal_delay_length_minus1; - uint8_t cpb_removal_delay_length_minus1; - uint8_t 
dpb_output_delay_length_minus1; - uint8_t time_offset_length; + uint8_t cpb_cnt_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + + uint32_t bit_rate_value_minus1[H264_MAX_CPB_CNT]; + uint32_t cpb_size_value_minus1[H264_MAX_CPB_CNT]; + uint8_t cbr_flag[H264_MAX_CPB_CNT]; + + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + uint8_t time_offset_length; } H264RawHRD; typedef struct H264RawVUI { - uint8_t aspect_ratio_info_present_flag; - uint8_t aspect_ratio_idc; - uint16_t sar_width; - uint16_t sar_height; - - uint8_t overscan_info_present_flag; - uint8_t overscan_appropriate_flag; - - uint8_t video_signal_type_present_flag; - uint8_t video_format; - uint8_t video_full_range_flag; - uint8_t colour_description_present_flag; - uint8_t colour_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint8_t chroma_loc_info_present_flag; - uint8_t chroma_sample_loc_type_top_field; - uint8_t chroma_sample_loc_type_bottom_field; - - uint8_t timing_info_present_flag; - uint32_t num_units_in_tick; - uint32_t time_scale; - uint8_t fixed_frame_rate_flag; - - uint8_t nal_hrd_parameters_present_flag; - H264RawHRD nal_hrd_parameters; - uint8_t vcl_hrd_parameters_present_flag; - H264RawHRD vcl_hrd_parameters; - uint8_t low_delay_hrd_flag; - - uint8_t pic_struct_present_flag; - - uint8_t bitstream_restriction_flag; - uint8_t motion_vectors_over_pic_boundaries_flag; - uint8_t max_bytes_per_pic_denom; - uint8_t max_bits_per_mb_denom; - uint8_t log2_max_mv_length_horizontal; - uint8_t log2_max_mv_length_vertical; - uint8_t max_num_reorder_frames; - uint8_t max_dec_frame_buffering; + uint8_t aspect_ratio_info_present_flag; + uint8_t aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + + uint8_t overscan_info_present_flag; + uint8_t overscan_appropriate_flag; + + uint8_t video_signal_type_present_flag; + uint8_t video_format; + uint8_t 
video_full_range_flag; + uint8_t colour_description_present_flag; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + + uint8_t chroma_loc_info_present_flag; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + + uint8_t timing_info_present_flag; + uint32_t num_units_in_tick; + uint32_t time_scale; + uint8_t fixed_frame_rate_flag; + + uint8_t nal_hrd_parameters_present_flag; + H264RawHRD nal_hrd_parameters; + uint8_t vcl_hrd_parameters_present_flag; + H264RawHRD vcl_hrd_parameters; + uint8_t low_delay_hrd_flag; + + uint8_t pic_struct_present_flag; + + uint8_t bitstream_restriction_flag; + uint8_t motion_vectors_over_pic_boundaries_flag; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_mb_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; + uint8_t max_num_reorder_frames; + uint8_t max_dec_frame_buffering; } H264RawVUI; typedef struct H264RawSPS { - H264RawNALUnitHeader nal_unit_header; - - uint8_t profile_idc; - uint8_t constraint_set0_flag; - uint8_t constraint_set1_flag; - uint8_t constraint_set2_flag; - uint8_t constraint_set3_flag; - uint8_t constraint_set4_flag; - uint8_t constraint_set5_flag; - uint8_t reserved_zero_2bits; - uint8_t level_idc; - - uint8_t seq_parameter_set_id; - - uint8_t chroma_format_idc; - uint8_t separate_colour_plane_flag; - uint8_t bit_depth_luma_minus8; - uint8_t bit_depth_chroma_minus8; - uint8_t qpprime_y_zero_transform_bypass_flag; - - uint8_t seq_scaling_matrix_present_flag; - uint8_t seq_scaling_list_present_flag[12]; - H264RawScalingList scaling_list_4x4[6]; - H264RawScalingList scaling_list_8x8[6]; - - uint8_t log2_max_frame_num_minus4; - uint8_t pic_order_cnt_type; - uint8_t log2_max_pic_order_cnt_lsb_minus4; - uint8_t delta_pic_order_always_zero_flag; - int32_t offset_for_non_ref_pic; - int32_t offset_for_top_to_bottom_field; - uint8_t num_ref_frames_in_pic_order_cnt_cycle; - int32_t 
offset_for_ref_frame[256]; - - uint8_t max_num_ref_frames; - uint8_t gaps_in_frame_num_allowed_flag; - - uint16_t pic_width_in_mbs_minus1; - uint16_t pic_height_in_map_units_minus1; - - uint8_t frame_mbs_only_flag; - uint8_t mb_adaptive_frame_field_flag; - uint8_t direct_8x8_inference_flag; - - uint8_t frame_cropping_flag; - uint16_t frame_crop_left_offset; - uint16_t frame_crop_right_offset; - uint16_t frame_crop_top_offset; - uint16_t frame_crop_bottom_offset; - - uint8_t vui_parameters_present_flag; - H264RawVUI vui; + H264RawNALUnitHeader nal_unit_header; + + uint8_t profile_idc; + uint8_t constraint_set0_flag; + uint8_t constraint_set1_flag; + uint8_t constraint_set2_flag; + uint8_t constraint_set3_flag; + uint8_t constraint_set4_flag; + uint8_t constraint_set5_flag; + uint8_t reserved_zero_2bits; + uint8_t level_idc; + + uint8_t seq_parameter_set_id; + + uint8_t chroma_format_idc; + uint8_t separate_colour_plane_flag; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t qpprime_y_zero_transform_bypass_flag; + + uint8_t seq_scaling_matrix_present_flag; + uint8_t seq_scaling_list_present_flag[12]; + H264RawScalingList scaling_list_4x4[6]; + H264RawScalingList scaling_list_8x8[6]; + + uint8_t log2_max_frame_num_minus4; + uint8_t pic_order_cnt_type; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t delta_pic_order_always_zero_flag; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + uint8_t num_ref_frames_in_pic_order_cnt_cycle; + int32_t offset_for_ref_frame[256]; + + uint8_t max_num_ref_frames; + uint8_t gaps_in_frame_num_allowed_flag; + + uint16_t pic_width_in_mbs_minus1; + uint16_t pic_height_in_map_units_minus1; + + uint8_t frame_mbs_only_flag; + uint8_t mb_adaptive_frame_field_flag; + uint8_t direct_8x8_inference_flag; + + uint8_t frame_cropping_flag; + uint16_t frame_crop_left_offset; + uint16_t frame_crop_right_offset; + uint16_t frame_crop_top_offset; + uint16_t frame_crop_bottom_offset; + + uint8_t 
vui_parameters_present_flag; + H264RawVUI vui; } H264RawSPS; typedef struct H264RawSPSExtension { - H264RawNALUnitHeader nal_unit_header; + H264RawNALUnitHeader nal_unit_header; - uint8_t seq_parameter_set_id; + uint8_t seq_parameter_set_id; - uint8_t aux_format_idc; - uint8_t bit_depth_aux_minus8; - uint8_t alpha_incr_flag; - uint16_t alpha_opaque_value; - uint16_t alpha_transparent_value; + uint8_t aux_format_idc; + uint8_t bit_depth_aux_minus8; + uint8_t alpha_incr_flag; + uint16_t alpha_opaque_value; + uint16_t alpha_transparent_value; - uint8_t additional_extension_flag; + uint8_t additional_extension_flag; } H264RawSPSExtension; typedef struct H264RawPPS { - H264RawNALUnitHeader nal_unit_header; + H264RawNALUnitHeader nal_unit_header; - uint8_t pic_parameter_set_id; - uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t seq_parameter_set_id; - uint8_t entropy_coding_mode_flag; - uint8_t bottom_field_pic_order_in_frame_present_flag; + uint8_t entropy_coding_mode_flag; + uint8_t bottom_field_pic_order_in_frame_present_flag; - uint8_t num_slice_groups_minus1; - uint8_t slice_group_map_type; - uint16_t run_length_minus1[H264_MAX_SLICE_GROUPS]; - uint16_t top_left[H264_MAX_SLICE_GROUPS]; - uint16_t bottom_right[H264_MAX_SLICE_GROUPS]; - uint8_t slice_group_change_direction_flag; - uint16_t slice_group_change_rate_minus1; - uint16_t pic_size_in_map_units_minus1; + uint8_t num_slice_groups_minus1; + uint8_t slice_group_map_type; + uint16_t run_length_minus1[H264_MAX_SLICE_GROUPS]; + uint16_t top_left[H264_MAX_SLICE_GROUPS]; + uint16_t bottom_right[H264_MAX_SLICE_GROUPS]; + uint8_t slice_group_change_direction_flag; + uint16_t slice_group_change_rate_minus1; + uint16_t pic_size_in_map_units_minus1; - uint8_t *slice_group_id; - AVBufferRef *slice_group_id_ref; + uint8_t *slice_group_id; + AVBufferRef *slice_group_id_ref; - uint8_t num_ref_idx_l0_default_active_minus1; - uint8_t num_ref_idx_l1_default_active_minus1; + uint8_t 
num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; - uint8_t weighted_pred_flag; - uint8_t weighted_bipred_idc; + uint8_t weighted_pred_flag; + uint8_t weighted_bipred_idc; - int8_t pic_init_qp_minus26; - int8_t pic_init_qs_minus26; - int8_t chroma_qp_index_offset; + int8_t pic_init_qp_minus26; + int8_t pic_init_qs_minus26; + int8_t chroma_qp_index_offset; - uint8_t deblocking_filter_control_present_flag; - uint8_t constrained_intra_pred_flag; + uint8_t deblocking_filter_control_present_flag; + uint8_t constrained_intra_pred_flag; - uint8_t more_rbsp_data; + uint8_t more_rbsp_data; - uint8_t redundant_pic_cnt_present_flag; - uint8_t transform_8x8_mode_flag; + uint8_t redundant_pic_cnt_present_flag; + uint8_t transform_8x8_mode_flag; - uint8_t pic_scaling_matrix_present_flag; - uint8_t pic_scaling_list_present_flag[12]; - H264RawScalingList scaling_list_4x4[6]; - H264RawScalingList scaling_list_8x8[6]; + uint8_t pic_scaling_matrix_present_flag; + uint8_t pic_scaling_list_present_flag[12]; + H264RawScalingList scaling_list_4x4[6]; + H264RawScalingList scaling_list_8x8[6]; - int8_t second_chroma_qp_index_offset; + int8_t second_chroma_qp_index_offset; } H264RawPPS; typedef struct H264RawAUD { - H264RawNALUnitHeader nal_unit_header; + H264RawNALUnitHeader nal_unit_header; - uint8_t primary_pic_type; + uint8_t primary_pic_type; } H264RawAUD; typedef struct H264RawSEIBufferingPeriod { - uint8_t seq_parameter_set_id; - struct { - uint32_t initial_cpb_removal_delay[H264_MAX_CPB_CNT]; - uint32_t initial_cpb_removal_delay_offset[H264_MAX_CPB_CNT]; - } nal, vcl; + uint8_t seq_parameter_set_id; + struct { + uint32_t initial_cpb_removal_delay[H264_MAX_CPB_CNT]; + uint32_t initial_cpb_removal_delay_offset[H264_MAX_CPB_CNT]; + } nal, vcl; } H264RawSEIBufferingPeriod; typedef struct H264RawSEIPicTimestamp { - uint8_t ct_type; - uint8_t nuit_field_based_flag; - uint8_t counting_type; - uint8_t full_timestamp_flag; - uint8_t discontinuity_flag; - 
uint8_t cnt_dropped_flag; - uint8_t n_frames; - uint8_t seconds_flag; - uint8_t seconds_value; - uint8_t minutes_flag; - uint8_t minutes_value; - uint8_t hours_flag; - uint8_t hours_value; - int32_t time_offset; + uint8_t ct_type; + uint8_t nuit_field_based_flag; + uint8_t counting_type; + uint8_t full_timestamp_flag; + uint8_t discontinuity_flag; + uint8_t cnt_dropped_flag; + uint8_t n_frames; + uint8_t seconds_flag; + uint8_t seconds_value; + uint8_t minutes_flag; + uint8_t minutes_value; + uint8_t hours_flag; + uint8_t hours_value; + int32_t time_offset; } H264RawSEIPicTimestamp; typedef struct H264RawSEIPicTiming { - uint32_t cpb_removal_delay; - uint32_t dpb_output_delay; - uint8_t pic_struct; - uint8_t clock_timestamp_flag[3]; - H264RawSEIPicTimestamp timestamp[3]; + uint32_t cpb_removal_delay; + uint32_t dpb_output_delay; + uint8_t pic_struct; + uint8_t clock_timestamp_flag[3]; + H264RawSEIPicTimestamp timestamp[3]; } H264RawSEIPicTiming; typedef struct H264RawSEIPanScanRect { - uint32_t pan_scan_rect_id; - uint8_t pan_scan_rect_cancel_flag; - uint8_t pan_scan_cnt_minus1; - int32_t pan_scan_rect_left_offset[3]; - int32_t pan_scan_rect_right_offset[3]; - int32_t pan_scan_rect_top_offset[3]; - int32_t pan_scan_rect_bottom_offset[3]; - uint16_t pan_scan_rect_repetition_period; + uint32_t pan_scan_rect_id; + uint8_t pan_scan_rect_cancel_flag; + uint8_t pan_scan_cnt_minus1; + int32_t pan_scan_rect_left_offset[3]; + int32_t pan_scan_rect_right_offset[3]; + int32_t pan_scan_rect_top_offset[3]; + int32_t pan_scan_rect_bottom_offset[3]; + uint16_t pan_scan_rect_repetition_period; } H264RawSEIPanScanRect; typedef struct H264RawSEIRecoveryPoint { - uint16_t recovery_frame_cnt; - uint8_t exact_match_flag; - uint8_t broken_link_flag; - uint8_t changing_slice_group_idc; + uint16_t recovery_frame_cnt; + uint8_t exact_match_flag; + uint8_t broken_link_flag; + uint8_t changing_slice_group_idc; } H264RawSEIRecoveryPoint; +typedef struct H264RawFilmGrainCharacteristics { + 
uint8_t film_grain_characteristics_cancel_flag; + uint8_t film_grain_model_id; + uint8_t separate_colour_description_present_flag; + uint8_t film_grain_bit_depth_luma_minus8; + uint8_t film_grain_bit_depth_chroma_minus8; + uint8_t film_grain_full_range_flag; + uint8_t film_grain_colour_primaries; + uint8_t film_grain_transfer_characteristics; + uint8_t film_grain_matrix_coefficients; + uint8_t blending_mode_id; + uint8_t log2_scale_factor; + uint8_t comp_model_present_flag[3]; + uint8_t num_intensity_intervals_minus1[3]; + uint8_t num_model_values_minus1[3]; + uint8_t intensity_interval_lower_bound[3][256]; + uint8_t intensity_interval_upper_bound[3][256]; + int16_t comp_model_value[3][256][6]; + uint8_t film_grain_characteristics_repetition_period; +} H264RawFilmGrainCharacteristics; + typedef struct H264RawSEIDisplayOrientation { - uint8_t display_orientation_cancel_flag; - uint8_t hor_flip; - uint8_t ver_flip; - uint16_t anticlockwise_rotation; - uint16_t display_orientation_repetition_period; - uint8_t display_orientation_extension_flag; + uint8_t display_orientation_cancel_flag; + uint8_t hor_flip; + uint8_t ver_flip; + uint16_t anticlockwise_rotation; + uint16_t display_orientation_repetition_period; + uint8_t display_orientation_extension_flag; } H264RawSEIDisplayOrientation; typedef struct H264RawSEI { - H264RawNALUnitHeader nal_unit_header; - SEIRawMessageList message_list; + H264RawNALUnitHeader nal_unit_header; + SEIRawMessageList message_list; } H264RawSEI; typedef struct H264RawSliceHeader { - H264RawNALUnitHeader nal_unit_header; + H264RawNALUnitHeader nal_unit_header; - uint32_t first_mb_in_slice; - uint8_t slice_type; + uint32_t first_mb_in_slice; + uint8_t slice_type; - uint8_t pic_parameter_set_id; + uint8_t pic_parameter_set_id; - uint8_t colour_plane_id; + uint8_t colour_plane_id; - uint16_t frame_num; - uint8_t field_pic_flag; - uint8_t bottom_field_flag; + uint16_t frame_num; + uint8_t field_pic_flag; + uint8_t bottom_field_flag; - uint16_t 
idr_pic_id; + uint16_t idr_pic_id; - uint16_t pic_order_cnt_lsb; - int32_t delta_pic_order_cnt_bottom; - int32_t delta_pic_order_cnt[2]; + uint16_t pic_order_cnt_lsb; + int32_t delta_pic_order_cnt_bottom; + int32_t delta_pic_order_cnt[2]; - uint8_t redundant_pic_cnt; - uint8_t direct_spatial_mv_pred_flag; + uint8_t redundant_pic_cnt; + uint8_t direct_spatial_mv_pred_flag; - uint8_t num_ref_idx_active_override_flag; - uint8_t num_ref_idx_l0_active_minus1; - uint8_t num_ref_idx_l1_active_minus1; + uint8_t num_ref_idx_active_override_flag; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; - uint8_t ref_pic_list_modification_flag_l0; - uint8_t ref_pic_list_modification_flag_l1; - struct { - uint8_t modification_of_pic_nums_idc; - int32_t abs_diff_pic_num_minus1; - uint8_t long_term_pic_num; - } rplm_l0[H264_MAX_RPLM_COUNT], rplm_l1[H264_MAX_RPLM_COUNT]; + uint8_t ref_pic_list_modification_flag_l0; + uint8_t ref_pic_list_modification_flag_l1; + struct { + uint8_t modification_of_pic_nums_idc; + int32_t abs_diff_pic_num_minus1; + uint8_t long_term_pic_num; + } rplm_l0[H264_MAX_RPLM_COUNT], rplm_l1[H264_MAX_RPLM_COUNT]; - uint8_t luma_log2_weight_denom; - uint8_t chroma_log2_weight_denom; + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; - uint8_t luma_weight_l0_flag[H264_MAX_REFS]; - int8_t luma_weight_l0[H264_MAX_REFS]; - int8_t luma_offset_l0[H264_MAX_REFS]; - uint8_t chroma_weight_l0_flag[H264_MAX_REFS]; - int8_t chroma_weight_l0[H264_MAX_REFS][2]; - int8_t chroma_offset_l0[H264_MAX_REFS][2]; + uint8_t luma_weight_l0_flag[H264_MAX_REFS]; + int8_t luma_weight_l0[H264_MAX_REFS]; + int8_t luma_offset_l0[H264_MAX_REFS]; + uint8_t chroma_weight_l0_flag[H264_MAX_REFS]; + int8_t chroma_weight_l0[H264_MAX_REFS][2]; + int8_t chroma_offset_l0[H264_MAX_REFS][2]; - uint8_t luma_weight_l1_flag[H264_MAX_REFS]; - int8_t luma_weight_l1[H264_MAX_REFS]; - int8_t luma_offset_l1[H264_MAX_REFS]; - uint8_t 
chroma_weight_l1_flag[H264_MAX_REFS]; - int8_t chroma_weight_l1[H264_MAX_REFS][2]; - int8_t chroma_offset_l1[H264_MAX_REFS][2]; + uint8_t luma_weight_l1_flag[H264_MAX_REFS]; + int8_t luma_weight_l1[H264_MAX_REFS]; + int8_t luma_offset_l1[H264_MAX_REFS]; + uint8_t chroma_weight_l1_flag[H264_MAX_REFS]; + int8_t chroma_weight_l1[H264_MAX_REFS][2]; + int8_t chroma_offset_l1[H264_MAX_REFS][2]; - uint8_t no_output_of_prior_pics_flag; - uint8_t long_term_reference_flag; + uint8_t no_output_of_prior_pics_flag; + uint8_t long_term_reference_flag; - uint8_t adaptive_ref_pic_marking_mode_flag; - struct { - uint8_t memory_management_control_operation; - int32_t difference_of_pic_nums_minus1; - uint8_t long_term_pic_num; - uint8_t long_term_frame_idx; - uint8_t max_long_term_frame_idx_plus1; - } mmco[H264_MAX_MMCO_COUNT]; + uint8_t adaptive_ref_pic_marking_mode_flag; + struct { + uint8_t memory_management_control_operation; + int32_t difference_of_pic_nums_minus1; + uint8_t long_term_pic_num; + uint8_t long_term_frame_idx; + uint8_t max_long_term_frame_idx_plus1; + } mmco[H264_MAX_MMCO_COUNT]; - uint8_t cabac_init_idc; + uint8_t cabac_init_idc; - int8_t slice_qp_delta; + int8_t slice_qp_delta; - uint8_t sp_for_switch_flag; - int8_t slice_qs_delta; + uint8_t sp_for_switch_flag; + int8_t slice_qs_delta; - uint8_t disable_deblocking_filter_idc; - int8_t slice_alpha_c0_offset_div2; - int8_t slice_beta_offset_div2; + uint8_t disable_deblocking_filter_idc; + int8_t slice_alpha_c0_offset_div2; + int8_t slice_beta_offset_div2; - uint16_t slice_group_change_cycle; + uint16_t slice_group_change_cycle; } H264RawSliceHeader; typedef struct H264RawSlice { - H264RawSliceHeader header; + H264RawSliceHeader header; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; - int data_bit_start; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_size; + int data_bit_start; } H264RawSlice; typedef struct H264RawFiller { - H264RawNALUnitHeader nal_unit_header; + H264RawNALUnitHeader 
nal_unit_header; - uint32_t filler_size; + uint32_t filler_size; } H264RawFiller; typedef struct CodedBitstreamH264Context { - // Reader/writer context in common with the H.265 implementation. - CodedBitstreamH2645Context common; - - // All currently available parameter sets. These are updated when - // any parameter set NAL unit is read/written with this context. - AVBufferRef *sps_ref[H264_MAX_SPS_COUNT]; - AVBufferRef *pps_ref[H264_MAX_PPS_COUNT]; - H264RawSPS *sps[H264_MAX_SPS_COUNT]; - H264RawPPS *pps[H264_MAX_PPS_COUNT]; - - // The currently active parameter sets. These are updated when any - // NAL unit refers to the relevant parameter set. These pointers - // must also be present in the arrays above. - const H264RawSPS *active_sps; - const H264RawPPS *active_pps; - - // The NAL unit type of the most recent normal slice. This is required - // to be able to read/write auxiliary slices, because IdrPicFlag is - // otherwise unknown. - uint8_t last_slice_nal_unit_type; + // Reader/writer context in common with the H.265 implementation. + CodedBitstreamH2645Context common; + + // All currently available parameter sets. These are updated when + // any parameter set NAL unit is read/written with this context. + AVBufferRef *sps_ref[H264_MAX_SPS_COUNT]; + AVBufferRef *pps_ref[H264_MAX_PPS_COUNT]; + H264RawSPS *sps[H264_MAX_SPS_COUNT]; + H264RawPPS *pps[H264_MAX_PPS_COUNT]; + + // The currently active parameter sets. These are updated when any + // NAL unit refers to the relevant parameter set. These pointers + // must also be present in the arrays above. + const H264RawSPS *active_sps; + const H264RawPPS *active_pps; + + // The NAL unit type of the most recent normal slice. This is required + // to be able to read/write auxiliary slices, because IdrPicFlag is + // otherwise unknown. 
+ uint8_t last_slice_nal_unit_type; } CodedBitstreamH264Context; #endif /* AVCODEC_CBS_H264_H */ diff --git a/third-party/cbs/include/cbs/cbs_h2645.h b/third-party/cbs/include/cbs/cbs_h2645.h index 03cfb7a0227..f4c987a5119 100644 --- a/third-party/cbs/include/cbs/cbs_h2645.h +++ b/third-party/cbs/include/cbs/cbs_h2645.h @@ -23,13 +23,13 @@ typedef struct CodedBitstreamH2645Context { - // If set, the stream being read is in MP4 (AVCC/HVCC) format. If not - // set, the stream is assumed to be in annex B format. - int mp4; - // Size in bytes of the NAL length field for MP4 format. - int nal_length_size; - // Packet reader. - H2645Packet read_packet; + // If set, the stream being read is in MP4 (AVCC/HVCC) format. If not + // set, the stream is assumed to be in annex B format. + int mp4; + // Size in bytes of the NAL length field for MP4 format. + int nal_length_size; + // Packet reader. + H2645Packet read_packet; } CodedBitstreamH2645Context; diff --git a/third-party/cbs/include/cbs/cbs_h265.h b/third-party/cbs/include/cbs/cbs_h265.h index 73c19b2de74..f7cbd4970d0 100644 --- a/third-party/cbs/include/cbs/cbs_h265.h +++ b/third-party/cbs/include/cbs/cbs_h265.h @@ -27,652 +27,673 @@ #include "hevc.h" typedef struct H265RawNALUnitHeader { - uint8_t nal_unit_type; - uint8_t nuh_layer_id; - uint8_t nuh_temporal_id_plus1; + uint8_t nal_unit_type; + uint8_t nuh_layer_id; + uint8_t nuh_temporal_id_plus1; } H265RawNALUnitHeader; typedef struct H265RawProfileTierLevel { - uint8_t general_profile_space; - uint8_t general_tier_flag; - uint8_t general_profile_idc; - - uint8_t general_profile_compatibility_flag[32]; - - uint8_t general_progressive_source_flag; - uint8_t general_interlaced_source_flag; - uint8_t general_non_packed_constraint_flag; - uint8_t general_frame_only_constraint_flag; - - uint8_t general_max_12bit_constraint_flag; - uint8_t general_max_10bit_constraint_flag; - uint8_t general_max_8bit_constraint_flag; - uint8_t general_max_422chroma_constraint_flag; - 
uint8_t general_max_420chroma_constraint_flag; - uint8_t general_max_monochrome_constraint_flag; - uint8_t general_intra_constraint_flag; - uint8_t general_one_picture_only_constraint_flag; - uint8_t general_lower_bit_rate_constraint_flag; - uint8_t general_max_14bit_constraint_flag; - - uint8_t general_inbld_flag; - - uint8_t general_level_idc; - - uint8_t sub_layer_profile_present_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_level_present_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_profile_space[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_tier_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_profile_idc[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_profile_compatibility_flag[HEVC_MAX_SUB_LAYERS][32]; - - uint8_t sub_layer_progressive_source_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_interlaced_source_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_non_packed_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_frame_only_constraint_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_max_12bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_10bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_8bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_422chroma_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_420chroma_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_monochrome_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_intra_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_one_picture_only_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_lower_bit_rate_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_14bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_inbld_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_level_idc[HEVC_MAX_SUB_LAYERS]; + uint8_t general_profile_space; + uint8_t general_tier_flag; + uint8_t general_profile_idc; + + uint8_t general_profile_compatibility_flag[32]; + + uint8_t general_progressive_source_flag; + uint8_t 
general_interlaced_source_flag; + uint8_t general_non_packed_constraint_flag; + uint8_t general_frame_only_constraint_flag; + + uint8_t general_max_12bit_constraint_flag; + uint8_t general_max_10bit_constraint_flag; + uint8_t general_max_8bit_constraint_flag; + uint8_t general_max_422chroma_constraint_flag; + uint8_t general_max_420chroma_constraint_flag; + uint8_t general_max_monochrome_constraint_flag; + uint8_t general_intra_constraint_flag; + uint8_t general_one_picture_only_constraint_flag; + uint8_t general_lower_bit_rate_constraint_flag; + uint8_t general_max_14bit_constraint_flag; + + uint8_t general_inbld_flag; + + uint8_t general_level_idc; + + uint8_t sub_layer_profile_present_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_level_present_flag[HEVC_MAX_SUB_LAYERS]; + + uint8_t sub_layer_profile_space[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_tier_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_profile_idc[HEVC_MAX_SUB_LAYERS]; + + uint8_t sub_layer_profile_compatibility_flag[HEVC_MAX_SUB_LAYERS][32]; + + uint8_t sub_layer_progressive_source_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_interlaced_source_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_non_packed_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_frame_only_constraint_flag[HEVC_MAX_SUB_LAYERS]; + + uint8_t sub_layer_max_12bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_max_10bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_max_8bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_max_422chroma_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_max_420chroma_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_max_monochrome_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_intra_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_one_picture_only_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_lower_bit_rate_constraint_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t sub_layer_max_14bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; + 
+ uint8_t sub_layer_inbld_flag[HEVC_MAX_SUB_LAYERS]; + + uint8_t sub_layer_level_idc[HEVC_MAX_SUB_LAYERS]; } H265RawProfileTierLevel; typedef struct H265RawSubLayerHRDParameters { - uint32_t bit_rate_value_minus1[HEVC_MAX_CPB_CNT]; - uint32_t cpb_size_value_minus1[HEVC_MAX_CPB_CNT]; - uint32_t cpb_size_du_value_minus1[HEVC_MAX_CPB_CNT]; - uint32_t bit_rate_du_value_minus1[HEVC_MAX_CPB_CNT]; - uint8_t cbr_flag[HEVC_MAX_CPB_CNT]; + uint32_t bit_rate_value_minus1[HEVC_MAX_CPB_CNT]; + uint32_t cpb_size_value_minus1[HEVC_MAX_CPB_CNT]; + uint32_t cpb_size_du_value_minus1[HEVC_MAX_CPB_CNT]; + uint32_t bit_rate_du_value_minus1[HEVC_MAX_CPB_CNT]; + uint8_t cbr_flag[HEVC_MAX_CPB_CNT]; } H265RawSubLayerHRDParameters; typedef struct H265RawHRDParameters { - uint8_t nal_hrd_parameters_present_flag; - uint8_t vcl_hrd_parameters_present_flag; - - uint8_t sub_pic_hrd_params_present_flag; - uint8_t tick_divisor_minus2; - uint8_t du_cpb_removal_delay_increment_length_minus1; - uint8_t sub_pic_cpb_params_in_pic_timing_sei_flag; - uint8_t dpb_output_delay_du_length_minus1; - - uint8_t bit_rate_scale; - uint8_t cpb_size_scale; - uint8_t cpb_size_du_scale; - - uint8_t initial_cpb_removal_delay_length_minus1; - uint8_t au_cpb_removal_delay_length_minus1; - uint8_t dpb_output_delay_length_minus1; - - uint8_t fixed_pic_rate_general_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t fixed_pic_rate_within_cvs_flag[HEVC_MAX_SUB_LAYERS]; - uint16_t elemental_duration_in_tc_minus1[HEVC_MAX_SUB_LAYERS]; - uint8_t low_delay_hrd_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t cpb_cnt_minus1[HEVC_MAX_SUB_LAYERS]; - H265RawSubLayerHRDParameters nal_sub_layer_hrd_parameters[HEVC_MAX_SUB_LAYERS]; - H265RawSubLayerHRDParameters vcl_sub_layer_hrd_parameters[HEVC_MAX_SUB_LAYERS]; + uint8_t nal_hrd_parameters_present_flag; + uint8_t vcl_hrd_parameters_present_flag; + + uint8_t sub_pic_hrd_params_present_flag; + uint8_t tick_divisor_minus2; + uint8_t du_cpb_removal_delay_increment_length_minus1; + uint8_t 
sub_pic_cpb_params_in_pic_timing_sei_flag; + uint8_t dpb_output_delay_du_length_minus1; + + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t cpb_size_du_scale; + + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t au_cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + + uint8_t fixed_pic_rate_general_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t fixed_pic_rate_within_cvs_flag[HEVC_MAX_SUB_LAYERS]; + uint16_t elemental_duration_in_tc_minus1[HEVC_MAX_SUB_LAYERS]; + uint8_t low_delay_hrd_flag[HEVC_MAX_SUB_LAYERS]; + uint8_t cpb_cnt_minus1[HEVC_MAX_SUB_LAYERS]; + H265RawSubLayerHRDParameters nal_sub_layer_hrd_parameters[HEVC_MAX_SUB_LAYERS]; + H265RawSubLayerHRDParameters vcl_sub_layer_hrd_parameters[HEVC_MAX_SUB_LAYERS]; } H265RawHRDParameters; typedef struct H265RawVUI { - uint8_t aspect_ratio_info_present_flag; - uint8_t aspect_ratio_idc; - uint16_t sar_width; - uint16_t sar_height; - - uint8_t overscan_info_present_flag; - uint8_t overscan_appropriate_flag; - - uint8_t video_signal_type_present_flag; - uint8_t video_format; - uint8_t video_full_range_flag; - uint8_t colour_description_present_flag; - uint8_t colour_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint8_t chroma_loc_info_present_flag; - uint8_t chroma_sample_loc_type_top_field; - uint8_t chroma_sample_loc_type_bottom_field; - - uint8_t neutral_chroma_indication_flag; - uint8_t field_seq_flag; - uint8_t frame_field_info_present_flag; - - uint8_t default_display_window_flag; - uint16_t def_disp_win_left_offset; - uint16_t def_disp_win_right_offset; - uint16_t def_disp_win_top_offset; - uint16_t def_disp_win_bottom_offset; - - uint8_t vui_timing_info_present_flag; - uint32_t vui_num_units_in_tick; - uint32_t vui_time_scale; - uint8_t vui_poc_proportional_to_timing_flag; - uint32_t vui_num_ticks_poc_diff_one_minus1; - uint8_t vui_hrd_parameters_present_flag; - H265RawHRDParameters hrd_parameters; - - uint8_t bitstream_restriction_flag; 
- uint8_t tiles_fixed_structure_flag; - uint8_t motion_vectors_over_pic_boundaries_flag; - uint8_t restricted_ref_pic_lists_flag; - uint16_t min_spatial_segmentation_idc; - uint8_t max_bytes_per_pic_denom; - uint8_t max_bits_per_min_cu_denom; - uint8_t log2_max_mv_length_horizontal; - uint8_t log2_max_mv_length_vertical; + uint8_t aspect_ratio_info_present_flag; + uint8_t aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + + uint8_t overscan_info_present_flag; + uint8_t overscan_appropriate_flag; + + uint8_t video_signal_type_present_flag; + uint8_t video_format; + uint8_t video_full_range_flag; + uint8_t colour_description_present_flag; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + + uint8_t chroma_loc_info_present_flag; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + + uint8_t neutral_chroma_indication_flag; + uint8_t field_seq_flag; + uint8_t frame_field_info_present_flag; + + uint8_t default_display_window_flag; + uint16_t def_disp_win_left_offset; + uint16_t def_disp_win_right_offset; + uint16_t def_disp_win_top_offset; + uint16_t def_disp_win_bottom_offset; + + uint8_t vui_timing_info_present_flag; + uint32_t vui_num_units_in_tick; + uint32_t vui_time_scale; + uint8_t vui_poc_proportional_to_timing_flag; + uint32_t vui_num_ticks_poc_diff_one_minus1; + uint8_t vui_hrd_parameters_present_flag; + H265RawHRDParameters hrd_parameters; + + uint8_t bitstream_restriction_flag; + uint8_t tiles_fixed_structure_flag; + uint8_t motion_vectors_over_pic_boundaries_flag; + uint8_t restricted_ref_pic_lists_flag; + uint16_t min_spatial_segmentation_idc; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_min_cu_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; } H265RawVUI; typedef struct H265RawExtensionData { - uint8_t *data; - AVBufferRef *data_ref; - size_t bit_length; + uint8_t *data; + AVBufferRef *data_ref; + size_t 
bit_length; } H265RawExtensionData; typedef struct H265RawVPS { - H265RawNALUnitHeader nal_unit_header; - - uint8_t vps_video_parameter_set_id; - - uint8_t vps_base_layer_internal_flag; - uint8_t vps_base_layer_available_flag; - uint8_t vps_max_layers_minus1; - uint8_t vps_max_sub_layers_minus1; - uint8_t vps_temporal_id_nesting_flag; - - H265RawProfileTierLevel profile_tier_level; - - uint8_t vps_sub_layer_ordering_info_present_flag; - uint8_t vps_max_dec_pic_buffering_minus1[HEVC_MAX_SUB_LAYERS]; - uint8_t vps_max_num_reorder_pics[HEVC_MAX_SUB_LAYERS]; - uint32_t vps_max_latency_increase_plus1[HEVC_MAX_SUB_LAYERS]; - - uint8_t vps_max_layer_id; - uint16_t vps_num_layer_sets_minus1; - uint8_t layer_id_included_flag[HEVC_MAX_LAYER_SETS][HEVC_MAX_LAYERS]; - - uint8_t vps_timing_info_present_flag; - uint32_t vps_num_units_in_tick; - uint32_t vps_time_scale; - uint8_t vps_poc_proportional_to_timing_flag; - uint32_t vps_num_ticks_poc_diff_one_minus1; - uint16_t vps_num_hrd_parameters; - uint16_t hrd_layer_set_idx[HEVC_MAX_LAYER_SETS]; - uint8_t cprms_present_flag[HEVC_MAX_LAYER_SETS]; - H265RawHRDParameters hrd_parameters[HEVC_MAX_LAYER_SETS]; - - uint8_t vps_extension_flag; - H265RawExtensionData extension_data; + H265RawNALUnitHeader nal_unit_header; + + uint8_t vps_video_parameter_set_id; + + uint8_t vps_base_layer_internal_flag; + uint8_t vps_base_layer_available_flag; + uint8_t vps_max_layers_minus1; + uint8_t vps_max_sub_layers_minus1; + uint8_t vps_temporal_id_nesting_flag; + + H265RawProfileTierLevel profile_tier_level; + + uint8_t vps_sub_layer_ordering_info_present_flag; + uint8_t vps_max_dec_pic_buffering_minus1[HEVC_MAX_SUB_LAYERS]; + uint8_t vps_max_num_reorder_pics[HEVC_MAX_SUB_LAYERS]; + uint32_t vps_max_latency_increase_plus1[HEVC_MAX_SUB_LAYERS]; + + uint8_t vps_max_layer_id; + uint16_t vps_num_layer_sets_minus1; + uint8_t layer_id_included_flag[HEVC_MAX_LAYER_SETS][HEVC_MAX_LAYERS]; + + uint8_t vps_timing_info_present_flag; + uint32_t 
vps_num_units_in_tick; + uint32_t vps_time_scale; + uint8_t vps_poc_proportional_to_timing_flag; + uint32_t vps_num_ticks_poc_diff_one_minus1; + uint16_t vps_num_hrd_parameters; + uint16_t hrd_layer_set_idx[HEVC_MAX_LAYER_SETS]; + uint8_t cprms_present_flag[HEVC_MAX_LAYER_SETS]; + H265RawHRDParameters hrd_parameters[HEVC_MAX_LAYER_SETS]; + + uint8_t vps_extension_flag; + H265RawExtensionData extension_data; } H265RawVPS; typedef struct H265RawSTRefPicSet { - uint8_t inter_ref_pic_set_prediction_flag; + uint8_t inter_ref_pic_set_prediction_flag; - uint8_t delta_idx_minus1; - uint8_t delta_rps_sign; - uint16_t abs_delta_rps_minus1; + uint8_t delta_idx_minus1; + uint8_t delta_rps_sign; + uint16_t abs_delta_rps_minus1; - uint8_t used_by_curr_pic_flag[HEVC_MAX_REFS]; - uint8_t use_delta_flag[HEVC_MAX_REFS]; + uint8_t used_by_curr_pic_flag[HEVC_MAX_REFS]; + uint8_t use_delta_flag[HEVC_MAX_REFS]; - uint8_t num_negative_pics; - uint8_t num_positive_pics; - uint16_t delta_poc_s0_minus1[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_s0_flag[HEVC_MAX_REFS]; - uint16_t delta_poc_s1_minus1[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_s1_flag[HEVC_MAX_REFS]; + uint8_t num_negative_pics; + uint8_t num_positive_pics; + uint16_t delta_poc_s0_minus1[HEVC_MAX_REFS]; + uint8_t used_by_curr_pic_s0_flag[HEVC_MAX_REFS]; + uint16_t delta_poc_s1_minus1[HEVC_MAX_REFS]; + uint8_t used_by_curr_pic_s1_flag[HEVC_MAX_REFS]; } H265RawSTRefPicSet; typedef struct H265RawScalingList { - uint8_t scaling_list_pred_mode_flag[4][6]; - uint8_t scaling_list_pred_matrix_id_delta[4][6]; - int16_t scaling_list_dc_coef_minus8[4][6]; - int8_t scaling_list_delta_coeff[4][6][64]; + uint8_t scaling_list_pred_mode_flag[4][6]; + uint8_t scaling_list_pred_matrix_id_delta[4][6]; + int16_t scaling_list_dc_coef_minus8[4][6]; + int8_t scaling_list_delta_coeff[4][6][64]; } H265RawScalingList; typedef struct H265RawSPS { - H265RawNALUnitHeader nal_unit_header; - - uint8_t sps_video_parameter_set_id; - - uint8_t 
sps_max_sub_layers_minus1; - uint8_t sps_temporal_id_nesting_flag; - - H265RawProfileTierLevel profile_tier_level; - - uint8_t sps_seq_parameter_set_id; - - uint8_t chroma_format_idc; - uint8_t separate_colour_plane_flag; - - uint16_t pic_width_in_luma_samples; - uint16_t pic_height_in_luma_samples; - - uint8_t conformance_window_flag; - uint16_t conf_win_left_offset; - uint16_t conf_win_right_offset; - uint16_t conf_win_top_offset; - uint16_t conf_win_bottom_offset; - - uint8_t bit_depth_luma_minus8; - uint8_t bit_depth_chroma_minus8; - - uint8_t log2_max_pic_order_cnt_lsb_minus4; - - uint8_t sps_sub_layer_ordering_info_present_flag; - uint8_t sps_max_dec_pic_buffering_minus1[HEVC_MAX_SUB_LAYERS]; - uint8_t sps_max_num_reorder_pics[HEVC_MAX_SUB_LAYERS]; - uint32_t sps_max_latency_increase_plus1[HEVC_MAX_SUB_LAYERS]; - - uint8_t log2_min_luma_coding_block_size_minus3; - uint8_t log2_diff_max_min_luma_coding_block_size; - uint8_t log2_min_luma_transform_block_size_minus2; - uint8_t log2_diff_max_min_luma_transform_block_size; - uint8_t max_transform_hierarchy_depth_inter; - uint8_t max_transform_hierarchy_depth_intra; - - uint8_t scaling_list_enabled_flag; - uint8_t sps_scaling_list_data_present_flag; - H265RawScalingList scaling_list; - - uint8_t amp_enabled_flag; - uint8_t sample_adaptive_offset_enabled_flag; - - uint8_t pcm_enabled_flag; - uint8_t pcm_sample_bit_depth_luma_minus1; - uint8_t pcm_sample_bit_depth_chroma_minus1; - uint8_t log2_min_pcm_luma_coding_block_size_minus3; - uint8_t log2_diff_max_min_pcm_luma_coding_block_size; - uint8_t pcm_loop_filter_disabled_flag; - - uint8_t num_short_term_ref_pic_sets; - H265RawSTRefPicSet st_ref_pic_set[HEVC_MAX_SHORT_TERM_REF_PIC_SETS]; - - uint8_t long_term_ref_pics_present_flag; - uint8_t num_long_term_ref_pics_sps; - uint16_t lt_ref_pic_poc_lsb_sps[HEVC_MAX_LONG_TERM_REF_PICS]; - uint8_t used_by_curr_pic_lt_sps_flag[HEVC_MAX_LONG_TERM_REF_PICS]; - - uint8_t sps_temporal_mvp_enabled_flag; - uint8_t 
strong_intra_smoothing_enabled_flag; - - uint8_t vui_parameters_present_flag; - H265RawVUI vui; - - uint8_t sps_extension_present_flag; - uint8_t sps_range_extension_flag; - uint8_t sps_multilayer_extension_flag; - uint8_t sps_3d_extension_flag; - uint8_t sps_scc_extension_flag; - uint8_t sps_extension_4bits; - - H265RawExtensionData extension_data; - - // Range extension. - uint8_t transform_skip_rotation_enabled_flag; - uint8_t transform_skip_context_enabled_flag; - uint8_t implicit_rdpcm_enabled_flag; - uint8_t explicit_rdpcm_enabled_flag; - uint8_t extended_precision_processing_flag; - uint8_t intra_smoothing_disabled_flag; - uint8_t high_precision_offsets_enabled_flag; - uint8_t persistent_rice_adaptation_enabled_flag; - uint8_t cabac_bypass_alignment_enabled_flag; - - // Screen content coding extension. - uint8_t sps_curr_pic_ref_enabled_flag; - uint8_t palette_mode_enabled_flag; - uint8_t palette_max_size; - uint8_t delta_palette_max_predictor_size; - uint8_t sps_palette_predictor_initializer_present_flag; - uint8_t sps_num_palette_predictor_initializer_minus1; - uint16_t sps_palette_predictor_initializers[3][128]; - - uint8_t motion_vector_resolution_control_idc; - uint8_t intra_boundary_filtering_disable_flag; + H265RawNALUnitHeader nal_unit_header; + + uint8_t sps_video_parameter_set_id; + + uint8_t sps_max_sub_layers_minus1; + uint8_t sps_temporal_id_nesting_flag; + + H265RawProfileTierLevel profile_tier_level; + + uint8_t sps_seq_parameter_set_id; + + uint8_t chroma_format_idc; + uint8_t separate_colour_plane_flag; + + uint16_t pic_width_in_luma_samples; + uint16_t pic_height_in_luma_samples; + + uint8_t conformance_window_flag; + uint16_t conf_win_left_offset; + uint16_t conf_win_right_offset; + uint16_t conf_win_top_offset; + uint16_t conf_win_bottom_offset; + + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + + uint8_t log2_max_pic_order_cnt_lsb_minus4; + + uint8_t sps_sub_layer_ordering_info_present_flag; + uint8_t 
sps_max_dec_pic_buffering_minus1[HEVC_MAX_SUB_LAYERS]; + uint8_t sps_max_num_reorder_pics[HEVC_MAX_SUB_LAYERS]; + uint32_t sps_max_latency_increase_plus1[HEVC_MAX_SUB_LAYERS]; + + uint8_t log2_min_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_luma_coding_block_size; + uint8_t log2_min_luma_transform_block_size_minus2; + uint8_t log2_diff_max_min_luma_transform_block_size; + uint8_t max_transform_hierarchy_depth_inter; + uint8_t max_transform_hierarchy_depth_intra; + + uint8_t scaling_list_enabled_flag; + uint8_t sps_scaling_list_data_present_flag; + H265RawScalingList scaling_list; + + uint8_t amp_enabled_flag; + uint8_t sample_adaptive_offset_enabled_flag; + + uint8_t pcm_enabled_flag; + uint8_t pcm_sample_bit_depth_luma_minus1; + uint8_t pcm_sample_bit_depth_chroma_minus1; + uint8_t log2_min_pcm_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size; + uint8_t pcm_loop_filter_disabled_flag; + + uint8_t num_short_term_ref_pic_sets; + H265RawSTRefPicSet st_ref_pic_set[HEVC_MAX_SHORT_TERM_REF_PIC_SETS]; + + uint8_t long_term_ref_pics_present_flag; + uint8_t num_long_term_ref_pics_sps; + uint16_t lt_ref_pic_poc_lsb_sps[HEVC_MAX_LONG_TERM_REF_PICS]; + uint8_t used_by_curr_pic_lt_sps_flag[HEVC_MAX_LONG_TERM_REF_PICS]; + + uint8_t sps_temporal_mvp_enabled_flag; + uint8_t strong_intra_smoothing_enabled_flag; + + uint8_t vui_parameters_present_flag; + H265RawVUI vui; + + uint8_t sps_extension_present_flag; + uint8_t sps_range_extension_flag; + uint8_t sps_multilayer_extension_flag; + uint8_t sps_3d_extension_flag; + uint8_t sps_scc_extension_flag; + uint8_t sps_extension_4bits; + + H265RawExtensionData extension_data; + + // Range extension. 
+ uint8_t transform_skip_rotation_enabled_flag; + uint8_t transform_skip_context_enabled_flag; + uint8_t implicit_rdpcm_enabled_flag; + uint8_t explicit_rdpcm_enabled_flag; + uint8_t extended_precision_processing_flag; + uint8_t intra_smoothing_disabled_flag; + uint8_t high_precision_offsets_enabled_flag; + uint8_t persistent_rice_adaptation_enabled_flag; + uint8_t cabac_bypass_alignment_enabled_flag; + + // Screen content coding extension. + uint8_t sps_curr_pic_ref_enabled_flag; + uint8_t palette_mode_enabled_flag; + uint8_t palette_max_size; + uint8_t delta_palette_max_predictor_size; + uint8_t sps_palette_predictor_initializer_present_flag; + uint8_t sps_num_palette_predictor_initializer_minus1; + uint16_t sps_palette_predictor_initializers[3][128]; + + uint8_t motion_vector_resolution_control_idc; + uint8_t intra_boundary_filtering_disable_flag; } H265RawSPS; typedef struct H265RawPPS { - H265RawNALUnitHeader nal_unit_header; - - uint8_t pps_pic_parameter_set_id; - uint8_t pps_seq_parameter_set_id; - - uint8_t dependent_slice_segments_enabled_flag; - uint8_t output_flag_present_flag; - uint8_t num_extra_slice_header_bits; - uint8_t sign_data_hiding_enabled_flag; - uint8_t cabac_init_present_flag; - - uint8_t num_ref_idx_l0_default_active_minus1; - uint8_t num_ref_idx_l1_default_active_minus1; - - int8_t init_qp_minus26; - - uint8_t constrained_intra_pred_flag; - uint8_t transform_skip_enabled_flag; - uint8_t cu_qp_delta_enabled_flag; - uint8_t diff_cu_qp_delta_depth; - - int8_t pps_cb_qp_offset; - int8_t pps_cr_qp_offset; - uint8_t pps_slice_chroma_qp_offsets_present_flag; - - uint8_t weighted_pred_flag; - uint8_t weighted_bipred_flag; - - uint8_t transquant_bypass_enabled_flag; - uint8_t tiles_enabled_flag; - uint8_t entropy_coding_sync_enabled_flag; - - uint8_t num_tile_columns_minus1; - uint8_t num_tile_rows_minus1; - uint8_t uniform_spacing_flag; - uint16_t column_width_minus1[HEVC_MAX_TILE_COLUMNS]; - uint16_t row_height_minus1[HEVC_MAX_TILE_ROWS]; - 
uint8_t loop_filter_across_tiles_enabled_flag; - - uint8_t pps_loop_filter_across_slices_enabled_flag; - uint8_t deblocking_filter_control_present_flag; - uint8_t deblocking_filter_override_enabled_flag; - uint8_t pps_deblocking_filter_disabled_flag; - int8_t pps_beta_offset_div2; - int8_t pps_tc_offset_div2; - - uint8_t pps_scaling_list_data_present_flag; - H265RawScalingList scaling_list; - - uint8_t lists_modification_present_flag; - uint8_t log2_parallel_merge_level_minus2; - - uint8_t slice_segment_header_extension_present_flag; - - uint8_t pps_extension_present_flag; - uint8_t pps_range_extension_flag; - uint8_t pps_multilayer_extension_flag; - uint8_t pps_3d_extension_flag; - uint8_t pps_scc_extension_flag; - uint8_t pps_extension_4bits; - - H265RawExtensionData extension_data; - - // Range extension. - uint8_t log2_max_transform_skip_block_size_minus2; - uint8_t cross_component_prediction_enabled_flag; - uint8_t chroma_qp_offset_list_enabled_flag; - uint8_t diff_cu_chroma_qp_offset_depth; - uint8_t chroma_qp_offset_list_len_minus1; - int8_t cb_qp_offset_list[6]; - int8_t cr_qp_offset_list[6]; - uint8_t log2_sao_offset_scale_luma; - uint8_t log2_sao_offset_scale_chroma; - - // Screen content coding extension. 
- uint8_t pps_curr_pic_ref_enabled_flag; - uint8_t residual_adaptive_colour_transform_enabled_flag; - uint8_t pps_slice_act_qp_offsets_present_flag; - int8_t pps_act_y_qp_offset_plus5; - int8_t pps_act_cb_qp_offset_plus5; - int8_t pps_act_cr_qp_offset_plus3; - - uint8_t pps_palette_predictor_initializer_present_flag; - uint8_t pps_num_palette_predictor_initializer; - uint8_t monochrome_palette_flag; - uint8_t luma_bit_depth_entry_minus8; - uint8_t chroma_bit_depth_entry_minus8; - uint16_t pps_palette_predictor_initializers[3][128]; + H265RawNALUnitHeader nal_unit_header; + + uint8_t pps_pic_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + + uint8_t dependent_slice_segments_enabled_flag; + uint8_t output_flag_present_flag; + uint8_t num_extra_slice_header_bits; + uint8_t sign_data_hiding_enabled_flag; + uint8_t cabac_init_present_flag; + + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + + int8_t init_qp_minus26; + + uint8_t constrained_intra_pred_flag; + uint8_t transform_skip_enabled_flag; + uint8_t cu_qp_delta_enabled_flag; + uint8_t diff_cu_qp_delta_depth; + + int8_t pps_cb_qp_offset; + int8_t pps_cr_qp_offset; + uint8_t pps_slice_chroma_qp_offsets_present_flag; + + uint8_t weighted_pred_flag; + uint8_t weighted_bipred_flag; + + uint8_t transquant_bypass_enabled_flag; + uint8_t tiles_enabled_flag; + uint8_t entropy_coding_sync_enabled_flag; + + uint8_t num_tile_columns_minus1; + uint8_t num_tile_rows_minus1; + uint8_t uniform_spacing_flag; + uint16_t column_width_minus1[HEVC_MAX_TILE_COLUMNS]; + uint16_t row_height_minus1[HEVC_MAX_TILE_ROWS]; + uint8_t loop_filter_across_tiles_enabled_flag; + + uint8_t pps_loop_filter_across_slices_enabled_flag; + uint8_t deblocking_filter_control_present_flag; + uint8_t deblocking_filter_override_enabled_flag; + uint8_t pps_deblocking_filter_disabled_flag; + int8_t pps_beta_offset_div2; + int8_t pps_tc_offset_div2; + + uint8_t pps_scaling_list_data_present_flag; + 
H265RawScalingList scaling_list; + + uint8_t lists_modification_present_flag; + uint8_t log2_parallel_merge_level_minus2; + + uint8_t slice_segment_header_extension_present_flag; + + uint8_t pps_extension_present_flag; + uint8_t pps_range_extension_flag; + uint8_t pps_multilayer_extension_flag; + uint8_t pps_3d_extension_flag; + uint8_t pps_scc_extension_flag; + uint8_t pps_extension_4bits; + + H265RawExtensionData extension_data; + + // Range extension. + uint8_t log2_max_transform_skip_block_size_minus2; + uint8_t cross_component_prediction_enabled_flag; + uint8_t chroma_qp_offset_list_enabled_flag; + uint8_t diff_cu_chroma_qp_offset_depth; + uint8_t chroma_qp_offset_list_len_minus1; + int8_t cb_qp_offset_list[6]; + int8_t cr_qp_offset_list[6]; + uint8_t log2_sao_offset_scale_luma; + uint8_t log2_sao_offset_scale_chroma; + + // Screen content coding extension. + uint8_t pps_curr_pic_ref_enabled_flag; + uint8_t residual_adaptive_colour_transform_enabled_flag; + uint8_t pps_slice_act_qp_offsets_present_flag; + int8_t pps_act_y_qp_offset_plus5; + int8_t pps_act_cb_qp_offset_plus5; + int8_t pps_act_cr_qp_offset_plus3; + + uint8_t pps_palette_predictor_initializer_present_flag; + uint8_t pps_num_palette_predictor_initializer; + uint8_t monochrome_palette_flag; + uint8_t luma_bit_depth_entry_minus8; + uint8_t chroma_bit_depth_entry_minus8; + uint16_t pps_palette_predictor_initializers[3][128]; } H265RawPPS; typedef struct H265RawAUD { - H265RawNALUnitHeader nal_unit_header; + H265RawNALUnitHeader nal_unit_header; - uint8_t pic_type; + uint8_t pic_type; } H265RawAUD; -typedef struct H265RawSliceHeader { - H265RawNALUnitHeader nal_unit_header; - - uint8_t first_slice_segment_in_pic_flag; - uint8_t no_output_of_prior_pics_flag; - uint8_t slice_pic_parameter_set_id; - - uint8_t dependent_slice_segment_flag; - uint16_t slice_segment_address; - - uint8_t slice_reserved_flag[8]; - uint8_t slice_type; - - uint8_t pic_output_flag; - uint8_t colour_plane_id; - - uint16_t 
slice_pic_order_cnt_lsb; - - uint8_t short_term_ref_pic_set_sps_flag; - H265RawSTRefPicSet short_term_ref_pic_set; - uint8_t short_term_ref_pic_set_idx; - - uint8_t num_long_term_sps; - uint8_t num_long_term_pics; - uint8_t lt_idx_sps[HEVC_MAX_REFS]; - uint8_t poc_lsb_lt[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_lt_flag[HEVC_MAX_REFS]; - uint8_t delta_poc_msb_present_flag[HEVC_MAX_REFS]; - uint32_t delta_poc_msb_cycle_lt[HEVC_MAX_REFS]; - - uint8_t slice_temporal_mvp_enabled_flag; - - uint8_t slice_sao_luma_flag; - uint8_t slice_sao_chroma_flag; - - uint8_t num_ref_idx_active_override_flag; - uint8_t num_ref_idx_l0_active_minus1; - uint8_t num_ref_idx_l1_active_minus1; - - uint8_t ref_pic_list_modification_flag_l0; - uint8_t list_entry_l0[HEVC_MAX_REFS]; - uint8_t ref_pic_list_modification_flag_l1; - uint8_t list_entry_l1[HEVC_MAX_REFS]; - - uint8_t mvd_l1_zero_flag; - uint8_t cabac_init_flag; - uint8_t collocated_from_l0_flag; - uint8_t collocated_ref_idx; - - uint8_t luma_log2_weight_denom; - int8_t delta_chroma_log2_weight_denom; - uint8_t luma_weight_l0_flag[HEVC_MAX_REFS]; - uint8_t chroma_weight_l0_flag[HEVC_MAX_REFS]; - int8_t delta_luma_weight_l0[HEVC_MAX_REFS]; - int16_t luma_offset_l0[HEVC_MAX_REFS]; - int8_t delta_chroma_weight_l0[HEVC_MAX_REFS][2]; - int16_t chroma_offset_l0[HEVC_MAX_REFS][2]; - uint8_t luma_weight_l1_flag[HEVC_MAX_REFS]; - uint8_t chroma_weight_l1_flag[HEVC_MAX_REFS]; - int8_t delta_luma_weight_l1[HEVC_MAX_REFS]; - int16_t luma_offset_l1[HEVC_MAX_REFS]; - int8_t delta_chroma_weight_l1[HEVC_MAX_REFS][2]; - int16_t chroma_offset_l1[HEVC_MAX_REFS][2]; - - uint8_t five_minus_max_num_merge_cand; - uint8_t use_integer_mv_flag; - - int8_t slice_qp_delta; - int8_t slice_cb_qp_offset; - int8_t slice_cr_qp_offset; - int8_t slice_act_y_qp_offset; - int8_t slice_act_cb_qp_offset; - int8_t slice_act_cr_qp_offset; - uint8_t cu_chroma_qp_offset_enabled_flag; - - uint8_t deblocking_filter_override_flag; - uint8_t 
slice_deblocking_filter_disabled_flag; - int8_t slice_beta_offset_div2; - int8_t slice_tc_offset_div2; - uint8_t slice_loop_filter_across_slices_enabled_flag; - - uint16_t num_entry_point_offsets; - uint8_t offset_len_minus1; - uint32_t entry_point_offset_minus1[HEVC_MAX_ENTRY_POINT_OFFSETS]; - - uint16_t slice_segment_header_extension_length; - uint8_t slice_segment_header_extension_data_byte[256]; +typedef struct H265RawSliceHeader { + H265RawNALUnitHeader nal_unit_header; + + uint8_t first_slice_segment_in_pic_flag; + uint8_t no_output_of_prior_pics_flag; + uint8_t slice_pic_parameter_set_id; + + uint8_t dependent_slice_segment_flag; + uint16_t slice_segment_address; + + uint8_t slice_reserved_flag[8]; + uint8_t slice_type; + + uint8_t pic_output_flag; + uint8_t colour_plane_id; + + uint16_t slice_pic_order_cnt_lsb; + + uint8_t short_term_ref_pic_set_sps_flag; + H265RawSTRefPicSet short_term_ref_pic_set; + uint8_t short_term_ref_pic_set_idx; + + uint8_t num_long_term_sps; + uint8_t num_long_term_pics; + uint8_t lt_idx_sps[HEVC_MAX_REFS]; + uint8_t poc_lsb_lt[HEVC_MAX_REFS]; + uint8_t used_by_curr_pic_lt_flag[HEVC_MAX_REFS]; + uint8_t delta_poc_msb_present_flag[HEVC_MAX_REFS]; + uint32_t delta_poc_msb_cycle_lt[HEVC_MAX_REFS]; + + uint8_t slice_temporal_mvp_enabled_flag; + + uint8_t slice_sao_luma_flag; + uint8_t slice_sao_chroma_flag; + + uint8_t num_ref_idx_active_override_flag; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + + uint8_t ref_pic_list_modification_flag_l0; + uint8_t list_entry_l0[HEVC_MAX_REFS]; + uint8_t ref_pic_list_modification_flag_l1; + uint8_t list_entry_l1[HEVC_MAX_REFS]; + + uint8_t mvd_l1_zero_flag; + uint8_t cabac_init_flag; + uint8_t collocated_from_l0_flag; + uint8_t collocated_ref_idx; + + uint8_t luma_log2_weight_denom; + int8_t delta_chroma_log2_weight_denom; + uint8_t luma_weight_l0_flag[HEVC_MAX_REFS]; + uint8_t chroma_weight_l0_flag[HEVC_MAX_REFS]; + int8_t delta_luma_weight_l0[HEVC_MAX_REFS]; + 
int16_t luma_offset_l0[HEVC_MAX_REFS]; + int8_t delta_chroma_weight_l0[HEVC_MAX_REFS][2]; + int16_t chroma_offset_l0[HEVC_MAX_REFS][2]; + uint8_t luma_weight_l1_flag[HEVC_MAX_REFS]; + uint8_t chroma_weight_l1_flag[HEVC_MAX_REFS]; + int8_t delta_luma_weight_l1[HEVC_MAX_REFS]; + int16_t luma_offset_l1[HEVC_MAX_REFS]; + int8_t delta_chroma_weight_l1[HEVC_MAX_REFS][2]; + int16_t chroma_offset_l1[HEVC_MAX_REFS][2]; + + uint8_t five_minus_max_num_merge_cand; + uint8_t use_integer_mv_flag; + + int8_t slice_qp_delta; + int8_t slice_cb_qp_offset; + int8_t slice_cr_qp_offset; + int8_t slice_act_y_qp_offset; + int8_t slice_act_cb_qp_offset; + int8_t slice_act_cr_qp_offset; + uint8_t cu_chroma_qp_offset_enabled_flag; + + uint8_t deblocking_filter_override_flag; + uint8_t slice_deblocking_filter_disabled_flag; + int8_t slice_beta_offset_div2; + int8_t slice_tc_offset_div2; + uint8_t slice_loop_filter_across_slices_enabled_flag; + + uint16_t num_entry_point_offsets; + uint8_t offset_len_minus1; + uint32_t entry_point_offset_minus1[HEVC_MAX_ENTRY_POINT_OFFSETS]; + + uint16_t slice_segment_header_extension_length; + uint8_t slice_segment_header_extension_data_byte[256]; } H265RawSliceHeader; typedef struct H265RawSlice { - H265RawSliceHeader header; + H265RawSliceHeader header; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; - int data_bit_start; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_size; + int data_bit_start; } H265RawSlice; typedef struct H265RawSEIBufferingPeriod { - uint8_t bp_seq_parameter_set_id; - uint8_t irap_cpb_params_present_flag; - uint32_t cpb_delay_offset; - uint32_t dpb_delay_offset; - uint8_t concatenation_flag; - uint32_t au_cpb_removal_delay_delta_minus1; - - uint32_t nal_initial_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t nal_initial_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - uint32_t nal_initial_alt_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t nal_initial_alt_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - - uint32_t 
vcl_initial_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t vcl_initial_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - uint32_t vcl_initial_alt_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t vcl_initial_alt_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - - uint8_t use_alt_cpb_params_flag; + uint8_t bp_seq_parameter_set_id; + uint8_t irap_cpb_params_present_flag; + uint32_t cpb_delay_offset; + uint32_t dpb_delay_offset; + uint8_t concatenation_flag; + uint32_t au_cpb_removal_delay_delta_minus1; + + uint32_t nal_initial_cpb_removal_delay[HEVC_MAX_CPB_CNT]; + uint32_t nal_initial_cpb_removal_offset[HEVC_MAX_CPB_CNT]; + uint32_t nal_initial_alt_cpb_removal_delay[HEVC_MAX_CPB_CNT]; + uint32_t nal_initial_alt_cpb_removal_offset[HEVC_MAX_CPB_CNT]; + + uint32_t vcl_initial_cpb_removal_delay[HEVC_MAX_CPB_CNT]; + uint32_t vcl_initial_cpb_removal_offset[HEVC_MAX_CPB_CNT]; + uint32_t vcl_initial_alt_cpb_removal_delay[HEVC_MAX_CPB_CNT]; + uint32_t vcl_initial_alt_cpb_removal_offset[HEVC_MAX_CPB_CNT]; + + uint8_t use_alt_cpb_params_flag; } H265RawSEIBufferingPeriod; typedef struct H265RawSEIPicTiming { - uint8_t pic_struct; - uint8_t source_scan_type; - uint8_t duplicate_flag; - - uint32_t au_cpb_removal_delay_minus1; - uint32_t pic_dpb_output_delay; - uint32_t pic_dpb_output_du_delay; - - uint16_t num_decoding_units_minus1; - uint8_t du_common_cpb_removal_delay_flag; - uint32_t du_common_cpb_removal_delay_increment_minus1; - uint16_t num_nalus_in_du_minus1[HEVC_MAX_SLICE_SEGMENTS]; - uint32_t du_cpb_removal_delay_increment_minus1[HEVC_MAX_SLICE_SEGMENTS]; + uint8_t pic_struct; + uint8_t source_scan_type; + uint8_t duplicate_flag; + + uint32_t au_cpb_removal_delay_minus1; + uint32_t pic_dpb_output_delay; + uint32_t pic_dpb_output_du_delay; + + uint16_t num_decoding_units_minus1; + uint8_t du_common_cpb_removal_delay_flag; + uint32_t du_common_cpb_removal_delay_increment_minus1; + uint16_t num_nalus_in_du_minus1[HEVC_MAX_SLICE_SEGMENTS]; + uint32_t 
du_cpb_removal_delay_increment_minus1[HEVC_MAX_SLICE_SEGMENTS]; } H265RawSEIPicTiming; typedef struct H265RawSEIPanScanRect { - uint32_t pan_scan_rect_id; - uint8_t pan_scan_rect_cancel_flag; - uint8_t pan_scan_cnt_minus1; - int32_t pan_scan_rect_left_offset[3]; - int32_t pan_scan_rect_right_offset[3]; - int32_t pan_scan_rect_top_offset[3]; - int32_t pan_scan_rect_bottom_offset[3]; - uint16_t pan_scan_rect_persistence_flag; + uint32_t pan_scan_rect_id; + uint8_t pan_scan_rect_cancel_flag; + uint8_t pan_scan_cnt_minus1; + int32_t pan_scan_rect_left_offset[3]; + int32_t pan_scan_rect_right_offset[3]; + int32_t pan_scan_rect_top_offset[3]; + int32_t pan_scan_rect_bottom_offset[3]; + uint16_t pan_scan_rect_persistence_flag; } H265RawSEIPanScanRect; typedef struct H265RawSEIRecoveryPoint { - int16_t recovery_poc_cnt; - uint8_t exact_match_flag; - uint8_t broken_link_flag; + int16_t recovery_poc_cnt; + uint8_t exact_match_flag; + uint8_t broken_link_flag; } H265RawSEIRecoveryPoint; +typedef struct H265RawFilmGrainCharacteristics { + uint8_t film_grain_characteristics_cancel_flag; + uint8_t film_grain_model_id; + uint8_t separate_colour_description_present_flag; + uint8_t film_grain_bit_depth_luma_minus8; + uint8_t film_grain_bit_depth_chroma_minus8; + uint8_t film_grain_full_range_flag; + uint8_t film_grain_colour_primaries; + uint8_t film_grain_transfer_characteristics; + uint8_t film_grain_matrix_coeffs; + uint8_t blending_mode_id; + uint8_t log2_scale_factor; + uint8_t comp_model_present_flag[3]; + uint8_t num_intensity_intervals_minus1[3]; + uint8_t num_model_values_minus1[3]; + uint8_t intensity_interval_lower_bound[3][256]; + uint8_t intensity_interval_upper_bound[3][256]; + int16_t comp_model_value[3][256][6]; + uint8_t film_grain_characteristics_persistence_flag; +} H265RawFilmGrainCharacteristics; + typedef struct H265RawSEIDisplayOrientation { - uint8_t display_orientation_cancel_flag; - uint8_t hor_flip; - uint8_t ver_flip; - uint16_t anticlockwise_rotation; - 
uint16_t display_orientation_repetition_period; - uint8_t display_orientation_persistence_flag; + uint8_t display_orientation_cancel_flag; + uint8_t hor_flip; + uint8_t ver_flip; + uint16_t anticlockwise_rotation; + uint16_t display_orientation_repetition_period; + uint8_t display_orientation_persistence_flag; } H265RawSEIDisplayOrientation; typedef struct H265RawSEIActiveParameterSets { - uint8_t active_video_parameter_set_id; - uint8_t self_contained_cvs_flag; - uint8_t no_parameter_set_update_flag; - uint8_t num_sps_ids_minus1; - uint8_t active_seq_parameter_set_id[HEVC_MAX_SPS_COUNT]; - uint8_t layer_sps_idx[HEVC_MAX_LAYERS]; + uint8_t active_video_parameter_set_id; + uint8_t self_contained_cvs_flag; + uint8_t no_parameter_set_update_flag; + uint8_t num_sps_ids_minus1; + uint8_t active_seq_parameter_set_id[HEVC_MAX_SPS_COUNT]; + uint8_t layer_sps_idx[HEVC_MAX_LAYERS]; } H265RawSEIActiveParameterSets; typedef struct H265RawSEIDecodedPictureHash { - uint8_t hash_type; - uint8_t picture_md5[3][16]; - uint16_t picture_crc[3]; - uint32_t picture_checksum[3]; + uint8_t hash_type; + uint8_t picture_md5[3][16]; + uint16_t picture_crc[3]; + uint32_t picture_checksum[3]; } H265RawSEIDecodedPictureHash; typedef struct H265RawSEITimeCode { - uint8_t num_clock_ts; - uint8_t clock_timestamp_flag[3]; - uint8_t units_field_based_flag[3]; - uint8_t counting_type[3]; - uint8_t full_timestamp_flag[3]; - uint8_t discontinuity_flag[3]; - uint8_t cnt_dropped_flag[3]; - uint16_t n_frames[3]; - uint8_t seconds_value[3]; - uint8_t minutes_value[3]; - uint8_t hours_value[3]; - uint8_t seconds_flag[3]; - uint8_t minutes_flag[3]; - uint8_t hours_flag[3]; - uint8_t time_offset_length[3]; - int32_t time_offset_value[3]; + uint8_t num_clock_ts; + uint8_t clock_timestamp_flag[3]; + uint8_t units_field_based_flag[3]; + uint8_t counting_type[3]; + uint8_t full_timestamp_flag[3]; + uint8_t discontinuity_flag[3]; + uint8_t cnt_dropped_flag[3]; + uint16_t n_frames[3]; + uint8_t seconds_value[3]; + 
uint8_t minutes_value[3]; + uint8_t hours_value[3]; + uint8_t seconds_flag[3]; + uint8_t minutes_flag[3]; + uint8_t hours_flag[3]; + uint8_t time_offset_length[3]; + int32_t time_offset_value[3]; } H265RawSEITimeCode; typedef struct H265RawSEIAlphaChannelInfo { - uint8_t alpha_channel_cancel_flag; - uint8_t alpha_channel_use_idc; - uint8_t alpha_channel_bit_depth_minus8; - uint16_t alpha_transparent_value; - uint16_t alpha_opaque_value; - uint8_t alpha_channel_incr_flag; - uint8_t alpha_channel_clip_flag; - uint8_t alpha_channel_clip_type_flag; + uint8_t alpha_channel_cancel_flag; + uint8_t alpha_channel_use_idc; + uint8_t alpha_channel_bit_depth_minus8; + uint16_t alpha_transparent_value; + uint16_t alpha_opaque_value; + uint8_t alpha_channel_incr_flag; + uint8_t alpha_channel_clip_flag; + uint8_t alpha_channel_clip_type_flag; } H265RawSEIAlphaChannelInfo; typedef struct H265RawSEI { - H265RawNALUnitHeader nal_unit_header; - SEIRawMessageList message_list; + H265RawNALUnitHeader nal_unit_header; + SEIRawMessageList message_list; } H265RawSEI; typedef struct CodedBitstreamH265Context { - // Reader/writer context in common with the H.264 implementation. - CodedBitstreamH2645Context common; - - // All currently available parameter sets. These are updated when - // any parameter set NAL unit is read/written with this context. - AVBufferRef *vps_ref[HEVC_MAX_VPS_COUNT]; - AVBufferRef *sps_ref[HEVC_MAX_SPS_COUNT]; - AVBufferRef *pps_ref[HEVC_MAX_PPS_COUNT]; - H265RawVPS *vps[HEVC_MAX_VPS_COUNT]; - H265RawSPS *sps[HEVC_MAX_SPS_COUNT]; - H265RawPPS *pps[HEVC_MAX_PPS_COUNT]; - - // The currently active parameter sets. These are updated when any - // NAL unit refers to the relevant parameter set. These pointers - // must also be present in the arrays above. - const H265RawVPS *active_vps; - const H265RawSPS *active_sps; - const H265RawPPS *active_pps; + // Reader/writer context in common with the H.264 implementation. 
+ CodedBitstreamH2645Context common; + + // All currently available parameter sets. These are updated when + // any parameter set NAL unit is read/written with this context. + AVBufferRef *vps_ref[HEVC_MAX_VPS_COUNT]; + AVBufferRef *sps_ref[HEVC_MAX_SPS_COUNT]; + AVBufferRef *pps_ref[HEVC_MAX_PPS_COUNT]; + H265RawVPS *vps[HEVC_MAX_VPS_COUNT]; + H265RawSPS *sps[HEVC_MAX_SPS_COUNT]; + H265RawPPS *pps[HEVC_MAX_PPS_COUNT]; + + // The currently active parameter sets. These are updated when any + // NAL unit refers to the relevant parameter set. These pointers + // must also be present in the arrays above. + const H265RawVPS *active_vps; + const H265RawSPS *active_sps; + const H265RawPPS *active_pps; } CodedBitstreamH265Context; diff --git a/third-party/cbs/include/cbs/cbs_jpeg.h b/third-party/cbs/include/cbs/cbs_jpeg.h index c2ee8c5e731..9dbebd259fd 100644 --- a/third-party/cbs/include/cbs/cbs_jpeg.h +++ b/third-party/cbs/include/cbs/cbs_jpeg.h @@ -22,101 +22,101 @@ #include #include -#include +#include "libavutil/buffer.h" enum { - JPEG_MARKER_SOF0 = 0xc0, - JPEG_MARKER_SOF1 = 0xc1, - JPEG_MARKER_SOF2 = 0xc2, - JPEG_MARKER_SOF3 = 0xc3, - - JPEG_MARKER_DHT = 0xc4, - JPEG_MARKER_SOI = 0xd8, - JPEG_MARKER_EOI = 0xd9, - JPEG_MARKER_SOS = 0xda, - JPEG_MARKER_DQT = 0xdb, - - JPEG_MARKER_APPN = 0xe0, - JPEG_MARKER_JPGN = 0xf0, - JPEG_MARKER_COM = 0xfe, + JPEG_MARKER_SOF0 = 0xc0, + JPEG_MARKER_SOF1 = 0xc1, + JPEG_MARKER_SOF2 = 0xc2, + JPEG_MARKER_SOF3 = 0xc3, + + JPEG_MARKER_DHT = 0xc4, + JPEG_MARKER_SOI = 0xd8, + JPEG_MARKER_EOI = 0xd9, + JPEG_MARKER_SOS = 0xda, + JPEG_MARKER_DQT = 0xdb, + + JPEG_MARKER_APPN = 0xe0, + JPEG_MARKER_JPGN = 0xf0, + JPEG_MARKER_COM = 0xfe, }; enum { - JPEG_MAX_COMPONENTS = 255, + JPEG_MAX_COMPONENTS = 255, - JPEG_MAX_HEIGHT = 65535, - JPEG_MAX_WIDTH = 65535, + JPEG_MAX_HEIGHT = 65535, + JPEG_MAX_WIDTH = 65535, }; typedef struct JPEGRawFrameHeader { - uint16_t Lf; - uint8_t P; - uint16_t Y; - uint16_t X; - uint16_t Nf; - - uint8_t 
C[JPEG_MAX_COMPONENTS]; - uint8_t H[JPEG_MAX_COMPONENTS]; - uint8_t V[JPEG_MAX_COMPONENTS]; - uint8_t Tq[JPEG_MAX_COMPONENTS]; + uint16_t Lf; + uint8_t P; + uint16_t Y; + uint16_t X; + uint16_t Nf; + + uint8_t C [JPEG_MAX_COMPONENTS]; + uint8_t H [JPEG_MAX_COMPONENTS]; + uint8_t V [JPEG_MAX_COMPONENTS]; + uint8_t Tq[JPEG_MAX_COMPONENTS]; } JPEGRawFrameHeader; typedef struct JPEGRawScanHeader { - uint16_t Ls; - uint8_t Ns; + uint16_t Ls; + uint8_t Ns; - uint8_t Cs[JPEG_MAX_COMPONENTS]; - uint8_t Td[JPEG_MAX_COMPONENTS]; - uint8_t Ta[JPEG_MAX_COMPONENTS]; + uint8_t Cs[JPEG_MAX_COMPONENTS]; + uint8_t Td[JPEG_MAX_COMPONENTS]; + uint8_t Ta[JPEG_MAX_COMPONENTS]; - uint8_t Ss; - uint8_t Se; - uint8_t Ah; - uint8_t Al; + uint8_t Ss; + uint8_t Se; + uint8_t Ah; + uint8_t Al; } JPEGRawScanHeader; typedef struct JPEGRawScan { - JPEGRawScanHeader header; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; + JPEGRawScanHeader header; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_size; } JPEGRawScan; typedef struct JPEGRawQuantisationTable { - uint8_t Pq; - uint8_t Tq; - uint16_t Q[64]; + uint8_t Pq; + uint8_t Tq; + uint16_t Q[64]; } JPEGRawQuantisationTable; typedef struct JPEGRawQuantisationTableSpecification { - uint16_t Lq; - JPEGRawQuantisationTable table[4]; + uint16_t Lq; + JPEGRawQuantisationTable table[4]; } JPEGRawQuantisationTableSpecification; typedef struct JPEGRawHuffmanTable { - uint8_t Tc; - uint8_t Th; - uint8_t L[16]; - uint8_t V[224]; + uint8_t Tc; + uint8_t Th; + uint8_t L[16]; + uint8_t V[256]; } JPEGRawHuffmanTable; typedef struct JPEGRawHuffmanTableSpecification { - uint16_t Lh; - JPEGRawHuffmanTable table[8]; + uint16_t Lh; + JPEGRawHuffmanTable table[8]; } JPEGRawHuffmanTableSpecification; typedef struct JPEGRawApplicationData { - uint16_t Lp; - uint8_t *Ap; - AVBufferRef *Ap_ref; + uint16_t Lp; + uint8_t *Ap; + AVBufferRef *Ap_ref; } JPEGRawApplicationData; typedef struct JPEGRawComment { - uint16_t Lc; - uint8_t *Cm; - AVBufferRef 
*Cm_ref; + uint16_t Lc; + uint8_t *Cm; + AVBufferRef *Cm_ref; } JPEGRawComment; diff --git a/third-party/cbs/include/cbs/cbs_mpeg2.h b/third-party/cbs/include/cbs/cbs_mpeg2.h index 858cb543ce1..f7075a460dc 100644 --- a/third-party/cbs/include/cbs/cbs_mpeg2.h +++ b/third-party/cbs/include/cbs/cbs_mpeg2.h @@ -22,209 +22,209 @@ #include #include -#include +#include "libavutil/buffer.h" enum { - MPEG2_START_PICTURE = 0x00, - MPEG2_START_SLICE_MIN = 0x01, - MPEG2_START_SLICE_MAX = 0xaf, - MPEG2_START_USER_DATA = 0xb2, - MPEG2_START_SEQUENCE_HEADER = 0xb3, - MPEG2_START_SEQUENCE_ERROR = 0xb4, - MPEG2_START_EXTENSION = 0xb5, - MPEG2_START_SEQUENCE_END = 0xb7, - MPEG2_START_GROUP = 0xb8, + MPEG2_START_PICTURE = 0x00, + MPEG2_START_SLICE_MIN = 0x01, + MPEG2_START_SLICE_MAX = 0xaf, + MPEG2_START_USER_DATA = 0xb2, + MPEG2_START_SEQUENCE_HEADER = 0xb3, + MPEG2_START_SEQUENCE_ERROR = 0xb4, + MPEG2_START_EXTENSION = 0xb5, + MPEG2_START_SEQUENCE_END = 0xb7, + MPEG2_START_GROUP = 0xb8, }; -#define MPEG2_START_IS_SLICE(type) \ - ((type) >= MPEG2_START_SLICE_MIN && \ - (type) <= MPEG2_START_SLICE_MAX) +#define MPEG2_START_IS_SLICE(type) \ + ((type) >= MPEG2_START_SLICE_MIN && \ + (type) <= MPEG2_START_SLICE_MAX) enum { - MPEG2_EXTENSION_SEQUENCE = 0x1, - MPEG2_EXTENSION_SEQUENCE_DISPLAY = 0x2, - MPEG2_EXTENSION_QUANT_MATRIX = 0x3, - MPEG2_EXTENSION_COPYRIGHT = 0x4, - MPEG2_EXTENSION_SEQUENCE_SCALABLE = 0x5, - MPEG2_EXTENSION_PICTURE_DISPLAY = 0x7, - MPEG2_EXTENSION_PICTURE_CODING = 0x8, - MPEG2_EXTENSION_PICTURE_SPATIAL_SCALABLE = 0x9, - MPEG2_EXTENSION_PICTURE_TEMPORAL_SCALABLE = 0xa, - MPEG2_EXTENSION_CAMERA_PARAMETERS = 0xb, - MPEG2_EXTENSION_ITU_T = 0xc, + MPEG2_EXTENSION_SEQUENCE = 0x1, + MPEG2_EXTENSION_SEQUENCE_DISPLAY = 0x2, + MPEG2_EXTENSION_QUANT_MATRIX = 0x3, + MPEG2_EXTENSION_COPYRIGHT = 0x4, + MPEG2_EXTENSION_SEQUENCE_SCALABLE = 0x5, + MPEG2_EXTENSION_PICTURE_DISPLAY = 0x7, + MPEG2_EXTENSION_PICTURE_CODING = 0x8, + MPEG2_EXTENSION_PICTURE_SPATIAL_SCALABLE = 0x9, + 
MPEG2_EXTENSION_PICTURE_TEMPORAL_SCALABLE = 0xa, + MPEG2_EXTENSION_CAMERA_PARAMETERS = 0xb, + MPEG2_EXTENSION_ITU_T = 0xc, }; typedef struct MPEG2RawSequenceHeader { - uint8_t sequence_header_code; - - uint16_t horizontal_size_value; - uint16_t vertical_size_value; - uint8_t aspect_ratio_information; - uint8_t frame_rate_code; - uint32_t bit_rate_value; - uint16_t vbv_buffer_size_value; - uint8_t constrained_parameters_flag; - - uint8_t load_intra_quantiser_matrix; - uint8_t intra_quantiser_matrix[64]; - uint8_t load_non_intra_quantiser_matrix; - uint8_t non_intra_quantiser_matrix[64]; + uint8_t sequence_header_code; + + uint16_t horizontal_size_value; + uint16_t vertical_size_value; + uint8_t aspect_ratio_information; + uint8_t frame_rate_code; + uint32_t bit_rate_value; + uint16_t vbv_buffer_size_value; + uint8_t constrained_parameters_flag; + + uint8_t load_intra_quantiser_matrix; + uint8_t intra_quantiser_matrix[64]; + uint8_t load_non_intra_quantiser_matrix; + uint8_t non_intra_quantiser_matrix[64]; } MPEG2RawSequenceHeader; typedef struct MPEG2RawUserData { - uint8_t user_data_start_code; + uint8_t user_data_start_code; - uint8_t *user_data; - AVBufferRef *user_data_ref; - size_t user_data_length; + uint8_t *user_data; + AVBufferRef *user_data_ref; + size_t user_data_length; } MPEG2RawUserData; typedef struct MPEG2RawSequenceExtension { - uint8_t profile_and_level_indication; - uint8_t progressive_sequence; - uint8_t chroma_format; - uint8_t horizontal_size_extension; - uint8_t vertical_size_extension; - uint16_t bit_rate_extension; - uint8_t vbv_buffer_size_extension; - uint8_t low_delay; - uint8_t frame_rate_extension_n; - uint8_t frame_rate_extension_d; + uint8_t profile_and_level_indication; + uint8_t progressive_sequence; + uint8_t chroma_format; + uint8_t horizontal_size_extension; + uint8_t vertical_size_extension; + uint16_t bit_rate_extension; + uint8_t vbv_buffer_size_extension; + uint8_t low_delay; + uint8_t frame_rate_extension_n; + uint8_t 
frame_rate_extension_d; } MPEG2RawSequenceExtension; typedef struct MPEG2RawSequenceDisplayExtension { - uint8_t video_format; + uint8_t video_format; - uint8_t colour_description; - uint8_t colour_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; + uint8_t colour_description; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; - uint16_t display_horizontal_size; - uint16_t display_vertical_size; + uint16_t display_horizontal_size; + uint16_t display_vertical_size; } MPEG2RawSequenceDisplayExtension; typedef struct MPEG2RawGroupOfPicturesHeader { - uint8_t group_start_code; + uint8_t group_start_code; - uint32_t time_code; - uint8_t closed_gop; - uint8_t broken_link; + uint32_t time_code; + uint8_t closed_gop; + uint8_t broken_link; } MPEG2RawGroupOfPicturesHeader; typedef struct MPEG2RawExtraInformation { - uint8_t *extra_information; - AVBufferRef *extra_information_ref; - size_t extra_information_length; + uint8_t *extra_information; + AVBufferRef *extra_information_ref; + size_t extra_information_length; } MPEG2RawExtraInformation; typedef struct MPEG2RawPictureHeader { - uint8_t picture_start_code; + uint8_t picture_start_code; - uint16_t temporal_reference; - uint8_t picture_coding_type; - uint16_t vbv_delay; + uint16_t temporal_reference; + uint8_t picture_coding_type; + uint16_t vbv_delay; - uint8_t full_pel_forward_vector; - uint8_t forward_f_code; - uint8_t full_pel_backward_vector; - uint8_t backward_f_code; + uint8_t full_pel_forward_vector; + uint8_t forward_f_code; + uint8_t full_pel_backward_vector; + uint8_t backward_f_code; - MPEG2RawExtraInformation extra_information_picture; + MPEG2RawExtraInformation extra_information_picture; } MPEG2RawPictureHeader; typedef struct MPEG2RawPictureCodingExtension { - uint8_t f_code[2][2]; - - uint8_t intra_dc_precision; - uint8_t picture_structure; - uint8_t top_field_first; - uint8_t frame_pred_frame_dct; - uint8_t 
concealment_motion_vectors; - uint8_t q_scale_type; - uint8_t intra_vlc_format; - uint8_t alternate_scan; - uint8_t repeat_first_field; - uint8_t chroma_420_type; - uint8_t progressive_frame; - - uint8_t composite_display_flag; - uint8_t v_axis; - uint8_t field_sequence; - uint8_t sub_carrier; - uint8_t burst_amplitude; - uint8_t sub_carrier_phase; + uint8_t f_code[2][2]; + + uint8_t intra_dc_precision; + uint8_t picture_structure; + uint8_t top_field_first; + uint8_t frame_pred_frame_dct; + uint8_t concealment_motion_vectors; + uint8_t q_scale_type; + uint8_t intra_vlc_format; + uint8_t alternate_scan; + uint8_t repeat_first_field; + uint8_t chroma_420_type; + uint8_t progressive_frame; + + uint8_t composite_display_flag; + uint8_t v_axis; + uint8_t field_sequence; + uint8_t sub_carrier; + uint8_t burst_amplitude; + uint8_t sub_carrier_phase; } MPEG2RawPictureCodingExtension; typedef struct MPEG2RawQuantMatrixExtension { - uint8_t load_intra_quantiser_matrix; - uint8_t intra_quantiser_matrix[64]; - uint8_t load_non_intra_quantiser_matrix; - uint8_t non_intra_quantiser_matrix[64]; - uint8_t load_chroma_intra_quantiser_matrix; - uint8_t chroma_intra_quantiser_matrix[64]; - uint8_t load_chroma_non_intra_quantiser_matrix; - uint8_t chroma_non_intra_quantiser_matrix[64]; + uint8_t load_intra_quantiser_matrix; + uint8_t intra_quantiser_matrix[64]; + uint8_t load_non_intra_quantiser_matrix; + uint8_t non_intra_quantiser_matrix[64]; + uint8_t load_chroma_intra_quantiser_matrix; + uint8_t chroma_intra_quantiser_matrix[64]; + uint8_t load_chroma_non_intra_quantiser_matrix; + uint8_t chroma_non_intra_quantiser_matrix[64]; } MPEG2RawQuantMatrixExtension; typedef struct MPEG2RawPictureDisplayExtension { - int16_t frame_centre_horizontal_offset[3]; - int16_t frame_centre_vertical_offset[3]; + int16_t frame_centre_horizontal_offset[3]; + int16_t frame_centre_vertical_offset[3]; } MPEG2RawPictureDisplayExtension; typedef struct MPEG2RawExtensionData { - uint8_t 
extension_start_code; - uint8_t extension_start_code_identifier; - - union { - MPEG2RawSequenceExtension sequence; - MPEG2RawSequenceDisplayExtension sequence_display; - MPEG2RawQuantMatrixExtension quant_matrix; - MPEG2RawPictureCodingExtension picture_coding; - MPEG2RawPictureDisplayExtension picture_display; - } data; + uint8_t extension_start_code; + uint8_t extension_start_code_identifier; + + union { + MPEG2RawSequenceExtension sequence; + MPEG2RawSequenceDisplayExtension sequence_display; + MPEG2RawQuantMatrixExtension quant_matrix; + MPEG2RawPictureCodingExtension picture_coding; + MPEG2RawPictureDisplayExtension picture_display; + } data; } MPEG2RawExtensionData; typedef struct MPEG2RawSliceHeader { - uint8_t slice_vertical_position; + uint8_t slice_vertical_position; - uint8_t slice_vertical_position_extension; - uint8_t priority_breakpoint; + uint8_t slice_vertical_position_extension; + uint8_t priority_breakpoint; - uint8_t quantiser_scale_code; + uint8_t quantiser_scale_code; - uint8_t slice_extension_flag; - uint8_t intra_slice; - uint8_t slice_picture_id_enable; - uint8_t slice_picture_id; + uint8_t slice_extension_flag; + uint8_t intra_slice; + uint8_t slice_picture_id_enable; + uint8_t slice_picture_id; - MPEG2RawExtraInformation extra_information_slice; + MPEG2RawExtraInformation extra_information_slice; } MPEG2RawSliceHeader; typedef struct MPEG2RawSlice { - MPEG2RawSliceHeader header; + MPEG2RawSliceHeader header; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; - int data_bit_start; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_size; + int data_bit_start; } MPEG2RawSlice; typedef struct MPEG2RawSequenceEnd { - uint8_t sequence_end_code; + uint8_t sequence_end_code; } MPEG2RawSequenceEnd; typedef struct CodedBitstreamMPEG2Context { - // Elements stored in headers which are required for other decoding. 
- uint16_t horizontal_size; - uint16_t vertical_size; - uint8_t scalable; - uint8_t scalable_mode; - uint8_t progressive_sequence; - uint8_t number_of_frame_centre_offsets; + // Elements stored in headers which are required for other decoding. + uint16_t horizontal_size; + uint16_t vertical_size; + uint8_t scalable; + uint8_t scalable_mode; + uint8_t progressive_sequence; + uint8_t number_of_frame_centre_offsets; } CodedBitstreamMPEG2Context; diff --git a/third-party/cbs/include/cbs/cbs_sei.h b/third-party/cbs/include/cbs/cbs_sei.h index d20bdda664c..c7a7a95be09 100644 --- a/third-party/cbs/include/cbs/cbs_sei.h +++ b/third-party/cbs/include/cbs/cbs_sei.h @@ -22,119 +22,118 @@ #include #include -#include +#include "libavutil/buffer.h" #include "cbs.h" #include "sei.h" typedef struct SEIRawFillerPayload { - uint32_t payload_size; + uint32_t payload_size; } SEIRawFillerPayload; typedef struct SEIRawUserDataRegistered { - uint8_t itu_t_t35_country_code; - uint8_t itu_t_t35_country_code_extension_byte; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_length; + uint8_t itu_t_t35_country_code; + uint8_t itu_t_t35_country_code_extension_byte; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_length; } SEIRawUserDataRegistered; typedef struct SEIRawUserDataUnregistered { - uint8_t uuid_iso_iec_11578[16]; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_length; + uint8_t uuid_iso_iec_11578[16]; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_length; } SEIRawUserDataUnregistered; typedef struct SEIRawMasteringDisplayColourVolume { - uint16_t display_primaries_x[3]; - uint16_t display_primaries_y[3]; - uint16_t white_point_x; - uint16_t white_point_y; - uint32_t max_display_mastering_luminance; - uint32_t min_display_mastering_luminance; + uint16_t display_primaries_x[3]; + uint16_t display_primaries_y[3]; + uint16_t white_point_x; + uint16_t white_point_y; + uint32_t max_display_mastering_luminance; + uint32_t min_display_mastering_luminance; } 
SEIRawMasteringDisplayColourVolume; typedef struct SEIRawContentLightLevelInfo { - uint16_t max_content_light_level; - uint16_t max_pic_average_light_level; + uint16_t max_content_light_level; + uint16_t max_pic_average_light_level; } SEIRawContentLightLevelInfo; typedef struct SEIRawAlternativeTransferCharacteristics { - uint8_t preferred_transfer_characteristics; + uint8_t preferred_transfer_characteristics; } SEIRawAlternativeTransferCharacteristics; typedef struct SEIRawMessage { - uint32_t payload_type; - uint32_t payload_size; - void *payload; - AVBufferRef *payload_ref; - uint8_t *extension_data; - AVBufferRef *extension_data_ref; - size_t extension_bit_length; + uint32_t payload_type; + uint32_t payload_size; + void *payload; + AVBufferRef *payload_ref; + uint8_t *extension_data; + AVBufferRef *extension_data_ref; + size_t extension_bit_length; } SEIRawMessage; typedef struct SEIRawMessageList { - SEIRawMessage *messages; - int nb_messages; - int nb_messages_allocated; + SEIRawMessage *messages; + int nb_messages; + int nb_messages_allocated; } SEIRawMessageList; typedef struct SEIMessageState { - // The type of the payload being written. - uint32_t payload_type; - // When reading, contains the size of the payload to allow finding the - // end of variable-length fields (such as user_data_payload_byte[]). - // (When writing, the size will be derived from the total number of - // bytes actually written.) - uint32_t payload_size; - // When writing, indicates that payload extension data is present so - // all extended fields must be written. May be updated by the writer - // to indicate that extended fields have been written, so the extension - // end bits must be written too. - uint8_t extension_present; + // The type of the payload being written. + uint32_t payload_type; + // When reading, contains the size of the payload to allow finding the + // end of variable-length fields (such as user_data_payload_byte[]). 
+ // (When writing, the size will be derived from the total number of + // bytes actually written.) + uint32_t payload_size; + // When writing, indicates that payload extension data is present so + // all extended fields must be written. May be updated by the writer + // to indicate that extended fields have been written, so the extension + // end bits must be written too. + uint8_t extension_present; } SEIMessageState; struct GetBitContext; struct PutBitContext; typedef int (*SEIMessageReadFunction)(CodedBitstreamContext *ctx, - struct GetBitContext *rw, - void *current, - SEIMessageState *sei); + struct GetBitContext *rw, + void *current, + SEIMessageState *sei); typedef int (*SEIMessageWriteFunction)(CodedBitstreamContext *ctx, - struct PutBitContext *rw, - void *current, - SEIMessageState *sei); + struct PutBitContext *rw, + void *current, + SEIMessageState *sei); typedef struct SEIMessageTypeDescriptor { - // Payload type for the message. (-1 in this field ends a list.) - int type; - // Valid in a prefix SEI NAL unit (always for H.264). - uint8_t prefix; - // Valid in a suffix SEI NAL unit (never for H.264). - uint8_t suffix; - // Size of the decomposed structure. - size_t size; - // Read bitstream into SEI message. - SEIMessageReadFunction read; - // Write bitstream from SEI message. - SEIMessageWriteFunction write; + // Payload type for the message. (-1 in this field ends a list.) + int type; + // Valid in a prefix SEI NAL unit (always for H.264). + uint8_t prefix; + // Valid in a suffix SEI NAL unit (never for H.264). + uint8_t suffix; + // Size of the decomposed structure. + size_t size; + // Read bitstream into SEI message. + SEIMessageReadFunction read; + // Write bitstream from SEI message. + SEIMessageWriteFunction write; } SEIMessageTypeDescriptor; // Macro for the read/write pair. The clumsy cast is needed because the // current pointer is typed in all of the read/write functions but has to // be void here to fit all cases. 
-#define SEI_MESSAGE_RW(codec, name) \ - .read = (SEIMessageReadFunction)cbs_##codec##_read_##name, \ - .write = (SEIMessageWriteFunction)cbs_##codec##_write_##name +#define SEI_MESSAGE_RW(codec, name) \ + .read = (SEIMessageReadFunction) cbs_ ## codec ## _read_ ## name, \ + .write = (SEIMessageWriteFunction)cbs_ ## codec ## _write_ ## name // End-of-list sentinel element. -#define SEI_MESSAGE_TYPE_END \ - { .type = -1 } +#define SEI_MESSAGE_TYPE_END { .type = -1 } /** @@ -143,13 +142,13 @@ typedef struct SEIMessageTypeDescriptor { * Returns NULL if the payload type is not known. */ const SEIMessageTypeDescriptor *ff_cbs_sei_find_type(CodedBitstreamContext *ctx, - int payload_type); + int payload_type); /** * Allocate a new payload for the given SEI message. */ int ff_cbs_sei_alloc_message_payload(SEIRawMessage *message, - const SEIMessageTypeDescriptor *desc); + const SEIMessageTypeDescriptor *desc); /** * Allocate a new empty SEI message in a message list. @@ -173,11 +172,11 @@ void ff_cbs_sei_free_message_list(SEIRawMessageList *list); * NULL then the new message will not be reference counted. */ int ff_cbs_sei_add_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - int prefix, - uint32_t payload_type, - void *payload_data, - AVBufferRef *payload_buf); + CodedBitstreamFragment *au, + int prefix, + uint32_t payload_type, + void *payload_data, + AVBufferRef *payload_buf); /** * Iterate over messages with the given payload type in an access unit. @@ -186,15 +185,15 @@ int ff_cbs_sei_add_message(CodedBitstreamContext *ctx, * are available, AVERROR(ENOENT) when all messages have been found. */ int ff_cbs_sei_find_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type, - SEIRawMessage **message); + CodedBitstreamFragment *au, + uint32_t payload_type, + SEIRawMessage **message); /** * Delete all messages with the given payload type from an access unit. 
*/ void ff_cbs_sei_delete_message_type(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type); + CodedBitstreamFragment *au, + uint32_t payload_type); #endif /* AVCODEC_CBS_SEI_H */ diff --git a/third-party/cbs/include/cbs/cbs_vp9.h b/third-party/cbs/include/cbs/cbs_vp9.h index 62754498579..af15eb4bace 100644 --- a/third-party/cbs/include/cbs/cbs_vp9.h +++ b/third-party/cbs/include/cbs/cbs_vp9.h @@ -27,186 +27,186 @@ // Miscellaneous constants (section 3). enum { - VP9_REFS_PER_FRAME = 3, + VP9_REFS_PER_FRAME = 3, - VP9_MIN_TILE_WIDTH_B64 = 4, - VP9_MAX_TILE_WIDTH_B64 = 64, + VP9_MIN_TILE_WIDTH_B64 = 4, + VP9_MAX_TILE_WIDTH_B64 = 64, - VP9_NUM_REF_FRAMES = 8, - VP9_MAX_REF_FRAMES = 4, + VP9_NUM_REF_FRAMES = 8, + VP9_MAX_REF_FRAMES = 4, - VP9_MAX_SEGMENTS = 8, - VP9_SEG_LVL_MAX = 4, + VP9_MAX_SEGMENTS = 8, + VP9_SEG_LVL_MAX = 4, }; // Frame types (section 7.2). enum { - VP9_KEY_FRAME = 0, - VP9_NON_KEY_FRAME = 1, + VP9_KEY_FRAME = 0, + VP9_NON_KEY_FRAME = 1, }; // Frame sync bytes (section 7.2.1). enum { - VP9_FRAME_SYNC_0 = 0x49, - VP9_FRAME_SYNC_1 = 0x83, - VP9_FRAME_SYNC_2 = 0x42, + VP9_FRAME_SYNC_0 = 0x49, + VP9_FRAME_SYNC_1 = 0x83, + VP9_FRAME_SYNC_2 = 0x42, }; // Color space values (section 7.2.2). enum { - VP9_CS_UNKNOWN = 0, - VP9_CS_BT_601 = 1, - VP9_CS_BT_709 = 2, - VP9_CS_SMPTE_170 = 3, - VP9_CS_SMPTE_240 = 4, - VP9_CS_BT_2020 = 5, - VP9_CS_RESERVED = 6, - VP9_CS_RGB = 7, + VP9_CS_UNKNOWN = 0, + VP9_CS_BT_601 = 1, + VP9_CS_BT_709 = 2, + VP9_CS_SMPTE_170 = 3, + VP9_CS_SMPTE_240 = 4, + VP9_CS_BT_2020 = 5, + VP9_CS_RESERVED = 6, + VP9_CS_RGB = 7, }; // Reference frame types (section 7.4.12). enum { - VP9_INTRA_FRAME = 0, - VP9_LAST_FRAME = 1, - VP9_GOLDEN_FRAME = 2, - VP9_ALTREF_FRAME = 3, + VP9_INTRA_FRAME = 0, + VP9_LAST_FRAME = 1, + VP9_GOLDEN_FRAME = 2, + VP9_ALTREF_FRAME = 3, }; // Superframe properties (section B.3). 
enum { - VP9_MAX_FRAMES_IN_SUPERFRAME = 8, + VP9_MAX_FRAMES_IN_SUPERFRAME = 8, - VP9_SUPERFRAME_MARKER = 6, + VP9_SUPERFRAME_MARKER = 6, }; typedef struct VP9RawFrameHeader { - uint8_t frame_marker; - uint8_t profile_low_bit; - uint8_t profile_high_bit; - - uint8_t show_existing_frame; - uint8_t frame_to_show_map_idx; - - uint8_t frame_type; - uint8_t show_frame; - uint8_t error_resilient_mode; - - // Color config. - uint8_t ten_or_twelve_bit; - uint8_t color_space; - uint8_t color_range; - uint8_t subsampling_x; - uint8_t subsampling_y; - - uint8_t refresh_frame_flags; - - uint8_t intra_only; - uint8_t reset_frame_context; - - uint8_t ref_frame_idx[VP9_REFS_PER_FRAME]; - uint8_t ref_frame_sign_bias[VP9_MAX_REF_FRAMES]; - - uint8_t allow_high_precision_mv; - - uint8_t refresh_frame_context; - uint8_t frame_parallel_decoding_mode; - - uint8_t frame_context_idx; - - // Frame/render size. - uint8_t found_ref[VP9_REFS_PER_FRAME]; - uint16_t frame_width_minus_1; - uint16_t frame_height_minus_1; - uint8_t render_and_frame_size_different; - uint16_t render_width_minus_1; - uint16_t render_height_minus_1; - - // Interpolation filter. - uint8_t is_filter_switchable; - uint8_t raw_interpolation_filter_type; - - // Loop filter params. - uint8_t loop_filter_level; - uint8_t loop_filter_sharpness; - uint8_t loop_filter_delta_enabled; - uint8_t loop_filter_delta_update; - uint8_t update_ref_delta[VP9_MAX_REF_FRAMES]; - int8_t loop_filter_ref_deltas[VP9_MAX_REF_FRAMES]; - uint8_t update_mode_delta[2]; - int8_t loop_filter_mode_deltas[2]; - - // Quantization params. - uint8_t base_q_idx; - int8_t delta_q_y_dc; - int8_t delta_q_uv_dc; - int8_t delta_q_uv_ac; - - // Segmentation params. 
- uint8_t segmentation_enabled; - uint8_t segmentation_update_map; - uint8_t segmentation_tree_probs[7]; - uint8_t segmentation_temporal_update; - uint8_t segmentation_pred_prob[3]; - uint8_t segmentation_update_data; - uint8_t segmentation_abs_or_delta_update; - uint8_t feature_enabled[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; - uint8_t feature_value[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; - uint8_t feature_sign[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; - - // Tile info. - uint8_t tile_cols_log2; - uint8_t tile_rows_log2; - - uint16_t header_size_in_bytes; + uint8_t frame_marker; + uint8_t profile_low_bit; + uint8_t profile_high_bit; + + uint8_t show_existing_frame; + uint8_t frame_to_show_map_idx; + + uint8_t frame_type; + uint8_t show_frame; + uint8_t error_resilient_mode; + + // Color config. + uint8_t ten_or_twelve_bit; + uint8_t color_space; + uint8_t color_range; + uint8_t subsampling_x; + uint8_t subsampling_y; + + uint8_t refresh_frame_flags; + + uint8_t intra_only; + uint8_t reset_frame_context; + + uint8_t ref_frame_idx[VP9_REFS_PER_FRAME]; + uint8_t ref_frame_sign_bias[VP9_MAX_REF_FRAMES]; + + uint8_t allow_high_precision_mv; + + uint8_t refresh_frame_context; + uint8_t frame_parallel_decoding_mode; + + uint8_t frame_context_idx; + + // Frame/render size. + uint8_t found_ref[VP9_REFS_PER_FRAME]; + uint16_t frame_width_minus_1; + uint16_t frame_height_minus_1; + uint8_t render_and_frame_size_different; + uint16_t render_width_minus_1; + uint16_t render_height_minus_1; + + // Interpolation filter. + uint8_t is_filter_switchable; + uint8_t raw_interpolation_filter_type; + + // Loop filter params. + uint8_t loop_filter_level; + uint8_t loop_filter_sharpness; + uint8_t loop_filter_delta_enabled; + uint8_t loop_filter_delta_update; + uint8_t update_ref_delta[VP9_MAX_REF_FRAMES]; + int8_t loop_filter_ref_deltas[VP9_MAX_REF_FRAMES]; + uint8_t update_mode_delta[2]; + int8_t loop_filter_mode_deltas[2]; + + // Quantization params. 
+ uint8_t base_q_idx; + int8_t delta_q_y_dc; + int8_t delta_q_uv_dc; + int8_t delta_q_uv_ac; + + // Segmentation params. + uint8_t segmentation_enabled; + uint8_t segmentation_update_map; + uint8_t segmentation_tree_probs[7]; + uint8_t segmentation_temporal_update; + uint8_t segmentation_pred_prob[3]; + uint8_t segmentation_update_data; + uint8_t segmentation_abs_or_delta_update; + uint8_t feature_enabled[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; + uint8_t feature_value[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; + uint8_t feature_sign[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; + + // Tile info. + uint8_t tile_cols_log2; + uint8_t tile_rows_log2; + + uint16_t header_size_in_bytes; } VP9RawFrameHeader; typedef struct VP9RawFrame { - VP9RawFrameHeader header; + VP9RawFrameHeader header; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; + uint8_t *data; + AVBufferRef *data_ref; + size_t data_size; } VP9RawFrame; typedef struct VP9RawSuperframeIndex { - uint8_t superframe_marker; - uint8_t bytes_per_framesize_minus_1; - uint8_t frames_in_superframe_minus_1; - uint32_t frame_sizes[VP9_MAX_FRAMES_IN_SUPERFRAME]; + uint8_t superframe_marker; + uint8_t bytes_per_framesize_minus_1; + uint8_t frames_in_superframe_minus_1; + uint32_t frame_sizes[VP9_MAX_FRAMES_IN_SUPERFRAME]; } VP9RawSuperframeIndex; typedef struct VP9RawSuperframe { - VP9RawFrame frames[VP9_MAX_FRAMES_IN_SUPERFRAME]; - VP9RawSuperframeIndex index; + VP9RawFrame frames[VP9_MAX_FRAMES_IN_SUPERFRAME]; + VP9RawSuperframeIndex index; } VP9RawSuperframe; typedef struct VP9ReferenceFrameState { - int frame_width; // RefFrameWidth - int frame_height; // RefFrameHeight - int subsampling_x; // RefSubsamplingX - int subsampling_y; // RefSubsamplingY - int bit_depth; // RefBitDepth + int frame_width; // RefFrameWidth + int frame_height; // RefFrameHeight + int subsampling_x; // RefSubsamplingX + int subsampling_y; // RefSubsamplingY + int bit_depth; // RefBitDepth } VP9ReferenceFrameState; typedef struct CodedBitstreamVP9Context 
{ - int profile; + int profile; - // Frame dimensions in 8x8 mode info blocks. - uint16_t mi_cols; - uint16_t mi_rows; - // Frame dimensions in 64x64 superblocks. - uint16_t sb64_cols; - uint16_t sb64_rows; + // Frame dimensions in 8x8 mode info blocks. + uint16_t mi_cols; + uint16_t mi_rows; + // Frame dimensions in 64x64 superblocks. + uint16_t sb64_cols; + uint16_t sb64_rows; - int frame_width; - int frame_height; + int frame_width; + int frame_height; - uint8_t subsampling_x; - uint8_t subsampling_y; - int bit_depth; + uint8_t subsampling_x; + uint8_t subsampling_y; + int bit_depth; - VP9ReferenceFrameState ref[VP9_NUM_REF_FRAMES]; + VP9ReferenceFrameState ref[VP9_NUM_REF_FRAMES]; } CodedBitstreamVP9Context; diff --git a/third-party/cbs/include/cbs/codec_desc.h b/third-party/cbs/include/cbs/codec_desc.h new file mode 100644 index 00000000000..126b52df476 --- /dev/null +++ b/third-party/cbs/include/cbs/codec_desc.h @@ -0,0 +1,128 @@ +/* + * Codec descriptors public API + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_CODEC_DESC_H +#define AVCODEC_CODEC_DESC_H + +#include "libavutil/avutil.h" + +#include "codec_id.h" + +/** + * @addtogroup lavc_core + * @{ + */ + +/** + * This struct describes the properties of a single codec described by an + * AVCodecID. + * @see avcodec_descriptor_get() + */ +typedef struct AVCodecDescriptor { + enum AVCodecID id; + enum AVMediaType type; + /** + * Name of the codec described by this descriptor. It is non-empty and + * unique for each codec descriptor. It should contain alphanumeric + * characters and '_' only. + */ + const char *name; + /** + * A more descriptive name for this codec. May be NULL. + */ + const char *long_name; + /** + * Codec properties, a combination of AV_CODEC_PROP_* flags. + */ + int props; + /** + * MIME type(s) associated with the codec. + * May be NULL; if not, a NULL-terminated array of MIME types. + * The first item is always non-NULL and is the preferred MIME type. + */ + const char *const *mime_types; + /** + * If non-NULL, an array of profiles recognized for this codec. + * Terminated with FF_PROFILE_UNKNOWN. + */ + const struct AVProfile *profiles; +} AVCodecDescriptor; + +/** + * Codec uses only intra compression. + * Video and audio codecs only. + */ +#define AV_CODEC_PROP_INTRA_ONLY (1 << 0) +/** + * Codec supports lossy compression. Audio and video codecs only. + * @note a codec may support both lossy and lossless + * compression modes + */ +#define AV_CODEC_PROP_LOSSY (1 << 1) +/** + * Codec supports lossless compression. Audio and video codecs only. + */ +#define AV_CODEC_PROP_LOSSLESS (1 << 2) +/** + * Codec supports frame reordering. 
That is, the coded order (the order in which + * the encoded packets are output by the encoders / stored / input to the + * decoders) may be different from the presentation order of the corresponding + * frames. + * + * For codecs that do not have this property set, PTS and DTS should always be + * equal. + */ +#define AV_CODEC_PROP_REORDER (1 << 3) +/** + * Subtitle codec is bitmap based + * Decoded AVSubtitle data can be read from the AVSubtitleRect->pict field. + */ +#define AV_CODEC_PROP_BITMAP_SUB (1 << 16) +/** + * Subtitle codec is text based. + * Decoded AVSubtitle data can be read from the AVSubtitleRect->ass field. + */ +#define AV_CODEC_PROP_TEXT_SUB (1 << 17) + +/** + * @return descriptor for given codec ID or NULL if no descriptor exists. + */ +const AVCodecDescriptor *avcodec_descriptor_get(enum AVCodecID id); + +/** + * Iterate over all codec descriptors known to libavcodec. + * + * @param prev previous descriptor. NULL to get the first descriptor. + * + * @return next descriptor or NULL after the last descriptor + */ +const AVCodecDescriptor *avcodec_descriptor_next(const AVCodecDescriptor *prev); + +/** + * @return codec descriptor with the given name or NULL if no such descriptor + * exists. + */ +const AVCodecDescriptor *avcodec_descriptor_get_by_name(const char *name); + +/** + * @} + */ + +#endif // AVCODEC_CODEC_DESC_H diff --git a/third-party/cbs/include/cbs/codec_id.h b/third-party/cbs/include/cbs/codec_id.h new file mode 100644 index 00000000000..81fb316cff6 --- /dev/null +++ b/third-party/cbs/include/cbs/codec_id.h @@ -0,0 +1,634 @@ +/* + * Codec IDs + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_CODEC_ID_H +#define AVCODEC_CODEC_ID_H + +#include "libavutil/avutil.h" +#include "libavutil/samplefmt.h" + +/** + * @addtogroup lavc_core + * @{ + */ + +/** + * Identify the syntax and semantics of the bitstream. + * The principle is roughly: + * Two decoders with the same ID can decode the same streams. + * Two encoders with the same ID can encode compatible streams. + * There may be slight deviations from the principle due to implementation + * details. + * + * If you add a codec ID to this list, add it so that + * 1. no value of an existing codec ID changes (that would break ABI), + * 2. it is as close as possible to similar codecs + * + * After adding new codec IDs, do not forget to add an entry to the codec + * descriptor list and bump libavcodec minor version. 
+ */ +enum AVCodecID { + AV_CODEC_ID_NONE, + + /* video codecs */ + AV_CODEC_ID_MPEG1VIDEO, + AV_CODEC_ID_MPEG2VIDEO, ///< preferred ID for MPEG-1/2 video decoding + AV_CODEC_ID_H261, + AV_CODEC_ID_H263, + AV_CODEC_ID_RV10, + AV_CODEC_ID_RV20, + AV_CODEC_ID_MJPEG, + AV_CODEC_ID_MJPEGB, + AV_CODEC_ID_LJPEG, + AV_CODEC_ID_SP5X, + AV_CODEC_ID_JPEGLS, + AV_CODEC_ID_MPEG4, + AV_CODEC_ID_RAWVIDEO, + AV_CODEC_ID_MSMPEG4V1, + AV_CODEC_ID_MSMPEG4V2, + AV_CODEC_ID_MSMPEG4V3, + AV_CODEC_ID_WMV1, + AV_CODEC_ID_WMV2, + AV_CODEC_ID_H263P, + AV_CODEC_ID_H263I, + AV_CODEC_ID_FLV1, + AV_CODEC_ID_SVQ1, + AV_CODEC_ID_SVQ3, + AV_CODEC_ID_DVVIDEO, + AV_CODEC_ID_HUFFYUV, + AV_CODEC_ID_CYUV, + AV_CODEC_ID_H264, + AV_CODEC_ID_INDEO3, + AV_CODEC_ID_VP3, + AV_CODEC_ID_THEORA, + AV_CODEC_ID_ASV1, + AV_CODEC_ID_ASV2, + AV_CODEC_ID_FFV1, + AV_CODEC_ID_4XM, + AV_CODEC_ID_VCR1, + AV_CODEC_ID_CLJR, + AV_CODEC_ID_MDEC, + AV_CODEC_ID_ROQ, + AV_CODEC_ID_INTERPLAY_VIDEO, + AV_CODEC_ID_XAN_WC3, + AV_CODEC_ID_XAN_WC4, + AV_CODEC_ID_RPZA, + AV_CODEC_ID_CINEPAK, + AV_CODEC_ID_WS_VQA, + AV_CODEC_ID_MSRLE, + AV_CODEC_ID_MSVIDEO1, + AV_CODEC_ID_IDCIN, + AV_CODEC_ID_8BPS, + AV_CODEC_ID_SMC, + AV_CODEC_ID_FLIC, + AV_CODEC_ID_TRUEMOTION1, + AV_CODEC_ID_VMDVIDEO, + AV_CODEC_ID_MSZH, + AV_CODEC_ID_ZLIB, + AV_CODEC_ID_QTRLE, + AV_CODEC_ID_TSCC, + AV_CODEC_ID_ULTI, + AV_CODEC_ID_QDRAW, + AV_CODEC_ID_VIXL, + AV_CODEC_ID_QPEG, + AV_CODEC_ID_PNG, + AV_CODEC_ID_PPM, + AV_CODEC_ID_PBM, + AV_CODEC_ID_PGM, + AV_CODEC_ID_PGMYUV, + AV_CODEC_ID_PAM, + AV_CODEC_ID_FFVHUFF, + AV_CODEC_ID_RV30, + AV_CODEC_ID_RV40, + AV_CODEC_ID_VC1, + AV_CODEC_ID_WMV3, + AV_CODEC_ID_LOCO, + AV_CODEC_ID_WNV1, + AV_CODEC_ID_AASC, + AV_CODEC_ID_INDEO2, + AV_CODEC_ID_FRAPS, + AV_CODEC_ID_TRUEMOTION2, + AV_CODEC_ID_BMP, + AV_CODEC_ID_CSCD, + AV_CODEC_ID_MMVIDEO, + AV_CODEC_ID_ZMBV, + AV_CODEC_ID_AVS, + AV_CODEC_ID_SMACKVIDEO, + AV_CODEC_ID_NUV, + AV_CODEC_ID_KMVC, + AV_CODEC_ID_FLASHSV, + AV_CODEC_ID_CAVS, + AV_CODEC_ID_JPEG2000, + 
AV_CODEC_ID_VMNC, + AV_CODEC_ID_VP5, + AV_CODEC_ID_VP6, + AV_CODEC_ID_VP6F, + AV_CODEC_ID_TARGA, + AV_CODEC_ID_DSICINVIDEO, + AV_CODEC_ID_TIERTEXSEQVIDEO, + AV_CODEC_ID_TIFF, + AV_CODEC_ID_GIF, + AV_CODEC_ID_DXA, + AV_CODEC_ID_DNXHD, + AV_CODEC_ID_THP, + AV_CODEC_ID_SGI, + AV_CODEC_ID_C93, + AV_CODEC_ID_BETHSOFTVID, + AV_CODEC_ID_PTX, + AV_CODEC_ID_TXD, + AV_CODEC_ID_VP6A, + AV_CODEC_ID_AMV, + AV_CODEC_ID_VB, + AV_CODEC_ID_PCX, + AV_CODEC_ID_SUNRAST, + AV_CODEC_ID_INDEO4, + AV_CODEC_ID_INDEO5, + AV_CODEC_ID_MIMIC, + AV_CODEC_ID_RL2, + AV_CODEC_ID_ESCAPE124, + AV_CODEC_ID_DIRAC, + AV_CODEC_ID_BFI, + AV_CODEC_ID_CMV, + AV_CODEC_ID_MOTIONPIXELS, + AV_CODEC_ID_TGV, + AV_CODEC_ID_TGQ, + AV_CODEC_ID_TQI, + AV_CODEC_ID_AURA, + AV_CODEC_ID_AURA2, + AV_CODEC_ID_V210X, + AV_CODEC_ID_TMV, + AV_CODEC_ID_V210, + AV_CODEC_ID_DPX, + AV_CODEC_ID_MAD, + AV_CODEC_ID_FRWU, + AV_CODEC_ID_FLASHSV2, + AV_CODEC_ID_CDGRAPHICS, + AV_CODEC_ID_R210, + AV_CODEC_ID_ANM, + AV_CODEC_ID_BINKVIDEO, + AV_CODEC_ID_IFF_ILBM, +#define AV_CODEC_ID_IFF_BYTERUN1 AV_CODEC_ID_IFF_ILBM + AV_CODEC_ID_KGV1, + AV_CODEC_ID_YOP, + AV_CODEC_ID_VP8, + AV_CODEC_ID_PICTOR, + AV_CODEC_ID_ANSI, + AV_CODEC_ID_A64_MULTI, + AV_CODEC_ID_A64_MULTI5, + AV_CODEC_ID_R10K, + AV_CODEC_ID_MXPEG, + AV_CODEC_ID_LAGARITH, + AV_CODEC_ID_PRORES, + AV_CODEC_ID_JV, + AV_CODEC_ID_DFA, + AV_CODEC_ID_WMV3IMAGE, + AV_CODEC_ID_VC1IMAGE, + AV_CODEC_ID_UTVIDEO, + AV_CODEC_ID_BMV_VIDEO, + AV_CODEC_ID_VBLE, + AV_CODEC_ID_DXTORY, + AV_CODEC_ID_V410, + AV_CODEC_ID_XWD, + AV_CODEC_ID_CDXL, + AV_CODEC_ID_XBM, + AV_CODEC_ID_ZEROCODEC, + AV_CODEC_ID_MSS1, + AV_CODEC_ID_MSA1, + AV_CODEC_ID_TSCC2, + AV_CODEC_ID_MTS2, + AV_CODEC_ID_CLLC, + AV_CODEC_ID_MSS2, + AV_CODEC_ID_VP9, + AV_CODEC_ID_AIC, + AV_CODEC_ID_ESCAPE130, + AV_CODEC_ID_G2M, + AV_CODEC_ID_WEBP, + AV_CODEC_ID_HNM4_VIDEO, + AV_CODEC_ID_HEVC, +#define AV_CODEC_ID_H265 AV_CODEC_ID_HEVC + AV_CODEC_ID_FIC, + AV_CODEC_ID_ALIAS_PIX, + AV_CODEC_ID_BRENDER_PIX, + AV_CODEC_ID_PAF_VIDEO, + 
AV_CODEC_ID_EXR, + AV_CODEC_ID_VP7, + AV_CODEC_ID_SANM, + AV_CODEC_ID_SGIRLE, + AV_CODEC_ID_MVC1, + AV_CODEC_ID_MVC2, + AV_CODEC_ID_HQX, + AV_CODEC_ID_TDSC, + AV_CODEC_ID_HQ_HQA, + AV_CODEC_ID_HAP, + AV_CODEC_ID_DDS, + AV_CODEC_ID_DXV, + AV_CODEC_ID_SCREENPRESSO, + AV_CODEC_ID_RSCC, + AV_CODEC_ID_AVS2, + AV_CODEC_ID_PGX, + AV_CODEC_ID_AVS3, + AV_CODEC_ID_MSP2, + AV_CODEC_ID_VVC, +#define AV_CODEC_ID_H266 AV_CODEC_ID_VVC + AV_CODEC_ID_Y41P, + AV_CODEC_ID_AVRP, + AV_CODEC_ID_012V, + AV_CODEC_ID_AVUI, + AV_CODEC_ID_AYUV, + AV_CODEC_ID_TARGA_Y216, + AV_CODEC_ID_V308, + AV_CODEC_ID_V408, + AV_CODEC_ID_YUV4, + AV_CODEC_ID_AVRN, + AV_CODEC_ID_CPIA, + AV_CODEC_ID_XFACE, + AV_CODEC_ID_SNOW, + AV_CODEC_ID_SMVJPEG, + AV_CODEC_ID_APNG, + AV_CODEC_ID_DAALA, + AV_CODEC_ID_CFHD, + AV_CODEC_ID_TRUEMOTION2RT, + AV_CODEC_ID_M101, + AV_CODEC_ID_MAGICYUV, + AV_CODEC_ID_SHEERVIDEO, + AV_CODEC_ID_YLC, + AV_CODEC_ID_PSD, + AV_CODEC_ID_PIXLET, + AV_CODEC_ID_SPEEDHQ, + AV_CODEC_ID_FMVC, + AV_CODEC_ID_SCPR, + AV_CODEC_ID_CLEARVIDEO, + AV_CODEC_ID_XPM, + AV_CODEC_ID_AV1, + AV_CODEC_ID_BITPACKED, + AV_CODEC_ID_MSCC, + AV_CODEC_ID_SRGC, + AV_CODEC_ID_SVG, + AV_CODEC_ID_GDV, + AV_CODEC_ID_FITS, + AV_CODEC_ID_IMM4, + AV_CODEC_ID_PROSUMER, + AV_CODEC_ID_MWSC, + AV_CODEC_ID_WCMV, + AV_CODEC_ID_RASC, + AV_CODEC_ID_HYMT, + AV_CODEC_ID_ARBC, + AV_CODEC_ID_AGM, + AV_CODEC_ID_LSCR, + AV_CODEC_ID_VP4, + AV_CODEC_ID_IMM5, + AV_CODEC_ID_MVDV, + AV_CODEC_ID_MVHA, + AV_CODEC_ID_CDTOONS, + AV_CODEC_ID_MV30, + AV_CODEC_ID_NOTCHLC, + AV_CODEC_ID_PFM, + AV_CODEC_ID_MOBICLIP, + AV_CODEC_ID_PHOTOCD, + AV_CODEC_ID_IPU, + AV_CODEC_ID_ARGO, + AV_CODEC_ID_CRI, + AV_CODEC_ID_SIMBIOSIS_IMX, + AV_CODEC_ID_SGA_VIDEO, + AV_CODEC_ID_GEM, + AV_CODEC_ID_VBN, + AV_CODEC_ID_JPEGXL, + AV_CODEC_ID_QOI, + AV_CODEC_ID_PHM, + + /* various PCM "codecs" */ + AV_CODEC_ID_FIRST_AUDIO = 0x10000, ///< A dummy id pointing at the start of audio codecs + AV_CODEC_ID_PCM_S16LE = 0x10000, + AV_CODEC_ID_PCM_S16BE, + AV_CODEC_ID_PCM_U16LE, + 
AV_CODEC_ID_PCM_U16BE, + AV_CODEC_ID_PCM_S8, + AV_CODEC_ID_PCM_U8, + AV_CODEC_ID_PCM_MULAW, + AV_CODEC_ID_PCM_ALAW, + AV_CODEC_ID_PCM_S32LE, + AV_CODEC_ID_PCM_S32BE, + AV_CODEC_ID_PCM_U32LE, + AV_CODEC_ID_PCM_U32BE, + AV_CODEC_ID_PCM_S24LE, + AV_CODEC_ID_PCM_S24BE, + AV_CODEC_ID_PCM_U24LE, + AV_CODEC_ID_PCM_U24BE, + AV_CODEC_ID_PCM_S24DAUD, + AV_CODEC_ID_PCM_ZORK, + AV_CODEC_ID_PCM_S16LE_PLANAR, + AV_CODEC_ID_PCM_DVD, + AV_CODEC_ID_PCM_F32BE, + AV_CODEC_ID_PCM_F32LE, + AV_CODEC_ID_PCM_F64BE, + AV_CODEC_ID_PCM_F64LE, + AV_CODEC_ID_PCM_BLURAY, + AV_CODEC_ID_PCM_LXF, + AV_CODEC_ID_S302M, + AV_CODEC_ID_PCM_S8_PLANAR, + AV_CODEC_ID_PCM_S24LE_PLANAR, + AV_CODEC_ID_PCM_S32LE_PLANAR, + AV_CODEC_ID_PCM_S16BE_PLANAR, + AV_CODEC_ID_PCM_S64LE, + AV_CODEC_ID_PCM_S64BE, + AV_CODEC_ID_PCM_F16LE, + AV_CODEC_ID_PCM_F24LE, + AV_CODEC_ID_PCM_VIDC, + AV_CODEC_ID_PCM_SGA, + + /* various ADPCM codecs */ + AV_CODEC_ID_ADPCM_IMA_QT = 0x11000, + AV_CODEC_ID_ADPCM_IMA_WAV, + AV_CODEC_ID_ADPCM_IMA_DK3, + AV_CODEC_ID_ADPCM_IMA_DK4, + AV_CODEC_ID_ADPCM_IMA_WS, + AV_CODEC_ID_ADPCM_IMA_SMJPEG, + AV_CODEC_ID_ADPCM_MS, + AV_CODEC_ID_ADPCM_4XM, + AV_CODEC_ID_ADPCM_XA, + AV_CODEC_ID_ADPCM_ADX, + AV_CODEC_ID_ADPCM_EA, + AV_CODEC_ID_ADPCM_G726, + AV_CODEC_ID_ADPCM_CT, + AV_CODEC_ID_ADPCM_SWF, + AV_CODEC_ID_ADPCM_YAMAHA, + AV_CODEC_ID_ADPCM_SBPRO_4, + AV_CODEC_ID_ADPCM_SBPRO_3, + AV_CODEC_ID_ADPCM_SBPRO_2, + AV_CODEC_ID_ADPCM_THP, + AV_CODEC_ID_ADPCM_IMA_AMV, + AV_CODEC_ID_ADPCM_EA_R1, + AV_CODEC_ID_ADPCM_EA_R3, + AV_CODEC_ID_ADPCM_EA_R2, + AV_CODEC_ID_ADPCM_IMA_EA_SEAD, + AV_CODEC_ID_ADPCM_IMA_EA_EACS, + AV_CODEC_ID_ADPCM_EA_XAS, + AV_CODEC_ID_ADPCM_EA_MAXIS_XA, + AV_CODEC_ID_ADPCM_IMA_ISS, + AV_CODEC_ID_ADPCM_G722, + AV_CODEC_ID_ADPCM_IMA_APC, + AV_CODEC_ID_ADPCM_VIMA, + AV_CODEC_ID_ADPCM_AFC, + AV_CODEC_ID_ADPCM_IMA_OKI, + AV_CODEC_ID_ADPCM_DTK, + AV_CODEC_ID_ADPCM_IMA_RAD, + AV_CODEC_ID_ADPCM_G726LE, + AV_CODEC_ID_ADPCM_THP_LE, + AV_CODEC_ID_ADPCM_PSX, + AV_CODEC_ID_ADPCM_AICA, + 
AV_CODEC_ID_ADPCM_IMA_DAT4, + AV_CODEC_ID_ADPCM_MTAF, + AV_CODEC_ID_ADPCM_AGM, + AV_CODEC_ID_ADPCM_ARGO, + AV_CODEC_ID_ADPCM_IMA_SSI, + AV_CODEC_ID_ADPCM_ZORK, + AV_CODEC_ID_ADPCM_IMA_APM, + AV_CODEC_ID_ADPCM_IMA_ALP, + AV_CODEC_ID_ADPCM_IMA_MTF, + AV_CODEC_ID_ADPCM_IMA_CUNNING, + AV_CODEC_ID_ADPCM_IMA_MOFLEX, + AV_CODEC_ID_ADPCM_IMA_ACORN, + + /* AMR */ + AV_CODEC_ID_AMR_NB = 0x12000, + AV_CODEC_ID_AMR_WB, + + /* RealAudio codecs*/ + AV_CODEC_ID_RA_144 = 0x13000, + AV_CODEC_ID_RA_288, + + /* various DPCM codecs */ + AV_CODEC_ID_ROQ_DPCM = 0x14000, + AV_CODEC_ID_INTERPLAY_DPCM, + AV_CODEC_ID_XAN_DPCM, + AV_CODEC_ID_SOL_DPCM, + AV_CODEC_ID_SDX2_DPCM, + AV_CODEC_ID_GREMLIN_DPCM, + AV_CODEC_ID_DERF_DPCM, + + /* audio codecs */ + AV_CODEC_ID_MP2 = 0x15000, + AV_CODEC_ID_MP3, ///< preferred ID for decoding MPEG audio layer 1, 2 or 3 + AV_CODEC_ID_AAC, + AV_CODEC_ID_AC3, + AV_CODEC_ID_DTS, + AV_CODEC_ID_VORBIS, + AV_CODEC_ID_DVAUDIO, + AV_CODEC_ID_WMAV1, + AV_CODEC_ID_WMAV2, + AV_CODEC_ID_MACE3, + AV_CODEC_ID_MACE6, + AV_CODEC_ID_VMDAUDIO, + AV_CODEC_ID_FLAC, + AV_CODEC_ID_MP3ADU, + AV_CODEC_ID_MP3ON4, + AV_CODEC_ID_SHORTEN, + AV_CODEC_ID_ALAC, + AV_CODEC_ID_WESTWOOD_SND1, + AV_CODEC_ID_GSM, ///< as in Berlin toast format + AV_CODEC_ID_QDM2, + AV_CODEC_ID_COOK, + AV_CODEC_ID_TRUESPEECH, + AV_CODEC_ID_TTA, + AV_CODEC_ID_SMACKAUDIO, + AV_CODEC_ID_QCELP, + AV_CODEC_ID_WAVPACK, + AV_CODEC_ID_DSICINAUDIO, + AV_CODEC_ID_IMC, + AV_CODEC_ID_MUSEPACK7, + AV_CODEC_ID_MLP, + AV_CODEC_ID_GSM_MS, /* as found in WAV */ + AV_CODEC_ID_ATRAC3, + AV_CODEC_ID_APE, + AV_CODEC_ID_NELLYMOSER, + AV_CODEC_ID_MUSEPACK8, + AV_CODEC_ID_SPEEX, + AV_CODEC_ID_WMAVOICE, + AV_CODEC_ID_WMAPRO, + AV_CODEC_ID_WMALOSSLESS, + AV_CODEC_ID_ATRAC3P, + AV_CODEC_ID_EAC3, + AV_CODEC_ID_SIPR, + AV_CODEC_ID_MP1, + AV_CODEC_ID_TWINVQ, + AV_CODEC_ID_TRUEHD, + AV_CODEC_ID_MP4ALS, + AV_CODEC_ID_ATRAC1, + AV_CODEC_ID_BINKAUDIO_RDFT, + AV_CODEC_ID_BINKAUDIO_DCT, + AV_CODEC_ID_AAC_LATM, + AV_CODEC_ID_QDMC, + 
AV_CODEC_ID_CELT, + AV_CODEC_ID_G723_1, + AV_CODEC_ID_G729, + AV_CODEC_ID_8SVX_EXP, + AV_CODEC_ID_8SVX_FIB, + AV_CODEC_ID_BMV_AUDIO, + AV_CODEC_ID_RALF, + AV_CODEC_ID_IAC, + AV_CODEC_ID_ILBC, + AV_CODEC_ID_OPUS, + AV_CODEC_ID_COMFORT_NOISE, + AV_CODEC_ID_TAK, + AV_CODEC_ID_METASOUND, + AV_CODEC_ID_PAF_AUDIO, + AV_CODEC_ID_ON2AVC, + AV_CODEC_ID_DSS_SP, + AV_CODEC_ID_CODEC2, + AV_CODEC_ID_FFWAVESYNTH, + AV_CODEC_ID_SONIC, + AV_CODEC_ID_SONIC_LS, + AV_CODEC_ID_EVRC, + AV_CODEC_ID_SMV, + AV_CODEC_ID_DSD_LSBF, + AV_CODEC_ID_DSD_MSBF, + AV_CODEC_ID_DSD_LSBF_PLANAR, + AV_CODEC_ID_DSD_MSBF_PLANAR, + AV_CODEC_ID_4GV, + AV_CODEC_ID_INTERPLAY_ACM, + AV_CODEC_ID_XMA1, + AV_CODEC_ID_XMA2, + AV_CODEC_ID_DST, + AV_CODEC_ID_ATRAC3AL, + AV_CODEC_ID_ATRAC3PAL, + AV_CODEC_ID_DOLBY_E, + AV_CODEC_ID_APTX, + AV_CODEC_ID_APTX_HD, + AV_CODEC_ID_SBC, + AV_CODEC_ID_ATRAC9, + AV_CODEC_ID_HCOM, + AV_CODEC_ID_ACELP_KELVIN, + AV_CODEC_ID_MPEGH_3D_AUDIO, + AV_CODEC_ID_SIREN, + AV_CODEC_ID_HCA, + AV_CODEC_ID_FASTAUDIO, + AV_CODEC_ID_MSNSIREN, + AV_CODEC_ID_DFPWM, + + /* subtitle codecs */ + AV_CODEC_ID_FIRST_SUBTITLE = 0x17000, ///< A dummy ID pointing at the start of subtitle codecs. + AV_CODEC_ID_DVD_SUBTITLE = 0x17000, + AV_CODEC_ID_DVB_SUBTITLE, + AV_CODEC_ID_TEXT, ///< raw UTF-8 text + AV_CODEC_ID_XSUB, + AV_CODEC_ID_SSA, + AV_CODEC_ID_MOV_TEXT, + AV_CODEC_ID_HDMV_PGS_SUBTITLE, + AV_CODEC_ID_DVB_TELETEXT, + AV_CODEC_ID_SRT, + AV_CODEC_ID_MICRODVD, + AV_CODEC_ID_EIA_608, + AV_CODEC_ID_JACOSUB, + AV_CODEC_ID_SAMI, + AV_CODEC_ID_REALTEXT, + AV_CODEC_ID_STL, + AV_CODEC_ID_SUBVIEWER1, + AV_CODEC_ID_SUBVIEWER, + AV_CODEC_ID_SUBRIP, + AV_CODEC_ID_WEBVTT, + AV_CODEC_ID_MPL2, + AV_CODEC_ID_VPLAYER, + AV_CODEC_ID_PJS, + AV_CODEC_ID_ASS, + AV_CODEC_ID_HDMV_TEXT_SUBTITLE, + AV_CODEC_ID_TTML, + AV_CODEC_ID_ARIB_CAPTION, + + /* other specific kind of codecs (generally used for attachments) */ + AV_CODEC_ID_FIRST_UNKNOWN = 0x18000, ///< A dummy ID pointing at the start of various fake codecs. 
+ AV_CODEC_ID_TTF = 0x18000, + + AV_CODEC_ID_SCTE_35, ///< Contain timestamp estimated through PCR of program stream. + AV_CODEC_ID_EPG, + AV_CODEC_ID_BINTEXT, + AV_CODEC_ID_XBIN, + AV_CODEC_ID_IDF, + AV_CODEC_ID_OTF, + AV_CODEC_ID_SMPTE_KLV, + AV_CODEC_ID_DVD_NAV, + AV_CODEC_ID_TIMED_ID3, + AV_CODEC_ID_BIN_DATA, + + + AV_CODEC_ID_PROBE = 0x19000, ///< codec_id is not known (like AV_CODEC_ID_NONE) but lavf should attempt to identify it + + AV_CODEC_ID_MPEG2TS = 0x20000, /**< _FAKE_ codec to indicate a raw MPEG-2 TS + * stream (only used by libavformat) */ + AV_CODEC_ID_MPEG4SYSTEMS = 0x20001, /**< _FAKE_ codec to indicate a MPEG-4 Systems + * stream (only used by libavformat) */ + AV_CODEC_ID_FFMETADATA = 0x21000, ///< Dummy codec for streams containing only metadata information. + AV_CODEC_ID_WRAPPED_AVFRAME = 0x21001, ///< Passthrough codec, AVFrames wrapped in AVPacket +}; + +/** + * Get the type of the given codec. + */ +enum AVMediaType avcodec_get_type(enum AVCodecID codec_id); + +/** + * Get the name of a codec. + * @return a static string identifying the codec; never NULL + */ +const char *avcodec_get_name(enum AVCodecID id); + +/** + * Return codec bits per sample. + * + * @param[in] codec_id the codec + * @return Number of bits per sample or zero if unknown for the given codec. + */ +int av_get_bits_per_sample(enum AVCodecID codec_id); + +/** + * Return codec bits per sample. + * Only return non-zero if the bits per sample is exactly correct, not an + * approximation. + * + * @param[in] codec_id the codec + * @return Number of bits per sample or zero if unknown for the given codec. + */ +int av_get_exact_bits_per_sample(enum AVCodecID codec_id); + +/** + * Return a name for the specified profile, if available. + * + * @param codec_id the ID of the codec to which the requested profile belongs + * @param profile the profile value for which a name is requested + * @return A name for the profile if found, NULL otherwise. 
+ * + * @note unlike av_get_profile_name(), which searches a list of profiles + * supported by a specific decoder or encoder implementation, this + * function searches the list of profiles from the AVCodecDescriptor + */ +const char *avcodec_profile_name(enum AVCodecID codec_id, int profile); + +/** + * Return the PCM codec associated with a sample format. + * @param be endianness, 0 for little, 1 for big, + * -1 (or anything else) for native + * @return AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE + */ +enum AVCodecID av_get_pcm_codec(enum AVSampleFormat fmt, int be); + +/** + * @} + */ + +#endif // AVCODEC_CODEC_ID_H diff --git a/third-party/cbs/include/cbs/codec_par.h b/third-party/cbs/include/cbs/codec_par.h new file mode 100644 index 00000000000..7660791a12e --- /dev/null +++ b/third-party/cbs/include/cbs/codec_par.h @@ -0,0 +1,246 @@ +/* + * Codec parameters public API + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_CODEC_PAR_H +#define AVCODEC_CODEC_PAR_H + +#include + +#include "libavutil/avutil.h" +#include "libavutil/channel_layout.h" +#include "libavutil/rational.h" +#include "libavutil/pixfmt.h" + +#include "codec_id.h" + +/** + * @addtogroup lavc_core + */ + +enum AVFieldOrder { + AV_FIELD_UNKNOWN, + AV_FIELD_PROGRESSIVE, + AV_FIELD_TT, //< Top coded_first, top displayed first + AV_FIELD_BB, //< Bottom coded first, bottom displayed first + AV_FIELD_TB, //< Top coded first, bottom displayed first + AV_FIELD_BT, //< Bottom coded first, top displayed first +}; + +/** + * This struct describes the properties of an encoded stream. + * + * sizeof(AVCodecParameters) is not a part of the public ABI, this struct must + * be allocated with avcodec_parameters_alloc() and freed with + * avcodec_parameters_free(). + */ +typedef struct AVCodecParameters { + /** + * General type of the encoded data. + */ + enum AVMediaType codec_type; + /** + * Specific type of the encoded data (the codec used). + */ + enum AVCodecID codec_id; + /** + * Additional information about the codec (corresponds to the AVI FOURCC). + */ + uint32_t codec_tag; + + /** + * Extra binary data needed for initializing the decoder, codec-dependent. + * + * Must be allocated with av_malloc() and will be freed by + * avcodec_parameters_free(). The allocated size of extradata must be at + * least extradata_size + AV_INPUT_BUFFER_PADDING_SIZE, with the padding + * bytes zeroed. + */ + uint8_t *extradata; + /** + * Size of the extradata content in bytes. + */ + int extradata_size; + + /** + * - video: the pixel format, the value corresponds to enum AVPixelFormat. + * - audio: the sample format, the value corresponds to enum AVSampleFormat. 
+ */ + int format; + + /** + * The average bitrate of the encoded data (in bits per second). + */ + int64_t bit_rate; + + /** + * The number of bits per sample in the codedwords. + * + * This is basically the bitrate per sample. It is mandatory for a bunch of + * formats to actually decode them. It's the number of bits for one sample in + * the actual coded bitstream. + * + * This could be for example 4 for ADPCM + * For PCM formats this matches bits_per_raw_sample + * Can be 0 + */ + int bits_per_coded_sample; + + /** + * This is the number of valid bits in each output sample. If the + * sample format has more bits, the least significant bits are additional + * padding bits, which are always 0. Use right shifts to reduce the sample + * to its actual size. For example, audio formats with 24 bit samples will + * have bits_per_raw_sample set to 24, and format set to AV_SAMPLE_FMT_S32. + * To get the original sample use "(int32_t)sample >> 8"." + * + * For ADPCM this might be 12 or 16 or similar + * Can be 0 + */ + int bits_per_raw_sample; + + /** + * Codec-specific bitstream restrictions that the stream conforms to. + */ + int profile; + int level; + + /** + * Video only. The dimensions of the video frame in pixels. + */ + int width; + int height; + + /** + * Video only. The aspect ratio (width / height) which a single pixel + * should have when displayed. + * + * When the aspect ratio is unknown / undefined, the numerator should be + * set to 0 (the denominator may have any value). + */ + AVRational sample_aspect_ratio; + + /** + * Video only. The order of the fields in interlaced video. + */ + enum AVFieldOrder field_order; + + /** + * Video only. Additional colorspace characteristics. + */ + enum AVColorRange color_range; + enum AVColorPrimaries color_primaries; + enum AVColorTransferCharacteristic color_trc; + enum AVColorSpace color_space; + enum AVChromaLocation chroma_location; + + /** + * Video only. Number of delayed frames. 
+ */ + int video_delay; + +#if FF_API_OLD_CHANNEL_LAYOUT + /** + * Audio only. The channel layout bitmask. May be 0 if the channel layout is + * unknown or unspecified, otherwise the number of bits set must be equal to + * the channels field. + * @deprecated use ch_layout + */ + attribute_deprecated + uint64_t channel_layout; + /** + * Audio only. The number of audio channels. + * @deprecated use ch_layout.nb_channels + */ + attribute_deprecated + int channels; +#endif + /** + * Audio only. The number of audio samples per second. + */ + int sample_rate; + /** + * Audio only. The number of bytes per coded audio frame, required by some + * formats. + * + * Corresponds to nBlockAlign in WAVEFORMATEX. + */ + int block_align; + /** + * Audio only. Audio frame size, if known. Required by some formats to be static. + */ + int frame_size; + + /** + * Audio only. The amount of padding (in samples) inserted by the encoder at + * the beginning of the audio. I.e. this number of leading decoded samples + * must be discarded by the caller to get the original audio without leading + * padding. + */ + int initial_padding; + /** + * Audio only. The amount of padding (in samples) appended by the encoder to + * the end of the audio. I.e. this number of decoded samples must be + * discarded by the caller from the end of the stream to get the original + * audio without any trailing padding. + */ + int trailing_padding; + /** + * Audio only. Number of samples to skip after a discontinuity. + */ + int seek_preroll; + + /** + * Audio only. The channel layout and number of channels. + */ + AVChannelLayout ch_layout; +} AVCodecParameters; + +/** + * Allocate a new AVCodecParameters and set its fields to default values + * (unknown/invalid/0). The returned struct must be freed with + * avcodec_parameters_free(). + */ +AVCodecParameters *avcodec_parameters_alloc(void); + +/** + * Free an AVCodecParameters instance and everything associated with it and + * write NULL to the supplied pointer. 
+ */ +void avcodec_parameters_free(AVCodecParameters **par); + +/** + * Copy the contents of src to dst. Any allocated fields in dst are freed and + * replaced with newly allocated duplicates of the corresponding fields in src. + * + * @return >= 0 on success, a negative AVERROR code on failure. + */ +int avcodec_parameters_copy(AVCodecParameters *dst, const AVCodecParameters *src); + +/** + * This function is the same as av_get_audio_frame_duration(), except it works + * with AVCodecParameters instead of an AVCodecContext. + */ +int av_get_audio_frame_duration2(AVCodecParameters *par, int frame_bytes); + +/** + * @} + */ + +#endif // AVCODEC_CODEC_PAR_H diff --git a/third-party/cbs/config.h b/third-party/cbs/include/cbs/config.h similarity index 96% rename from third-party/cbs/config.h rename to third-party/cbs/include/cbs/config.h index b33923f3ff2..23056826e3a 100644 --- a/third-party/cbs/config.h +++ b/third-party/cbs/include/cbs/config.h @@ -1,3 +1,4 @@ +// [manual] Copied, generated file #ifndef CBS_CONFIG_H #define CBS_CONFIG_H diff --git a/third-party/cbs/include/cbs/defs.h b/third-party/cbs/include/cbs/defs.h new file mode 100644 index 00000000000..420a042b8ff --- /dev/null +++ b/third-party/cbs/include/cbs/defs.h @@ -0,0 +1,170 @@ +/* + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_DEFS_H +#define AVCODEC_DEFS_H + +/** + * @file + * @ingroup libavc + * Misc types and constants that do not belong anywhere else. + */ + +#include +#include + +/** + * @ingroup lavc_decoding + * Required number of additionally allocated bytes at the end of the input bitstream for decoding. + * This is mainly needed because some optimized bitstream readers read + * 32 or 64 bit at once and could read over the end.
+ * Note: If the first 23 bits of the additional bytes are not 0, then damaged + * MPEG bitstreams could cause overread and segfault. + */ +#define AV_INPUT_BUFFER_PADDING_SIZE 64 + +/** + * @ingroup lavc_decoding + */ +enum AVDiscard{ + /* We leave some space between them for extensions (drop some + * keyframes for intra-only or drop just some bidir frames). */ + AVDISCARD_NONE =-16, ///< discard nothing + AVDISCARD_DEFAULT = 0, ///< discard useless packets like 0 size packets in avi + AVDISCARD_NONREF = 8, ///< discard all non reference + AVDISCARD_BIDIR = 16, ///< discard all bidirectional frames + AVDISCARD_NONINTRA= 24, ///< discard all non intra frames + AVDISCARD_NONKEY = 32, ///< discard all frames except keyframes + AVDISCARD_ALL = 48, ///< discard all +}; + +enum AVAudioServiceType { + AV_AUDIO_SERVICE_TYPE_MAIN = 0, + AV_AUDIO_SERVICE_TYPE_EFFECTS = 1, + AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED = 2, + AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED = 3, + AV_AUDIO_SERVICE_TYPE_DIALOGUE = 4, + AV_AUDIO_SERVICE_TYPE_COMMENTARY = 5, + AV_AUDIO_SERVICE_TYPE_EMERGENCY = 6, + AV_AUDIO_SERVICE_TYPE_VOICE_OVER = 7, + AV_AUDIO_SERVICE_TYPE_KARAOKE = 8, + AV_AUDIO_SERVICE_TYPE_NB , ///< Not part of ABI +}; + +/** + * Pan Scan area. + * This specifies the area which should be displayed. + * Note there may be multiple such areas for one frame. + */ +typedef struct AVPanScan { + /** + * id + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + int id; + + /** + * width and height in 1/16 pel + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + int width; + int height; + + /** + * position of the top left corner in 1/16 pel for up to 3 fields/frames + * - encoding: Set by user. + * - decoding: Set by libavcodec. + */ + int16_t position[3][2]; +} AVPanScan; + +/** + * This structure describes the bitrate properties of an encoded bitstream. It + * roughly corresponds to a subset the VBV parameters for MPEG-2 or HRD + * parameters for H.264/HEVC. 
+ */ +typedef struct AVCPBProperties { + /** + * Maximum bitrate of the stream, in bits per second. + * Zero if unknown or unspecified. + */ + int64_t max_bitrate; + /** + * Minimum bitrate of the stream, in bits per second. + * Zero if unknown or unspecified. + */ + int64_t min_bitrate; + /** + * Average bitrate of the stream, in bits per second. + * Zero if unknown or unspecified. + */ + int64_t avg_bitrate; + + /** + * The size of the buffer to which the ratecontrol is applied, in bits. + * Zero if unknown or unspecified. + */ + int64_t buffer_size; + + /** + * The delay between the time the packet this structure is associated with + * is received and the time when it should be decoded, in periods of a 27MHz + * clock. + * + * UINT64_MAX when unknown or unspecified. + */ + uint64_t vbv_delay; +} AVCPBProperties; + +/** + * Allocate a CPB properties structure and initialize its fields to default + * values. + * + * @param size if non-NULL, the size of the allocated struct will be written + * here. This is useful for embedding it in side data. + * + * @return the newly allocated struct or NULL on failure + */ +AVCPBProperties *av_cpb_properties_alloc(size_t *size); + +/** + * This structure supplies correlation between a packet timestamp and a wall clock + * production time. The definition follows the Producer Reference Time ('prft') + * as defined in ISO/IEC 14496-12 + */ +typedef struct AVProducerReferenceTime { + /** + * A UTC timestamp, in microseconds, since Unix epoch (e.g, av_gettime()). + */ + int64_t wallclock; + int flags; +} AVProducerReferenceTime; + +/** + * Encode extradata length to a buffer. Used by xiph codecs. + * + * @param s buffer to write to; must be at least (v/255+1) bytes long + * @param v size of extradata in bytes + * @return number of bytes written to the buffer. 
+ */ +unsigned int av_xiphlacing(unsigned char *s, unsigned int v); + +#endif // AVCODEC_DEFS_H diff --git a/third-party/cbs/include/cbs/get_bits.h b/third-party/cbs/include/cbs/get_bits.h new file mode 100644 index 00000000000..992765dc92c --- /dev/null +++ b/third-party/cbs/include/cbs/get_bits.h @@ -0,0 +1,858 @@ +/* + * Copyright (c) 2004 Michael Niedermayer + * Copyright (c) 2016 Alexandra Hájková + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * bitstream reader API header. + */ + +#ifndef AVCODEC_GET_BITS_H +#define AVCODEC_GET_BITS_H + +#include + +#include "libavutil/common.h" +#include "libavutil/intreadwrite.h" +#include "libavutil/avassert.h" + +#include "defs.h" +#include "mathops.h" +#include "vlc.h" + +/* + * Safe bitstream reading: + * optionally, the get_bits API can check to ensure that we + * don't read past input buffer boundaries. This is protected + * with CONFIG_SAFE_BITSTREAM_READER at the global level, and + * then below that with UNCHECKED_BITSTREAM_READER at the per- + * decoder level. This means that decoders that check internally + * can "#define UNCHECKED_BITSTREAM_READER 1" to disable + * overread checks. 
+ * Boundary checking causes a minor performance penalty so for + * applications that won't want/need this, it can be disabled + * globally using "#define CONFIG_SAFE_BITSTREAM_READER 0". + */ +#ifndef UNCHECKED_BITSTREAM_READER +#define UNCHECKED_BITSTREAM_READER !CONFIG_SAFE_BITSTREAM_READER +#endif + +#ifndef CACHED_BITSTREAM_READER +#define CACHED_BITSTREAM_READER 0 +#endif + +typedef struct GetBitContext { + const uint8_t *buffer, *buffer_end; +#if CACHED_BITSTREAM_READER + uint64_t cache; + unsigned bits_left; +#endif + int index; + int size_in_bits; + int size_in_bits_plus8; +} GetBitContext; + +static inline unsigned int get_bits(GetBitContext *s, int n); +static inline void skip_bits(GetBitContext *s, int n); +static inline unsigned int show_bits(GetBitContext *s, int n); + +/* Bitstream reader API docs: + * name + * arbitrary name which is used as prefix for the internal variables + * + * gb + * getbitcontext + * + * OPEN_READER(name, gb) + * load gb into local variables + * + * CLOSE_READER(name, gb) + * store local vars in gb + * + * UPDATE_CACHE(name, gb) + * Refill the internal cache from the bitstream. + * After this call at least MIN_CACHE_BITS will be available. + * + * GET_CACHE(name, gb) + * Will output the contents of the internal cache, + * next bit is MSB of 32 or 64 bits (FIXME 64 bits). + * + * SHOW_UBITS(name, gb, num) + * Will return the next num bits. + * + * SHOW_SBITS(name, gb, num) + * Will return the next num bits and do sign extension. + * + * SKIP_BITS(name, gb, num) + * Will skip over the next num bits. + * Note, this is equivalent to SKIP_CACHE; SKIP_COUNTER. + * + * SKIP_CACHE(name, gb, num) + * Will remove the next num bits from the cache (note SKIP_COUNTER + * MUST be called before UPDATE_CACHE / CLOSE_READER). + * + * SKIP_COUNTER(name, gb, num) + * Will increment the internal bit counter (see SKIP_CACHE & SKIP_BITS). 
+ * + * LAST_SKIP_BITS(name, gb, num) + * Like SKIP_BITS, to be used if next call is UPDATE_CACHE or CLOSE_READER. + * + * BITS_LEFT(name, gb) + * Return the number of bits left + * + * For examples see get_bits, show_bits, skip_bits, get_vlc. + */ + +#if CACHED_BITSTREAM_READER +# define MIN_CACHE_BITS 64 +#elif defined LONG_BITSTREAM_READER +# define MIN_CACHE_BITS 32 +#else +# define MIN_CACHE_BITS 25 +#endif + +#if !CACHED_BITSTREAM_READER + +#define OPEN_READER_NOSIZE(name, gb) \ + unsigned int name ## _index = (gb)->index; \ + unsigned int av_unused name ## _cache + +#if UNCHECKED_BITSTREAM_READER +#define OPEN_READER(name, gb) OPEN_READER_NOSIZE(name, gb) + +#define BITS_AVAILABLE(name, gb) 1 +#else +#define OPEN_READER(name, gb) \ + OPEN_READER_NOSIZE(name, gb); \ + unsigned int name ## _size_plus8 = (gb)->size_in_bits_plus8 + +#define BITS_AVAILABLE(name, gb) name ## _index < name ## _size_plus8 +#endif + +#define CLOSE_READER(name, gb) (gb)->index = name ## _index + +# ifdef LONG_BITSTREAM_READER + +# define UPDATE_CACHE_LE(name, gb) name ## _cache = \ + AV_RL64((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7) + +# define UPDATE_CACHE_BE(name, gb) name ## _cache = \ + AV_RB64((gb)->buffer + (name ## _index >> 3)) >> (32 - (name ## _index & 7)) + +#else + +# define UPDATE_CACHE_LE(name, gb) name ## _cache = \ + AV_RL32((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7) + +# define UPDATE_CACHE_BE(name, gb) name ## _cache = \ + AV_RB32((gb)->buffer + (name ## _index >> 3)) << (name ## _index & 7) + +#endif + + +#ifdef BITSTREAM_READER_LE + +# define UPDATE_CACHE(name, gb) UPDATE_CACHE_LE(name, gb) + +# define SKIP_CACHE(name, gb, num) name ## _cache >>= (num) + +#else + +# define UPDATE_CACHE(name, gb) UPDATE_CACHE_BE(name, gb) + +# define SKIP_CACHE(name, gb, num) name ## _cache <<= (num) + +#endif + +#if UNCHECKED_BITSTREAM_READER +# define SKIP_COUNTER(name, gb, num) name ## _index += (num) +#else +# define SKIP_COUNTER(name, gb, 
num) \ + name ## _index = FFMIN(name ## _size_plus8, name ## _index + (num)) +#endif + +#define BITS_LEFT(name, gb) ((int)((gb)->size_in_bits - name ## _index)) + +#define SKIP_BITS(name, gb, num) \ + do { \ + SKIP_CACHE(name, gb, num); \ + SKIP_COUNTER(name, gb, num); \ + } while (0) + +#define LAST_SKIP_BITS(name, gb, num) SKIP_COUNTER(name, gb, num) + +#define SHOW_UBITS_LE(name, gb, num) zero_extend(name ## _cache, num) +#define SHOW_SBITS_LE(name, gb, num) sign_extend(name ## _cache, num) + +#define SHOW_UBITS_BE(name, gb, num) NEG_USR32(name ## _cache, num) +#define SHOW_SBITS_BE(name, gb, num) NEG_SSR32(name ## _cache, num) + +#ifdef BITSTREAM_READER_LE +# define SHOW_UBITS(name, gb, num) SHOW_UBITS_LE(name, gb, num) +# define SHOW_SBITS(name, gb, num) SHOW_SBITS_LE(name, gb, num) +#else +# define SHOW_UBITS(name, gb, num) SHOW_UBITS_BE(name, gb, num) +# define SHOW_SBITS(name, gb, num) SHOW_SBITS_BE(name, gb, num) +#endif + +#define GET_CACHE(name, gb) ((uint32_t) name ## _cache) + +#endif + +static inline int get_bits_count(const GetBitContext *s) +{ +#if CACHED_BITSTREAM_READER + return s->index - s->bits_left; +#else + return s->index; +#endif +} + +#if CACHED_BITSTREAM_READER +static inline void refill_32(GetBitContext *s, int is_le) +{ +#if !UNCHECKED_BITSTREAM_READER + if (s->index >> 3 >= s->buffer_end - s->buffer) + return; +#endif + + if (is_le) + s->cache = (uint64_t)AV_RL32(s->buffer + (s->index >> 3)) << s->bits_left | s->cache; + else + s->cache = s->cache | (uint64_t)AV_RB32(s->buffer + (s->index >> 3)) << (32 - s->bits_left); + s->index += 32; + s->bits_left += 32; +} + +static inline void refill_64(GetBitContext *s, int is_le) +{ +#if !UNCHECKED_BITSTREAM_READER + if (s->index >> 3 >= s->buffer_end - s->buffer) + return; +#endif + + if (is_le) + s->cache = AV_RL64(s->buffer + (s->index >> 3)); + else + s->cache = AV_RB64(s->buffer + (s->index >> 3)); + s->index += 64; + s->bits_left = 64; +} + +static inline uint64_t get_val(GetBitContext 
*s, unsigned n, int is_le) +{ + uint64_t ret; + av_assert2(n>0 && n<=63); + if (is_le) { + ret = s->cache & ((UINT64_C(1) << n) - 1); + s->cache >>= n; + } else { + ret = s->cache >> (64 - n); + s->cache <<= n; + } + s->bits_left -= n; + return ret; +} + +static inline unsigned show_val(const GetBitContext *s, unsigned n) +{ +#ifdef BITSTREAM_READER_LE + return s->cache & ((UINT64_C(1) << n) - 1); +#else + return s->cache >> (64 - n); +#endif +} +#endif + +/** + * Skips the specified number of bits. + * @param n the number of bits to skip, + * For the UNCHECKED_BITSTREAM_READER this must not cause the distance + * from the start to overflow int32_t. Staying within the bitstream + padding + * is sufficient, too. + */ +static inline void skip_bits_long(GetBitContext *s, int n) +{ +#if CACHED_BITSTREAM_READER + skip_bits(s, n); +#else +#if UNCHECKED_BITSTREAM_READER + s->index += n; +#else + s->index += av_clip(n, -s->index, s->size_in_bits_plus8 - s->index); +#endif +#endif +} + +#if CACHED_BITSTREAM_READER +static inline void skip_remaining(GetBitContext *s, unsigned n) +{ +#ifdef BITSTREAM_READER_LE + s->cache >>= n; +#else + s->cache <<= n; +#endif + s->bits_left -= n; +} +#endif + +/** + * Read MPEG-1 dc-style VLC (sign bit + mantissa with no MSB). 
+ * if MSB not set it is negative + * @param n length in bits + */ +static inline int get_xbits(GetBitContext *s, int n) +{ +#if CACHED_BITSTREAM_READER + int32_t cache = show_bits(s, 32); + int sign = ~cache >> 31; + skip_remaining(s, n); + + return ((((uint32_t)(sign ^ cache)) >> (32 - n)) ^ sign) - sign; +#else + register int sign; + register int32_t cache; + OPEN_READER(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE(re, s); + cache = GET_CACHE(re, s); + sign = ~cache >> 31; + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); + return (NEG_USR32(sign ^ cache, n) ^ sign) - sign; +#endif +} + +#if !CACHED_BITSTREAM_READER +static inline int get_xbits_le(GetBitContext *s, int n) +{ + register int sign; + register int32_t cache; + OPEN_READER(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE_LE(re, s); + cache = GET_CACHE(re, s); + sign = sign_extend(~cache, n) >> 31; + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); + return (zero_extend(sign ^ cache, n) ^ sign) - sign; +} +#endif + +static inline int get_sbits(GetBitContext *s, int n) +{ + register int tmp; +#if CACHED_BITSTREAM_READER + av_assert2(n>0 && n<=25); + tmp = sign_extend(get_bits(s, n), n); +#else + OPEN_READER(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE(re, s); + tmp = SHOW_SBITS(re, s, n); + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); +#endif + return tmp; +} + +/** + * Read 1-25 bits. 
+ */ +static inline unsigned int get_bits(GetBitContext *s, int n) +{ + register unsigned int tmp; +#if CACHED_BITSTREAM_READER + + av_assert2(n>0 && n<=32); + if (n > s->bits_left) { +#ifdef BITSTREAM_READER_LE + refill_32(s, 1); +#else + refill_32(s, 0); +#endif + if (s->bits_left < 32) + s->bits_left = n; + } + +#ifdef BITSTREAM_READER_LE + tmp = get_val(s, n, 1); +#else + tmp = get_val(s, n, 0); +#endif +#else + OPEN_READER(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE(re, s); + tmp = SHOW_UBITS(re, s, n); + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); +#endif + av_assert2(tmp < UINT64_C(1) << n); + return tmp; +} + +/** + * Read 0-25 bits. + */ +static av_always_inline int get_bitsz(GetBitContext *s, int n) +{ + return n ? get_bits(s, n) : 0; +} + +static inline unsigned int get_bits_le(GetBitContext *s, int n) +{ +#if CACHED_BITSTREAM_READER + av_assert2(n>0 && n<=32); + if (n > s->bits_left) { + refill_32(s, 1); + if (s->bits_left < 32) + s->bits_left = n; + } + + return get_val(s, n, 1); +#else + register int tmp; + OPEN_READER(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE_LE(re, s); + tmp = SHOW_UBITS_LE(re, s, n); + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); + return tmp; +#endif +} + +/** + * Show 1-25 bits. 
+ */ +static inline unsigned int show_bits(GetBitContext *s, int n) +{ + register unsigned int tmp; +#if CACHED_BITSTREAM_READER + if (n > s->bits_left) +#ifdef BITSTREAM_READER_LE + refill_32(s, 1); +#else + refill_32(s, 0); +#endif + + tmp = show_val(s, n); +#else + OPEN_READER_NOSIZE(re, s); + av_assert2(n>0 && n<=25); + UPDATE_CACHE(re, s); + tmp = SHOW_UBITS(re, s, n); +#endif + return tmp; +} + +static inline void skip_bits(GetBitContext *s, int n) +{ +#if CACHED_BITSTREAM_READER + if (n < s->bits_left) + skip_remaining(s, n); + else { + n -= s->bits_left; + s->cache = 0; + s->bits_left = 0; + + if (n >= 64) { + unsigned skip = (n / 8) * 8; + + n -= skip; + s->index += skip; + } +#ifdef BITSTREAM_READER_LE + refill_64(s, 1); +#else + refill_64(s, 0); +#endif + if (n) + skip_remaining(s, n); + } +#else + OPEN_READER(re, s); + LAST_SKIP_BITS(re, s, n); + CLOSE_READER(re, s); +#endif +} + +static inline unsigned int get_bits1(GetBitContext *s) +{ +#if CACHED_BITSTREAM_READER + if (!s->bits_left) +#ifdef BITSTREAM_READER_LE + refill_64(s, 1); +#else + refill_64(s, 0); +#endif + +#ifdef BITSTREAM_READER_LE + return get_val(s, 1, 1); +#else + return get_val(s, 1, 0); +#endif +#else + unsigned int index = s->index; + uint8_t result = s->buffer[index >> 3]; +#ifdef BITSTREAM_READER_LE + result >>= index & 7; + result &= 1; +#else + result <<= index & 7; + result >>= 8 - 1; +#endif +#if !UNCHECKED_BITSTREAM_READER + if (s->index < s->size_in_bits_plus8) +#endif + index++; + s->index = index; + + return result; +#endif +} + +static inline unsigned int show_bits1(GetBitContext *s) +{ + return show_bits(s, 1); +} + +static inline void skip_bits1(GetBitContext *s) +{ + skip_bits(s, 1); +} + +/** + * Read 0-32 bits. 
+ */ +static inline unsigned int get_bits_long(GetBitContext *s, int n) +{ + av_assert2(n>=0 && n<=32); + if (!n) { + return 0; +#if CACHED_BITSTREAM_READER + } + return get_bits(s, n); +#else + } else if (n <= MIN_CACHE_BITS) { + return get_bits(s, n); + } else { +#ifdef BITSTREAM_READER_LE + unsigned ret = get_bits(s, 16); + return ret | (get_bits(s, n - 16) << 16); +#else + unsigned ret = get_bits(s, 16) << (n - 16); + return ret | get_bits(s, n - 16); +#endif + } +#endif +} + +/** + * Read 0-64 bits. + */ +static inline uint64_t get_bits64(GetBitContext *s, int n) +{ + if (n <= 32) { + return get_bits_long(s, n); + } else { +#ifdef BITSTREAM_READER_LE + uint64_t ret = get_bits_long(s, 32); + return ret | (uint64_t) get_bits_long(s, n - 32) << 32; +#else + uint64_t ret = (uint64_t) get_bits_long(s, n - 32) << 32; + return ret | get_bits_long(s, 32); +#endif + } +} + +/** + * Read 0-32 bits as a signed integer. + */ +static inline int get_sbits_long(GetBitContext *s, int n) +{ + // sign_extend(x, 0) is undefined + if (!n) + return 0; + + return sign_extend(get_bits_long(s, n), n); +} + +/** + * Show 0-32 bits. + */ +static inline unsigned int show_bits_long(GetBitContext *s, int n) +{ + if (n <= MIN_CACHE_BITS) { + return show_bits(s, n); + } else { + GetBitContext gb = *s; + return get_bits_long(&gb, n); + } +} + +static inline int init_get_bits_xe(GetBitContext *s, const uint8_t *buffer, + int bit_size, int is_le) +{ + int buffer_size; + int ret = 0; + + if (bit_size >= INT_MAX - FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE*8) || bit_size < 0 || !buffer) { + bit_size = 0; + buffer = NULL; + ret = AVERROR_INVALIDDATA; + } + + buffer_size = (bit_size + 7) >> 3; + + s->buffer = buffer; + s->size_in_bits = bit_size; + s->size_in_bits_plus8 = bit_size + 8; + s->buffer_end = buffer + buffer_size; + s->index = 0; + +#if CACHED_BITSTREAM_READER + s->cache = 0; + s->bits_left = 0; + refill_64(s, is_le); +#endif + + return ret; +} + +/** + * Initialize GetBitContext. 
+ * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes + * larger than the actual read bits because some optimized bitstream + * readers read 32 or 64 bit at once and could read over the end + * @param bit_size the size of the buffer in bits + * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. + */ +static inline int init_get_bits(GetBitContext *s, const uint8_t *buffer, + int bit_size) +{ +#ifdef BITSTREAM_READER_LE + return init_get_bits_xe(s, buffer, bit_size, 1); +#else + return init_get_bits_xe(s, buffer, bit_size, 0); +#endif +} + +/** + * Initialize GetBitContext. + * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes + * larger than the actual read bits because some optimized bitstream + * readers read 32 or 64 bit at once and could read over the end + * @param byte_size the size of the buffer in bytes + * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. + */ +static inline int init_get_bits8(GetBitContext *s, const uint8_t *buffer, + int byte_size) +{ + if (byte_size > INT_MAX / 8 || byte_size < 0) + byte_size = -1; + return init_get_bits(s, buffer, byte_size * 8); +} + +static inline int init_get_bits8_le(GetBitContext *s, const uint8_t *buffer, + int byte_size) +{ + if (byte_size > INT_MAX / 8 || byte_size < 0) + byte_size = -1; + return init_get_bits_xe(s, buffer, byte_size * 8, 1); +} + +static inline const uint8_t *align_get_bits(GetBitContext *s) +{ + int n = -get_bits_count(s) & 7; + if (n) + skip_bits(s, n); + return s->buffer + (s->index >> 3); +} + +/** + * If the vlc code is invalid and max_depth=1, then no bits will be removed. + * If the vlc code is invalid and max_depth>1, then the number of bits removed + * is undefined. 
+ */ +#define GET_VLC(code, name, gb, table, bits, max_depth) \ + do { \ + int n, nb_bits; \ + unsigned int index; \ + \ + index = SHOW_UBITS(name, gb, bits); \ + code = table[index].sym; \ + n = table[index].len; \ + \ + if (max_depth > 1 && n < 0) { \ + LAST_SKIP_BITS(name, gb, bits); \ + UPDATE_CACHE(name, gb); \ + \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + code; \ + code = table[index].sym; \ + n = table[index].len; \ + if (max_depth > 2 && n < 0) { \ + LAST_SKIP_BITS(name, gb, nb_bits); \ + UPDATE_CACHE(name, gb); \ + \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + code; \ + code = table[index].sym; \ + n = table[index].len; \ + } \ + } \ + SKIP_BITS(name, gb, n); \ + } while (0) + +#define GET_RL_VLC(level, run, name, gb, table, bits, \ + max_depth, need_update) \ + do { \ + int n, nb_bits; \ + unsigned int index; \ + \ + index = SHOW_UBITS(name, gb, bits); \ + level = table[index].level; \ + n = table[index].len; \ + \ + if (max_depth > 1 && n < 0) { \ + SKIP_BITS(name, gb, bits); \ + if (need_update) { \ + UPDATE_CACHE(name, gb); \ + } \ + \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + level; \ + level = table[index].level; \ + n = table[index].len; \ + if (max_depth > 2 && n < 0) { \ + LAST_SKIP_BITS(name, gb, nb_bits); \ + if (need_update) { \ + UPDATE_CACHE(name, gb); \ + } \ + nb_bits = -n; \ + \ + index = SHOW_UBITS(name, gb, nb_bits) + level; \ + level = table[index].level; \ + n = table[index].len; \ + } \ + } \ + run = table[index].run; \ + SKIP_BITS(name, gb, n); \ + } while (0) + +/* Return the LUT element for the given bitstream configuration. */ +static inline int set_idx(GetBitContext *s, int code, int *n, int *nb_bits, + const VLCElem *table) +{ + unsigned idx; + + *nb_bits = -*n; + idx = show_bits(s, *nb_bits) + code; + *n = table[idx].len; + + return table[idx].sym; +} + +/** + * Parse a vlc code. 
+ * @param bits is the number of bits which will be read at once, must be + * identical to nb_bits in init_vlc() + * @param max_depth is the number of times bits bits must be read to completely + * read the longest vlc code + * = (max_vlc_length + bits - 1) / bits + * @returns the code parsed or -1 if no vlc matches + */ +static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, + int bits, int max_depth) +{ +#if CACHED_BITSTREAM_READER + int nb_bits; + unsigned idx = show_bits(s, bits); + int code = table[idx].sym; + int n = table[idx].len; + + if (max_depth > 1 && n < 0) { + skip_remaining(s, bits); + code = set_idx(s, code, &n, &nb_bits, table); + if (max_depth > 2 && n < 0) { + skip_remaining(s, nb_bits); + code = set_idx(s, code, &n, &nb_bits, table); + } + } + skip_remaining(s, n); + + return code; +#else + int code; + + OPEN_READER(re, s); + UPDATE_CACHE(re, s); + + GET_VLC(code, re, s, table, bits, max_depth); + + CLOSE_READER(re, s); + + return code; +#endif +} + +static inline int decode012(GetBitContext *gb) +{ + int n; + n = get_bits1(gb); + if (n == 0) + return 0; + else + return get_bits1(gb) + 1; +} + +static inline int decode210(GetBitContext *gb) +{ + if (get_bits1(gb)) + return 0; + else + return 2 - get_bits1(gb); +} + +static inline int get_bits_left(GetBitContext *gb) +{ + return gb->size_in_bits - get_bits_count(gb); +} + +static inline int skip_1stop_8data_bits(GetBitContext *gb) +{ + if (get_bits_left(gb) <= 0) + return AVERROR_INVALIDDATA; + + while (get_bits1(gb)) { + skip_bits(gb, 8); + if (get_bits_left(gb) <= 0) + return AVERROR_INVALIDDATA; + } + + return 0; +} + +#endif /* AVCODEC_GET_BITS_H */ diff --git a/third-party/cbs/include/cbs/h264.h b/third-party/cbs/include/cbs/h264.h index 7fc4d07e26a..7a1fb6d6879 100644 --- a/third-party/cbs/include/cbs/h264.h +++ b/third-party/cbs/include/cbs/h264.h @@ -24,89 +24,89 @@ #ifndef AVCODEC_H264_H #define AVCODEC_H264_H -#define QP_MAX_NUM (51 + 6 * 6) // The maximum 
supported qp +#define QP_MAX_NUM (51 + 6*6) // The maximum supported qp /* * Table 7-1 – NAL unit type codes, syntax element categories, and NAL unit type classes in * T-REC-H.264-201704 */ enum { - H264_NAL_UNSPECIFIED = 0, - H264_NAL_SLICE = 1, - H264_NAL_DPA = 2, - H264_NAL_DPB = 3, - H264_NAL_DPC = 4, - H264_NAL_IDR_SLICE = 5, - H264_NAL_SEI = 6, - H264_NAL_SPS = 7, - H264_NAL_PPS = 8, - H264_NAL_AUD = 9, - H264_NAL_END_SEQUENCE = 10, - H264_NAL_END_STREAM = 11, - H264_NAL_FILLER_DATA = 12, - H264_NAL_SPS_EXT = 13, - H264_NAL_PREFIX = 14, - H264_NAL_SUB_SPS = 15, - H264_NAL_DPS = 16, - H264_NAL_RESERVED17 = 17, - H264_NAL_RESERVED18 = 18, - H264_NAL_AUXILIARY_SLICE = 19, - H264_NAL_EXTEN_SLICE = 20, - H264_NAL_DEPTH_EXTEN_SLICE = 21, - H264_NAL_RESERVED22 = 22, - H264_NAL_RESERVED23 = 23, - H264_NAL_UNSPECIFIED24 = 24, - H264_NAL_UNSPECIFIED25 = 25, - H264_NAL_UNSPECIFIED26 = 26, - H264_NAL_UNSPECIFIED27 = 27, - H264_NAL_UNSPECIFIED28 = 28, - H264_NAL_UNSPECIFIED29 = 29, - H264_NAL_UNSPECIFIED30 = 30, - H264_NAL_UNSPECIFIED31 = 31, + H264_NAL_UNSPECIFIED = 0, + H264_NAL_SLICE = 1, + H264_NAL_DPA = 2, + H264_NAL_DPB = 3, + H264_NAL_DPC = 4, + H264_NAL_IDR_SLICE = 5, + H264_NAL_SEI = 6, + H264_NAL_SPS = 7, + H264_NAL_PPS = 8, + H264_NAL_AUD = 9, + H264_NAL_END_SEQUENCE = 10, + H264_NAL_END_STREAM = 11, + H264_NAL_FILLER_DATA = 12, + H264_NAL_SPS_EXT = 13, + H264_NAL_PREFIX = 14, + H264_NAL_SUB_SPS = 15, + H264_NAL_DPS = 16, + H264_NAL_RESERVED17 = 17, + H264_NAL_RESERVED18 = 18, + H264_NAL_AUXILIARY_SLICE = 19, + H264_NAL_EXTEN_SLICE = 20, + H264_NAL_DEPTH_EXTEN_SLICE = 21, + H264_NAL_RESERVED22 = 22, + H264_NAL_RESERVED23 = 23, + H264_NAL_UNSPECIFIED24 = 24, + H264_NAL_UNSPECIFIED25 = 25, + H264_NAL_UNSPECIFIED26 = 26, + H264_NAL_UNSPECIFIED27 = 27, + H264_NAL_UNSPECIFIED28 = 28, + H264_NAL_UNSPECIFIED29 = 29, + H264_NAL_UNSPECIFIED30 = 30, + H264_NAL_UNSPECIFIED31 = 31, }; enum { - // 7.4.2.1.1: seq_parameter_set_id is in [0, 31]. 
- H264_MAX_SPS_COUNT = 32, - // 7.4.2.2: pic_parameter_set_id is in [0, 255]. - H264_MAX_PPS_COUNT = 256, + // 7.4.2.1.1: seq_parameter_set_id is in [0, 31]. + H264_MAX_SPS_COUNT = 32, + // 7.4.2.2: pic_parameter_set_id is in [0, 255]. + H264_MAX_PPS_COUNT = 256, - // A.3: MaxDpbFrames is bounded above by 16. - H264_MAX_DPB_FRAMES = 16, - // 7.4.2.1.1: max_num_ref_frames is in [0, MaxDpbFrames], and - // each reference frame can have two fields. - H264_MAX_REFS = 2 * H264_MAX_DPB_FRAMES, + // A.3: MaxDpbFrames is bounded above by 16. + H264_MAX_DPB_FRAMES = 16, + // 7.4.2.1.1: max_num_ref_frames is in [0, MaxDpbFrames], and + // each reference frame can have two fields. + H264_MAX_REFS = 2 * H264_MAX_DPB_FRAMES, - // 7.4.3.1: modification_of_pic_nums_idc is not equal to 3 at most - // num_ref_idx_lN_active_minus1 + 1 times (that is, once for each - // possible reference), then equal to 3 once. - H264_MAX_RPLM_COUNT = H264_MAX_REFS + 1, + // 7.4.3.1: modification_of_pic_nums_idc is not equal to 3 at most + // num_ref_idx_lN_active_minus1 + 1 times (that is, once for each + // possible reference), then equal to 3 once. + H264_MAX_RPLM_COUNT = H264_MAX_REFS + 1, - // 7.4.3.3: in the worst case, we begin with a full short-term - // reference picture list. Each picture in turn is moved to the - // long-term list (type 3) and then discarded from there (type 2). - // Then, we set the length of the long-term list (type 4), mark - // the current picture as long-term (type 6) and terminate the - // process (type 0). - H264_MAX_MMCO_COUNT = H264_MAX_REFS * 2 + 3, + // 7.4.3.3: in the worst case, we begin with a full short-term + // reference picture list. Each picture in turn is moved to the + // long-term list (type 3) and then discarded from there (type 2). + // Then, we set the length of the long-term list (type 4), mark + // the current picture as long-term (type 6) and terminate the + // process (type 0). 
+ H264_MAX_MMCO_COUNT = H264_MAX_REFS * 2 + 3, - // A.2.1, A.2.3: profiles supporting FMO constrain - // num_slice_groups_minus1 to be in [0, 7]. - H264_MAX_SLICE_GROUPS = 8, + // A.2.1, A.2.3: profiles supporting FMO constrain + // num_slice_groups_minus1 to be in [0, 7]. + H264_MAX_SLICE_GROUPS = 8, - // E.2.2: cpb_cnt_minus1 is in [0, 31]. - H264_MAX_CPB_CNT = 32, + // E.2.2: cpb_cnt_minus1 is in [0, 31]. + H264_MAX_CPB_CNT = 32, - // A.3: in table A-1 the highest level allows a MaxFS of 139264. - H264_MAX_MB_PIC_SIZE = 139264, - // A.3.1, A.3.2: PicWidthInMbs and PicHeightInMbs are constrained - // to be not greater than sqrt(MaxFS * 8). Hence height/width are - // bounded above by sqrt(139264 * 8) = 1055.5 macroblocks. - H264_MAX_MB_WIDTH = 1055, - H264_MAX_MB_HEIGHT = 1055, - H264_MAX_WIDTH = H264_MAX_MB_WIDTH * 16, - H264_MAX_HEIGHT = H264_MAX_MB_HEIGHT * 16, + // A.3: in table A-1 the highest level allows a MaxFS of 139264. + H264_MAX_MB_PIC_SIZE = 139264, + // A.3.1, A.3.2: PicWidthInMbs and PicHeightInMbs are constrained + // to be not greater than sqrt(MaxFS * 8). Hence height/width are + // bounded above by sqrt(139264 * 8) = 1055.5 macroblocks. 
+ H264_MAX_MB_WIDTH = 1055, + H264_MAX_MB_HEIGHT = 1055, + H264_MAX_WIDTH = H264_MAX_MB_WIDTH * 16, + H264_MAX_HEIGHT = H264_MAX_MB_HEIGHT * 16, }; diff --git a/third-party/cbs/include/cbs/h2645_parse.h b/third-party/cbs/include/cbs/h2645_parse.h index 4b445e01344..787ce971ee4 100644 --- a/third-party/cbs/include/cbs/h2645_parse.h +++ b/third-party/cbs/include/cbs/h2645_parse.h @@ -23,89 +23,75 @@ #include -#include -#include +#include "libavutil/buffer.h" +#include "libavutil/error.h" +#include "libavutil/log.h" +#include "codec_id.h" +#include "get_bits.h" - -/** - * CACHED_BITSTREAM_READER can only be true if it's used by a decoder - * Thus, Sunshine doesn't need to worry about it - */ -typedef struct GetBitContext { - const uint8_t *buffer, *buffer_end; -#if CACHED_BITSTREAM_READER - uint64_t cache; - unsigned bits_left; -#endif - int index; - int size_in_bits; - int size_in_bits_plus8; -} GetBitContext; - -#define MAX_MBPAIR_SIZE (256 * 1024) // a tighter bound could be calculated if someone cares about a few bytes +#define MAX_MBPAIR_SIZE (256*1024) // a tighter bound could be calculated if someone cares about a few bytes typedef struct H2645NAL { - uint8_t *rbsp_buffer; + const uint8_t *data; + int size; - int size; - const uint8_t *data; - - /** + /** * Size, in bits, of just the data, excluding the stop bit and any trailing * padding. I.e. what HEVC calls SODB. 
*/ - int size_bits; + int size_bits; - int raw_size; - const uint8_t *raw_data; + int raw_size; + const uint8_t *raw_data; - GetBitContext gb; + GetBitContext gb; - /** + /** * NAL unit type */ - int type; + int type; - /** + /** + * H.264 only, nal_ref_idc + */ + int ref_idc; + + /** * HEVC only, nuh_temporal_id_plus_1 - 1 */ - int temporal_id; + int temporal_id; - /* + /* * HEVC only, identifier of layer to which nal unit belongs */ - int nuh_layer_id; + int nuh_layer_id; - int skipped_bytes; - int skipped_bytes_pos_size; - int *skipped_bytes_pos; - /** - * H.264 only, nal_ref_idc - */ - int ref_idc; + int skipped_bytes; + int skipped_bytes_pos_size; + int *skipped_bytes_pos; } H2645NAL; typedef struct H2645RBSP { - uint8_t *rbsp_buffer; - AVBufferRef *rbsp_buffer_ref; - int rbsp_buffer_alloc_size; - int rbsp_buffer_size; + uint8_t *rbsp_buffer; + AVBufferRef *rbsp_buffer_ref; + int rbsp_buffer_alloc_size; + int rbsp_buffer_size; } H2645RBSP; /* an input packet split into unescaped NAL units */ typedef struct H2645Packet { - H2645NAL *nals; - H2645RBSP rbsp; - int nb_nals; - int nals_allocated; - unsigned nal_buffer_size; + H2645NAL *nals; + H2645RBSP rbsp; + int nb_nals; + int nals_allocated; + unsigned nal_buffer_size; } H2645Packet; /** * Extract the raw (unescaped) bitstream. */ int ff_h2645_extract_rbsp(const uint8_t *src, int length, H2645RBSP *rbsp, - H2645NAL *nal, int small_padding); + H2645NAL *nal, int small_padding); /** * Split an input packet into NAL units. @@ -122,8 +108,8 @@ int ff_h2645_extract_rbsp(const uint8_t *src, int length, H2645RBSP *rbsp, * the underlying AVBuffer of rbsp_buffer_ref. */ int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, - void *logctx, int is_nalff, int nal_length_size, - enum AVCodecID codec_id, int small_padding, int use_ref); + void *logctx, int is_nalff, int nal_length_size, + enum AVCodecID codec_id, int small_padding, int use_ref); /** * Free all the allocated memory in the packet. 
@@ -131,22 +117,23 @@ int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, void ff_h2645_packet_uninit(H2645Packet *pkt); static inline int get_nalsize(int nal_length_size, const uint8_t *buf, - int buf_size, int *buf_index, void *logctx) { - int i, nalsize = 0; - - if(*buf_index >= buf_size - nal_length_size) { - // the end of the buffer is reached, refill it - return AVERROR(EAGAIN); - } - - for(i = 0; i < nal_length_size; i++) - nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++]; - if(nalsize <= 0 || nalsize > buf_size - *buf_index) { - av_log(logctx, AV_LOG_ERROR, - "Invalid NAL unit size (%d > %d).\n", nalsize, buf_size - *buf_index); - return AVERROR_INVALIDDATA; - } - return nalsize; + int buf_size, int *buf_index, void *logctx) +{ + int i, nalsize = 0; + + if (*buf_index >= buf_size - nal_length_size) { + // the end of the buffer is reached, refill it + return AVERROR(EAGAIN); + } + + for (i = 0; i < nal_length_size; i++) + nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++]; + if (nalsize <= 0 || nalsize > buf_size - *buf_index) { + av_log(logctx, AV_LOG_ERROR, + "Invalid NAL unit size (%d > %d).\n", nalsize, buf_size - *buf_index); + return AVERROR_INVALIDDATA; + } + return nalsize; } #endif /* AVCODEC_H2645_PARSE_H */ diff --git a/third-party/cbs/include/cbs/h264_levels.h b/third-party/cbs/include/cbs/h264_levels.h new file mode 100644 index 00000000000..310d79e51a2 --- /dev/null +++ b/third-party/cbs/include/cbs/h264_levels.h @@ -0,0 +1,51 @@ +/* + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_H264_LEVELS_H +#define AVCODEC_H264_LEVELS_H + + +#include + +typedef struct H264LevelDescriptor { + char name[4]; // Large enough for all current levels like "4.1" + uint8_t level_idc; + uint8_t constraint_set3_flag; + uint32_t max_mbps; + uint32_t max_fs; + uint32_t max_dpb_mbs; + uint32_t max_br; + uint32_t max_cpb; + uint16_t max_v_mv_r; + uint8_t min_cr; + uint8_t max_mvs_per_2mb; +} H264LevelDescriptor; + +/** + * Guess the level of a stream from some parameters. + * + * Unknown parameters may be zero, in which case they are ignored. 
+ */ +const H264LevelDescriptor *ff_h264_guess_level(int profile_idc, + int64_t bitrate, + int framerate, + int width, int height, + int max_dec_frame_buffering); + + +#endif /* AVCODEC_H264_LEVELS_H */ diff --git a/third-party/cbs/include/cbs/hevc.h b/third-party/cbs/include/cbs/hevc.h index 76707f1ec35..1804755327e 100644 --- a/third-party/cbs/include/cbs/hevc.h +++ b/third-party/cbs/include/cbs/hevc.h @@ -26,134 +26,134 @@ * T-REC-H.265-201802 */ enum HEVCNALUnitType { - HEVC_NAL_TRAIL_N = 0, - HEVC_NAL_TRAIL_R = 1, - HEVC_NAL_TSA_N = 2, - HEVC_NAL_TSA_R = 3, - HEVC_NAL_STSA_N = 4, - HEVC_NAL_STSA_R = 5, - HEVC_NAL_RADL_N = 6, - HEVC_NAL_RADL_R = 7, - HEVC_NAL_RASL_N = 8, - HEVC_NAL_RASL_R = 9, - HEVC_NAL_VCL_N10 = 10, - HEVC_NAL_VCL_R11 = 11, - HEVC_NAL_VCL_N12 = 12, - HEVC_NAL_VCL_R13 = 13, - HEVC_NAL_VCL_N14 = 14, - HEVC_NAL_VCL_R15 = 15, - HEVC_NAL_BLA_W_LP = 16, - HEVC_NAL_BLA_W_RADL = 17, - HEVC_NAL_BLA_N_LP = 18, - HEVC_NAL_IDR_W_RADL = 19, - HEVC_NAL_IDR_N_LP = 20, - HEVC_NAL_CRA_NUT = 21, - HEVC_NAL_RSV_IRAP_VCL22 = 22, - HEVC_NAL_RSV_IRAP_VCL23 = 23, - HEVC_NAL_RSV_VCL24 = 24, - HEVC_NAL_RSV_VCL25 = 25, - HEVC_NAL_RSV_VCL26 = 26, - HEVC_NAL_RSV_VCL27 = 27, - HEVC_NAL_RSV_VCL28 = 28, - HEVC_NAL_RSV_VCL29 = 29, - HEVC_NAL_RSV_VCL30 = 30, - HEVC_NAL_RSV_VCL31 = 31, - HEVC_NAL_VPS = 32, - HEVC_NAL_SPS = 33, - HEVC_NAL_PPS = 34, - HEVC_NAL_AUD = 35, - HEVC_NAL_EOS_NUT = 36, - HEVC_NAL_EOB_NUT = 37, - HEVC_NAL_FD_NUT = 38, - HEVC_NAL_SEI_PREFIX = 39, - HEVC_NAL_SEI_SUFFIX = 40, - HEVC_NAL_RSV_NVCL41 = 41, - HEVC_NAL_RSV_NVCL42 = 42, - HEVC_NAL_RSV_NVCL43 = 43, - HEVC_NAL_RSV_NVCL44 = 44, - HEVC_NAL_RSV_NVCL45 = 45, - HEVC_NAL_RSV_NVCL46 = 46, - HEVC_NAL_RSV_NVCL47 = 47, - HEVC_NAL_UNSPEC48 = 48, - HEVC_NAL_UNSPEC49 = 49, - HEVC_NAL_UNSPEC50 = 50, - HEVC_NAL_UNSPEC51 = 51, - HEVC_NAL_UNSPEC52 = 52, - HEVC_NAL_UNSPEC53 = 53, - HEVC_NAL_UNSPEC54 = 54, - HEVC_NAL_UNSPEC55 = 55, - HEVC_NAL_UNSPEC56 = 56, - HEVC_NAL_UNSPEC57 = 57, - HEVC_NAL_UNSPEC58 = 58, - 
HEVC_NAL_UNSPEC59 = 59, - HEVC_NAL_UNSPEC60 = 60, - HEVC_NAL_UNSPEC61 = 61, - HEVC_NAL_UNSPEC62 = 62, - HEVC_NAL_UNSPEC63 = 63, + HEVC_NAL_TRAIL_N = 0, + HEVC_NAL_TRAIL_R = 1, + HEVC_NAL_TSA_N = 2, + HEVC_NAL_TSA_R = 3, + HEVC_NAL_STSA_N = 4, + HEVC_NAL_STSA_R = 5, + HEVC_NAL_RADL_N = 6, + HEVC_NAL_RADL_R = 7, + HEVC_NAL_RASL_N = 8, + HEVC_NAL_RASL_R = 9, + HEVC_NAL_VCL_N10 = 10, + HEVC_NAL_VCL_R11 = 11, + HEVC_NAL_VCL_N12 = 12, + HEVC_NAL_VCL_R13 = 13, + HEVC_NAL_VCL_N14 = 14, + HEVC_NAL_VCL_R15 = 15, + HEVC_NAL_BLA_W_LP = 16, + HEVC_NAL_BLA_W_RADL = 17, + HEVC_NAL_BLA_N_LP = 18, + HEVC_NAL_IDR_W_RADL = 19, + HEVC_NAL_IDR_N_LP = 20, + HEVC_NAL_CRA_NUT = 21, + HEVC_NAL_RSV_IRAP_VCL22 = 22, + HEVC_NAL_RSV_IRAP_VCL23 = 23, + HEVC_NAL_RSV_VCL24 = 24, + HEVC_NAL_RSV_VCL25 = 25, + HEVC_NAL_RSV_VCL26 = 26, + HEVC_NAL_RSV_VCL27 = 27, + HEVC_NAL_RSV_VCL28 = 28, + HEVC_NAL_RSV_VCL29 = 29, + HEVC_NAL_RSV_VCL30 = 30, + HEVC_NAL_RSV_VCL31 = 31, + HEVC_NAL_VPS = 32, + HEVC_NAL_SPS = 33, + HEVC_NAL_PPS = 34, + HEVC_NAL_AUD = 35, + HEVC_NAL_EOS_NUT = 36, + HEVC_NAL_EOB_NUT = 37, + HEVC_NAL_FD_NUT = 38, + HEVC_NAL_SEI_PREFIX = 39, + HEVC_NAL_SEI_SUFFIX = 40, + HEVC_NAL_RSV_NVCL41 = 41, + HEVC_NAL_RSV_NVCL42 = 42, + HEVC_NAL_RSV_NVCL43 = 43, + HEVC_NAL_RSV_NVCL44 = 44, + HEVC_NAL_RSV_NVCL45 = 45, + HEVC_NAL_RSV_NVCL46 = 46, + HEVC_NAL_RSV_NVCL47 = 47, + HEVC_NAL_UNSPEC48 = 48, + HEVC_NAL_UNSPEC49 = 49, + HEVC_NAL_UNSPEC50 = 50, + HEVC_NAL_UNSPEC51 = 51, + HEVC_NAL_UNSPEC52 = 52, + HEVC_NAL_UNSPEC53 = 53, + HEVC_NAL_UNSPEC54 = 54, + HEVC_NAL_UNSPEC55 = 55, + HEVC_NAL_UNSPEC56 = 56, + HEVC_NAL_UNSPEC57 = 57, + HEVC_NAL_UNSPEC58 = 58, + HEVC_NAL_UNSPEC59 = 59, + HEVC_NAL_UNSPEC60 = 60, + HEVC_NAL_UNSPEC61 = 61, + HEVC_NAL_UNSPEC62 = 62, + HEVC_NAL_UNSPEC63 = 63, }; enum HEVCSliceType { - HEVC_SLICE_B = 0, - HEVC_SLICE_P = 1, - HEVC_SLICE_I = 2, + HEVC_SLICE_B = 0, + HEVC_SLICE_P = 1, + HEVC_SLICE_I = 2, }; enum { - // 7.4.3.1: vps_max_layers_minus1 is in [0, 62]. 
- HEVC_MAX_LAYERS = 63, - // 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6]. - HEVC_MAX_SUB_LAYERS = 7, - // 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023]. - HEVC_MAX_LAYER_SETS = 1024, + // 7.4.3.1: vps_max_layers_minus1 is in [0, 62]. + HEVC_MAX_LAYERS = 63, + // 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6]. + HEVC_MAX_SUB_LAYERS = 7, + // 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023]. + HEVC_MAX_LAYER_SETS = 1024, - // 7.4.2.1: vps_video_parameter_set_id is u(4). - HEVC_MAX_VPS_COUNT = 16, - // 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15]. - HEVC_MAX_SPS_COUNT = 16, - // 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63]. - HEVC_MAX_PPS_COUNT = 64, + // 7.4.2.1: vps_video_parameter_set_id is u(4). + HEVC_MAX_VPS_COUNT = 16, + // 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15]. + HEVC_MAX_SPS_COUNT = 16, + // 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63]. + HEVC_MAX_PPS_COUNT = 64, - // A.4.2: MaxDpbSize is bounded above by 16. - HEVC_MAX_DPB_SIZE = 16, - // 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1]. - HEVC_MAX_REFS = HEVC_MAX_DPB_SIZE, + // A.4.2: MaxDpbSize is bounded above by 16. + HEVC_MAX_DPB_SIZE = 16, + // 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1]. + HEVC_MAX_REFS = HEVC_MAX_DPB_SIZE, - // 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64]. - HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64, - // 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32]. - HEVC_MAX_LONG_TERM_REF_PICS = 32, + // 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64]. + HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64, + // 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32]. + HEVC_MAX_LONG_TERM_REF_PICS = 32, - // A.3: all profiles require that CtbLog2SizeY is in [4, 6]. - HEVC_MIN_LOG2_CTB_SIZE = 4, - HEVC_MAX_LOG2_CTB_SIZE = 6, + // A.3: all profiles require that CtbLog2SizeY is in [4, 6]. + HEVC_MIN_LOG2_CTB_SIZE = 4, + HEVC_MAX_LOG2_CTB_SIZE = 6, - // E.3.2: cpb_cnt_minus1[i] is in [0, 31]. 
- HEVC_MAX_CPB_CNT = 32, + // E.3.2: cpb_cnt_minus1[i] is in [0, 31]. + HEVC_MAX_CPB_CNT = 32, - // A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584. - HEVC_MAX_LUMA_PS = 35651584, - // A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are - // constrained to be not greater than sqrt(MaxLumaPs * 8). Hence height/ - // width are bounded above by sqrt(8 * 35651584) = 16888.2 samples. - HEVC_MAX_WIDTH = 16888, - HEVC_MAX_HEIGHT = 16888, + // A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584. + HEVC_MAX_LUMA_PS = 35651584, + // A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are + // constrained to be not greater than sqrt(MaxLumaPs * 8). Hence height/ + // width are bounded above by sqrt(8 * 35651584) = 16888.2 samples. + HEVC_MAX_WIDTH = 16888, + HEVC_MAX_HEIGHT = 16888, - // A.4.1: table A.6 allows at most 22 tile rows for any level. - HEVC_MAX_TILE_ROWS = 22, - // A.4.1: table A.6 allows at most 20 tile columns for any level. - HEVC_MAX_TILE_COLUMNS = 20, + // A.4.1: table A.6 allows at most 22 tile rows for any level. + HEVC_MAX_TILE_ROWS = 22, + // A.4.1: table A.6 allows at most 20 tile columns for any level. + HEVC_MAX_TILE_COLUMNS = 20, - // A.4.2: table A.6 allows at most 600 slice segments for any level. - HEVC_MAX_SLICE_SEGMENTS = 600, + // A.4.2: table A.6 allows at most 600 slice segments for any level. + HEVC_MAX_SLICE_SEGMENTS = 600, - // 7.4.7.1: in the worst case (tiles_enabled_flag and - // entropy_coding_sync_enabled_flag are both set), entry points can be - // placed at the beginning of every Ctb row in every tile, giving an - // upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1. - // Only a stream with very high resolution and perverse parameters could - // get near that, though, so set a lower limit here with the maximum - // possible value for 4K video (at most 135 16x16 Ctb rows). 
- HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135, + // 7.4.7.1: in the worst case (tiles_enabled_flag and + // entropy_coding_sync_enabled_flag are both set), entry points can be + // placed at the beginning of every Ctb row in every tile, giving an + // upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1. + // Only a stream with very high resolution and perverse parameters could + // get near that, though, so set a lower limit here with the maximum + // possible value for 4K video (at most 135 16x16 Ctb rows). + HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135, }; diff --git a/third-party/cbs/include/cbs/mathops.h b/third-party/cbs/include/cbs/mathops.h new file mode 100644 index 00000000000..f81d21f9c47 --- /dev/null +++ b/third-party/cbs/include/cbs/mathops.h @@ -0,0 +1,245 @@ +/* + * simple math operations + * Copyright (c) 2001, 2002 Fabrice Bellard + * Copyright (c) 2006 Michael Niedermayer et al + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ +#ifndef AVCODEC_MATHOPS_H +#define AVCODEC_MATHOPS_H + +#include + +#include "libavutil/common.h" +#include "config.h" + +#define MAX_NEG_CROP 1024 + +extern const uint32_t ff_inverse[257]; +extern const uint8_t ff_log2_run[41]; +extern const uint8_t ff_sqrt_tab[256]; +extern const uint8_t ff_crop_tab[256 + 2 * MAX_NEG_CROP]; +extern const uint8_t ff_zigzag_direct[64]; +extern const uint8_t ff_zigzag_scan[16+1]; + +#if ARCH_ARM +# include "arm/mathops.h" +#elif ARCH_AVR32 +# include "avr32/mathops.h" +#elif ARCH_MIPS +# include "mips/mathops.h" +#elif ARCH_PPC +# include "ppc/mathops.h" +#elif ARCH_X86 +# include "x86/mathops.h" +#endif + +/* generic implementation */ + +#ifndef MUL64 +# define MUL64(a,b) ((int64_t)(a) * (int64_t)(b)) +#endif + +#ifndef MULL +# define MULL(a,b,s) (MUL64(a, b) >> (s)) +#endif + +#ifndef MULH +static av_always_inline int MULH(int a, int b){ + return MUL64(a, b) >> 32; +} +#endif + +#ifndef UMULH +static av_always_inline unsigned UMULH(unsigned a, unsigned b){ + return ((uint64_t)(a) * (uint64_t)(b))>>32; +} +#endif + +#ifndef MAC64 +# define MAC64(d, a, b) ((d) += MUL64(a, b)) +#endif + +#ifndef MLS64 +# define MLS64(d, a, b) ((d) -= MUL64(a, b)) +#endif + +/* signed 16x16 -> 32 multiply add accumulate */ +#ifndef MAC16 +# define MAC16(rt, ra, rb) rt += (ra) * (rb) +#endif + +/* signed 16x16 -> 32 multiply */ +#ifndef MUL16 +# define MUL16(ra, rb) ((ra) * (rb)) +#endif + +#ifndef MLS16 +# define MLS16(rt, ra, rb) ((rt) -= (ra) * (rb)) +#endif + +/* median of 3 */ +#ifndef mid_pred +#define mid_pred mid_pred +static inline av_const int mid_pred(int a, int b, int c) +{ + if(a>b){ + if(c>b){ + if(c>a) b=a; + else b=c; + } + }else{ + if(b>c){ + if(c>a) b=c; + else b=a; + } + } + return b; +} +#endif + +#ifndef 
median4 +#define median4 median4 +static inline av_const int median4(int a, int b, int c, int d) +{ + if (a < b) { + if (c < d) return (FFMIN(b, d) + FFMAX(a, c)) / 2; + else return (FFMIN(b, c) + FFMAX(a, d)) / 2; + } else { + if (c < d) return (FFMIN(a, d) + FFMAX(b, c)) / 2; + else return (FFMIN(a, c) + FFMAX(b, d)) / 2; + } +} +#endif + +#define FF_SIGNBIT(x) ((x) >> CHAR_BIT * sizeof(x) - 1) + +#ifndef sign_extend +static inline av_const int sign_extend(int val, unsigned bits) +{ + unsigned shift = 8 * sizeof(int) - bits; + union { unsigned u; int s; } v = { (unsigned) val << shift }; + return v.s >> shift; +} +#endif + +#ifndef zero_extend +static inline av_const unsigned zero_extend(unsigned val, unsigned bits) +{ + return (val << ((8 * sizeof(int)) - bits)) >> ((8 * sizeof(int)) - bits); +} +#endif + +#ifndef COPY3_IF_LT +#define COPY3_IF_LT(x, y, a, b, c, d)\ +if ((y) < (x)) {\ + (x) = (y);\ + (a) = (b);\ + (c) = (d);\ +} +#endif + +#ifndef MASK_ABS +#define MASK_ABS(mask, level) do { \ + mask = level >> 31; \ + level = (level ^ mask) - mask; \ + } while (0) +#endif + +#ifndef NEG_SSR32 +# define NEG_SSR32(a,s) ((( int32_t)(a))>>(32-(s))) +#endif + +#ifndef NEG_USR32 +# define NEG_USR32(a,s) (((uint32_t)(a))>>(32-(s))) +#endif + +#if HAVE_BIGENDIAN +# ifndef PACK_2U8 +# define PACK_2U8(a,b) (((a) << 8) | (b)) +# endif +# ifndef PACK_4U8 +# define PACK_4U8(a,b,c,d) (((a) << 24) | ((b) << 16) | ((c) << 8) | (d)) +# endif +# ifndef PACK_2U16 +# define PACK_2U16(a,b) (((a) << 16) | (b)) +# endif +#else +# ifndef PACK_2U8 +# define PACK_2U8(a,b) (((b) << 8) | (a)) +# endif +# ifndef PACK_4U2 +# define PACK_4U8(a,b,c,d) (((d) << 24) | ((c) << 16) | ((b) << 8) | (a)) +# endif +# ifndef PACK_2U16 +# define PACK_2U16(a,b) (((b) << 16) | (a)) +# endif +#endif + +#ifndef PACK_2S8 +# define PACK_2S8(a,b) PACK_2U8((a)&255, (b)&255) +#endif +#ifndef PACK_4S8 +# define PACK_4S8(a,b,c,d) PACK_4U8((a)&255, (b)&255, (c)&255, (d)&255) +#endif +#ifndef PACK_2S16 +# define 
PACK_2S16(a,b) PACK_2U16((a)&0xffff, (b)&0xffff) +#endif + +#ifndef FASTDIV +# define FASTDIV(a,b) ((uint32_t)((((uint64_t)a) * ff_inverse[b]) >> 32)) +#endif /* FASTDIV */ + +#ifndef ff_sqrt +#define ff_sqrt ff_sqrt +static inline av_const unsigned int ff_sqrt(unsigned int a) +{ + unsigned int b; + + if (a < 255) return (ff_sqrt_tab[a + 1] - 1) >> 4; + else if (a < (1 << 12)) b = ff_sqrt_tab[a >> 4] >> 2; +#if !CONFIG_SMALL + else if (a < (1 << 14)) b = ff_sqrt_tab[a >> 6] >> 1; + else if (a < (1 << 16)) b = ff_sqrt_tab[a >> 8] ; +#endif + else { + int s = av_log2_16bit(a >> 16) >> 1; + unsigned int c = a >> (s + 2); + b = ff_sqrt_tab[c >> (s + 8)]; + b = FASTDIV(c,b) + (b << s); + } + + return b - (a < b * b); +} +#endif + +static inline av_const float ff_sqrf(float a) +{ + return a*a; +} + +static inline int8_t ff_u8_to_s8(uint8_t a) +{ + union { + uint8_t u8; + int8_t s8; + } b; + b.u8 = a; + return b.s8; +} + +#endif /* AVCODEC_MATHOPS_H */ diff --git a/third-party/cbs/include/cbs/packet.h b/third-party/cbs/include/cbs/packet.h new file mode 100644 index 00000000000..404d520071e --- /dev/null +++ b/third-party/cbs/include/cbs/packet.h @@ -0,0 +1,731 @@ +/* + * AVPacket public API + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_PACKET_H +#define AVCODEC_PACKET_H + +#include +#include + +#include "libavutil/attributes.h" +#include "libavutil/buffer.h" +#include "libavutil/dict.h" +#include "libavutil/rational.h" +#include "libavutil/version.h" + +#include "libavcodec/version_major.h" + +/** + * @defgroup lavc_packet AVPacket + * + * Types and functions for working with AVPacket. + * @{ + */ +enum AVPacketSideDataType { + /** + * An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE + * bytes worth of palette. This side data signals that a new palette is + * present. + */ + AV_PKT_DATA_PALETTE, + + /** + * The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format + * that the extradata buffer was changed and the receiving side should + * act upon it appropriately. The new extradata is embedded in the side + * data buffer and should be immediately used for processing the current + * frame or packet. + */ + AV_PKT_DATA_NEW_EXTRADATA, + + /** + * An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: + * @code + * u32le param_flags + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT) + * s32le channel_count + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT) + * u64le channel_layout + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE) + * s32le sample_rate + * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS) + * s32le width + * s32le height + * @endcode + */ + AV_PKT_DATA_PARAM_CHANGE, + + /** + * An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of + * structures with info about macroblocks relevant to splitting the + * packet into smaller packets on macroblock edges (e.g. as for RFC 2190). 
+ * That is, it does not necessarily contain info about all macroblocks, + * as long as the distance between macroblocks in the info is smaller + * than the target payload size. + * Each MB info structure is 12 bytes, and is laid out as follows: + * @code + * u32le bit offset from the start of the packet + * u8 current quantizer at the start of the macroblock + * u8 GOB number + * u16le macroblock address within the GOB + * u8 horizontal MV predictor + * u8 vertical MV predictor + * u8 horizontal MV predictor for block number 3 + * u8 vertical MV predictor for block number 3 + * @endcode + */ + AV_PKT_DATA_H263_MB_INFO, + + /** + * This side data should be associated with an audio stream and contains + * ReplayGain information in form of the AVReplayGain struct. + */ + AV_PKT_DATA_REPLAYGAIN, + + /** + * This side data contains a 3x3 transformation matrix describing an affine + * transformation that needs to be applied to the decoded video frames for + * correct presentation. + * + * See libavutil/display.h for a detailed description of the data. + */ + AV_PKT_DATA_DISPLAYMATRIX, + + /** + * This side data should be associated with a video stream and contains + * Stereoscopic 3D information in form of the AVStereo3D struct. + */ + AV_PKT_DATA_STEREO3D, + + /** + * This side data should be associated with an audio stream and corresponds + * to enum AVAudioServiceType. + */ + AV_PKT_DATA_AUDIO_SERVICE_TYPE, + + /** + * This side data contains quality related information from the encoder. + * @code + * u32le quality factor of the compressed frame. Allowed range is between 1 (good) and FF_LAMBDA_MAX (bad). + * u8 picture type + * u8 error count + * u16 reserved + * u64le[error count] sum of squared differences between encoder in and output + * @endcode + */ + AV_PKT_DATA_QUALITY_STATS, + + /** + * This side data contains an integer value representing the stream index + * of a "fallback" track. 
A fallback track indicates an alternate + * track to use when the current track can not be decoded for some reason. + * e.g. no decoder available for codec. + */ + AV_PKT_DATA_FALLBACK_TRACK, + + /** + * This side data corresponds to the AVCPBProperties struct. + */ + AV_PKT_DATA_CPB_PROPERTIES, + + /** + * Recommmends skipping the specified number of samples + * @code + * u32le number of samples to skip from start of this packet + * u32le number of samples to skip from end of this packet + * u8 reason for start skip + * u8 reason for end skip (0=padding silence, 1=convergence) + * @endcode + */ + AV_PKT_DATA_SKIP_SAMPLES, + + /** + * An AV_PKT_DATA_JP_DUALMONO side data packet indicates that + * the packet may contain "dual mono" audio specific to Japanese DTV + * and if it is true, recommends only the selected channel to be used. + * @code + * u8 selected channels (0=mail/left, 1=sub/right, 2=both) + * @endcode + */ + AV_PKT_DATA_JP_DUALMONO, + + /** + * A list of zero terminated key/value strings. There is no end marker for + * the list, so it is required to rely on the side data size to stop. + */ + AV_PKT_DATA_STRINGS_METADATA, + + /** + * Subtitle event position + * @code + * u32le x1 + * u32le y1 + * u32le x2 + * u32le y2 + * @endcode + */ + AV_PKT_DATA_SUBTITLE_POSITION, + + /** + * Data found in BlockAdditional element of matroska container. There is + * no end marker for the data, so it is required to rely on the side data + * size to recognize the end. 8 byte id (as found in BlockAddId) followed + * by data. + */ + AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL, + + /** + * The optional first identifier line of a WebVTT cue. + */ + AV_PKT_DATA_WEBVTT_IDENTIFIER, + + /** + * The optional settings (rendering instructions) that immediately + * follow the timestamp specifier of a WebVTT cue. + */ + AV_PKT_DATA_WEBVTT_SETTINGS, + + /** + * A list of zero terminated key/value strings. 
There is no end marker for + * the list, so it is required to rely on the side data size to stop. This + * side data includes updated metadata which appeared in the stream. + */ + AV_PKT_DATA_METADATA_UPDATE, + + /** + * MPEGTS stream ID as uint8_t, this is required to pass the stream ID + * information from the demuxer to the corresponding muxer. + */ + AV_PKT_DATA_MPEGTS_STREAM_ID, + + /** + * Mastering display metadata (based on SMPTE-2086:2014). This metadata + * should be associated with a video stream and contains data in the form + * of the AVMasteringDisplayMetadata struct. + */ + AV_PKT_DATA_MASTERING_DISPLAY_METADATA, + + /** + * This side data should be associated with a video stream and corresponds + * to the AVSphericalMapping structure. + */ + AV_PKT_DATA_SPHERICAL, + + /** + * Content light level (based on CTA-861.3). This metadata should be + * associated with a video stream and contains data in the form of the + * AVContentLightMetadata struct. + */ + AV_PKT_DATA_CONTENT_LIGHT_LEVEL, + + /** + * ATSC A53 Part 4 Closed Captions. This metadata should be associated with + * a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. + * The number of bytes of CC data is AVPacketSideData.size. + */ + AV_PKT_DATA_A53_CC, + + /** + * This side data is encryption initialization data. + * The format is not part of ABI, use av_encryption_init_info_* methods to + * access. + */ + AV_PKT_DATA_ENCRYPTION_INIT_INFO, + + /** + * This side data contains encryption info for how to decrypt the packet. + * The format is not part of ABI, use av_encryption_info_* methods to access. + */ + AV_PKT_DATA_ENCRYPTION_INFO, + + /** + * Active Format Description data consisting of a single byte as specified + * in ETSI TS 101 154 using AVActiveFormatDescription enum. 
+ */ + AV_PKT_DATA_AFD, + + /** + * Producer Reference Time data corresponding to the AVProducerReferenceTime struct, + * usually exported by some encoders (on demand through the prft flag set in the + * AVCodecContext export_side_data field). + */ + AV_PKT_DATA_PRFT, + + /** + * ICC profile data consisting of an opaque octet buffer following the + * format described by ISO 15076-1. + */ + AV_PKT_DATA_ICC_PROFILE, + + /** + * DOVI configuration + * ref: + * dolby-vision-bitstreams-within-the-iso-base-media-file-format-v2.1.2, section 2.2 + * dolby-vision-bitstreams-in-mpeg-2-transport-stream-multiplex-v1.2, section 3.3 + * Tags are stored in struct AVDOVIDecoderConfigurationRecord. + */ + AV_PKT_DATA_DOVI_CONF, + + /** + * Timecode which conforms to SMPTE ST 12-1:2014. The data is an array of 4 uint32_t + * where the first uint32_t describes how many (1-3) of the other timecodes are used. + * The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() + * function in libavutil/timecode.h. + */ + AV_PKT_DATA_S12M_TIMECODE, + + /** + * HDR10+ dynamic metadata associated with a video frame. The metadata is in + * the form of the AVDynamicHDRPlus struct and contains + * information for color volume transform - application 4 of + * SMPTE 2094-40:2016 standard. + */ + AV_PKT_DATA_DYNAMIC_HDR10_PLUS, + + /** + * The number of side data types. + * This is not part of the public API/ABI in the sense that it may + * change when new side data types are added. + * This must stay the last enum value. + * If its value becomes huge, some code using it + * needs to be updated as it assumes it to be smaller than other limits. + */ + AV_PKT_DATA_NB +}; + +#define AV_PKT_DATA_QUALITY_FACTOR AV_PKT_DATA_QUALITY_STATS //DEPRECATED + +typedef struct AVPacketSideData { + uint8_t *data; + size_t size; + enum AVPacketSideDataType type; +} AVPacketSideData; + +/** + * This structure stores compressed data. 
It is typically exported by demuxers + * and then passed as input to decoders, or received as output from encoders and + * then passed to muxers. + * + * For video, it should typically contain one compressed frame. For audio it may + * contain several compressed frames. Encoders are allowed to output empty + * packets, with no compressed data, containing only side data + * (e.g. to update some stream parameters at the end of encoding). + * + * The semantics of data ownership depends on the buf field. + * If it is set, the packet data is dynamically allocated and is + * valid indefinitely until a call to av_packet_unref() reduces the + * reference count to 0. + * + * If the buf field is not set av_packet_ref() would make a copy instead + * of increasing the reference count. + * + * The side data is always allocated with av_malloc(), copied by + * av_packet_ref() and freed by av_packet_unref(). + * + * sizeof(AVPacket) being a part of the public ABI is deprecated. once + * av_init_packet() is removed, new packets will only be able to be allocated + * with av_packet_alloc(), and new fields may be added to the end of the struct + * with a minor bump. + * + * @see av_packet_alloc + * @see av_packet_ref + * @see av_packet_unref + */ +typedef struct AVPacket { + /** + * A reference to the reference-counted buffer where the packet data is + * stored. + * May be NULL, then the packet data is not reference-counted. + */ + AVBufferRef *buf; + /** + * Presentation timestamp in AVStream->time_base units; the time at which + * the decompressed packet will be presented to the user. + * Can be AV_NOPTS_VALUE if it is not stored in the file. + * pts MUST be larger or equal to dts as presentation cannot happen before + * decompression, unless one wants to view hex dumps. Some formats misuse + * the terms dts and pts/cts to mean something different. Such timestamps + * must be converted to true pts/dts before they are stored in AVPacket. 
+ */ + int64_t pts; + /** + * Decompression timestamp in AVStream->time_base units; the time at which + * the packet is decompressed. + * Can be AV_NOPTS_VALUE if it is not stored in the file. + */ + int64_t dts; + uint8_t *data; + int size; + int stream_index; + /** + * A combination of AV_PKT_FLAG values + */ + int flags; + /** + * Additional packet data that can be provided by the container. + * Packet can contain several types of side information. + */ + AVPacketSideData *side_data; + int side_data_elems; + + /** + * Duration of this packet in AVStream->time_base units, 0 if unknown. + * Equals next_pts - this_pts in presentation order. + */ + int64_t duration; + + int64_t pos; ///< byte position in stream, -1 if unknown + + /** + * for some private data of the user + */ + void *opaque; + + /** + * AVBufferRef for free use by the API user. FFmpeg will never check the + * contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when + * the packet is unreferenced. av_packet_copy_props() calls create a new + * reference with av_buffer_ref() for the target packet's opaque_ref field. + * + * This is unrelated to the opaque field, although it serves a similar + * purpose. + */ + AVBufferRef *opaque_ref; + + /** + * Time base of the packet's timestamps. + * In the future, this field may be set on packets output by encoders or + * demuxers, but its value will be by default ignored on input to decoders + * or muxers. + */ + AVRational time_base; +} AVPacket; + +#if FF_API_INIT_PACKET +attribute_deprecated +typedef struct AVPacketList { + AVPacket pkt; + struct AVPacketList *next; +} AVPacketList; +#endif + +#define AV_PKT_FLAG_KEY 0x0001 ///< The packet contains a keyframe +#define AV_PKT_FLAG_CORRUPT 0x0002 ///< The packet content is corrupted +/** + * Flag is used to discard packets which are required to maintain valid + * decoder state but are not required for output and should be dropped + * after decoding. 
+ **/ +#define AV_PKT_FLAG_DISCARD 0x0004 +/** + * The packet comes from a trusted source. + * + * Otherwise-unsafe constructs such as arbitrary pointers to data + * outside the packet may be followed. + */ +#define AV_PKT_FLAG_TRUSTED 0x0008 +/** + * Flag is used to indicate packets that contain frames that can + * be discarded by the decoder. I.e. Non-reference frames. + */ +#define AV_PKT_FLAG_DISPOSABLE 0x0010 + +enum AVSideDataParamChangeFlags { +#if FF_API_OLD_CHANNEL_LAYOUT + /** + * @deprecated those are not used by any decoder + */ + AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT = 0x0001, + AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT = 0x0002, +#endif + AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE = 0x0004, + AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS = 0x0008, +}; + +/** + * Allocate an AVPacket and set its fields to default values. The resulting + * struct must be freed using av_packet_free(). + * + * @return An AVPacket filled with default values or NULL on failure. + * + * @note this only allocates the AVPacket itself, not the data buffers. Those + * must be allocated through other means such as av_new_packet. + * + * @see av_new_packet + */ +AVPacket *av_packet_alloc(void); + +/** + * Create a new packet that references the same data as src. + * + * This is a shortcut for av_packet_alloc()+av_packet_ref(). + * + * @return newly created AVPacket on success, NULL on error. + * + * @see av_packet_alloc + * @see av_packet_ref + */ +AVPacket *av_packet_clone(const AVPacket *src); + +/** + * Free the packet, if the packet is reference counted, it will be + * unreferenced first. + * + * @param pkt packet to be freed. The pointer will be set to NULL. + * @note passing NULL is a no-op. + */ +void av_packet_free(AVPacket **pkt); + +#if FF_API_INIT_PACKET +/** + * Initialize optional fields of a packet with default values. + * + * Note, this does not touch the data and size members, which have to be + * initialized separately. 
+ * + * @param pkt packet + * + * @see av_packet_alloc + * @see av_packet_unref + * + * @deprecated This function is deprecated. Once it's removed, + sizeof(AVPacket) will not be a part of the ABI anymore. + */ +attribute_deprecated +void av_init_packet(AVPacket *pkt); +#endif + +/** + * Allocate the payload of a packet and initialize its fields with + * default values. + * + * @param pkt packet + * @param size wanted payload size + * @return 0 if OK, AVERROR_xxx otherwise + */ +int av_new_packet(AVPacket *pkt, int size); + +/** + * Reduce packet size, correctly zeroing padding + * + * @param pkt packet + * @param size new size + */ +void av_shrink_packet(AVPacket *pkt, int size); + +/** + * Increase packet size, correctly zeroing padding + * + * @param pkt packet + * @param grow_by number of bytes by which to increase the size of the packet + */ +int av_grow_packet(AVPacket *pkt, int grow_by); + +/** + * Initialize a reference-counted packet from av_malloc()ed data. + * + * @param pkt packet to be initialized. This function will set the data, size, + * and buf fields, all others are left untouched. + * @param data Data allocated by av_malloc() to be used as packet data. If this + * function returns successfully, the data is owned by the underlying AVBuffer. + * The caller may not access the data through other means. + * @param size size of data in bytes, without the padding. I.e. the full buffer + * size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. + * + * @return 0 on success, a negative AVERROR on error + */ +int av_packet_from_data(AVPacket *pkt, uint8_t *data, int size); + +/** + * Allocate new information of a packet. + * + * @param pkt packet + * @param type side information type + * @param size side information size + * @return pointer to fresh allocated data or NULL otherwise + */ +uint8_t* av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, + size_t size); + +/** + * Wrap an existing array as a packet side data. 
+ * + * @param pkt packet + * @param type side information type + * @param data the side data array. It must be allocated with the av_malloc() + * family of functions. The ownership of the data is transferred to + * pkt. + * @param size side information size + * @return a non-negative number on success, a negative AVERROR code on + * failure. On failure, the packet is unchanged and the data remains + * owned by the caller. + */ +int av_packet_add_side_data(AVPacket *pkt, enum AVPacketSideDataType type, + uint8_t *data, size_t size); + +/** + * Shrink the already allocated side data buffer + * + * @param pkt packet + * @param type side information type + * @param size new side information size + * @return 0 on success, < 0 on failure + */ +int av_packet_shrink_side_data(AVPacket *pkt, enum AVPacketSideDataType type, + size_t size); + +/** + * Get side information from packet. + * + * @param pkt packet + * @param type desired side information type + * @param size If supplied, *size will be set to the size of the side data + * or to zero if the desired side data is not present. + * @return pointer to data if present or NULL otherwise + */ +uint8_t* av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type, + size_t *size); + +const char *av_packet_side_data_name(enum AVPacketSideDataType type); + +/** + * Pack a dictionary for use in side_data. + * + * @param dict The dictionary to pack. + * @param size pointer to store the size of the returned data + * @return pointer to data if successful, NULL otherwise + */ +uint8_t *av_packet_pack_dictionary(AVDictionary *dict, size_t *size); +/** + * Unpack a dictionary from side_data. + * + * @param data data from side_data + * @param size size of the data + * @param dict the metadata storage dictionary + * @return 0 on success, < 0 on failure + */ +int av_packet_unpack_dictionary(const uint8_t *data, size_t size, + AVDictionary **dict); + +/** + * Convenience function to free all the side data stored. 
+ * All the other fields stay untouched. + * + * @param pkt packet + */ +void av_packet_free_side_data(AVPacket *pkt); + +/** + * Setup a new reference to the data described by a given packet + * + * If src is reference-counted, setup dst as a new reference to the + * buffer in src. Otherwise allocate a new buffer in dst and copy the + * data from src into it. + * + * All the other fields are copied from src. + * + * @see av_packet_unref + * + * @param dst Destination packet. Will be completely overwritten. + * @param src Source packet + * + * @return 0 on success, a negative AVERROR on error. On error, dst + * will be blank (as if returned by av_packet_alloc()). + */ +int av_packet_ref(AVPacket *dst, const AVPacket *src); + +/** + * Wipe the packet. + * + * Unreference the buffer referenced by the packet and reset the + * remaining packet fields to their default values. + * + * @param pkt The packet to be unreferenced. + */ +void av_packet_unref(AVPacket *pkt); + +/** + * Move every field in src to dst and reset src. + * + * @see av_packet_unref + * + * @param src Source packet, will be reset + * @param dst Destination packet + */ +void av_packet_move_ref(AVPacket *dst, AVPacket *src); + +/** + * Copy only "properties" fields from src to dst. + * + * Properties for the purpose of this function are all the fields + * beside those related to the packet data (buf, data, size) + * + * @param dst Destination packet + * @param src Source packet + * + * @return 0 on success AVERROR on failure. + */ +int av_packet_copy_props(AVPacket *dst, const AVPacket *src); + +/** + * Ensure the data described by a given packet is reference counted. + * + * @note This function does not ensure that the reference will be writable. + * Use av_packet_make_writable instead for that purpose. + * + * @see av_packet_ref + * @see av_packet_make_writable + * + * @param pkt packet whose data should be made reference counted. + * + * @return 0 on success, a negative AVERROR on error. 
On failure, the + * packet is unchanged. + */ +int av_packet_make_refcounted(AVPacket *pkt); + +/** + * Create a writable reference for the data described by a given packet, + * avoiding data copy if possible. + * + * @param pkt Packet whose data should be made writable. + * + * @return 0 on success, a negative AVERROR on failure. On failure, the + * packet is unchanged. + */ +int av_packet_make_writable(AVPacket *pkt); + +/** + * Convert valid timing fields (timestamps / durations) in a packet from one + * timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be + * ignored. + * + * @param pkt packet on which the conversion will be performed + * @param tb_src source timebase, in which the timing fields in pkt are + * expressed + * @param tb_dst destination timebase, to which the timing fields will be + * converted + */ +void av_packet_rescale_ts(AVPacket *pkt, AVRational tb_src, AVRational tb_dst); + +/** + * @} + */ + +#endif // AVCODEC_PACKET_H diff --git a/third-party/cbs/include/cbs/sei.h b/third-party/cbs/include/cbs/sei.h index 4a35109b643..5513590b51e 100644 --- a/third-party/cbs/include/cbs/sei.h +++ b/third-party/cbs/include/cbs/sei.h @@ -27,114 +27,114 @@ // many generic parts have the same interpretation everywhere (such as // mastering-display-colour-volume and user-data-unregistered). 
enum { - SEI_TYPE_BUFFERING_PERIOD = 0, - SEI_TYPE_PIC_TIMING = 1, - SEI_TYPE_PAN_SCAN_RECT = 2, - SEI_TYPE_FILLER_PAYLOAD = 3, - SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35 = 4, - SEI_TYPE_USER_DATA_UNREGISTERED = 5, - SEI_TYPE_RECOVERY_POINT = 6, - SEI_TYPE_DEC_REF_PIC_MARKING_REPETITION = 7, - SEI_TYPE_SPARE_PIC = 8, - SEI_TYPE_SCENE_INFO = 9, - SEI_TYPE_SUB_SEQ_INFO = 10, - SEI_TYPE_SUB_SEQ_LAYER_CHARACTERISTICS = 11, - SEI_TYPE_SUB_SEQ_CHARACTERISTICS = 12, - SEI_TYPE_FULL_FRAME_FREEZE = 13, - SEI_TYPE_FULL_FRAME_FREEZE_RELEASE = 14, - SEI_TYPE_FULL_FRAME_SNAPSHOT = 15, - SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_START = 16, - SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_END = 17, - SEI_TYPE_MOTION_CONSTRAINED_SLICE_GROUP_SET = 18, - SEI_TYPE_FILM_GRAIN_CHARACTERISTICS = 19, - SEI_TYPE_DEBLOCKING_FILTER_DISPLAY_PREFERENCE = 20, - SEI_TYPE_STEREO_VIDEO_INFO = 21, - SEI_TYPE_POST_FILTER_HINT = 22, - SEI_TYPE_TONE_MAPPING_INFO = 23, - SEI_TYPE_SCALABILITY_INFO = 24, - SEI_TYPE_SUB_PIC_SCALABLE_LAYER = 25, - SEI_TYPE_NON_REQUIRED_LAYER_REP = 26, - SEI_TYPE_PRIORITY_LAYER_INFO = 27, - SEI_TYPE_LAYERS_NOT_PRESENT_4 = 28, - SEI_TYPE_LAYER_DEPENDENCY_CHANGE = 29, - SEI_TYPE_SCALABLE_NESTING_4 = 30, - SEI_TYPE_BASE_LAYER_TEMPORAL_HRD = 31, - SEI_TYPE_QUALITY_LAYER_INTEGRITY_CHECK = 32, - SEI_TYPE_REDUNDANT_PIC_PROPERTY = 33, - SEI_TYPE_TL0_DEP_REP_INDEX = 34, - SEI_TYPE_TL_SWITCHING_POINT = 35, - SEI_TYPE_PARALLEL_DECODING_INFO = 36, - SEI_TYPE_MVC_SCALABLE_NESTING = 37, - SEI_TYPE_VIEW_SCALABILITY_INFO = 38, - SEI_TYPE_MULTIVIEW_SCENE_INFO_4 = 39, - SEI_TYPE_MULTIVIEW_ACQUISITION_INFO_4 = 40, - SEI_TYPE_NON_REQUIRED_VIEW_COMPONENT = 41, - SEI_TYPE_VIEW_DEPENDENCY_CHANGE = 42, - SEI_TYPE_OPERATION_POINTS_NOT_PRESENT = 43, - SEI_TYPE_BASE_VIEW_TEMPORAL_HRD = 44, - SEI_TYPE_FRAME_PACKING_ARRANGEMENT = 45, - SEI_TYPE_MULTIVIEW_VIEW_POSITION_4 = 46, - SEI_TYPE_DISPLAY_ORIENTATION = 47, - SEI_TYPE_MVCD_SCALABLE_NESTING = 48, - SEI_TYPE_MVCD_VIEW_SCALABILITY_INFO = 49, - 
SEI_TYPE_DEPTH_REPRESENTATION_INFO_4 = 50, - SEI_TYPE_THREE_DIMENSIONAL_REFERENCE_DISPLAYS_INFO_4 = 51, - SEI_TYPE_DEPTH_TIMING = 52, - SEI_TYPE_DEPTH_SAMPLING_INFO = 53, - SEI_TYPE_CONSTRAINED_DEPTH_PARAMETER_SET_IDENTIFIER = 54, - SEI_TYPE_GREEN_METADATA = 56, - SEI_TYPE_STRUCTURE_OF_PICTURES_INFO = 128, - SEI_TYPE_ACTIVE_PARAMETER_SETS = 129, - SEI_TYPE_PARAMETER_SETS_INCLUSION_INDICATION = SEI_TYPE_ACTIVE_PARAMETER_SETS, - SEI_TYPE_DECODING_UNIT_INFO = 130, - SEI_TYPE_TEMPORAL_SUB_LAYER_ZERO_IDX = 131, - SEI_TYPE_DECODED_PICTURE_HASH = 132, - SEI_TYPE_SCALABLE_NESTING_5 = 133, - SEI_TYPE_REGION_REFRESH_INFO = 134, - SEI_TYPE_NO_DISPLAY = 135, - SEI_TYPE_TIME_CODE = 136, - SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME = 137, - SEI_TYPE_SEGMENTED_RECT_FRAME_PACKING_ARRANGEMENT = 138, - SEI_TYPE_TEMPORAL_MOTION_CONSTRAINED_TILE_SETS = 139, - SEI_TYPE_CHROMA_RESAMPLING_FILTER_HINT = 140, - SEI_TYPE_KNEE_FUNCTION_INFO = 141, - SEI_TYPE_COLOUR_REMAPPING_INFO = 142, - SEI_TYPE_DEINTERLACED_FIELD_IDENTIFICATION = 143, - SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO = 144, - SEI_TYPE_DEPENDENT_RAP_INDICATION = 145, - SEI_TYPE_CODED_REGION_COMPLETION = 146, - SEI_TYPE_ALTERNATIVE_TRANSFER_CHARACTERISTICS = 147, - SEI_TYPE_AMBIENT_VIEWING_ENVIRONMENT = 148, - SEI_TYPE_CONTENT_COLOUR_VOLUME = 149, - SEI_TYPE_EQUIRECTANGULAR_PROJECTION = 150, - SEI_TYPE_CUBEMAP_PROJECTION = 151, - SEI_TYPE_FISHEYE_VIDEO_INFO = 152, - SEI_TYPE_SPHERE_ROTATION = 154, - SEI_TYPE_REGIONWISE_PACKING = 155, - SEI_TYPE_OMNI_VIEWPORT = 156, - SEI_TYPE_REGIONAL_NESTING = 157, - SEI_TYPE_MCTS_EXTRACTION_INFO_SETS = 158, - SEI_TYPE_MCTS_EXTRACTION_INFO_NESTING = 159, - SEI_TYPE_LAYERS_NOT_PRESENT_5 = 160, - SEI_TYPE_INTER_LAYER_CONSTRAINED_TILE_SETS = 161, - SEI_TYPE_BSP_NESTING = 162, - SEI_TYPE_BSP_INITIAL_ARRIVAL_TIME = 163, - SEI_TYPE_SUB_BITSTREAM_PROPERTY = 164, - SEI_TYPE_ALPHA_CHANNEL_INFO = 165, - SEI_TYPE_OVERLAY_INFO = 166, - SEI_TYPE_TEMPORAL_MV_PREDICTION_CONSTRAINTS = 167, - SEI_TYPE_FRAME_FIELD_INFO = 
168, - SEI_TYPE_THREE_DIMENSIONAL_REFERENCE_DISPLAYS_INFO = 176, - SEI_TYPE_DEPTH_REPRESENTATION_INFO_5 = 177, - SEI_TYPE_MULTIVIEW_SCENE_INFO_5 = 178, - SEI_TYPE_MULTIVIEW_ACQUISITION_INFO_5 = 179, - SEI_TYPE_MULTIVIEW_VIEW_POSITION_5 = 180, - SEI_TYPE_ALTERNATIVE_DEPTH_INFO = 181, - SEI_TYPE_SEI_MANIFEST = 200, - SEI_TYPE_SEI_PREFIX_INDICATION = 201, - SEI_TYPE_ANNOTATED_REGIONS = 202, - SEI_TYPE_SUBPIC_LEVEL_INFO = 203, - SEI_TYPE_SAMPLE_ASPECT_RATIO_INFO = 204, + SEI_TYPE_BUFFERING_PERIOD = 0, + SEI_TYPE_PIC_TIMING = 1, + SEI_TYPE_PAN_SCAN_RECT = 2, + SEI_TYPE_FILLER_PAYLOAD = 3, + SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35 = 4, + SEI_TYPE_USER_DATA_UNREGISTERED = 5, + SEI_TYPE_RECOVERY_POINT = 6, + SEI_TYPE_DEC_REF_PIC_MARKING_REPETITION = 7, + SEI_TYPE_SPARE_PIC = 8, + SEI_TYPE_SCENE_INFO = 9, + SEI_TYPE_SUB_SEQ_INFO = 10, + SEI_TYPE_SUB_SEQ_LAYER_CHARACTERISTICS = 11, + SEI_TYPE_SUB_SEQ_CHARACTERISTICS = 12, + SEI_TYPE_FULL_FRAME_FREEZE = 13, + SEI_TYPE_FULL_FRAME_FREEZE_RELEASE = 14, + SEI_TYPE_FULL_FRAME_SNAPSHOT = 15, + SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_START = 16, + SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_END = 17, + SEI_TYPE_MOTION_CONSTRAINED_SLICE_GROUP_SET = 18, + SEI_TYPE_FILM_GRAIN_CHARACTERISTICS = 19, + SEI_TYPE_DEBLOCKING_FILTER_DISPLAY_PREFERENCE = 20, + SEI_TYPE_STEREO_VIDEO_INFO = 21, + SEI_TYPE_POST_FILTER_HINT = 22, + SEI_TYPE_TONE_MAPPING_INFO = 23, + SEI_TYPE_SCALABILITY_INFO = 24, + SEI_TYPE_SUB_PIC_SCALABLE_LAYER = 25, + SEI_TYPE_NON_REQUIRED_LAYER_REP = 26, + SEI_TYPE_PRIORITY_LAYER_INFO = 27, + SEI_TYPE_LAYERS_NOT_PRESENT_4 = 28, + SEI_TYPE_LAYER_DEPENDENCY_CHANGE = 29, + SEI_TYPE_SCALABLE_NESTING_4 = 30, + SEI_TYPE_BASE_LAYER_TEMPORAL_HRD = 31, + SEI_TYPE_QUALITY_LAYER_INTEGRITY_CHECK = 32, + SEI_TYPE_REDUNDANT_PIC_PROPERTY = 33, + SEI_TYPE_TL0_DEP_REP_INDEX = 34, + SEI_TYPE_TL_SWITCHING_POINT = 35, + SEI_TYPE_PARALLEL_DECODING_INFO = 36, + SEI_TYPE_MVC_SCALABLE_NESTING = 37, + SEI_TYPE_VIEW_SCALABILITY_INFO = 38, + 
SEI_TYPE_MULTIVIEW_SCENE_INFO_4 = 39, + SEI_TYPE_MULTIVIEW_ACQUISITION_INFO_4 = 40, + SEI_TYPE_NON_REQUIRED_VIEW_COMPONENT = 41, + SEI_TYPE_VIEW_DEPENDENCY_CHANGE = 42, + SEI_TYPE_OPERATION_POINTS_NOT_PRESENT = 43, + SEI_TYPE_BASE_VIEW_TEMPORAL_HRD = 44, + SEI_TYPE_FRAME_PACKING_ARRANGEMENT = 45, + SEI_TYPE_MULTIVIEW_VIEW_POSITION_4 = 46, + SEI_TYPE_DISPLAY_ORIENTATION = 47, + SEI_TYPE_MVCD_SCALABLE_NESTING = 48, + SEI_TYPE_MVCD_VIEW_SCALABILITY_INFO = 49, + SEI_TYPE_DEPTH_REPRESENTATION_INFO_4 = 50, + SEI_TYPE_THREE_DIMENSIONAL_REFERENCE_DISPLAYS_INFO_4 = 51, + SEI_TYPE_DEPTH_TIMING = 52, + SEI_TYPE_DEPTH_SAMPLING_INFO = 53, + SEI_TYPE_CONSTRAINED_DEPTH_PARAMETER_SET_IDENTIFIER = 54, + SEI_TYPE_GREEN_METADATA = 56, + SEI_TYPE_STRUCTURE_OF_PICTURES_INFO = 128, + SEI_TYPE_ACTIVE_PARAMETER_SETS = 129, + SEI_TYPE_PARAMETER_SETS_INCLUSION_INDICATION = SEI_TYPE_ACTIVE_PARAMETER_SETS, + SEI_TYPE_DECODING_UNIT_INFO = 130, + SEI_TYPE_TEMPORAL_SUB_LAYER_ZERO_IDX = 131, + SEI_TYPE_DECODED_PICTURE_HASH = 132, + SEI_TYPE_SCALABLE_NESTING_5 = 133, + SEI_TYPE_REGION_REFRESH_INFO = 134, + SEI_TYPE_NO_DISPLAY = 135, + SEI_TYPE_TIME_CODE = 136, + SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME = 137, + SEI_TYPE_SEGMENTED_RECT_FRAME_PACKING_ARRANGEMENT = 138, + SEI_TYPE_TEMPORAL_MOTION_CONSTRAINED_TILE_SETS = 139, + SEI_TYPE_CHROMA_RESAMPLING_FILTER_HINT = 140, + SEI_TYPE_KNEE_FUNCTION_INFO = 141, + SEI_TYPE_COLOUR_REMAPPING_INFO = 142, + SEI_TYPE_DEINTERLACED_FIELD_IDENTIFICATION = 143, + SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO = 144, + SEI_TYPE_DEPENDENT_RAP_INDICATION = 145, + SEI_TYPE_CODED_REGION_COMPLETION = 146, + SEI_TYPE_ALTERNATIVE_TRANSFER_CHARACTERISTICS = 147, + SEI_TYPE_AMBIENT_VIEWING_ENVIRONMENT = 148, + SEI_TYPE_CONTENT_COLOUR_VOLUME = 149, + SEI_TYPE_EQUIRECTANGULAR_PROJECTION = 150, + SEI_TYPE_CUBEMAP_PROJECTION = 151, + SEI_TYPE_FISHEYE_VIDEO_INFO = 152, + SEI_TYPE_SPHERE_ROTATION = 154, + SEI_TYPE_REGIONWISE_PACKING = 155, + SEI_TYPE_OMNI_VIEWPORT = 156, + 
SEI_TYPE_REGIONAL_NESTING = 157, + SEI_TYPE_MCTS_EXTRACTION_INFO_SETS = 158, + SEI_TYPE_MCTS_EXTRACTION_INFO_NESTING = 159, + SEI_TYPE_LAYERS_NOT_PRESENT_5 = 160, + SEI_TYPE_INTER_LAYER_CONSTRAINED_TILE_SETS = 161, + SEI_TYPE_BSP_NESTING = 162, + SEI_TYPE_BSP_INITIAL_ARRIVAL_TIME = 163, + SEI_TYPE_SUB_BITSTREAM_PROPERTY = 164, + SEI_TYPE_ALPHA_CHANNEL_INFO = 165, + SEI_TYPE_OVERLAY_INFO = 166, + SEI_TYPE_TEMPORAL_MV_PREDICTION_CONSTRAINTS = 167, + SEI_TYPE_FRAME_FIELD_INFO = 168, + SEI_TYPE_THREE_DIMENSIONAL_REFERENCE_DISPLAYS_INFO = 176, + SEI_TYPE_DEPTH_REPRESENTATION_INFO_5 = 177, + SEI_TYPE_MULTIVIEW_SCENE_INFO_5 = 178, + SEI_TYPE_MULTIVIEW_ACQUISITION_INFO_5 = 179, + SEI_TYPE_MULTIVIEW_VIEW_POSITION_5 = 180, + SEI_TYPE_ALTERNATIVE_DEPTH_INFO = 181, + SEI_TYPE_SEI_MANIFEST = 200, + SEI_TYPE_SEI_PREFIX_INDICATION = 201, + SEI_TYPE_ANNOTATED_REGIONS = 202, + SEI_TYPE_SUBPIC_LEVEL_INFO = 203, + SEI_TYPE_SAMPLE_ASPECT_RATIO_INFO = 204, }; #endif /* AVCODEC_SEI_H */ diff --git a/third-party/cbs/include/cbs/video_levels.h b/third-party/cbs/include/cbs/video_levels.h deleted file mode 100644 index 53ad277fa81..00000000000 --- a/third-party/cbs/include/cbs/video_levels.h +++ /dev/null @@ -1,112 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_H264_LEVELS_H -#define AVCODEC_H264_LEVELS_H - - -#include - -#include "cbs_h265.h" - -typedef struct H265LevelDescriptor { - const char *name; - uint8_t level_idc; - - // Table A.6. - uint32_t max_luma_ps; - uint32_t max_cpb_main; - uint32_t max_cpb_high; - uint16_t max_slice_segments_per_picture; - uint8_t max_tile_rows; - uint8_t max_tile_cols; - - // Table A.7. - uint32_t max_luma_sr; - uint32_t max_br_main; - uint32_t max_br_high; - uint8_t min_cr_base_main; - uint8_t min_cr_base_high; -} H265LevelDescriptor; - -typedef struct H265ProfileDescriptor { - const char *name; - uint8_t profile_idc; - uint8_t high_throughput; - - // Tables A.2, A.3 and A.5. - uint8_t max_14bit; - uint8_t max_12bit; - uint8_t max_10bit; - uint8_t max_8bit; - uint8_t max_422chroma; - uint8_t max_420chroma; - uint8_t max_monochrome; - uint8_t intra; - uint8_t one_picture_only; - uint8_t lower_bit_rate; - - // Table A.8. - uint16_t cpb_vcl_factor; - uint16_t cpb_nal_factor; - float format_capability_factor; - float min_cr_scale_factor; - uint8_t max_dpb_pic_buf; -} H265ProfileDescriptor; - -typedef struct H264LevelDescriptor { - const char *name; - uint8_t level_idc; - uint8_t constraint_set3_flag; - uint32_t max_mbps; - uint32_t max_fs; - uint32_t max_dpb_mbs; - uint32_t max_br; - uint32_t max_cpb; - uint16_t max_v_mv_r; - uint8_t min_cr; - uint8_t max_mvs_per_2mb; -} H264LevelDescriptor; - -const H265ProfileDescriptor *ff_h265_get_profile(const H265RawProfileTierLevel *ptl); - -/** - * Guess the level of a stream from some parameters. - * - * Unknown parameters may be zero, in which case they are ignored. 
- */ -const H265LevelDescriptor *ff_h265_guess_level(const H265RawProfileTierLevel *ptl, - int64_t bitrate, - int width, int height, - int slice_segments, - int tile_rows, int tile_cols, - int max_dec_pic_buffering); - -/** - * Guess the level of a stream from some parameters. - * - * Unknown parameters may be zero, in which case they are ignored. - */ -const H264LevelDescriptor *ff_h264_guess_level(int profile_idc, - int64_t bitrate, - int framerate, - int width, int height, - int max_dec_frame_buffering); - - -#endif /* AVCODEC_H264_LEVELS_H */ diff --git a/third-party/cbs/vlc.h b/third-party/cbs/include/cbs/vlc.h similarity index 52% rename from third-party/cbs/vlc.h rename to third-party/cbs/include/cbs/vlc.h index aaa21a9cb6f..e63c484755a 100644 --- a/third-party/cbs/vlc.h +++ b/third-party/cbs/include/cbs/vlc.h @@ -21,35 +21,39 @@ #include -#define VLC_TYPE int16_t +// When changing this, be sure to also update tableprint_vlc.h accordingly. +typedef int16_t VLCBaseType; + +typedef struct VLCElem { + VLCBaseType sym, len; +} VLCElem; typedef struct VLC { - int bits; - VLC_TYPE (*table) - [2]; ///< code, bits - int table_size, table_allocated; + int bits; + VLCElem *table; + int table_size, table_allocated; } VLC; typedef struct RL_VLC_ELEM { - int16_t level; - int8_t len; - uint8_t run; + int16_t level; + int8_t len; + uint8_t run; } RL_VLC_ELEM; -#define init_vlc(vlc, nb_bits, nb_codes, \ - bits, bits_wrap, bits_size, \ - codes, codes_wrap, codes_size, \ - flags) \ - ff_init_vlc_sparse(vlc, nb_bits, nb_codes, \ - bits, bits_wrap, bits_size, \ - codes, codes_wrap, codes_size, \ - NULL, 0, 0, flags) +#define init_vlc(vlc, nb_bits, nb_codes, \ + bits, bits_wrap, bits_size, \ + codes, codes_wrap, codes_size, \ + flags) \ + ff_init_vlc_sparse(vlc, nb_bits, nb_codes, \ + bits, bits_wrap, bits_size, \ + codes, codes_wrap, codes_size, \ + NULL, 0, 0, flags) int ff_init_vlc_sparse(VLC *vlc, int nb_bits, int nb_codes, - const void *bits, int bits_wrap, int bits_size, - 
const void *codes, int codes_wrap, int codes_size, - const void *symbols, int symbols_wrap, int symbols_size, - int flags); + const void *bits, int bits_wrap, int bits_size, + const void *codes, int codes_wrap, int codes_size, + const void *symbols, int symbols_wrap, int symbols_size, + int flags); /** * Build VLC decoding tables suitable for use with get_vlc2() @@ -81,60 +85,60 @@ int ff_init_vlc_sparse(VLC *vlc, int nb_bits, int nb_codes, * INIT_VLC_INPUT_LE is pointless and ignored. */ int ff_init_vlc_from_lengths(VLC *vlc, int nb_bits, int nb_codes, - const int8_t *lens, int lens_wrap, - const void *symbols, int symbols_wrap, int symbols_size, - int offset, int flags, void *logctx); + const int8_t *lens, int lens_wrap, + const void *symbols, int symbols_wrap, int symbols_size, + int offset, int flags, void *logctx); void ff_free_vlc(VLC *vlc); /* If INIT_VLC_INPUT_LE is set, the LSB bit of the codes used to * initialize the VLC table is the first bit to be read. */ -#define INIT_VLC_INPUT_LE 2 +#define INIT_VLC_INPUT_LE 2 /* If set the VLC is intended for a little endian bitstream reader. 
*/ -#define INIT_VLC_OUTPUT_LE 8 -#define INIT_VLC_LE (INIT_VLC_INPUT_LE | INIT_VLC_OUTPUT_LE) +#define INIT_VLC_OUTPUT_LE 8 +#define INIT_VLC_LE (INIT_VLC_INPUT_LE | INIT_VLC_OUTPUT_LE) #define INIT_VLC_USE_NEW_STATIC 4 #define INIT_VLC_STATIC_OVERLONG (1 | INIT_VLC_USE_NEW_STATIC) -#define INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - h, i, j, flags, static_size) \ - do { \ - static VLC_TYPE table[static_size][2]; \ - (vlc)->table = table; \ - (vlc)->table_allocated = static_size; \ - ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \ - flags | INIT_VLC_USE_NEW_STATIC); \ - } while(0) +#define INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ + h, i, j, flags, static_size) \ + do { \ + static VLCElem table[static_size]; \ + (vlc)->table = table; \ + (vlc)->table_allocated = static_size; \ + ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \ + flags | INIT_VLC_USE_NEW_STATIC); \ + } while (0) #define INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \ - INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - h, i, j, 0, static_size) + INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ + h, i, j, 0, static_size) #define INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \ - INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - h, i, j, INIT_VLC_LE, static_size) + INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ + h, i, j, INIT_VLC_LE, static_size) #define INIT_CUSTOM_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, flags, static_size) \ - INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - NULL, 0, 0, flags, static_size) + INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ + NULL, 0, 0, flags, static_size) -#define INIT_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ - INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) +#define 
INIT_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ + INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) #define INIT_LE_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ - INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) - -#define INIT_VLC_STATIC_FROM_LENGTHS(vlc, bits, nb_codes, lens, len_wrap, \ - symbols, symbols_wrap, symbols_size, \ - offset, flags, static_size) \ - do { \ - static VLC_TYPE table[static_size][2]; \ - (vlc)->table = table; \ - (vlc)->table_allocated = static_size; \ - ff_init_vlc_from_lengths(vlc, bits, nb_codes, lens, len_wrap, \ - symbols, symbols_wrap, symbols_size, \ - offset, flags | INIT_VLC_USE_NEW_STATIC, \ - NULL); \ - } while(0) + INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) + +#define INIT_VLC_STATIC_FROM_LENGTHS(vlc, bits, nb_codes, lens, len_wrap, \ + symbols, symbols_wrap, symbols_size, \ + offset, flags, static_size) \ + do { \ + static VLCElem table[static_size]; \ + (vlc)->table = table; \ + (vlc)->table_allocated = static_size; \ + ff_init_vlc_from_lengths(vlc, bits, nb_codes, lens, len_wrap, \ + symbols, symbols_wrap, symbols_size, \ + offset, flags | INIT_VLC_USE_NEW_STATIC, \ + NULL); \ + } while (0) #endif /* AVCODEC_VLC_H */ diff --git a/third-party/cbs/intmath.h b/third-party/cbs/intmath.h index 377b74127f2..8fdd2c88aa1 100644 --- a/third-party/cbs/intmath.h +++ b/third-party/cbs/intmath.h @@ -17,21 +17,30 @@ * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ - +// [manual] copied from libavutil #ifndef AVUTIL_INTMATH_H #define AVUTIL_INTMATH_H #include -#include +// [manual] Changed include path +#include "cbs/config.h" +#include "cbs/attributes.h" + +#if ARCH_ARM +# include "arm/intmath.h" +#endif +#if ARCH_X86 +# include "x86/intmath.h" +#endif #if HAVE_FAST_CLZ -#if AV_GCC_VERSION_AT_LEAST(3, 4) +#if 
AV_GCC_VERSION_AT_LEAST(3,4) #ifndef ff_log2 -#define ff_log2(x) (31 - __builtin_clz((x) | 1)) -#ifndef ff_log2_16bit -#define ff_log2_16bit av_log2 -#endif +# define ff_log2(x) (31 - __builtin_clz((x)|1)) +# ifndef ff_log2_16bit +# define ff_log2_16bit av_log2 +# endif #endif /* ff_log2 */ #endif /* AV_GCC_VERSION_AT_LEAST(3,4) */ #endif @@ -40,37 +49,39 @@ extern const uint8_t ff_log2_tab[256]; #ifndef ff_log2 #define ff_log2 ff_log2_c -static av_always_inline av_const int ff_log2_c(unsigned int v) { - int n = 0; - if(v & 0xffff0000) { - v >>= 16; - n += 16; - } - if(v & 0xff00) { - v >>= 8; - n += 8; - } - n += ff_log2_tab[v]; - - return n; +static av_always_inline av_const int ff_log2_c(unsigned int v) +{ + int n = 0; + if (v & 0xffff0000) { + v >>= 16; + n += 16; + } + if (v & 0xff00) { + v >>= 8; + n += 8; + } + n += ff_log2_tab[v]; + + return n; } #endif #ifndef ff_log2_16bit #define ff_log2_16bit ff_log2_16bit_c -static av_always_inline av_const int ff_log2_16bit_c(unsigned int v) { - int n = 0; - if(v & 0xff00) { - v >>= 8; - n += 8; - } - n += ff_log2_tab[v]; - - return n; +static av_always_inline av_const int ff_log2_16bit_c(unsigned int v) +{ + int n = 0; + if (v & 0xff00) { + v >>= 8; + n += 8; + } + n += ff_log2_tab[v]; + + return n; } #endif -#define av_log2 ff_log2 +#define av_log2 ff_log2 #define av_log2_16bit ff_log2_16bit /** @@ -79,7 +90,7 @@ static av_always_inline av_const int ff_log2_16bit_c(unsigned int v) { */ #if HAVE_FAST_CLZ -#if AV_GCC_VERSION_AT_LEAST(3, 4) +#if AV_GCC_VERSION_AT_LEAST(3,4) #ifndef ff_ctz #define ff_ctz(v) __builtin_ctz(v) #endif @@ -102,12 +113,13 @@ static av_always_inline av_const int ff_log2_16bit_c(unsigned int v) { */ /* We use the De-Bruijn method outlined in: * http://supertech.csail.mit.edu/papers/debruijn.pdf. 
*/ -static av_always_inline av_const int ff_ctz_c(int v) { - static const uint8_t debruijn_ctz32[32] = { - 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, - 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9 - }; - return debruijn_ctz32[(uint32_t)((v & -v) * 0x077CB531U) >> 27]; +static av_always_inline av_const int ff_ctz_c(int v) +{ + static const uint8_t debruijn_ctz32[32] = { + 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, + 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9 + }; + return debruijn_ctz32[(uint32_t)((v & -v) * 0x077CB531U) >> 27]; } #endif @@ -115,32 +127,34 @@ static av_always_inline av_const int ff_ctz_c(int v) { #define ff_ctzll ff_ctzll_c /* We use the De-Bruijn method outlined in: * http://supertech.csail.mit.edu/papers/debruijn.pdf. */ -static av_always_inline av_const int ff_ctzll_c(long long v) { - static const uint8_t debruijn_ctz64[64] = { - 0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28, - 62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11, - 63, 52, 6, 26, 37, 40, 33, 47, 61, 45, 43, 21, 23, 58, 17, 10, - 51, 25, 36, 32, 60, 20, 57, 16, 50, 31, 19, 15, 30, 14, 13, 12 - }; - return debruijn_ctz64[(uint64_t)((v & -v) * 0x022FDD63CC95386DU) >> 58]; +static av_always_inline av_const int ff_ctzll_c(long long v) +{ + static const uint8_t debruijn_ctz64[64] = { + 0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28, + 62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11, + 63, 52, 6, 26, 37, 40, 33, 47, 61, 45, 43, 21, 23, 58, 17, 10, + 51, 25, 36, 32, 60, 20, 57, 16, 50, 31, 19, 15, 30, 14, 13, 12 + }; + return debruijn_ctz64[(uint64_t)((v & -v) * 0x022FDD63CC95386DU) >> 58]; } #endif #ifndef ff_clz #define ff_clz ff_clz_c -static av_always_inline av_const unsigned ff_clz_c(unsigned x) { - unsigned i = sizeof(x) * 8; +static av_always_inline av_const unsigned ff_clz_c(unsigned x) +{ + unsigned i = sizeof(x) * 8; - while(x) { - x >>= 1; - i--; - } + while (x) { + x >>= 1; + 
i--; + } - return i; + return i; } #endif -#if AV_GCC_VERSION_AT_LEAST(3, 4) +#if AV_GCC_VERSION_AT_LEAST(3,4) #ifndef av_parity #define av_parity __builtin_parity #endif diff --git a/third-party/cbs/log2_tab.c b/third-party/cbs/log2_tab.c new file mode 100644 index 00000000000..576fd7edbdb --- /dev/null +++ b/third-party/cbs/log2_tab.c @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2003-2012 Michael Niedermayer + * + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ +// [manual] Copied from libavutil + +#include + +const uint8_t ff_log2_tab[256]={ + 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4, + 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5, + 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, + 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7 +}; diff --git a/third-party/cbs/mathops.h b/third-party/cbs/mathops.h deleted file mode 100644 index c0b8f3db2c1..00000000000 --- a/third-party/cbs/mathops.h +++ /dev/null @@ -1,243 +0,0 @@ -/* 
- * simple math operations - * Copyright (c) 2001, 2002 Fabrice Bellard - * Copyright (c) 2006 Michael Niedermayer et al - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ -#ifndef AVCODEC_MATHOPS_H -#define AVCODEC_MATHOPS_H - -#include "config.h" - -#include - -#include - -#define MAX_NEG_CROP 1024 - -extern const uint8_t ff_reverse[256]; -extern const uint32_t ff_inverse[257]; -extern const uint8_t ff_sqrt_tab[256]; -extern const uint8_t ff_crop_tab[256 + 2 * MAX_NEG_CROP]; -extern const uint8_t ff_zigzag_direct[64]; -extern const uint8_t ff_zigzag_scan[16 + 1]; - -#ifndef MUL64 -#define MUL64(a, b) ((int64_t)(a) * (int64_t)(b)) -#endif - -#ifndef MULL -#define MULL(a, b, s) (MUL64(a, b) >> (s)) -#endif - -#ifndef MULH -static av_always_inline int MULH(int a, int b) { - return MUL64(a, b) >> 32; -} -#endif - -#ifndef UMULH -static av_always_inline unsigned UMULH(unsigned a, unsigned b) { - return ((uint64_t)(a) * (uint64_t)(b)) >> 32; -} -#endif - -#ifndef MAC64 -#define MAC64(d, a, b) ((d) += MUL64(a, b)) -#endif - -#ifndef MLS64 -#define MLS64(d, a, b) ((d) -= MUL64(a, b)) -#endif - -/* signed 16x16 -> 32 multiply add accumulate */ -#ifndef MAC16 -#define MAC16(rt, ra, rb) rt += (ra) * (rb) -#endif - -/* signed 16x16 -> 32 multiply */ -#ifndef 
MUL16 -#define MUL16(ra, rb) ((ra) * (rb)) -#endif - -#ifndef MLS16 -#define MLS16(rt, ra, rb) ((rt) -= (ra) * (rb)) -#endif - -/* median of 3 */ -#ifndef mid_pred -#define mid_pred mid_pred -static inline av_const int mid_pred(int a, int b, int c) { - if(a > b) { - if(c > b) { - if(c > a) b = a; - else - b = c; - } - } - else { - if(b > c) { - if(c > a) b = c; - else - b = a; - } - } - return b; -} -#endif - -#ifndef median4 -#define median4 median4 -static inline av_const int median4(int a, int b, int c, int d) { - if(a < b) { - if(c < d) return (FFMIN(b, d) + FFMAX(a, c)) / 2; - else - return (FFMIN(b, c) + FFMAX(a, d)) / 2; - } - else { - if(c < d) return (FFMIN(a, d) + FFMAX(b, c)) / 2; - else - return (FFMIN(a, c) + FFMAX(b, d)) / 2; - } -} -#endif - -#ifndef sign_extend -static inline av_const int sign_extend(int val, unsigned bits) { - unsigned shift = 8 * sizeof(int) - bits; - union { - unsigned u; - int s; - } v = { (unsigned)val << shift }; - return v.s >> shift; -} -#endif - -#ifndef zero_extend -static inline av_const unsigned zero_extend(unsigned val, unsigned bits) { - return (val << ((8 * sizeof(int)) - bits)) >> ((8 * sizeof(int)) - bits); -} -#endif - -#ifndef COPY3_IF_LT -#define COPY3_IF_LT(x, y, a, b, c, d) \ - if((y) < (x)) { \ - (x) = (y); \ - (a) = (b); \ - (c) = (d); \ - } -#endif - -#ifndef MASK_ABS -#define MASK_ABS(mask, level) \ - do { \ - mask = level >> 31; \ - level = (level ^ mask) - mask; \ - } while(0) -#endif - -#ifndef NEG_SSR32 -#define NEG_SSR32(a, s) (((int32_t)(a)) >> (32 - (s))) -#endif - -#ifndef NEG_USR32 -#define NEG_USR32(a, s) (((uint32_t)(a)) >> (32 - (s))) -#endif - -#if HAVE_BIGENDIAN -#ifndef PACK_2U8 -#define PACK_2U8(a, b) (((a) << 8) | (b)) -#endif -#ifndef PACK_4U8 -#define PACK_4U8(a, b, c, d) (((a) << 24) | ((b) << 16) | ((c) << 8) | (d)) -#endif -#ifndef PACK_2U16 -#define PACK_2U16(a, b) (((a) << 16) | (b)) -#endif -#else -#ifndef PACK_2U8 -#define PACK_2U8(a, b) (((b) << 8) | (a)) -#endif -#ifndef PACK_4U2 
-#define PACK_4U8(a, b, c, d) (((d) << 24) | ((c) << 16) | ((b) << 8) | (a)) -#endif -#ifndef PACK_2U16 -#define PACK_2U16(a, b) (((b) << 16) | (a)) -#endif -#endif - -#ifndef PACK_2S8 -#define PACK_2S8(a, b) PACK_2U8((a)&255, (b)&255) -#endif -#ifndef PACK_4S8 -#define PACK_4S8(a, b, c, d) PACK_4U8((a)&255, (b)&255, (c)&255, (d)&255) -#endif -#ifndef PACK_2S16 -#define PACK_2S16(a, b) PACK_2U16((a)&0xffff, (b)&0xffff) -#endif - -#ifndef FASTDIV -#define FASTDIV(a, b) ((uint32_t)((((uint64_t)a) * ff_inverse[b]) >> 32)) -#endif /* FASTDIV */ - -#ifndef ff_sqrt -#define ff_sqrt ff_sqrt -static inline av_const unsigned int ff_sqrt(unsigned int a) { - unsigned int b; - - if(a < 255) return (ff_sqrt_tab[a + 1] - 1) >> 4; - else if(a < (1 << 12)) - b = ff_sqrt_tab[a >> 4] >> 2; -#if !CONFIG_SMALL - else if(a < (1 << 14)) - b = ff_sqrt_tab[a >> 6] >> 1; - else if(a < (1 << 16)) - b = ff_sqrt_tab[a >> 8]; -#endif - else { - int s = av_log2_16bit(a >> 16) >> 1; - unsigned int c = a >> (s + 2); - b = ff_sqrt_tab[c >> (s + 8)]; - b = FASTDIV(c, b) + (b << s); - } - - return b - (a < b * b); -} -#endif - -static inline av_const float ff_sqrf(float a) { - return a * a; -} - -static inline int8_t ff_u8_to_s8(uint8_t a) { - union { - uint8_t u8; - int8_t s8; - } b; - b.u8 = a; - return b.s8; -} - -static av_always_inline uint32_t bitswap_32(uint32_t x) { - return (uint32_t)ff_reverse[x & 0xFF] << 24 | - (uint32_t)ff_reverse[(x >> 8) & 0xFF] << 16 | - (uint32_t)ff_reverse[(x >> 16) & 0xFF] << 8 | - (uint32_t)ff_reverse[x >> 24]; -} - -#endif /* AVCODEC_MATHOPS_H */ diff --git a/third-party/cbs/put_bits.h b/third-party/cbs/put_bits.h index 5d3f96c4bd1..43409afdd34 100644 --- a/third-party/cbs/put_bits.h +++ b/third-party/cbs/put_bits.h @@ -26,15 +26,17 @@ #ifndef AVCODEC_PUT_BITS_H #define AVCODEC_PUT_BITS_H -#include "config.h" - -#include #include +#include -#include -#include +// [manual] Changed include path +#include "cbs/config.h" +#include "libavutil/intreadwrite.h" 
+#include "libavutil/avassert.h" +#include "libavutil/common.h" -#if HAVE_FAST_64BIT +#if ARCH_X86_64 +// TODO: Benchmark and optionally enable on other 64-bit architectures. typedef uint64_t BitBuf; #define AV_WBBUF AV_WB64 #define AV_WLBUF AV_WL64 @@ -47,9 +49,9 @@ typedef uint32_t BitBuf; static const int BUF_BITS = 8 * sizeof(BitBuf); typedef struct PutBitContext { - BitBuf bit_buf; - int bit_left; - uint8_t *buf, *buf_ptr, *buf_end; + BitBuf bit_buf; + int bit_left; + uint8_t *buf, *buf_ptr, *buf_end; } PutBitContext; /** @@ -59,33 +61,36 @@ typedef struct PutBitContext { * @param buffer_size the size in bytes of buffer */ static inline void init_put_bits(PutBitContext *s, uint8_t *buffer, - int buffer_size) { - if(buffer_size < 0) { - buffer_size = 0; - buffer = NULL; - } - - s->buf = buffer; - s->buf_end = s->buf + buffer_size; - s->buf_ptr = s->buf; - s->bit_left = BUF_BITS; - s->bit_buf = 0; + int buffer_size) +{ + if (buffer_size < 0) { + buffer_size = 0; + buffer = NULL; + } + + s->buf = buffer; + s->buf_end = s->buf + buffer_size; + s->buf_ptr = s->buf; + s->bit_left = BUF_BITS; + s->bit_buf = 0; } /** * @return the total number of bits written to the bitstream. */ -static inline int put_bits_count(PutBitContext *s) { - return (s->buf_ptr - s->buf) * 8 + BUF_BITS - s->bit_left; +static inline int put_bits_count(PutBitContext *s) +{ + return (s->buf_ptr - s->buf) * 8 + BUF_BITS - s->bit_left; } /** * @return the number of bytes output so far; may only be called * when the PutBitContext is freshly initialized or flushed. */ -static inline int put_bytes_output(const PutBitContext *s) { - av_assert2(s->bit_left == BUF_BITS); - return s->buf_ptr - s->buf; +static inline int put_bytes_output(const PutBitContext *s) +{ + av_assert2(s->bit_left == BUF_BITS); + return s->buf_ptr - s->buf; } /** @@ -93,8 +98,9 @@ static inline int put_bytes_output(const PutBitContext *s) { * rounded up to the next byte. * @return the number of bytes output so far. 
*/ -static inline int put_bytes_count(const PutBitContext *s, int round_up) { - return s->buf_ptr - s->buf + ((BUF_BITS - s->bit_left + (round_up ? 7 : 0)) >> 3); +static inline int put_bytes_count(const PutBitContext *s, int round_up) +{ + return s->buf_ptr - s->buf + ((BUF_BITS - s->bit_left + (round_up ? 7 : 0)) >> 3); } /** @@ -105,19 +111,21 @@ static inline int put_bytes_count(const PutBitContext *s, int round_up) { * must be large enough to hold everything written so far */ static inline void rebase_put_bits(PutBitContext *s, uint8_t *buffer, - int buffer_size) { - av_assert0(8 * buffer_size >= put_bits_count(s)); + int buffer_size) +{ + av_assert0(8*buffer_size >= put_bits_count(s)); - s->buf_end = buffer + buffer_size; - s->buf_ptr = buffer + (s->buf_ptr - s->buf); - s->buf = buffer; + s->buf_end = buffer + buffer_size; + s->buf_ptr = buffer + (s->buf_ptr - s->buf); + s->buf = buffer; } /** * @return the number of bits available in the bitstream. */ -static inline int put_bits_left(PutBitContext *s) { - return (s->buf_end - s->buf_ptr) * 8 - BUF_BITS + s->bit_left; +static inline int put_bits_left(PutBitContext* s) +{ + return (s->buf_end - s->buf_ptr) * 8 - BUF_BITS + s->bit_left; } /** @@ -125,42 +133,45 @@ static inline int put_bits_left(PutBitContext *s) { * rounded up to the next byte. * @return the number of bytes left. */ -static inline int put_bytes_left(const PutBitContext *s, int round_up) { - return s->buf_end - s->buf_ptr - ((BUF_BITS - s->bit_left + (round_up ? 7 : 0)) >> 3); +static inline int put_bytes_left(const PutBitContext *s, int round_up) +{ + return s->buf_end - s->buf_ptr - ((BUF_BITS - s->bit_left + (round_up ? 7 : 0)) >> 3); } /** * Pad the end of the output stream with zeros. 
*/ -static inline void flush_put_bits(PutBitContext *s) { +static inline void flush_put_bits(PutBitContext *s) +{ #ifndef BITSTREAM_WRITER_LE - if(s->bit_left < BUF_BITS) - s->bit_buf <<= s->bit_left; + if (s->bit_left < BUF_BITS) + s->bit_buf <<= s->bit_left; #endif - while(s->bit_left < BUF_BITS) { - av_assert0(s->buf_ptr < s->buf_end); + while (s->bit_left < BUF_BITS) { + av_assert0(s->buf_ptr < s->buf_end); #ifdef BITSTREAM_WRITER_LE - *s->buf_ptr++ = s->bit_buf; - s->bit_buf >>= 8; + *s->buf_ptr++ = s->bit_buf; + s->bit_buf >>= 8; #else - *s->buf_ptr++ = s->bit_buf >> (BUF_BITS - 8); - s->bit_buf <<= 8; + *s->buf_ptr++ = s->bit_buf >> (BUF_BITS - 8); + s->bit_buf <<= 8; #endif - s->bit_left += 8; - } - s->bit_left = BUF_BITS; - s->bit_buf = 0; + s->bit_left += 8; + } + s->bit_left = BUF_BITS; + s->bit_buf = 0; } -static inline void flush_put_bits_le(PutBitContext *s) { - while(s->bit_left < BUF_BITS) { - av_assert0(s->buf_ptr < s->buf_end); - *s->buf_ptr++ = s->bit_buf; - s->bit_buf >>= 8; - s->bit_left += 8; - } - s->bit_left = BUF_BITS; - s->bit_buf = 0; +static inline void flush_put_bits_le(PutBitContext *s) +{ + while (s->bit_left < BUF_BITS) { + av_assert0(s->buf_ptr < s->buf_end); + *s->buf_ptr++ = s->bit_buf; + s->bit_buf >>= 8; + s->bit_left += 8; + } + s->bit_left = BUF_BITS; + s->bit_buf = 0; } #ifdef BITSTREAM_WRITER_LE @@ -174,7 +185,7 @@ static inline void flush_put_bits_le(PutBitContext *s) { * @param terminate_string 0-terminates the written string if value is 1 */ void ff_put_string(PutBitContext *pb, const char *string, - int terminate_string); + int terminate_string); /** * Copy the content of src to the bitstream. 
@@ -184,192 +195,194 @@ void ff_put_string(PutBitContext *pb, const char *string, void ff_copy_bits(PutBitContext *pb, const uint8_t *src, int length); #endif -static inline void put_bits_no_assert(PutBitContext *s, int n, BitBuf value) { - BitBuf bit_buf; - int bit_left; +static inline void put_bits_no_assert(PutBitContext *s, int n, BitBuf value) +{ + BitBuf bit_buf; + int bit_left; - bit_buf = s->bit_buf; - bit_left = s->bit_left; + bit_buf = s->bit_buf; + bit_left = s->bit_left; - /* XXX: optimize */ + /* XXX: optimize */ #ifdef BITSTREAM_WRITER_LE - bit_buf |= value << (BUF_BITS - bit_left); - if(n >= bit_left) { - if(s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WLBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } - else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); + bit_buf |= value << (BUF_BITS - bit_left); + if (n >= bit_left) { + if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { + AV_WLBUF(s->buf_ptr, bit_buf); + s->buf_ptr += sizeof(BitBuf); + } else { + av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); + av_assert2(0); + } + bit_buf = value >> bit_left; + bit_left += BUF_BITS; } - bit_buf = value >> bit_left; - bit_left += BUF_BITS; - } - bit_left -= n; -#else - if(n < bit_left) { - bit_buf = (bit_buf << n) | value; bit_left -= n; - } - else { - bit_buf <<= bit_left; - bit_buf |= value >> (n - bit_left); - if(s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WBBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } - else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); +#else + if (n < bit_left) { + bit_buf = (bit_buf << n) | value; + bit_left -= n; + } else { + bit_buf <<= bit_left; + bit_buf |= value >> (n - bit_left); + if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { + AV_WBBUF(s->buf_ptr, bit_buf); + s->buf_ptr += sizeof(BitBuf); + } else { + av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too 
small\n"); + av_assert2(0); + } + bit_left += BUF_BITS - n; + bit_buf = value; } - bit_left += BUF_BITS - n; - bit_buf = value; - } #endif - s->bit_buf = bit_buf; - s->bit_left = bit_left; + s->bit_buf = bit_buf; + s->bit_left = bit_left; } /** * Write up to 31 bits into a bitstream. * Use put_bits32 to write 32 bits. */ -static inline void put_bits(PutBitContext *s, int n, BitBuf value) { - av_assert2(n <= 31 && value < (1UL << n)); - put_bits_no_assert(s, n, value); +static inline void put_bits(PutBitContext *s, int n, BitBuf value) +{ + av_assert2(n <= 31 && value < (1UL << n)); + put_bits_no_assert(s, n, value); } -static inline void put_bits_le(PutBitContext *s, int n, BitBuf value) { - BitBuf bit_buf; - int bit_left; - - av_assert2(n <= 31 && value < (1UL << n)); - - bit_buf = s->bit_buf; - bit_left = s->bit_left; - - bit_buf |= value << (BUF_BITS - bit_left); - if(n >= bit_left) { - if(s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WLBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); +static inline void put_bits_le(PutBitContext *s, int n, BitBuf value) +{ + BitBuf bit_buf; + int bit_left; + + av_assert2(n <= 31 && value < (1UL << n)); + + bit_buf = s->bit_buf; + bit_left = s->bit_left; + + bit_buf |= value << (BUF_BITS - bit_left); + if (n >= bit_left) { + if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { + AV_WLBUF(s->buf_ptr, bit_buf); + s->buf_ptr += sizeof(BitBuf); + } else { + av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); + av_assert2(0); + } + bit_buf = value >> bit_left; + bit_left += BUF_BITS; } - else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = value >> bit_left; - bit_left += BUF_BITS; - } - bit_left -= n; + bit_left -= n; - s->bit_buf = bit_buf; - s->bit_left = bit_left; + s->bit_buf = bit_buf; + s->bit_left = bit_left; } -static inline void put_sbits(PutBitContext *pb, int n, int32_t value) { - av_assert2(n >= 0 && n <= 31); +static inline 
void put_sbits(PutBitContext *pb, int n, int32_t value) +{ + av_assert2(n >= 0 && n <= 31); - put_bits(pb, n, av_mod_uintp2(value, n)); + put_bits(pb, n, av_mod_uintp2(value, n)); } /** * Write exactly 32 bits into a bitstream. */ -static void av_unused put_bits32(PutBitContext *s, uint32_t value) { - BitBuf bit_buf; - int bit_left; - - if(BUF_BITS > 32) { - put_bits_no_assert(s, 32, value); - return; - } +static void av_unused put_bits32(PutBitContext *s, uint32_t value) +{ + BitBuf bit_buf; + int bit_left; + + if (BUF_BITS > 32) { + put_bits_no_assert(s, 32, value); + return; + } - bit_buf = s->bit_buf; - bit_left = s->bit_left; + bit_buf = s->bit_buf; + bit_left = s->bit_left; #ifdef BITSTREAM_WRITER_LE - bit_buf |= (BitBuf)value << (BUF_BITS - bit_left); - if(s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WLBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } - else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = (uint64_t)value >> bit_left; + bit_buf |= (BitBuf)value << (BUF_BITS - bit_left); + if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { + AV_WLBUF(s->buf_ptr, bit_buf); + s->buf_ptr += sizeof(BitBuf); + } else { + av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); + av_assert2(0); + } + bit_buf = (uint64_t)value >> bit_left; #else - bit_buf = (uint64_t)bit_buf << bit_left; - bit_buf |= (BitBuf)value >> (BUF_BITS - bit_left); - if(s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WBBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } - else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = value; + bit_buf = (uint64_t)bit_buf << bit_left; + bit_buf |= (BitBuf)value >> (BUF_BITS - bit_left); + if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { + AV_WBBUF(s->buf_ptr, bit_buf); + s->buf_ptr += sizeof(BitBuf); + } else { + av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too 
small\n"); + av_assert2(0); + } + bit_buf = value; #endif - s->bit_buf = bit_buf; - s->bit_left = bit_left; + s->bit_buf = bit_buf; + s->bit_left = bit_left; } /** * Write up to 64 bits into a bitstream. */ -static inline void put_bits64(PutBitContext *s, int n, uint64_t value) { - av_assert2((n == 64) || (n < 64 && value < (UINT64_C(1) << n))); - - if(n < 32) - put_bits(s, n, value); - else if(n == 32) - put_bits32(s, value); - else if(n < 64) { - uint32_t lo = value & 0xffffffff; - uint32_t hi = value >> 32; +static inline void put_bits64(PutBitContext *s, int n, uint64_t value) +{ + av_assert2((n == 64) || (n < 64 && value < (UINT64_C(1) << n))); + + if (n < 32) + put_bits(s, n, value); + else if (n == 32) + put_bits32(s, value); + else if (n < 64) { + uint32_t lo = value & 0xffffffff; + uint32_t hi = value >> 32; #ifdef BITSTREAM_WRITER_LE - put_bits32(s, lo); - put_bits(s, n - 32, hi); + put_bits32(s, lo); + put_bits(s, n - 32, hi); #else - put_bits(s, n - 32, hi); - put_bits32(s, lo); + put_bits(s, n - 32, hi); + put_bits32(s, lo); #endif - } - else { - uint32_t lo = value & 0xffffffff; - uint32_t hi = value >> 32; + } else { + uint32_t lo = value & 0xffffffff; + uint32_t hi = value >> 32; #ifdef BITSTREAM_WRITER_LE - put_bits32(s, lo); - put_bits32(s, hi); + put_bits32(s, lo); + put_bits32(s, hi); #else - put_bits32(s, hi); - put_bits32(s, lo); + put_bits32(s, hi); + put_bits32(s, lo); #endif - } + + } } /** * Return the pointer to the byte where the bitstream writer will put * the next bit. */ -static inline uint8_t *put_bits_ptr(PutBitContext *s) { - return s->buf_ptr; +static inline uint8_t *put_bits_ptr(PutBitContext *s) +{ + return s->buf_ptr; } /** * Skip the given number of bytes. * PutBitContext must be flushed & aligned to a byte boundary before calling this. 
*/ -static inline void skip_put_bytes(PutBitContext *s, int n) { - av_assert2((put_bits_count(s) & 7) == 0); - av_assert2(s->bit_left == BUF_BITS); - av_assert0(n <= s->buf_end - s->buf_ptr); - s->buf_ptr += n; +static inline void skip_put_bytes(PutBitContext *s, int n) +{ + av_assert2((put_bits_count(s) & 7) == 0); + av_assert2(s->bit_left == BUF_BITS); + av_assert0(n <= s->buf_end - s->buf_ptr); + s->buf_ptr += n; } /** @@ -377,10 +390,11 @@ static inline void skip_put_bytes(PutBitContext *s, int n) { * Must only be used if the actual values in the bitstream do not matter. * If n is < 0 the behavior is undefined. */ -static inline void skip_put_bits(PutBitContext *s, int n) { - unsigned bits = BUF_BITS - s->bit_left + n; - s->buf_ptr += sizeof(BitBuf) * (bits / BUF_BITS); - s->bit_left = BUF_BITS - (bits & (BUF_BITS - 1)); +static inline void skip_put_bits(PutBitContext *s, int n) +{ + unsigned bits = BUF_BITS - s->bit_left + n; + s->buf_ptr += sizeof(BitBuf) * (bits / BUF_BITS); + s->bit_left = BUF_BITS - (bits & (BUF_BITS - 1)); } /** @@ -388,16 +402,18 @@ static inline void skip_put_bits(PutBitContext *s, int n) { * * @param size the new size in bytes of the buffer where to put bits */ -static inline void set_put_bits_buffer_size(PutBitContext *s, int size) { - av_assert0(size <= INT_MAX / 8 - BUF_BITS); - s->buf_end = s->buf + size; +static inline void set_put_bits_buffer_size(PutBitContext *s, int size) +{ + av_assert0(size <= INT_MAX/8 - BUF_BITS); + s->buf_end = s->buf + size; } /** * Pad the bitstream with zeros up to the next byte boundary. 
*/ -static inline void align_put_bits(PutBitContext *s) { - put_bits(s, s->bit_left & 7, 0); +static inline void align_put_bits(PutBitContext *s) +{ + put_bits(s, s->bit_left & 7, 0); } #undef AV_WBBUF diff --git a/third-party/cbs/startcode.h b/third-party/cbs/startcode.h new file mode 100644 index 00000000000..8b75832aaf7 --- /dev/null +++ b/third-party/cbs/startcode.h @@ -0,0 +1,36 @@ +/* + * This file is part of FFmpeg. + * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +/** + * @file + * Accelerated start code search function for start codes common to + * MPEG-1/2/4 video, VC-1, H.264/5 + */ + +#ifndef AVCODEC_STARTCODE_H +#define AVCODEC_STARTCODE_H + +#include + +const uint8_t *avpriv_find_start_code(const uint8_t *p, + const uint8_t *end, + uint32_t *state); + +int ff_startcode_find_candidate_c(const uint8_t *buf, int size); + +#endif /* AVCODEC_STARTCODE_H */ diff --git a/third-party/cbs/version_major.h b/third-party/cbs/version_major.h new file mode 100644 index 00000000000..1e23ed5e03e --- /dev/null +++ b/third-party/cbs/version_major.h @@ -0,0 +1,54 @@ +/* + * This file is part of FFmpeg. 
+ * + * FFmpeg is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * FFmpeg is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with FFmpeg; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#ifndef AVCODEC_VERSION_MAJOR_H +#define AVCODEC_VERSION_MAJOR_H + +/** + * @file + * @ingroup libavc + * Libavcodec version macros. + */ + +#define LIBAVCODEC_VERSION_MAJOR 59 + +/** + * FF_API_* defines may be placed below to indicate public API that will be + * dropped at a future version bump. The defines themselves are not part of + * the public API and may change, break or disappear at any time. + * + * @note, when bumping the major version it is recommended to manually + * disable each FF_API_* in its own commit instead of disabling them all + * at once through the bump. This improves the git bisect-ability of the change. 
+ */ + +#define FF_API_OPENH264_SLICE_MODE (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_OPENH264_CABAC (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_UNUSED_CODEC_CAPS (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_THREAD_SAFE_CALLBACKS (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_DEBUG_MV (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_GET_FRAME_CLASS (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_AUTO_THREADS (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_INIT_PACKET (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_AVCTX_TIMEBASE (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_FLAG_TRUNCATED (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_SUB_TEXT_FORMAT (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_IDCT_NONE (LIBAVCODEC_VERSION_MAJOR < 60) +#define FF_API_SVTAV1_OPTS (LIBAVCODEC_VERSION_MAJOR < 60) + +#endif /* AVCODEC_VERSION_MAJOR_H */ diff --git a/third-party/cbs/video_levels.c b/third-party/cbs/video_levels.c deleted file mode 100644 index 24143dfcfca..00000000000 --- a/third-party/cbs/video_levels.c +++ /dev/null @@ -1,349 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include - -#include "include/cbs/video_levels.h" - -// H.264 table A-1. 
-static const H264LevelDescriptor h264_levels[] = { - // Name MaxMBPS MaxBR MinCR - // | level_idc | MaxFS | MaxCPB | MaxMvsPer2Mb - // | | cs3f | | MaxDpbMbs | | MaxVmvR | | - { "1", 10, 0, 1485, 99, 396, 64, 175, 64, 2, 0 }, - { "1b", 11, 1, 1485, 99, 396, 128, 350, 64, 2, 0 }, - { "1b", 9, 0, 1485, 99, 396, 128, 350, 64, 2, 0 }, - { "1.1", 11, 0, 3000, 396, 900, 192, 500, 128, 2, 0 }, - { "1.2", 12, 0, 6000, 396, 2376, 384, 1000, 128, 2, 0 }, - { "1.3", 13, 0, 11880, 396, 2376, 768, 2000, 128, 2, 0 }, - { "2", 20, 0, 11880, 396, 2376, 2000, 2000, 128, 2, 0 }, - { "2.1", 21, 0, 19800, 792, 4752, 4000, 4000, 256, 2, 0 }, - { "2.2", 22, 0, 20250, 1620, 8100, 4000, 4000, 256, 2, 0 }, - { "3", 30, 0, 40500, 1620, 8100, 10000, 10000, 256, 2, 32 }, - { "3.1", 31, 0, 108000, 3600, 18000, 14000, 14000, 512, 4, 16 }, - { "3.2", 32, 0, 216000, 5120, 20480, 20000, 20000, 512, 4, 16 }, - { "4", 40, 0, 245760, 8192, 32768, 20000, 25000, 512, 4, 16 }, - { "4.1", 41, 0, 245760, 8192, 32768, 50000, 62500, 512, 2, 16 }, - { "4.2", 42, 0, 522240, 8704, 34816, 50000, 62500, 512, 2, 16 }, - { "5", 50, 0, 589824, 22080, 110400, 135000, 135000, 512, 2, 16 }, - { "5.1", 51, 0, 983040, 36864, 184320, 240000, 240000, 512, 2, 16 }, - { "5.2", 52, 0, 2073600, 36864, 184320, 240000, 240000, 512, 2, 16 }, - { "6", 60, 0, 4177920, 139264, 696320, 240000, 240000, 8192, 2, 16 }, - { "6.1", 61, 0, 8355840, 139264, 696320, 480000, 480000, 8192, 2, 16 }, - { "6.2", 62, 0, 16711680, 139264, 696320, 800000, 800000, 8192, 2, 16 }, -}; - -// H.264 table A-2 plus values from A-1. -static const struct { - int profile_idc; - int cpb_br_vcl_factor; - int cpb_br_nal_factor; -} h264_br_factors[] = { - { 66, 1000, 1200 }, - { 77, 1000, 1200 }, - { 88, 1000, 1200 }, - { 100, 1250, 1500 }, - { 110, 3000, 3600 }, - { 122, 4000, 4800 }, - { 244, 4000, 4800 }, - { 44, 4000, 4800 }, -}; - -// We are only ever interested in the NAL bitrate factor. 
-static int h264_get_br_factor(int profile_idc) { - int i; - for(i = 0; i < FF_ARRAY_ELEMS(h264_br_factors); i++) { - if(h264_br_factors[i].profile_idc == profile_idc) - return h264_br_factors[i].cpb_br_nal_factor; - } - // Default to the non-high profile value if not specified. - return 1200; -} - -const H264LevelDescriptor *ff_h264_guess_level(int profile_idc, - int64_t bitrate, - int framerate, - int width, int height, - int max_dec_frame_buffering) { - int width_mbs = (width + 15) / 16; - int height_mbs = (height + 15) / 16; - int no_cs3f = !(profile_idc == 66 || - profile_idc == 77 || - profile_idc == 88); - int i; - - for(i = 0; i < FF_ARRAY_ELEMS(h264_levels); i++) { - const H264LevelDescriptor *level = &h264_levels[i]; - - if(level->constraint_set3_flag && no_cs3f) - continue; - - if(bitrate > (int64_t)level->max_br * h264_get_br_factor(profile_idc)) - continue; - - if(width_mbs * height_mbs > level->max_fs) - continue; - if(width_mbs * width_mbs > 8 * level->max_fs) - continue; - if(height_mbs * height_mbs > 8 * level->max_fs) - continue; - - if(width_mbs && height_mbs) { - int max_dpb_frames = - FFMIN(level->max_dpb_mbs / (width_mbs * height_mbs), 16); - if(max_dec_frame_buffering > max_dpb_frames) - continue; - - if(framerate > (level->max_mbps / (width_mbs * height_mbs))) - continue; - } - - return level; - } - - // No usable levels found - frame is too big or bitrate is too high. 
- return NULL; -} - -static const H265LevelDescriptor h265_levels[] = { - // Name CpbFactor-Main MaxSliceSegmentsPerPicture - // | level_idc | CpbFactor-High MaxLumaSr BrFactor-High - // | | MaxLumaPs | | | MaxTileRows | BrFactor-Main | MinCr-Main - // | | | | | | | MaxTileCols | | | MinCr-High - { "1", 30, 36864, 350, 0, 16, 1, 1, 552960, 128, 0, 2, 2 }, - { "2", 60, 122880, 1500, 0, 16, 1, 1, 3686400, 1500, 0, 2, 2 }, - { "2.1", 63, 245760, 3000, 0, 20, 1, 1, 7372800, 3000, 0, 2, 2 }, - { "3", 90, 552960, 6000, 0, 30, 2, 2, 16588800, 6000, 0, 2, 2 }, - { "3.1", 93, 983040, 10000, 0, 40, 3, 3, 33177600, 10000, 0, 2, 2 }, - { "4", 120, 2228224, 12000, 30000, 75, 5, 5, 66846720, 12000, 30000, 4, 4 }, - { "4.1", 123, 2228224, 20000, 50000, 75, 5, 5, 133693440, 20000, 50000, 4, 4 }, - { "5", 150, 8912896, 25000, 100000, 200, 11, 10, 267386880, 25000, 100000, 6, 4 }, - { "5.1", 153, 8912896, 40000, 160000, 200, 11, 10, 534773760, 40000, 160000, 8, 4 }, - { "5.2", 156, 8912896, 60000, 240000, 200, 11, 10, 1069547520, 60000, 240000, 8, 4 }, - { "6", 180, 35651584, 60000, 240000, 600, 22, 20, 1069547520, 60000, 240000, 8, 4 }, - { "6.1", 183, 35651584, 120000, 480000, 600, 22, 20, 2139095040, 120000, 480000, 8, 4 }, - { "6.2", 186, 35651584, 240000, 800000, 600, 22, 20, 4278190080, 240000, 800000, 6, 4 }, -}; - -static const H265ProfileDescriptor h265_profiles[] = { - // profile_idc 8bit one-picture - // HT-profile | 422chroma | lower-bit-rate - // | 14bit | | 420chroma | | CpbVclFactor MinCrScaleFactor - // | | 12bit | | | monochrome| | CpbNalFactor | maxDpbPicBuf - // | | | 10bit | | | intra | | | FormatCapabilityFactor - { "Monochrome", // | | | | | | | | | | | - 4, 0, 2, 1, 1, 1, 1, 1, 1, 0, 0, 1, 667, 733, 1.000, 1.0, 6 }, - { "Monochrome 10", - 4, 0, 2, 1, 1, 0, 1, 1, 1, 0, 0, 1, 833, 917, 1.250, 1.0, 6 }, - { "Monochrome 12", - 4, 0, 2, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1000, 1100, 1.500, 1.0, 6 }, - { "Monochrome 16", - 4, 0, 2, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1333, 1467, 
2.000, 1.0, 6 }, - { "Main", - 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1000, 1100, 1.500, 1.0, 6 }, - { "Screen-Extended Main", - 9, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1000, 1100, 1.500, 1.0, 7 }, - { "Main 10", - 2, 0, 2, 2, 2, 2, 2, 2, 2, 2, 0, 2, 1000, 1100, 1.875, 1.0, 6 }, - { "Screen-Extended Main 10", - 9, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1000, 1100, 1.875, 1.0, 7 }, - { "Main 12", - 4, 0, 2, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1500, 1650, 2.250, 1.0, 6 }, - { "Main Still Picture", - 3, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1000, 1100, 1.500, 1.0, 6 }, - { "Main 10 Still Picture", - 2, 0, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1000, 1100, 1.875, 1.0, 6 }, - { "Main 4:2:2 10", - 4, 0, 2, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1667, 1833, 2.500, 0.5, 6 }, - { "Main 4:2:2 12", - 4, 0, 2, 1, 0, 0, 1, 0, 0, 0, 0, 1, 2000, 2200, 3.000, 0.5, 6 }, - { "Main 4:4:4", - 4, 0, 2, 1, 1, 1, 0, 0, 0, 0, 0, 1, 2000, 2200, 3.000, 0.5, 6 }, - { "High Throughput 4:4:4", - 5, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 2000, 2200, 3.000, 0.5, 6 }, - { "Screen-Extended Main 4:4:4", - 9, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 2000, 2200, 3.000, 0.5, 7 }, - { "Screen-Extended High Throughput 4:4:4", - 9, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 2000, 2200, 3.000, 0.5, 7 }, - { "Main 4:4:4 10", - 4, 0, 2, 1, 1, 0, 0, 0, 0, 0, 0, 1, 2500, 2750, 3.750, 0.5, 6 }, - { "High Throughput 4:4:4 10", - 5, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 2500, 2750, 3.750, 0.5, 6 }, - { "Screen-Extended Main 4:4:4 10", - 9, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 2500, 2750, 3.750, 0.5, 7 }, - { "Screen-Extended High Throughput 4:4:4 10", - 9, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 2500, 2750, 3.750, 0.5, 7 }, - { "Main 4:4:4 12", - 4, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3000, 3300, 4.500, 0.5, 6 }, - { "High Throughput 4:4:4 14", - 5, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3500, 3850, 5.250, 0.5, 6 }, - { "Screen-Extended High Throughput 4:4:4 14", - 9, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3500, 3850, 5.250, 0.5, 7 }, - { "Main Intra", - 4, 0, 2, 1, 1, 1, 1, 1, 0, 1, 0, 2, 1000, 1100, 1.500, 1.0, 
6 }, - { "Main 10 Intra", - 4, 0, 2, 1, 1, 0, 1, 1, 0, 1, 0, 2, 1000, 1100, 1.875, 1.0, 6 }, - { "Main 12 Intra", - 4, 0, 2, 1, 0, 0, 1, 1, 0, 1, 0, 2, 1500, 1650, 2.250, 1.0, 6 }, - { "Main 4:2:2 10 Intra", - 4, 0, 2, 1, 1, 0, 1, 0, 0, 1, 0, 2, 1667, 1833, 2.500, 0.5, 6 }, - { "Main 4:2:2 12 Intra", - 4, 0, 2, 1, 0, 0, 1, 0, 0, 1, 0, 2, 2000, 2200, 3.000, 0.5, 6 }, - { "Main 4:4:4 Intra", - 4, 0, 2, 1, 1, 1, 0, 0, 0, 1, 0, 2, 2000, 2200, 3.000, 0.5, 6 }, - { "Main 4:4:4 10 Intra", - 4, 0, 2, 1, 1, 0, 0, 0, 0, 1, 0, 2, 2500, 2750, 3.750, 0.5, 6 }, - { "Main 4:4:4 12 Intra", - 4, 0, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 3000, 3300, 4.500, 0.5, 6 }, - { "Main 4:4:4 16 Intra", - 4, 0, 2, 0, 0, 0, 0, 0, 0, 1, 0, 2, 4000, 4400, 6.000, 0.5, 6 }, - { "Main 4:4:4 Still Picture", - 4, 0, 2, 1, 1, 1, 0, 0, 0, 1, 1, 2, 2000, 2200, 3.000, 0.5, 6 }, - { "Main 4:4:4 16 Still Picture", - 4, 0, 2, 0, 0, 0, 0, 0, 0, 1, 1, 2, 4000, 4400, 6.000, 0.5, 6 }, - { "High Throughput 4:4:4 16 Intra", - 5, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 4000, 4400, 6.000, 0.5, 6 }, -}; - - -const H265ProfileDescriptor *ff_h265_get_profile(const H265RawProfileTierLevel *ptl) { - int i; - - if(ptl->general_profile_space) - return NULL; - - for(i = 0; i < FF_ARRAY_ELEMS(h265_profiles); i++) { - const H265ProfileDescriptor *profile = &h265_profiles[i]; - - if(ptl->general_profile_idc && - ptl->general_profile_idc != profile->profile_idc) - continue; - if(!ptl->general_profile_compatibility_flag[profile->profile_idc]) - continue; - -#define check_flag(name) \ - if(profile->name < 2) { \ - if(profile->name != ptl->general_##name##_constraint_flag) \ - continue; \ - } - check_flag(max_14bit); - check_flag(max_12bit); - check_flag(max_10bit); - check_flag(max_8bit); - check_flag(max_422chroma); - check_flag(max_420chroma); - check_flag(max_monochrome); - check_flag(intra); - check_flag(one_picture_only); - check_flag(lower_bit_rate); -#undef check_flag - - return profile; - } - - return NULL; -} - -const 
H265LevelDescriptor *ff_h265_guess_level(const H265RawProfileTierLevel *ptl, - int64_t bitrate, - int width, int height, - int slice_segments, - int tile_rows, int tile_cols, - int max_dec_pic_buffering) { - const H265ProfileDescriptor *profile; - int pic_size, tier_flag, lbr_flag, hbr_factor; - int i; - - if(ptl) - profile = ff_h265_get_profile(ptl); - else - profile = NULL; - if(!profile) { - // Default to using multiplication factors for Main profile. - profile = &h265_profiles[4]; - } - - pic_size = width * height; - - if(ptl) { - tier_flag = ptl->general_tier_flag; - lbr_flag = ptl->general_lower_bit_rate_constraint_flag; - } - else { - tier_flag = 0; - lbr_flag = profile->lower_bit_rate > 0; - } - if(profile->profile_idc == 1 || profile->profile_idc == 2) { - hbr_factor = 1; - } - else if(profile->high_throughput) { - if(profile->intra) - hbr_factor = 24 - 12 * lbr_flag; - else - hbr_factor = 6; - } - else { - hbr_factor = 2 - lbr_flag; - } - - for(i = 0; i < FF_ARRAY_ELEMS(h265_levels); i++) { - const H265LevelDescriptor *level = &h265_levels[i]; - int max_br, max_dpb_size; - - if(tier_flag && !level->max_br_high) - continue; - - if(pic_size > level->max_luma_ps) - continue; - if(width * width > 8 * level->max_luma_ps) - continue; - if(height * height > 8 * level->max_luma_ps) - continue; - - if(slice_segments > level->max_slice_segments_per_picture) - continue; - if(tile_rows > level->max_tile_rows) - continue; - if(tile_cols > level->max_tile_cols) - continue; - - if(tier_flag) - max_br = level->max_br_high; - else - max_br = level->max_br_main; - if(!max_br) - continue; - if(bitrate > (int64_t)profile->cpb_nal_factor * hbr_factor * max_br) - continue; - - if(pic_size <= (level->max_luma_ps >> 2)) - max_dpb_size = FFMIN(4 * profile->max_dpb_pic_buf, 16); - else if(pic_size <= (level->max_luma_ps >> 1)) - max_dpb_size = FFMIN(2 * profile->max_dpb_pic_buf, 16); - else if(pic_size <= (3 * level->max_luma_ps >> 2)) - max_dpb_size = FFMIN(4 * 
profile->max_dpb_pic_buf / 3, 16); - else - max_dpb_size = profile->max_dpb_pic_buf; - if(max_dec_pic_buffering > max_dpb_size) - continue; - - return level; - } - - return NULL; -} \ No newline at end of file From 7d736049f5275166814142c741158b54cea639c4 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Sun, 20 Nov 2022 23:08:20 -0500 Subject: [PATCH 02/11] Updated endian checks, remove register specifiers --- third-party/cbs/include/cbs/config.h | 30 +++++++++++++++----------- third-party/cbs/include/cbs/get_bits.h | 22 ++++++++++++------- 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/third-party/cbs/include/cbs/config.h b/third-party/cbs/include/cbs/config.h index 23056826e3a..4c6f330eab9 100644 --- a/third-party/cbs/include/cbs/config.h +++ b/third-party/cbs/include/cbs/config.h @@ -2,27 +2,33 @@ #ifndef CBS_CONFIG_H #define CBS_CONFIG_H -#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || \ - defined(__BIG_ENDIAN__) || \ - defined(__ARMEB__) || \ - defined(__THUMBEB__) || \ - defined(__AARCH64EB__) || \ +#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || \ + defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ || \ + defined(__FLOAT_WORD_ORDER__) && __FLOAT_WORD_ORDER__ == __ORDER_BIG_ENDIAN__ || \ + defined(__BIG_ENDIAN__) || \ + defined(__ARMEB__) || \ + defined(__THUMBEB__) || \ + defined(__AARCH64EB__) || \ defined(_MIBSEB) || defined(__MIBSEB) || defined(__MIBSEB__) // It's a big-endian target architecture #define AV_HAVE_BIGENDIAN 1 -#elif defined(__BYTE_ORDER) && __BYTE_ORDER == __LITTLE_ENDIAN || \ - defined(__LITTLE_ENDIAN__) || \ - defined(__ARMEL__) || \ - defined(__THUMBEL__) || \ - defined(__AARCH64EL__) || \ - defined(_MIPSEL) || defined(__MIPSEL) || defined(__MIPSEL__) || \ +#elif defined(__BYTE_ORDER) && __BYTE_ORDER == __LITTLE_ENDIAN || \ + defined(__BYTE_ORDER) && __BYTE_ORDER == __PDP_ENDIAN || \ + defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ || \ + 
defined(__FLOAT_WORD_ORDER__) && __FLOAT_WORD_ORDER__ == __ORDER_LITTLE_ENDIAN__ || \ + defined(__LITTLE_ENDIAN__) || \ + defined(__ARMEL__) || \ + defined(__THUMBEL__) || \ + defined(__AARCH64EL__) || \ + defined(_MIPSEL) || defined(__MIPSEL) || defined(__MIPSEL__) || \ defined(_WIN32) // It's a little-endian target architecture #define AV_HAVE_BIGENDIAN 0 #else -#error "Unknown Endianness" +// https://manhnt.github.io/programming_technique/2018/08/15/oneline-macro-endian-check.html +#define AV_HAVE_BIGENDIAN (*(uint16_t *)"\0\xff" < 0x0100) #endif #endif \ No newline at end of file diff --git a/third-party/cbs/include/cbs/get_bits.h b/third-party/cbs/include/cbs/get_bits.h index 992765dc92c..93566e85815 100644 --- a/third-party/cbs/include/cbs/get_bits.h +++ b/third-party/cbs/include/cbs/get_bits.h @@ -327,8 +327,9 @@ static inline int get_xbits(GetBitContext *s, int n) return ((((uint32_t)(sign ^ cache)) >> (32 - n)) ^ sign) - sign; #else - register int sign; - register int32_t cache; + // [manual] Removed register specifier, incompatible with C++17 + int sign; + int32_t cache; OPEN_READER(re, s); av_assert2(n>0 && n<=25); UPDATE_CACHE(re, s); @@ -343,8 +344,9 @@ static inline int get_xbits(GetBitContext *s, int n) #if !CACHED_BITSTREAM_READER static inline int get_xbits_le(GetBitContext *s, int n) { - register int sign; - register int32_t cache; + // [manual] Removed register specifier, incompatible with C++17 + int sign; + int32_t cache; OPEN_READER(re, s); av_assert2(n>0 && n<=25); UPDATE_CACHE_LE(re, s); @@ -358,7 +360,8 @@ static inline int get_xbits_le(GetBitContext *s, int n) static inline int get_sbits(GetBitContext *s, int n) { - register int tmp; + // [manual] Removed register specifier, incompatible with C++17 + int tmp; #if CACHED_BITSTREAM_READER av_assert2(n>0 && n<=25); tmp = sign_extend(get_bits(s, n), n); @@ -378,7 +381,8 @@ static inline int get_sbits(GetBitContext *s, int n) */ static inline unsigned int get_bits(GetBitContext *s, int n) { - 
register unsigned int tmp; + // [manual] Removed register specifier, incompatible with C++17 + unsigned int tmp; #if CACHED_BITSTREAM_READER av_assert2(n>0 && n<=32); @@ -429,7 +433,8 @@ static inline unsigned int get_bits_le(GetBitContext *s, int n) return get_val(s, n, 1); #else - register int tmp; + // [manual] Removed register specifier, incompatible with C++17 + int tmp; OPEN_READER(re, s); av_assert2(n>0 && n<=25); UPDATE_CACHE_LE(re, s); @@ -445,7 +450,8 @@ static inline unsigned int get_bits_le(GetBitContext *s, int n) */ static inline unsigned int show_bits(GetBitContext *s, int n) { - register unsigned int tmp; + // [manual] Removed register specifier, incompatible with C++17 + unsigned int tmp; #if CACHED_BITSTREAM_READER if (n > s->bits_left) #ifdef BITSTREAM_READER_LE From a2a5fa382fc9fe3687f1e6773bf49f1b7a90eb3d Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Sun, 20 Nov 2022 23:31:09 -0500 Subject: [PATCH 03/11] AUR ffmpeg5, include vaapi library Add additional va libraries --- CMakeLists.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index a0f84a49de5..97096d9ef94 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -340,6 +340,9 @@ else() numa pulse pulse-simple + va + va-drm + va-x11 ) include_directories( From 8a4c5937b879b7ea79c88bfa9f7095a3e90df9c1 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Mon, 21 Nov 2022 21:29:52 -0500 Subject: [PATCH 04/11] Readme indicating changes done Actually commit conflict resolution --- CMakeLists.txt | 31 -------------------------- third-party/cbs/README.rst | 23 +++++++++++++++++++ third-party/cbs/avcodec.h | 2 +- third-party/cbs/cbs.c | 4 ++-- third-party/cbs/cbs_av1.c | 2 +- third-party/cbs/cbs_h2645.c | 4 ++-- third-party/cbs/cbs_internal.h | 2 +- third-party/cbs/cbs_jpeg.c | 2 +- third-party/cbs/cbs_mpeg2.c | 2 +- third-party/cbs/cbs_sei.c | 2 +- third-party/cbs/h2645_parse.c | 4 ++-- third-party/cbs/h264_levels.c | 2 +- 
third-party/cbs/include/cbs/config.h | 2 +- third-party/cbs/include/cbs/get_bits.h | 12 +++++----- third-party/cbs/intmath.h | 3 +-- third-party/cbs/log2_tab.c | 2 +- third-party/cbs/put_bits.h | 2 +- 17 files changed, 46 insertions(+), 55 deletions(-) create mode 100644 third-party/cbs/README.rst diff --git a/CMakeLists.txt b/CMakeLists.txt index 97096d9ef94..9e96581e39c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -214,37 +214,6 @@ else() set(WAYLAND_FOUND OFF) endif() -<<<<<<< HEAD -======= - file( - DOWNLOAD "https://github.com/LizardByte/ffmpeg-prebuilt/releases/download/v1/pre-compiled-debian.zip" "${CMAKE_CURRENT_BINARY_DIR}/pre-compiled.zip" - TIMEOUT 60 - EXPECTED_HASH SHA256=baa26844f4bf25bad4e4de6e74026f3f083edb018e950bc09983210bb46a6a7d) - - file(ARCHIVE_EXTRACT - INPUT "${CMAKE_CURRENT_BINARY_DIR}/pre-compiled.zip" - DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/pre-compiled) - - if(NOT DEFINED SUNSHINE_PREPARED_BINARIES) - set(SUNSHINE_PREPARED_BINARIES "${CMAKE_CURRENT_BINARY_DIR}/pre-compiled") - endif() - - set(FFMPEG_INCLUDE_DIRS - ${SUNSHINE_PREPARED_BINARIES}/include) - set(FFMPEG_LIBRARIES - ${SUNSHINE_PREPARED_BINARIES}/lib/libavcodec.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libavdevice.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libavfilter.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libavformat.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libavutil.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libpostproc.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libswresample.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libswscale.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libx264.a - ${SUNSHINE_PREPARED_BINARIES}/lib/libx265.a - z lzma) - ->>>>>>> 5aeccc2 (Update ffmpeg, preserving existing cbs build pattern) if(X11_FOUND) add_compile_definitions(SUNSHINE_BUILD_X11) include_directories(${X11_INCLUDE_DIR}) diff --git a/third-party/cbs/README.rst b/third-party/cbs/README.rst new file mode 100644 index 00000000000..6c43567819b --- /dev/null +++ b/third-party/cbs/README.rst @@ -0,0 +1,23 @@ +Overview 
+--------- +These source files are copied from FFmpeg's avcodec and avutil libraries. Internally, sunshine does stream and packet processing (see `cbs.cpp `) that isn't exposed by FFmpeg. This project enables that functionality. + +Modified files +-------------- +These files have had import paths changed or otherwise modified as noted in the file: + +- avcodec.h +- cbs_av1.c +- cbs_h2645.c +- cbs_internal.h +- cbs_jpeg.c +- cbs_mpeg2.c +- cbs_sei.c +- cbs.c +- h264_levels.c +- h2645_parse.c +- intmath.h +- log2_tab.c +- put_bits.h +- config.h +- get_bits.h diff --git a/third-party/cbs/avcodec.h b/third-party/cbs/avcodec.h index c4883ddce58..c7f9b855bf9 100644 --- a/third-party/cbs/avcodec.h +++ b/third-party/cbs/avcodec.h @@ -37,7 +37,7 @@ #include "libavutil/pixfmt.h" #include "libavutil/rational.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/codec.h" #include "cbs/codec_desc.h" #include "cbs/codec_par.h" diff --git a/third-party/cbs/cbs.c b/third-party/cbs/cbs.c index b067bce21c3..4ae441a8968 100644 --- a/third-party/cbs/cbs.c +++ b/third-party/cbs/cbs.c @@ -18,7 +18,7 @@ #include -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/config.h" #include "libavutil/avassert.h" @@ -27,7 +27,7 @@ #include "libavutil/opt.h" #include "avcodec.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/cbs.h" #include "cbs_internal.h" diff --git a/third-party/cbs/cbs_av1.c b/third-party/cbs/cbs_av1.c index 9035eed3521..0df0f37b3b1 100644 --- a/third-party/cbs/cbs_av1.c +++ b/third-party/cbs/cbs_av1.c @@ -20,7 +20,7 @@ #include "libavutil/opt.h" #include "libavutil/pixfmt.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "avcodec.h" #include "cbs/cbs.h" #include "cbs_internal.h" diff --git a/third-party/cbs/cbs_h2645.c b/third-party/cbs/cbs_h2645.c index 96a3efa2643..6a67c9143c4 100644 --- a/third-party/cbs/cbs_h2645.c +++ 
b/third-party/cbs/cbs_h2645.c @@ -19,7 +19,7 @@ #include "libavutil/attributes.h" #include "libavutil/avassert.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "bytestream.h" #include "cbs/cbs.h" #include "cbs_internal.h" @@ -29,7 +29,7 @@ #include "cbs/h2645_parse.h" #include "cbs/hevc.h" -// [manual] Added to resolve missing symbols +// [sunshine] Added to resolve missing symbols #include "intmath.h" #include "log2_tab.c" diff --git a/third-party/cbs/cbs_internal.h b/third-party/cbs/cbs_internal.h index 039d5184421..58bad02d85e 100644 --- a/third-party/cbs/cbs_internal.h +++ b/third-party/cbs/cbs_internal.h @@ -24,7 +24,7 @@ #include "libavutil/buffer.h" #include "libavutil/log.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/cbs.h" #include "cbs/codec_id.h" #include "cbs/get_bits.h" diff --git a/third-party/cbs/cbs_jpeg.c b/third-party/cbs/cbs_jpeg.c index be8fd3dcee5..d0345f50fed 100644 --- a/third-party/cbs/cbs_jpeg.c +++ b/third-party/cbs/cbs_jpeg.c @@ -16,7 +16,7 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/cbs.h" #include "cbs_internal.h" #include "cbs/cbs_jpeg.h" diff --git a/third-party/cbs/cbs_mpeg2.c b/third-party/cbs/cbs_mpeg2.c index 2b6a3fcc7bc..3ad012fe24b 100644 --- a/third-party/cbs/cbs_mpeg2.c +++ b/third-party/cbs/cbs_mpeg2.c @@ -18,7 +18,7 @@ #include "libavutil/avassert.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/cbs.h" #include "cbs_internal.h" #include "cbs/cbs_mpeg2.h" diff --git a/third-party/cbs/cbs_sei.c b/third-party/cbs/cbs_sei.c index ee89e79ace7..d18d3cb46fe 100644 --- a/third-party/cbs/cbs_sei.c +++ b/third-party/cbs/cbs_sei.c @@ -16,7 +16,7 @@ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -// [manual] Changed include path +// [sunshine] Changed include path 
#include "cbs/cbs.h" #include "cbs_internal.h" #include "cbs/cbs_h264.h" diff --git a/third-party/cbs/h2645_parse.c b/third-party/cbs/h2645_parse.c index ff65a59f0c1..41f2d5876e1 100644 --- a/third-party/cbs/h2645_parse.c +++ b/third-party/cbs/h2645_parse.c @@ -20,14 +20,14 @@ #include -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/config.h" #include "intmath.h" #include "libavutil/intreadwrite.h" #include "libavutil/mem.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "bytestream.h" #include "cbs/hevc.h" #include "cbs/h264.h" diff --git a/third-party/cbs/h264_levels.c b/third-party/cbs/h264_levels.c index 2d65bc30eef..172ef452a53 100644 --- a/third-party/cbs/h264_levels.c +++ b/third-party/cbs/h264_levels.c @@ -18,7 +18,7 @@ #include #include "libavutil/macros.h" -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/h264_levels.h" // H.264 table A-1. diff --git a/third-party/cbs/include/cbs/config.h b/third-party/cbs/include/cbs/config.h index 4c6f330eab9..a778bdd9f20 100644 --- a/third-party/cbs/include/cbs/config.h +++ b/third-party/cbs/include/cbs/config.h @@ -1,4 +1,4 @@ -// [manual] Copied, generated file +// [sunshine] Copied, generated file #ifndef CBS_CONFIG_H #define CBS_CONFIG_H diff --git a/third-party/cbs/include/cbs/get_bits.h b/third-party/cbs/include/cbs/get_bits.h index 93566e85815..045c8c595cb 100644 --- a/third-party/cbs/include/cbs/get_bits.h +++ b/third-party/cbs/include/cbs/get_bits.h @@ -327,7 +327,7 @@ static inline int get_xbits(GetBitContext *s, int n) return ((((uint32_t)(sign ^ cache)) >> (32 - n)) ^ sign) - sign; #else - // [manual] Removed register specifier, incompatible with C++17 + // [sunshine] Removed register specifier, incompatible with C++17 int sign; int32_t cache; OPEN_READER(re, s); @@ -344,7 +344,7 @@ static inline int get_xbits(GetBitContext *s, int n) #if !CACHED_BITSTREAM_READER static inline int get_xbits_le(GetBitContext 
*s, int n) { - // [manual] Removed register specifier, incompatible with C++17 + // [sunshine] Removed register specifier, incompatible with C++17 int sign; int32_t cache; OPEN_READER(re, s); @@ -360,7 +360,7 @@ static inline int get_xbits_le(GetBitContext *s, int n) static inline int get_sbits(GetBitContext *s, int n) { - // [manual] Removed register specifier, incompatible with C++17 + // [sunshine] Removed register specifier, incompatible with C++17 int tmp; #if CACHED_BITSTREAM_READER av_assert2(n>0 && n<=25); @@ -381,7 +381,7 @@ static inline int get_sbits(GetBitContext *s, int n) */ static inline unsigned int get_bits(GetBitContext *s, int n) { - // [manual] Removed register specifier, incompatible with C++17 + // [sunshine] Removed register specifier, incompatible with C++17 unsigned int tmp; #if CACHED_BITSTREAM_READER @@ -433,7 +433,7 @@ static inline unsigned int get_bits_le(GetBitContext *s, int n) return get_val(s, n, 1); #else - // [manual] Removed register specifier, incompatible with C++17 + // [sunshine] Removed register specifier, incompatible with C++17 int tmp; OPEN_READER(re, s); av_assert2(n>0 && n<=25); @@ -450,7 +450,7 @@ static inline unsigned int get_bits_le(GetBitContext *s, int n) */ static inline unsigned int show_bits(GetBitContext *s, int n) { - // [manual] Removed register specifier, incompatible with C++17 + // [sunshine] Removed register specifier, incompatible with C++17 unsigned int tmp; #if CACHED_BITSTREAM_READER if (n > s->bits_left) diff --git a/third-party/cbs/intmath.h b/third-party/cbs/intmath.h index 8fdd2c88aa1..50ba735d56d 100644 --- a/third-party/cbs/intmath.h +++ b/third-party/cbs/intmath.h @@ -17,13 +17,12 @@ * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -// [manual] copied from libavutil #ifndef AVUTIL_INTMATH_H #define AVUTIL_INTMATH_H #include -// [manual] Changed include path +// [sunshine] Changed include path 
#include "cbs/config.h" #include "cbs/attributes.h" diff --git a/third-party/cbs/log2_tab.c b/third-party/cbs/log2_tab.c index 576fd7edbdb..3431401c745 100644 --- a/third-party/cbs/log2_tab.c +++ b/third-party/cbs/log2_tab.c @@ -17,7 +17,7 @@ * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ -// [manual] Copied from libavutil +// [sunshine] Copied from libavutil #include diff --git a/third-party/cbs/put_bits.h b/third-party/cbs/put_bits.h index 43409afdd34..fcb839a7f72 100644 --- a/third-party/cbs/put_bits.h +++ b/third-party/cbs/put_bits.h @@ -29,7 +29,7 @@ #include #include -// [manual] Changed include path +// [sunshine] Changed include path #include "cbs/config.h" #include "libavutil/intreadwrite.h" #include "libavutil/avassert.h" From 597482985a94d2f73524b8205e2b5f98710794d5 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Tue, 13 Dec 2022 21:30:45 -0500 Subject: [PATCH 05/11] Remove duplicate platform libraries --- CMakeLists.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 9e96581e39c..95718bed29a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -309,9 +309,6 @@ else() numa pulse pulse-simple - va - va-drm - va-x11 ) include_directories( From 5b07c4ea47acd406da58354afc8751498454f7c4 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Sun, 18 Dec 2022 08:55:11 -0500 Subject: [PATCH 06/11] Updates to use pre-built cbs library --- CMakeLists.txt | 9 +- third-party/cbs/CMakeLists.txt | 76 - third-party/cbs/README.rst | 23 - third-party/cbs/avcodec.h | 3170 ------------------- third-party/cbs/bytestream.h | 380 --- third-party/cbs/cbs.c | 1087 ------- third-party/cbs/cbs_av1.c | 1337 -------- third-party/cbs/cbs_av1_syntax_template.c | 2053 ------------ third-party/cbs/cbs_h2645.c | 1682 ---------- third-party/cbs/cbs_h264_syntax_template.c | 1262 -------- third-party/cbs/cbs_h265_syntax_template.c | 2101 
------------ third-party/cbs/cbs_internal.h | 218 -- third-party/cbs/cbs_jpeg.c | 467 --- third-party/cbs/cbs_jpeg_syntax_template.c | 196 -- third-party/cbs/cbs_mpeg2.c | 428 --- third-party/cbs/cbs_mpeg2_syntax_template.c | 425 --- third-party/cbs/cbs_sei.c | 370 --- third-party/cbs/cbs_sei_syntax_template.c | 322 -- third-party/cbs/cbs_vp9.c | 662 ---- third-party/cbs/cbs_vp9_syntax_template.c | 429 --- third-party/cbs/codec.h | 387 --- third-party/cbs/h2645_parse.c | 544 ---- third-party/cbs/h264_levels.c | 124 - third-party/cbs/h264_ps.h | 177 -- third-party/cbs/h264_sei.h | 224 -- third-party/cbs/hevc_sei.h | 169 - third-party/cbs/include/cbs/attributes.h | 173 - third-party/cbs/include/cbs/av1.h | 184 -- third-party/cbs/include/cbs/cbs.h | 445 --- third-party/cbs/include/cbs/cbs_av1.h | 464 --- third-party/cbs/include/cbs/cbs_bsf.h | 137 - third-party/cbs/include/cbs/cbs_h264.h | 427 --- third-party/cbs/include/cbs/cbs_h2645.h | 36 - third-party/cbs/include/cbs/cbs_h265.h | 700 ---- third-party/cbs/include/cbs/cbs_jpeg.h | 123 - third-party/cbs/include/cbs/cbs_mpeg2.h | 231 -- third-party/cbs/include/cbs/cbs_sei.h | 199 -- third-party/cbs/include/cbs/cbs_vp9.h | 213 -- third-party/cbs/include/cbs/codec_desc.h | 128 - third-party/cbs/include/cbs/codec_id.h | 634 ---- third-party/cbs/include/cbs/codec_par.h | 246 -- third-party/cbs/include/cbs/config.h | 34 - third-party/cbs/include/cbs/defs.h | 170 - third-party/cbs/include/cbs/get_bits.h | 864 ----- third-party/cbs/include/cbs/h264.h | 113 - third-party/cbs/include/cbs/h2645_parse.h | 139 - third-party/cbs/include/cbs/h264_levels.h | 51 - third-party/cbs/include/cbs/hevc.h | 160 - third-party/cbs/include/cbs/mathops.h | 245 -- third-party/cbs/include/cbs/packet.h | 731 ----- third-party/cbs/include/cbs/sei.h | 140 - third-party/cbs/include/cbs/vlc.h | 144 - third-party/cbs/intmath.h | 165 - third-party/cbs/log2_tab.c | 33 - third-party/cbs/put_bits.h | 422 --- third-party/cbs/startcode.h | 36 - 
third-party/cbs/version_major.h | 54 - 57 files changed, 1 insertion(+), 26162 deletions(-) delete mode 100644 third-party/cbs/CMakeLists.txt delete mode 100644 third-party/cbs/README.rst delete mode 100644 third-party/cbs/avcodec.h delete mode 100644 third-party/cbs/bytestream.h delete mode 100644 third-party/cbs/cbs.c delete mode 100644 third-party/cbs/cbs_av1.c delete mode 100644 third-party/cbs/cbs_av1_syntax_template.c delete mode 100644 third-party/cbs/cbs_h2645.c delete mode 100644 third-party/cbs/cbs_h264_syntax_template.c delete mode 100644 third-party/cbs/cbs_h265_syntax_template.c delete mode 100644 third-party/cbs/cbs_internal.h delete mode 100644 third-party/cbs/cbs_jpeg.c delete mode 100644 third-party/cbs/cbs_jpeg_syntax_template.c delete mode 100644 third-party/cbs/cbs_mpeg2.c delete mode 100644 third-party/cbs/cbs_mpeg2_syntax_template.c delete mode 100644 third-party/cbs/cbs_sei.c delete mode 100644 third-party/cbs/cbs_sei_syntax_template.c delete mode 100644 third-party/cbs/cbs_vp9.c delete mode 100644 third-party/cbs/cbs_vp9_syntax_template.c delete mode 100644 third-party/cbs/codec.h delete mode 100644 third-party/cbs/h2645_parse.c delete mode 100644 third-party/cbs/h264_levels.c delete mode 100644 third-party/cbs/h264_ps.h delete mode 100644 third-party/cbs/h264_sei.h delete mode 100644 third-party/cbs/hevc_sei.h delete mode 100644 third-party/cbs/include/cbs/attributes.h delete mode 100644 third-party/cbs/include/cbs/av1.h delete mode 100644 third-party/cbs/include/cbs/cbs.h delete mode 100644 third-party/cbs/include/cbs/cbs_av1.h delete mode 100644 third-party/cbs/include/cbs/cbs_bsf.h delete mode 100644 third-party/cbs/include/cbs/cbs_h264.h delete mode 100644 third-party/cbs/include/cbs/cbs_h2645.h delete mode 100644 third-party/cbs/include/cbs/cbs_h265.h delete mode 100644 third-party/cbs/include/cbs/cbs_jpeg.h delete mode 100644 third-party/cbs/include/cbs/cbs_mpeg2.h delete mode 100644 third-party/cbs/include/cbs/cbs_sei.h delete mode 
100644 third-party/cbs/include/cbs/cbs_vp9.h delete mode 100644 third-party/cbs/include/cbs/codec_desc.h delete mode 100644 third-party/cbs/include/cbs/codec_id.h delete mode 100644 third-party/cbs/include/cbs/codec_par.h delete mode 100644 third-party/cbs/include/cbs/config.h delete mode 100644 third-party/cbs/include/cbs/defs.h delete mode 100644 third-party/cbs/include/cbs/get_bits.h delete mode 100644 third-party/cbs/include/cbs/h264.h delete mode 100644 third-party/cbs/include/cbs/h2645_parse.h delete mode 100644 third-party/cbs/include/cbs/h264_levels.h delete mode 100644 third-party/cbs/include/cbs/hevc.h delete mode 100644 third-party/cbs/include/cbs/mathops.h delete mode 100644 third-party/cbs/include/cbs/packet.h delete mode 100644 third-party/cbs/include/cbs/sei.h delete mode 100644 third-party/cbs/include/cbs/vlc.h delete mode 100644 third-party/cbs/intmath.h delete mode 100644 third-party/cbs/log2_tab.c delete mode 100644 third-party/cbs/put_bits.h delete mode 100644 third-party/cbs/startcode.h delete mode 100644 third-party/cbs/version_major.h diff --git a/CMakeLists.txt b/CMakeLists.txt index 95718bed29a..fdae6de7e68 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -404,21 +404,19 @@ set(FFMPEG_LIBRARIES ${FFMPEG_PREPARED_BINARIES}/lib/libswscale.a ${FFMPEG_PREPARED_BINARIES}/lib/libx264.a ${FFMPEG_PREPARED_BINARIES}/lib/libx265.a + ${FFMPEG_PREPARED_BINARIES}/lib/libcbs.a ${HDR10_PLUS_LIBRARY} ${FFMPEG_PLATFORM_LIBRARIES}) include_directories( ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/third-party - ${CMAKE_CURRENT_SOURCE_DIR}/third-party/cbs/include ${CMAKE_CURRENT_SOURCE_DIR}/third-party/moonlight-common-c/enet/include ${CMAKE_CURRENT_SOURCE_DIR}/third-party/moonlight-common-c/reedsolomon ${FFMPEG_INCLUDE_DIRS} ${PLATFORM_INCLUDE_DIRS} ) -add_subdirectory(third-party/cbs) - string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE) if("${BUILD_TYPE}" STREQUAL "XDEBUG") list(APPEND SUNSHINE_COMPILE_OPTIONS -O0 -ggdb3) @@ -446,13 +444,8 @@ 
else() endif() list(APPEND SUNSHINE_DEFINITIONS SUNSHINE_ASSETS_DIR="${SUNSHINE_ASSETS_DIR_DEF}") - -list(APPEND CBS_EXTERNAL_LIBRARIES - cbs) - list(APPEND SUNSHINE_EXTERNAL_LIBRARIES libminiupnpc-static - ${CBS_EXTERNAL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} enet opus diff --git a/third-party/cbs/CMakeLists.txt b/third-party/cbs/CMakeLists.txt deleted file mode 100644 index 1287b4c4b2c..00000000000 --- a/third-party/cbs/CMakeLists.txt +++ /dev/null @@ -1,76 +0,0 @@ -cmake_minimum_required(VERSION 3.0) - -project(CBS) - -SET(CBS_SOURCE_FILES -include/cbs/av1.h -include/cbs/cbs_av1.h -include/cbs/cbs_bsf.h -include/cbs/cbs.h -include/cbs/cbs_h2645.h -include/cbs/cbs_h264.h -include/cbs/cbs_h265.h -include/cbs/cbs_jpeg.h -include/cbs/cbs_mpeg2.h -include/cbs/cbs_sei.h -include/cbs/cbs_vp9.h -include/cbs/codec_desc.h -include/cbs/codec_id.h -include/cbs/codec_par.h -include/cbs/config.h -include/cbs/defs.h -include/cbs/get_bits.h -include/cbs/h264_levels.h -include/cbs/h2645_parse.h -include/cbs/h264.h -include/cbs/hevc.h -include/cbs/mathops.h -include/cbs/packet.h -include/cbs/sei.h -include/cbs/vlc.h - -cbs.c -cbs_h2645.c -cbs_av1.c -cbs_vp9.c -cbs_mpeg2.c -cbs_jpeg.c -cbs_sei.c -h264_levels.c -h2645_parse.c - -avcodec.h -bytestream.h -cbs_internal.h -codec.h -h264_ps.h -h264_sei.h -hevc_sei.h -intmath.h -put_bits.h -version_major.h -) - -include_directories(include) - -if(DEFINED FFMPEG_INCLUDE_DIRS) -include_directories(${FFMPEG_INCLUDE_DIRS}) -endif() - -add_compile_definitions( - HAVE_THREADS=1 - HAVE_FAST_UNALIGNED - - PIC=1 - - CONFIG_CBS_AV1=1 - CONFIG_CBS_H264=1 - CONFIG_CBS_H265=1 - CONFIG_CBS_JPEG=1 - CONFIG_CBS_MPEG2=1 - CONFIG_CBS_VP9=1 - ) - - -add_library(cbs ${CBS_SOURCE_FILES}) -target_compile_options(cbs PRIVATE -Wall -Wno-incompatible-pointer-types -Wno-maybe-uninitialized -Wno-format -Wno-format-extra-args) \ No newline at end of file diff --git a/third-party/cbs/README.rst b/third-party/cbs/README.rst deleted file mode 100644 index 
6c43567819b..00000000000 --- a/third-party/cbs/README.rst +++ /dev/null @@ -1,23 +0,0 @@ -Overview ---------- -These source files are copied from FFmpeg's avcodec and avutil libraries. Internally, sunshine does stream and packet processing (see `cbs.cpp `) that isn't exposed by FFmpeg. This project enables that functionality. - -Modified files --------------- -These files have had import paths changed or otherwise modified as noted in the file: - -- avcodec.h -- cbs_av1.c -- cbs_h2645.c -- cbs_internal.h -- cbs_jpeg.c -- cbs_mpeg2.c -- cbs_sei.c -- cbs.c -- h264_levels.c -- h2645_parse.c -- intmath.h -- log2_tab.c -- put_bits.h -- config.h -- get_bits.h diff --git a/third-party/cbs/avcodec.h b/third-party/cbs/avcodec.h deleted file mode 100644 index c7f9b855bf9..00000000000 --- a/third-party/cbs/avcodec.h +++ /dev/null @@ -1,3170 +0,0 @@ -/* - * copyright (c) 2001 Fabrice Bellard - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_AVCODEC_H -#define AVCODEC_AVCODEC_H - -/** - * @file - * @ingroup libavc - * Libavcodec external API header - */ - -#include "libavutil/samplefmt.h" -#include "libavutil/attributes.h" -#include "libavutil/avutil.h" -#include "libavutil/buffer.h" -#include "libavutil/dict.h" -#include "libavutil/frame.h" -#include "libavutil/log.h" -#include "libavutil/pixfmt.h" -#include "libavutil/rational.h" - -// [sunshine] Changed include path -#include "cbs/codec.h" -#include "cbs/codec_desc.h" -#include "cbs/codec_par.h" -#include "cbs/codec_id.h" -#include "cbs/defs.h" -#include "cbs/packet.h" -#include "version_major.h" -#ifndef HAVE_AV_CONFIG_H -/* When included as part of the ffmpeg build, only include the major version - * to avoid unnecessary rebuilds. When included externally, keep including - * the full version information. */ -#include "version.h" -#endif - -/** - * @defgroup libavc libavcodec - * Encoding/Decoding Library - * - * @{ - * - * @defgroup lavc_decoding Decoding - * @{ - * @} - * - * @defgroup lavc_encoding Encoding - * @{ - * @} - * - * @defgroup lavc_codec Codecs - * @{ - * @defgroup lavc_codec_native Native Codecs - * @{ - * @} - * @defgroup lavc_codec_wrappers External library wrappers - * @{ - * @} - * @defgroup lavc_codec_hwaccel Hardware Accelerators bridge - * @{ - * @} - * @} - * @defgroup lavc_internal Internal - * @{ - * @} - * @} - */ - -/** - * @ingroup libavc - * @defgroup lavc_encdec send/receive encoding and decoding API overview - * @{ - * - * The avcodec_send_packet()/avcodec_receive_frame()/avcodec_send_frame()/ - * avcodec_receive_packet() functions provide an encode/decode API, which - * decouples input and output. 
- * - * The API is very similar for encoding/decoding and audio/video, and works as - * follows: - * - Set up and open the AVCodecContext as usual. - * - Send valid input: - * - For decoding, call avcodec_send_packet() to give the decoder raw - * compressed data in an AVPacket. - * - For encoding, call avcodec_send_frame() to give the encoder an AVFrame - * containing uncompressed audio or video. - * - * In both cases, it is recommended that AVPackets and AVFrames are - * refcounted, or libavcodec might have to copy the input data. (libavformat - * always returns refcounted AVPackets, and av_frame_get_buffer() allocates - * refcounted AVFrames.) - * - Receive output in a loop. Periodically call one of the avcodec_receive_*() - * functions and process their output: - * - For decoding, call avcodec_receive_frame(). On success, it will return - * an AVFrame containing uncompressed audio or video data. - * - For encoding, call avcodec_receive_packet(). On success, it will return - * an AVPacket with a compressed frame. - * - * Repeat this call until it returns AVERROR(EAGAIN) or an error. The - * AVERROR(EAGAIN) return value means that new input data is required to - * return new output. In this case, continue with sending input. For each - * input frame/packet, the codec will typically return 1 output frame/packet, - * but it can also be 0 or more than 1. - * - * At the beginning of decoding or encoding, the codec might accept multiple - * input frames/packets without returning a frame, until its internal buffers - * are filled. This situation is handled transparently if you follow the steps - * outlined above. - * - * In theory, sending input can result in EAGAIN - this should happen only if - * not all output was received. You can use this to structure alternative decode - * or encode loops other than the one suggested above. For example, you could - * try sending new input on each iteration, and try to receive output if that - * returns EAGAIN. 
- * - * End of stream situations. These require "flushing" (aka draining) the codec, - * as the codec might buffer multiple frames or packets internally for - * performance or out of necessity (consider B-frames). - * This is handled as follows: - * - Instead of valid input, send NULL to the avcodec_send_packet() (decoding) - * or avcodec_send_frame() (encoding) functions. This will enter draining - * mode. - * - Call avcodec_receive_frame() (decoding) or avcodec_receive_packet() - * (encoding) in a loop until AVERROR_EOF is returned. The functions will - * not return AVERROR(EAGAIN), unless you forgot to enter draining mode. - * - Before decoding can be resumed again, the codec has to be reset with - * avcodec_flush_buffers(). - * - * Using the API as outlined above is highly recommended. But it is also - * possible to call functions outside of this rigid schema. For example, you can - * call avcodec_send_packet() repeatedly without calling - * avcodec_receive_frame(). In this case, avcodec_send_packet() will succeed - * until the codec's internal buffer has been filled up (which is typically of - * size 1 per output frame, after initial input), and then reject input with - * AVERROR(EAGAIN). Once it starts rejecting input, you have no choice but to - * read at least some output. - * - * Not all codecs will follow a rigid and predictable dataflow; the only - * guarantee is that an AVERROR(EAGAIN) return value on a send/receive call on - * one end implies that a receive/send call on the other end will succeed, or - * at least will not fail with AVERROR(EAGAIN). In general, no codec will - * permit unlimited buffering of input or output. - * - * A codec is not allowed to return AVERROR(EAGAIN) for both sending and receiving. This - * would be an invalid state, which could put the codec user into an endless - * loop. 
The API has no concept of time either: it cannot happen that trying to - * do avcodec_send_packet() results in AVERROR(EAGAIN), but a repeated call 1 second - * later accepts the packet (with no other receive/flush API calls involved). - * The API is a strict state machine, and the passage of time is not supposed - * to influence it. Some timing-dependent behavior might still be deemed - * acceptable in certain cases. But it must never result in both send/receive - * returning EAGAIN at the same time at any point. It must also absolutely be - * avoided that the current state is "unstable" and can "flip-flop" between - * the send/receive APIs allowing progress. For example, it's not allowed that - * the codec randomly decides that it actually wants to consume a packet now - * instead of returning a frame, after it just returned AVERROR(EAGAIN) on an - * avcodec_send_packet() call. - * @} - */ - -/** - * @defgroup lavc_core Core functions/structures. - * @ingroup libavc - * - * Basic definitions, functions for querying libavcodec capabilities, - * allocating core structures, etc. - * @{ - */ - -/** - * @ingroup lavc_encoding - * minimum encoding buffer size - * Used to avoid some checks during header writing. - */ -#define AV_INPUT_BUFFER_MIN_SIZE 16384 - -/** - * @ingroup lavc_encoding - */ -typedef struct RcOverride{ - int start_frame; - int end_frame; - int qscale; // If this is 0 then quality_factor will be used instead. - float quality_factor; -} RcOverride; - -/* encoding support - These flags can be passed in AVCodecContext.flags before initialization. - Note: Not everything is supported yet. -*/ - -/** - * Allow decoders to produce frames with data planes that are not aligned - * to CPU requirements (e.g. due to cropping). - */ -#define AV_CODEC_FLAG_UNALIGNED (1 << 0) -/** - * Use fixed qscale. - */ -#define AV_CODEC_FLAG_QSCALE (1 << 1) -/** - * 4 MV per MB allowed / advanced prediction for H.263. 
- */ -#define AV_CODEC_FLAG_4MV (1 << 2) -/** - * Output even those frames that might be corrupted. - */ -#define AV_CODEC_FLAG_OUTPUT_CORRUPT (1 << 3) -/** - * Use qpel MC. - */ -#define AV_CODEC_FLAG_QPEL (1 << 4) -/** - * Don't output frames whose parameters differ from first - * decoded frame in stream. - */ -#define AV_CODEC_FLAG_DROPCHANGED (1 << 5) -/** - * Use internal 2pass ratecontrol in first pass mode. - */ -#define AV_CODEC_FLAG_PASS1 (1 << 9) -/** - * Use internal 2pass ratecontrol in second pass mode. - */ -#define AV_CODEC_FLAG_PASS2 (1 << 10) -/** - * loop filter. - */ -#define AV_CODEC_FLAG_LOOP_FILTER (1 << 11) -/** - * Only decode/encode grayscale. - */ -#define AV_CODEC_FLAG_GRAY (1 << 13) -/** - * error[?] variables will be set during encoding. - */ -#define AV_CODEC_FLAG_PSNR (1 << 15) -#if FF_API_FLAG_TRUNCATED -/** - * Input bitstream might be truncated at a random location - * instead of only at frame boundaries. - * - * @deprecated use codec parsers for packetizing input - */ -#define AV_CODEC_FLAG_TRUNCATED (1 << 16) -#endif -/** - * Use interlaced DCT. - */ -#define AV_CODEC_FLAG_INTERLACED_DCT (1 << 18) -/** - * Force low delay. - */ -#define AV_CODEC_FLAG_LOW_DELAY (1 << 19) -/** - * Place global headers in extradata instead of every keyframe. - */ -#define AV_CODEC_FLAG_GLOBAL_HEADER (1 << 22) -/** - * Use only bitexact stuff (except (I)DCT). - */ -#define AV_CODEC_FLAG_BITEXACT (1 << 23) -/* Fx : Flag for H.263+ extra options */ -/** - * H.263 advanced intra coding / MPEG-4 AC prediction - */ -#define AV_CODEC_FLAG_AC_PRED (1 << 24) -/** - * interlaced motion estimation - */ -#define AV_CODEC_FLAG_INTERLACED_ME (1 << 29) -#define AV_CODEC_FLAG_CLOSED_GOP (1U << 31) - -/** - * Allow non spec compliant speedup tricks. - */ -#define AV_CODEC_FLAG2_FAST (1 << 0) -/** - * Skip bitstream encoding. - */ -#define AV_CODEC_FLAG2_NO_OUTPUT (1 << 2) -/** - * Place global headers at every keyframe instead of in extradata. 
- */ -#define AV_CODEC_FLAG2_LOCAL_HEADER (1 << 3) - -/** - * timecode is in drop frame format. DEPRECATED!!!! - */ -#define AV_CODEC_FLAG2_DROP_FRAME_TIMECODE (1 << 13) - -/** - * Input bitstream might be truncated at a packet boundaries - * instead of only at frame boundaries. - */ -#define AV_CODEC_FLAG2_CHUNKS (1 << 15) -/** - * Discard cropping information from SPS. - */ -#define AV_CODEC_FLAG2_IGNORE_CROP (1 << 16) - -/** - * Show all frames before the first keyframe - */ -#define AV_CODEC_FLAG2_SHOW_ALL (1 << 22) -/** - * Export motion vectors through frame side data - */ -#define AV_CODEC_FLAG2_EXPORT_MVS (1 << 28) -/** - * Do not skip samples and export skip information as frame side data - */ -#define AV_CODEC_FLAG2_SKIP_MANUAL (1 << 29) -/** - * Do not reset ASS ReadOrder field on flush (subtitles decoding) - */ -#define AV_CODEC_FLAG2_RO_FLUSH_NOOP (1 << 30) - -/* Unsupported options : - * Syntax Arithmetic coding (SAC) - * Reference Picture Selection - * Independent Segment Decoding */ -/* /Fx */ -/* codec capabilities */ - -/* Exported side data. - These flags can be passed in AVCodecContext.export_side_data before initialization. -*/ -/** - * Export motion vectors through frame side data - */ -#define AV_CODEC_EXPORT_DATA_MVS (1 << 0) -/** - * Export encoder Producer Reference Time through packet side data - */ -#define AV_CODEC_EXPORT_DATA_PRFT (1 << 1) -/** - * Decoding only. - * Export the AVVideoEncParams structure through frame side data. - */ -#define AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS (1 << 2) -/** - * Decoding only. - * Do not apply film grain, export it instead. - */ -#define AV_CODEC_EXPORT_DATA_FILM_GRAIN (1 << 3) - -/** - * The decoder will keep a reference to the frame and may reuse it later. - */ -#define AV_GET_BUFFER_FLAG_REF (1 << 0) - -/** - * The encoder will keep a reference to the packet and may reuse it later. 
- */ -#define AV_GET_ENCODE_BUFFER_FLAG_REF (1 << 0) - -struct AVCodecInternal; - -/** - * main external API structure. - * New fields can be added to the end with minor version bumps. - * Removal, reordering and changes to existing fields require a major - * version bump. - * You can use AVOptions (av_opt* / av_set/get*()) to access these fields from user - * applications. - * The name string for AVOptions options matches the associated command line - * parameter name and can be found in libavcodec/options_table.h - * The AVOption/command line parameter names differ in some cases from the C - * structure field names for historic reasons or brevity. - * sizeof(AVCodecContext) must not be used outside libav*. - */ -typedef struct AVCodecContext { - /** - * information on struct for av_log - * - set by avcodec_alloc_context3 - */ - const AVClass *av_class; - int log_level_offset; - - enum AVMediaType codec_type; /* see AVMEDIA_TYPE_xxx */ - const struct AVCodec *codec; - enum AVCodecID codec_id; /* see AV_CODEC_ID_xxx */ - - /** - * fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A'). - * This is used to work around some encoder bugs. - * A demuxer should set this to what is stored in the field used to identify the codec. - * If there are multiple such fields in a container then the demuxer should choose the one - * which maximizes the information about the used codec. - * If the codec tag field in a container is larger than 32 bits then the demuxer should - * remap the longer ID to 32 bits with a table or other structure. Alternatively a new - * extra_codec_tag + size could be added but for this a clear advantage must be demonstrated - * first. - * - encoding: Set by user, if not then the default based on codec_id will be used. - * - decoding: Set by user, will be converted to uppercase by libavcodec during init. - */ - unsigned int codec_tag; - - void *priv_data; - - /** - * Private context used for internal data. 
- * - * Unlike priv_data, this is not codec-specific. It is used in general - * libavcodec functions. - */ - struct AVCodecInternal *internal; - - /** - * Private data of the user, can be used to carry app specific stuff. - * - encoding: Set by user. - * - decoding: Set by user. - */ - void *opaque; - - /** - * the average bitrate - * - encoding: Set by user; unused for constant quantizer encoding. - * - decoding: Set by user, may be overwritten by libavcodec - * if this info is available in the stream - */ - int64_t bit_rate; - - /** - * number of bits the bitstream is allowed to diverge from the reference. - * the reference can be CBR (for CBR pass1) or VBR (for pass2) - * - encoding: Set by user; unused for constant quantizer encoding. - * - decoding: unused - */ - int bit_rate_tolerance; - - /** - * Global quality for codecs which cannot change it per frame. - * This should be proportional to MPEG-1/2/4 qscale. - * - encoding: Set by user. - * - decoding: unused - */ - int global_quality; - - /** - * - encoding: Set by user. - * - decoding: unused - */ - int compression_level; -#define FF_COMPRESSION_DEFAULT -1 - - /** - * AV_CODEC_FLAG_*. - * - encoding: Set by user. - * - decoding: Set by user. - */ - int flags; - - /** - * AV_CODEC_FLAG2_* - * - encoding: Set by user. - * - decoding: Set by user. - */ - int flags2; - - /** - * some codecs need / can use extradata like Huffman tables. - * MJPEG: Huffman tables - * rv10: additional flags - * MPEG-4: global headers (they can be in the bitstream or here) - * The allocated memory should be AV_INPUT_BUFFER_PADDING_SIZE bytes larger - * than extradata_size to avoid problems if it is read with the bitstream reader. - * The bytewise contents of extradata must not depend on the architecture or CPU endianness. - * Must be allocated with the av_malloc() family of functions. - * - encoding: Set/allocated/freed by libavcodec. - * - decoding: Set/allocated/freed by user. 
- */ - uint8_t *extradata; - int extradata_size; - - /** - * This is the fundamental unit of time (in seconds) in terms - * of which frame timestamps are represented. For fixed-fps content, - * timebase should be 1/framerate and timestamp increments should be - * identically 1. - * This often, but not always is the inverse of the frame rate or field rate - * for video. 1/time_base is not the average frame rate if the frame rate is not - * constant. - * - * Like containers, elementary streams also can store timestamps, 1/time_base - * is the unit in which these timestamps are specified. - * As example of such codec time base see ISO/IEC 14496-2:2001(E) - * vop_time_increment_resolution and fixed_vop_rate - * (fixed_vop_rate == 0 implies that it is different from the framerate) - * - * - encoding: MUST be set by user. - * - decoding: the use of this field for decoding is deprecated. - * Use framerate instead. - */ - AVRational time_base; - - /** - * For some codecs, the time base is closer to the field rate than the frame rate. - * Most notably, H.264 and MPEG-2 specify time_base as half of frame duration - * if no telecine is used ... - * - * Set to time_base ticks per frame. Default 1, e.g., H.264/MPEG-2 set it to 2. - */ - int ticks_per_frame; - - /** - * Codec delay. - * - * Encoding: Number of frames delay there will be from the encoder input to - * the decoder output. (we assume the decoder matches the spec) - * Decoding: Number of frames delay in addition to what a standard decoder - * as specified in the spec would produce. - * - * Video: - * Number of frames the decoded output will be delayed relative to the - * encoded input. - * - * Audio: - * For encoding, this field is unused (see initial_padding). - * - * For decoding, this is the number of samples the decoder needs to - * output before the decoder's output is valid. When seeking, you should - * start decoding this many samples prior to your desired seek point. - * - * - encoding: Set by libavcodec. 
- * - decoding: Set by libavcodec. - */ - int delay; - - - /* video only */ - /** - * picture width / height. - * - * @note Those fields may not match the values of the last - * AVFrame output by avcodec_receive_frame() due frame - * reordering. - * - * - encoding: MUST be set by user. - * - decoding: May be set by the user before opening the decoder if known e.g. - * from the container. Some decoders will require the dimensions - * to be set by the caller. During decoding, the decoder may - * overwrite those values as required while parsing the data. - */ - int width, height; - - /** - * Bitstream width / height, may be different from width/height e.g. when - * the decoded frame is cropped before being output or lowres is enabled. - * - * @note Those field may not match the value of the last - * AVFrame output by avcodec_receive_frame() due frame - * reordering. - * - * - encoding: unused - * - decoding: May be set by the user before opening the decoder if known - * e.g. from the container. During decoding, the decoder may - * overwrite those values as required while parsing the data. - */ - int coded_width, coded_height; - - /** - * the number of pictures in a group of pictures, or 0 for intra_only - * - encoding: Set by user. - * - decoding: unused - */ - int gop_size; - - /** - * Pixel format, see AV_PIX_FMT_xxx. - * May be set by the demuxer if known from headers. - * May be overridden by the decoder if it knows better. - * - * @note This field may not match the value of the last - * AVFrame output by avcodec_receive_frame() due frame - * reordering. - * - * - encoding: Set by user. - * - decoding: Set by user if known, overridden by libavcodec while - * parsing the data. - */ - enum AVPixelFormat pix_fmt; - - /** - * If non NULL, 'draw_horiz_band' is called by the libavcodec - * decoder to draw a horizontal band. It improves cache usage. Not - * all codecs can do that. You must check the codec capabilities - * beforehand. 
- * When multithreading is used, it may be called from multiple threads - * at the same time; threads might draw different parts of the same AVFrame, - * or multiple AVFrames, and there is no guarantee that slices will be drawn - * in order. - * The function is also used by hardware acceleration APIs. - * It is called at least once during frame decoding to pass - * the data needed for hardware render. - * In that mode instead of pixel data, AVFrame points to - * a structure specific to the acceleration API. The application - * reads the structure and can change some fields to indicate progress - * or mark state. - * - encoding: unused - * - decoding: Set by user. - * @param height the height of the slice - * @param y the y position of the slice - * @param type 1->top field, 2->bottom field, 3->frame - * @param offset offset into the AVFrame.data from which the slice should be read - */ - void (*draw_horiz_band)(struct AVCodecContext *s, - const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], - int y, int type, int height); - - /** - * Callback to negotiate the pixel format. Decoding only, may be set by the - * caller before avcodec_open2(). - * - * Called by some decoders to select the pixel format that will be used for - * the output frames. This is mainly used to set up hardware acceleration, - * then the provided format list contains the corresponding hwaccel pixel - * formats alongside the "software" one. The software pixel format may also - * be retrieved from \ref sw_pix_fmt. - * - * This callback will be called when the coded frame properties (such as - * resolution, pixel format, etc.) change and more than one output format is - * supported for those new properties. If a hardware pixel format is chosen - * and initialization for it fails, the callback may be called again - * immediately. - * - * This callback may be called from different threads if the decoder is - * multi-threaded, but not from more than one thread simultaneously. 
- * - * @param fmt list of formats which may be used in the current - * configuration, terminated by AV_PIX_FMT_NONE. - * @warning Behavior is undefined if the callback returns a value other - * than one of the formats in fmt or AV_PIX_FMT_NONE. - * @return the chosen format or AV_PIX_FMT_NONE - */ - enum AVPixelFormat (*get_format)(struct AVCodecContext *s, const enum AVPixelFormat * fmt); - - /** - * maximum number of B-frames between non-B-frames - * Note: The output will be delayed by max_b_frames+1 relative to the input. - * - encoding: Set by user. - * - decoding: unused - */ - int max_b_frames; - - /** - * qscale factor between IP and B-frames - * If > 0 then the last P-frame quantizer will be used (q= lastp_q*factor+offset). - * If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - * - encoding: Set by user. - * - decoding: unused - */ - float b_quant_factor; - - /** - * qscale offset between IP and B-frames - * - encoding: Set by user. - * - decoding: unused - */ - float b_quant_offset; - - /** - * Size of the frame reordering buffer in the decoder. - * For MPEG-2 it is 1 IPB or 0 low delay IP. - * - encoding: Set by libavcodec. - * - decoding: Set by libavcodec. - */ - int has_b_frames; - - /** - * qscale factor between P- and I-frames - * If > 0 then the last P-frame quantizer will be used (q = lastp_q * factor + offset). - * If < 0 then normal ratecontrol will be done (q= -normal_q*factor+offset). - * - encoding: Set by user. - * - decoding: unused - */ - float i_quant_factor; - - /** - * qscale offset between P and I-frames - * - encoding: Set by user. - * - decoding: unused - */ - float i_quant_offset; - - /** - * luminance masking (0-> disabled) - * - encoding: Set by user. - * - decoding: unused - */ - float lumi_masking; - - /** - * temporary complexity masking (0-> disabled) - * - encoding: Set by user. 
- * - decoding: unused - */ - float temporal_cplx_masking; - - /** - * spatial complexity masking (0-> disabled) - * - encoding: Set by user. - * - decoding: unused - */ - float spatial_cplx_masking; - - /** - * p block masking (0-> disabled) - * - encoding: Set by user. - * - decoding: unused - */ - float p_masking; - - /** - * darkness masking (0-> disabled) - * - encoding: Set by user. - * - decoding: unused - */ - float dark_masking; - - /** - * slice count - * - encoding: Set by libavcodec. - * - decoding: Set by user (or 0). - */ - int slice_count; - - /** - * slice offsets in the frame in bytes - * - encoding: Set/allocated by libavcodec. - * - decoding: Set/allocated by user (or NULL). - */ - int *slice_offset; - - /** - * sample aspect ratio (0 if unknown) - * That is the width of a pixel divided by the height of the pixel. - * Numerator and denominator must be relatively prime and smaller than 256 for some video standards. - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - AVRational sample_aspect_ratio; - - /** - * motion estimation comparison function - * - encoding: Set by user. - * - decoding: unused - */ - int me_cmp; - /** - * subpixel motion estimation comparison function - * - encoding: Set by user. - * - decoding: unused - */ - int me_sub_cmp; - /** - * macroblock comparison function (not supported yet) - * - encoding: Set by user. - * - decoding: unused - */ - int mb_cmp; - /** - * interlaced DCT comparison function - * - encoding: Set by user. 
- * - decoding: unused - */ - int ildct_cmp; -#define FF_CMP_SAD 0 -#define FF_CMP_SSE 1 -#define FF_CMP_SATD 2 -#define FF_CMP_DCT 3 -#define FF_CMP_PSNR 4 -#define FF_CMP_BIT 5 -#define FF_CMP_RD 6 -#define FF_CMP_ZERO 7 -#define FF_CMP_VSAD 8 -#define FF_CMP_VSSE 9 -#define FF_CMP_NSSE 10 -#define FF_CMP_W53 11 -#define FF_CMP_W97 12 -#define FF_CMP_DCTMAX 13 -#define FF_CMP_DCT264 14 -#define FF_CMP_MEDIAN_SAD 15 -#define FF_CMP_CHROMA 256 - - /** - * ME diamond size & shape - * - encoding: Set by user. - * - decoding: unused - */ - int dia_size; - - /** - * amount of previous MV predictors (2a+1 x 2a+1 square) - * - encoding: Set by user. - * - decoding: unused - */ - int last_predictor_count; - - /** - * motion estimation prepass comparison function - * - encoding: Set by user. - * - decoding: unused - */ - int me_pre_cmp; - - /** - * ME prepass diamond size & shape - * - encoding: Set by user. - * - decoding: unused - */ - int pre_dia_size; - - /** - * subpel ME quality - * - encoding: Set by user. - * - decoding: unused - */ - int me_subpel_quality; - - /** - * maximum motion estimation search range in subpel units - * If 0 then no limit. - * - * - encoding: Set by user. - * - decoding: unused - */ - int me_range; - - /** - * slice flags - * - encoding: unused - * - decoding: Set by user. - */ - int slice_flags; -#define SLICE_FLAG_CODED_ORDER 0x0001 ///< draw_horiz_band() is called in coded order instead of display -#define SLICE_FLAG_ALLOW_FIELD 0x0002 ///< allow draw_horiz_band() with field slices (MPEG-2 field pics) -#define SLICE_FLAG_ALLOW_PLANE 0x0004 ///< allow draw_horiz_band() with 1 component at a time (SVQ1) - - /** - * macroblock decision mode - * - encoding: Set by user. 
- * - decoding: unused - */ - int mb_decision; -#define FF_MB_DECISION_SIMPLE 0 ///< uses mb_cmp -#define FF_MB_DECISION_BITS 1 ///< chooses the one which needs the fewest bits -#define FF_MB_DECISION_RD 2 ///< rate distortion - - /** - * custom intra quantization matrix - * Must be allocated with the av_malloc() family of functions, and will be freed in - * avcodec_free_context(). - * - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - * - decoding: Set/allocated/freed by libavcodec. - */ - uint16_t *intra_matrix; - - /** - * custom inter quantization matrix - * Must be allocated with the av_malloc() family of functions, and will be freed in - * avcodec_free_context(). - * - encoding: Set/allocated by user, freed by libavcodec. Can be NULL. - * - decoding: Set/allocated/freed by libavcodec. - */ - uint16_t *inter_matrix; - - /** - * precision of the intra DC coefficient - 8 - * - encoding: Set by user. - * - decoding: Set by libavcodec - */ - int intra_dc_precision; - - /** - * Number of macroblock rows at the top which are skipped. - * - encoding: unused - * - decoding: Set by user. - */ - int skip_top; - - /** - * Number of macroblock rows at the bottom which are skipped. - * - encoding: unused - * - decoding: Set by user. - */ - int skip_bottom; - - /** - * minimum MB Lagrange multiplier - * - encoding: Set by user. - * - decoding: unused - */ - int mb_lmin; - - /** - * maximum MB Lagrange multiplier - * - encoding: Set by user. - * - decoding: unused - */ - int mb_lmax; - - /** - * - encoding: Set by user. - * - decoding: unused - */ - int bidir_refine; - - /** - * minimum GOP size - * - encoding: Set by user. - * - decoding: unused - */ - int keyint_min; - - /** - * number of reference frames - * - encoding: Set by user. - * - decoding: Set by lavc. - */ - int refs; - - /** - * Note: Value depends upon the compare function used for fullpel ME. - * - encoding: Set by user. 
- * - decoding: unused - */ - int mv0_threshold; - - /** - * Chromaticity coordinates of the source primaries. - * - encoding: Set by user - * - decoding: Set by libavcodec - */ - enum AVColorPrimaries color_primaries; - - /** - * Color Transfer Characteristic. - * - encoding: Set by user - * - decoding: Set by libavcodec - */ - enum AVColorTransferCharacteristic color_trc; - - /** - * YUV colorspace type. - * - encoding: Set by user - * - decoding: Set by libavcodec - */ - enum AVColorSpace colorspace; - - /** - * MPEG vs JPEG YUV range. - * - encoding: Set by user - * - decoding: Set by libavcodec - */ - enum AVColorRange color_range; - - /** - * This defines the location of chroma samples. - * - encoding: Set by user - * - decoding: Set by libavcodec - */ - enum AVChromaLocation chroma_sample_location; - - /** - * Number of slices. - * Indicates number of picture subdivisions. Used for parallelized - * decoding. - * - encoding: Set by user - * - decoding: unused - */ - int slices; - - /** Field order - * - encoding: set by libavcodec - * - decoding: Set by user. - */ - enum AVFieldOrder field_order; - - /* audio only */ - int sample_rate; ///< samples per second - -#if FF_API_OLD_CHANNEL_LAYOUT - /** - * number of audio channels - * @deprecated use ch_layout.nb_channels - */ - attribute_deprecated - int channels; -#endif - - /** - * audio sample format - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - enum AVSampleFormat sample_fmt; ///< sample format - - /* The following data should not be initialized. */ - /** - * Number of samples per channel in an audio frame. - * - * - encoding: set by libavcodec in avcodec_open2(). Each submitted frame - * except the last must contain exactly frame_size samples per channel. - * May be 0 when the codec has AV_CODEC_CAP_VARIABLE_FRAME_SIZE set, then the - * frame size is not restricted. 
- * - decoding: may be set by some decoders to indicate constant frame size - */ - int frame_size; - - /** - * Frame counter, set by libavcodec. - * - * - decoding: total number of frames returned from the decoder so far. - * - encoding: total number of frames passed to the encoder so far. - * - * @note the counter is not incremented if encoding/decoding resulted in - * an error. - */ - int frame_number; - - /** - * number of bytes per packet if constant and known or 0 - * Used by some WAV based audio codecs. - */ - int block_align; - - /** - * Audio cutoff bandwidth (0 means "automatic") - * - encoding: Set by user. - * - decoding: unused - */ - int cutoff; - -#if FF_API_OLD_CHANNEL_LAYOUT - /** - * Audio channel layout. - * - encoding: set by user. - * - decoding: set by user, may be overwritten by libavcodec. - * @deprecated use ch_layout - */ - attribute_deprecated - uint64_t channel_layout; - - /** - * Request decoder to use this channel layout if it can (0 for default) - * - encoding: unused - * - decoding: Set by user. - * @deprecated use "downmix" codec private option - */ - attribute_deprecated - uint64_t request_channel_layout; -#endif - - /** - * Type of service that the audio stream conveys. - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - enum AVAudioServiceType audio_service_type; - - /** - * desired sample format - * - encoding: Not used. - * - decoding: Set by user. - * Decoder will decode to this format if it can. - */ - enum AVSampleFormat request_sample_fmt; - - /** - * This callback is called at the beginning of each frame to get data - * buffer(s) for it. There may be one contiguous buffer for all the data or - * there may be a buffer per each data plane or anything in between. What - * this means is, you may set however many entries in buf[] you feel necessary. - * Each buffer must be reference-counted using the AVBuffer API (see description - * of buf[] below). 
- * - * The following fields will be set in the frame before this callback is - * called: - * - format - * - width, height (video only) - * - sample_rate, channel_layout, nb_samples (audio only) - * Their values may differ from the corresponding values in - * AVCodecContext. This callback must use the frame values, not the codec - * context values, to calculate the required buffer size. - * - * This callback must fill the following fields in the frame: - * - data[] - * - linesize[] - * - extended_data: - * * if the data is planar audio with more than 8 channels, then this - * callback must allocate and fill extended_data to contain all pointers - * to all data planes. data[] must hold as many pointers as it can. - * extended_data must be allocated with av_malloc() and will be freed in - * av_frame_unref(). - * * otherwise extended_data must point to data - * - buf[] must contain one or more pointers to AVBufferRef structures. Each of - * the frame's data and extended_data pointers must be contained in these. That - * is, one AVBufferRef for each allocated chunk of memory, not necessarily one - * AVBufferRef per data[] entry. See: av_buffer_create(), av_buffer_alloc(), - * and av_buffer_ref(). - * - extended_buf and nb_extended_buf must be allocated with av_malloc() by - * this callback and filled with the extra buffers if there are more - * buffers than buf[] can hold. extended_buf will be freed in - * av_frame_unref(). - * - * If AV_CODEC_CAP_DR1 is not set then get_buffer2() must call - * avcodec_default_get_buffer2() instead of providing buffers allocated by - * some other means. - * - * Each data plane must be aligned to the maximum required by the target - * CPU. - * - * @see avcodec_default_get_buffer2() - * - * Video: - * - * If AV_GET_BUFFER_FLAG_REF is set in flags then the frame may be reused - * (read and/or written to if it is writable) later by libavcodec. 
- * - * avcodec_align_dimensions2() should be used to find the required width and - * height, as they normally need to be rounded up to the next multiple of 16. - * - * Some decoders do not support linesizes changing between frames. - * - * If frame multithreading is used, this callback may be called from a - * different thread, but not from more than one at once. Does not need to be - * reentrant. - * - * @see avcodec_align_dimensions2() - * - * Audio: - * - * Decoders request a buffer of a particular size by setting - * AVFrame.nb_samples prior to calling get_buffer2(). The decoder may, - * however, utilize only part of the buffer by setting AVFrame.nb_samples - * to a smaller value in the output frame. - * - * As a convenience, av_samples_get_buffer_size() and - * av_samples_fill_arrays() in libavutil may be used by custom get_buffer2() - * functions to find the required data size and to fill data pointers and - * linesize. In AVFrame.linesize, only linesize[0] may be set for audio - * since all planes must be the same size. - * - * @see av_samples_get_buffer_size(), av_samples_fill_arrays() - * - * - encoding: unused - * - decoding: Set by libavcodec, user can override. - */ - int (*get_buffer2)(struct AVCodecContext *s, AVFrame *frame, int flags); - - /* - encoding parameters */ - float qcompress; ///< amount of qscale change between easy & hard scenes (0.0-1.0) - float qblur; ///< amount of qscale smoothing over time (0.0-1.0) - - /** - * minimum quantizer - * - encoding: Set by user. - * - decoding: unused - */ - int qmin; - - /** - * maximum quantizer - * - encoding: Set by user. - * - decoding: unused - */ - int qmax; - - /** - * maximum quantizer difference between frames - * - encoding: Set by user. - * - decoding: unused - */ - int max_qdiff; - - /** - * decoder bitstream buffer size - * - encoding: Set by user. - * - decoding: unused - */ - int rc_buffer_size; - - /** - * ratecontrol override, see RcOverride - * - encoding: Allocated/set/freed by user. 
- * - decoding: unused - */ - int rc_override_count; - RcOverride *rc_override; - - /** - * maximum bitrate - * - encoding: Set by user. - * - decoding: Set by user, may be overwritten by libavcodec. - */ - int64_t rc_max_rate; - - /** - * minimum bitrate - * - encoding: Set by user. - * - decoding: unused - */ - int64_t rc_min_rate; - - /** - * Ratecontrol attempt to use, at maximum, of what can be used without an underflow. - * - encoding: Set by user. - * - decoding: unused. - */ - float rc_max_available_vbv_use; - - /** - * Ratecontrol attempt to use, at least, times the amount needed to prevent a vbv overflow. - * - encoding: Set by user. - * - decoding: unused. - */ - float rc_min_vbv_overflow_use; - - /** - * Number of bits which should be loaded into the rc buffer before decoding starts. - * - encoding: Set by user. - * - decoding: unused - */ - int rc_initial_buffer_occupancy; - - /** - * trellis RD quantization - * - encoding: Set by user. - * - decoding: unused - */ - int trellis; - - /** - * pass1 encoding statistics output buffer - * - encoding: Set by libavcodec. - * - decoding: unused - */ - char *stats_out; - - /** - * pass2 encoding statistics input buffer - * Concatenated stuff from stats_out of pass1 should be placed here. - * - encoding: Allocated/set/freed by user. - * - decoding: unused - */ - char *stats_in; - - /** - * Work around bugs in encoders which sometimes cannot be detected automatically. - * - encoding: Set by user - * - decoding: Set by user - */ - int workaround_bugs; -#define FF_BUG_AUTODETECT 1 ///< autodetection -#define FF_BUG_XVID_ILACE 4 -#define FF_BUG_UMP4 8 -#define FF_BUG_NO_PADDING 16 -#define FF_BUG_AMV 32 -#define FF_BUG_QPEL_CHROMA 64 -#define FF_BUG_STD_QPEL 128 -#define FF_BUG_QPEL_CHROMA2 256 -#define FF_BUG_DIRECT_BLOCKSIZE 512 -#define FF_BUG_EDGE 1024 -#define FF_BUG_HPEL_CHROMA 2048 -#define FF_BUG_DC_CLIP 4096 -#define FF_BUG_MS 8192 ///< Work around various bugs in Microsoft's broken decoders. 
-#define FF_BUG_TRUNCATED 16384 -#define FF_BUG_IEDGE 32768 - - /** - * strictly follow the standard (MPEG-4, ...). - * - encoding: Set by user. - * - decoding: Set by user. - * Setting this to STRICT or higher means the encoder and decoder will - * generally do stupid things, whereas setting it to unofficial or lower - * will mean the encoder might produce output that is not supported by all - * spec-compliant decoders. Decoders don't differentiate between normal, - * unofficial and experimental (that is, they always try to decode things - * when they can) unless they are explicitly asked to behave stupidly - * (=strictly conform to the specs) - */ - int strict_std_compliance; -#define FF_COMPLIANCE_VERY_STRICT 2 ///< Strictly conform to an older more strict version of the spec or reference software. -#define FF_COMPLIANCE_STRICT 1 ///< Strictly conform to all the things in the spec no matter what consequences. -#define FF_COMPLIANCE_NORMAL 0 -#define FF_COMPLIANCE_UNOFFICIAL -1 ///< Allow unofficial extensions -#define FF_COMPLIANCE_EXPERIMENTAL -2 ///< Allow nonstandardized experimental things. - - /** - * error concealment flags - * - encoding: unused - * - decoding: Set by user. - */ - int error_concealment; -#define FF_EC_GUESS_MVS 1 -#define FF_EC_DEBLOCK 2 -#define FF_EC_FAVOR_INTER 256 - - /** - * debug - * - encoding: Set by user. - * - decoding: Set by user. - */ - int debug; -#define FF_DEBUG_PICT_INFO 1 -#define FF_DEBUG_RC 2 -#define FF_DEBUG_BITSTREAM 4 -#define FF_DEBUG_MB_TYPE 8 -#define FF_DEBUG_QP 16 -#define FF_DEBUG_DCT_COEFF 0x00000040 -#define FF_DEBUG_SKIP 0x00000080 -#define FF_DEBUG_STARTCODE 0x00000100 -#define FF_DEBUG_ER 0x00000400 -#define FF_DEBUG_MMCO 0x00000800 -#define FF_DEBUG_BUGS 0x00001000 -#define FF_DEBUG_BUFFERS 0x00008000 -#define FF_DEBUG_THREADS 0x00010000 -#define FF_DEBUG_GREEN_MD 0x00800000 -#define FF_DEBUG_NOMC 0x01000000 - - /** - * Error recognition; may misdetect some more or less valid parts as errors. 
- * - encoding: Set by user. - * - decoding: Set by user. - */ - int err_recognition; - -/** - * Verify checksums embedded in the bitstream (could be of either encoded or - * decoded data, depending on the codec) and print an error message on mismatch. - * If AV_EF_EXPLODE is also set, a mismatching checksum will result in the - * decoder returning an error. - */ -#define AV_EF_CRCCHECK (1<<0) -#define AV_EF_BITSTREAM (1<<1) ///< detect bitstream specification deviations -#define AV_EF_BUFFER (1<<2) ///< detect improper bitstream length -#define AV_EF_EXPLODE (1<<3) ///< abort decoding on minor error detection - -#define AV_EF_IGNORE_ERR (1<<15) ///< ignore errors and continue -#define AV_EF_CAREFUL (1<<16) ///< consider things that violate the spec, are fast to calculate and have not been seen in the wild as errors -#define AV_EF_COMPLIANT (1<<17) ///< consider all spec non compliances as errors -#define AV_EF_AGGRESSIVE (1<<18) ///< consider things that a sane encoder should not do as an error - - - /** - * opaque 64-bit number (generally a PTS) that will be reordered and - * output in AVFrame.reordered_opaque - * - encoding: Set by libavcodec to the reordered_opaque of the input - * frame corresponding to the last returned packet. Only - * supported by encoders with the - * AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE capability. - * - decoding: Set by user. - */ - int64_t reordered_opaque; - - /** - * Hardware accelerator in use - * - encoding: unused. - * - decoding: Set by libavcodec - */ - const struct AVHWAccel *hwaccel; - - /** - * Hardware accelerator context. - * For some hardware accelerators, a global context needs to be - * provided by the user. In that case, this holds display-dependent - * data FFmpeg cannot instantiate itself. Please refer to the - * FFmpeg HW accelerator documentation to know how to fill this. 
- * - encoding: unused - * - decoding: Set by user - */ - void *hwaccel_context; - - /** - * error - * - encoding: Set by libavcodec if flags & AV_CODEC_FLAG_PSNR. - * - decoding: unused - */ - uint64_t error[AV_NUM_DATA_POINTERS]; - - /** - * DCT algorithm, see FF_DCT_* below - * - encoding: Set by user. - * - decoding: unused - */ - int dct_algo; -#define FF_DCT_AUTO 0 -#define FF_DCT_FASTINT 1 -#define FF_DCT_INT 2 -#define FF_DCT_MMX 3 -#define FF_DCT_ALTIVEC 5 -#define FF_DCT_FAAN 6 - - /** - * IDCT algorithm, see FF_IDCT_* below. - * - encoding: Set by user. - * - decoding: Set by user. - */ - int idct_algo; -#define FF_IDCT_AUTO 0 -#define FF_IDCT_INT 1 -#define FF_IDCT_SIMPLE 2 -#define FF_IDCT_SIMPLEMMX 3 -#define FF_IDCT_ARM 7 -#define FF_IDCT_ALTIVEC 8 -#define FF_IDCT_SIMPLEARM 10 -#define FF_IDCT_XVID 14 -#define FF_IDCT_SIMPLEARMV5TE 16 -#define FF_IDCT_SIMPLEARMV6 17 -#define FF_IDCT_FAAN 20 -#define FF_IDCT_SIMPLENEON 22 -#if FF_API_IDCT_NONE -// formerly used by xvmc -#define FF_IDCT_NONE 24 -#endif -#define FF_IDCT_SIMPLEAUTO 128 - - /** - * bits per sample/pixel from the demuxer (needed for huffyuv). - * - encoding: Set by libavcodec. - * - decoding: Set by user. - */ - int bits_per_coded_sample; - - /** - * Bits per sample/pixel of internal libavcodec pixel/sample format. - * - encoding: set by user. - * - decoding: set by libavcodec. - */ - int bits_per_raw_sample; - - /** - * low resolution decoding, 1-> 1/2 size, 2->1/4 size - * - encoding: unused - * - decoding: Set by user. - */ - int lowres; - - /** - * thread count - * is used to decide how many independent tasks should be passed to execute() - * - encoding: Set by user. - * - decoding: Set by user. - */ - int thread_count; - - /** - * Which multithreading methods to use. - * Use of FF_THREAD_FRAME will increase decoding delay by one frame per thread, - * so clients which cannot provide future frames should not use it. - * - * - encoding: Set by user, otherwise the default is used. 
- * - decoding: Set by user, otherwise the default is used. - */ - int thread_type; -#define FF_THREAD_FRAME 1 ///< Decode more than one frame at once -#define FF_THREAD_SLICE 2 ///< Decode more than one part of a single frame at once - - /** - * Which multithreading methods are in use by the codec. - * - encoding: Set by libavcodec. - * - decoding: Set by libavcodec. - */ - int active_thread_type; - -#if FF_API_THREAD_SAFE_CALLBACKS - /** - * Set by the client if its custom get_buffer() callback can be called - * synchronously from another thread, which allows faster multithreaded decoding. - * draw_horiz_band() will be called from other threads regardless of this setting. - * Ignored if the default get_buffer() is used. - * - encoding: Set by user. - * - decoding: Set by user. - * - * @deprecated the custom get_buffer2() callback should always be - * thread-safe. Thread-unsafe get_buffer2() implementations will be - * invalid starting with LIBAVCODEC_VERSION_MAJOR=60; in other words, - * libavcodec will behave as if this field was always set to 1. - * Callers that want to be forward compatible with future libavcodec - * versions should wrap access to this field in - * #if LIBAVCODEC_VERSION_MAJOR < 60 - */ - attribute_deprecated - int thread_safe_callbacks; -#endif - - /** - * The codec may call this to execute several independent things. - * It will return only after finishing all tasks. - * The user may replace this with some multithreaded implementation, - * the default implementation will execute the parts serially. - * @param count the number of things to execute - * - encoding: Set by libavcodec, user can override. - * - decoding: Set by libavcodec, user can override. - */ - int (*execute)(struct AVCodecContext *c, int (*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size); - - /** - * The codec may call this to execute several independent things. - * It will return only after finishing all tasks. 
- * The user may replace this with some multithreaded implementation, - * the default implementation will execute the parts serially. - * @param c context passed also to func - * @param count the number of things to execute - * @param arg2 argument passed unchanged to func - * @param ret return values of executed functions, must have space for "count" values. May be NULL. - * @param func function that will be called count times, with jobnr from 0 to count-1. - * threadnr will be in the range 0 to c->thread_count-1 < MAX_THREADS and so that no - * two instances of func executing at the same time will have the same threadnr. - * @return always 0 currently, but code should handle a future improvement where when any call to func - * returns < 0 no further calls to func may be done and < 0 is returned. - * - encoding: Set by libavcodec, user can override. - * - decoding: Set by libavcodec, user can override. - */ - int (*execute2)(struct AVCodecContext *c, int (*func)(struct AVCodecContext *c2, void *arg, int jobnr, int threadnr), void *arg2, int *ret, int count); - - /** - * noise vs. sse weight for the nsse comparison function - * - encoding: Set by user. - * - decoding: unused - */ - int nsse_weight; - - /** - * profile - * - encoding: Set by user. - * - decoding: Set by libavcodec. 
- */ - int profile; -#define FF_PROFILE_UNKNOWN -99 -#define FF_PROFILE_RESERVED -100 - -#define FF_PROFILE_AAC_MAIN 0 -#define FF_PROFILE_AAC_LOW 1 -#define FF_PROFILE_AAC_SSR 2 -#define FF_PROFILE_AAC_LTP 3 -#define FF_PROFILE_AAC_HE 4 -#define FF_PROFILE_AAC_HE_V2 28 -#define FF_PROFILE_AAC_LD 22 -#define FF_PROFILE_AAC_ELD 38 -#define FF_PROFILE_MPEG2_AAC_LOW 128 -#define FF_PROFILE_MPEG2_AAC_HE 131 - -#define FF_PROFILE_DNXHD 0 -#define FF_PROFILE_DNXHR_LB 1 -#define FF_PROFILE_DNXHR_SQ 2 -#define FF_PROFILE_DNXHR_HQ 3 -#define FF_PROFILE_DNXHR_HQX 4 -#define FF_PROFILE_DNXHR_444 5 - -#define FF_PROFILE_DTS 20 -#define FF_PROFILE_DTS_ES 30 -#define FF_PROFILE_DTS_96_24 40 -#define FF_PROFILE_DTS_HD_HRA 50 -#define FF_PROFILE_DTS_HD_MA 60 -#define FF_PROFILE_DTS_EXPRESS 70 - -#define FF_PROFILE_MPEG2_422 0 -#define FF_PROFILE_MPEG2_HIGH 1 -#define FF_PROFILE_MPEG2_SS 2 -#define FF_PROFILE_MPEG2_SNR_SCALABLE 3 -#define FF_PROFILE_MPEG2_MAIN 4 -#define FF_PROFILE_MPEG2_SIMPLE 5 - -#define FF_PROFILE_H264_CONSTRAINED (1<<9) // 8+1; constraint_set1_flag -#define FF_PROFILE_H264_INTRA (1<<11) // 8+3; constraint_set3_flag - -#define FF_PROFILE_H264_BASELINE 66 -#define FF_PROFILE_H264_CONSTRAINED_BASELINE (66|FF_PROFILE_H264_CONSTRAINED) -#define FF_PROFILE_H264_MAIN 77 -#define FF_PROFILE_H264_EXTENDED 88 -#define FF_PROFILE_H264_HIGH 100 -#define FF_PROFILE_H264_HIGH_10 110 -#define FF_PROFILE_H264_HIGH_10_INTRA (110|FF_PROFILE_H264_INTRA) -#define FF_PROFILE_H264_MULTIVIEW_HIGH 118 -#define FF_PROFILE_H264_HIGH_422 122 -#define FF_PROFILE_H264_HIGH_422_INTRA (122|FF_PROFILE_H264_INTRA) -#define FF_PROFILE_H264_STEREO_HIGH 128 -#define FF_PROFILE_H264_HIGH_444 144 -#define FF_PROFILE_H264_HIGH_444_PREDICTIVE 244 -#define FF_PROFILE_H264_HIGH_444_INTRA (244|FF_PROFILE_H264_INTRA) -#define FF_PROFILE_H264_CAVLC_444 44 - -#define FF_PROFILE_VC1_SIMPLE 0 -#define FF_PROFILE_VC1_MAIN 1 -#define FF_PROFILE_VC1_COMPLEX 2 -#define FF_PROFILE_VC1_ADVANCED 3 - -#define 
FF_PROFILE_MPEG4_SIMPLE 0 -#define FF_PROFILE_MPEG4_SIMPLE_SCALABLE 1 -#define FF_PROFILE_MPEG4_CORE 2 -#define FF_PROFILE_MPEG4_MAIN 3 -#define FF_PROFILE_MPEG4_N_BIT 4 -#define FF_PROFILE_MPEG4_SCALABLE_TEXTURE 5 -#define FF_PROFILE_MPEG4_SIMPLE_FACE_ANIMATION 6 -#define FF_PROFILE_MPEG4_BASIC_ANIMATED_TEXTURE 7 -#define FF_PROFILE_MPEG4_HYBRID 8 -#define FF_PROFILE_MPEG4_ADVANCED_REAL_TIME 9 -#define FF_PROFILE_MPEG4_CORE_SCALABLE 10 -#define FF_PROFILE_MPEG4_ADVANCED_CODING 11 -#define FF_PROFILE_MPEG4_ADVANCED_CORE 12 -#define FF_PROFILE_MPEG4_ADVANCED_SCALABLE_TEXTURE 13 -#define FF_PROFILE_MPEG4_SIMPLE_STUDIO 14 -#define FF_PROFILE_MPEG4_ADVANCED_SIMPLE 15 - -#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_0 1 -#define FF_PROFILE_JPEG2000_CSTREAM_RESTRICTION_1 2 -#define FF_PROFILE_JPEG2000_CSTREAM_NO_RESTRICTION 32768 -#define FF_PROFILE_JPEG2000_DCINEMA_2K 3 -#define FF_PROFILE_JPEG2000_DCINEMA_4K 4 - -#define FF_PROFILE_VP9_0 0 -#define FF_PROFILE_VP9_1 1 -#define FF_PROFILE_VP9_2 2 -#define FF_PROFILE_VP9_3 3 - -#define FF_PROFILE_HEVC_MAIN 1 -#define FF_PROFILE_HEVC_MAIN_10 2 -#define FF_PROFILE_HEVC_MAIN_STILL_PICTURE 3 -#define FF_PROFILE_HEVC_REXT 4 - -#define FF_PROFILE_VVC_MAIN_10 1 -#define FF_PROFILE_VVC_MAIN_10_444 33 - -#define FF_PROFILE_AV1_MAIN 0 -#define FF_PROFILE_AV1_HIGH 1 -#define FF_PROFILE_AV1_PROFESSIONAL 2 - -#define FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT 0xc0 -#define FF_PROFILE_MJPEG_HUFFMAN_EXTENDED_SEQUENTIAL_DCT 0xc1 -#define FF_PROFILE_MJPEG_HUFFMAN_PROGRESSIVE_DCT 0xc2 -#define FF_PROFILE_MJPEG_HUFFMAN_LOSSLESS 0xc3 -#define FF_PROFILE_MJPEG_JPEG_LS 0xf7 - -#define FF_PROFILE_SBC_MSBC 1 - -#define FF_PROFILE_PRORES_PROXY 0 -#define FF_PROFILE_PRORES_LT 1 -#define FF_PROFILE_PRORES_STANDARD 2 -#define FF_PROFILE_PRORES_HQ 3 -#define FF_PROFILE_PRORES_4444 4 -#define FF_PROFILE_PRORES_XQ 5 - -#define FF_PROFILE_ARIB_PROFILE_A 0 -#define FF_PROFILE_ARIB_PROFILE_C 1 - -#define FF_PROFILE_KLVA_SYNC 0 -#define FF_PROFILE_KLVA_ASYNC 
1 - - /** - * level - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - int level; -#define FF_LEVEL_UNKNOWN -99 - - /** - * Skip loop filtering for selected frames. - * - encoding: unused - * - decoding: Set by user. - */ - enum AVDiscard skip_loop_filter; - - /** - * Skip IDCT/dequantization for selected frames. - * - encoding: unused - * - decoding: Set by user. - */ - enum AVDiscard skip_idct; - - /** - * Skip decoding for selected frames. - * - encoding: unused - * - decoding: Set by user. - */ - enum AVDiscard skip_frame; - - /** - * Header containing style information for text subtitles. - * For SUBTITLE_ASS subtitle type, it should contain the whole ASS - * [Script Info] and [V4+ Styles] section, plus the [Events] line and - * the Format line following. It shouldn't include any Dialogue line. - * - encoding: Set/allocated/freed by user (before avcodec_open2()) - * - decoding: Set/allocated/freed by libavcodec (by avcodec_open2()) - */ - uint8_t *subtitle_header; - int subtitle_header_size; - - /** - * Audio only. The number of "priming" samples (padding) inserted by the - * encoder at the beginning of the audio. I.e. this number of leading - * decoded samples must be discarded by the caller to get the original audio - * without leading padding. - * - * - decoding: unused - * - encoding: Set by libavcodec. The timestamps on the output packets are - * adjusted by the encoder so that they always refer to the - * first sample of the data actually contained in the packet, - * including any added padding. E.g. if the timebase is - * 1/samplerate and the timestamp of the first input sample is - * 0, the timestamp of the first output packet will be - * -initial_padding. - */ - int initial_padding; - - /** - * - decoding: For codecs that store a framerate value in the compressed - * bitstream, the decoder may export it here. { 0, 1} when - * unknown. - * - encoding: May be used to signal the framerate of CFR content to an - * encoder. 
- */ - AVRational framerate; - - /** - * Nominal unaccelerated pixel format, see AV_PIX_FMT_xxx. - * - encoding: unused. - * - decoding: Set by libavcodec before calling get_format() - */ - enum AVPixelFormat sw_pix_fmt; - - /** - * Timebase in which pkt_dts/pts and AVPacket.dts/pts are. - * - encoding unused. - * - decoding set by user. - */ - AVRational pkt_timebase; - - /** - * AVCodecDescriptor - * - encoding: unused. - * - decoding: set by libavcodec. - */ - const AVCodecDescriptor *codec_descriptor; - - /** - * Current statistics for PTS correction. - * - decoding: maintained and used by libavcodec, not intended to be used by user apps - * - encoding: unused - */ - int64_t pts_correction_num_faulty_pts; /// Number of incorrect PTS values so far - int64_t pts_correction_num_faulty_dts; /// Number of incorrect DTS values so far - int64_t pts_correction_last_pts; /// PTS of the last frame - int64_t pts_correction_last_dts; /// DTS of the last frame - - /** - * Character encoding of the input subtitles file. - * - decoding: set by user - * - encoding: unused - */ - char *sub_charenc; - - /** - * Subtitles character encoding mode. Formats or codecs might be adjusting - * this setting (if they are doing the conversion themselves for instance). - * - decoding: set by libavcodec - * - encoding: unused - */ - int sub_charenc_mode; -#define FF_SUB_CHARENC_MODE_DO_NOTHING -1 ///< do nothing (demuxer outputs a stream supposed to be already in UTF-8, or the codec is bitmap for instance) -#define FF_SUB_CHARENC_MODE_AUTOMATIC 0 ///< libavcodec will select the mode itself -#define FF_SUB_CHARENC_MODE_PRE_DECODER 1 ///< the AVPacket data needs to be recoded to UTF-8 before being fed to the decoder, requires iconv -#define FF_SUB_CHARENC_MODE_IGNORE 2 ///< neither convert the subtitles, nor check them for valid UTF-8 - - /** - * Skip processing alpha if supported by codec. 
- * Note that if the format uses pre-multiplied alpha (common with VP6, - * and recommended due to better video quality/compression) - * the image will look as if alpha-blended onto a black background. - * However for formats that do not use pre-multiplied alpha - * there might be serious artefacts (though e.g. libswscale currently - * assumes pre-multiplied alpha anyway). - * - * - decoding: set by user - * - encoding: unused - */ - int skip_alpha; - - /** - * Number of samples to skip after a discontinuity - * - decoding: unused - * - encoding: set by libavcodec - */ - int seek_preroll; - -#if FF_API_DEBUG_MV - /** - * @deprecated unused - */ - attribute_deprecated - int debug_mv; -#define FF_DEBUG_VIS_MV_P_FOR 0x00000001 //visualize forward predicted MVs of P frames -#define FF_DEBUG_VIS_MV_B_FOR 0x00000002 //visualize forward predicted MVs of B frames -#define FF_DEBUG_VIS_MV_B_BACK 0x00000004 //visualize backward predicted MVs of B frames -#endif - - /** - * custom intra quantization matrix - * - encoding: Set by user, can be NULL. - * - decoding: unused. - */ - uint16_t *chroma_intra_matrix; - - /** - * dump format separator. - * can be ", " or "\n " or anything else - * - encoding: Set by user. - * - decoding: Set by user. - */ - uint8_t *dump_separator; - - /** - * ',' separated list of allowed decoders. - * If NULL then all are allowed - * - encoding: unused - * - decoding: set by user - */ - char *codec_whitelist; - - /** - * Properties of the stream that gets decoded - * - encoding: unused - * - decoding: set by libavcodec - */ - unsigned properties; -#define FF_CODEC_PROPERTY_LOSSLESS 0x00000001 -#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS 0x00000002 -#define FF_CODEC_PROPERTY_FILM_GRAIN 0x00000004 - - /** - * Additional data associated with the entire coded stream. - * - * - decoding: unused - * - encoding: may be set by libavcodec after avcodec_open2(). 
- */ - AVPacketSideData *coded_side_data; - int nb_coded_side_data; - - /** - * A reference to the AVHWFramesContext describing the input (for encoding) - * or output (decoding) frames. The reference is set by the caller and - * afterwards owned (and freed) by libavcodec - it should never be read by - * the caller after being set. - * - * - decoding: This field should be set by the caller from the get_format() - * callback. The previous reference (if any) will always be - * unreffed by libavcodec before the get_format() call. - * - * If the default get_buffer2() is used with a hwaccel pixel - * format, then this AVHWFramesContext will be used for - * allocating the frame buffers. - * - * - encoding: For hardware encoders configured to use a hwaccel pixel - * format, this field should be set by the caller to a reference - * to the AVHWFramesContext describing input frames. - * AVHWFramesContext.format must be equal to - * AVCodecContext.pix_fmt. - * - * This field should be set before avcodec_open2() is called. - */ - AVBufferRef *hw_frames_ctx; - -#if FF_API_SUB_TEXT_FORMAT - /** - * @deprecated unused - */ - attribute_deprecated - int sub_text_format; -#define FF_SUB_TEXT_FMT_ASS 0 -#endif - - /** - * Audio only. The amount of padding (in samples) appended by the encoder to - * the end of the audio. I.e. this number of decoded samples must be - * discarded by the caller from the end of the stream to get the original - * audio without any trailing padding. - * - * - decoding: unused - * - encoding: unused - */ - int trailing_padding; - - /** - * The number of pixels per image to maximally accept. - * - * - decoding: set by user - * - encoding: set by user - */ - int64_t max_pixels; - - /** - * A reference to the AVHWDeviceContext describing the device which will - * be used by a hardware encoder/decoder. The reference is set by the - * caller and afterwards owned (and freed) by libavcodec. 
- * - * This should be used if either the codec device does not require - * hardware frames or any that are used are to be allocated internally by - * libavcodec. If the user wishes to supply any of the frames used as - * encoder input or decoder output then hw_frames_ctx should be used - * instead. When hw_frames_ctx is set in get_format() for a decoder, this - * field will be ignored while decoding the associated stream segment, but - * may again be used on a following one after another get_format() call. - * - * For both encoders and decoders this field should be set before - * avcodec_open2() is called and must not be written to thereafter. - * - * Note that some decoders may require this field to be set initially in - * order to support hw_frames_ctx at all - in that case, all frames - * contexts used must be created on the same device. - */ - AVBufferRef *hw_device_ctx; - - /** - * Bit set of AV_HWACCEL_FLAG_* flags, which affect hardware accelerated - * decoding (if active). - * - encoding: unused - * - decoding: Set by user (either before avcodec_open2(), or in the - * AVCodecContext.get_format callback) - */ - int hwaccel_flags; - - /** - * Video decoding only. Certain video codecs support cropping, meaning that - * only a sub-rectangle of the decoded frame is intended for display. This - * option controls how cropping is handled by libavcodec. - * - * When set to 1 (the default), libavcodec will apply cropping internally. - * I.e. it will modify the output frame width/height fields and offset the - * data pointers (only by as much as possible while preserving alignment, or - * by the full amount if the AV_CODEC_FLAG_UNALIGNED flag is set) so that - * the frames output by the decoder refer only to the cropped area. The - * crop_* fields of the output frames will be zero. - * - * When set to 0, the width/height fields of the output frames will be set - * to the coded dimensions and the crop_* fields will describe the cropping - * rectangle. 
Applying the cropping is left to the caller. - * - * @warning When hardware acceleration with opaque output frames is used, - * libavcodec is unable to apply cropping from the top/left border. - * - * @note when this option is set to zero, the width/height fields of the - * AVCodecContext and output AVFrames have different meanings. The codec - * context fields store display dimensions (with the coded dimensions in - * coded_width/height), while the frame fields store the coded dimensions - * (with the display dimensions being determined by the crop_* fields). - */ - int apply_cropping; - - /* - * Video decoding only. Sets the number of extra hardware frames which - * the decoder will allocate for use by the caller. This must be set - * before avcodec_open2() is called. - * - * Some hardware decoders require all frames that they will use for - * output to be defined in advance before decoding starts. For such - * decoders, the hardware frame pool must therefore be of a fixed size. - * The extra frames set here are on top of any number that the decoder - * needs internally in order to operate normally (for example, frames - * used as reference pictures). - */ - int extra_hw_frames; - - /** - * The percentage of damaged samples to discard a frame. - * - * - decoding: set by user - * - encoding: unused - */ - int discard_damaged_percentage; - - /** - * The number of samples per frame to maximally accept. - * - * - decoding: set by user - * - encoding: set by user - */ - int64_t max_samples; - - /** - * Bit set of AV_CODEC_EXPORT_DATA_* flags, which affects the kind of - * metadata exported in frame, packet, or coded stream side data by - * decoders and encoders. - * - * - decoding: set by user - * - encoding: set by user - */ - int export_side_data; - - /** - * This callback is called at the beginning of each packet to get a data - * buffer for it. 
- * - * The following field will be set in the packet before this callback is - * called: - * - size - * This callback must use the above value to calculate the required buffer size, - * which must padded by at least AV_INPUT_BUFFER_PADDING_SIZE bytes. - * - * In some specific cases, the encoder may not use the entire buffer allocated by this - * callback. This will be reflected in the size value in the packet once returned by - * avcodec_receive_packet(). - * - * This callback must fill the following fields in the packet: - * - data: alignment requirements for AVPacket apply, if any. Some architectures and - * encoders may benefit from having aligned data. - * - buf: must contain a pointer to an AVBufferRef structure. The packet's - * data pointer must be contained in it. See: av_buffer_create(), av_buffer_alloc(), - * and av_buffer_ref(). - * - * If AV_CODEC_CAP_DR1 is not set then get_encode_buffer() must call - * avcodec_default_get_encode_buffer() instead of providing a buffer allocated by - * some other means. - * - * The flags field may contain a combination of AV_GET_ENCODE_BUFFER_FLAG_ flags. - * They may be used for example to hint what use the buffer may get after being - * created. - * Implementations of this callback may ignore flags they don't understand. - * If AV_GET_ENCODE_BUFFER_FLAG_REF is set in flags then the packet may be reused - * (read and/or written to if it is writable) later by libavcodec. - * - * This callback must be thread-safe, as when frame threading is used, it may - * be called from multiple threads simultaneously. - * - * @see avcodec_default_get_encode_buffer() - * - * - encoding: Set by libavcodec, user can override. - * - decoding: unused - */ - int (*get_encode_buffer)(struct AVCodecContext *s, AVPacket *pkt, int flags); - - /** - * Audio channel layout. - * - encoding: must be set by the caller, to one of AVCodec.ch_layouts. - * - decoding: may be set by the caller if known e.g. from the container. 
- * The decoder can then override during decoding as needed. - */ - AVChannelLayout ch_layout; -} AVCodecContext; - -/** - * @defgroup lavc_hwaccel AVHWAccel - * - * @note Nothing in this structure should be accessed by the user. At some - * point in future it will not be externally visible at all. - * - * @{ - */ -typedef struct AVHWAccel { - /** - * Name of the hardware accelerated codec. - * The name is globally unique among encoders and among decoders (but an - * encoder and a decoder can share the same name). - */ - const char *name; - - /** - * Type of codec implemented by the hardware accelerator. - * - * See AVMEDIA_TYPE_xxx - */ - enum AVMediaType type; - - /** - * Codec implemented by the hardware accelerator. - * - * See AV_CODEC_ID_xxx - */ - enum AVCodecID id; - - /** - * Supported pixel format. - * - * Only hardware accelerated formats are supported here. - */ - enum AVPixelFormat pix_fmt; - - /** - * Hardware accelerated codec capabilities. - * see AV_HWACCEL_CODEC_CAP_* - */ - int capabilities; - - /***************************************************************** - * No fields below this line are part of the public API. They - * may not be used outside of libavcodec and can be changed and - * removed at will. - * New public fields should be added right above. - ***************************************************************** - */ - - /** - * Allocate a custom buffer - */ - int (*alloc_frame)(AVCodecContext *avctx, AVFrame *frame); - - /** - * Called at the beginning of each frame or field picture. - * - * Meaningful frame information (codec specific) is guaranteed to - * be parsed at this point. This function is mandatory. - * - * Note that buf can be NULL along with buf_size set to 0. - * Otherwise, this means the whole frame is available at this point. 
- * - * @param avctx the codec context - * @param buf the frame data buffer base - * @param buf_size the size of the frame in bytes - * @return zero if successful, a negative value otherwise - */ - int (*start_frame)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size); - - /** - * Callback for parameter data (SPS/PPS/VPS etc). - * - * Useful for hardware decoders which keep persistent state about the - * video parameters, and need to receive any changes to update that state. - * - * @param avctx the codec context - * @param type the nal unit type - * @param buf the nal unit data buffer - * @param buf_size the size of the nal unit in bytes - * @return zero if successful, a negative value otherwise - */ - int (*decode_params)(AVCodecContext *avctx, int type, const uint8_t *buf, uint32_t buf_size); - - /** - * Callback for each slice. - * - * Meaningful slice information (codec specific) is guaranteed to - * be parsed at this point. This function is mandatory. - * - * @param avctx the codec context - * @param buf the slice data buffer base - * @param buf_size the size of the slice in bytes - * @return zero if successful, a negative value otherwise - */ - int (*decode_slice)(AVCodecContext *avctx, const uint8_t *buf, uint32_t buf_size); - - /** - * Called at the end of each frame or field picture. - * - * The whole picture is parsed at this point and can now be sent - * to the hardware accelerator. This function is mandatory. - * - * @param avctx the codec context - * @return zero if successful, a negative value otherwise - */ - int (*end_frame)(AVCodecContext *avctx); - - /** - * Size of per-frame hardware accelerator private data. - * - * Private data is allocated with av_mallocz() before - * AVCodecContext.get_buffer() and deallocated after - * AVCodecContext.release_buffer(). - */ - int frame_priv_data_size; - - /** - * Initialize the hwaccel private data. 
- * - * This will be called from ff_get_format(), after hwaccel and - * hwaccel_context are set and the hwaccel private data in AVCodecInternal - * is allocated. - */ - int (*init)(AVCodecContext *avctx); - - /** - * Uninitialize the hwaccel private data. - * - * This will be called from get_format() or avcodec_close(), after hwaccel - * and hwaccel_context are already uninitialized. - */ - int (*uninit)(AVCodecContext *avctx); - - /** - * Size of the private data to allocate in - * AVCodecInternal.hwaccel_priv_data. - */ - int priv_data_size; - - /** - * Internal hwaccel capabilities. - */ - int caps_internal; - - /** - * Fill the given hw_frames context with current codec parameters. Called - * from get_format. Refer to avcodec_get_hw_frames_parameters() for - * details. - * - * This CAN be called before AVHWAccel.init is called, and you must assume - * that avctx->hwaccel_priv_data is invalid. - */ - int (*frame_params)(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx); -} AVHWAccel; - -/** - * HWAccel is experimental and is thus avoided in favor of non experimental - * codecs - */ -#define AV_HWACCEL_CODEC_CAP_EXPERIMENTAL 0x0200 - -/** - * Hardware acceleration should be used for decoding even if the codec level - * used is unknown or higher than the maximum supported level reported by the - * hardware driver. - * - * It's generally a good idea to pass this flag unless you have a specific - * reason not to, as hardware tends to under-report supported levels. - */ -#define AV_HWACCEL_FLAG_IGNORE_LEVEL (1 << 0) - -/** - * Hardware acceleration can output YUV pixel formats with a different chroma - * sampling than 4:2:0 and/or other than 8 bits per component. - */ -#define AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH (1 << 1) - -/** - * Hardware acceleration should still be attempted for decoding when the - * codec profile does not match the reported capabilities of the hardware. 
- * - * For example, this can be used to try to decode baseline profile H.264 - * streams in hardware - it will often succeed, because many streams marked - * as baseline profile actually conform to constrained baseline profile. - * - * @warning If the stream is actually not supported then the behaviour is - * undefined, and may include returning entirely incorrect output - * while indicating success. - */ -#define AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH (1 << 2) - -/** - * @} - */ - -enum AVSubtitleType { - SUBTITLE_NONE, - - SUBTITLE_BITMAP, ///< A bitmap, pict will be set - - /** - * Plain text, the text field must be set by the decoder and is - * authoritative. ass and pict fields may contain approximations. - */ - SUBTITLE_TEXT, - - /** - * Formatted text, the ass field must be set by the decoder and is - * authoritative. pict and text fields may contain approximations. - */ - SUBTITLE_ASS, -}; - -#define AV_SUBTITLE_FLAG_FORCED 0x00000001 - -typedef struct AVSubtitleRect { - int x; ///< top left corner of pict, undefined when pict is not set - int y; ///< top left corner of pict, undefined when pict is not set - int w; ///< width of pict, undefined when pict is not set - int h; ///< height of pict, undefined when pict is not set - int nb_colors; ///< number of colors in pict, undefined when pict is not set - - /** - * data+linesize for the bitmap of this subtitle. - * Can be set for text/ass as well once they are rendered. - */ - uint8_t *data[4]; - int linesize[4]; - - enum AVSubtitleType type; - - char *text; ///< 0 terminated plain UTF-8 text - - /** - * 0 terminated ASS/SSA compatible event line. - * The presentation of this is unaffected by the other values in this - * struct. 
- */ - char *ass; - - int flags; -} AVSubtitleRect; - -typedef struct AVSubtitle { - uint16_t format; /* 0 = graphics */ - uint32_t start_display_time; /* relative to packet pts, in ms */ - uint32_t end_display_time; /* relative to packet pts, in ms */ - unsigned num_rects; - AVSubtitleRect **rects; - int64_t pts; ///< Same as packet pts, in AV_TIME_BASE -} AVSubtitle; - -/** - * Return the LIBAVCODEC_VERSION_INT constant. - */ -unsigned avcodec_version(void); - -/** - * Return the libavcodec build-time configuration. - */ -const char *avcodec_configuration(void); - -/** - * Return the libavcodec license. - */ -const char *avcodec_license(void); - -/** - * Allocate an AVCodecContext and set its fields to default values. The - * resulting struct should be freed with avcodec_free_context(). - * - * @param codec if non-NULL, allocate private data and initialize defaults - * for the given codec. It is illegal to then call avcodec_open2() - * with a different codec. - * If NULL, then the codec-specific defaults won't be initialized, - * which may result in suboptimal default settings (this is - * important mainly for encoders, e.g. libx264). - * - * @return An AVCodecContext filled with default values or NULL on failure. - */ -AVCodecContext *avcodec_alloc_context3(const AVCodec *codec); - -/** - * Free the codec context and everything associated with it and write NULL to - * the provided pointer. - */ -void avcodec_free_context(AVCodecContext **avctx); - -/** - * Get the AVClass for AVCodecContext. It can be used in combination with - * AV_OPT_SEARCH_FAKE_OBJ for examining options. - * - * @see av_opt_find(). - */ -const AVClass *avcodec_get_class(void); - -#if FF_API_GET_FRAME_CLASS -/** - * @deprecated This function should not be used. - */ -attribute_deprecated -const AVClass *avcodec_get_frame_class(void); -#endif - -/** - * Get the AVClass for AVSubtitleRect. It can be used in combination with - * AV_OPT_SEARCH_FAKE_OBJ for examining options. 
- * - * @see av_opt_find(). - */ -const AVClass *avcodec_get_subtitle_rect_class(void); - -/** - * Fill the parameters struct based on the values from the supplied codec - * context. Any allocated fields in par are freed and replaced with duplicates - * of the corresponding fields in codec. - * - * @return >= 0 on success, a negative AVERROR code on failure - */ -int avcodec_parameters_from_context(AVCodecParameters *par, - const AVCodecContext *codec); - -/** - * Fill the codec context based on the values from the supplied codec - * parameters. Any allocated fields in codec that have a corresponding field in - * par are freed and replaced with duplicates of the corresponding field in par. - * Fields in codec that do not have a counterpart in par are not touched. - * - * @return >= 0 on success, a negative AVERROR code on failure. - */ -int avcodec_parameters_to_context(AVCodecContext *codec, - const AVCodecParameters *par); - -/** - * Initialize the AVCodecContext to use the given AVCodec. Prior to using this - * function the context has to be allocated with avcodec_alloc_context3(). - * - * The functions avcodec_find_decoder_by_name(), avcodec_find_encoder_by_name(), - * avcodec_find_decoder() and avcodec_find_encoder() provide an easy way for - * retrieving a codec. - * - * @note Always call this function before using decoding routines (such as - * @ref avcodec_receive_frame()). - * - * @code - * av_dict_set(&opts, "b", "2.5M", 0); - * codec = avcodec_find_decoder(AV_CODEC_ID_H264); - * if (!codec) - * exit(1); - * - * context = avcodec_alloc_context3(codec); - * - * if (avcodec_open2(context, codec, opts) < 0) - * exit(1); - * @endcode - * - * @param avctx The context to initialize. - * @param codec The codec to open this context for. If a non-NULL codec has been - * previously passed to avcodec_alloc_context3() or - * for this context, then this parameter MUST be either NULL or - * equal to the previously passed codec. 
- * @param options A dictionary filled with AVCodecContext and codec-private options. - * On return this object will be filled with options that were not found. - * - * @return zero on success, a negative value on error - * @see avcodec_alloc_context3(), avcodec_find_decoder(), avcodec_find_encoder(), - * av_dict_set(), av_opt_find(). - */ -int avcodec_open2(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options); - -/** - * Close a given AVCodecContext and free all the data associated with it - * (but not the AVCodecContext itself). - * - * Calling this function on an AVCodecContext that hasn't been opened will free - * the codec-specific data allocated in avcodec_alloc_context3() with a non-NULL - * codec. Subsequent calls will do nothing. - * - * @note Do not use this function. Use avcodec_free_context() to destroy a - * codec context (either open or closed). Opening and closing a codec context - * multiple times is not supported anymore -- use multiple codec contexts - * instead. - */ -int avcodec_close(AVCodecContext *avctx); - -/** - * Free all allocated data in the given subtitle struct. - * - * @param sub AVSubtitle to free. - */ -void avsubtitle_free(AVSubtitle *sub); - -/** - * @} - */ - -/** - * @addtogroup lavc_decoding - * @{ - */ - -/** - * The default callback for AVCodecContext.get_buffer2(). It is made public so - * it can be called by custom get_buffer2() implementations for decoders without - * AV_CODEC_CAP_DR1 set. - */ -int avcodec_default_get_buffer2(AVCodecContext *s, AVFrame *frame, int flags); - -/** - * The default callback for AVCodecContext.get_encode_buffer(). It is made public so - * it can be called by custom get_encode_buffer() implementations for encoders without - * AV_CODEC_CAP_DR1 set. 
- */ -int avcodec_default_get_encode_buffer(AVCodecContext *s, AVPacket *pkt, int flags); - -/** - * Modify width and height values so that they will result in a memory - * buffer that is acceptable for the codec if you do not use any horizontal - * padding. - * - * May only be used if a codec with AV_CODEC_CAP_DR1 has been opened. - */ -void avcodec_align_dimensions(AVCodecContext *s, int *width, int *height); - -/** - * Modify width and height values so that they will result in a memory - * buffer that is acceptable for the codec if you also ensure that all - * line sizes are a multiple of the respective linesize_align[i]. - * - * May only be used if a codec with AV_CODEC_CAP_DR1 has been opened. - */ -void avcodec_align_dimensions2(AVCodecContext *s, int *width, int *height, - int linesize_align[AV_NUM_DATA_POINTERS]); - -/** - * Converts AVChromaLocation to swscale x/y chroma position. - * - * The positions represent the chroma (0,0) position in a coordinates system - * with luma (0,0) representing the origin and luma(1,1) representing 256,256 - * - * @param xpos horizontal chroma sample position - * @param ypos vertical chroma sample position - */ -int avcodec_enum_to_chroma_pos(int *xpos, int *ypos, enum AVChromaLocation pos); - -/** - * Converts swscale x/y chroma position to AVChromaLocation. - * - * The positions represent the chroma (0,0) position in a coordinates system - * with luma (0,0) representing the origin and luma(1,1) representing 256,256 - * - * @param xpos horizontal chroma sample position - * @param ypos vertical chroma sample position - */ -enum AVChromaLocation avcodec_chroma_pos_to_enum(int xpos, int ypos); - -/** - * Decode a subtitle message. - * Return a negative value on error, otherwise return the number of bytes used. - * If no subtitle could be decompressed, got_sub_ptr is zero. - * Otherwise, the subtitle is stored in *sub. - * Note that AV_CODEC_CAP_DR1 is not available for subtitle codecs. 
This is for - * simplicity, because the performance difference is expected to be negligible - * and reusing a get_buffer written for video codecs would probably perform badly - * due to a potentially very different allocation pattern. - * - * Some decoders (those marked with AV_CODEC_CAP_DELAY) have a delay between input - * and output. This means that for some packets they will not immediately - * produce decoded output and need to be flushed at the end of decoding to get - * all the decoded data. Flushing is done by calling this function with packets - * with avpkt->data set to NULL and avpkt->size set to 0 until it stops - * returning subtitles. It is safe to flush even those decoders that are not - * marked with AV_CODEC_CAP_DELAY, then no subtitles will be returned. - * - * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() - * before packets may be fed to the decoder. - * - * @param avctx the codec context - * @param[out] sub The preallocated AVSubtitle in which the decoded subtitle will be stored, - * must be freed with avsubtitle_free if *got_sub_ptr is set. - * @param[in,out] got_sub_ptr Zero if no subtitle could be decompressed, otherwise, it is nonzero. - * @param[in] avpkt The input AVPacket containing the input buffer. - */ -int avcodec_decode_subtitle2(AVCodecContext *avctx, AVSubtitle *sub, - int *got_sub_ptr, - AVPacket *avpkt); - -/** - * Supply raw packet data as input to a decoder. - * - * Internally, this call will copy relevant AVCodecContext fields, which can - * influence decoding per-packet, and apply them when the packet is actually - * decoded. (For example AVCodecContext.skip_frame, which might direct the - * decoder to drop the frame contained by the packet sent with this function.) - * - * @warning The input buffer, avpkt->data must be AV_INPUT_BUFFER_PADDING_SIZE - * larger than the actual read bytes because some optimized bitstream - * readers read 32 or 64 bits at once and could read over the end. 
- * - * @note The AVCodecContext MUST have been opened with @ref avcodec_open2() - * before packets may be fed to the decoder. - * - * @param avctx codec context - * @param[in] avpkt The input AVPacket. Usually, this will be a single video - * frame, or several complete audio frames. - * Ownership of the packet remains with the caller, and the - * decoder will not write to the packet. The decoder may create - * a reference to the packet data (or copy it if the packet is - * not reference-counted). - * Unlike with older APIs, the packet is always fully consumed, - * and if it contains multiple frames (e.g. some audio codecs), - * will require you to call avcodec_receive_frame() multiple - * times afterwards before you can send a new packet. - * It can be NULL (or an AVPacket with data set to NULL and - * size set to 0); in this case, it is considered a flush - * packet, which signals the end of the stream. Sending the - * first flush packet will return success. Subsequent ones are - * unnecessary and will return AVERROR_EOF. If the decoder - * still has frames buffered, it will return them after sending - * a flush packet. - * - * @return 0 on success, otherwise negative error code: - * AVERROR(EAGAIN): input is not accepted in the current state - user - * must read output with avcodec_receive_frame() (once - * all output is read, the packet should be resent, and - * the call will not fail with EAGAIN). - * AVERROR_EOF: the decoder has been flushed, and no new packets can - * be sent to it (also returned if more than 1 flush - * packet is sent) - * AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush - * AVERROR(ENOMEM): failed to add packet to internal queue, or similar - * other errors: legitimate decoding errors - */ -int avcodec_send_packet(AVCodecContext *avctx, const AVPacket *avpkt); - -/** - * Return decoded output data from a decoder. 
- * - * @param avctx codec context - * @param frame This will be set to a reference-counted video or audio - * frame (depending on the decoder type) allocated by the - * decoder. Note that the function will always call - * av_frame_unref(frame) before doing anything else. - * - * @return - * 0: success, a frame was returned - * AVERROR(EAGAIN): output is not available in this state - user must try - * to send new input - * AVERROR_EOF: the decoder has been fully flushed, and there will be - * no more output frames - * AVERROR(EINVAL): codec not opened, or it is an encoder - * AVERROR_INPUT_CHANGED: current decoded frame has changed parameters - * with respect to first decoded frame. Applicable - * when flag AV_CODEC_FLAG_DROPCHANGED is set. - * other negative values: legitimate decoding errors - */ -int avcodec_receive_frame(AVCodecContext *avctx, AVFrame *frame); - -/** - * Supply a raw video or audio frame to the encoder. Use avcodec_receive_packet() - * to retrieve buffered output packets. - * - * @param avctx codec context - * @param[in] frame AVFrame containing the raw audio or video frame to be encoded. - * Ownership of the frame remains with the caller, and the - * encoder will not write to the frame. The encoder may create - * a reference to the frame data (or copy it if the frame is - * not reference-counted). - * It can be NULL, in which case it is considered a flush - * packet. This signals the end of the stream. If the encoder - * still has packets buffered, it will return them after this - * call. Once flushing mode has been entered, additional flush - * packets are ignored, and sending frames will return - * AVERROR_EOF. - * - * For audio: - * If AV_CODEC_CAP_VARIABLE_FRAME_SIZE is set, then each frame - * can have any number of samples. - * If it is not set, frame->nb_samples must be equal to - * avctx->frame_size for all frames except the last. - * The final frame may be smaller than avctx->frame_size. 
- * @return 0 on success, otherwise negative error code: - * AVERROR(EAGAIN): input is not accepted in the current state - user - * must read output with avcodec_receive_packet() (once - * all output is read, the packet should be resent, and - * the call will not fail with EAGAIN). - * AVERROR_EOF: the encoder has been flushed, and no new frames can - * be sent to it - * AVERROR(EINVAL): codec not opened, it is a decoder, or requires flush - * AVERROR(ENOMEM): failed to add packet to internal queue, or similar - * other errors: legitimate encoding errors - */ -int avcodec_send_frame(AVCodecContext *avctx, const AVFrame *frame); - -/** - * Read encoded data from the encoder. - * - * @param avctx codec context - * @param avpkt This will be set to a reference-counted packet allocated by the - * encoder. Note that the function will always call - * av_packet_unref(avpkt) before doing anything else. - * @return 0 on success, otherwise negative error code: - * AVERROR(EAGAIN): output is not available in the current state - user - * must try to send input - * AVERROR_EOF: the encoder has been fully flushed, and there will be - * no more output packets - * AVERROR(EINVAL): codec not opened, or it is a decoder - * other errors: legitimate encoding errors - */ -int avcodec_receive_packet(AVCodecContext *avctx, AVPacket *avpkt); - -/** - * Create and return a AVHWFramesContext with values adequate for hardware - * decoding. This is meant to get called from the get_format callback, and is - * a helper for preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. - * This API is for decoding with certain hardware acceleration modes/APIs only. - * - * The returned AVHWFramesContext is not initialized. The caller must do this - * with av_hwframe_ctx_init(). - * - * Calling this function is not a requirement, but makes it simpler to avoid - * codec or hardware API specific details when manually allocating frames. 
- * - * Alternatively to this, an API user can set AVCodecContext.hw_device_ctx, - * which sets up AVCodecContext.hw_frames_ctx fully automatically, and makes - * it unnecessary to call this function or having to care about - * AVHWFramesContext initialization at all. - * - * There are a number of requirements for calling this function: - * - * - It must be called from get_format with the same avctx parameter that was - * passed to get_format. Calling it outside of get_format is not allowed, and - * can trigger undefined behavior. - * - The function is not always supported (see description of return values). - * Even if this function returns successfully, hwaccel initialization could - * fail later. (The degree to which implementations check whether the stream - * is actually supported varies. Some do this check only after the user's - * get_format callback returns.) - * - The hw_pix_fmt must be one of the choices suggested by get_format. If the - * user decides to use a AVHWFramesContext prepared with this API function, - * the user must return the same hw_pix_fmt from get_format. - * - The device_ref passed to this function must support the given hw_pix_fmt. - * - After calling this API function, it is the user's responsibility to - * initialize the AVHWFramesContext (returned by the out_frames_ref parameter), - * and to set AVCodecContext.hw_frames_ctx to it. If done, this must be done - * before returning from get_format (this is implied by the normal - * AVCodecContext.hw_frames_ctx API rules). - * - The AVHWFramesContext parameters may change every time time get_format is - * called. Also, AVCodecContext.hw_frames_ctx is reset before get_format. So - * you are inherently required to go through this process again on every - * get_format call. - * - It is perfectly possible to call this function without actually using - * the resulting AVHWFramesContext. 
One use-case might be trying to reuse a - * previously initialized AVHWFramesContext, and calling this API function - * only to test whether the required frame parameters have changed. - * - Fields that use dynamically allocated values of any kind must not be set - * by the user unless setting them is explicitly allowed by the documentation. - * If the user sets AVHWFramesContext.free and AVHWFramesContext.user_opaque, - * the new free callback must call the potentially set previous free callback. - * This API call may set any dynamically allocated fields, including the free - * callback. - * - * The function will set at least the following fields on AVHWFramesContext - * (potentially more, depending on hwaccel API): - * - * - All fields set by av_hwframe_ctx_alloc(). - * - Set the format field to hw_pix_fmt. - * - Set the sw_format field to the most suited and most versatile format. (An - * implication is that this will prefer generic formats over opaque formats - * with arbitrary restrictions, if possible.) - * - Set the width/height fields to the coded frame size, rounded up to the - * API-specific minimum alignment. - * - Only _if_ the hwaccel requires a pre-allocated pool: set the initial_pool_size - * field to the number of maximum reference surfaces possible with the codec, - * plus 1 surface for the user to work (meaning the user can safely reference - * at most 1 decoded surface at a time), plus additional buffering introduced - * by frame threading. If the hwaccel does not require pre-allocation, the - * field is left to 0, and the decoder will allocate new surfaces on demand - * during decoding. - * - Possibly AVHWFramesContext.hwctx fields, depending on the underlying - * hardware API. - * - * Essentially, out_frames_ref returns the same as av_hwframe_ctx_alloc(), but - * with basic frame parameters set. - * - * The function is stateless, and does not change the AVCodecContext or the - * device_ref AVHWDeviceContext. 
- * - * @param avctx The context which is currently calling get_format, and which - * implicitly contains all state needed for filling the returned - * AVHWFramesContext properly. - * @param device_ref A reference to the AVHWDeviceContext describing the device - * which will be used by the hardware decoder. - * @param hw_pix_fmt The hwaccel format you are going to return from get_format. - * @param out_frames_ref On success, set to a reference to an _uninitialized_ - * AVHWFramesContext, created from the given device_ref. - * Fields will be set to values required for decoding. - * Not changed if an error is returned. - * @return zero on success, a negative value on error. The following error codes - * have special semantics: - * AVERROR(ENOENT): the decoder does not support this functionality. Setup - * is always manual, or it is a decoder which does not - * support setting AVCodecContext.hw_frames_ctx at all, - * or it is a software format. - * AVERROR(EINVAL): it is known that hardware decoding is not supported for - * this configuration, or the device_ref is not supported - * for the hwaccel referenced by hw_pix_fmt. - */ -int avcodec_get_hw_frames_parameters(AVCodecContext *avctx, - AVBufferRef *device_ref, - enum AVPixelFormat hw_pix_fmt, - AVBufferRef **out_frames_ref); - - - -/** - * @defgroup lavc_parsing Frame parsing - * @{ - */ - -enum AVPictureStructure { - AV_PICTURE_STRUCTURE_UNKNOWN, //< unknown - AV_PICTURE_STRUCTURE_TOP_FIELD, //< coded as top field - AV_PICTURE_STRUCTURE_BOTTOM_FIELD, //< coded as bottom field - AV_PICTURE_STRUCTURE_FRAME, //< coded as frame -}; - -typedef struct AVCodecParserContext { - void *priv_data; - const struct AVCodecParser *parser; - int64_t frame_offset; /* offset of the current frame */ - int64_t cur_offset; /* current offset - (incremented by each av_parser_parse()) */ - int64_t next_frame_offset; /* offset of the next frame */ - /* video info */ - int pict_type; /* XXX: Put it back in AVCodecContext. 
*/ - /** - * This field is used for proper frame duration computation in lavf. - * It signals, how much longer the frame duration of the current frame - * is compared to normal frame duration. - * - * frame_duration = (1 + repeat_pict) * time_base - * - * It is used by codecs like H.264 to display telecined material. - */ - int repeat_pict; /* XXX: Put it back in AVCodecContext. */ - int64_t pts; /* pts of the current frame */ - int64_t dts; /* dts of the current frame */ - - /* private data */ - int64_t last_pts; - int64_t last_dts; - int fetch_timestamp; - -#define AV_PARSER_PTS_NB 4 - int cur_frame_start_index; - int64_t cur_frame_offset[AV_PARSER_PTS_NB]; - int64_t cur_frame_pts[AV_PARSER_PTS_NB]; - int64_t cur_frame_dts[AV_PARSER_PTS_NB]; - - int flags; -#define PARSER_FLAG_COMPLETE_FRAMES 0x0001 -#define PARSER_FLAG_ONCE 0x0002 -/// Set if the parser has a valid file offset -#define PARSER_FLAG_FETCHED_OFFSET 0x0004 -#define PARSER_FLAG_USE_CODEC_TS 0x1000 - - int64_t offset; ///< byte offset from starting packet start - int64_t cur_frame_end[AV_PARSER_PTS_NB]; - - /** - * Set by parser to 1 for key frames and 0 for non-key frames. - * It is initialized to -1, so if the parser doesn't set this flag, - * old-style fallback using AV_PICTURE_TYPE_I picture type as key frames - * will be used. - */ - int key_frame; - - // Timestamp generation support: - /** - * Synchronization point for start of timestamp generation. - * - * Set to >0 for sync point, 0 for no sync point and <0 for undefined - * (default). - * - * For example, this corresponds to presence of H.264 buffering period - * SEI message. - */ - int dts_sync_point; - - /** - * Offset of the current timestamp against last timestamp sync point in - * units of AVCodecContext.time_base. - * - * Set to INT_MIN when dts_sync_point unused. Otherwise, it must - * contain a valid timestamp offset. 
- * - * Note that the timestamp of sync point has usually a nonzero - * dts_ref_dts_delta, which refers to the previous sync point. Offset of - * the next frame after timestamp sync point will be usually 1. - * - * For example, this corresponds to H.264 cpb_removal_delay. - */ - int dts_ref_dts_delta; - - /** - * Presentation delay of current frame in units of AVCodecContext.time_base. - * - * Set to INT_MIN when dts_sync_point unused. Otherwise, it must - * contain valid non-negative timestamp delta (presentation time of a frame - * must not lie in the past). - * - * This delay represents the difference between decoding and presentation - * time of the frame. - * - * For example, this corresponds to H.264 dpb_output_delay. - */ - int pts_dts_delta; - - /** - * Position of the packet in file. - * - * Analogous to cur_frame_pts/dts - */ - int64_t cur_frame_pos[AV_PARSER_PTS_NB]; - - /** - * Byte position of currently parsed frame in stream. - */ - int64_t pos; - - /** - * Previous frame byte position. - */ - int64_t last_pos; - - /** - * Duration of the current frame. - * For audio, this is in units of 1 / AVCodecContext.sample_rate. - * For all other types, this is in units of AVCodecContext.time_base. - */ - int duration; - - enum AVFieldOrder field_order; - - /** - * Indicate whether a picture is coded as a frame, top field or bottom field. - * - * For example, H.264 field_pic_flag equal to 0 corresponds to - * AV_PICTURE_STRUCTURE_FRAME. An H.264 picture with field_pic_flag - * equal to 1 and bottom_field_flag equal to 0 corresponds to - * AV_PICTURE_STRUCTURE_TOP_FIELD. - */ - enum AVPictureStructure picture_structure; - - /** - * Picture number incremented in presentation or output order. - * This field may be reinitialized at the first picture of a new sequence. - * - * For example, this corresponds to H.264 PicOrderCnt. - */ - int output_picture_number; - - /** - * Dimensions of the decoded video intended for presentation. 
- */ - int width; - int height; - - /** - * Dimensions of the coded video. - */ - int coded_width; - int coded_height; - - /** - * The format of the coded data, corresponds to enum AVPixelFormat for video - * and for enum AVSampleFormat for audio. - * - * Note that a decoder can have considerable freedom in how exactly it - * decodes the data, so the format reported here might be different from the - * one returned by a decoder. - */ - int format; -} AVCodecParserContext; - -typedef struct AVCodecParser { - int codec_ids[7]; /* several codec IDs are permitted */ - int priv_data_size; - int (*parser_init)(AVCodecParserContext *s); - /* This callback never returns an error, a negative value means that - * the frame start was in a previous packet. */ - int (*parser_parse)(AVCodecParserContext *s, - AVCodecContext *avctx, - const uint8_t **poutbuf, int *poutbuf_size, - const uint8_t *buf, int buf_size); - void (*parser_close)(AVCodecParserContext *s); - int (*split)(AVCodecContext *avctx, const uint8_t *buf, int buf_size); -} AVCodecParser; - -/** - * Iterate over all registered codec parsers. - * - * @param opaque a pointer where libavcodec will store the iteration state. Must - * point to NULL to start the iteration. - * - * @return the next registered codec parser or NULL when the iteration is - * finished - */ -const AVCodecParser *av_parser_iterate(void **opaque); - -AVCodecParserContext *av_parser_init(int codec_id); - -/** - * Parse a packet. - * - * @param s parser context. - * @param avctx codec context. - * @param poutbuf set to pointer to parsed buffer or NULL if not yet finished. - * @param poutbuf_size set to size of parsed buffer or zero if not yet finished. - * @param buf input buffer. - * @param buf_size buffer size in bytes without the padding. I.e. the full buffer - size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE. - To signal EOF, this should be 0 (so that the last frame - can be output). - * @param pts input presentation timestamp. 
- * @param dts input decoding timestamp. - * @param pos input byte position in stream. - * @return the number of bytes of the input bitstream used. - * - * Example: - * @code - * while(in_len){ - * len = av_parser_parse2(myparser, AVCodecContext, &data, &size, - * in_data, in_len, - * pts, dts, pos); - * in_data += len; - * in_len -= len; - * - * if(size) - * decode_frame(data, size); - * } - * @endcode - */ -int av_parser_parse2(AVCodecParserContext *s, - AVCodecContext *avctx, - uint8_t **poutbuf, int *poutbuf_size, - const uint8_t *buf, int buf_size, - int64_t pts, int64_t dts, - int64_t pos); - -void av_parser_close(AVCodecParserContext *s); - -/** - * @} - * @} - */ - -/** - * @addtogroup lavc_encoding - * @{ - */ - -int avcodec_encode_subtitle(AVCodecContext *avctx, uint8_t *buf, int buf_size, - const AVSubtitle *sub); - - -/** - * @} - */ - -/** - * @defgroup lavc_misc Utility functions - * @ingroup libavc - * - * Miscellaneous utility functions related to both encoding and decoding - * (or neither). - * @{ - */ - -/** - * @defgroup lavc_misc_pixfmt Pixel formats - * - * Functions for working with pixel formats. - * @{ - */ - -/** - * Return a value representing the fourCC code associated to the - * pixel format pix_fmt, or 0 if no associated fourCC code can be - * found. - */ -unsigned int avcodec_pix_fmt_to_codec_tag(enum AVPixelFormat pix_fmt); - -/** - * Find the best pixel format to convert to given a certain source pixel - * format. When converting from one pixel format to another, information loss - * may occur. For example, when converting from RGB24 to GRAY, the color - * information will be lost. Similarly, other losses occur when converting from - * some formats to other formats. avcodec_find_best_pix_fmt_of_2() searches which of - * the given pixel formats should be used to suffer the least amount of loss. - * The pixel formats from which it chooses one, are determined by the - * pix_fmt_list parameter. 
- * - * - * @param[in] pix_fmt_list AV_PIX_FMT_NONE terminated array of pixel formats to choose from - * @param[in] src_pix_fmt source pixel format - * @param[in] has_alpha Whether the source pixel format alpha channel is used. - * @param[out] loss_ptr Combination of flags informing you what kind of losses will occur. - * @return The best pixel format to convert to or -1 if none was found. - */ -enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(const enum AVPixelFormat *pix_fmt_list, - enum AVPixelFormat src_pix_fmt, - int has_alpha, int *loss_ptr); - -enum AVPixelFormat avcodec_default_get_format(struct AVCodecContext *s, const enum AVPixelFormat * fmt); - -/** - * @} - */ - -void avcodec_string(char *buf, int buf_size, AVCodecContext *enc, int encode); - -int avcodec_default_execute(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2),void *arg, int *ret, int count, int size); -int avcodec_default_execute2(AVCodecContext *c, int (*func)(AVCodecContext *c2, void *arg2, int, int),void *arg, int *ret, int count); -//FIXME func typedef - -/** - * Fill AVFrame audio data and linesize pointers. - * - * The buffer buf must be a preallocated buffer with a size big enough - * to contain the specified samples amount. The filled AVFrame data - * pointers will point to this buffer. - * - * AVFrame extended_data channel pointers are allocated if necessary for - * planar audio. - * - * @param frame the AVFrame - * frame->nb_samples must be set prior to calling the - * function. This function fills in frame->data, - * frame->extended_data, frame->linesize[0]. 
- * @param nb_channels channel count - * @param sample_fmt sample format - * @param buf buffer to use for frame data - * @param buf_size size of buffer - * @param align plane size sample alignment (0 = default) - * @return >=0 on success, negative error code on failure - * @todo return the size in bytes required to store the samples in - * case of success, at the next libavutil bump - */ -int avcodec_fill_audio_frame(AVFrame *frame, int nb_channels, - enum AVSampleFormat sample_fmt, const uint8_t *buf, - int buf_size, int align); - -/** - * Reset the internal codec state / flush internal buffers. Should be called - * e.g. when seeking or when switching to a different stream. - * - * @note for decoders, this function just releases any references the decoder - * might keep internally, but the caller's references remain valid. - * - * @note for encoders, this function will only do something if the encoder - * declares support for AV_CODEC_CAP_ENCODER_FLUSH. When called, the encoder - * will drain any remaining packets, and can then be re-used for a different - * stream (as opposed to sending a null frame which will leave the encoder - * in a permanent EOF state after draining). This can be desirable if the - * cost of tearing down and replacing the encoder instance is high. - */ -void avcodec_flush_buffers(AVCodecContext *avctx); - -/** - * Return audio frame duration. - * - * @param avctx codec context - * @param frame_bytes size of the frame, or 0 if unknown - * @return frame duration, in samples, if known. 0 if not able to - * determine. - */ -int av_get_audio_frame_duration(AVCodecContext *avctx, int frame_bytes); - -/* memory */ - -/** - * Same behaviour av_fast_malloc but the buffer has additional - * AV_INPUT_BUFFER_PADDING_SIZE at the end which will always be 0. - * - * In addition the whole buffer will initially and after resizes - * be 0-initialized so that no uninitialized data will ever appear. 
- */ -void av_fast_padded_malloc(void *ptr, unsigned int *size, size_t min_size); - -/** - * Same behaviour av_fast_padded_malloc except that buffer will always - * be 0-initialized after call. - */ -void av_fast_padded_mallocz(void *ptr, unsigned int *size, size_t min_size); - -/** - * @return a positive value if s is open (i.e. avcodec_open2() was called on it - * with no corresponding avcodec_close()), 0 otherwise. - */ -int avcodec_is_open(AVCodecContext *s); - -/** - * @} - */ - -#endif /* AVCODEC_AVCODEC_H */ diff --git a/third-party/cbs/bytestream.h b/third-party/cbs/bytestream.h deleted file mode 100644 index d0033f14f36..00000000000 --- a/third-party/cbs/bytestream.h +++ /dev/null @@ -1,380 +0,0 @@ -/* - * Bytestream functions - * copyright (c) 2006 Baptiste Coudurier - * Copyright (c) 2012 Aneesh Dogra (lionaneesh) - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_BYTESTREAM_H -#define AVCODEC_BYTESTREAM_H - -#include -#include - -#include "libavutil/avassert.h" -#include "libavutil/common.h" -#include "libavutil/intreadwrite.h" - -typedef struct GetByteContext { - const uint8_t *buffer, *buffer_end, *buffer_start; -} GetByteContext; - -typedef struct PutByteContext { - uint8_t *buffer, *buffer_end, *buffer_start; - int eof; -} PutByteContext; - -#define DEF(type, name, bytes, read, write) \ -static av_always_inline type bytestream_get_ ## name(const uint8_t **b) \ -{ \ - (*b) += bytes; \ - return read(*b - bytes); \ -} \ -static av_always_inline void bytestream_put_ ## name(uint8_t **b, \ - const type value) \ -{ \ - write(*b, value); \ - (*b) += bytes; \ -} \ -static av_always_inline void bytestream2_put_ ## name ## u(PutByteContext *p, \ - const type value) \ -{ \ - bytestream_put_ ## name(&p->buffer, value); \ -} \ -static av_always_inline void bytestream2_put_ ## name(PutByteContext *p, \ - const type value) \ -{ \ - if (!p->eof && (p->buffer_end - p->buffer >= bytes)) { \ - write(p->buffer, value); \ - p->buffer += bytes; \ - } else \ - p->eof = 1; \ -} \ -static av_always_inline type bytestream2_get_ ## name ## u(GetByteContext *g) \ -{ \ - return bytestream_get_ ## name(&g->buffer); \ -} \ -static av_always_inline type bytestream2_get_ ## name(GetByteContext *g) \ -{ \ - if (g->buffer_end - g->buffer < bytes) { \ - g->buffer = g->buffer_end; \ - return 0; \ - } \ - return bytestream2_get_ ## name ## u(g); \ -} \ -static av_always_inline type bytestream2_peek_ ## name ## u(GetByteContext *g) \ -{ \ - return read(g->buffer); \ -} \ -static av_always_inline type bytestream2_peek_ ## name(GetByteContext *g) \ -{ \ - if (g->buffer_end - g->buffer < bytes) \ - return 0; \ - return 
bytestream2_peek_ ## name ## u(g); \ -} - -DEF(uint64_t, le64, 8, AV_RL64, AV_WL64) -DEF(unsigned int, le32, 4, AV_RL32, AV_WL32) -DEF(unsigned int, le24, 3, AV_RL24, AV_WL24) -DEF(unsigned int, le16, 2, AV_RL16, AV_WL16) -DEF(uint64_t, be64, 8, AV_RB64, AV_WB64) -DEF(unsigned int, be32, 4, AV_RB32, AV_WB32) -DEF(unsigned int, be24, 3, AV_RB24, AV_WB24) -DEF(unsigned int, be16, 2, AV_RB16, AV_WB16) -DEF(unsigned int, byte, 1, AV_RB8 , AV_WB8) - -#if AV_HAVE_BIGENDIAN -# define bytestream2_get_ne16 bytestream2_get_be16 -# define bytestream2_get_ne24 bytestream2_get_be24 -# define bytestream2_get_ne32 bytestream2_get_be32 -# define bytestream2_get_ne64 bytestream2_get_be64 -# define bytestream2_get_ne16u bytestream2_get_be16u -# define bytestream2_get_ne24u bytestream2_get_be24u -# define bytestream2_get_ne32u bytestream2_get_be32u -# define bytestream2_get_ne64u bytestream2_get_be64u -# define bytestream2_put_ne16 bytestream2_put_be16 -# define bytestream2_put_ne24 bytestream2_put_be24 -# define bytestream2_put_ne32 bytestream2_put_be32 -# define bytestream2_put_ne64 bytestream2_put_be64 -# define bytestream2_peek_ne16 bytestream2_peek_be16 -# define bytestream2_peek_ne24 bytestream2_peek_be24 -# define bytestream2_peek_ne32 bytestream2_peek_be32 -# define bytestream2_peek_ne64 bytestream2_peek_be64 -#else -# define bytestream2_get_ne16 bytestream2_get_le16 -# define bytestream2_get_ne24 bytestream2_get_le24 -# define bytestream2_get_ne32 bytestream2_get_le32 -# define bytestream2_get_ne64 bytestream2_get_le64 -# define bytestream2_get_ne16u bytestream2_get_le16u -# define bytestream2_get_ne24u bytestream2_get_le24u -# define bytestream2_get_ne32u bytestream2_get_le32u -# define bytestream2_get_ne64u bytestream2_get_le64u -# define bytestream2_put_ne16 bytestream2_put_le16 -# define bytestream2_put_ne24 bytestream2_put_le24 -# define bytestream2_put_ne32 bytestream2_put_le32 -# define bytestream2_put_ne64 bytestream2_put_le64 -# define bytestream2_peek_ne16 
bytestream2_peek_le16 -# define bytestream2_peek_ne24 bytestream2_peek_le24 -# define bytestream2_peek_ne32 bytestream2_peek_le32 -# define bytestream2_peek_ne64 bytestream2_peek_le64 -#endif - -static av_always_inline void bytestream2_init(GetByteContext *g, - const uint8_t *buf, - int buf_size) -{ - av_assert0(buf_size >= 0); - g->buffer = buf; - g->buffer_start = buf; - g->buffer_end = buf + buf_size; -} - -static av_always_inline void bytestream2_init_writer(PutByteContext *p, - uint8_t *buf, - int buf_size) -{ - av_assert0(buf_size >= 0); - p->buffer = buf; - p->buffer_start = buf; - p->buffer_end = buf + buf_size; - p->eof = 0; -} - -static av_always_inline int bytestream2_get_bytes_left(GetByteContext *g) -{ - return g->buffer_end - g->buffer; -} - -static av_always_inline int bytestream2_get_bytes_left_p(PutByteContext *p) -{ - return p->buffer_end - p->buffer; -} - -static av_always_inline void bytestream2_skip(GetByteContext *g, - unsigned int size) -{ - g->buffer += FFMIN(g->buffer_end - g->buffer, size); -} - -static av_always_inline void bytestream2_skipu(GetByteContext *g, - unsigned int size) -{ - g->buffer += size; -} - -static av_always_inline void bytestream2_skip_p(PutByteContext *p, - unsigned int size) -{ - int size2; - if (p->eof) - return; - size2 = FFMIN(p->buffer_end - p->buffer, size); - if (size2 != size) - p->eof = 1; - p->buffer += size2; -} - -static av_always_inline int bytestream2_tell(GetByteContext *g) -{ - return (int)(g->buffer - g->buffer_start); -} - -static av_always_inline int bytestream2_tell_p(PutByteContext *p) -{ - return (int)(p->buffer - p->buffer_start); -} - -static av_always_inline int bytestream2_size(GetByteContext *g) -{ - return (int)(g->buffer_end - g->buffer_start); -} - -static av_always_inline int bytestream2_size_p(PutByteContext *p) -{ - return (int)(p->buffer_end - p->buffer_start); -} - -static av_always_inline int bytestream2_seek(GetByteContext *g, - int offset, - int whence) -{ - switch (whence) { - 
case SEEK_CUR: - offset = av_clip(offset, -(g->buffer - g->buffer_start), - g->buffer_end - g->buffer); - g->buffer += offset; - break; - case SEEK_END: - offset = av_clip(offset, -(g->buffer_end - g->buffer_start), 0); - g->buffer = g->buffer_end + offset; - break; - case SEEK_SET: - offset = av_clip(offset, 0, g->buffer_end - g->buffer_start); - g->buffer = g->buffer_start + offset; - break; - default: - return AVERROR(EINVAL); - } - return bytestream2_tell(g); -} - -static av_always_inline int bytestream2_seek_p(PutByteContext *p, - int offset, - int whence) -{ - p->eof = 0; - switch (whence) { - case SEEK_CUR: - if (p->buffer_end - p->buffer < offset) - p->eof = 1; - offset = av_clip(offset, -(p->buffer - p->buffer_start), - p->buffer_end - p->buffer); - p->buffer += offset; - break; - case SEEK_END: - if (offset > 0) - p->eof = 1; - offset = av_clip(offset, -(p->buffer_end - p->buffer_start), 0); - p->buffer = p->buffer_end + offset; - break; - case SEEK_SET: - if (p->buffer_end - p->buffer_start < offset) - p->eof = 1; - offset = av_clip(offset, 0, p->buffer_end - p->buffer_start); - p->buffer = p->buffer_start + offset; - break; - default: - return AVERROR(EINVAL); - } - return bytestream2_tell_p(p); -} - -static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, - uint8_t *dst, - unsigned int size) -{ - int size2 = FFMIN(g->buffer_end - g->buffer, size); - memcpy(dst, g->buffer, size2); - g->buffer += size2; - return size2; -} - -static av_always_inline unsigned int bytestream2_get_bufferu(GetByteContext *g, - uint8_t *dst, - unsigned int size) -{ - memcpy(dst, g->buffer, size); - g->buffer += size; - return size; -} - -static av_always_inline unsigned int bytestream2_put_buffer(PutByteContext *p, - const uint8_t *src, - unsigned int size) -{ - int size2; - if (p->eof) - return 0; - size2 = FFMIN(p->buffer_end - p->buffer, size); - if (size2 != size) - p->eof = 1; - memcpy(p->buffer, src, size2); - p->buffer += size2; - return size2; -} 
- -static av_always_inline unsigned int bytestream2_put_bufferu(PutByteContext *p, - const uint8_t *src, - unsigned int size) -{ - memcpy(p->buffer, src, size); - p->buffer += size; - return size; -} - -static av_always_inline void bytestream2_set_buffer(PutByteContext *p, - const uint8_t c, - unsigned int size) -{ - int size2; - if (p->eof) - return; - size2 = FFMIN(p->buffer_end - p->buffer, size); - if (size2 != size) - p->eof = 1; - memset(p->buffer, c, size2); - p->buffer += size2; -} - -static av_always_inline void bytestream2_set_bufferu(PutByteContext *p, - const uint8_t c, - unsigned int size) -{ - memset(p->buffer, c, size); - p->buffer += size; -} - -static av_always_inline unsigned int bytestream2_get_eof(PutByteContext *p) -{ - return p->eof; -} - -static av_always_inline unsigned int bytestream2_copy_bufferu(PutByteContext *p, - GetByteContext *g, - unsigned int size) -{ - memcpy(p->buffer, g->buffer, size); - p->buffer += size; - g->buffer += size; - return size; -} - -static av_always_inline unsigned int bytestream2_copy_buffer(PutByteContext *p, - GetByteContext *g, - unsigned int size) -{ - int size2; - - if (p->eof) - return 0; - size = FFMIN(g->buffer_end - g->buffer, size); - size2 = FFMIN(p->buffer_end - p->buffer, size); - if (size2 != size) - p->eof = 1; - - return bytestream2_copy_bufferu(p, g, size2); -} - -static av_always_inline unsigned int bytestream_get_buffer(const uint8_t **b, - uint8_t *dst, - unsigned int size) -{ - memcpy(dst, *b, size); - (*b) += size; - return size; -} - -static av_always_inline void bytestream_put_buffer(uint8_t **b, - const uint8_t *src, - unsigned int size) -{ - memcpy(*b, src, size); - (*b) += size; -} - -#endif /* AVCODEC_BYTESTREAM_H */ diff --git a/third-party/cbs/cbs.c b/third-party/cbs/cbs.c deleted file mode 100644 index 4ae441a8968..00000000000 --- a/third-party/cbs/cbs.c +++ /dev/null @@ -1,1087 +0,0 @@ -/* - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include - -// [sunshine] Changed include path -#include "cbs/config.h" - -#include "libavutil/avassert.h" -#include "libavutil/buffer.h" -#include "libavutil/common.h" -#include "libavutil/opt.h" - -#include "avcodec.h" -// [sunshine] Changed include path -#include "cbs/cbs.h" -#include "cbs_internal.h" - - -static const CodedBitstreamType *const cbs_type_table[] = { -#if CONFIG_CBS_AV1 - &ff_cbs_type_av1, -#endif -#if CONFIG_CBS_H264 - &ff_cbs_type_h264, -#endif -#if CONFIG_CBS_H265 - &ff_cbs_type_h265, -#endif -#if CONFIG_CBS_JPEG - &ff_cbs_type_jpeg, -#endif -#if CONFIG_CBS_MPEG2 - &ff_cbs_type_mpeg2, -#endif -#if CONFIG_CBS_VP9 - &ff_cbs_type_vp9, -#endif -}; - -const enum AVCodecID ff_cbs_all_codec_ids[] = { -#if CONFIG_CBS_AV1 - AV_CODEC_ID_AV1, -#endif -#if CONFIG_CBS_H264 - AV_CODEC_ID_H264, -#endif -#if CONFIG_CBS_H265 - AV_CODEC_ID_H265, -#endif -#if CONFIG_CBS_JPEG - AV_CODEC_ID_MJPEG, -#endif -#if CONFIG_CBS_MPEG2 - AV_CODEC_ID_MPEG2VIDEO, -#endif -#if CONFIG_CBS_VP9 - AV_CODEC_ID_VP9, -#endif - AV_CODEC_ID_NONE -}; - -av_cold int ff_cbs_init(CodedBitstreamContext **ctx_ptr, - enum AVCodecID codec_id, void *log_ctx) -{ - CodedBitstreamContext *ctx; - const CodedBitstreamType *type; - int i; - - type = NULL; - for (i = 0; 
i < FF_ARRAY_ELEMS(cbs_type_table); i++) { - if (cbs_type_table[i]->codec_id == codec_id) { - type = cbs_type_table[i]; - break; - } - } - if (!type) - return AVERROR(EINVAL); - - ctx = av_mallocz(sizeof(*ctx)); - if (!ctx) - return AVERROR(ENOMEM); - - ctx->log_ctx = log_ctx; - ctx->codec = type; /* Must be before any error */ - - if (type->priv_data_size) { - ctx->priv_data = av_mallocz(ctx->codec->priv_data_size); - if (!ctx->priv_data) { - av_freep(&ctx); - return AVERROR(ENOMEM); - } - if (type->priv_class) { - *(const AVClass **)ctx->priv_data = type->priv_class; - av_opt_set_defaults(ctx->priv_data); - } - } - - ctx->decompose_unit_types = NULL; - - ctx->trace_enable = 0; - ctx->trace_level = AV_LOG_TRACE; - - *ctx_ptr = ctx; - return 0; -} - -av_cold void ff_cbs_flush(CodedBitstreamContext *ctx) -{ - if (ctx->codec->flush) - ctx->codec->flush(ctx); -} - -av_cold void ff_cbs_close(CodedBitstreamContext **ctx_ptr) -{ - CodedBitstreamContext *ctx = *ctx_ptr; - - if (!ctx) - return; - - if (ctx->codec->close) - ctx->codec->close(ctx); - - av_freep(&ctx->write_buffer); - - if (ctx->codec->priv_class && ctx->priv_data) - av_opt_free(ctx->priv_data); - - av_freep(&ctx->priv_data); - av_freep(ctx_ptr); -} - -static void cbs_unit_uninit(CodedBitstreamUnit *unit) -{ - av_buffer_unref(&unit->content_ref); - unit->content = NULL; - - av_buffer_unref(&unit->data_ref); - unit->data = NULL; - unit->data_size = 0; - unit->data_bit_padding = 0; -} - -void ff_cbs_fragment_reset(CodedBitstreamFragment *frag) -{ - int i; - - for (i = 0; i < frag->nb_units; i++) - cbs_unit_uninit(&frag->units[i]); - frag->nb_units = 0; - - av_buffer_unref(&frag->data_ref); - frag->data = NULL; - frag->data_size = 0; - frag->data_bit_padding = 0; -} - -av_cold void ff_cbs_fragment_free(CodedBitstreamFragment *frag) -{ - ff_cbs_fragment_reset(frag); - - av_freep(&frag->units); - frag->nb_units_allocated = 0; -} - -static int cbs_read_fragment_content(CodedBitstreamContext *ctx, - 
CodedBitstreamFragment *frag) -{ - int err, i, j; - - for (i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - if (ctx->decompose_unit_types) { - for (j = 0; j < ctx->nb_decompose_unit_types; j++) { - if (ctx->decompose_unit_types[j] == unit->type) - break; - } - if (j >= ctx->nb_decompose_unit_types) - continue; - } - - av_buffer_unref(&unit->content_ref); - unit->content = NULL; - - av_assert0(unit->data && unit->data_ref); - - err = ctx->codec->read_unit(ctx, unit); - if (err == AVERROR(ENOSYS)) { - av_log(ctx->log_ctx, AV_LOG_VERBOSE, - "Decomposition unimplemented for unit %d " - "(type %"PRIu32").\n", i, unit->type); - } else if (err == AVERROR(EAGAIN)) { - av_log(ctx->log_ctx, AV_LOG_VERBOSE, - "Skipping decomposition of unit %d " - "(type %"PRIu32").\n", i, unit->type); - av_buffer_unref(&unit->content_ref); - unit->content = NULL; - } else if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to read unit %d " - "(type %"PRIu32").\n", i, unit->type); - return err; - } - } - - return 0; -} - -static int cbs_fill_fragment_data(CodedBitstreamFragment *frag, - const uint8_t *data, size_t size) -{ - av_assert0(!frag->data && !frag->data_ref); - - frag->data_ref = - av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!frag->data_ref) - return AVERROR(ENOMEM); - - frag->data = frag->data_ref->data; - frag->data_size = size; - - memcpy(frag->data, data, size); - memset(frag->data + size, 0, - AV_INPUT_BUFFER_PADDING_SIZE); - - return 0; -} - -static int cbs_read_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - AVBufferRef *buf, - const uint8_t *data, size_t size, - int header) -{ - int err; - - if (buf) { - frag->data_ref = av_buffer_ref(buf); - if (!frag->data_ref) - return AVERROR(ENOMEM); - - frag->data = (uint8_t *)data; - frag->data_size = size; - - } else { - err = cbs_fill_fragment_data(frag, data, size); - if (err < 0) - return err; - } - - err = ctx->codec->split_fragment(ctx, frag, header); 
- if (err < 0) - return err; - - return cbs_read_fragment_content(ctx, frag); -} - -int ff_cbs_read_extradata(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecParameters *par) -{ - return cbs_read_data(ctx, frag, NULL, - par->extradata, - par->extradata_size, 1); -} - -int ff_cbs_read_extradata_from_codec(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecContext *avctx) -{ - return cbs_read_data(ctx, frag, NULL, - avctx->extradata, - avctx->extradata_size, 1); -} - -int ff_cbs_read_packet(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVPacket *pkt) -{ - return cbs_read_data(ctx, frag, pkt->buf, - pkt->data, pkt->size, 0); -} - -int ff_cbs_read_packet_side_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVPacket *pkt) -{ - size_t side_data_size; - const uint8_t *side_data = - av_packet_get_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, - &side_data_size); - - return cbs_read_data(ctx, frag, NULL, - side_data, side_data_size, 1); -} - -int ff_cbs_read(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const uint8_t *data, size_t size) -{ - return cbs_read_data(ctx, frag, NULL, - data, size, 0); -} - -/** - * Allocate a new internal data buffer of the given size in the unit. - * - * The data buffer will have input padding. - */ -static int cbs_alloc_unit_data(CodedBitstreamUnit *unit, - size_t size) -{ - av_assert0(!unit->data && !unit->data_ref); - - unit->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!unit->data_ref) - return AVERROR(ENOMEM); - - unit->data = unit->data_ref->data; - unit->data_size = size; - - memset(unit->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - return 0; -} - -static int cbs_write_unit_data(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - PutBitContext pbc; - int ret; - - if (!ctx->write_buffer) { - // Initial write buffer size is 1MB. 
- ctx->write_buffer_size = 1024 * 1024; - - reallocate_and_try_again: - ret = av_reallocp(&ctx->write_buffer, ctx->write_buffer_size); - if (ret < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Unable to allocate a " - "sufficiently large write buffer (last attempt " - "%zu bytes).\n", ctx->write_buffer_size); - return ret; - } - } - - init_put_bits(&pbc, ctx->write_buffer, ctx->write_buffer_size); - - ret = ctx->codec->write_unit(ctx, unit, &pbc); - if (ret < 0) { - if (ret == AVERROR(ENOSPC)) { - // Overflow. - if (ctx->write_buffer_size == INT_MAX / 8) - return AVERROR(ENOMEM); - ctx->write_buffer_size = FFMIN(2 * ctx->write_buffer_size, INT_MAX / 8); - goto reallocate_and_try_again; - } - // Write failed for some other reason. - return ret; - } - - // Overflow but we didn't notice. - av_assert0(put_bits_count(&pbc) <= 8 * ctx->write_buffer_size); - - if (put_bits_count(&pbc) % 8) - unit->data_bit_padding = 8 - put_bits_count(&pbc) % 8; - else - unit->data_bit_padding = 0; - - flush_put_bits(&pbc); - - ret = cbs_alloc_unit_data(unit, put_bytes_output(&pbc)); - if (ret < 0) - return ret; - - memcpy(unit->data, ctx->write_buffer, unit->data_size); - - return 0; -} - -int ff_cbs_write_fragment_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) -{ - int err, i; - - for (i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - if (!unit->content) - continue; - - av_buffer_unref(&unit->data_ref); - unit->data = NULL; - - err = cbs_write_unit_data(ctx, unit); - if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to write unit %d " - "(type %"PRIu32").\n", i, unit->type); - return err; - } - av_assert0(unit->data && unit->data_ref); - } - - av_buffer_unref(&frag->data_ref); - frag->data = NULL; - - err = ctx->codec->assemble_fragment(ctx, frag); - if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to assemble fragment.\n"); - return err; - } - av_assert0(frag->data && frag->data_ref); - - return 0; -} - -int 
ff_cbs_write_extradata(CodedBitstreamContext *ctx, - AVCodecParameters *par, - CodedBitstreamFragment *frag) -{ - int err; - - err = ff_cbs_write_fragment_data(ctx, frag); - if (err < 0) - return err; - - av_freep(&par->extradata); - - par->extradata = av_malloc(frag->data_size + - AV_INPUT_BUFFER_PADDING_SIZE); - if (!par->extradata) - return AVERROR(ENOMEM); - - memcpy(par->extradata, frag->data, frag->data_size); - memset(par->extradata + frag->data_size, 0, - AV_INPUT_BUFFER_PADDING_SIZE); - par->extradata_size = frag->data_size; - - return 0; -} - -int ff_cbs_write_packet(CodedBitstreamContext *ctx, - AVPacket *pkt, - CodedBitstreamFragment *frag) -{ - AVBufferRef *buf; - int err; - - err = ff_cbs_write_fragment_data(ctx, frag); - if (err < 0) - return err; - - buf = av_buffer_ref(frag->data_ref); - if (!buf) - return AVERROR(ENOMEM); - - av_buffer_unref(&pkt->buf); - - pkt->buf = buf; - pkt->data = frag->data; - pkt->size = frag->data_size; - - return 0; -} - - -void ff_cbs_trace_header(CodedBitstreamContext *ctx, - const char *name) -{ - if (!ctx->trace_enable) - return; - - av_log(ctx->log_ctx, ctx->trace_level, "%s\n", name); -} - -void ff_cbs_trace_syntax_element(CodedBitstreamContext *ctx, int position, - const char *str, const int *subscripts, - const char *bits, int64_t value) -{ - char name[256]; - size_t name_len, bits_len; - int pad, subs, i, j, k, n; - - if (!ctx->trace_enable) - return; - - av_assert0(value >= INT_MIN && value <= UINT32_MAX); - - subs = subscripts ? 
subscripts[0] : 0; - n = 0; - for (i = j = 0; str[i];) { - if (str[i] == '[') { - if (n < subs) { - ++n; - k = snprintf(name + j, sizeof(name) - j, "[%d", subscripts[n]); - av_assert0(k > 0 && j + k < sizeof(name)); - j += k; - for (++i; str[i] && str[i] != ']'; i++); - av_assert0(str[i] == ']'); - } else { - while (str[i] && str[i] != ']') - name[j++] = str[i++]; - av_assert0(str[i] == ']'); - } - } else { - av_assert0(j + 1 < sizeof(name)); - name[j++] = str[i++]; - } - } - av_assert0(j + 1 < sizeof(name)); - name[j] = 0; - av_assert0(n == subs); - - name_len = strlen(name); - bits_len = strlen(bits); - - if (name_len + bits_len > 60) - pad = bits_len + 2; - else - pad = 61 - name_len; - - av_log(ctx->log_ctx, ctx->trace_level, "%-10d %s%*s = %"PRId64"\n", - position, name, pad, bits, value); -} - -int ff_cbs_read_unsigned(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, uint32_t *write_to, - uint32_t range_min, uint32_t range_max) -{ - uint32_t value; - int position; - - av_assert0(width > 0 && width <= 32); - - if (get_bits_left(gbc) < width) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - value = get_bits_long(gbc, width); - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < width; i++) - bits[i] = value >> (width - i - 1) & 1 ? 
'1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; -} - -int ff_cbs_write_unsigned(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, uint32_t value, - uint32_t range_min, uint32_t range_max) -{ - av_assert0(width > 0 && width <= 32); - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if (put_bits_left(pbc) < width) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < width; i++) - bits[i] = value >> (width - i - 1) & 1 ? '1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } - - if (width < 32) - put_bits(pbc, width, value); - else - put_bits32(pbc, value); - - return 0; -} - -int ff_cbs_read_signed(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, int32_t *write_to, - int32_t range_min, int32_t range_max) -{ - int32_t value; - int position; - - av_assert0(width > 0 && width <= 32); - - if (get_bits_left(gbc) < width) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - value = get_sbits_long(gbc, width); - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < width; i++) - bits[i] = value & (1U << (width - i - 1)) ? 
'1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; -} - -int ff_cbs_write_signed(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, int32_t value, - int32_t range_min, int32_t range_max) -{ - av_assert0(width > 0 && width <= 32); - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if (put_bits_left(pbc) < width) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < width; i++) - bits[i] = value & (1U << (width - i - 1)) ? 
'1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } - - if (width < 32) - put_sbits(pbc, width, value); - else - put_bits32(pbc, value); - - return 0; -} - - -int ff_cbs_alloc_unit_content(CodedBitstreamUnit *unit, - size_t size, - void (*free)(void *opaque, uint8_t *data)) -{ - av_assert0(!unit->content && !unit->content_ref); - - unit->content = av_mallocz(size); - if (!unit->content) - return AVERROR(ENOMEM); - - unit->content_ref = av_buffer_create(unit->content, size, - free, NULL, 0); - if (!unit->content_ref) { - av_freep(&unit->content); - return AVERROR(ENOMEM); - } - - return 0; -} - -static int cbs_insert_unit(CodedBitstreamFragment *frag, - int position) -{ - CodedBitstreamUnit *units; - - if (frag->nb_units < frag->nb_units_allocated) { - units = frag->units; - - if (position < frag->nb_units) - memmove(units + position + 1, units + position, - (frag->nb_units - position) * sizeof(*units)); - } else { - units = av_malloc_array(frag->nb_units*2 + 1, sizeof(*units)); - if (!units) - return AVERROR(ENOMEM); - - frag->nb_units_allocated = 2*frag->nb_units_allocated + 1; - - if (position > 0) - memcpy(units, frag->units, position * sizeof(*units)); - - if (position < frag->nb_units) - memcpy(units + position + 1, frag->units + position, - (frag->nb_units - position) * sizeof(*units)); - } - - memset(units + position, 0, sizeof(*units)); - - if (units != frag->units) { - av_free(frag->units); - frag->units = units; - } - - ++frag->nb_units; - - return 0; -} - -int ff_cbs_insert_unit_content(CodedBitstreamFragment *frag, - int position, - CodedBitstreamUnitType type, - void *content, - AVBufferRef *content_buf) -{ - CodedBitstreamUnit *unit; - AVBufferRef *content_ref; - int err; - - if (position == -1) - position = frag->nb_units; - av_assert0(position >= 0 && position <= frag->nb_units); - - if (content_buf) { - content_ref = av_buffer_ref(content_buf); - if (!content_ref) - return 
AVERROR(ENOMEM); - } else { - content_ref = NULL; - } - - err = cbs_insert_unit(frag, position); - if (err < 0) { - av_buffer_unref(&content_ref); - return err; - } - - unit = &frag->units[position]; - unit->type = type; - unit->content = content; - unit->content_ref = content_ref; - - return 0; -} - -static int cbs_insert_unit_data(CodedBitstreamFragment *frag, - CodedBitstreamUnitType type, - uint8_t *data, size_t data_size, - AVBufferRef *data_buf, - int position) -{ - CodedBitstreamUnit *unit; - AVBufferRef *data_ref; - int err; - - av_assert0(position >= 0 && position <= frag->nb_units); - - if (data_buf) - data_ref = av_buffer_ref(data_buf); - else - data_ref = av_buffer_create(data, data_size, NULL, NULL, 0); - if (!data_ref) { - if (!data_buf) - av_free(data); - return AVERROR(ENOMEM); - } - - err = cbs_insert_unit(frag, position); - if (err < 0) { - av_buffer_unref(&data_ref); - return err; - } - - unit = &frag->units[position]; - unit->type = type; - unit->data = data; - unit->data_size = data_size; - unit->data_ref = data_ref; - - return 0; -} - -int ff_cbs_append_unit_data(CodedBitstreamFragment *frag, - CodedBitstreamUnitType type, - uint8_t *data, size_t data_size, - AVBufferRef *data_buf) -{ - return cbs_insert_unit_data(frag, type, - data, data_size, data_buf, - frag->nb_units); -} - -void ff_cbs_delete_unit(CodedBitstreamFragment *frag, - int position) -{ - av_assert0(0 <= position && position < frag->nb_units - && "Unit to be deleted not in fragment."); - - cbs_unit_uninit(&frag->units[position]); - - --frag->nb_units; - - if (frag->nb_units > 0) - memmove(frag->units + position, - frag->units + position + 1, - (frag->nb_units - position) * sizeof(*frag->units)); -} - -static void cbs_default_free_unit_content(void *opaque, uint8_t *data) -{ - const CodedBitstreamUnitTypeDescriptor *desc = opaque; - if (desc->content_type == CBS_CONTENT_TYPE_INTERNAL_REFS) { - int i; - for (i = 0; i < desc->nb_ref_offsets; i++) { - void **ptr = (void**)(data + 
desc->ref_offsets[i]); - av_buffer_unref((AVBufferRef**)(ptr + 1)); - } - } - av_free(data); -} - -static const CodedBitstreamUnitTypeDescriptor - *cbs_find_unit_type_desc(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - const CodedBitstreamUnitTypeDescriptor *desc; - int i, j; - - if (!ctx->codec->unit_types) - return NULL; - - for (i = 0;; i++) { - desc = &ctx->codec->unit_types[i]; - if (desc->nb_unit_types == 0) - break; - if (desc->nb_unit_types == CBS_UNIT_TYPE_RANGE) { - if (unit->type >= desc->unit_type_range_start && - unit->type <= desc->unit_type_range_end) - return desc; - } else { - for (j = 0; j < desc->nb_unit_types; j++) { - if (desc->unit_types[j] == unit->type) - return desc; - } - } - } - return NULL; -} - -int ff_cbs_alloc_unit_content2(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - const CodedBitstreamUnitTypeDescriptor *desc; - - av_assert0(!unit->content && !unit->content_ref); - - desc = cbs_find_unit_type_desc(ctx, unit); - if (!desc) - return AVERROR(ENOSYS); - - unit->content = av_mallocz(desc->content_size); - if (!unit->content) - return AVERROR(ENOMEM); - - unit->content_ref = - av_buffer_create(unit->content, desc->content_size, - desc->content_free ? 
desc->content_free - : cbs_default_free_unit_content, - (void*)desc, 0); - if (!unit->content_ref) { - av_freep(&unit->content); - return AVERROR(ENOMEM); - } - - return 0; -} - -static int cbs_clone_unit_content(AVBufferRef **clone_ref, - CodedBitstreamUnit *unit, - const CodedBitstreamUnitTypeDescriptor *desc) -{ - uint8_t *src, *copy; - uint8_t **src_ptr, **copy_ptr; - AVBufferRef **src_buf, **copy_buf; - int err, i; - - av_assert0(unit->content); - src = unit->content; - - copy = av_memdup(src, desc->content_size); - if (!copy) - return AVERROR(ENOMEM); - - for (i = 0; i < desc->nb_ref_offsets; i++) { - src_ptr = (uint8_t**)(src + desc->ref_offsets[i]); - src_buf = (AVBufferRef**)(src_ptr + 1); - copy_ptr = (uint8_t**)(copy + desc->ref_offsets[i]); - copy_buf = (AVBufferRef**)(copy_ptr + 1); - - if (!*src_ptr) { - av_assert0(!*src_buf); - continue; - } - if (!*src_buf) { - // We can't handle a non-refcounted pointer here - we don't - // have enough information to handle whatever structure lies - // at the other end of it. - err = AVERROR(EINVAL); - goto fail; - } - - // src_ptr is required to point somewhere inside src_buf. If it - // doesn't, there is a bug somewhere. - av_assert0(*src_ptr >= (*src_buf)->data && - *src_ptr < (*src_buf)->data + (*src_buf)->size); - - *copy_buf = av_buffer_ref(*src_buf); - if (!*copy_buf) { - err = AVERROR(ENOMEM); - goto fail; - } - *copy_ptr = (*copy_buf)->data + (*src_ptr - (*src_buf)->data); - } - - *clone_ref = av_buffer_create(copy, desc->content_size, - desc->content_free ? 
desc->content_free : - cbs_default_free_unit_content, - (void*)desc, 0); - if (!*clone_ref) { - err = AVERROR(ENOMEM); - goto fail; - } - - return 0; - -fail: - for (--i; i >= 0; i--) - av_buffer_unref((AVBufferRef**)(copy + desc->ref_offsets[i])); - av_freep(©); - *clone_ref = NULL; - return err; -} - -int ff_cbs_make_unit_refcounted(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - const CodedBitstreamUnitTypeDescriptor *desc; - AVBufferRef *ref; - int err; - - av_assert0(unit->content); - if (unit->content_ref) { - // Already refcounted, nothing to do. - return 0; - } - - desc = cbs_find_unit_type_desc(ctx, unit); - if (!desc) - return AVERROR(ENOSYS); - - switch (desc->content_type) { - case CBS_CONTENT_TYPE_POD: - ref = av_buffer_alloc(desc->content_size); - if (!ref) - return AVERROR(ENOMEM); - memcpy(ref->data, unit->content, desc->content_size); - err = 0; - break; - - case CBS_CONTENT_TYPE_INTERNAL_REFS: - err = cbs_clone_unit_content(&ref, unit, desc); - break; - - case CBS_CONTENT_TYPE_COMPLEX: - if (!desc->content_clone) - return AVERROR_PATCHWELCOME; - err = desc->content_clone(&ref, unit); - break; - - default: - av_assert0(0 && "Invalid content type."); - } - - if (err < 0) - return err; - - unit->content_ref = ref; - unit->content = ref->data; - return 0; -} - -int ff_cbs_make_unit_writable(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - const CodedBitstreamUnitTypeDescriptor *desc; - AVBufferRef *ref; - int err; - - // This can only be applied to refcounted units. 
- err = ff_cbs_make_unit_refcounted(ctx, unit); - if (err < 0) - return err; - av_assert0(unit->content && unit->content_ref); - - if (av_buffer_is_writable(unit->content_ref)) - return 0; - - desc = cbs_find_unit_type_desc(ctx, unit); - if (!desc) - return AVERROR(ENOSYS); - - switch (desc->content_type) { - case CBS_CONTENT_TYPE_POD: - err = av_buffer_make_writable(&unit->content_ref); - break; - - case CBS_CONTENT_TYPE_INTERNAL_REFS: - err = cbs_clone_unit_content(&ref, unit, desc); - break; - - case CBS_CONTENT_TYPE_COMPLEX: - if (!desc->content_clone) - return AVERROR_PATCHWELCOME; - err = desc->content_clone(&ref, unit); - break; - - default: - av_assert0(0 && "Invalid content type."); - } - if (err < 0) - return err; - - if (desc->content_type != CBS_CONTENT_TYPE_POD) { - av_buffer_unref(&unit->content_ref); - unit->content_ref = ref; - } - unit->content = unit->content_ref->data; - return 0; -} diff --git a/third-party/cbs/cbs_av1.c b/third-party/cbs/cbs_av1.c deleted file mode 100644 index 0df0f37b3b1..00000000000 --- a/third-party/cbs/cbs_av1.c +++ /dev/null @@ -1,1337 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include "libavutil/avassert.h" -#include "libavutil/opt.h" -#include "libavutil/pixfmt.h" - -// [sunshine] Changed include path -#include "avcodec.h" -#include "cbs/cbs.h" -#include "cbs_internal.h" -#include "cbs/cbs_av1.h" - - -static int cbs_av1_read_uvlc(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, uint32_t *write_to, - uint32_t range_min, uint32_t range_max) -{ - uint32_t zeroes, bits_value, value; - int position; - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - zeroes = 0; - while (1) { - if (get_bits_left(gbc) < 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid uvlc code at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - if (get_bits1(gbc)) - break; - ++zeroes; - } - - if (zeroes >= 32) { - value = MAX_UINT_BITS(32); - } else { - if (get_bits_left(gbc) < zeroes) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid uvlc code at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - bits_value = get_bits_long(gbc, zeroes); - value = bits_value + (UINT32_C(1) << zeroes) - 1; - } - - if (ctx->trace_enable) { - char bits[65]; - int i, j, k; - - if (zeroes >= 32) { - while (zeroes > 32) { - k = FFMIN(zeroes - 32, 32); - for (i = 0; i < k; i++) - bits[i] = '0'; - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, - NULL, bits, 0); - zeroes -= k; - position += k; - } - } - - for (i = 0; i < zeroes; i++) - bits[i] = '0'; - bits[i++] = '1'; - - if (zeroes < 32) { - for (j = 0; j < zeroes; j++) - bits[i++] = (bits_value >> (zeroes - j - 1) & 1) ? 
'1' : '0'; - } - - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, - NULL, bits, value); - } - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; -} - -static int cbs_av1_write_uvlc(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, uint32_t value, - uint32_t range_min, uint32_t range_max) -{ - uint32_t v; - int position, zeroes; - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if (ctx->trace_enable) - position = put_bits_count(pbc); - - zeroes = av_log2(value + 1); - v = value - (1U << zeroes) + 1; - put_bits(pbc, zeroes, 0); - put_bits(pbc, 1, 1); - put_bits(pbc, zeroes, v); - - if (ctx->trace_enable) { - char bits[65]; - int i, j; - i = 0; - for (j = 0; j < zeroes; j++) - bits[i++] = '0'; - bits[i++] = '1'; - for (j = 0; j < zeroes; j++) - bits[i++] = (v >> (zeroes - j - 1) & 1) ? 
'1' : '0'; - bits[i++] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, NULL, - bits, value); - } - - return 0; -} - -static int cbs_av1_read_leb128(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, uint64_t *write_to) -{ - uint64_t value; - int position, err, i; - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - value = 0; - for (i = 0; i < 8; i++) { - int subscript[2] = { 1, i }; - uint32_t byte; - err = ff_cbs_read_unsigned(ctx, gbc, 8, "leb128_byte[i]", subscript, - &byte, 0x00, 0xff); - if (err < 0) - return err; - - value |= (uint64_t)(byte & 0x7f) << (i * 7); - if (!(byte & 0x80)) - break; - } - - if (value > UINT32_MAX) - return AVERROR_INVALIDDATA; - - if (ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, NULL, "", value); - - *write_to = value; - return 0; -} - -static int cbs_av1_write_leb128(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, uint64_t value) -{ - int position, err, len, i; - uint8_t byte; - - len = (av_log2(value) + 7) / 7; - - if (ctx->trace_enable) - position = put_bits_count(pbc); - - for (i = 0; i < len; i++) { - int subscript[2] = { 1, i }; - - byte = value >> (7 * i) & 0x7f; - if (i < len - 1) - byte |= 0x80; - - err = ff_cbs_write_unsigned(ctx, pbc, 8, "leb128_byte[i]", subscript, - byte, 0x00, 0xff); - if (err < 0) - return err; - } - - if (ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, NULL, "", value); - - return 0; -} - -static int cbs_av1_read_ns(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t n, const char *name, - const int *subscripts, uint32_t *write_to) -{ - uint32_t m, v, extra_bit, value; - int position, w; - - av_assert0(n > 0); - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - w = av_log2(n) + 1; - m = (1 << w) - n; - - if (get_bits_left(gbc) < w) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid non-symmetric value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - if 
(w - 1 > 0) - v = get_bits(gbc, w - 1); - else - v = 0; - - if (v < m) { - value = v; - } else { - extra_bit = get_bits1(gbc); - value = (v << 1) - m + extra_bit; - } - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < w - 1; i++) - bits[i] = (v >> i & 1) ? '1' : '0'; - if (v >= m) - bits[i++] = extra_bit ? '1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, bits, value); - } - - *write_to = value; - return 0; -} - -static int cbs_av1_write_ns(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t n, const char *name, - const int *subscripts, uint32_t value) -{ - uint32_t w, m, v, extra_bit; - int position; - - if (value > n) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [0,%"PRIu32"].\n", - name, value, n); - return AVERROR_INVALIDDATA; - } - - if (ctx->trace_enable) - position = put_bits_count(pbc); - - w = av_log2(n) + 1; - m = (1 << w) - n; - - if (put_bits_left(pbc) < w) - return AVERROR(ENOSPC); - - if (value < m) { - v = value; - put_bits(pbc, w - 1, v); - } else { - v = m + ((value - m) >> 1); - extra_bit = (value - m) & 1; - put_bits(pbc, w - 1, v); - put_bits(pbc, 1, extra_bit); - } - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < w - 1; i++) - bits[i] = (v >> i & 1) ? '1' : '0'; - if (value >= m) - bits[i++] = extra_bit ? 
'1' : '0'; - bits[i] = 0; - - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, bits, value); - } - - return 0; -} - -static int cbs_av1_read_increment(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t *write_to) -{ - uint32_t value; - int position, i; - char bits[33]; - - av_assert0(range_min <= range_max && range_max - range_min < sizeof(bits) - 1); - if (ctx->trace_enable) - position = get_bits_count(gbc); - - for (i = 0, value = range_min; value < range_max;) { - if (get_bits_left(gbc) < 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid increment value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - if (get_bits1(gbc)) { - bits[i++] = '1'; - ++value; - } else { - bits[i++] = '0'; - break; - } - } - - if (ctx->trace_enable) { - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, - name, NULL, bits, value); - } - - *write_to = value; - return 0; -} - -static int cbs_av1_write_increment(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t value) -{ - int len; - - av_assert0(range_min <= range_max && range_max - range_min < 32); - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if (value == range_max) - len = range_max - range_min; - else - len = value - range_min + 1; - if (put_bits_left(pbc) < len) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < len; i++) { - if (range_min + i == value) - bits[i] = '0'; - else - bits[i] = '1'; - } - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, NULL, bits, value); - } - - if (len > 0) - put_bits(pbc, len, (1 << len) - 1 - (value != range_max)); - - return 0; -} - -static int 
cbs_av1_read_subexp(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t range_max, const char *name, - const int *subscripts, uint32_t *write_to) -{ - uint32_t value; - int position, err; - uint32_t max_len, len, range_offset, range_bits; - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - av_assert0(range_max > 0); - max_len = av_log2(range_max - 1) - 3; - - err = cbs_av1_read_increment(ctx, gbc, 0, max_len, - "subexp_more_bits", &len); - if (err < 0) - return err; - - if (len) { - range_bits = 2 + len; - range_offset = 1 << range_bits; - } else { - range_bits = 3; - range_offset = 0; - } - - if (len < max_len) { - err = ff_cbs_read_unsigned(ctx, gbc, range_bits, - "subexp_bits", NULL, &value, - 0, MAX_UINT_BITS(range_bits)); - if (err < 0) - return err; - - } else { - err = cbs_av1_read_ns(ctx, gbc, range_max - range_offset, - "subexp_final_bits", NULL, &value); - if (err < 0) - return err; - } - value += range_offset; - - if (ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, "", value); - - *write_to = value; - return err; -} - -static int cbs_av1_write_subexp(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t range_max, const char *name, - const int *subscripts, uint32_t value) -{ - int position, err; - uint32_t max_len, len, range_offset, range_bits; - - if (value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [0,%"PRIu32"].\n", - name, value, range_max); - return AVERROR_INVALIDDATA; - } - - if (ctx->trace_enable) - position = put_bits_count(pbc); - - av_assert0(range_max > 0); - max_len = av_log2(range_max - 1) - 3; - - if (value < 8) { - range_bits = 3; - range_offset = 0; - len = 0; - } else { - range_bits = av_log2(value); - len = range_bits - 2; - if (len > max_len) { - // The top bin is combined with the one below it. 
- av_assert0(len == max_len + 1); - --range_bits; - len = max_len; - } - range_offset = 1 << range_bits; - } - - err = cbs_av1_write_increment(ctx, pbc, 0, max_len, - "subexp_more_bits", len); - if (err < 0) - return err; - - if (len < max_len) { - err = ff_cbs_write_unsigned(ctx, pbc, range_bits, - "subexp_bits", NULL, - value - range_offset, - 0, MAX_UINT_BITS(range_bits)); - if (err < 0) - return err; - - } else { - err = cbs_av1_write_ns(ctx, pbc, range_max - range_offset, - "subexp_final_bits", NULL, - value - range_offset); - if (err < 0) - return err; - } - - if (ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, - name, subscripts, "", value); - - return err; -} - - -static int cbs_av1_tile_log2(int blksize, int target) -{ - int k; - for (k = 0; (blksize << k) < target; k++); - return k; -} - -static int cbs_av1_get_relative_dist(const AV1RawSequenceHeader *seq, - unsigned int a, unsigned int b) -{ - unsigned int diff, m; - if (!seq->enable_order_hint) - return 0; - diff = a - b; - m = 1 << seq->order_hint_bits_minus_1; - diff = (diff & (m - 1)) - (diff & m); - return diff; -} - -static size_t cbs_av1_get_payload_bytes_left(GetBitContext *gbc) -{ - GetBitContext tmp = *gbc; - size_t size = 0; - for (int i = 0; get_bits_left(&tmp) >= 8; i++) { - if (get_bits(&tmp, 8)) - size = i; - } - return size; -} - - -#define HEADER(name) do { \ - ff_cbs_trace_header(ctx, name); \ - } while (0) - -#define CHECK(call) do { \ - err = (call); \ - if (err < 0) \ - return err; \ - } while (0) - -#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name -#define FUNC_AV1(rw, name) FUNC_NAME(rw, av1, name) -#define FUNC(name) FUNC_AV1(READWRITE, name) - -#define SUBSCRIPTS(subs, ...) (subs > 0 ? 
((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) - -#define fb(width, name) \ - xf(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) -#define fc(width, name, range_min, range_max) \ - xf(width, name, current->name, range_min, range_max, 0, ) -#define flag(name) fb(1, name) -#define su(width, name) \ - xsu(width, name, current->name, 0, ) - -#define fbs(width, name, subs, ...) \ - xf(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) -#define fcs(width, name, range_min, range_max, subs, ...) \ - xf(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) -#define flags(name, subs, ...) \ - xf(1, name, current->name, 0, 1, subs, __VA_ARGS__) -#define sus(width, name, subs, ...) \ - xsu(width, name, current->name, subs, __VA_ARGS__) - -#define fixed(width, name, value) do { \ - av_unused uint32_t fixed_value = value; \ - xf(width, name, fixed_value, value, value, 0, ); \ - } while (0) - - -#define READ -#define READWRITE read -#define RWContext GetBitContext - -#define xf(width, name, var, range_min, range_max, subs, ...) do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while (0) - -#define xsu(width, name, var, subs, ...) do { \ - int32_t value; \ - CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - var = value; \ - } while (0) - -#define uvlc(name, range_min, range_max) do { \ - uint32_t value; \ - CHECK(cbs_av1_read_uvlc(ctx, rw, #name, \ - &value, range_min, range_max)); \ - current->name = value; \ - } while (0) - -#define ns(max_value, name, subs, ...) 
do { \ - uint32_t value; \ - CHECK(cbs_av1_read_ns(ctx, rw, max_value, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ - current->name = value; \ - } while (0) - -#define increment(name, min, max) do { \ - uint32_t value; \ - CHECK(cbs_av1_read_increment(ctx, rw, min, max, #name, &value)); \ - current->name = value; \ - } while (0) - -#define subexp(name, max, subs, ...) do { \ - uint32_t value; \ - CHECK(cbs_av1_read_subexp(ctx, rw, max, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ - current->name = value; \ - } while (0) - -#define delta_q(name) do { \ - uint8_t delta_coded; \ - int8_t delta_q; \ - xf(1, name.delta_coded, delta_coded, 0, 1, 0, ); \ - if (delta_coded) \ - xsu(1 + 6, name.delta_q, delta_q, 0, ); \ - else \ - delta_q = 0; \ - current->name = delta_q; \ - } while (0) - -#define leb128(name) do { \ - uint64_t value; \ - CHECK(cbs_av1_read_leb128(ctx, rw, #name, &value)); \ - current->name = value; \ - } while (0) - -#define infer(name, value) do { \ - current->name = value; \ - } while (0) - -#define byte_alignment(rw) (get_bits_count(rw) % 8) - -#include "cbs_av1_syntax_template.c" - -#undef READ -#undef READWRITE -#undef RWContext -#undef xf -#undef xsu -#undef uvlc -#undef ns -#undef increment -#undef subexp -#undef delta_q -#undef leb128 -#undef infer -#undef byte_alignment - - -#define WRITE -#define READWRITE write -#define RWContext PutBitContext - -#define xf(width, name, var, range_min, range_max, subs, ...) do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - var, range_min, range_max)); \ - } while (0) - -#define xsu(width, name, var, subs, ...) 
do { \ - CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), var, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - } while (0) - -#define uvlc(name, range_min, range_max) do { \ - CHECK(cbs_av1_write_uvlc(ctx, rw, #name, current->name, \ - range_min, range_max)); \ - } while (0) - -#define ns(max_value, name, subs, ...) do { \ - CHECK(cbs_av1_write_ns(ctx, rw, max_value, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - current->name)); \ - } while (0) - -#define increment(name, min, max) do { \ - CHECK(cbs_av1_write_increment(ctx, rw, min, max, #name, \ - current->name)); \ - } while (0) - -#define subexp(name, max, subs, ...) do { \ - CHECK(cbs_av1_write_subexp(ctx, rw, max, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - current->name)); \ - } while (0) - -#define delta_q(name) do { \ - xf(1, name.delta_coded, current->name != 0, 0, 1, 0, ); \ - if (current->name) \ - xsu(1 + 6, name.delta_q, current->name, 0, ); \ - } while (0) - -#define leb128(name) do { \ - CHECK(cbs_av1_write_leb128(ctx, rw, #name, current->name)); \ - } while (0) - -#define infer(name, value) do { \ - if (current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, \ - "%s does not match inferred value: " \ - "%"PRId64", but should be %"PRId64".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - return AVERROR_INVALIDDATA; \ - } \ - } while (0) - -#define byte_alignment(rw) (put_bits_count(rw) % 8) - -#include "cbs_av1_syntax_template.c" - -#undef WRITE -#undef READWRITE -#undef RWContext -#undef xf -#undef xsu -#undef uvlc -#undef ns -#undef increment -#undef subexp -#undef delta_q -#undef leb128 -#undef infer -#undef byte_alignment - - -static int cbs_av1_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) -{ - GetBitContext gbc; - uint8_t *data; - size_t size; - uint64_t obu_length; - int pos, err, trace; - - // Don't include this parsing in trace output. 
- trace = ctx->trace_enable; - ctx->trace_enable = 0; - - data = frag->data; - size = frag->data_size; - - if (INT_MAX / 8 < size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid fragment: " - "too large (%zu bytes).\n", size); - err = AVERROR_INVALIDDATA; - goto fail; - } - - if (header && size && data[0] & 0x80) { - // first bit is nonzero, the extradata does not consist purely of - // OBUs. Expect MP4/Matroska AV1CodecConfigurationRecord - int config_record_version = data[0] & 0x7f; - - if (config_record_version != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Unknown version %d of AV1CodecConfigurationRecord " - "found!\n", - config_record_version); - err = AVERROR_INVALIDDATA; - goto fail; - } - - if (size <= 4) { - if (size < 4) { - av_log(ctx->log_ctx, AV_LOG_WARNING, - "Undersized AV1CodecConfigurationRecord v%d found!\n", - config_record_version); - err = AVERROR_INVALIDDATA; - goto fail; - } - - goto success; - } - - // In AV1CodecConfigurationRecord v1, actual OBUs start after - // four bytes. Thus set the offset as required for properly - // parsing them. 
- data += 4; - size -= 4; - } - - while (size > 0) { - AV1RawOBUHeader header; - uint64_t obu_size; - - init_get_bits(&gbc, data, 8 * size); - - err = cbs_av1_read_obu_header(ctx, &gbc, &header); - if (err < 0) - goto fail; - - if (header.obu_has_size_field) { - if (get_bits_left(&gbc) < 8) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU: fragment " - "too short (%zu bytes).\n", size); - err = AVERROR_INVALIDDATA; - goto fail; - } - err = cbs_av1_read_leb128(ctx, &gbc, "obu_size", &obu_size); - if (err < 0) - goto fail; - } else - obu_size = size - 1 - header.obu_extension_flag; - - pos = get_bits_count(&gbc); - av_assert0(pos % 8 == 0 && pos / 8 <= size); - - obu_length = pos / 8 + obu_size; - - if (size < obu_length) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU length: " - "%"PRIu64", but only %zu bytes remaining in fragment.\n", - obu_length, size); - err = AVERROR_INVALIDDATA; - goto fail; - } - - err = ff_cbs_append_unit_data(frag, header.obu_type, - data, obu_length, frag->data_ref); - if (err < 0) - goto fail; - - data += obu_length; - size -= obu_length; - } - -success: - err = 0; -fail: - ctx->trace_enable = trace; - return err; -} - -static int cbs_av1_ref_tile_data(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - GetBitContext *gbc, - AV1RawTileData *td) -{ - int pos; - - pos = get_bits_count(gbc); - if (pos >= 8 * unit->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Bitstream ended before " - "any data in tile group (%d bits read).\n", pos); - return AVERROR_INVALIDDATA; - } - // Must be byte-aligned at this point. 
- av_assert0(pos % 8 == 0); - - td->data_ref = av_buffer_ref(unit->data_ref); - if (!td->data_ref) - return AVERROR(ENOMEM); - - td->data = unit->data + pos / 8; - td->data_size = unit->data_size - pos / 8; - - return 0; -} - -static int cbs_av1_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - AV1RawOBU *obu; - GetBitContext gbc; - int err, start_pos, end_pos; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if (err < 0) - return err; - obu = unit->content; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if (err < 0) - return err; - - err = cbs_av1_read_obu_header(ctx, &gbc, &obu->header); - if (err < 0) - return err; - av_assert0(obu->header.obu_type == unit->type); - - if (obu->header.obu_has_size_field) { - uint64_t obu_size; - err = cbs_av1_read_leb128(ctx, &gbc, "obu_size", &obu_size); - if (err < 0) - return err; - obu->obu_size = obu_size; - } else { - if (unit->data_size < 1 + obu->header.obu_extension_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid OBU length: " - "unit too short (%zu).\n", unit->data_size); - return AVERROR_INVALIDDATA; - } - obu->obu_size = unit->data_size - 1 - obu->header.obu_extension_flag; - } - - start_pos = get_bits_count(&gbc); - - if (obu->header.obu_extension_flag) { - if (obu->header.obu_type != AV1_OBU_SEQUENCE_HEADER && - obu->header.obu_type != AV1_OBU_TEMPORAL_DELIMITER && - priv->operating_point_idc) { - int in_temporal_layer = - (priv->operating_point_idc >> priv->temporal_id ) & 1; - int in_spatial_layer = - (priv->operating_point_idc >> (priv->spatial_id + 8)) & 1; - if (!in_temporal_layer || !in_spatial_layer) { - return AVERROR(EAGAIN); // drop_obu() - } - } - } - - switch (obu->header.obu_type) { - case AV1_OBU_SEQUENCE_HEADER: - { - err = cbs_av1_read_sequence_header_obu(ctx, &gbc, - &obu->obu.sequence_header); - if (err < 0) - return err; - - if (priv->operating_point >= 0) { - AV1RawSequenceHeader *sequence_header = 
&obu->obu.sequence_header; - - if (priv->operating_point > sequence_header->operating_points_cnt_minus_1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid Operating Point %d requested. " - "Must not be higher than %u.\n", - priv->operating_point, sequence_header->operating_points_cnt_minus_1); - return AVERROR(EINVAL); - } - priv->operating_point_idc = sequence_header->operating_point_idc[priv->operating_point]; - } - - av_buffer_unref(&priv->sequence_header_ref); - priv->sequence_header = NULL; - - priv->sequence_header_ref = av_buffer_ref(unit->content_ref); - if (!priv->sequence_header_ref) - return AVERROR(ENOMEM); - priv->sequence_header = &obu->obu.sequence_header; - } - break; - case AV1_OBU_TEMPORAL_DELIMITER: - { - err = cbs_av1_read_temporal_delimiter_obu(ctx, &gbc); - if (err < 0) - return err; - } - break; - case AV1_OBU_FRAME_HEADER: - case AV1_OBU_REDUNDANT_FRAME_HEADER: - { - err = cbs_av1_read_frame_header_obu(ctx, &gbc, - &obu->obu.frame_header, - obu->header.obu_type == - AV1_OBU_REDUNDANT_FRAME_HEADER, - unit->data_ref); - if (err < 0) - return err; - } - break; - case AV1_OBU_TILE_GROUP: - { - err = cbs_av1_read_tile_group_obu(ctx, &gbc, - &obu->obu.tile_group); - if (err < 0) - return err; - - err = cbs_av1_ref_tile_data(ctx, unit, &gbc, - &obu->obu.tile_group.tile_data); - if (err < 0) - return err; - } - break; - case AV1_OBU_FRAME: - { - err = cbs_av1_read_frame_obu(ctx, &gbc, &obu->obu.frame, - unit->data_ref); - if (err < 0) - return err; - - err = cbs_av1_ref_tile_data(ctx, unit, &gbc, - &obu->obu.frame.tile_group.tile_data); - if (err < 0) - return err; - } - break; - case AV1_OBU_TILE_LIST: - { - err = cbs_av1_read_tile_list_obu(ctx, &gbc, - &obu->obu.tile_list); - if (err < 0) - return err; - - err = cbs_av1_ref_tile_data(ctx, unit, &gbc, - &obu->obu.tile_list.tile_data); - if (err < 0) - return err; - } - break; - case AV1_OBU_METADATA: - { - err = cbs_av1_read_metadata_obu(ctx, &gbc, &obu->obu.metadata); - if (err < 0) - return err; - 
} - break; - case AV1_OBU_PADDING: - { - err = cbs_av1_read_padding_obu(ctx, &gbc, &obu->obu.padding); - if (err < 0) - return err; - } - break; - default: - return AVERROR(ENOSYS); - } - - end_pos = get_bits_count(&gbc); - av_assert0(end_pos <= unit->data_size * 8); - - if (obu->obu_size > 0 && - obu->header.obu_type != AV1_OBU_TILE_GROUP && - obu->header.obu_type != AV1_OBU_TILE_LIST && - obu->header.obu_type != AV1_OBU_FRAME) { - int nb_bits = obu->obu_size * 8 + start_pos - end_pos; - - if (nb_bits <= 0) - return AVERROR_INVALIDDATA; - - err = cbs_av1_read_trailing_bits(ctx, &gbc, nb_bits); - if (err < 0) - return err; - } - - return 0; -} - -static int cbs_av1_write_obu(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - AV1RawOBU *obu = unit->content; - PutBitContext pbc_tmp; - AV1RawTileData *td; - size_t header_size; - int err, start_pos, end_pos, data_pos; - - // OBUs in the normal bitstream format must contain a size field - // in every OBU (in annex B it is optional, but we don't support - // writing that). - obu->header.obu_has_size_field = 1; - - err = cbs_av1_write_obu_header(ctx, pbc, &obu->header); - if (err < 0) - return err; - - if (obu->header.obu_has_size_field) { - pbc_tmp = *pbc; - // Add space for the size field to fill later. 
- put_bits32(pbc, 0); - put_bits32(pbc, 0); - } - - td = NULL; - start_pos = put_bits_count(pbc); - - switch (obu->header.obu_type) { - case AV1_OBU_SEQUENCE_HEADER: - { - err = cbs_av1_write_sequence_header_obu(ctx, pbc, - &obu->obu.sequence_header); - if (err < 0) - return err; - - av_buffer_unref(&priv->sequence_header_ref); - priv->sequence_header = NULL; - - err = ff_cbs_make_unit_refcounted(ctx, unit); - if (err < 0) - return err; - - priv->sequence_header_ref = av_buffer_ref(unit->content_ref); - if (!priv->sequence_header_ref) - return AVERROR(ENOMEM); - priv->sequence_header = &obu->obu.sequence_header; - } - break; - case AV1_OBU_TEMPORAL_DELIMITER: - { - err = cbs_av1_write_temporal_delimiter_obu(ctx, pbc); - if (err < 0) - return err; - } - break; - case AV1_OBU_FRAME_HEADER: - case AV1_OBU_REDUNDANT_FRAME_HEADER: - { - err = cbs_av1_write_frame_header_obu(ctx, pbc, - &obu->obu.frame_header, - obu->header.obu_type == - AV1_OBU_REDUNDANT_FRAME_HEADER, - NULL); - if (err < 0) - return err; - } - break; - case AV1_OBU_TILE_GROUP: - { - err = cbs_av1_write_tile_group_obu(ctx, pbc, - &obu->obu.tile_group); - if (err < 0) - return err; - - td = &obu->obu.tile_group.tile_data; - } - break; - case AV1_OBU_FRAME: - { - err = cbs_av1_write_frame_obu(ctx, pbc, &obu->obu.frame, NULL); - if (err < 0) - return err; - - td = &obu->obu.frame.tile_group.tile_data; - } - break; - case AV1_OBU_TILE_LIST: - { - err = cbs_av1_write_tile_list_obu(ctx, pbc, &obu->obu.tile_list); - if (err < 0) - return err; - - td = &obu->obu.tile_list.tile_data; - } - break; - case AV1_OBU_METADATA: - { - err = cbs_av1_write_metadata_obu(ctx, pbc, &obu->obu.metadata); - if (err < 0) - return err; - } - break; - case AV1_OBU_PADDING: - { - err = cbs_av1_write_padding_obu(ctx, pbc, &obu->obu.padding); - if (err < 0) - return err; - } - break; - default: - return AVERROR(ENOSYS); - } - - end_pos = put_bits_count(pbc); - header_size = (end_pos - start_pos + 7) / 8; - if (td) { - obu->obu_size = 
header_size + td->data_size; - } else if (header_size > 0) { - // Add trailing bits and recalculate. - err = cbs_av1_write_trailing_bits(ctx, pbc, 8 - end_pos % 8); - if (err < 0) - return err; - end_pos = put_bits_count(pbc); - obu->obu_size = header_size = (end_pos - start_pos + 7) / 8; - } else { - // Empty OBU. - obu->obu_size = 0; - } - - end_pos = put_bits_count(pbc); - // Must now be byte-aligned. - av_assert0(end_pos % 8 == 0); - flush_put_bits(pbc); - start_pos /= 8; - end_pos /= 8; - - *pbc = pbc_tmp; - err = cbs_av1_write_leb128(ctx, pbc, "obu_size", obu->obu_size); - if (err < 0) - return err; - - data_pos = put_bits_count(pbc) / 8; - flush_put_bits(pbc); - av_assert0(data_pos <= start_pos); - - if (8 * obu->obu_size > put_bits_left(pbc)) - return AVERROR(ENOSPC); - - if (obu->obu_size > 0) { - memmove(pbc->buf + data_pos, - pbc->buf + start_pos, header_size); - skip_put_bytes(pbc, header_size); - - if (td) { - memcpy(pbc->buf + data_pos + header_size, - td->data, td->data_size); - skip_put_bytes(pbc, td->data_size); - } - } - - // OBU data must be byte-aligned. 
- av_assert0(put_bits_count(pbc) % 8 == 0); - - return 0; -} - -static int cbs_av1_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) -{ - size_t size, pos; - int i; - - size = 0; - for (i = 0; i < frag->nb_units; i++) - size += frag->units[i].data_size; - - frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!frag->data_ref) - return AVERROR(ENOMEM); - frag->data = frag->data_ref->data; - memset(frag->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - pos = 0; - for (i = 0; i < frag->nb_units; i++) { - memcpy(frag->data + pos, frag->units[i].data, - frag->units[i].data_size); - pos += frag->units[i].data_size; - } - av_assert0(pos == size); - frag->data_size = size; - - return 0; -} - -static void cbs_av1_flush(CodedBitstreamContext *ctx) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - - av_buffer_unref(&priv->frame_header_ref); - priv->sequence_header = NULL; - priv->frame_header = NULL; - - memset(priv->ref, 0, sizeof(priv->ref)); - priv->operating_point_idc = 0; - priv->seen_frame_header = 0; - priv->tile_num = 0; -} - -static void cbs_av1_close(CodedBitstreamContext *ctx) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - - av_buffer_unref(&priv->sequence_header_ref); - av_buffer_unref(&priv->frame_header_ref); -} - -static void cbs_av1_free_metadata(void *unit, uint8_t *content) -{ - AV1RawOBU *obu = (AV1RawOBU*)content; - AV1RawMetadata *md; - - av_assert0(obu->header.obu_type == AV1_OBU_METADATA); - md = &obu->obu.metadata; - - switch (md->metadata_type) { - case AV1_METADATA_TYPE_ITUT_T35: - av_buffer_unref(&md->metadata.itut_t35.payload_ref); - break; - } - av_free(content); -} - -static const CodedBitstreamUnitTypeDescriptor cbs_av1_unit_types[] = { - CBS_UNIT_TYPE_POD(AV1_OBU_SEQUENCE_HEADER, AV1RawOBU), - CBS_UNIT_TYPE_POD(AV1_OBU_TEMPORAL_DELIMITER, AV1RawOBU), - CBS_UNIT_TYPE_POD(AV1_OBU_FRAME_HEADER, AV1RawOBU), - CBS_UNIT_TYPE_POD(AV1_OBU_REDUNDANT_FRAME_HEADER, AV1RawOBU), - - 
CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_TILE_GROUP, AV1RawOBU, - obu.tile_group.tile_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_FRAME, AV1RawOBU, - obu.frame.tile_group.tile_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_TILE_LIST, AV1RawOBU, - obu.tile_list.tile_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(AV1_OBU_PADDING, AV1RawOBU, - obu.padding.payload), - - CBS_UNIT_TYPE_COMPLEX(AV1_OBU_METADATA, AV1RawOBU, - &cbs_av1_free_metadata), - - CBS_UNIT_TYPE_END_OF_LIST -}; - -#define OFFSET(x) offsetof(CodedBitstreamAV1Context, x) -static const AVOption cbs_av1_options[] = { - { "operating_point", "Set operating point to select layers to parse from a scalable bitstream", - OFFSET(operating_point), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AV1_MAX_OPERATING_POINTS - 1, 0 }, - { NULL } -}; - -static const AVClass cbs_av1_class = { - .class_name = "cbs_av1", - .item_name = av_default_item_name, - .option = cbs_av1_options, - .version = LIBAVUTIL_VERSION_INT, -}; - -const CodedBitstreamType ff_cbs_type_av1 = { - .codec_id = AV_CODEC_ID_AV1, - - .priv_class = &cbs_av1_class, - .priv_data_size = sizeof(CodedBitstreamAV1Context), - - .unit_types = cbs_av1_unit_types, - - .split_fragment = &cbs_av1_split_fragment, - .read_unit = &cbs_av1_read_unit, - .write_unit = &cbs_av1_write_obu, - .assemble_fragment = &cbs_av1_assemble_fragment, - - .flush = &cbs_av1_flush, - .close = &cbs_av1_close, -}; diff --git a/third-party/cbs/cbs_av1_syntax_template.c b/third-party/cbs/cbs_av1_syntax_template.c deleted file mode 100644 index d98d3d42dea..00000000000 --- a/third-party/cbs/cbs_av1_syntax_template.c +++ /dev/null @@ -1,2053 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(obu_header)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawOBUHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; - - HEADER("OBU header"); - - fc(1, obu_forbidden_bit, 0, 0); - - fc(4, obu_type, 0, AV1_OBU_PADDING); - flag(obu_extension_flag); - flag(obu_has_size_field); - - fc(1, obu_reserved_1bit, 0, 0); - - if (current->obu_extension_flag) { - fb(3, temporal_id); - fb(2, spatial_id); - fc(3, extension_header_reserved_3bits, 0, 0); - } else { - infer(temporal_id, 0); - infer(spatial_id, 0); - } - - priv->temporal_id = current->temporal_id; - priv->spatial_id = current->spatial_id; - - return 0; -} - -static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw, int nb_bits) -{ - int err; - - av_assert0(nb_bits > 0); - - fixed(1, trailing_one_bit, 1); - --nb_bits; - - while (nb_bits > 0) { - fixed(1, trailing_zero_bit, 0); - --nb_bits; - } - - return 0; -} - -static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw) -{ - int err; - - while (byte_alignment(rw) != 0) - fixed(1, zero_bit, 0); - - return 0; -} - -static int FUNC(color_config)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawColorConfig *current, int seq_profile) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; - - flag(high_bitdepth); - - if (seq_profile == FF_PROFILE_AV1_PROFESSIONAL && - current->high_bitdepth) { - flag(twelve_bit); - priv->bit_depth = current->twelve_bit ? 
12 : 10; - } else { - priv->bit_depth = current->high_bitdepth ? 10 : 8; - } - - if (seq_profile == FF_PROFILE_AV1_HIGH) - infer(mono_chrome, 0); - else - flag(mono_chrome); - priv->num_planes = current->mono_chrome ? 1 : 3; - - flag(color_description_present_flag); - if (current->color_description_present_flag) { - fb(8, color_primaries); - fb(8, transfer_characteristics); - fb(8, matrix_coefficients); - } else { - infer(color_primaries, AVCOL_PRI_UNSPECIFIED); - infer(transfer_characteristics, AVCOL_TRC_UNSPECIFIED); - infer(matrix_coefficients, AVCOL_SPC_UNSPECIFIED); - } - - if (current->mono_chrome) { - flag(color_range); - - infer(subsampling_x, 1); - infer(subsampling_y, 1); - infer(chroma_sample_position, AV1_CSP_UNKNOWN); - infer(separate_uv_delta_q, 0); - - } else if (current->color_primaries == AVCOL_PRI_BT709 && - current->transfer_characteristics == AVCOL_TRC_IEC61966_2_1 && - current->matrix_coefficients == AVCOL_SPC_RGB) { - infer(color_range, 1); - infer(subsampling_x, 0); - infer(subsampling_y, 0); - flag(separate_uv_delta_q); - - } else { - flag(color_range); - - if (seq_profile == FF_PROFILE_AV1_MAIN) { - infer(subsampling_x, 1); - infer(subsampling_y, 1); - } else if (seq_profile == FF_PROFILE_AV1_HIGH) { - infer(subsampling_x, 0); - infer(subsampling_y, 0); - } else { - if (priv->bit_depth == 12) { - fb(1, subsampling_x); - if (current->subsampling_x) - fb(1, subsampling_y); - else - infer(subsampling_y, 0); - } else { - infer(subsampling_x, 1); - infer(subsampling_y, 0); - } - } - if (current->subsampling_x && current->subsampling_y) { - fc(2, chroma_sample_position, AV1_CSP_UNKNOWN, - AV1_CSP_COLOCATED); - } - - flag(separate_uv_delta_q); - } - - return 0; -} - -static int FUNC(timing_info)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawTimingInfo *current) -{ - int err; - - fc(32, num_units_in_display_tick, 1, MAX_UINT_BITS(32)); - fc(32, time_scale, 1, MAX_UINT_BITS(32)); - - flag(equal_picture_interval); - if 
(current->equal_picture_interval) - uvlc(num_ticks_per_picture_minus_1, 0, MAX_UINT_BITS(32) - 1); - - return 0; -} - -static int FUNC(decoder_model_info)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawDecoderModelInfo *current) -{ - int err; - - fb(5, buffer_delay_length_minus_1); - fb(32, num_units_in_decoding_tick); - fb(5, buffer_removal_time_length_minus_1); - fb(5, frame_presentation_time_length_minus_1); - - return 0; -} - -static int FUNC(sequence_header_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawSequenceHeader *current) -{ - int i, err; - - HEADER("Sequence Header"); - - fc(3, seq_profile, FF_PROFILE_AV1_MAIN, - FF_PROFILE_AV1_PROFESSIONAL); - flag(still_picture); - flag(reduced_still_picture_header); - - if (current->reduced_still_picture_header) { - infer(timing_info_present_flag, 0); - infer(decoder_model_info_present_flag, 0); - infer(initial_display_delay_present_flag, 0); - infer(operating_points_cnt_minus_1, 0); - infer(operating_point_idc[0], 0); - - fb(5, seq_level_idx[0]); - - infer(seq_tier[0], 0); - infer(decoder_model_present_for_this_op[0], 0); - infer(initial_display_delay_present_for_this_op[0], 0); - - } else { - flag(timing_info_present_flag); - if (current->timing_info_present_flag) { - CHECK(FUNC(timing_info)(ctx, rw, ¤t->timing_info)); - - flag(decoder_model_info_present_flag); - if (current->decoder_model_info_present_flag) { - CHECK(FUNC(decoder_model_info) - (ctx, rw, ¤t->decoder_model_info)); - } - } else { - infer(decoder_model_info_present_flag, 0); - } - - flag(initial_display_delay_present_flag); - - fb(5, operating_points_cnt_minus_1); - for (i = 0; i <= current->operating_points_cnt_minus_1; i++) { - fbs(12, operating_point_idc[i], 1, i); - fbs(5, seq_level_idx[i], 1, i); - - if (current->seq_level_idx[i] > 7) - flags(seq_tier[i], 1, i); - else - infer(seq_tier[i], 0); - - if (current->decoder_model_info_present_flag) { - flags(decoder_model_present_for_this_op[i], 1, i); - if 
(current->decoder_model_present_for_this_op[i]) { - int n = current->decoder_model_info.buffer_delay_length_minus_1 + 1; - fbs(n, decoder_buffer_delay[i], 1, i); - fbs(n, encoder_buffer_delay[i], 1, i); - flags(low_delay_mode_flag[i], 1, i); - } - } else { - infer(decoder_model_present_for_this_op[i], 0); - } - - if (current->initial_display_delay_present_flag) { - flags(initial_display_delay_present_for_this_op[i], 1, i); - if (current->initial_display_delay_present_for_this_op[i]) - fbs(4, initial_display_delay_minus_1[i], 1, i); - } - } - } - - fb(4, frame_width_bits_minus_1); - fb(4, frame_height_bits_minus_1); - - fb(current->frame_width_bits_minus_1 + 1, max_frame_width_minus_1); - fb(current->frame_height_bits_minus_1 + 1, max_frame_height_minus_1); - - if (current->reduced_still_picture_header) - infer(frame_id_numbers_present_flag, 0); - else - flag(frame_id_numbers_present_flag); - if (current->frame_id_numbers_present_flag) { - fb(4, delta_frame_id_length_minus_2); - fb(3, additional_frame_id_length_minus_1); - } - - flag(use_128x128_superblock); - flag(enable_filter_intra); - flag(enable_intra_edge_filter); - - if (current->reduced_still_picture_header) { - infer(enable_interintra_compound, 0); - infer(enable_masked_compound, 0); - infer(enable_warped_motion, 0); - infer(enable_dual_filter, 0); - infer(enable_order_hint, 0); - infer(enable_jnt_comp, 0); - infer(enable_ref_frame_mvs, 0); - - infer(seq_force_screen_content_tools, - AV1_SELECT_SCREEN_CONTENT_TOOLS); - infer(seq_force_integer_mv, - AV1_SELECT_INTEGER_MV); - } else { - flag(enable_interintra_compound); - flag(enable_masked_compound); - flag(enable_warped_motion); - flag(enable_dual_filter); - - flag(enable_order_hint); - if (current->enable_order_hint) { - flag(enable_jnt_comp); - flag(enable_ref_frame_mvs); - } else { - infer(enable_jnt_comp, 0); - infer(enable_ref_frame_mvs, 0); - } - - flag(seq_choose_screen_content_tools); - if (current->seq_choose_screen_content_tools) - 
infer(seq_force_screen_content_tools, - AV1_SELECT_SCREEN_CONTENT_TOOLS); - else - fb(1, seq_force_screen_content_tools); - if (current->seq_force_screen_content_tools > 0) { - flag(seq_choose_integer_mv); - if (current->seq_choose_integer_mv) - infer(seq_force_integer_mv, - AV1_SELECT_INTEGER_MV); - else - fb(1, seq_force_integer_mv); - } else { - infer(seq_force_integer_mv, AV1_SELECT_INTEGER_MV); - } - - if (current->enable_order_hint) - fb(3, order_hint_bits_minus_1); - } - - flag(enable_superres); - flag(enable_cdef); - flag(enable_restoration); - - CHECK(FUNC(color_config)(ctx, rw, ¤t->color_config, - current->seq_profile)); - - flag(film_grain_params_present); - - return 0; -} - -static int FUNC(temporal_delimiter_obu)(CodedBitstreamContext *ctx, RWContext *rw) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - - HEADER("Temporal Delimiter"); - - priv->seen_frame_header = 0; - - return 0; -} - -static int FUNC(set_frame_refs)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - static const uint8_t ref_frame_list[AV1_NUM_REF_FRAMES - 2] = { - AV1_REF_FRAME_LAST2, AV1_REF_FRAME_LAST3, AV1_REF_FRAME_BWDREF, - AV1_REF_FRAME_ALTREF2, AV1_REF_FRAME_ALTREF - }; - int8_t ref_frame_idx[AV1_REFS_PER_FRAME], used_frame[AV1_NUM_REF_FRAMES]; - int16_t shifted_order_hints[AV1_NUM_REF_FRAMES]; - int cur_frame_hint, latest_order_hint, earliest_order_hint, ref; - int i, j; - - for (i = 0; i < AV1_REFS_PER_FRAME; i++) - ref_frame_idx[i] = -1; - ref_frame_idx[AV1_REF_FRAME_LAST - AV1_REF_FRAME_LAST] = current->last_frame_idx; - ref_frame_idx[AV1_REF_FRAME_GOLDEN - AV1_REF_FRAME_LAST] = current->golden_frame_idx; - - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) - used_frame[i] = 0; - used_frame[current->last_frame_idx] = 1; - used_frame[current->golden_frame_idx] = 1; - - cur_frame_hint = 1 << (seq->order_hint_bits_minus_1); - for (i = 0; i < 
AV1_NUM_REF_FRAMES; i++) - shifted_order_hints[i] = cur_frame_hint + - cbs_av1_get_relative_dist(seq, priv->ref[i].order_hint, - priv->order_hint); - - latest_order_hint = shifted_order_hints[current->last_frame_idx]; - earliest_order_hint = shifted_order_hints[current->golden_frame_idx]; - - ref = -1; - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if (!used_frame[i] && hint >= cur_frame_hint && - (ref < 0 || hint >= latest_order_hint)) { - ref = i; - latest_order_hint = hint; - } - } - if (ref >= 0) { - ref_frame_idx[AV1_REF_FRAME_ALTREF - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - - ref = -1; - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if (!used_frame[i] && hint >= cur_frame_hint && - (ref < 0 || hint < earliest_order_hint)) { - ref = i; - earliest_order_hint = hint; - } - } - if (ref >= 0) { - ref_frame_idx[AV1_REF_FRAME_BWDREF - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - - ref = -1; - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if (!used_frame[i] && hint >= cur_frame_hint && - (ref < 0 || hint < earliest_order_hint)) { - ref = i; - earliest_order_hint = hint; - } - } - if (ref >= 0) { - ref_frame_idx[AV1_REF_FRAME_ALTREF2 - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - - for (i = 0; i < AV1_REFS_PER_FRAME - 2; i++) { - int ref_frame = ref_frame_list[i]; - if (ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] < 0 ) { - ref = -1; - for (j = 0; j < AV1_NUM_REF_FRAMES; j++) { - int hint = shifted_order_hints[j]; - if (!used_frame[j] && hint < cur_frame_hint && - (ref < 0 || hint >= latest_order_hint)) { - ref = j; - latest_order_hint = hint; - } - } - if (ref >= 0) { - ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] = ref; - used_frame[ref] = 1; - } - } - } - - ref = -1; - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - int hint = shifted_order_hints[i]; - if (ref < 0 || hint < earliest_order_hint) { - ref = i; - 
earliest_order_hint = hint; - } - } - for (i = 0; i < AV1_REFS_PER_FRAME; i++) { - if (ref_frame_idx[i] < 0) - ref_frame_idx[i] = ref; - infer(ref_frame_idx[i], ref_frame_idx[i]); - } - - return 0; -} - -static int FUNC(superres_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int denom, err; - - if (seq->enable_superres) - flag(use_superres); - else - infer(use_superres, 0); - - if (current->use_superres) { - fb(3, coded_denom); - denom = current->coded_denom + AV1_SUPERRES_DENOM_MIN; - } else { - denom = AV1_SUPERRES_NUM; - } - - priv->upscaled_width = priv->frame_width; - priv->frame_width = (priv->upscaled_width * AV1_SUPERRES_NUM + - denom / 2) / denom; - - return 0; -} - -static int FUNC(frame_size)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int err; - - if (current->frame_size_override_flag) { - fb(seq->frame_width_bits_minus_1 + 1, frame_width_minus_1); - fb(seq->frame_height_bits_minus_1 + 1, frame_height_minus_1); - } else { - infer(frame_width_minus_1, seq->max_frame_width_minus_1); - infer(frame_height_minus_1, seq->max_frame_height_minus_1); - } - - priv->frame_width = current->frame_width_minus_1 + 1; - priv->frame_height = current->frame_height_minus_1 + 1; - - CHECK(FUNC(superres_params)(ctx, rw, current)); - - return 0; -} - -static int FUNC(render_size)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; - - flag(render_and_frame_size_different); - - if (current->render_and_frame_size_different) { - fb(16, render_width_minus_1); - fb(16, render_height_minus_1); - } else { - infer(render_width_minus_1, current->frame_width_minus_1); - infer(render_height_minus_1, 
current->frame_height_minus_1); - } - - priv->render_width = current->render_width_minus_1 + 1; - priv->render_height = current->render_height_minus_1 + 1; - - return 0; -} - -static int FUNC(frame_size_with_refs)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int i, err; - - for (i = 0; i < AV1_REFS_PER_FRAME; i++) { - flags(found_ref[i], 1, i); - if (current->found_ref[i]) { - AV1ReferenceFrameState *ref = - &priv->ref[current->ref_frame_idx[i]]; - - if (!ref->valid) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Missing reference frame needed for frame size " - "(ref = %d, ref_frame_idx = %d).\n", - i, current->ref_frame_idx[i]); - return AVERROR_INVALIDDATA; - } - - infer(frame_width_minus_1, ref->upscaled_width - 1); - infer(frame_height_minus_1, ref->frame_height - 1); - infer(render_width_minus_1, ref->render_width - 1); - infer(render_height_minus_1, ref->render_height - 1); - - priv->upscaled_width = ref->upscaled_width; - priv->frame_width = priv->upscaled_width; - priv->frame_height = ref->frame_height; - priv->render_width = ref->render_width; - priv->render_height = ref->render_height; - break; - } - } - - if (i >= AV1_REFS_PER_FRAME) { - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - } else { - CHECK(FUNC(superres_params)(ctx, rw, current)); - } - - return 0; -} - -static int FUNC(interpolation_filter)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - int err; - - flag(is_filter_switchable); - if (current->is_filter_switchable) - infer(interpolation_filter, - AV1_INTERPOLATION_FILTER_SWITCHABLE); - else - fb(2, interpolation_filter); - - return 0; -} - -static int FUNC(tile_info)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int mi_cols, mi_rows, sb_cols, sb_rows, 
sb_shift, sb_size; - int max_tile_width_sb, max_tile_height_sb, max_tile_area_sb; - int min_log2_tile_cols, max_log2_tile_cols, max_log2_tile_rows; - int min_log2_tiles, min_log2_tile_rows; - int i, err; - - mi_cols = 2 * ((priv->frame_width + 7) >> 3); - mi_rows = 2 * ((priv->frame_height + 7) >> 3); - - sb_cols = seq->use_128x128_superblock ? ((mi_cols + 31) >> 5) - : ((mi_cols + 15) >> 4); - sb_rows = seq->use_128x128_superblock ? ((mi_rows + 31) >> 5) - : ((mi_rows + 15) >> 4); - - sb_shift = seq->use_128x128_superblock ? 5 : 4; - sb_size = sb_shift + 2; - - max_tile_width_sb = AV1_MAX_TILE_WIDTH >> sb_size; - max_tile_area_sb = AV1_MAX_TILE_AREA >> (2 * sb_size); - - min_log2_tile_cols = cbs_av1_tile_log2(max_tile_width_sb, sb_cols); - max_log2_tile_cols = cbs_av1_tile_log2(1, FFMIN(sb_cols, AV1_MAX_TILE_COLS)); - max_log2_tile_rows = cbs_av1_tile_log2(1, FFMIN(sb_rows, AV1_MAX_TILE_ROWS)); - min_log2_tiles = FFMAX(min_log2_tile_cols, - cbs_av1_tile_log2(max_tile_area_sb, sb_rows * sb_cols)); - - flag(uniform_tile_spacing_flag); - - if (current->uniform_tile_spacing_flag) { - int tile_width_sb, tile_height_sb; - - increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols); - - tile_width_sb = (sb_cols + (1 << current->tile_cols_log2) - 1) >> - current->tile_cols_log2; - current->tile_cols = (sb_cols + tile_width_sb - 1) / tile_width_sb; - - min_log2_tile_rows = FFMAX(min_log2_tiles - current->tile_cols_log2, 0); - - increment(tile_rows_log2, min_log2_tile_rows, max_log2_tile_rows); - - tile_height_sb = (sb_rows + (1 << current->tile_rows_log2) - 1) >> - current->tile_rows_log2; - current->tile_rows = (sb_rows + tile_height_sb - 1) / tile_height_sb; - - for (i = 0; i < current->tile_cols - 1; i++) - infer(width_in_sbs_minus_1[i], tile_width_sb - 1); - infer(width_in_sbs_minus_1[i], - sb_cols - (current->tile_cols - 1) * tile_width_sb - 1); - for (i = 0; i < current->tile_rows - 1; i++) - infer(height_in_sbs_minus_1[i], tile_height_sb - 1); - 
infer(height_in_sbs_minus_1[i], - sb_rows - (current->tile_rows - 1) * tile_height_sb - 1); - - } else { - int widest_tile_sb, start_sb, size_sb, max_width, max_height; - - widest_tile_sb = 0; - - start_sb = 0; - for (i = 0; start_sb < sb_cols && i < AV1_MAX_TILE_COLS; i++) { - max_width = FFMIN(sb_cols - start_sb, max_tile_width_sb); - ns(max_width, width_in_sbs_minus_1[i], 1, i); - size_sb = current->width_in_sbs_minus_1[i] + 1; - widest_tile_sb = FFMAX(size_sb, widest_tile_sb); - start_sb += size_sb; - } - current->tile_cols_log2 = cbs_av1_tile_log2(1, i); - current->tile_cols = i; - - if (min_log2_tiles > 0) - max_tile_area_sb = (sb_rows * sb_cols) >> (min_log2_tiles + 1); - else - max_tile_area_sb = sb_rows * sb_cols; - max_tile_height_sb = FFMAX(max_tile_area_sb / widest_tile_sb, 1); - - start_sb = 0; - for (i = 0; start_sb < sb_rows && i < AV1_MAX_TILE_ROWS; i++) { - max_height = FFMIN(sb_rows - start_sb, max_tile_height_sb); - ns(max_height, height_in_sbs_minus_1[i], 1, i); - size_sb = current->height_in_sbs_minus_1[i] + 1; - start_sb += size_sb; - } - current->tile_rows_log2 = cbs_av1_tile_log2(1, i); - current->tile_rows = i; - } - - if (current->tile_cols_log2 > 0 || - current->tile_rows_log2 > 0) { - fb(current->tile_cols_log2 + current->tile_rows_log2, - context_update_tile_id); - fb(2, tile_size_bytes_minus1); - } else { - infer(context_update_tile_id, 0); - } - - priv->tile_cols = current->tile_cols; - priv->tile_rows = current->tile_rows; - - return 0; -} - -static int FUNC(quantization_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int err; - - fb(8, base_q_idx); - - delta_q(delta_q_y_dc); - - if (priv->num_planes > 1) { - if (seq->color_config.separate_uv_delta_q) - flag(diff_uv_delta); - else - infer(diff_uv_delta, 0); - - delta_q(delta_q_u_dc); - delta_q(delta_q_u_ac); - - if 
(current->diff_uv_delta) { - delta_q(delta_q_v_dc); - delta_q(delta_q_v_ac); - } else { - infer(delta_q_v_dc, current->delta_q_u_dc); - infer(delta_q_v_ac, current->delta_q_u_ac); - } - } else { - infer(delta_q_u_dc, 0); - infer(delta_q_u_ac, 0); - infer(delta_q_v_dc, 0); - infer(delta_q_v_ac, 0); - } - - flag(using_qmatrix); - if (current->using_qmatrix) { - fb(4, qm_y); - fb(4, qm_u); - if (seq->color_config.separate_uv_delta_q) - fb(4, qm_v); - else - infer(qm_v, current->qm_u); - } - - return 0; -} - -static int FUNC(segmentation_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - static const uint8_t bits[AV1_SEG_LVL_MAX] = { 8, 6, 6, 6, 6, 3, 0, 0 }; - static const uint8_t sign[AV1_SEG_LVL_MAX] = { 1, 1, 1, 1, 1, 0, 0, 0 }; - static const uint8_t default_feature_enabled[AV1_SEG_LVL_MAX] = { 0 }; - static const int16_t default_feature_value[AV1_SEG_LVL_MAX] = { 0 }; - int i, j, err; - - flag(segmentation_enabled); - - if (current->segmentation_enabled) { - if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - infer(segmentation_update_map, 1); - infer(segmentation_temporal_update, 0); - infer(segmentation_update_data, 1); - } else { - flag(segmentation_update_map); - if (current->segmentation_update_map) - flag(segmentation_temporal_update); - else - infer(segmentation_temporal_update, 0); - flag(segmentation_update_data); - } - - for (i = 0; i < AV1_MAX_SEGMENTS; i++) { - const uint8_t *ref_feature_enabled; - const int16_t *ref_feature_value; - - if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - ref_feature_enabled = default_feature_enabled; - ref_feature_value = default_feature_value; - } else { - ref_feature_enabled = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].feature_enabled[i]; - ref_feature_value = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].feature_value[i]; - } - - for (j = 0; j < AV1_SEG_LVL_MAX; j++) { - if 
(current->segmentation_update_data) { - flags(feature_enabled[i][j], 2, i, j); - - if (current->feature_enabled[i][j] && bits[j] > 0) { - if (sign[j]) - sus(1 + bits[j], feature_value[i][j], 2, i, j); - else - fbs(bits[j], feature_value[i][j], 2, i, j); - } else { - infer(feature_value[i][j], 0); - } - } else { - infer(feature_enabled[i][j], ref_feature_enabled[j]); - infer(feature_value[i][j], ref_feature_value[j]); - } - } - } - } else { - for (i = 0; i < AV1_MAX_SEGMENTS; i++) { - for (j = 0; j < AV1_SEG_LVL_MAX; j++) { - infer(feature_enabled[i][j], 0); - infer(feature_value[i][j], 0); - } - } - } - - return 0; -} - -static int FUNC(delta_q_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - int err; - - if (current->base_q_idx > 0) - flag(delta_q_present); - else - infer(delta_q_present, 0); - - if (current->delta_q_present) - fb(2, delta_q_res); - - return 0; -} - -static int FUNC(delta_lf_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - int err; - - if (current->delta_q_present) { - if (!current->allow_intrabc) - flag(delta_lf_present); - else - infer(delta_lf_present, 0); - if (current->delta_lf_present) { - fb(2, delta_lf_res); - flag(delta_lf_multi); - } else { - infer(delta_lf_res, 0); - infer(delta_lf_multi, 0); - } - } else { - infer(delta_lf_present, 0); - infer(delta_lf_res, 0); - infer(delta_lf_multi, 0); - } - - return 0; -} - -static int FUNC(loop_filter_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - static const int8_t default_loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME] = - { 1, 0, 0, 0, -1, 0, -1, -1 }; - static const int8_t default_loop_filter_mode_deltas[2] = { 0, 0 }; - int i, err; - - if (priv->coded_lossless || current->allow_intrabc) { - infer(loop_filter_level[0], 0); - infer(loop_filter_level[1], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_INTRA], 1); - 
infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST2], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST3], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_BWDREF], 0); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_GOLDEN], -1); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF], -1); - infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF2], -1); - for (i = 0; i < 2; i++) - infer(loop_filter_mode_deltas[i], 0); - return 0; - } - - fb(6, loop_filter_level[0]); - fb(6, loop_filter_level[1]); - - if (priv->num_planes > 1) { - if (current->loop_filter_level[0] || - current->loop_filter_level[1]) { - fb(6, loop_filter_level[2]); - fb(6, loop_filter_level[3]); - } - } - - fb(3, loop_filter_sharpness); - - flag(loop_filter_delta_enabled); - if (current->loop_filter_delta_enabled) { - const int8_t *ref_loop_filter_ref_deltas, *ref_loop_filter_mode_deltas; - - if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - ref_loop_filter_ref_deltas = default_loop_filter_ref_deltas; - ref_loop_filter_mode_deltas = default_loop_filter_mode_deltas; - } else { - ref_loop_filter_ref_deltas = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_ref_deltas; - ref_loop_filter_mode_deltas = - priv->ref[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_mode_deltas; - } - - flag(loop_filter_delta_update); - for (i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++) { - if (current->loop_filter_delta_update) - flags(update_ref_delta[i], 1, i); - else - infer(update_ref_delta[i], 0); - if (current->update_ref_delta[i]) - sus(1 + 6, loop_filter_ref_deltas[i], 1, i); - else - infer(loop_filter_ref_deltas[i], ref_loop_filter_ref_deltas[i]); - } - for (i = 0; i < 2; i++) { - if (current->loop_filter_delta_update) - flags(update_mode_delta[i], 1, i); - else - infer(update_mode_delta[i], 0); - if (current->update_mode_delta[i]) - sus(1 + 6, loop_filter_mode_deltas[i], 1, i); - else - infer(loop_filter_mode_deltas[i], 
ref_loop_filter_mode_deltas[i]); - } - } else { - for (i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++) - infer(loop_filter_ref_deltas[i], default_loop_filter_ref_deltas[i]); - for (i = 0; i < 2; i++) - infer(loop_filter_mode_deltas[i], default_loop_filter_mode_deltas[i]); - } - - return 0; -} - -static int FUNC(cdef_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int i, err; - - if (priv->coded_lossless || current->allow_intrabc || - !seq->enable_cdef) { - infer(cdef_damping_minus_3, 0); - infer(cdef_bits, 0); - infer(cdef_y_pri_strength[0], 0); - infer(cdef_y_sec_strength[0], 0); - infer(cdef_uv_pri_strength[0], 0); - infer(cdef_uv_sec_strength[0], 0); - - return 0; - } - - fb(2, cdef_damping_minus_3); - fb(2, cdef_bits); - - for (i = 0; i < (1 << current->cdef_bits); i++) { - fbs(4, cdef_y_pri_strength[i], 1, i); - fbs(2, cdef_y_sec_strength[i], 1, i); - - if (priv->num_planes > 1) { - fbs(4, cdef_uv_pri_strength[i], 1, i); - fbs(2, cdef_uv_sec_strength[i], 1, i); - } - } - - return 0; -} - -static int FUNC(lr_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int uses_lr, uses_chroma_lr; - int i, err; - - if (priv->all_lossless || current->allow_intrabc || - !seq->enable_restoration) { - return 0; - } - - uses_lr = uses_chroma_lr = 0; - for (i = 0; i < priv->num_planes; i++) { - fbs(2, lr_type[i], 1, i); - - if (current->lr_type[i] != AV1_RESTORE_NONE) { - uses_lr = 1; - if (i > 0) - uses_chroma_lr = 1; - } - } - - if (uses_lr) { - if (seq->use_128x128_superblock) - increment(lr_unit_shift, 1, 2); - else - increment(lr_unit_shift, 0, 2); - - if(seq->color_config.subsampling_x && - seq->color_config.subsampling_y && uses_chroma_lr) { - fb(1, lr_uv_shift); - } else { - 
infer(lr_uv_shift, 0); - } - } - - return 0; -} - -static int FUNC(read_tx_mode)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int err; - - if (priv->coded_lossless) - infer(tx_mode, 0); - else - increment(tx_mode, 1, 2); - - return 0; -} - -static int FUNC(frame_reference_mode)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - int err; - - if (current->frame_type == AV1_FRAME_INTRA_ONLY || - current->frame_type == AV1_FRAME_KEY) - infer(reference_select, 0); - else - flag(reference_select); - - return 0; -} - -static int FUNC(skip_mode_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int skip_mode_allowed; - int err; - - if (current->frame_type == AV1_FRAME_KEY || - current->frame_type == AV1_FRAME_INTRA_ONLY || - !current->reference_select || !seq->enable_order_hint) { - skip_mode_allowed = 0; - } else { - int forward_idx, backward_idx; - int forward_hint, backward_hint; - int ref_hint, dist, i; - - forward_idx = -1; - backward_idx = -1; - for (i = 0; i < AV1_REFS_PER_FRAME; i++) { - ref_hint = priv->ref[current->ref_frame_idx[i]].order_hint; - dist = cbs_av1_get_relative_dist(seq, ref_hint, - priv->order_hint); - if (dist < 0) { - if (forward_idx < 0 || - cbs_av1_get_relative_dist(seq, ref_hint, - forward_hint) > 0) { - forward_idx = i; - forward_hint = ref_hint; - } - } else if (dist > 0) { - if (backward_idx < 0 || - cbs_av1_get_relative_dist(seq, ref_hint, - backward_hint) < 0) { - backward_idx = i; - backward_hint = ref_hint; - } - } - } - - if (forward_idx < 0) { - skip_mode_allowed = 0; - } else if (backward_idx >= 0) { - skip_mode_allowed = 1; - // Frames for skip mode are forward_idx and backward_idx. 
- } else { - int second_forward_idx; - int second_forward_hint; - - second_forward_idx = -1; - for (i = 0; i < AV1_REFS_PER_FRAME; i++) { - ref_hint = priv->ref[current->ref_frame_idx[i]].order_hint; - if (cbs_av1_get_relative_dist(seq, ref_hint, - forward_hint) < 0) { - if (second_forward_idx < 0 || - cbs_av1_get_relative_dist(seq, ref_hint, - second_forward_hint) > 0) { - second_forward_idx = i; - second_forward_hint = ref_hint; - } - } - } - - if (second_forward_idx < 0) { - skip_mode_allowed = 0; - } else { - skip_mode_allowed = 1; - // Frames for skip mode are forward_idx and second_forward_idx. - } - } - } - - if (skip_mode_allowed) - flag(skip_mode_present); - else - infer(skip_mode_present, 0); - - return 0; -} - -static int FUNC(global_motion_param)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current, - int type, int ref, int idx) -{ - uint32_t abs_bits, prec_bits, num_syms; - int err; - - if (idx < 2) { - if (type == AV1_WARP_MODEL_TRANSLATION) { - abs_bits = AV1_GM_ABS_TRANS_ONLY_BITS - !current->allow_high_precision_mv; - prec_bits = AV1_GM_TRANS_ONLY_PREC_BITS - !current->allow_high_precision_mv; - } else { - abs_bits = AV1_GM_ABS_TRANS_BITS; - prec_bits = AV1_GM_TRANS_PREC_BITS; - } - } else { - abs_bits = AV1_GM_ABS_ALPHA_BITS; - prec_bits = AV1_GM_ALPHA_PREC_BITS; - } - - num_syms = 2 * (1 << abs_bits) + 1; - subexp(gm_params[ref][idx], num_syms, 2, ref, idx); - - // Actual gm_params value is not reconstructed here. 
- (void)prec_bits; - - return 0; -} - -static int FUNC(global_motion_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - int ref, type; - int err; - - if (current->frame_type == AV1_FRAME_KEY || - current->frame_type == AV1_FRAME_INTRA_ONLY) - return 0; - - for (ref = AV1_REF_FRAME_LAST; ref <= AV1_REF_FRAME_ALTREF; ref++) { - flags(is_global[ref], 1, ref); - if (current->is_global[ref]) { - flags(is_rot_zoom[ref], 1, ref); - if (current->is_rot_zoom[ref]) { - type = AV1_WARP_MODEL_ROTZOOM; - } else { - flags(is_translation[ref], 1, ref); - type = current->is_translation[ref] ? AV1_WARP_MODEL_TRANSLATION - : AV1_WARP_MODEL_AFFINE; - } - } else { - type = AV1_WARP_MODEL_IDENTITY; - } - - if (type >= AV1_WARP_MODEL_ROTZOOM) { - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 2)); - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 3)); - if (type == AV1_WARP_MODEL_AFFINE) { - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 4)); - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 5)); - } else { - // gm_params[ref][4] = -gm_params[ref][3] - // gm_params[ref][5] = gm_params[ref][2] - } - } - if (type >= AV1_WARP_MODEL_TRANSLATION) { - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 0)); - CHECK(FUNC(global_motion_param)(ctx, rw, current, type, ref, 1)); - } - } - - return 0; -} - -static int FUNC(film_grain_params)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFilmGrainParams *current, - AV1RawFrameHeader *frame_header) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq = priv->sequence_header; - int num_pos_luma, num_pos_chroma; - int i, err; - - if (!seq->film_grain_params_present || - (!frame_header->show_frame && !frame_header->showable_frame)) - return 0; - - flag(apply_grain); - - if (!current->apply_grain) - return 0; - - fb(16, grain_seed); - - if (frame_header->frame_type == AV1_FRAME_INTER) - flag(update_grain); - else - 
infer(update_grain, 1); - - if (!current->update_grain) { - fb(3, film_grain_params_ref_idx); - return 0; - } - - fc(4, num_y_points, 0, 14); - for (i = 0; i < current->num_y_points; i++) { - fcs(8, point_y_value[i], - i ? current->point_y_value[i - 1] + 1 : 0, - MAX_UINT_BITS(8) - (current->num_y_points - i - 1), - 1, i); - fbs(8, point_y_scaling[i], 1, i); - } - - if (seq->color_config.mono_chrome) - infer(chroma_scaling_from_luma, 0); - else - flag(chroma_scaling_from_luma); - - if (seq->color_config.mono_chrome || - current->chroma_scaling_from_luma || - (seq->color_config.subsampling_x == 1 && - seq->color_config.subsampling_y == 1 && - current->num_y_points == 0)) { - infer(num_cb_points, 0); - infer(num_cr_points, 0); - } else { - fc(4, num_cb_points, 0, 10); - for (i = 0; i < current->num_cb_points; i++) { - fcs(8, point_cb_value[i], - i ? current->point_cb_value[i - 1] + 1 : 0, - MAX_UINT_BITS(8) - (current->num_cb_points - i - 1), - 1, i); - fbs(8, point_cb_scaling[i], 1, i); - } - fc(4, num_cr_points, 0, 10); - for (i = 0; i < current->num_cr_points; i++) { - fcs(8, point_cr_value[i], - i ? 
current->point_cr_value[i - 1] + 1 : 0, - MAX_UINT_BITS(8) - (current->num_cr_points - i - 1), - 1, i); - fbs(8, point_cr_scaling[i], 1, i); - } - } - - fb(2, grain_scaling_minus_8); - fb(2, ar_coeff_lag); - num_pos_luma = 2 * current->ar_coeff_lag * (current->ar_coeff_lag + 1); - if (current->num_y_points) { - num_pos_chroma = num_pos_luma + 1; - for (i = 0; i < num_pos_luma; i++) - fbs(8, ar_coeffs_y_plus_128[i], 1, i); - } else { - num_pos_chroma = num_pos_luma; - } - if (current->chroma_scaling_from_luma || current->num_cb_points) { - for (i = 0; i < num_pos_chroma; i++) - fbs(8, ar_coeffs_cb_plus_128[i], 1, i); - } - if (current->chroma_scaling_from_luma || current->num_cr_points) { - for (i = 0; i < num_pos_chroma; i++) - fbs(8, ar_coeffs_cr_plus_128[i], 1, i); - } - fb(2, ar_coeff_shift_minus_6); - fb(2, grain_scale_shift); - if (current->num_cb_points) { - fb(8, cb_mult); - fb(8, cb_luma_mult); - fb(9, cb_offset); - } - if (current->num_cr_points) { - fb(8, cr_mult); - fb(8, cr_luma_mult); - fb(9, cr_offset); - } - - flag(overlap_flag); - flag(clip_to_restricted_range); - - return 0; -} - -static int FUNC(uncompressed_header)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq; - int id_len, diff_len, all_frames, frame_is_intra, order_hint_bits; - int i, err; - - if (!priv->sequence_header) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "No sequence header available: " - "unable to decode frame header.\n"); - return AVERROR_INVALIDDATA; - } - seq = priv->sequence_header; - - id_len = seq->additional_frame_id_length_minus_1 + - seq->delta_frame_id_length_minus_2 + 3; - all_frames = (1 << AV1_NUM_REF_FRAMES) - 1; - - if (seq->reduced_still_picture_header) { - infer(show_existing_frame, 0); - infer(frame_type, AV1_FRAME_KEY); - infer(show_frame, 1); - infer(showable_frame, 0); - frame_is_intra = 1; - - } else { - flag(show_existing_frame); - - if 
(current->show_existing_frame) { - AV1ReferenceFrameState *ref; - - fb(3, frame_to_show_map_idx); - ref = &priv->ref[current->frame_to_show_map_idx]; - - if (!ref->valid) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Missing reference frame needed for " - "show_existing_frame (frame_to_show_map_idx = %d).\n", - current->frame_to_show_map_idx); - return AVERROR_INVALIDDATA; - } - - if (seq->decoder_model_info_present_flag && - !seq->timing_info.equal_picture_interval) { - fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1, - frame_presentation_time); - } - - if (seq->frame_id_numbers_present_flag) - fb(id_len, display_frame_id); - - infer(frame_type, ref->frame_type); - if (current->frame_type == AV1_FRAME_KEY) { - infer(refresh_frame_flags, all_frames); - - // Section 7.21 - infer(current_frame_id, ref->frame_id); - priv->upscaled_width = ref->upscaled_width; - priv->frame_width = ref->frame_width; - priv->frame_height = ref->frame_height; - priv->render_width = ref->render_width; - priv->render_height = ref->render_height; - priv->bit_depth = ref->bit_depth; - priv->order_hint = ref->order_hint; - } else - infer(refresh_frame_flags, 0); - - infer(frame_width_minus_1, ref->upscaled_width - 1); - infer(frame_height_minus_1, ref->frame_height - 1); - infer(render_width_minus_1, ref->render_width - 1); - infer(render_height_minus_1, ref->render_height - 1); - - // Section 7.20 - goto update_refs; - } - - fb(2, frame_type); - frame_is_intra = (current->frame_type == AV1_FRAME_INTRA_ONLY || - current->frame_type == AV1_FRAME_KEY); - - flag(show_frame); - if (current->show_frame && - seq->decoder_model_info_present_flag && - !seq->timing_info.equal_picture_interval) { - fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1, - frame_presentation_time); - } - if (current->show_frame) - infer(showable_frame, current->frame_type != AV1_FRAME_KEY); - else - flag(showable_frame); - - if (current->frame_type == AV1_FRAME_SWITCH || - 
(current->frame_type == AV1_FRAME_KEY && current->show_frame)) - infer(error_resilient_mode, 1); - else - flag(error_resilient_mode); - } - - if (current->frame_type == AV1_FRAME_KEY && current->show_frame) { - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - priv->ref[i].valid = 0; - priv->ref[i].order_hint = 0; - } - } - - flag(disable_cdf_update); - - if (seq->seq_force_screen_content_tools == - AV1_SELECT_SCREEN_CONTENT_TOOLS) { - flag(allow_screen_content_tools); - } else { - infer(allow_screen_content_tools, - seq->seq_force_screen_content_tools); - } - if (current->allow_screen_content_tools) { - if (seq->seq_force_integer_mv == AV1_SELECT_INTEGER_MV) - flag(force_integer_mv); - else - infer(force_integer_mv, seq->seq_force_integer_mv); - } else { - infer(force_integer_mv, 0); - } - - if (seq->frame_id_numbers_present_flag) { - fb(id_len, current_frame_id); - - diff_len = seq->delta_frame_id_length_minus_2 + 2; - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - if (current->current_frame_id > (1 << diff_len)) { - if (priv->ref[i].frame_id > current->current_frame_id || - priv->ref[i].frame_id < (current->current_frame_id - - (1 << diff_len))) - priv->ref[i].valid = 0; - } else { - if (priv->ref[i].frame_id > current->current_frame_id && - priv->ref[i].frame_id < ((1 << id_len) + - current->current_frame_id - - (1 << diff_len))) - priv->ref[i].valid = 0; - } - } - } else { - infer(current_frame_id, 0); - } - - if (current->frame_type == AV1_FRAME_SWITCH) - infer(frame_size_override_flag, 1); - else if(seq->reduced_still_picture_header) - infer(frame_size_override_flag, 0); - else - flag(frame_size_override_flag); - - order_hint_bits = - seq->enable_order_hint ? 
seq->order_hint_bits_minus_1 + 1 : 0; - if (order_hint_bits > 0) - fb(order_hint_bits, order_hint); - else - infer(order_hint, 0); - priv->order_hint = current->order_hint; - - if (frame_is_intra || current->error_resilient_mode) - infer(primary_ref_frame, AV1_PRIMARY_REF_NONE); - else - fb(3, primary_ref_frame); - - if (seq->decoder_model_info_present_flag) { - flag(buffer_removal_time_present_flag); - if (current->buffer_removal_time_present_flag) { - for (i = 0; i <= seq->operating_points_cnt_minus_1; i++) { - if (seq->decoder_model_present_for_this_op[i]) { - int op_pt_idc = seq->operating_point_idc[i]; - int in_temporal_layer = (op_pt_idc >> priv->temporal_id ) & 1; - int in_spatial_layer = (op_pt_idc >> (priv->spatial_id + 8)) & 1; - if (seq->operating_point_idc[i] == 0 || - (in_temporal_layer && in_spatial_layer)) { - fbs(seq->decoder_model_info.buffer_removal_time_length_minus_1 + 1, - buffer_removal_time[i], 1, i); - } - } - } - } - } - - if (current->frame_type == AV1_FRAME_SWITCH || - (current->frame_type == AV1_FRAME_KEY && current->show_frame)) - infer(refresh_frame_flags, all_frames); - else - fb(8, refresh_frame_flags); - - if (!frame_is_intra || current->refresh_frame_flags != all_frames) { - if (seq->enable_order_hint) { - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - if (current->error_resilient_mode) - fbs(order_hint_bits, ref_order_hint[i], 1, i); - else - infer(ref_order_hint[i], priv->ref[i].order_hint); - if (current->ref_order_hint[i] != priv->ref[i].order_hint) - priv->ref[i].valid = 0; - } - } - } - - if (current->frame_type == AV1_FRAME_KEY || - current->frame_type == AV1_FRAME_INTRA_ONLY) { - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - - if (current->allow_screen_content_tools && - priv->upscaled_width == priv->frame_width) - flag(allow_intrabc); - else - infer(allow_intrabc, 0); - - } else { - if (!seq->enable_order_hint) { - infer(frame_refs_short_signaling, 0); - } else { - 
flag(frame_refs_short_signaling); - if (current->frame_refs_short_signaling) { - fb(3, last_frame_idx); - fb(3, golden_frame_idx); - CHECK(FUNC(set_frame_refs)(ctx, rw, current)); - } - } - - for (i = 0; i < AV1_REFS_PER_FRAME; i++) { - if (!current->frame_refs_short_signaling) - fbs(3, ref_frame_idx[i], 1, i); - if (seq->frame_id_numbers_present_flag) { - fbs(seq->delta_frame_id_length_minus_2 + 2, - delta_frame_id_minus1[i], 1, i); - } - } - - if (current->frame_size_override_flag && - !current->error_resilient_mode) { - CHECK(FUNC(frame_size_with_refs)(ctx, rw, current)); - } else { - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - } - - if (current->force_integer_mv) - infer(allow_high_precision_mv, 0); - else - flag(allow_high_precision_mv); - - CHECK(FUNC(interpolation_filter)(ctx, rw, current)); - - flag(is_motion_mode_switchable); - - if (current->error_resilient_mode || - !seq->enable_ref_frame_mvs) - infer(use_ref_frame_mvs, 0); - else - flag(use_ref_frame_mvs); - - infer(allow_intrabc, 0); - } - - if (!frame_is_intra) { - // Derive reference frame sign biases. - } - - if (seq->reduced_still_picture_header || current->disable_cdf_update) - infer(disable_frame_end_update_cdf, 1); - else - flag(disable_frame_end_update_cdf); - - if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) { - // Init non-coeff CDFs. - // Setup past independence. - } else { - // Load CDF tables from previous frame. - // Load params from previous frame. - } - - if (current->use_ref_frame_mvs) { - // Perform motion field estimation process. - } - - CHECK(FUNC(tile_info)(ctx, rw, current)); - - CHECK(FUNC(quantization_params)(ctx, rw, current)); - - CHECK(FUNC(segmentation_params)(ctx, rw, current)); - - CHECK(FUNC(delta_q_params)(ctx, rw, current)); - - CHECK(FUNC(delta_lf_params)(ctx, rw, current)); - - // Init coeff CDFs / load previous segments. 
- - priv->coded_lossless = 1; - for (i = 0; i < AV1_MAX_SEGMENTS; i++) { - int qindex; - if (current->feature_enabled[i][AV1_SEG_LVL_ALT_Q]) { - qindex = (current->base_q_idx + - current->feature_value[i][AV1_SEG_LVL_ALT_Q]); - } else { - qindex = current->base_q_idx; - } - qindex = av_clip_uintp2(qindex, 8); - - if (qindex || current->delta_q_y_dc || - current->delta_q_u_ac || current->delta_q_u_dc || - current->delta_q_v_ac || current->delta_q_v_dc) { - priv->coded_lossless = 0; - } - } - priv->all_lossless = priv->coded_lossless && - priv->frame_width == priv->upscaled_width; - - CHECK(FUNC(loop_filter_params)(ctx, rw, current)); - - CHECK(FUNC(cdef_params)(ctx, rw, current)); - - CHECK(FUNC(lr_params)(ctx, rw, current)); - - CHECK(FUNC(read_tx_mode)(ctx, rw, current)); - - CHECK(FUNC(frame_reference_mode)(ctx, rw, current)); - - CHECK(FUNC(skip_mode_params)(ctx, rw, current)); - - if (frame_is_intra || current->error_resilient_mode || - !seq->enable_warped_motion) - infer(allow_warped_motion, 0); - else - flag(allow_warped_motion); - - flag(reduced_tx_set); - - CHECK(FUNC(global_motion_params)(ctx, rw, current)); - - CHECK(FUNC(film_grain_params)(ctx, rw, ¤t->film_grain, current)); - - av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame %d: size %dx%d " - "upscaled %d render %dx%d subsample %dx%d " - "bitdepth %d tiles %dx%d.\n", priv->order_hint, - priv->frame_width, priv->frame_height, priv->upscaled_width, - priv->render_width, priv->render_height, - seq->color_config.subsampling_x + 1, - seq->color_config.subsampling_y + 1, priv->bit_depth, - priv->tile_rows, priv->tile_cols); - -update_refs: - for (i = 0; i < AV1_NUM_REF_FRAMES; i++) { - if (current->refresh_frame_flags & (1 << i)) { - priv->ref[i] = (AV1ReferenceFrameState) { - .valid = 1, - .frame_id = current->current_frame_id, - .upscaled_width = priv->upscaled_width, - .frame_width = priv->frame_width, - .frame_height = priv->frame_height, - .render_width = priv->render_width, - .render_height = 
priv->render_height, - .frame_type = current->frame_type, - .subsampling_x = seq->color_config.subsampling_x, - .subsampling_y = seq->color_config.subsampling_y, - .bit_depth = priv->bit_depth, - .order_hint = priv->order_hint, - }; - memcpy(priv->ref[i].loop_filter_ref_deltas, current->loop_filter_ref_deltas, - sizeof(current->loop_filter_ref_deltas)); - memcpy(priv->ref[i].loop_filter_mode_deltas, current->loop_filter_mode_deltas, - sizeof(current->loop_filter_mode_deltas)); - memcpy(priv->ref[i].feature_enabled, current->feature_enabled, - sizeof(current->feature_enabled)); - memcpy(priv->ref[i].feature_value, current->feature_value, - sizeof(current->feature_value)); - } - } - - return 0; -} - -static int FUNC(frame_header_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrameHeader *current, int redundant, - AVBufferRef *rw_buffer_ref) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int start_pos, fh_bits, fh_bytes, err; - uint8_t *fh_start; - - if (priv->seen_frame_header) { - if (!redundant) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid repeated " - "frame header OBU.\n"); - return AVERROR_INVALIDDATA; - } else { - GetBitContext fh; - size_t i, b; - uint32_t val; - - HEADER("Redundant Frame Header"); - - av_assert0(priv->frame_header_ref && priv->frame_header); - - init_get_bits(&fh, priv->frame_header, - priv->frame_header_size); - for (i = 0; i < priv->frame_header_size; i += 8) { - b = FFMIN(priv->frame_header_size - i, 8); - val = get_bits(&fh, b); - xf(b, frame_header_copy[i], - val, val, val, 1, i / 8); - } - } - } else { - if (redundant) - HEADER("Redundant Frame Header (used as Frame Header)"); - else - HEADER("Frame Header"); - -#ifdef READ - start_pos = get_bits_count(rw); -#else - start_pos = put_bits_count(rw); -#endif - - CHECK(FUNC(uncompressed_header)(ctx, rw, current)); - - priv->tile_num = 0; - - if (current->show_existing_frame) { - priv->seen_frame_header = 0; - } else { - priv->seen_frame_header = 1; - - 
av_buffer_unref(&priv->frame_header_ref); - -#ifdef READ - fh_bits = get_bits_count(rw) - start_pos; - fh_start = (uint8_t*)rw->buffer + start_pos / 8; -#else - // Need to flush the bitwriter so that we can copy its output, - // but use a copy so we don't affect the caller's structure. - { - PutBitContext tmp = *rw; - flush_put_bits(&tmp); - } - - fh_bits = put_bits_count(rw) - start_pos; - fh_start = rw->buf + start_pos / 8; -#endif - fh_bytes = (fh_bits + 7) / 8; - - priv->frame_header_size = fh_bits; - - if (rw_buffer_ref) { - priv->frame_header_ref = av_buffer_ref(rw_buffer_ref); - if (!priv->frame_header_ref) - return AVERROR(ENOMEM); - priv->frame_header = fh_start; - } else { - priv->frame_header_ref = - av_buffer_alloc(fh_bytes + AV_INPUT_BUFFER_PADDING_SIZE); - if (!priv->frame_header_ref) - return AVERROR(ENOMEM); - priv->frame_header = priv->frame_header_ref->data; - memcpy(priv->frame_header, fh_start, fh_bytes); - } - } - } - - return 0; -} - -static int FUNC(tile_group_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawTileGroup *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - int num_tiles, tile_bits; - int err; - - HEADER("Tile Group"); - - num_tiles = priv->tile_cols * priv->tile_rows; - if (num_tiles > 1) - flag(tile_start_and_end_present_flag); - else - infer(tile_start_and_end_present_flag, 0); - - if (num_tiles == 1 || !current->tile_start_and_end_present_flag) { - infer(tg_start, 0); - infer(tg_end, num_tiles - 1); - } else { - tile_bits = cbs_av1_tile_log2(1, priv->tile_cols) + - cbs_av1_tile_log2(1, priv->tile_rows); - fc(tile_bits, tg_start, priv->tile_num, num_tiles - 1); - fc(tile_bits, tg_end, current->tg_start, num_tiles - 1); - } - - priv->tile_num = current->tg_end + 1; - - CHECK(FUNC(byte_alignment)(ctx, rw)); - - // Reset header for next frame. - if (current->tg_end == num_tiles - 1) - priv->seen_frame_header = 0; - - // Tile data follows. 
- - return 0; -} - -static int FUNC(frame_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawFrame *current, - AVBufferRef *rw_buffer_ref) -{ - int err; - - CHECK(FUNC(frame_header_obu)(ctx, rw, ¤t->header, - 0, rw_buffer_ref)); - - CHECK(FUNC(byte_alignment)(ctx, rw)); - - CHECK(FUNC(tile_group_obu)(ctx, rw, ¤t->tile_group)); - - return 0; -} - -static int FUNC(tile_list_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawTileList *current) -{ - int err; - - fb(8, output_frame_width_in_tiles_minus_1); - fb(8, output_frame_height_in_tiles_minus_1); - - fb(16, tile_count_minus_1); - - // Tile data follows. - - return 0; -} - -static int FUNC(metadata_hdr_cll)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataHDRCLL *current) -{ - int err; - - fb(16, max_cll); - fb(16, max_fall); - - return 0; -} - -static int FUNC(metadata_hdr_mdcv)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataHDRMDCV *current) -{ - int err, i; - - for (i = 0; i < 3; i++) { - fbs(16, primary_chromaticity_x[i], 1, i); - fbs(16, primary_chromaticity_y[i], 1, i); - } - - fb(16, white_point_chromaticity_x); - fb(16, white_point_chromaticity_y); - - fc(32, luminance_max, 1, MAX_UINT_BITS(32)); - // luminance_min must be lower than luminance_max. Convert luminance_max from - // 24.8 fixed point to 18.14 fixed point in order to compare them. 
- fc(32, luminance_min, 0, FFMIN(((uint64_t)current->luminance_max << 6) - 1, - MAX_UINT_BITS(32))); - - return 0; -} - -static int FUNC(scalability_structure)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataScalability *current) -{ - CodedBitstreamAV1Context *priv = ctx->priv_data; - const AV1RawSequenceHeader *seq; - int err, i, j; - - if (!priv->sequence_header) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "No sequence header available: " - "unable to parse scalability metadata.\n"); - return AVERROR_INVALIDDATA; - } - seq = priv->sequence_header; - - fb(2, spatial_layers_cnt_minus_1); - flag(spatial_layer_dimensions_present_flag); - flag(spatial_layer_description_present_flag); - flag(temporal_group_description_present_flag); - fc(3, scalability_structure_reserved_3bits, 0, 0); - if (current->spatial_layer_dimensions_present_flag) { - for (i = 0; i <= current->spatial_layers_cnt_minus_1; i++) { - fcs(16, spatial_layer_max_width[i], - 0, seq->max_frame_width_minus_1 + 1, 1, i); - fcs(16, spatial_layer_max_height[i], - 0, seq->max_frame_height_minus_1 + 1, 1, i); - } - } - if (current->spatial_layer_description_present_flag) { - for (i = 0; i <= current->spatial_layers_cnt_minus_1; i++) - fbs(8, spatial_layer_ref_id[i], 1, i); - } - if (current->temporal_group_description_present_flag) { - fb(8, temporal_group_size); - for (i = 0; i < current->temporal_group_size; i++) { - fbs(3, temporal_group_temporal_id[i], 1, i); - flags(temporal_group_temporal_switching_up_point_flag[i], 1, i); - flags(temporal_group_spatial_switching_up_point_flag[i], 1, i); - fbs(3, temporal_group_ref_cnt[i], 1, i); - for (j = 0; j < current->temporal_group_ref_cnt[i]; j++) { - fbs(8, temporal_group_ref_pic_diff[i][j], 2, i, j); - } - } - } - - return 0; -} - -static int FUNC(metadata_scalability)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataScalability *current) -{ - int err; - - fb(8, scalability_mode_idc); - - if (current->scalability_mode_idc == AV1_SCALABILITY_SS) - 
CHECK(FUNC(scalability_structure)(ctx, rw, current)); - - return 0; -} - -static int FUNC(metadata_itut_t35)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataITUTT35 *current) -{ - int err; - size_t i; - - fb(8, itu_t_t35_country_code); - if (current->itu_t_t35_country_code == 0xff) - fb(8, itu_t_t35_country_code_extension_byte); - -#ifdef READ - // The payload runs up to the start of the trailing bits, but there might - // be arbitrarily many trailing zeroes so we need to read through twice. - current->payload_size = cbs_av1_get_payload_bytes_left(rw); - - current->payload_ref = av_buffer_alloc(current->payload_size); - if (!current->payload_ref) - return AVERROR(ENOMEM); - current->payload = current->payload_ref->data; -#endif - - for (i = 0; i < current->payload_size; i++) - xf(8, itu_t_t35_payload_bytes[i], current->payload[i], - 0x00, 0xff, 1, i); - - return 0; -} - -static int FUNC(metadata_timecode)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadataTimecode *current) -{ - int err; - - fb(5, counting_type); - flag(full_timestamp_flag); - flag(discontinuity_flag); - flag(cnt_dropped_flag); - fb(9, n_frames); - - if (current->full_timestamp_flag) { - fc(6, seconds_value, 0, 59); - fc(6, minutes_value, 0, 59); - fc(5, hours_value, 0, 23); - } else { - flag(seconds_flag); - if (current->seconds_flag) { - fc(6, seconds_value, 0, 59); - flag(minutes_flag); - if (current->minutes_flag) { - fc(6, minutes_value, 0, 59); - flag(hours_flag); - if (current->hours_flag) - fc(5, hours_value, 0, 23); - } - } - } - - fb(5, time_offset_length); - if (current->time_offset_length > 0) - fb(current->time_offset_length, time_offset_value); - else - infer(time_offset_length, 0); - - return 0; -} - -static int FUNC(metadata_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawMetadata *current) -{ - int err; - - leb128(metadata_type); - - switch (current->metadata_type) { - case AV1_METADATA_TYPE_HDR_CLL: - CHECK(FUNC(metadata_hdr_cll)(ctx, rw, 
¤t->metadata.hdr_cll)); - break; - case AV1_METADATA_TYPE_HDR_MDCV: - CHECK(FUNC(metadata_hdr_mdcv)(ctx, rw, ¤t->metadata.hdr_mdcv)); - break; - case AV1_METADATA_TYPE_SCALABILITY: - CHECK(FUNC(metadata_scalability)(ctx, rw, ¤t->metadata.scalability)); - break; - case AV1_METADATA_TYPE_ITUT_T35: - CHECK(FUNC(metadata_itut_t35)(ctx, rw, ¤t->metadata.itut_t35)); - break; - case AV1_METADATA_TYPE_TIMECODE: - CHECK(FUNC(metadata_timecode)(ctx, rw, ¤t->metadata.timecode)); - break; - default: - // Unknown metadata type. - return AVERROR_PATCHWELCOME; - } - - return 0; -} - -static int FUNC(padding_obu)(CodedBitstreamContext *ctx, RWContext *rw, - AV1RawPadding *current) -{ - int i, err; - - HEADER("Padding"); - -#ifdef READ - // The payload runs up to the start of the trailing bits, but there might - // be arbitrarily many trailing zeroes so we need to read through twice. - current->payload_size = cbs_av1_get_payload_bytes_left(rw); - - current->payload_ref = av_buffer_alloc(current->payload_size); - if (!current->payload_ref) - return AVERROR(ENOMEM); - current->payload = current->payload_ref->data; -#endif - - for (i = 0; i < current->payload_size; i++) - xf(8, obu_padding_byte[i], current->payload[i], 0x00, 0xff, 1, i); - - return 0; -} diff --git a/third-party/cbs/cbs_h2645.c b/third-party/cbs/cbs_h2645.c deleted file mode 100644 index 6a67c9143c4..00000000000 --- a/third-party/cbs/cbs_h2645.c +++ /dev/null @@ -1,1682 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include "libavutil/attributes.h" -#include "libavutil/avassert.h" - -// [sunshine] Changed include path -#include "bytestream.h" -#include "cbs/cbs.h" -#include "cbs_internal.h" -#include "cbs/cbs_h264.h" -#include "cbs/cbs_h265.h" -#include "cbs/h264.h" -#include "cbs/h2645_parse.h" -#include "cbs/hevc.h" - -// [sunshine] Added to resolve missing symbols -#include "intmath.h" -#include "log2_tab.c" - -static int cbs_read_ue_golomb(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, const int *subscripts, - uint32_t *write_to, - uint32_t range_min, uint32_t range_max) -{ - uint32_t value; - int position, i, j; - unsigned int k; - char bits[65]; - - position = get_bits_count(gbc); - - for (i = 0; i < 32; i++) { - if (get_bits_left(gbc) < i + 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - k = get_bits1(gbc); - bits[i] = k ? '1' : '0'; - if (k) - break; - } - if (i >= 32) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at " - "%s: more than 31 zeroes.\n", name); - return AVERROR_INVALIDDATA; - } - value = 1; - for (j = 0; j < i; j++) { - k = get_bits1(gbc); - bits[i + j + 1] = k ? 
'1' : '0'; - value = value << 1 | k; - } - bits[i + j + 1] = 0; - --value; - - if (ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; -} - -static int cbs_read_se_golomb(CodedBitstreamContext *ctx, GetBitContext *gbc, - const char *name, const int *subscripts, - int32_t *write_to, - int32_t range_min, int32_t range_max) -{ - int32_t value; - int position, i, j; - unsigned int k; - uint32_t v; - char bits[65]; - - position = get_bits_count(gbc); - - for (i = 0; i < 32; i++) { - if (get_bits_left(gbc) < i + 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - k = get_bits1(gbc); - bits[i] = k ? '1' : '0'; - if (k) - break; - } - if (i >= 32) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at " - "%s: more than 31 zeroes.\n", name); - return AVERROR_INVALIDDATA; - } - v = 1; - for (j = 0; j < i; j++) { - k = get_bits1(gbc); - bits[i + j + 1] = k ? 
'1' : '0'; - v = v << 1 | k; - } - bits[i + j + 1] = 0; - if (v & 1) - value = -(int32_t)(v / 2); - else - value = v / 2; - - if (ctx->trace_enable) - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - *write_to = value; - return 0; -} - -static int cbs_write_ue_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, const int *subscripts, - uint32_t value, - uint32_t range_min, uint32_t range_max) -{ - int len; - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - av_assert0(value != UINT32_MAX); - - len = av_log2(value + 1); - if (put_bits_left(pbc) < 2 * len + 1) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[65]; - int i; - - for (i = 0; i < len; i++) - bits[i] = '0'; - bits[len] = '1'; - for (i = 0; i < len; i++) - bits[len + i + 1] = (value + 1) >> (len - i - 1) & 1 ? 
'1' : '0'; - bits[len + len + 1] = 0; - - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } - - put_bits(pbc, len, 0); - if (len + 1 < 32) - put_bits(pbc, len + 1, value + 1); - else - put_bits32(pbc, value + 1); - - return 0; -} - -static int cbs_write_se_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc, - const char *name, const int *subscripts, - int32_t value, - int32_t range_min, int32_t range_max) -{ - int len; - uint32_t uvalue; - - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRId32", but must be in [%"PRId32",%"PRId32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - av_assert0(value != INT32_MIN); - - if (value == 0) - uvalue = 0; - else if (value > 0) - uvalue = 2 * (uint32_t)value - 1; - else - uvalue = 2 * (uint32_t)-value; - - len = av_log2(uvalue + 1); - if (put_bits_left(pbc) < 2 * len + 1) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[65]; - int i; - - for (i = 0; i < len; i++) - bits[i] = '0'; - bits[len] = '1'; - for (i = 0; i < len; i++) - bits[len + i + 1] = (uvalue + 1) >> (len - i - 1) & 1 ? '1' : '0'; - bits[len + len + 1] = 0; - - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } - - put_bits(pbc, len, 0); - if (len + 1 < 32) - put_bits(pbc, len + 1, uvalue + 1); - else - put_bits32(pbc, uvalue + 1); - - return 0; -} - -// payload_extension_present() - true if we are before the last 1-bit -// in the payload structure, which must be in the last byte. 
-static int cbs_h265_payload_extension_present(GetBitContext *gbc, uint32_t payload_size, - int cur_pos) -{ - int bits_left = payload_size * 8 - cur_pos; - return (bits_left > 0 && - (bits_left > 7 || show_bits(gbc, bits_left) & MAX_UINT_BITS(bits_left - 1))); -} - -#define HEADER(name) do { \ - ff_cbs_trace_header(ctx, name); \ - } while (0) - -#define CHECK(call) do { \ - err = (call); \ - if (err < 0) \ - return err; \ - } while (0) - -#define FUNC_NAME2(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name -#define FUNC_NAME1(rw, codec, name) FUNC_NAME2(rw, codec, name) -#define FUNC_H264(name) FUNC_NAME1(READWRITE, h264, name) -#define FUNC_H265(name) FUNC_NAME1(READWRITE, h265, name) -#define FUNC_SEI(name) FUNC_NAME1(READWRITE, sei, name) - -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) - -#define u(width, name, range_min, range_max) \ - xu(width, name, current->name, range_min, range_max, 0, ) -#define ub(width, name) \ - xu(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) -#define flag(name) ub(1, name) -#define ue(name, range_min, range_max) \ - xue(name, current->name, range_min, range_max, 0, ) -#define i(width, name, range_min, range_max) \ - xi(width, name, current->name, range_min, range_max, 0, ) -#define ib(width, name) \ - xi(width, name, current->name, MIN_INT_BITS(width), MAX_INT_BITS(width), 0, ) -#define se(name, range_min, range_max) \ - xse(name, current->name, range_min, range_max, 0, ) - -#define us(width, name, range_min, range_max, subs, ...) \ - xu(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) -#define ubs(width, name, subs, ...) \ - xu(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) -#define flags(name, subs, ...) \ - xu(1, name, current->name, 0, 1, subs, __VA_ARGS__) -#define ues(name, range_min, range_max, subs, ...) 
\ - xue(name, current->name, range_min, range_max, subs, __VA_ARGS__) -#define is(width, name, range_min, range_max, subs, ...) \ - xi(width, name, current->name, range_min, range_max, subs, __VA_ARGS__) -#define ibs(width, name, subs, ...) \ - xi(width, name, current->name, MIN_INT_BITS(width), MAX_INT_BITS(width), subs, __VA_ARGS__) -#define ses(name, range_min, range_max, subs, ...) \ - xse(name, current->name, range_min, range_max, subs, __VA_ARGS__) - -#define fixed(width, name, value) do { \ - av_unused uint32_t fixed_value = value; \ - xu(width, name, fixed_value, value, value, 0, ); \ - } while (0) - - -#define READ -#define READWRITE read -#define RWContext GetBitContext - -#define xu(width, name, var, range_min, range_max, subs, ...) do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while (0) -#define xue(name, var, range_min, range_max, subs, ...) do { \ - uint32_t value; \ - CHECK(cbs_read_ue_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while (0) -#define xi(width, name, var, range_min, range_max, subs, ...) do { \ - int32_t value; \ - CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while (0) -#define xse(name, var, range_min, range_max, subs, ...) 
do { \ - int32_t value; \ - CHECK(cbs_read_se_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while (0) - - -#define infer(name, value) do { \ - current->name = value; \ - } while (0) - -static int cbs_h2645_read_more_rbsp_data(GetBitContext *gbc) -{ - int bits_left = get_bits_left(gbc); - if (bits_left > 8) - return 1; - if (bits_left == 0) - return 0; - if (show_bits(gbc, bits_left) & MAX_UINT_BITS(bits_left - 1)) - return 1; - return 0; -} - -#define more_rbsp_data(var) ((var) = cbs_h2645_read_more_rbsp_data(rw)) - -#define bit_position(rw) (get_bits_count(rw)) -#define byte_alignment(rw) (get_bits_count(rw) % 8) - -#define allocate(name, size) do { \ - name ## _ref = av_buffer_allocz(size + \ - AV_INPUT_BUFFER_PADDING_SIZE); \ - if (!name ## _ref) \ - return AVERROR(ENOMEM); \ - name = name ## _ref->data; \ - } while (0) - -#define FUNC(name) FUNC_SEI(name) -#include "cbs_sei_syntax_template.c" -#undef FUNC - -#define FUNC(name) FUNC_H264(name) -#include "cbs_h264_syntax_template.c" -#undef FUNC - -#define FUNC(name) FUNC_H265(name) -#include "cbs_h265_syntax_template.c" -#undef FUNC - -#undef READ -#undef READWRITE -#undef RWContext -#undef xu -#undef xi -#undef xue -#undef xse -#undef infer -#undef more_rbsp_data -#undef bit_position -#undef byte_alignment -#undef allocate - - -#define WRITE -#define READWRITE write -#define RWContext PutBitContext - -#define xu(width, name, var, range_min, range_max, subs, ...) do { \ - uint32_t value = var; \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while (0) -#define xue(name, var, range_min, range_max, subs, ...) do { \ - uint32_t value = var; \ - CHECK(cbs_write_ue_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while (0) -#define xi(width, name, var, range_min, range_max, subs, ...) 
do { \ - int32_t value = var; \ - CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while (0) -#define xse(name, var, range_min, range_max, subs, ...) do { \ - int32_t value = var; \ - CHECK(cbs_write_se_golomb(ctx, rw, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while (0) - -#define infer(name, value) do { \ - if (current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, \ - "%s does not match inferred value: " \ - "%"PRId64", but should be %"PRId64".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - return AVERROR_INVALIDDATA; \ - } \ - } while (0) - -#define more_rbsp_data(var) (var) - -#define bit_position(rw) (put_bits_count(rw)) -#define byte_alignment(rw) (put_bits_count(rw) % 8) - -#define allocate(name, size) do { \ - if (!name) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s must be set " \ - "for writing.\n", #name); \ - return AVERROR_INVALIDDATA; \ - } \ - } while (0) - -#define FUNC(name) FUNC_SEI(name) -#include "cbs_sei_syntax_template.c" -#undef FUNC - -#define FUNC(name) FUNC_H264(name) -#include "cbs_h264_syntax_template.c" -#undef FUNC - -#define FUNC(name) FUNC_H265(name) -#include "cbs_h265_syntax_template.c" -#undef FUNC - -#undef WRITE -#undef READWRITE -#undef RWContext -#undef xu -#undef xi -#undef xue -#undef xse -#undef u -#undef i -#undef flag -#undef ue -#undef se -#undef infer -#undef more_rbsp_data -#undef bit_position -#undef byte_alignment -#undef allocate - - -static int cbs_h2645_fragment_add_nals(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const H2645Packet *packet) -{ - int err, i; - - for (i = 0; i < packet->nb_nals; i++) { - const H2645NAL *nal = &packet->nals[i]; - AVBufferRef *ref; - size_t size = nal->size; - - if (nal->nuh_layer_id > 0) - continue; - - // Remove trailing zeroes. 
- while (size > 0 && nal->data[size - 1] == 0) - --size; - if (size == 0) { - av_log(ctx->log_ctx, AV_LOG_VERBOSE, "Discarding empty 0 NAL unit\n"); - continue; - } - - ref = (nal->data == nal->raw_data) ? frag->data_ref - : packet->rbsp.rbsp_buffer_ref; - - err = ff_cbs_append_unit_data(frag, nal->type, - (uint8_t*)nal->data, size, ref); - if (err < 0) - return err; - } - - return 0; -} - -static int cbs_h2645_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) -{ - enum AVCodecID codec_id = ctx->codec->codec_id; - CodedBitstreamH2645Context *priv = ctx->priv_data; - GetByteContext gbc; - int err; - - av_assert0(frag->data && frag->nb_units == 0); - if (frag->data_size == 0) - return 0; - - if (header && frag->data[0] && codec_id == AV_CODEC_ID_H264) { - // AVCC header. - size_t size, start, end; - int i, count, version; - - priv->mp4 = 1; - - bytestream2_init(&gbc, frag->data, frag->data_size); - - if (bytestream2_get_bytes_left(&gbc) < 6) - return AVERROR_INVALIDDATA; - - version = bytestream2_get_byte(&gbc); - if (version != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid AVCC header: " - "first byte %u.\n", version); - return AVERROR_INVALIDDATA; - } - - bytestream2_skip(&gbc, 3); - priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1; - - // SPS array. 
- count = bytestream2_get_byte(&gbc) & 0x1f; - start = bytestream2_tell(&gbc); - for (i = 0; i < count; i++) { - if (bytestream2_get_bytes_left(&gbc) < 2 * (count - i)) - return AVERROR_INVALIDDATA; - size = bytestream2_get_be16(&gbc); - if (bytestream2_get_bytes_left(&gbc) < size) - return AVERROR_INVALIDDATA; - bytestream2_skip(&gbc, size); - } - end = bytestream2_tell(&gbc); - - err = ff_h2645_packet_split(&priv->read_packet, - frag->data + start, end - start, - ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1, 1); - if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC SPS array.\n"); - return err; - } - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if (err < 0) - return err; - - // PPS array. - count = bytestream2_get_byte(&gbc); - start = bytestream2_tell(&gbc); - for (i = 0; i < count; i++) { - if (bytestream2_get_bytes_left(&gbc) < 2 * (count - i)) - return AVERROR_INVALIDDATA; - size = bytestream2_get_be16(&gbc); - if (bytestream2_get_bytes_left(&gbc) < size) - return AVERROR_INVALIDDATA; - bytestream2_skip(&gbc, size); - } - end = bytestream2_tell(&gbc); - - err = ff_h2645_packet_split(&priv->read_packet, - frag->data + start, end - start, - ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1, 1); - if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC PPS array.\n"); - return err; - } - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if (err < 0) - return err; - - if (bytestream2_get_bytes_left(&gbc) > 0) { - av_log(ctx->log_ctx, AV_LOG_WARNING, "%u bytes left at end of AVCC " - "header.\n", bytestream2_get_bytes_left(&gbc)); - } - - } else if (header && frag->data[0] && codec_id == AV_CODEC_ID_HEVC) { - // HVCC header. 
- size_t size, start, end; - int i, j, nb_arrays, nal_unit_type, nb_nals, version; - - priv->mp4 = 1; - - bytestream2_init(&gbc, frag->data, frag->data_size); - - if (bytestream2_get_bytes_left(&gbc) < 23) - return AVERROR_INVALIDDATA; - - version = bytestream2_get_byte(&gbc); - if (version != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid HVCC header: " - "first byte %u.\n", version); - return AVERROR_INVALIDDATA; - } - - bytestream2_skip(&gbc, 20); - priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1; - - nb_arrays = bytestream2_get_byte(&gbc); - for (i = 0; i < nb_arrays; i++) { - nal_unit_type = bytestream2_get_byte(&gbc) & 0x3f; - nb_nals = bytestream2_get_be16(&gbc); - - start = bytestream2_tell(&gbc); - for (j = 0; j < nb_nals; j++) { - if (bytestream2_get_bytes_left(&gbc) < 2) - return AVERROR_INVALIDDATA; - size = bytestream2_get_be16(&gbc); - if (bytestream2_get_bytes_left(&gbc) < size) - return AVERROR_INVALIDDATA; - bytestream2_skip(&gbc, size); - } - end = bytestream2_tell(&gbc); - - err = ff_h2645_packet_split(&priv->read_packet, - frag->data + start, end - start, - ctx->log_ctx, 1, 2, AV_CODEC_ID_HEVC, 1, 1); - if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split " - "HVCC array %d (%d NAL units of type %d).\n", - i, nb_nals, nal_unit_type); - return err; - } - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if (err < 0) - return err; - } - - } else { - // Annex B, or later MP4 with already-known parameters. 
- - err = ff_h2645_packet_split(&priv->read_packet, - frag->data, frag->data_size, - ctx->log_ctx, - priv->mp4, priv->nal_length_size, - codec_id, 1, 1); - if (err < 0) - return err; - - err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet); - if (err < 0) - return err; - } - - return 0; -} - -#define cbs_h2645_replace_ps(h26n, ps_name, ps_var, id_element) \ -static int cbs_h26 ## h26n ## _replace_ ## ps_var(CodedBitstreamContext *ctx, \ - CodedBitstreamUnit *unit) \ -{ \ - CodedBitstreamH26 ## h26n ## Context *priv = ctx->priv_data; \ - H26 ## h26n ## Raw ## ps_name *ps_var = unit->content; \ - unsigned int id = ps_var->id_element; \ - int err; \ - if (id >= FF_ARRAY_ELEMS(priv->ps_var)) { \ - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid " #ps_name \ - " id : %d.\n", id); \ - return AVERROR_INVALIDDATA; \ - } \ - err = ff_cbs_make_unit_refcounted(ctx, unit); \ - if (err < 0) \ - return err; \ - if (priv->ps_var[id] == priv->active_ ## ps_var) \ - priv->active_ ## ps_var = NULL ; \ - av_buffer_unref(&priv->ps_var ## _ref[id]); \ - av_assert0(unit->content_ref); \ - priv->ps_var ## _ref[id] = av_buffer_ref(unit->content_ref); \ - if (!priv->ps_var ## _ref[id]) \ - return AVERROR(ENOMEM); \ - priv->ps_var[id] = (H26 ## h26n ## Raw ## ps_name *)priv->ps_var ## _ref[id]->data; \ - return 0; \ -} - -cbs_h2645_replace_ps(4, SPS, sps, seq_parameter_set_id) -cbs_h2645_replace_ps(4, PPS, pps, pic_parameter_set_id) -cbs_h2645_replace_ps(5, VPS, vps, vps_video_parameter_set_id) -cbs_h2645_replace_ps(5, SPS, sps, sps_seq_parameter_set_id) -cbs_h2645_replace_ps(5, PPS, pps, pps_pic_parameter_set_id) - -static int cbs_h264_read_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if (err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if (err < 0) - return err; - - switch (unit->type) { - case H264_NAL_SPS: - { - H264RawSPS *sps = 
unit->content; - - err = cbs_h264_read_sps(ctx, &gbc, sps); - if (err < 0) - return err; - - err = cbs_h264_replace_sps(ctx, unit); - if (err < 0) - return err; - } - break; - - case H264_NAL_SPS_EXT: - { - err = cbs_h264_read_sps_extension(ctx, &gbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_PPS: - { - H264RawPPS *pps = unit->content; - - err = cbs_h264_read_pps(ctx, &gbc, pps); - if (err < 0) - return err; - - err = cbs_h264_replace_pps(ctx, unit); - if (err < 0) - return err; - } - break; - - case H264_NAL_SLICE: - case H264_NAL_IDR_SLICE: - case H264_NAL_AUXILIARY_SLICE: - { - H264RawSlice *slice = unit->content; - int pos, len; - - err = cbs_h264_read_slice_header(ctx, &gbc, &slice->header); - if (err < 0) - return err; - - if (!cbs_h2645_read_more_rbsp_data(&gbc)) - return AVERROR_INVALIDDATA; - - pos = get_bits_count(&gbc); - len = unit->data_size; - - slice->data_size = len - pos / 8; - slice->data_ref = av_buffer_ref(unit->data_ref); - if (!slice->data_ref) - return AVERROR(ENOMEM); - slice->data = unit->data + pos / 8; - slice->data_bit_start = pos % 8; - } - break; - - case H264_NAL_AUD: - { - err = cbs_h264_read_aud(ctx, &gbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_SEI: - { - err = cbs_h264_read_sei(ctx, &gbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_FILLER_DATA: - { - err = cbs_h264_read_filler(ctx, &gbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_END_SEQUENCE: - case H264_NAL_END_STREAM: - { - err = (unit->type == H264_NAL_END_SEQUENCE ? 
- cbs_h264_read_end_of_sequence : - cbs_h264_read_end_of_stream)(ctx, &gbc, unit->content); - if (err < 0) - return err; - } - break; - - default: - return AVERROR(ENOSYS); - } - - return 0; -} - -static int cbs_h265_read_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if (err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if (err < 0) - return err; - - switch (unit->type) { - case HEVC_NAL_VPS: - { - H265RawVPS *vps = unit->content; - - err = cbs_h265_read_vps(ctx, &gbc, vps); - if (err < 0) - return err; - - err = cbs_h265_replace_vps(ctx, unit); - if (err < 0) - return err; - } - break; - case HEVC_NAL_SPS: - { - H265RawSPS *sps = unit->content; - - err = cbs_h265_read_sps(ctx, &gbc, sps); - if (err < 0) - return err; - - err = cbs_h265_replace_sps(ctx, unit); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_PPS: - { - H265RawPPS *pps = unit->content; - - err = cbs_h265_read_pps(ctx, &gbc, pps); - if (err < 0) - return err; - - err = cbs_h265_replace_pps(ctx, unit); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_TRAIL_N: - case HEVC_NAL_TRAIL_R: - case HEVC_NAL_TSA_N: - case HEVC_NAL_TSA_R: - case HEVC_NAL_STSA_N: - case HEVC_NAL_STSA_R: - case HEVC_NAL_RADL_N: - case HEVC_NAL_RADL_R: - case HEVC_NAL_RASL_N: - case HEVC_NAL_RASL_R: - case HEVC_NAL_BLA_W_LP: - case HEVC_NAL_BLA_W_RADL: - case HEVC_NAL_BLA_N_LP: - case HEVC_NAL_IDR_W_RADL: - case HEVC_NAL_IDR_N_LP: - case HEVC_NAL_CRA_NUT: - { - H265RawSlice *slice = unit->content; - int pos, len; - - err = cbs_h265_read_slice_segment_header(ctx, &gbc, &slice->header); - if (err < 0) - return err; - - if (!cbs_h2645_read_more_rbsp_data(&gbc)) - return AVERROR_INVALIDDATA; - - pos = get_bits_count(&gbc); - len = unit->data_size; - - slice->data_size = len - pos / 8; - slice->data_ref = av_buffer_ref(unit->data_ref); - if (!slice->data_ref) - return 
AVERROR(ENOMEM); - slice->data = unit->data + pos / 8; - slice->data_bit_start = pos % 8; - } - break; - - case HEVC_NAL_AUD: - { - err = cbs_h265_read_aud(ctx, &gbc, unit->content); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_SEI_PREFIX: - case HEVC_NAL_SEI_SUFFIX: - { - err = cbs_h265_read_sei(ctx, &gbc, unit->content, - unit->type == HEVC_NAL_SEI_PREFIX); - - if (err < 0) - return err; - } - break; - - default: - return AVERROR(ENOSYS); - } - - return 0; -} - -static int cbs_h2645_write_slice_data(CodedBitstreamContext *ctx, - PutBitContext *pbc, const uint8_t *data, - size_t data_size, int data_bit_start) -{ - size_t rest = data_size - (data_bit_start + 7) / 8; - const uint8_t *pos = data + data_bit_start / 8; - - av_assert0(data_bit_start >= 0 && - data_size > data_bit_start / 8); - - if (data_size * 8 + 8 > put_bits_left(pbc)) - return AVERROR(ENOSPC); - - if (!rest) - goto rbsp_stop_one_bit; - - // First copy the remaining bits of the first byte - // The above check ensures that we do not accidentally - // copy beyond the rbsp_stop_one_bit. - if (data_bit_start % 8) - put_bits(pbc, 8 - data_bit_start % 8, - *pos++ & MAX_UINT_BITS(8 - data_bit_start % 8)); - - if (put_bits_count(pbc) % 8 == 0) { - // If the writer is aligned at this point, - // memcpy can be used to improve performance. - // This happens normally for CABAC. - flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), pos, rest); - skip_put_bytes(pbc, rest); - } else { - // If not, we have to copy manually. - // rbsp_stop_one_bit forces us to special-case - // the last byte. - uint8_t temp; - int i; - - for (; rest > 4; rest -= 4, pos += 4) - put_bits32(pbc, AV_RB32(pos)); - - for (; rest > 1; rest--, pos++) - put_bits(pbc, 8, *pos); - - rbsp_stop_one_bit: - temp = rest ? *pos : *pos & MAX_UINT_BITS(8 - data_bit_start % 8); - - av_assert0(temp); - i = ff_ctz(*pos); - temp = temp >> i; - i = rest ? 
(8 - i) : (8 - i - data_bit_start % 8); - put_bits(pbc, i, temp); - if (put_bits_count(pbc) % 8) - put_bits(pbc, 8 - put_bits_count(pbc) % 8, 0); - } - - return 0; -} - -static int cbs_h264_write_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - int err; - - switch (unit->type) { - case H264_NAL_SPS: - { - H264RawSPS *sps = unit->content; - - err = cbs_h264_write_sps(ctx, pbc, sps); - if (err < 0) - return err; - - err = cbs_h264_replace_sps(ctx, unit); - if (err < 0) - return err; - } - break; - - case H264_NAL_SPS_EXT: - { - H264RawSPSExtension *sps_ext = unit->content; - - err = cbs_h264_write_sps_extension(ctx, pbc, sps_ext); - if (err < 0) - return err; - } - break; - - case H264_NAL_PPS: - { - H264RawPPS *pps = unit->content; - - err = cbs_h264_write_pps(ctx, pbc, pps); - if (err < 0) - return err; - - err = cbs_h264_replace_pps(ctx, unit); - if (err < 0) - return err; - } - break; - - case H264_NAL_SLICE: - case H264_NAL_IDR_SLICE: - case H264_NAL_AUXILIARY_SLICE: - { - H264RawSlice *slice = unit->content; - - err = cbs_h264_write_slice_header(ctx, pbc, &slice->header); - if (err < 0) - return err; - - if (slice->data) { - err = cbs_h2645_write_slice_data(ctx, pbc, slice->data, - slice->data_size, - slice->data_bit_start); - if (err < 0) - return err; - } else { - // No slice data - that was just the header. - // (Bitstream may be unaligned!) 
- } - } - break; - - case H264_NAL_AUD: - { - err = cbs_h264_write_aud(ctx, pbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_SEI: - { - err = cbs_h264_write_sei(ctx, pbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_FILLER_DATA: - { - err = cbs_h264_write_filler(ctx, pbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_END_SEQUENCE: - { - err = cbs_h264_write_end_of_sequence(ctx, pbc, unit->content); - if (err < 0) - return err; - } - break; - - case H264_NAL_END_STREAM: - { - err = cbs_h264_write_end_of_stream(ctx, pbc, unit->content); - if (err < 0) - return err; - } - break; - - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for " - "NAL unit type %"PRIu32".\n", unit->type); - return AVERROR_PATCHWELCOME; - } - - return 0; -} - -static int cbs_h265_write_nal_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - int err; - - switch (unit->type) { - case HEVC_NAL_VPS: - { - H265RawVPS *vps = unit->content; - - err = cbs_h265_write_vps(ctx, pbc, vps); - if (err < 0) - return err; - - err = cbs_h265_replace_vps(ctx, unit); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_SPS: - { - H265RawSPS *sps = unit->content; - - err = cbs_h265_write_sps(ctx, pbc, sps); - if (err < 0) - return err; - - err = cbs_h265_replace_sps(ctx, unit); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_PPS: - { - H265RawPPS *pps = unit->content; - - err = cbs_h265_write_pps(ctx, pbc, pps); - if (err < 0) - return err; - - err = cbs_h265_replace_pps(ctx, unit); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_TRAIL_N: - case HEVC_NAL_TRAIL_R: - case HEVC_NAL_TSA_N: - case HEVC_NAL_TSA_R: - case HEVC_NAL_STSA_N: - case HEVC_NAL_STSA_R: - case HEVC_NAL_RADL_N: - case HEVC_NAL_RADL_R: - case HEVC_NAL_RASL_N: - case HEVC_NAL_RASL_R: - case HEVC_NAL_BLA_W_LP: - case HEVC_NAL_BLA_W_RADL: - case HEVC_NAL_BLA_N_LP: - case 
HEVC_NAL_IDR_W_RADL: - case HEVC_NAL_IDR_N_LP: - case HEVC_NAL_CRA_NUT: - { - H265RawSlice *slice = unit->content; - - err = cbs_h265_write_slice_segment_header(ctx, pbc, &slice->header); - if (err < 0) - return err; - - if (slice->data) { - err = cbs_h2645_write_slice_data(ctx, pbc, slice->data, - slice->data_size, - slice->data_bit_start); - if (err < 0) - return err; - } else { - // No slice data - that was just the header. - } - } - break; - - case HEVC_NAL_AUD: - { - err = cbs_h265_write_aud(ctx, pbc, unit->content); - if (err < 0) - return err; - } - break; - - case HEVC_NAL_SEI_PREFIX: - case HEVC_NAL_SEI_SUFFIX: - { - err = cbs_h265_write_sei(ctx, pbc, unit->content, - unit->type == HEVC_NAL_SEI_PREFIX); - - if (err < 0) - return err; - } - break; - - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for " - "NAL unit type %"PRIu32".\n", unit->type); - return AVERROR_PATCHWELCOME; - } - - return 0; -} - -static int cbs_h2645_unit_requires_zero_byte(enum AVCodecID codec_id, - CodedBitstreamUnitType type, - int nal_unit_index) -{ - // Section B.1.2 in H.264, section B.2.2 in H.265. - if (nal_unit_index == 0) { - // Assume that this is the first NAL unit in an access unit. - return 1; - } - if (codec_id == AV_CODEC_ID_H264) - return type == H264_NAL_SPS || type == H264_NAL_PPS; - if (codec_id == AV_CODEC_ID_HEVC) - return type == HEVC_NAL_VPS || type == HEVC_NAL_SPS || type == HEVC_NAL_PPS; - return 0; -} - -static int cbs_h2645_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) -{ - uint8_t *data; - size_t max_size, dp, sp; - int err, i, zero_run; - - for (i = 0; i < frag->nb_units; i++) { - // Data should already all have been written when we get here. - av_assert0(frag->units[i].data); - } - - max_size = 0; - for (i = 0; i < frag->nb_units; i++) { - // Start code + content with worst-case emulation prevention. 
- max_size += 4 + frag->units[i].data_size * 3 / 2; - } - - data = av_realloc(NULL, max_size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!data) - return AVERROR(ENOMEM); - - dp = 0; - for (i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - if (unit->data_bit_padding > 0) { - if (i < frag->nb_units - 1) - av_log(ctx->log_ctx, AV_LOG_WARNING, "Probably invalid " - "unaligned padding on non-final NAL unit.\n"); - else - frag->data_bit_padding = unit->data_bit_padding; - } - - if (cbs_h2645_unit_requires_zero_byte(ctx->codec->codec_id, unit->type, i)) { - // zero_byte - data[dp++] = 0; - } - // start_code_prefix_one_3bytes - data[dp++] = 0; - data[dp++] = 0; - data[dp++] = 1; - - zero_run = 0; - for (sp = 0; sp < unit->data_size; sp++) { - if (zero_run < 2) { - if (unit->data[sp] == 0) - ++zero_run; - else - zero_run = 0; - } else { - if ((unit->data[sp] & ~3) == 0) { - // emulation_prevention_three_byte - data[dp++] = 3; - } - zero_run = unit->data[sp] == 0; - } - data[dp++] = unit->data[sp]; - } - } - - av_assert0(dp <= max_size); - err = av_reallocp(&data, dp + AV_INPUT_BUFFER_PADDING_SIZE); - if (err) - return err; - memset(data + dp, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - frag->data_ref = av_buffer_create(data, dp + AV_INPUT_BUFFER_PADDING_SIZE, - NULL, NULL, 0); - if (!frag->data_ref) { - av_freep(&data); - return AVERROR(ENOMEM); - } - - frag->data = data; - frag->data_size = dp; - - return 0; -} - -static void cbs_h264_flush(CodedBitstreamContext *ctx) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - - for (int i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++) { - av_buffer_unref(&h264->sps_ref[i]); - h264->sps[i] = NULL; - } - for (int i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++) { - av_buffer_unref(&h264->pps_ref[i]); - h264->pps[i] = NULL; - } - - h264->active_sps = NULL; - h264->active_pps = NULL; - h264->last_slice_nal_unit_type = 0; -} - -static void cbs_h264_close(CodedBitstreamContext *ctx) -{ - CodedBitstreamH264Context *h264 
= ctx->priv_data; - int i; - - ff_h2645_packet_uninit(&h264->common.read_packet); - - for (i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++) - av_buffer_unref(&h264->sps_ref[i]); - for (i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++) - av_buffer_unref(&h264->pps_ref[i]); -} - -static void cbs_h265_flush(CodedBitstreamContext *ctx) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - - for (int i = 0; i < FF_ARRAY_ELEMS(h265->vps); i++) { - av_buffer_unref(&h265->vps_ref[i]); - h265->vps[i] = NULL; - } - for (int i = 0; i < FF_ARRAY_ELEMS(h265->sps); i++) { - av_buffer_unref(&h265->sps_ref[i]); - h265->sps[i] = NULL; - } - for (int i = 0; i < FF_ARRAY_ELEMS(h265->pps); i++) { - av_buffer_unref(&h265->pps_ref[i]); - h265->pps[i] = NULL; - } - - h265->active_vps = NULL; - h265->active_sps = NULL; - h265->active_pps = NULL; -} - -static void cbs_h265_close(CodedBitstreamContext *ctx) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - int i; - - ff_h2645_packet_uninit(&h265->common.read_packet); - - for (i = 0; i < FF_ARRAY_ELEMS(h265->vps); i++) - av_buffer_unref(&h265->vps_ref[i]); - for (i = 0; i < FF_ARRAY_ELEMS(h265->sps); i++) - av_buffer_unref(&h265->sps_ref[i]); - for (i = 0; i < FF_ARRAY_ELEMS(h265->pps); i++) - av_buffer_unref(&h265->pps_ref[i]); -} - -static void cbs_h264_free_sei(void *opaque, uint8_t *content) -{ - H264RawSEI *sei = (H264RawSEI*)content; - ff_cbs_sei_free_message_list(&sei->message_list); - av_free(content); -} - -static const CodedBitstreamUnitTypeDescriptor cbs_h264_unit_types[] = { - CBS_UNIT_TYPE_POD(H264_NAL_SPS, H264RawSPS), - CBS_UNIT_TYPE_POD(H264_NAL_SPS_EXT, H264RawSPSExtension), - - CBS_UNIT_TYPE_INTERNAL_REF(H264_NAL_PPS, H264RawPPS, slice_group_id), - - { - .nb_unit_types = 3, - .unit_types = { - H264_NAL_IDR_SLICE, - H264_NAL_SLICE, - H264_NAL_AUXILIARY_SLICE, - }, - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(H264RawSlice), - .nb_ref_offsets = 1, - .ref_offsets = { offsetof(H264RawSlice, data) }, - 
}, - - CBS_UNIT_TYPE_POD(H264_NAL_AUD, H264RawAUD), - CBS_UNIT_TYPE_POD(H264_NAL_FILLER_DATA, H264RawFiller), - CBS_UNIT_TYPE_POD(H264_NAL_END_SEQUENCE, H264RawNALUnitHeader), - CBS_UNIT_TYPE_POD(H264_NAL_END_STREAM, H264RawNALUnitHeader), - - CBS_UNIT_TYPE_COMPLEX(H264_NAL_SEI, H264RawSEI, &cbs_h264_free_sei), - - CBS_UNIT_TYPE_END_OF_LIST -}; - -static void cbs_h265_free_sei(void *opaque, uint8_t *content) -{ - H265RawSEI *sei = (H265RawSEI*)content; - ff_cbs_sei_free_message_list(&sei->message_list); - av_free(content); -} - -static const CodedBitstreamUnitTypeDescriptor cbs_h265_unit_types[] = { - CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_VPS, H265RawVPS, extension_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_SPS, H265RawSPS, extension_data.data), - CBS_UNIT_TYPE_INTERNAL_REF(HEVC_NAL_PPS, H265RawPPS, extension_data.data), - - CBS_UNIT_TYPE_POD(HEVC_NAL_AUD, H265RawAUD), - - { - // Slices of non-IRAP pictures. - .nb_unit_types = CBS_UNIT_TYPE_RANGE, - .unit_type_range_start = HEVC_NAL_TRAIL_N, - .unit_type_range_end = HEVC_NAL_RASL_R, - - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(H265RawSlice), - .nb_ref_offsets = 1, - .ref_offsets = { offsetof(H265RawSlice, data) }, - }, - - { - // Slices of IRAP pictures. 
- .nb_unit_types = CBS_UNIT_TYPE_RANGE, - .unit_type_range_start = HEVC_NAL_BLA_W_LP, - .unit_type_range_end = HEVC_NAL_CRA_NUT, - - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(H265RawSlice), - .nb_ref_offsets = 1, - .ref_offsets = { offsetof(H265RawSlice, data) }, - }, - - { - .nb_unit_types = 2, - .unit_types = { - HEVC_NAL_SEI_PREFIX, - HEVC_NAL_SEI_SUFFIX - }, - .content_type = CBS_CONTENT_TYPE_COMPLEX, - .content_size = sizeof(H265RawSEI), - .content_free = &cbs_h265_free_sei, - }, - - CBS_UNIT_TYPE_END_OF_LIST -}; - -const CodedBitstreamType ff_cbs_type_h264 = { - .codec_id = AV_CODEC_ID_H264, - - .priv_data_size = sizeof(CodedBitstreamH264Context), - - .unit_types = cbs_h264_unit_types, - - .split_fragment = &cbs_h2645_split_fragment, - .read_unit = &cbs_h264_read_nal_unit, - .write_unit = &cbs_h264_write_nal_unit, - .assemble_fragment = &cbs_h2645_assemble_fragment, - - .flush = &cbs_h264_flush, - .close = &cbs_h264_close, -}; - -const CodedBitstreamType ff_cbs_type_h265 = { - .codec_id = AV_CODEC_ID_HEVC, - - .priv_data_size = sizeof(CodedBitstreamH265Context), - - .unit_types = cbs_h265_unit_types, - - .split_fragment = &cbs_h2645_split_fragment, - .read_unit = &cbs_h265_read_nal_unit, - .write_unit = &cbs_h265_write_nal_unit, - .assemble_fragment = &cbs_h2645_assemble_fragment, - - .flush = &cbs_h265_flush, - .close = &cbs_h265_close, -}; - -static const SEIMessageTypeDescriptor cbs_sei_common_types[] = { - { - SEI_TYPE_FILLER_PAYLOAD, - 1, 1, - sizeof(SEIRawFillerPayload), - SEI_MESSAGE_RW(sei, filler_payload), - }, - { - SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35, - 1, 1, - sizeof(SEIRawUserDataRegistered), - SEI_MESSAGE_RW(sei, user_data_registered), - }, - { - SEI_TYPE_USER_DATA_UNREGISTERED, - 1, 1, - sizeof(SEIRawUserDataUnregistered), - SEI_MESSAGE_RW(sei, user_data_unregistered), - }, - { - SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME, - 1, 0, - sizeof(SEIRawMasteringDisplayColourVolume), - SEI_MESSAGE_RW(sei, 
mastering_display_colour_volume), - }, - { - SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO, - 1, 0, - sizeof(SEIRawContentLightLevelInfo), - SEI_MESSAGE_RW(sei, content_light_level_info), - }, - { - SEI_TYPE_ALTERNATIVE_TRANSFER_CHARACTERISTICS, - 1, 0, - sizeof(SEIRawAlternativeTransferCharacteristics), - SEI_MESSAGE_RW(sei, alternative_transfer_characteristics), - }, - SEI_MESSAGE_TYPE_END, -}; - -static const SEIMessageTypeDescriptor cbs_sei_h264_types[] = { - { - SEI_TYPE_BUFFERING_PERIOD, - 1, 0, - sizeof(H264RawSEIBufferingPeriod), - SEI_MESSAGE_RW(h264, sei_buffering_period), - }, - { - SEI_TYPE_PIC_TIMING, - 1, 0, - sizeof(H264RawSEIPicTiming), - SEI_MESSAGE_RW(h264, sei_pic_timing), - }, - { - SEI_TYPE_PAN_SCAN_RECT, - 1, 0, - sizeof(H264RawSEIPanScanRect), - SEI_MESSAGE_RW(h264, sei_pan_scan_rect), - }, - { - SEI_TYPE_RECOVERY_POINT, - 1, 0, - sizeof(H264RawSEIRecoveryPoint), - SEI_MESSAGE_RW(h264, sei_recovery_point), - }, - { - SEI_TYPE_FILM_GRAIN_CHARACTERISTICS, - 1, 0, - sizeof(H264RawFilmGrainCharacteristics), - SEI_MESSAGE_RW(h264, film_grain_characteristics), - }, - { - SEI_TYPE_DISPLAY_ORIENTATION, - 1, 0, - sizeof(H264RawSEIDisplayOrientation), - SEI_MESSAGE_RW(h264, sei_display_orientation), - }, - SEI_MESSAGE_TYPE_END -}; - -static const SEIMessageTypeDescriptor cbs_sei_h265_types[] = { - { - SEI_TYPE_BUFFERING_PERIOD, - 1, 0, - sizeof(H265RawSEIBufferingPeriod), - SEI_MESSAGE_RW(h265, sei_buffering_period), - }, - { - SEI_TYPE_PIC_TIMING, - 1, 0, - sizeof(H265RawSEIPicTiming), - SEI_MESSAGE_RW(h265, sei_pic_timing), - }, - { - SEI_TYPE_PAN_SCAN_RECT, - 1, 0, - sizeof(H265RawSEIPanScanRect), - SEI_MESSAGE_RW(h265, sei_pan_scan_rect), - }, - { - SEI_TYPE_RECOVERY_POINT, - 1, 0, - sizeof(H265RawSEIRecoveryPoint), - SEI_MESSAGE_RW(h265, sei_recovery_point), - }, - { - SEI_TYPE_FILM_GRAIN_CHARACTERISTICS, - 1, 0, - sizeof(H265RawFilmGrainCharacteristics), - SEI_MESSAGE_RW(h265, film_grain_characteristics), - }, - { - SEI_TYPE_DISPLAY_ORIENTATION, - 1, 0, - 
sizeof(H265RawSEIDisplayOrientation), - SEI_MESSAGE_RW(h265, sei_display_orientation), - }, - { - SEI_TYPE_ACTIVE_PARAMETER_SETS, - 1, 0, - sizeof(H265RawSEIActiveParameterSets), - SEI_MESSAGE_RW(h265, sei_active_parameter_sets), - }, - { - SEI_TYPE_DECODED_PICTURE_HASH, - 0, 1, - sizeof(H265RawSEIDecodedPictureHash), - SEI_MESSAGE_RW(h265, sei_decoded_picture_hash), - }, - { - SEI_TYPE_TIME_CODE, - 1, 0, - sizeof(H265RawSEITimeCode), - SEI_MESSAGE_RW(h265, sei_time_code), - }, - { - SEI_TYPE_ALPHA_CHANNEL_INFO, - 1, 0, - sizeof(H265RawSEIAlphaChannelInfo), - SEI_MESSAGE_RW(h265, sei_alpha_channel_info), - }, - SEI_MESSAGE_TYPE_END -}; - -const SEIMessageTypeDescriptor *ff_cbs_sei_find_type(CodedBitstreamContext *ctx, - int payload_type) -{ - const SEIMessageTypeDescriptor *codec_list; - int i; - - for (i = 0; cbs_sei_common_types[i].type >= 0; i++) { - if (cbs_sei_common_types[i].type == payload_type) - return &cbs_sei_common_types[i]; - } - - switch (ctx->codec->codec_id) { - case AV_CODEC_ID_H264: - codec_list = cbs_sei_h264_types; - break; - case AV_CODEC_ID_H265: - codec_list = cbs_sei_h265_types; - break; - default: - return NULL; - } - - for (i = 0; codec_list[i].type >= 0; i++) { - if (codec_list[i].type == payload_type) - return &codec_list[i]; - } - - return NULL; -} diff --git a/third-party/cbs/cbs_h264_syntax_template.c b/third-party/cbs/cbs_h264_syntax_template.c deleted file mode 100644 index 0f8bba4a0da..00000000000 --- a/third-party/cbs/cbs_h264_syntax_template.c +++ /dev/null @@ -1,1262 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) -{ - int err; - - fixed(1, rbsp_stop_one_bit, 1); - while (byte_alignment(rw) != 0) - fixed(1, rbsp_alignment_zero_bit, 0); - - return 0; -} - -static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawNALUnitHeader *current, - uint32_t valid_type_mask) -{ - int err; - - fixed(1, forbidden_zero_bit, 0); - ub(2, nal_ref_idc); - ub(5, nal_unit_type); - - if (!(1 << current->nal_unit_type & valid_type_mask)) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n", - current->nal_unit_type); - return AVERROR_INVALIDDATA; - } - - if (current->nal_unit_type == 14 || - current->nal_unit_type == 20 || - current->nal_unit_type == 21) { - if (current->nal_unit_type != 21) - flag(svc_extension_flag); - else - flag(avc_3d_extension_flag); - - if (current->svc_extension_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n"); - return AVERROR_PATCHWELCOME; - - } else if (current->avc_3d_extension_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n"); - return AVERROR_PATCHWELCOME; - - } else { - av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n"); - return AVERROR_PATCHWELCOME; - } - } - - return 0; -} - -static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawScalingList *current, - int size_of_scaling_list) -{ - int err, i, scale; - - scale = 8; - for (i = 0; i < size_of_scaling_list; i++) { - ses(delta_scale[i], -128, +127, 
1, i); - scale = (scale + current->delta_scale[i] + 256) % 256; - if (scale == 0) - break; - } - - return 0; -} - -static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawHRD *current) -{ - int err, i; - - ue(cpb_cnt_minus1, 0, 31); - ub(4, bit_rate_scale); - ub(4, cpb_size_scale); - - for (i = 0; i <= current->cpb_cnt_minus1; i++) { - ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - flags(cbr_flag[i], 1, i); - } - - ub(5, initial_cpb_removal_delay_length_minus1); - ub(5, cpb_removal_delay_length_minus1); - ub(5, dpb_output_delay_length_minus1); - ub(5, time_offset_length); - - return 0; -} - -static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawVUI *current, H264RawSPS *sps) -{ - int err; - - flag(aspect_ratio_info_present_flag); - if (current->aspect_ratio_info_present_flag) { - ub(8, aspect_ratio_idc); - if (current->aspect_ratio_idc == 255) { - ub(16, sar_width); - ub(16, sar_height); - } - } else { - infer(aspect_ratio_idc, 0); - } - - flag(overscan_info_present_flag); - if (current->overscan_info_present_flag) - flag(overscan_appropriate_flag); - - flag(video_signal_type_present_flag); - if (current->video_signal_type_present_flag) { - ub(3, video_format); - flag(video_full_range_flag); - flag(colour_description_present_flag); - if (current->colour_description_present_flag) { - ub(8, colour_primaries); - ub(8, transfer_characteristics); - ub(8, matrix_coefficients); - } else { - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - } else { - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - - flag(chroma_loc_info_present_flag); - if (current->chroma_loc_info_present_flag) { - ue(chroma_sample_loc_type_top_field, 0, 5); - ue(chroma_sample_loc_type_bottom_field, 0, 5); - } 
else { - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - } - - flag(timing_info_present_flag); - if (current->timing_info_present_flag) { - u(32, num_units_in_tick, 1, UINT32_MAX); - u(32, time_scale, 1, UINT32_MAX); - flag(fixed_frame_rate_flag); - } else { - infer(fixed_frame_rate_flag, 0); - } - - flag(nal_hrd_parameters_present_flag); - if (current->nal_hrd_parameters_present_flag) - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->nal_hrd_parameters)); - - flag(vcl_hrd_parameters_present_flag); - if (current->vcl_hrd_parameters_present_flag) - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->vcl_hrd_parameters)); - - if (current->nal_hrd_parameters_present_flag || - current->vcl_hrd_parameters_present_flag) - flag(low_delay_hrd_flag); - else - infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag); - - flag(pic_struct_present_flag); - - flag(bitstream_restriction_flag); - if (current->bitstream_restriction_flag) { - flag(motion_vectors_over_pic_boundaries_flag); - ue(max_bytes_per_pic_denom, 0, 16); - ue(max_bits_per_mb_denom, 0, 16); - // The current version of the standard constrains this to be in - // [0,15], but older versions allow 16. 
- ue(log2_max_mv_length_horizontal, 0, 16); - ue(log2_max_mv_length_vertical, 0, 16); - ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES); - ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES); - } else { - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_mb_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - - if ((sps->profile_idc == 44 || sps->profile_idc == 86 || - sps->profile_idc == 100 || sps->profile_idc == 110 || - sps->profile_idc == 122 || sps->profile_idc == 244) && - sps->constraint_set3_flag) { - infer(max_num_reorder_frames, 0); - infer(max_dec_frame_buffering, 0); - } else { - infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES); - infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES); - } - } - - return 0; -} - -static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx, - RWContext *rw, H264RawVUI *current, - H264RawSPS *sps) -{ - infer(aspect_ratio_idc, 0); - - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - - infer(fixed_frame_rate_flag, 0); - infer(low_delay_hrd_flag, 1); - - infer(pic_struct_present_flag, 0); - - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_mb_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - - if ((sps->profile_idc == 44 || sps->profile_idc == 86 || - sps->profile_idc == 100 || sps->profile_idc == 110 || - sps->profile_idc == 122 || sps->profile_idc == 244) && - sps->constraint_set3_flag) { - infer(max_num_reorder_frames, 0); - infer(max_dec_frame_buffering, 0); - } else { - infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES); - infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES); - } - - return 0; 
-} - -static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSPS *current) -{ - int err, i; - - HEADER("Sequence Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SPS)); - - ub(8, profile_idc); - - flag(constraint_set0_flag); - flag(constraint_set1_flag); - flag(constraint_set2_flag); - flag(constraint_set3_flag); - flag(constraint_set4_flag); - flag(constraint_set5_flag); - - u(2, reserved_zero_2bits, 0, 0); - - ub(8, level_idc); - - ue(seq_parameter_set_id, 0, 31); - - if (current->profile_idc == 100 || current->profile_idc == 110 || - current->profile_idc == 122 || current->profile_idc == 244 || - current->profile_idc == 44 || current->profile_idc == 83 || - current->profile_idc == 86 || current->profile_idc == 118 || - current->profile_idc == 128 || current->profile_idc == 138) { - ue(chroma_format_idc, 0, 3); - - if (current->chroma_format_idc == 3) - flag(separate_colour_plane_flag); - else - infer(separate_colour_plane_flag, 0); - - ue(bit_depth_luma_minus8, 0, 6); - ue(bit_depth_chroma_minus8, 0, 6); - - flag(qpprime_y_zero_transform_bypass_flag); - - flag(seq_scaling_matrix_present_flag); - if (current->seq_scaling_matrix_present_flag) { - for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) { - flags(seq_scaling_list_present_flag[i], 1, i); - if (current->seq_scaling_list_present_flag[i]) { - if (i < 6) - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_4x4[i], - 16)); - else - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_8x8[i - 6], - 64)); - } - } - } - } else { - infer(chroma_format_idc, current->profile_idc == 183 ? 
0 : 1); - - infer(separate_colour_plane_flag, 0); - infer(bit_depth_luma_minus8, 0); - infer(bit_depth_chroma_minus8, 0); - } - - ue(log2_max_frame_num_minus4, 0, 12); - ue(pic_order_cnt_type, 0, 2); - - if (current->pic_order_cnt_type == 0) { - ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12); - } else if (current->pic_order_cnt_type == 1) { - flag(delta_pic_order_always_zero_flag); - se(offset_for_non_ref_pic, INT32_MIN + 1, INT32_MAX); - se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX); - ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255); - - for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++) - ses(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX, 1, i); - } - - ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES); - flag(gaps_in_frame_num_allowed_flag); - - ue(pic_width_in_mbs_minus1, 0, H264_MAX_MB_WIDTH); - ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT); - - flag(frame_mbs_only_flag); - if (!current->frame_mbs_only_flag) - flag(mb_adaptive_frame_field_flag); - - flag(direct_8x8_inference_flag); - - flag(frame_cropping_flag); - if (current->frame_cropping_flag) { - ue(frame_crop_left_offset, 0, H264_MAX_WIDTH); - ue(frame_crop_right_offset, 0, H264_MAX_WIDTH); - ue(frame_crop_top_offset, 0, H264_MAX_HEIGHT); - ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT); - } - - flag(vui_parameters_present_flag); - if (current->vui_parameters_present_flag) - CHECK(FUNC(vui_parameters)(ctx, rw, ¤t->vui, current)); - else - CHECK(FUNC(vui_parameters_default)(ctx, rw, ¤t->vui, current)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSPSExtension *current) -{ - int err; - - HEADER("Sequence Parameter Set Extension"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SPS_EXT)); - - ue(seq_parameter_set_id, 0, 31); - - ue(aux_format_idc, 0, 3); - - if (current->aux_format_idc != 0) { - int bits; - - 
ue(bit_depth_aux_minus8, 0, 4); - flag(alpha_incr_flag); - - bits = current->bit_depth_aux_minus8 + 9; - ub(bits, alpha_opaque_value); - ub(bits, alpha_transparent_value); - } - - flag(additional_extension_flag); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawPPS *current) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err, i; - - HEADER("Picture Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_PPS)); - - ue(pic_parameter_set_id, 0, 255); - ue(seq_parameter_set_id, 0, 31); - - sps = h264->sps[current->seq_parameter_set_id]; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - current->seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - - flag(entropy_coding_mode_flag); - flag(bottom_field_pic_order_in_frame_present_flag); - - ue(num_slice_groups_minus1, 0, 7); - if (current->num_slice_groups_minus1 > 0) { - unsigned int pic_size; - int iGroup; - - pic_size = (sps->pic_width_in_mbs_minus1 + 1) * - (sps->pic_height_in_map_units_minus1 + 1); - - ue(slice_group_map_type, 0, 6); - - if (current->slice_group_map_type == 0) { - for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++) - ues(run_length_minus1[iGroup], 0, pic_size - 1, 1, iGroup); - - } else if (current->slice_group_map_type == 2) { - for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) { - ues(top_left[iGroup], 0, pic_size - 1, 1, iGroup); - ues(bottom_right[iGroup], - current->top_left[iGroup], pic_size - 1, 1, iGroup); - } - } else if (current->slice_group_map_type == 3 || - current->slice_group_map_type == 4 || - current->slice_group_map_type == 5) { - flag(slice_group_change_direction_flag); - ue(slice_group_change_rate_minus1, 0, pic_size - 1); - } else if (current->slice_group_map_type == 6) { - ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1); - - 
allocate(current->slice_group_id, - current->pic_size_in_map_units_minus1 + 1); - for (i = 0; i <= current->pic_size_in_map_units_minus1; i++) - us(av_log2(2 * current->num_slice_groups_minus1 + 1), - slice_group_id[i], 0, current->num_slice_groups_minus1, 1, i); - } - } - - ue(num_ref_idx_l0_default_active_minus1, 0, 31); - ue(num_ref_idx_l1_default_active_minus1, 0, 31); - - flag(weighted_pred_flag); - u(2, weighted_bipred_idc, 0, 2); - - se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25); - se(pic_init_qs_minus26, -26, +25); - se(chroma_qp_index_offset, -12, +12); - - flag(deblocking_filter_control_present_flag); - flag(constrained_intra_pred_flag); - flag(redundant_pic_cnt_present_flag); - - if (more_rbsp_data(current->more_rbsp_data)) - { - flag(transform_8x8_mode_flag); - - flag(pic_scaling_matrix_present_flag); - if (current->pic_scaling_matrix_present_flag) { - for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) * - current->transform_8x8_mode_flag); i++) { - flags(pic_scaling_list_present_flag[i], 1, i); - if (current->pic_scaling_list_present_flag[i]) { - if (i < 6) - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_4x4[i], - 16)); - else - CHECK(FUNC(scaling_list)(ctx, rw, - ¤t->scaling_list_8x8[i - 6], - 64)); - } - } - } - - se(second_chroma_qp_index_offset, -12, +12); - } else { - infer(transform_8x8_mode_flag, 0); - infer(pic_scaling_matrix_present_flag, 0); - infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset); - } - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIBufferingPeriod *current, - SEIMessageState *sei) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err, i, length; - - HEADER("Buffering Period"); - - ue(seq_parameter_set_id, 0, 31); - - sps = h264->sps[current->seq_parameter_set_id]; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not 
available.\n", - current->seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h264->active_sps = sps; - - if (sps->vui.nal_hrd_parameters_present_flag) { - for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) { - length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1; - xu(length, initial_cpb_removal_delay[SchedSelIdx], - current->nal.initial_cpb_removal_delay[i], - 1, MAX_UINT_BITS(length), 1, i); - xu(length, initial_cpb_removal_delay_offset[SchedSelIdx], - current->nal.initial_cpb_removal_delay_offset[i], - 0, MAX_UINT_BITS(length), 1, i); - } - } - - if (sps->vui.vcl_hrd_parameters_present_flag) { - for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) { - length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1; - xu(length, initial_cpb_removal_delay[SchedSelIdx], - current->vcl.initial_cpb_removal_delay[i], - 1, MAX_UINT_BITS(length), 1, i); - xu(length, initial_cpb_removal_delay_offset[SchedSelIdx], - current->vcl.initial_cpb_removal_delay_offset[i], - 0, MAX_UINT_BITS(length), 1, i); - } - } - - return 0; -} - -static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIPicTimestamp *current, - const H264RawSPS *sps) -{ - uint8_t time_offset_length; - int err; - - u(2, ct_type, 0, 2); - flag(nuit_field_based_flag); - u(5, counting_type, 0, 6); - flag(full_timestamp_flag); - flag(discontinuity_flag); - flag(cnt_dropped_flag); - ub(8, n_frames); - if (current->full_timestamp_flag) { - u(6, seconds_value, 0, 59); - u(6, minutes_value, 0, 59); - u(5, hours_value, 0, 23); - } else { - flag(seconds_flag); - if (current->seconds_flag) { - u(6, seconds_value, 0, 59); - flag(minutes_flag); - if (current->minutes_flag) { - u(6, minutes_value, 0, 59); - flag(hours_flag); - if (current->hours_flag) - u(5, hours_value, 0, 23); - } - } - } - - if (sps->vui.nal_hrd_parameters_present_flag) - time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length; - 
else if (sps->vui.vcl_hrd_parameters_present_flag) - time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length; - else - time_offset_length = 24; - - if (time_offset_length > 0) - ib(time_offset_length, time_offset); - else - infer(time_offset, 0); - - return 0; -} - -static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIPicTiming *current, - SEIMessageState *sei) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err; - - HEADER("Picture Timing"); - - sps = h264->active_sps; - if (!sps) { - // If there is exactly one possible SPS but it is not yet active - // then just assume that it should be the active one. - int i, k = -1; - for (i = 0; i < H264_MAX_SPS_COUNT; i++) { - if (h264->sps[i]) { - if (k >= 0) { - k = -1; - break; - } - k = i; - } - } - if (k >= 0) - sps = h264->sps[k]; - } - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for pic_timing.\n"); - return AVERROR_INVALIDDATA; - } - - if (sps->vui.nal_hrd_parameters_present_flag || - sps->vui.vcl_hrd_parameters_present_flag) { - const H264RawHRD *hrd; - - if (sps->vui.nal_hrd_parameters_present_flag) - hrd = &sps->vui.nal_hrd_parameters; - else if (sps->vui.vcl_hrd_parameters_present_flag) - hrd = &sps->vui.vcl_hrd_parameters; - else { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No HRD parameters for pic_timing.\n"); - return AVERROR_INVALIDDATA; - } - - ub(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay); - ub(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay); - } - - if (sps->vui.pic_struct_present_flag) { - static const uint8_t num_clock_ts[9] = { - 1, 1, 1, 2, 2, 3, 3, 2, 3 - }; - int i; - - u(4, pic_struct, 0, 8); - if (current->pic_struct > 8) - return AVERROR_INVALIDDATA; - - for (i = 0; i < num_clock_ts[current->pic_struct]; i++) { - flags(clock_timestamp_flag[i], 1, i); - if (current->clock_timestamp_flag[i]) - CHECK(FUNC(sei_pic_timestamp)(ctx, rw, - ¤t->timestamp[i], sps)); - } - } - 
- return 0; -} - -static int FUNC(sei_pan_scan_rect)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIPanScanRect *current, - SEIMessageState *sei) -{ - int err, i; - - HEADER("Pan-Scan Rectangle"); - - ue(pan_scan_rect_id, 0, UINT32_MAX - 1); - flag(pan_scan_rect_cancel_flag); - - if (!current->pan_scan_rect_cancel_flag) { - ue(pan_scan_cnt_minus1, 0, 2); - - for (i = 0; i <= current->pan_scan_cnt_minus1; i++) { - ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - } - - ue(pan_scan_rect_repetition_period, 0, 16384); - } - - return 0; -} - -static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIRecoveryPoint *current, - SEIMessageState *sei) -{ - int err; - - HEADER("Recovery Point"); - - ue(recovery_frame_cnt, 0, 65535); - flag(exact_match_flag); - flag(broken_link_flag); - u(2, changing_slice_group_idc, 0, 2); - - return 0; -} - -static int FUNC(film_grain_characteristics)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawFilmGrainCharacteristics *current, - SEIMessageState *state) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - int err, c, i, j; - - HEADER("Film Grain Characteristics"); - - sps = h264->active_sps; - if (!sps) { - // If there is exactly one possible SPS but it is not yet active - // then just assume that it should be the active one. 
- int i, k = -1; - for (i = 0; i < H264_MAX_SPS_COUNT; i++) { - if (h264->sps[i]) { - if (k >= 0) { - k = -1; - break; - } - k = i; - } - } - if (k >= 0) - sps = h264->sps[k]; - } - - flag(film_grain_characteristics_cancel_flag); - if (!current->film_grain_characteristics_cancel_flag) { - int filmGrainBitDepth[3]; - - u(2, film_grain_model_id, 0, 1); - flag(separate_colour_description_present_flag); - if (current->separate_colour_description_present_flag) { - ub(3, film_grain_bit_depth_luma_minus8); - ub(3, film_grain_bit_depth_chroma_minus8); - flag(film_grain_full_range_flag); - ub(8, film_grain_colour_primaries); - ub(8, film_grain_transfer_characteristics); - ub(8, film_grain_matrix_coefficients); - } else { - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for film_grain_characteristics.\n"); - return AVERROR_INVALIDDATA; - } - infer(film_grain_bit_depth_luma_minus8, sps->bit_depth_luma_minus8); - infer(film_grain_bit_depth_chroma_minus8, sps->bit_depth_chroma_minus8); - infer(film_grain_full_range_flag, sps->vui.video_full_range_flag); - infer(film_grain_colour_primaries, sps->vui.colour_primaries); - infer(film_grain_transfer_characteristics, sps->vui.transfer_characteristics); - infer(film_grain_matrix_coefficients, sps->vui.matrix_coefficients); - } - - filmGrainBitDepth[0] = current->film_grain_bit_depth_luma_minus8 + 8; - filmGrainBitDepth[1] = - filmGrainBitDepth[2] = current->film_grain_bit_depth_chroma_minus8 + 8; - - u(2, blending_mode_id, 0, 1); - ub(4, log2_scale_factor); - for (c = 0; c < 3; c++) - flags(comp_model_present_flag[c], 1, c); - for (c = 0; c < 3; c++) { - if (current->comp_model_present_flag[c]) { - ubs(8, num_intensity_intervals_minus1[c], 1, c); - us(3, num_model_values_minus1[c], 0, 5, 1, c); - for (i = 0; i <= current->num_intensity_intervals_minus1[c]; i++) { - ubs(8, intensity_interval_lower_bound[c][i], 2, c, i); - ubs(8, intensity_interval_upper_bound[c][i], 2, c, i); - for (j = 0; j <= 
current->num_model_values_minus1[c]; j++) - ses(comp_model_value[c][i][j], 0 - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), - ((1 << filmGrainBitDepth[c]) - 1) - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), - 3, c, i, j); - } - } - } - ue(film_grain_characteristics_repetition_period, 0, 16384); - } - - return 0; -} - -static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEIDisplayOrientation *current, - SEIMessageState *sei) -{ - int err; - - HEADER("Display Orientation"); - - flag(display_orientation_cancel_flag); - if (!current->display_orientation_cancel_flag) { - flag(hor_flip); - flag(ver_flip); - ub(16, anticlockwise_rotation); - ue(display_orientation_repetition_period, 0, 16384); - flag(display_orientation_extension_flag); - } - - return 0; -} - -static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSEI *current) -{ - int err; - - HEADER("Supplemental Enhancement Information"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SEI)); - - CHECK(FUNC_SEI(message_list)(ctx, rw, ¤t->message_list, 1)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawAUD *current) -{ - int err; - - HEADER("Access Unit Delimiter"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_AUD)); - - ub(3, primary_pic_type); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps = h264->active_sps; - int err, i, mopn; - - if (current->slice_type % 5 != 2 && - current->slice_type % 5 != 4) { - flag(ref_pic_list_modification_flag_l0); - if (current->ref_pic_list_modification_flag_l0) { - for (i = 0; i < H264_MAX_RPLM_COUNT; i++) { - 
xue(modification_of_pic_nums_idc, - current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3, 0); - - mopn = current->rplm_l0[i].modification_of_pic_nums_idc; - if (mopn == 3) - break; - - if (mopn == 0 || mopn == 1) - xue(abs_diff_pic_num_minus1, - current->rplm_l0[i].abs_diff_pic_num_minus1, - 0, (1 + current->field_pic_flag) * - (1 << (sps->log2_max_frame_num_minus4 + 4)), 0); - else if (mopn == 2) - xue(long_term_pic_num, - current->rplm_l0[i].long_term_pic_num, - 0, sps->max_num_ref_frames - 1, 0); - } - } - } - - if (current->slice_type % 5 == 1) { - flag(ref_pic_list_modification_flag_l1); - if (current->ref_pic_list_modification_flag_l1) { - for (i = 0; i < H264_MAX_RPLM_COUNT; i++) { - xue(modification_of_pic_nums_idc, - current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3, 0); - - mopn = current->rplm_l1[i].modification_of_pic_nums_idc; - if (mopn == 3) - break; - - if (mopn == 0 || mopn == 1) - xue(abs_diff_pic_num_minus1, - current->rplm_l1[i].abs_diff_pic_num_minus1, - 0, (1 + current->field_pic_flag) * - (1 << (sps->log2_max_frame_num_minus4 + 4)), 0); - else if (mopn == 2) - xue(long_term_pic_num, - current->rplm_l1[i].long_term_pic_num, - 0, sps->max_num_ref_frames - 1, 0); - } - } - } - - return 0; -} - -static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps = h264->active_sps; - int chroma; - int err, i, j; - - ue(luma_log2_weight_denom, 0, 7); - - chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0; - if (chroma) - ue(chroma_log2_weight_denom, 0, 7); - - for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - flags(luma_weight_l0_flag[i], 1, i); - if (current->luma_weight_l0_flag[i]) { - ses(luma_weight_l0[i], -128, +127, 1, i); - ses(luma_offset_l0[i], -128, +127, 1, i); - } - if (chroma) { - flags(chroma_weight_l0_flag[i], 1, i); - if (current->chroma_weight_l0_flag[i]) { - for (j = 
0; j < 2; j++) { - ses(chroma_weight_l0[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l0[i][j], -128, +127, 2, i, j); - } - } - } - } - - if (current->slice_type % 5 == 1) { - for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - flags(luma_weight_l1_flag[i], 1, i); - if (current->luma_weight_l1_flag[i]) { - ses(luma_weight_l1[i], -128, +127, 1, i); - ses(luma_offset_l1[i], -128, +127, 1, i); - } - if (chroma) { - flags(chroma_weight_l1_flag[i], 1, i); - if (current->chroma_weight_l1_flag[i]) { - for (j = 0; j < 2; j++) { - ses(chroma_weight_l1[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l1[i][j], -128, +127, 2, i, j); - } - } - } - } - } - - return 0; -} - -static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current, int idr_pic_flag) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps = h264->active_sps; - int err, i; - uint32_t mmco; - - if (idr_pic_flag) { - flag(no_output_of_prior_pics_flag); - flag(long_term_reference_flag); - } else { - flag(adaptive_ref_pic_marking_mode_flag); - if (current->adaptive_ref_pic_marking_mode_flag) { - for (i = 0; i < H264_MAX_MMCO_COUNT; i++) { - xue(memory_management_control_operation, - current->mmco[i].memory_management_control_operation, - 0, 6, 0); - - mmco = current->mmco[i].memory_management_control_operation; - if (mmco == 0) - break; - - if (mmco == 1 || mmco == 3) - xue(difference_of_pic_nums_minus1, - current->mmco[i].difference_of_pic_nums_minus1, - 0, INT32_MAX, 0); - if (mmco == 2) - xue(long_term_pic_num, - current->mmco[i].long_term_pic_num, - 0, sps->max_num_ref_frames - 1, 0); - if (mmco == 3 || mmco == 6) - xue(long_term_frame_idx, - current->mmco[i].long_term_frame_idx, - 0, sps->max_num_ref_frames - 1, 0); - if (mmco == 4) - xue(max_long_term_frame_idx_plus1, - current->mmco[i].max_long_term_frame_idx_plus1, - 0, sps->max_num_ref_frames, 0); - } - if (i == H264_MAX_MMCO_COUNT) { - av_log(ctx->log_ctx, 
AV_LOG_ERROR, "Too many " - "memory management control operations.\n"); - return AVERROR_INVALIDDATA; - } - } - } - - return 0; -} - -static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawSliceHeader *current) -{ - CodedBitstreamH264Context *h264 = ctx->priv_data; - const H264RawSPS *sps; - const H264RawPPS *pps; - int err; - int idr_pic_flag; - int slice_type_i, slice_type_p, slice_type_b; - int slice_type_si, slice_type_sp; - - HEADER("Slice Header"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_SLICE | - 1 << H264_NAL_IDR_SLICE | - 1 << H264_NAL_AUXILIARY_SLICE)); - - if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) { - if (!h264->last_slice_nal_unit_type) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice " - "is not decodable without the main picture " - "in the same access unit.\n"); - return AVERROR_INVALIDDATA; - } - idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE; - } else { - idr_pic_flag = current->nal_unit_header.nal_unit_type == H264_NAL_IDR_SLICE; - } - - ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1); - ue(slice_type, 0, 9); - - slice_type_i = current->slice_type % 5 == 2; - slice_type_p = current->slice_type % 5 == 0; - slice_type_b = current->slice_type % 5 == 1; - slice_type_si = current->slice_type % 5 == 4; - slice_type_sp = current->slice_type % 5 == 3; - - if (idr_pic_flag && !(slice_type_i || slice_type_si)) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d " - "for IDR picture.\n", current->slice_type); - return AVERROR_INVALIDDATA; - } - - ue(pic_parameter_set_id, 0, 255); - - pps = h264->pps[current->pic_parameter_set_id]; - if (!pps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n", - current->pic_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h264->active_pps = pps; - - sps = h264->sps[pps->seq_parameter_set_id]; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not 
available.\n", - pps->seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h264->active_sps = sps; - - if (sps->separate_colour_plane_flag) - u(2, colour_plane_id, 0, 2); - - ub(sps->log2_max_frame_num_minus4 + 4, frame_num); - - if (!sps->frame_mbs_only_flag) { - flag(field_pic_flag); - if (current->field_pic_flag) - flag(bottom_field_flag); - else - infer(bottom_field_flag, 0); - } else { - infer(field_pic_flag, 0); - infer(bottom_field_flag, 0); - } - - if (idr_pic_flag) - ue(idr_pic_id, 0, 65535); - - if (sps->pic_order_cnt_type == 0) { - ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb); - if (pps->bottom_field_pic_order_in_frame_present_flag && - !current->field_pic_flag) - se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX); - - } else if (sps->pic_order_cnt_type == 1) { - if (!sps->delta_pic_order_always_zero_flag) { - se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX); - if (pps->bottom_field_pic_order_in_frame_present_flag && - !current->field_pic_flag) - se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX); - else - infer(delta_pic_order_cnt[1], 0); - } else { - infer(delta_pic_order_cnt[0], 0); - infer(delta_pic_order_cnt[1], 0); - } - } - - if (pps->redundant_pic_cnt_present_flag) - ue(redundant_pic_cnt, 0, 127); - else - infer(redundant_pic_cnt, 0); - - if (current->nal_unit_header.nal_unit_type != H264_NAL_AUXILIARY_SLICE - && !current->redundant_pic_cnt) - h264->last_slice_nal_unit_type = - current->nal_unit_header.nal_unit_type; - - if (slice_type_b) - flag(direct_spatial_mv_pred_flag); - - if (slice_type_p || slice_type_sp || slice_type_b) { - flag(num_ref_idx_active_override_flag); - if (current->num_ref_idx_active_override_flag) { - ue(num_ref_idx_l0_active_minus1, 0, 31); - if (slice_type_b) - ue(num_ref_idx_l1_active_minus1, 0, 31); - } else { - infer(num_ref_idx_l0_active_minus1, - pps->num_ref_idx_l0_default_active_minus1); - infer(num_ref_idx_l1_active_minus1, - pps->num_ref_idx_l1_default_active_minus1); - } - 
} - - if (current->nal_unit_header.nal_unit_type == 20 || - current->nal_unit_header.nal_unit_type == 21) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n"); - return AVERROR_PATCHWELCOME; - } else { - CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current)); - } - - if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) || - (pps->weighted_bipred_idc == 1 && slice_type_b)) { - CHECK(FUNC(pred_weight_table)(ctx, rw, current)); - } - - if (current->nal_unit_header.nal_ref_idc != 0) { - CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag)); - } - - if (pps->entropy_coding_mode_flag && - !slice_type_i && !slice_type_si) { - ue(cabac_init_idc, 0, 2); - } - - se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8, - + 51 + 6 * sps->bit_depth_luma_minus8); - if (slice_type_sp || slice_type_si) { - if (slice_type_sp) - flag(sp_for_switch_flag); - se(slice_qs_delta, -51, +51); - } - - if (pps->deblocking_filter_control_present_flag) { - ue(disable_deblocking_filter_idc, 0, 2); - if (current->disable_deblocking_filter_idc != 1) { - se(slice_alpha_c0_offset_div2, -6, +6); - se(slice_beta_offset_div2, -6, +6); - } else { - infer(slice_alpha_c0_offset_div2, 0); - infer(slice_beta_offset_div2, 0); - } - } else { - infer(disable_deblocking_filter_idc, 0); - infer(slice_alpha_c0_offset_div2, 0); - infer(slice_beta_offset_div2, 0); - } - - if (pps->num_slice_groups_minus1 > 0 && - pps->slice_group_map_type >= 3 && - pps->slice_group_map_type <= 5) { - unsigned int pic_size, max, bits; - - pic_size = (sps->pic_width_in_mbs_minus1 + 1) * - (sps->pic_height_in_map_units_minus1 + 1); - max = (pic_size + pps->slice_group_change_rate_minus1) / - (pps->slice_group_change_rate_minus1 + 1); - bits = av_ceil_log2(max + 1); - - u(bits, slice_group_change_cycle, 0, max); - } - - if (pps->entropy_coding_mode_flag) { - while (byte_alignment(rw)) - fixed(1, cabac_alignment_one_bit, 1); - } - - return 0; -} - -static int 
FUNC(filler)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawFiller *current) -{ - int err; - - HEADER("Filler Data"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - 1 << H264_NAL_FILLER_DATA)); - -#ifdef READ - while (show_bits(rw, 8) == 0xff) { - fixed(8, ff_byte, 0xff); - ++current->filler_size; - } -#else - { - uint32_t i; - for (i = 0; i < current->filler_size; i++) - fixed(8, ff_byte, 0xff); - } -#endif - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(end_of_sequence)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawNALUnitHeader *current) -{ - HEADER("End of Sequence"); - - return FUNC(nal_unit_header)(ctx, rw, current, - 1 << H264_NAL_END_SEQUENCE); -} - -static int FUNC(end_of_stream)(CodedBitstreamContext *ctx, RWContext *rw, - H264RawNALUnitHeader *current) -{ - HEADER("End of Stream"); - - return FUNC(nal_unit_header)(ctx, rw, current, - 1 << H264_NAL_END_STREAM); -} diff --git a/third-party/cbs/cbs_h265_syntax_template.c b/third-party/cbs/cbs_h265_syntax_template.c deleted file mode 100644 index 2d4b9547185..00000000000 --- a/third-party/cbs/cbs_h265_syntax_template.c +++ /dev/null @@ -1,2101 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) -{ - int err; - - fixed(1, rbsp_stop_one_bit, 1); - while (byte_alignment(rw) != 0) - fixed(1, rbsp_alignment_zero_bit, 0); - - return 0; -} - -static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawNALUnitHeader *current, - int expected_nal_unit_type) -{ - int err; - - fixed(1, forbidden_zero_bit, 0); - - if (expected_nal_unit_type >= 0) - u(6, nal_unit_type, expected_nal_unit_type, - expected_nal_unit_type); - else - ub(6, nal_unit_type); - - u(6, nuh_layer_id, 0, 62); - u(3, nuh_temporal_id_plus1, 1, 7); - - return 0; -} - -static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw) -{ - int err; - - fixed(1, alignment_bit_equal_to_one, 1); - while (byte_alignment(rw) != 0) - fixed(1, alignment_bit_equal_to_zero, 0); - - return 0; -} - -static int FUNC(extension_data)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawExtensionData *current) -{ - int err; - size_t k; -#ifdef READ - GetBitContext start; - uint8_t bit; - start = *rw; - for (k = 0; cbs_h2645_read_more_rbsp_data(rw); k++) - skip_bits(rw, 1); - current->bit_length = k; - if (k > 0) { - *rw = start; - allocate(current->data, (current->bit_length + 7) / 8); - for (k = 0; k < current->bit_length; k++) { - xu(1, extension_data, bit, 0, 1, 0); - current->data[k / 8] |= bit << (7 - k % 8); - } - } -#else - for (k = 0; k < current->bit_length; k++) - xu(1, extension_data, current->data[k / 8] >> (7 - k % 8) & 1, 0, 1, 0); -#endif - return 0; -} - -static int FUNC(profile_tier_level)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawProfileTierLevel *current, - int profile_present_flag, - int max_num_sub_layers_minus1) -{ - int err, i, j; - - if 
(profile_present_flag) { - u(2, general_profile_space, 0, 0); - flag(general_tier_flag); - ub(5, general_profile_idc); - - for (j = 0; j < 32; j++) - flags(general_profile_compatibility_flag[j], 1, j); - - flag(general_progressive_source_flag); - flag(general_interlaced_source_flag); - flag(general_non_packed_constraint_flag); - flag(general_frame_only_constraint_flag); - -#define profile_compatible(x) (current->general_profile_idc == (x) || \ - current->general_profile_compatibility_flag[x]) - if (profile_compatible(4) || profile_compatible(5) || - profile_compatible(6) || profile_compatible(7) || - profile_compatible(8) || profile_compatible(9) || - profile_compatible(10) || profile_compatible(11)) { - flag(general_max_12bit_constraint_flag); - flag(general_max_10bit_constraint_flag); - flag(general_max_8bit_constraint_flag); - flag(general_max_422chroma_constraint_flag); - flag(general_max_420chroma_constraint_flag); - flag(general_max_monochrome_constraint_flag); - flag(general_intra_constraint_flag); - flag(general_one_picture_only_constraint_flag); - flag(general_lower_bit_rate_constraint_flag); - - if (profile_compatible(5) || profile_compatible(9) || - profile_compatible(10) || profile_compatible(11)) { - flag(general_max_14bit_constraint_flag); - fixed(24, general_reserved_zero_33bits, 0); - fixed( 9, general_reserved_zero_33bits, 0); - } else { - fixed(24, general_reserved_zero_34bits, 0); - fixed(10, general_reserved_zero_34bits, 0); - } - } else if (profile_compatible(2)) { - fixed(7, general_reserved_zero_7bits, 0); - flag(general_one_picture_only_constraint_flag); - fixed(24, general_reserved_zero_35bits, 0); - fixed(11, general_reserved_zero_35bits, 0); - } else { - fixed(24, general_reserved_zero_43bits, 0); - fixed(19, general_reserved_zero_43bits, 0); - } - - if (profile_compatible(1) || profile_compatible(2) || - profile_compatible(3) || profile_compatible(4) || - profile_compatible(5) || profile_compatible(9) || - profile_compatible(11)) { - 
flag(general_inbld_flag); - } else { - fixed(1, general_reserved_zero_bit, 0); - } -#undef profile_compatible - } - - ub(8, general_level_idc); - - for (i = 0; i < max_num_sub_layers_minus1; i++) { - flags(sub_layer_profile_present_flag[i], 1, i); - flags(sub_layer_level_present_flag[i], 1, i); - } - - if (max_num_sub_layers_minus1 > 0) { - for (i = max_num_sub_layers_minus1; i < 8; i++) - fixed(2, reserved_zero_2bits, 0); - } - - for (i = 0; i < max_num_sub_layers_minus1; i++) { - if (current->sub_layer_profile_present_flag[i]) { - us(2, sub_layer_profile_space[i], 0, 0, 1, i); - flags(sub_layer_tier_flag[i], 1, i); - ubs(5, sub_layer_profile_idc[i], 1, i); - - for (j = 0; j < 32; j++) - flags(sub_layer_profile_compatibility_flag[i][j], 2, i, j); - - flags(sub_layer_progressive_source_flag[i], 1, i); - flags(sub_layer_interlaced_source_flag[i], 1, i); - flags(sub_layer_non_packed_constraint_flag[i], 1, i); - flags(sub_layer_frame_only_constraint_flag[i], 1, i); - -#define profile_compatible(x) (current->sub_layer_profile_idc[i] == (x) || \ - current->sub_layer_profile_compatibility_flag[i][x]) - if (profile_compatible(4) || profile_compatible(5) || - profile_compatible(6) || profile_compatible(7) || - profile_compatible(8) || profile_compatible(9) || - profile_compatible(10) || profile_compatible(11)) { - flags(sub_layer_max_12bit_constraint_flag[i], 1, i); - flags(sub_layer_max_10bit_constraint_flag[i], 1, i); - flags(sub_layer_max_8bit_constraint_flag[i], 1, i); - flags(sub_layer_max_422chroma_constraint_flag[i], 1, i); - flags(sub_layer_max_420chroma_constraint_flag[i], 1, i); - flags(sub_layer_max_monochrome_constraint_flag[i], 1, i); - flags(sub_layer_intra_constraint_flag[i], 1, i); - flags(sub_layer_one_picture_only_constraint_flag[i], 1, i); - flags(sub_layer_lower_bit_rate_constraint_flag[i], 1, i); - - if (profile_compatible(5) || profile_compatible(9) || - profile_compatible(10) || profile_compatible(11)) { - 
flags(sub_layer_max_14bit_constraint_flag[i], 1, i); - fixed(24, sub_layer_reserved_zero_33bits, 0); - fixed( 9, sub_layer_reserved_zero_33bits, 0); - } else { - fixed(24, sub_layer_reserved_zero_34bits, 0); - fixed(10, sub_layer_reserved_zero_34bits, 0); - } - } else if (profile_compatible(2)) { - fixed(7, sub_layer_reserved_zero_7bits, 0); - flags(sub_layer_one_picture_only_constraint_flag[i], 1, i); - fixed(24, sub_layer_reserved_zero_43bits, 0); - fixed(11, sub_layer_reserved_zero_43bits, 0); - } else { - fixed(24, sub_layer_reserved_zero_43bits, 0); - fixed(19, sub_layer_reserved_zero_43bits, 0); - } - - if (profile_compatible(1) || profile_compatible(2) || - profile_compatible(3) || profile_compatible(4) || - profile_compatible(5) || profile_compatible(9) || - profile_compatible(11)) { - flags(sub_layer_inbld_flag[i], 1, i); - } else { - fixed(1, sub_layer_reserved_zero_bit, 0); - } -#undef profile_compatible - } - if (current->sub_layer_level_present_flag[i]) - ubs(8, sub_layer_level_idc[i], 1, i); - } - - return 0; -} - -static int FUNC(sub_layer_hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawHRDParameters *hrd, - int nal, int sub_layer_id) -{ - H265RawSubLayerHRDParameters *current; - int err, i; - - if (nal) - current = &hrd->nal_sub_layer_hrd_parameters[sub_layer_id]; - else - current = &hrd->vcl_sub_layer_hrd_parameters[sub_layer_id]; - - for (i = 0; i <= hrd->cpb_cnt_minus1[sub_layer_id]; i++) { - ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - if (hrd->sub_pic_hrd_params_present_flag) { - ues(cpb_size_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - ues(bit_rate_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i); - } - flags(cbr_flag[i], 1, i); - } - - return 0; -} - -static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawHRDParameters *current, int common_inf_present_flag, - int max_num_sub_layers_minus1) -{ - int err, i; - - if 
(common_inf_present_flag) { - flag(nal_hrd_parameters_present_flag); - flag(vcl_hrd_parameters_present_flag); - - if (current->nal_hrd_parameters_present_flag || - current->vcl_hrd_parameters_present_flag) { - flag(sub_pic_hrd_params_present_flag); - if (current->sub_pic_hrd_params_present_flag) { - ub(8, tick_divisor_minus2); - ub(5, du_cpb_removal_delay_increment_length_minus1); - flag(sub_pic_cpb_params_in_pic_timing_sei_flag); - ub(5, dpb_output_delay_du_length_minus1); - } - - ub(4, bit_rate_scale); - ub(4, cpb_size_scale); - if (current->sub_pic_hrd_params_present_flag) - ub(4, cpb_size_du_scale); - - ub(5, initial_cpb_removal_delay_length_minus1); - ub(5, au_cpb_removal_delay_length_minus1); - ub(5, dpb_output_delay_length_minus1); - } else { - infer(sub_pic_hrd_params_present_flag, 0); - - infer(initial_cpb_removal_delay_length_minus1, 23); - infer(au_cpb_removal_delay_length_minus1, 23); - infer(dpb_output_delay_length_minus1, 23); - } - } - - for (i = 0; i <= max_num_sub_layers_minus1; i++) { - flags(fixed_pic_rate_general_flag[i], 1, i); - - if (!current->fixed_pic_rate_general_flag[i]) - flags(fixed_pic_rate_within_cvs_flag[i], 1, i); - else - infer(fixed_pic_rate_within_cvs_flag[i], 1); - - if (current->fixed_pic_rate_within_cvs_flag[i]) { - ues(elemental_duration_in_tc_minus1[i], 0, 2047, 1, i); - infer(low_delay_hrd_flag[i], 0); - } else - flags(low_delay_hrd_flag[i], 1, i); - - if (!current->low_delay_hrd_flag[i]) - ues(cpb_cnt_minus1[i], 0, 31, 1, i); - else - infer(cpb_cnt_minus1[i], 0); - - if (current->nal_hrd_parameters_present_flag) - CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 0, i)); - if (current->vcl_hrd_parameters_present_flag) - CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 1, i)); - } - - return 0; -} - -static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawVUI *current, const H265RawSPS *sps) -{ - int err; - - flag(aspect_ratio_info_present_flag); - if 
(current->aspect_ratio_info_present_flag) { - ub(8, aspect_ratio_idc); - if (current->aspect_ratio_idc == 255) { - ub(16, sar_width); - ub(16, sar_height); - } - } else { - infer(aspect_ratio_idc, 0); - } - - flag(overscan_info_present_flag); - if (current->overscan_info_present_flag) - flag(overscan_appropriate_flag); - - flag(video_signal_type_present_flag); - if (current->video_signal_type_present_flag) { - ub(3, video_format); - flag(video_full_range_flag); - flag(colour_description_present_flag); - if (current->colour_description_present_flag) { - ub(8, colour_primaries); - ub(8, transfer_characteristics); - ub(8, matrix_coefficients); - } else { - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - } else { - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - - flag(chroma_loc_info_present_flag); - if (current->chroma_loc_info_present_flag) { - ue(chroma_sample_loc_type_top_field, 0, 5); - ue(chroma_sample_loc_type_bottom_field, 0, 5); - } else { - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - } - - flag(neutral_chroma_indication_flag); - flag(field_seq_flag); - flag(frame_field_info_present_flag); - - flag(default_display_window_flag); - if (current->default_display_window_flag) { - ue(def_disp_win_left_offset, 0, 16384); - ue(def_disp_win_right_offset, 0, 16384); - ue(def_disp_win_top_offset, 0, 16384); - ue(def_disp_win_bottom_offset, 0, 16384); - } - - flag(vui_timing_info_present_flag); - if (current->vui_timing_info_present_flag) { - u(32, vui_num_units_in_tick, 1, UINT32_MAX); - u(32, vui_time_scale, 1, UINT32_MAX); - flag(vui_poc_proportional_to_timing_flag); - if (current->vui_poc_proportional_to_timing_flag) - ue(vui_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1); - - flag(vui_hrd_parameters_present_flag); - if 
(current->vui_hrd_parameters_present_flag) { - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->hrd_parameters, - 1, sps->sps_max_sub_layers_minus1)); - } - } - - flag(bitstream_restriction_flag); - if (current->bitstream_restriction_flag) { - flag(tiles_fixed_structure_flag); - flag(motion_vectors_over_pic_boundaries_flag); - flag(restricted_ref_pic_lists_flag); - ue(min_spatial_segmentation_idc, 0, 4095); - ue(max_bytes_per_pic_denom, 0, 16); - ue(max_bits_per_min_cu_denom, 0, 16); - ue(log2_max_mv_length_horizontal, 0, 16); - ue(log2_max_mv_length_vertical, 0, 16); - } else { - infer(tiles_fixed_structure_flag, 0); - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(min_spatial_segmentation_idc, 0); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_min_cu_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - } - - return 0; -} - -static int FUNC(vps)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawVPS *current) -{ - int err, i, j; - - HEADER("Video Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_VPS)); - - ub(4, vps_video_parameter_set_id); - - flag(vps_base_layer_internal_flag); - flag(vps_base_layer_available_flag); - u(6, vps_max_layers_minus1, 0, HEVC_MAX_LAYERS - 1); - u(3, vps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1); - flag(vps_temporal_id_nesting_flag); - - if (current->vps_max_sub_layers_minus1 == 0 && - current->vps_temporal_id_nesting_flag != 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "vps_temporal_id_nesting_flag must be 1 if " - "vps_max_sub_layers_minus1 is 0.\n"); - return AVERROR_INVALIDDATA; - } - - fixed(16, vps_reserved_0xffff_16bits, 0xffff); - - CHECK(FUNC(profile_tier_level)(ctx, rw, ¤t->profile_tier_level, - 1, current->vps_max_sub_layers_minus1)); - - flag(vps_sub_layer_ordering_info_present_flag); - for (i = (current->vps_sub_layer_ordering_info_present_flag ? 
- 0 : current->vps_max_sub_layers_minus1); - i <= current->vps_max_sub_layers_minus1; i++) { - ues(vps_max_dec_pic_buffering_minus1[i], - 0, HEVC_MAX_DPB_SIZE - 1, 1, i); - ues(vps_max_num_reorder_pics[i], - 0, current->vps_max_dec_pic_buffering_minus1[i], 1, i); - ues(vps_max_latency_increase_plus1[i], - 0, UINT32_MAX - 1, 1, i); - } - if (!current->vps_sub_layer_ordering_info_present_flag) { - for (i = 0; i < current->vps_max_sub_layers_minus1; i++) { - infer(vps_max_dec_pic_buffering_minus1[i], - current->vps_max_dec_pic_buffering_minus1[current->vps_max_sub_layers_minus1]); - infer(vps_max_num_reorder_pics[i], - current->vps_max_num_reorder_pics[current->vps_max_sub_layers_minus1]); - infer(vps_max_latency_increase_plus1[i], - current->vps_max_latency_increase_plus1[current->vps_max_sub_layers_minus1]); - } - } - - u(6, vps_max_layer_id, 0, HEVC_MAX_LAYERS - 1); - ue(vps_num_layer_sets_minus1, 0, HEVC_MAX_LAYER_SETS - 1); - for (i = 1; i <= current->vps_num_layer_sets_minus1; i++) { - for (j = 0; j <= current->vps_max_layer_id; j++) - flags(layer_id_included_flag[i][j], 2, i, j); - } - for (j = 0; j <= current->vps_max_layer_id; j++) - infer(layer_id_included_flag[0][j], j == 0); - - flag(vps_timing_info_present_flag); - if (current->vps_timing_info_present_flag) { - u(32, vps_num_units_in_tick, 1, UINT32_MAX); - u(32, vps_time_scale, 1, UINT32_MAX); - flag(vps_poc_proportional_to_timing_flag); - if (current->vps_poc_proportional_to_timing_flag) - ue(vps_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1); - ue(vps_num_hrd_parameters, 0, current->vps_num_layer_sets_minus1 + 1); - for (i = 0; i < current->vps_num_hrd_parameters; i++) { - ues(hrd_layer_set_idx[i], - current->vps_base_layer_internal_flag ? 
0 : 1, - current->vps_num_layer_sets_minus1, 1, i); - if (i > 0) - flags(cprms_present_flag[i], 1, i); - else - infer(cprms_present_flag[0], 1); - - CHECK(FUNC(hrd_parameters)(ctx, rw, ¤t->hrd_parameters[i], - current->cprms_present_flag[i], - current->vps_max_sub_layers_minus1)); - } - } - - flag(vps_extension_flag); - if (current->vps_extension_flag) - CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(st_ref_pic_set)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSTRefPicSet *current, int st_rps_idx, - const H265RawSPS *sps) -{ - int err, i, j; - - if (st_rps_idx != 0) - flag(inter_ref_pic_set_prediction_flag); - else - infer(inter_ref_pic_set_prediction_flag, 0); - - if (current->inter_ref_pic_set_prediction_flag) { - unsigned int ref_rps_idx, num_delta_pocs, num_ref_pics; - const H265RawSTRefPicSet *ref; - int delta_rps, d_poc; - int ref_delta_poc_s0[HEVC_MAX_REFS], ref_delta_poc_s1[HEVC_MAX_REFS]; - int delta_poc_s0[HEVC_MAX_REFS], delta_poc_s1[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_s0[HEVC_MAX_REFS], - used_by_curr_pic_s1[HEVC_MAX_REFS]; - - if (st_rps_idx == sps->num_short_term_ref_pic_sets) - ue(delta_idx_minus1, 0, st_rps_idx - 1); - else - infer(delta_idx_minus1, 0); - - ref_rps_idx = st_rps_idx - (current->delta_idx_minus1 + 1); - ref = &sps->st_ref_pic_set[ref_rps_idx]; - num_delta_pocs = ref->num_negative_pics + ref->num_positive_pics; - av_assert0(num_delta_pocs < HEVC_MAX_DPB_SIZE); - - flag(delta_rps_sign); - ue(abs_delta_rps_minus1, 0, INT16_MAX); - delta_rps = (1 - 2 * current->delta_rps_sign) * - (current->abs_delta_rps_minus1 + 1); - - num_ref_pics = 0; - for (j = 0; j <= num_delta_pocs; j++) { - flags(used_by_curr_pic_flag[j], 1, j); - if (!current->used_by_curr_pic_flag[j]) - flags(use_delta_flag[j], 1, j); - else - infer(use_delta_flag[j], 1); - if (current->use_delta_flag[j]) - ++num_ref_pics; - } - if (num_ref_pics >= HEVC_MAX_DPB_SIZE) { - 
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "short-term ref pic set %d " - "contains too many pictures.\n", st_rps_idx); - return AVERROR_INVALIDDATA; - } - - // Since the stored form of an RPS here is actually the delta-step - // form used when inter_ref_pic_set_prediction_flag is not set, we - // need to reconstruct that here in order to be able to refer to - // the RPS later (which is required for parsing, because we don't - // even know what syntax elements appear without it). Therefore, - // this code takes the delta-step form of the reference set, turns - // it into the delta-array form, applies the prediction process of - // 7.4.8, converts the result back to the delta-step form, and - // stores that as the current set for future use. Note that the - // inferences here mean that writers using prediction will need - // to fill in the delta-step values correctly as well - since the - // whole RPS prediction process is somewhat overly sophisticated, - // this hopefully forms a useful check for them to ensure their - // predicted form actually matches what was intended rather than - // an onerous additional requirement. 
- - d_poc = 0; - for (i = 0; i < ref->num_negative_pics; i++) { - d_poc -= ref->delta_poc_s0_minus1[i] + 1; - ref_delta_poc_s0[i] = d_poc; - } - d_poc = 0; - for (i = 0; i < ref->num_positive_pics; i++) { - d_poc += ref->delta_poc_s1_minus1[i] + 1; - ref_delta_poc_s1[i] = d_poc; - } - - i = 0; - for (j = ref->num_positive_pics - 1; j >= 0; j--) { - d_poc = ref_delta_poc_s1[j] + delta_rps; - if (d_poc < 0 && current->use_delta_flag[ref->num_negative_pics + j]) { - delta_poc_s0[i] = d_poc; - used_by_curr_pic_s0[i++] = - current->used_by_curr_pic_flag[ref->num_negative_pics + j]; - } - } - if (delta_rps < 0 && current->use_delta_flag[num_delta_pocs]) { - delta_poc_s0[i] = delta_rps; - used_by_curr_pic_s0[i++] = - current->used_by_curr_pic_flag[num_delta_pocs]; - } - for (j = 0; j < ref->num_negative_pics; j++) { - d_poc = ref_delta_poc_s0[j] + delta_rps; - if (d_poc < 0 && current->use_delta_flag[j]) { - delta_poc_s0[i] = d_poc; - used_by_curr_pic_s0[i++] = current->used_by_curr_pic_flag[j]; - } - } - - infer(num_negative_pics, i); - for (i = 0; i < current->num_negative_pics; i++) { - infer(delta_poc_s0_minus1[i], - -(delta_poc_s0[i] - (i == 0 ? 
0 : delta_poc_s0[i - 1])) - 1); - infer(used_by_curr_pic_s0_flag[i], used_by_curr_pic_s0[i]); - } - - i = 0; - for (j = ref->num_negative_pics - 1; j >= 0; j--) { - d_poc = ref_delta_poc_s0[j] + delta_rps; - if (d_poc > 0 && current->use_delta_flag[j]) { - delta_poc_s1[i] = d_poc; - used_by_curr_pic_s1[i++] = current->used_by_curr_pic_flag[j]; - } - } - if (delta_rps > 0 && current->use_delta_flag[num_delta_pocs]) { - delta_poc_s1[i] = delta_rps; - used_by_curr_pic_s1[i++] = - current->used_by_curr_pic_flag[num_delta_pocs]; - } - for (j = 0; j < ref->num_positive_pics; j++) { - d_poc = ref_delta_poc_s1[j] + delta_rps; - if (d_poc > 0 && current->use_delta_flag[ref->num_negative_pics + j]) { - delta_poc_s1[i] = d_poc; - used_by_curr_pic_s1[i++] = - current->used_by_curr_pic_flag[ref->num_negative_pics + j]; - } - } - - infer(num_positive_pics, i); - for (i = 0; i < current->num_positive_pics; i++) { - infer(delta_poc_s1_minus1[i], - delta_poc_s1[i] - (i == 0 ? 0 : delta_poc_s1[i - 1]) - 1); - infer(used_by_curr_pic_s1_flag[i], used_by_curr_pic_s1[i]); - } - - } else { - ue(num_negative_pics, 0, 15); - ue(num_positive_pics, 0, 15 - current->num_negative_pics); - - for (i = 0; i < current->num_negative_pics; i++) { - ues(delta_poc_s0_minus1[i], 0, INT16_MAX, 1, i); - flags(used_by_curr_pic_s0_flag[i], 1, i); - } - - for (i = 0; i < current->num_positive_pics; i++) { - ues(delta_poc_s1_minus1[i], 0, INT16_MAX, 1, i); - flags(used_by_curr_pic_s1_flag[i], 1, i); - } - } - - return 0; -} - -static int FUNC(scaling_list_data)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawScalingList *current) -{ - int sizeId, matrixId; - int err, n, i; - - for (sizeId = 0; sizeId < 4; sizeId++) { - for (matrixId = 0; matrixId < 6; matrixId += (sizeId == 3 ? 
3 : 1)) { - flags(scaling_list_pred_mode_flag[sizeId][matrixId], - 2, sizeId, matrixId); - if (!current->scaling_list_pred_mode_flag[sizeId][matrixId]) { - ues(scaling_list_pred_matrix_id_delta[sizeId][matrixId], - 0, sizeId == 3 ? matrixId / 3 : matrixId, - 2, sizeId, matrixId); - } else { - n = FFMIN(64, 1 << (4 + (sizeId << 1))); - if (sizeId > 1) { - ses(scaling_list_dc_coef_minus8[sizeId - 2][matrixId], -7, +247, - 2, sizeId - 2, matrixId); - } - for (i = 0; i < n; i++) { - ses(scaling_list_delta_coeff[sizeId][matrixId][i], - -128, +127, 3, sizeId, matrixId, i); - } - } - } - } - - return 0; -} - -static int FUNC(sps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSPS *current) -{ - int err; - - flag(transform_skip_rotation_enabled_flag); - flag(transform_skip_context_enabled_flag); - flag(implicit_rdpcm_enabled_flag); - flag(explicit_rdpcm_enabled_flag); - flag(extended_precision_processing_flag); - flag(intra_smoothing_disabled_flag); - flag(high_precision_offsets_enabled_flag); - flag(persistent_rice_adaptation_enabled_flag); - flag(cabac_bypass_alignment_enabled_flag); - - return 0; -} - -static int FUNC(sps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSPS *current) -{ - int err, comp, i; - - flag(sps_curr_pic_ref_enabled_flag); - - flag(palette_mode_enabled_flag); - if (current->palette_mode_enabled_flag) { - ue(palette_max_size, 0, 64); - ue(delta_palette_max_predictor_size, 0, 128); - - flag(sps_palette_predictor_initializer_present_flag); - if (current->sps_palette_predictor_initializer_present_flag) { - ue(sps_num_palette_predictor_initializer_minus1, 0, 127); - for (comp = 0; comp < (current->chroma_format_idc ? 3 : 1); comp++) { - int bit_depth = comp == 0 ? 
current->bit_depth_luma_minus8 + 8 - : current->bit_depth_chroma_minus8 + 8; - for (i = 0; i <= current->sps_num_palette_predictor_initializer_minus1; i++) - ubs(bit_depth, sps_palette_predictor_initializers[comp][i], 2, comp, i); - } - } - } - - u(2, motion_vector_resolution_control_idc, 0, 2); - flag(intra_boundary_filtering_disable_flag); - - return 0; -} - -static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx, - RWContext *rw, H265RawVUI *current, - H265RawSPS *sps) -{ - infer(aspect_ratio_idc, 0); - - infer(video_format, 5); - infer(video_full_range_flag, 0); - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - - infer(chroma_sample_loc_type_top_field, 0); - infer(chroma_sample_loc_type_bottom_field, 0); - - infer(tiles_fixed_structure_flag, 0); - infer(motion_vectors_over_pic_boundaries_flag, 1); - infer(min_spatial_segmentation_idc, 0); - infer(max_bytes_per_pic_denom, 2); - infer(max_bits_per_min_cu_denom, 1); - infer(log2_max_mv_length_horizontal, 15); - infer(log2_max_mv_length_vertical, 15); - - return 0; -} - -static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSPS *current) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawVPS *vps; - int err, i; - unsigned int min_cb_log2_size_y, ctb_log2_size_y, - min_cb_size_y, min_tb_log2_size_y; - - HEADER("Sequence Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_SPS)); - - ub(4, sps_video_parameter_set_id); - h265->active_vps = vps = h265->vps[current->sps_video_parameter_set_id]; - - u(3, sps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1); - flag(sps_temporal_id_nesting_flag); - if (vps) { - if (vps->vps_max_sub_layers_minus1 > current->sps_max_sub_layers_minus1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "sps_max_sub_layers_minus1 (%d) must be less than or equal to " - "vps_max_sub_layers_minus1 (%d).\n", - vps->vps_max_sub_layers_minus1, - 
current->sps_max_sub_layers_minus1); - return AVERROR_INVALIDDATA; - } - if (vps->vps_temporal_id_nesting_flag && - !current->sps_temporal_id_nesting_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "sps_temporal_id_nesting_flag must be 1 if " - "vps_temporal_id_nesting_flag is 1.\n"); - return AVERROR_INVALIDDATA; - } - } - - CHECK(FUNC(profile_tier_level)(ctx, rw, ¤t->profile_tier_level, - 1, current->sps_max_sub_layers_minus1)); - - ue(sps_seq_parameter_set_id, 0, 15); - - ue(chroma_format_idc, 0, 3); - if (current->chroma_format_idc == 3) - flag(separate_colour_plane_flag); - else - infer(separate_colour_plane_flag, 0); - - ue(pic_width_in_luma_samples, 1, HEVC_MAX_WIDTH); - ue(pic_height_in_luma_samples, 1, HEVC_MAX_HEIGHT); - - flag(conformance_window_flag); - if (current->conformance_window_flag) { - ue(conf_win_left_offset, 0, current->pic_width_in_luma_samples); - ue(conf_win_right_offset, 0, current->pic_width_in_luma_samples); - ue(conf_win_top_offset, 0, current->pic_height_in_luma_samples); - ue(conf_win_bottom_offset, 0, current->pic_height_in_luma_samples); - } else { - infer(conf_win_left_offset, 0); - infer(conf_win_right_offset, 0); - infer(conf_win_top_offset, 0); - infer(conf_win_bottom_offset, 0); - } - - ue(bit_depth_luma_minus8, 0, 8); - ue(bit_depth_chroma_minus8, 0, 8); - - ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12); - - flag(sps_sub_layer_ordering_info_present_flag); - for (i = (current->sps_sub_layer_ordering_info_present_flag ? 
- 0 : current->sps_max_sub_layers_minus1); - i <= current->sps_max_sub_layers_minus1; i++) { - ues(sps_max_dec_pic_buffering_minus1[i], - 0, HEVC_MAX_DPB_SIZE - 1, 1, i); - ues(sps_max_num_reorder_pics[i], - 0, current->sps_max_dec_pic_buffering_minus1[i], 1, i); - ues(sps_max_latency_increase_plus1[i], - 0, UINT32_MAX - 1, 1, i); - } - if (!current->sps_sub_layer_ordering_info_present_flag) { - for (i = 0; i < current->sps_max_sub_layers_minus1; i++) { - infer(sps_max_dec_pic_buffering_minus1[i], - current->sps_max_dec_pic_buffering_minus1[current->sps_max_sub_layers_minus1]); - infer(sps_max_num_reorder_pics[i], - current->sps_max_num_reorder_pics[current->sps_max_sub_layers_minus1]); - infer(sps_max_latency_increase_plus1[i], - current->sps_max_latency_increase_plus1[current->sps_max_sub_layers_minus1]); - } - } - - ue(log2_min_luma_coding_block_size_minus3, 0, 3); - min_cb_log2_size_y = current->log2_min_luma_coding_block_size_minus3 + 3; - - ue(log2_diff_max_min_luma_coding_block_size, 0, 3); - ctb_log2_size_y = min_cb_log2_size_y + - current->log2_diff_max_min_luma_coding_block_size; - - min_cb_size_y = 1 << min_cb_log2_size_y; - if (current->pic_width_in_luma_samples % min_cb_size_y || - current->pic_height_in_luma_samples % min_cb_size_y) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid dimensions: %ux%u not divisible " - "by MinCbSizeY = %u.\n", current->pic_width_in_luma_samples, - current->pic_height_in_luma_samples, min_cb_size_y); - return AVERROR_INVALIDDATA; - } - - ue(log2_min_luma_transform_block_size_minus2, 0, min_cb_log2_size_y - 3); - min_tb_log2_size_y = current->log2_min_luma_transform_block_size_minus2 + 2; - - ue(log2_diff_max_min_luma_transform_block_size, - 0, FFMIN(ctb_log2_size_y, 5) - min_tb_log2_size_y); - - ue(max_transform_hierarchy_depth_inter, - 0, ctb_log2_size_y - min_tb_log2_size_y); - ue(max_transform_hierarchy_depth_intra, - 0, ctb_log2_size_y - min_tb_log2_size_y); - - flag(scaling_list_enabled_flag); - if 
(current->scaling_list_enabled_flag) { - flag(sps_scaling_list_data_present_flag); - if (current->sps_scaling_list_data_present_flag) - CHECK(FUNC(scaling_list_data)(ctx, rw, ¤t->scaling_list)); - } else { - infer(sps_scaling_list_data_present_flag, 0); - } - - flag(amp_enabled_flag); - flag(sample_adaptive_offset_enabled_flag); - - flag(pcm_enabled_flag); - if (current->pcm_enabled_flag) { - u(4, pcm_sample_bit_depth_luma_minus1, - 0, current->bit_depth_luma_minus8 + 8 - 1); - u(4, pcm_sample_bit_depth_chroma_minus1, - 0, current->bit_depth_chroma_minus8 + 8 - 1); - - ue(log2_min_pcm_luma_coding_block_size_minus3, - FFMIN(min_cb_log2_size_y, 5) - 3, FFMIN(ctb_log2_size_y, 5) - 3); - ue(log2_diff_max_min_pcm_luma_coding_block_size, - 0, FFMIN(ctb_log2_size_y, 5) - (current->log2_min_pcm_luma_coding_block_size_minus3 + 3)); - - flag(pcm_loop_filter_disabled_flag); - } - - ue(num_short_term_ref_pic_sets, 0, HEVC_MAX_SHORT_TERM_REF_PIC_SETS); - for (i = 0; i < current->num_short_term_ref_pic_sets; i++) - CHECK(FUNC(st_ref_pic_set)(ctx, rw, ¤t->st_ref_pic_set[i], i, current)); - - flag(long_term_ref_pics_present_flag); - if (current->long_term_ref_pics_present_flag) { - ue(num_long_term_ref_pics_sps, 0, HEVC_MAX_LONG_TERM_REF_PICS); - for (i = 0; i < current->num_long_term_ref_pics_sps; i++) { - ubs(current->log2_max_pic_order_cnt_lsb_minus4 + 4, - lt_ref_pic_poc_lsb_sps[i], 1, i); - flags(used_by_curr_pic_lt_sps_flag[i], 1, i); - } - } - - flag(sps_temporal_mvp_enabled_flag); - flag(strong_intra_smoothing_enabled_flag); - - flag(vui_parameters_present_flag); - if (current->vui_parameters_present_flag) - CHECK(FUNC(vui_parameters)(ctx, rw, ¤t->vui, current)); - else - CHECK(FUNC(vui_parameters_default)(ctx, rw, ¤t->vui, current)); - - flag(sps_extension_present_flag); - if (current->sps_extension_present_flag) { - flag(sps_range_extension_flag); - flag(sps_multilayer_extension_flag); - flag(sps_3d_extension_flag); - flag(sps_scc_extension_flag); - ub(4, 
sps_extension_4bits); - } - - if (current->sps_range_extension_flag) - CHECK(FUNC(sps_range_extension)(ctx, rw, current)); - if (current->sps_multilayer_extension_flag) - return AVERROR_PATCHWELCOME; - if (current->sps_3d_extension_flag) - return AVERROR_PATCHWELCOME; - if (current->sps_scc_extension_flag) - CHECK(FUNC(sps_scc_extension)(ctx, rw, current)); - if (current->sps_extension_4bits) - CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(pps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawPPS *current) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int err, i; - - if (current->transform_skip_enabled_flag) - ue(log2_max_transform_skip_block_size_minus2, 0, 3); - flag(cross_component_prediction_enabled_flag); - - flag(chroma_qp_offset_list_enabled_flag); - if (current->chroma_qp_offset_list_enabled_flag) { - ue(diff_cu_chroma_qp_offset_depth, - 0, sps->log2_diff_max_min_luma_coding_block_size); - ue(chroma_qp_offset_list_len_minus1, 0, 5); - for (i = 0; i <= current->chroma_qp_offset_list_len_minus1; i++) { - ses(cb_qp_offset_list[i], -12, +12, 1, i); - ses(cr_qp_offset_list[i], -12, +12, 1, i); - } - } - - ue(log2_sao_offset_scale_luma, 0, FFMAX(0, sps->bit_depth_luma_minus8 - 2)); - ue(log2_sao_offset_scale_chroma, 0, FFMAX(0, sps->bit_depth_chroma_minus8 - 2)); - - return 0; -} - -static int FUNC(pps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawPPS *current) -{ - int err, comp, i; - - flag(pps_curr_pic_ref_enabled_flag); - - flag(residual_adaptive_colour_transform_enabled_flag); - if (current->residual_adaptive_colour_transform_enabled_flag) { - flag(pps_slice_act_qp_offsets_present_flag); - se(pps_act_y_qp_offset_plus5, -7, +17); - se(pps_act_cb_qp_offset_plus5, -7, +17); - se(pps_act_cr_qp_offset_plus3, -9, +15); - } else { - infer(pps_slice_act_qp_offsets_present_flag, 
0); - infer(pps_act_y_qp_offset_plus5, 0); - infer(pps_act_cb_qp_offset_plus5, 0); - infer(pps_act_cr_qp_offset_plus3, 0); - } - - flag(pps_palette_predictor_initializer_present_flag); - if (current->pps_palette_predictor_initializer_present_flag) { - ue(pps_num_palette_predictor_initializer, 0, 128); - if (current->pps_num_palette_predictor_initializer > 0) { - flag(monochrome_palette_flag); - ue(luma_bit_depth_entry_minus8, 0, 8); - if (!current->monochrome_palette_flag) - ue(chroma_bit_depth_entry_minus8, 0, 8); - for (comp = 0; comp < (current->monochrome_palette_flag ? 1 : 3); comp++) { - int bit_depth = comp == 0 ? current->luma_bit_depth_entry_minus8 + 8 - : current->chroma_bit_depth_entry_minus8 + 8; - for (i = 0; i < current->pps_num_palette_predictor_initializer; i++) - ubs(bit_depth, pps_palette_predictor_initializers[comp][i], 2, comp, i); - } - } - } - - return 0; -} - -static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawPPS *current) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - int err, i; - - HEADER("Picture Parameter Set"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_PPS)); - - ue(pps_pic_parameter_set_id, 0, 63); - ue(pps_seq_parameter_set_id, 0, 15); - sps = h265->sps[current->pps_seq_parameter_set_id]; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - current->pps_seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_sps = sps; - - flag(dependent_slice_segments_enabled_flag); - flag(output_flag_present_flag); - ub(3, num_extra_slice_header_bits); - flag(sign_data_hiding_enabled_flag); - flag(cabac_init_present_flag); - - ue(num_ref_idx_l0_default_active_minus1, 0, 14); - ue(num_ref_idx_l1_default_active_minus1, 0, 14); - - se(init_qp_minus26, -(26 + 6 * sps->bit_depth_luma_minus8), +25); - - flag(constrained_intra_pred_flag); - flag(transform_skip_enabled_flag); - flag(cu_qp_delta_enabled_flag); - if 
(current->cu_qp_delta_enabled_flag) - ue(diff_cu_qp_delta_depth, - 0, sps->log2_diff_max_min_luma_coding_block_size); - else - infer(diff_cu_qp_delta_depth, 0); - - se(pps_cb_qp_offset, -12, +12); - se(pps_cr_qp_offset, -12, +12); - flag(pps_slice_chroma_qp_offsets_present_flag); - - flag(weighted_pred_flag); - flag(weighted_bipred_flag); - - flag(transquant_bypass_enabled_flag); - flag(tiles_enabled_flag); - flag(entropy_coding_sync_enabled_flag); - - if (current->tiles_enabled_flag) { - ue(num_tile_columns_minus1, 0, HEVC_MAX_TILE_COLUMNS); - ue(num_tile_rows_minus1, 0, HEVC_MAX_TILE_ROWS); - flag(uniform_spacing_flag); - if (!current->uniform_spacing_flag) { - for (i = 0; i < current->num_tile_columns_minus1; i++) - ues(column_width_minus1[i], 0, sps->pic_width_in_luma_samples, 1, i); - for (i = 0; i < current->num_tile_rows_minus1; i++) - ues(row_height_minus1[i], 0, sps->pic_height_in_luma_samples, 1, i); - } - flag(loop_filter_across_tiles_enabled_flag); - } else { - infer(num_tile_columns_minus1, 0); - infer(num_tile_rows_minus1, 0); - } - - flag(pps_loop_filter_across_slices_enabled_flag); - flag(deblocking_filter_control_present_flag); - if (current->deblocking_filter_control_present_flag) { - flag(deblocking_filter_override_enabled_flag); - flag(pps_deblocking_filter_disabled_flag); - if (!current->pps_deblocking_filter_disabled_flag) { - se(pps_beta_offset_div2, -6, +6); - se(pps_tc_offset_div2, -6, +6); - } else { - infer(pps_beta_offset_div2, 0); - infer(pps_tc_offset_div2, 0); - } - } else { - infer(deblocking_filter_override_enabled_flag, 0); - infer(pps_deblocking_filter_disabled_flag, 0); - infer(pps_beta_offset_div2, 0); - infer(pps_tc_offset_div2, 0); - } - - flag(pps_scaling_list_data_present_flag); - if (current->pps_scaling_list_data_present_flag) - CHECK(FUNC(scaling_list_data)(ctx, rw, ¤t->scaling_list)); - - flag(lists_modification_present_flag); - - ue(log2_parallel_merge_level_minus2, - 0, (sps->log2_min_luma_coding_block_size_minus3 + 3 
+ - sps->log2_diff_max_min_luma_coding_block_size - 2)); - - flag(slice_segment_header_extension_present_flag); - - flag(pps_extension_present_flag); - if (current->pps_extension_present_flag) { - flag(pps_range_extension_flag); - flag(pps_multilayer_extension_flag); - flag(pps_3d_extension_flag); - flag(pps_scc_extension_flag); - ub(4, pps_extension_4bits); - } - if (current->pps_range_extension_flag) - CHECK(FUNC(pps_range_extension)(ctx, rw, current)); - if (current->pps_multilayer_extension_flag) - return AVERROR_PATCHWELCOME; - if (current->pps_3d_extension_flag) - return AVERROR_PATCHWELCOME; - if (current->pps_scc_extension_flag) - CHECK(FUNC(pps_scc_extension)(ctx, rw, current)); - if (current->pps_extension_4bits) - CHECK(FUNC(extension_data)(ctx, rw, ¤t->extension_data)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawAUD *current) -{ - int err; - - HEADER("Access Unit Delimiter"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, HEVC_NAL_AUD)); - - u(3, pic_type, 0, 2); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(ref_pic_lists_modification)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSliceHeader *current, - unsigned int num_pic_total_curr) -{ - unsigned int entry_size; - int err, i; - - entry_size = av_log2(num_pic_total_curr - 1) + 1; - - flag(ref_pic_list_modification_flag_l0); - if (current->ref_pic_list_modification_flag_l0) { - for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) - us(entry_size, list_entry_l0[i], 0, num_pic_total_curr - 1, 1, i); - } - - if (current->slice_type == HEVC_SLICE_B) { - flag(ref_pic_list_modification_flag_l1); - if (current->ref_pic_list_modification_flag_l1) { - for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) - us(entry_size, list_entry_l1[i], 0, num_pic_total_curr - 1, 1, i); - } - } - - return 0; -} - -static int 
FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSliceHeader *current) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int err, i, j; - int chroma = !sps->separate_colour_plane_flag && - sps->chroma_format_idc != 0; - - ue(luma_log2_weight_denom, 0, 7); - if (chroma) - se(delta_chroma_log2_weight_denom, -7, 7); - else - infer(delta_chroma_log2_weight_denom, 0); - - for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - if (1 /* is not same POC and same layer_id */) - flags(luma_weight_l0_flag[i], 1, i); - else - infer(luma_weight_l0_flag[i], 0); - } - if (chroma) { - for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - if (1 /* is not same POC and same layer_id */) - flags(chroma_weight_l0_flag[i], 1, i); - else - infer(chroma_weight_l0_flag[i], 0); - } - } - - for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) { - if (current->luma_weight_l0_flag[i]) { - ses(delta_luma_weight_l0[i], -128, +127, 1, i); - ses(luma_offset_l0[i], - -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)), - ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i); - } else { - infer(delta_luma_weight_l0[i], 0); - infer(luma_offset_l0[i], 0); - } - if (current->chroma_weight_l0_flag[i]) { - for (j = 0; j < 2; j++) { - ses(delta_chroma_weight_l0[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l0[i][j], - -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)), - ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j); - } - } else { - for (j = 0; j < 2; j++) { - infer(delta_chroma_weight_l0[i][j], 0); - infer(chroma_offset_l0[i][j], 0); - } - } - } - - if (current->slice_type == HEVC_SLICE_B) { - for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */) - flags(luma_weight_l1_flag[i], 1, i); - else - infer(luma_weight_l1_flag[i], 0); - } - if (chroma) { - for (i = 0; i <= current->num_ref_idx_l1_active_minus1; 
i++) { - if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */) - flags(chroma_weight_l1_flag[i], 1, i); - else - infer(chroma_weight_l1_flag[i], 0); - } - } - - for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) { - if (current->luma_weight_l1_flag[i]) { - ses(delta_luma_weight_l1[i], -128, +127, 1, i); - ses(luma_offset_l1[i], - -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)), - ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i); - } else { - infer(delta_luma_weight_l1[i], 0); - infer(luma_offset_l1[i], 0); - } - if (current->chroma_weight_l1_flag[i]) { - for (j = 0; j < 2; j++) { - ses(delta_chroma_weight_l1[i][j], -128, +127, 2, i, j); - ses(chroma_offset_l1[i][j], - -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)), - ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j); - } - } else { - for (j = 0; j < 2; j++) { - infer(delta_chroma_weight_l1[i][j], 0); - infer(chroma_offset_l1[i][j], 0); - } - } - } - } - - return 0; -} - -static int FUNC(slice_segment_header)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSliceHeader *current) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - const H265RawPPS *pps; - unsigned int min_cb_log2_size_y, ctb_log2_size_y, ctb_size_y; - unsigned int pic_width_in_ctbs_y, pic_height_in_ctbs_y, pic_size_in_ctbs_y; - unsigned int num_pic_total_curr = 0; - int err, i; - - HEADER("Slice Segment Header"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, -1)); - - flag(first_slice_segment_in_pic_flag); - - if (current->nal_unit_header.nal_unit_type >= HEVC_NAL_BLA_W_LP && - current->nal_unit_header.nal_unit_type <= HEVC_NAL_RSV_IRAP_VCL23) - flag(no_output_of_prior_pics_flag); - - ue(slice_pic_parameter_set_id, 0, 63); - - pps = h265->pps[current->slice_pic_parameter_set_id]; - if (!pps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n", - current->slice_pic_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_pps = 
pps; - - sps = h265->sps[pps->pps_seq_parameter_set_id]; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - pps->pps_seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_sps = sps; - - min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3; - ctb_log2_size_y = min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size; - ctb_size_y = 1 << ctb_log2_size_y; - pic_width_in_ctbs_y = - (sps->pic_width_in_luma_samples + ctb_size_y - 1) / ctb_size_y; - pic_height_in_ctbs_y = - (sps->pic_height_in_luma_samples + ctb_size_y - 1) / ctb_size_y; - pic_size_in_ctbs_y = pic_width_in_ctbs_y * pic_height_in_ctbs_y; - - if (!current->first_slice_segment_in_pic_flag) { - unsigned int address_size = av_log2(pic_size_in_ctbs_y - 1) + 1; - if (pps->dependent_slice_segments_enabled_flag) - flag(dependent_slice_segment_flag); - else - infer(dependent_slice_segment_flag, 0); - u(address_size, slice_segment_address, 0, pic_size_in_ctbs_y - 1); - } else { - infer(dependent_slice_segment_flag, 0); - } - - if (!current->dependent_slice_segment_flag) { - for (i = 0; i < pps->num_extra_slice_header_bits; i++) - flags(slice_reserved_flag[i], 1, i); - - ue(slice_type, 0, 2); - - if (pps->output_flag_present_flag) - flag(pic_output_flag); - - if (sps->separate_colour_plane_flag) - u(2, colour_plane_id, 0, 2); - - if (current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_W_RADL && - current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_N_LP) { - const H265RawSTRefPicSet *rps; - int dpb_slots_remaining; - - ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, slice_pic_order_cnt_lsb); - - flag(short_term_ref_pic_set_sps_flag); - if (!current->short_term_ref_pic_set_sps_flag) { - CHECK(FUNC(st_ref_pic_set)(ctx, rw, ¤t->short_term_ref_pic_set, - sps->num_short_term_ref_pic_sets, sps)); - rps = ¤t->short_term_ref_pic_set; - } else if (sps->num_short_term_ref_pic_sets > 1) { - unsigned int idx_size = 
av_log2(sps->num_short_term_ref_pic_sets - 1) + 1; - u(idx_size, short_term_ref_pic_set_idx, - 0, sps->num_short_term_ref_pic_sets - 1); - rps = &sps->st_ref_pic_set[current->short_term_ref_pic_set_idx]; - } else { - infer(short_term_ref_pic_set_idx, 0); - rps = &sps->st_ref_pic_set[0]; - } - - dpb_slots_remaining = HEVC_MAX_DPB_SIZE - 1 - - rps->num_negative_pics - rps->num_positive_pics; - if (pps->pps_curr_pic_ref_enabled_flag && - (sps->sample_adaptive_offset_enabled_flag || - !pps->pps_deblocking_filter_disabled_flag || - pps->deblocking_filter_override_enabled_flag)) { - // This picture will occupy two DPB slots. - if (dpb_slots_remaining == 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: " - "short-term ref pic set contains too many pictures " - "to use with current picture reference enabled.\n"); - return AVERROR_INVALIDDATA; - } - --dpb_slots_remaining; - } - - num_pic_total_curr = 0; - for (i = 0; i < rps->num_negative_pics; i++) - if (rps->used_by_curr_pic_s0_flag[i]) - ++num_pic_total_curr; - for (i = 0; i < rps->num_positive_pics; i++) - if (rps->used_by_curr_pic_s1_flag[i]) - ++num_pic_total_curr; - - if (sps->long_term_ref_pics_present_flag) { - unsigned int idx_size; - - if (sps->num_long_term_ref_pics_sps > 0) { - ue(num_long_term_sps, 0, FFMIN(sps->num_long_term_ref_pics_sps, - dpb_slots_remaining)); - idx_size = av_log2(sps->num_long_term_ref_pics_sps - 1) + 1; - dpb_slots_remaining -= current->num_long_term_sps; - } else { - infer(num_long_term_sps, 0); - idx_size = 0; - } - ue(num_long_term_pics, 0, dpb_slots_remaining); - - for (i = 0; i < current->num_long_term_sps + - current->num_long_term_pics; i++) { - if (i < current->num_long_term_sps) { - if (sps->num_long_term_ref_pics_sps > 1) - us(idx_size, lt_idx_sps[i], - 0, sps->num_long_term_ref_pics_sps - 1, 1, i); - if (sps->used_by_curr_pic_lt_sps_flag[current->lt_idx_sps[i]]) - ++num_pic_total_curr; - } else { - ubs(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, poc_lsb_lt[i], 1, i); 
- flags(used_by_curr_pic_lt_flag[i], 1, i); - if (current->used_by_curr_pic_lt_flag[i]) - ++num_pic_total_curr; - } - flags(delta_poc_msb_present_flag[i], 1, i); - if (current->delta_poc_msb_present_flag[i]) - ues(delta_poc_msb_cycle_lt[i], 0, UINT32_MAX - 1, 1, i); - else - infer(delta_poc_msb_cycle_lt[i], 0); - } - } - - if (sps->sps_temporal_mvp_enabled_flag) - flag(slice_temporal_mvp_enabled_flag); - else - infer(slice_temporal_mvp_enabled_flag, 0); - - if (pps->pps_curr_pic_ref_enabled_flag) - ++num_pic_total_curr; - } - - if (sps->sample_adaptive_offset_enabled_flag) { - flag(slice_sao_luma_flag); - if (!sps->separate_colour_plane_flag && sps->chroma_format_idc != 0) - flag(slice_sao_chroma_flag); - else - infer(slice_sao_chroma_flag, 0); - } else { - infer(slice_sao_luma_flag, 0); - infer(slice_sao_chroma_flag, 0); - } - - if (current->slice_type == HEVC_SLICE_P || - current->slice_type == HEVC_SLICE_B) { - flag(num_ref_idx_active_override_flag); - if (current->num_ref_idx_active_override_flag) { - ue(num_ref_idx_l0_active_minus1, 0, 14); - if (current->slice_type == HEVC_SLICE_B) - ue(num_ref_idx_l1_active_minus1, 0, 14); - else - infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1); - } else { - infer(num_ref_idx_l0_active_minus1, pps->num_ref_idx_l0_default_active_minus1); - infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1); - } - - if (pps->lists_modification_present_flag && num_pic_total_curr > 1) - CHECK(FUNC(ref_pic_lists_modification)(ctx, rw, current, - num_pic_total_curr)); - - if (current->slice_type == HEVC_SLICE_B) - flag(mvd_l1_zero_flag); - if (pps->cabac_init_present_flag) - flag(cabac_init_flag); - else - infer(cabac_init_flag, 0); - if (current->slice_temporal_mvp_enabled_flag) { - if (current->slice_type == HEVC_SLICE_B) - flag(collocated_from_l0_flag); - else - infer(collocated_from_l0_flag, 1); - if (current->collocated_from_l0_flag) { - if (current->num_ref_idx_l0_active_minus1 > 0) 
- ue(collocated_ref_idx, 0, current->num_ref_idx_l0_active_minus1); - else - infer(collocated_ref_idx, 0); - } else { - if (current->num_ref_idx_l1_active_minus1 > 0) - ue(collocated_ref_idx, 0, current->num_ref_idx_l1_active_minus1); - else - infer(collocated_ref_idx, 0); - } - } - - if ((pps->weighted_pred_flag && current->slice_type == HEVC_SLICE_P) || - (pps->weighted_bipred_flag && current->slice_type == HEVC_SLICE_B)) - CHECK(FUNC(pred_weight_table)(ctx, rw, current)); - - ue(five_minus_max_num_merge_cand, 0, 4); - if (sps->motion_vector_resolution_control_idc == 2) - flag(use_integer_mv_flag); - else - infer(use_integer_mv_flag, sps->motion_vector_resolution_control_idc); - } - - se(slice_qp_delta, - - 6 * sps->bit_depth_luma_minus8 - (pps->init_qp_minus26 + 26), - + 51 - (pps->init_qp_minus26 + 26)); - if (pps->pps_slice_chroma_qp_offsets_present_flag) { - se(slice_cb_qp_offset, -12, +12); - se(slice_cr_qp_offset, -12, +12); - } else { - infer(slice_cb_qp_offset, 0); - infer(slice_cr_qp_offset, 0); - } - if (pps->pps_slice_act_qp_offsets_present_flag) { - se(slice_act_y_qp_offset, - -12 - (pps->pps_act_y_qp_offset_plus5 - 5), - +12 - (pps->pps_act_y_qp_offset_plus5 - 5)); - se(slice_act_cb_qp_offset, - -12 - (pps->pps_act_cb_qp_offset_plus5 - 5), - +12 - (pps->pps_act_cb_qp_offset_plus5 - 5)); - se(slice_act_cr_qp_offset, - -12 - (pps->pps_act_cr_qp_offset_plus3 - 3), - +12 - (pps->pps_act_cr_qp_offset_plus3 - 3)); - } else { - infer(slice_act_y_qp_offset, 0); - infer(slice_act_cb_qp_offset, 0); - infer(slice_act_cr_qp_offset, 0); - } - if (pps->chroma_qp_offset_list_enabled_flag) - flag(cu_chroma_qp_offset_enabled_flag); - else - infer(cu_chroma_qp_offset_enabled_flag, 0); - - if (pps->deblocking_filter_override_enabled_flag) - flag(deblocking_filter_override_flag); - else - infer(deblocking_filter_override_flag, 0); - if (current->deblocking_filter_override_flag) { - flag(slice_deblocking_filter_disabled_flag); - if 
(!current->slice_deblocking_filter_disabled_flag) { - se(slice_beta_offset_div2, -6, +6); - se(slice_tc_offset_div2, -6, +6); - } else { - infer(slice_beta_offset_div2, pps->pps_beta_offset_div2); - infer(slice_tc_offset_div2, pps->pps_tc_offset_div2); - } - } else { - infer(slice_deblocking_filter_disabled_flag, - pps->pps_deblocking_filter_disabled_flag); - infer(slice_beta_offset_div2, pps->pps_beta_offset_div2); - infer(slice_tc_offset_div2, pps->pps_tc_offset_div2); - } - if (pps->pps_loop_filter_across_slices_enabled_flag && - (current->slice_sao_luma_flag || current->slice_sao_chroma_flag || - !current->slice_deblocking_filter_disabled_flag)) - flag(slice_loop_filter_across_slices_enabled_flag); - else - infer(slice_loop_filter_across_slices_enabled_flag, - pps->pps_loop_filter_across_slices_enabled_flag); - } - - if (pps->tiles_enabled_flag || pps->entropy_coding_sync_enabled_flag) { - unsigned int num_entry_point_offsets_limit; - if (!pps->tiles_enabled_flag && pps->entropy_coding_sync_enabled_flag) - num_entry_point_offsets_limit = pic_height_in_ctbs_y - 1; - else if (pps->tiles_enabled_flag && !pps->entropy_coding_sync_enabled_flag) - num_entry_point_offsets_limit = - (pps->num_tile_columns_minus1 + 1) * (pps->num_tile_rows_minus1 + 1); - else - num_entry_point_offsets_limit = - (pps->num_tile_columns_minus1 + 1) * pic_height_in_ctbs_y - 1; - ue(num_entry_point_offsets, 0, num_entry_point_offsets_limit); - - if (current->num_entry_point_offsets > HEVC_MAX_ENTRY_POINT_OFFSETS) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many entry points: " - "%"PRIu16".\n", current->num_entry_point_offsets); - return AVERROR_PATCHWELCOME; - } - - if (current->num_entry_point_offsets > 0) { - ue(offset_len_minus1, 0, 31); - for (i = 0; i < current->num_entry_point_offsets; i++) - ubs(current->offset_len_minus1 + 1, entry_point_offset_minus1[i], 1, i); - } - } - - if (pps->slice_segment_header_extension_present_flag) { - ue(slice_segment_header_extension_length, 0, 256); - 
for (i = 0; i < current->slice_segment_header_extension_length; i++) - us(8, slice_segment_header_extension_data_byte[i], 0x00, 0xff, 1, i); - } - - CHECK(FUNC(byte_alignment)(ctx, rw)); - - return 0; -} - -static int FUNC(sei_buffering_period) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIBufferingPeriod *current, SEIMessageState *sei) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - const H265RawHRDParameters *hrd; - int err, i, length; - -#ifdef READ - int start_pos, end_pos; - start_pos = get_bits_count(rw); -#endif - - HEADER("Buffering Period"); - - ue(bp_seq_parameter_set_id, 0, HEVC_MAX_SPS_COUNT - 1); - - sps = h265->sps[current->bp_seq_parameter_set_id]; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n", - current->bp_seq_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_sps = sps; - - if (!sps->vui_parameters_present_flag || - !sps->vui.vui_hrd_parameters_present_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires " - "HRD parameters to be present in SPS.\n"); - return AVERROR_INVALIDDATA; - } - hrd = &sps->vui.hrd_parameters; - if (!hrd->nal_hrd_parameters_present_flag && - !hrd->vcl_hrd_parameters_present_flag) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires " - "NAL or VCL HRD parameters to be present.\n"); - return AVERROR_INVALIDDATA; - } - - if (!hrd->sub_pic_hrd_params_present_flag) - flag(irap_cpb_params_present_flag); - else - infer(irap_cpb_params_present_flag, 0); - if (current->irap_cpb_params_present_flag) { - length = hrd->au_cpb_removal_delay_length_minus1 + 1; - ub(length, cpb_delay_offset); - length = hrd->dpb_output_delay_length_minus1 + 1; - ub(length, dpb_delay_offset); - } else { - infer(cpb_delay_offset, 0); - infer(dpb_delay_offset, 0); - } - - flag(concatenation_flag); - - length = hrd->au_cpb_removal_delay_length_minus1 + 1; - ub(length, au_cpb_removal_delay_delta_minus1); - - if 
(hrd->nal_hrd_parameters_present_flag) { - for (i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) { - length = hrd->initial_cpb_removal_delay_length_minus1 + 1; - - ubs(length, nal_initial_cpb_removal_delay[i], 1, i); - ubs(length, nal_initial_cpb_removal_offset[i], 1, i); - - if (hrd->sub_pic_hrd_params_present_flag || - current->irap_cpb_params_present_flag) { - ubs(length, nal_initial_alt_cpb_removal_delay[i], 1, i); - ubs(length, nal_initial_alt_cpb_removal_offset[i], 1, i); - } - } - } - if (hrd->vcl_hrd_parameters_present_flag) { - for (i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) { - length = hrd->initial_cpb_removal_delay_length_minus1 + 1; - - ubs(length, vcl_initial_cpb_removal_delay[i], 1, i); - ubs(length, vcl_initial_cpb_removal_offset[i], 1, i); - - if (hrd->sub_pic_hrd_params_present_flag || - current->irap_cpb_params_present_flag) { - ubs(length, vcl_initial_alt_cpb_removal_delay[i], 1, i); - ubs(length, vcl_initial_alt_cpb_removal_offset[i], 1, i); - } - } - } - -#ifdef READ - end_pos = get_bits_count(rw); - if (cbs_h265_payload_extension_present(rw, sei->payload_size, - end_pos - start_pos)) - flag(use_alt_cpb_params_flag); - else - infer(use_alt_cpb_params_flag, 0); -#else - // If unknown extension data exists, then use_alt_cpb_params_flag is - // coded in the bitstream and must be written even if it's 0. - if (current->use_alt_cpb_params_flag || sei->extension_present) { - flag(use_alt_cpb_params_flag); - // Ensure this bit is not the last in the payload by making the - // more_data_in_payload() check evaluate to true, so it may not - // be mistaken as something else by decoders. 
- sei->extension_present = 1; - } -#endif - - return 0; -} - -static int FUNC(sei_pic_timing) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIPicTiming *current, SEIMessageState *sei) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps; - const H265RawHRDParameters *hrd; - int err, expected_source_scan_type, i, length; - - HEADER("Picture Timing"); - - sps = h265->active_sps; - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for pic_timing.\n"); - return AVERROR_INVALIDDATA; - } - - expected_source_scan_type = 2 - - 2 * sps->profile_tier_level.general_interlaced_source_flag - - sps->profile_tier_level.general_progressive_source_flag; - - if (sps->vui.frame_field_info_present_flag) { - u(4, pic_struct, 0, 12); - u(2, source_scan_type, - expected_source_scan_type >= 0 ? expected_source_scan_type : 0, - expected_source_scan_type >= 0 ? expected_source_scan_type : 2); - flag(duplicate_flag); - } else { - infer(pic_struct, 0); - infer(source_scan_type, - expected_source_scan_type >= 0 ? expected_source_scan_type : 2); - infer(duplicate_flag, 0); - } - - if (sps->vui_parameters_present_flag && - sps->vui.vui_hrd_parameters_present_flag) - hrd = &sps->vui.hrd_parameters; - else - hrd = NULL; - if (hrd && (hrd->nal_hrd_parameters_present_flag || - hrd->vcl_hrd_parameters_present_flag)) { - length = hrd->au_cpb_removal_delay_length_minus1 + 1; - ub(length, au_cpb_removal_delay_minus1); - - length = hrd->dpb_output_delay_length_minus1 + 1; - ub(length, pic_dpb_output_delay); - - if (hrd->sub_pic_hrd_params_present_flag) { - length = hrd->dpb_output_delay_du_length_minus1 + 1; - ub(length, pic_dpb_output_du_delay); - } - - if (hrd->sub_pic_hrd_params_present_flag && - hrd->sub_pic_cpb_params_in_pic_timing_sei_flag) { - // Each decoding unit must contain at least one slice segment. 
- ue(num_decoding_units_minus1, 0, HEVC_MAX_SLICE_SEGMENTS); - flag(du_common_cpb_removal_delay_flag); - - length = hrd->du_cpb_removal_delay_increment_length_minus1 + 1; - if (current->du_common_cpb_removal_delay_flag) - ub(length, du_common_cpb_removal_delay_increment_minus1); - - for (i = 0; i <= current->num_decoding_units_minus1; i++) { - ues(num_nalus_in_du_minus1[i], - 0, HEVC_MAX_SLICE_SEGMENTS, 1, i); - if (!current->du_common_cpb_removal_delay_flag && - i < current->num_decoding_units_minus1) - ubs(length, du_cpb_removal_delay_increment_minus1[i], 1, i); - } - } - } - - return 0; -} - -static int FUNC(sei_pan_scan_rect) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIPanScanRect *current, SEIMessageState *sei) -{ - int err, i; - - HEADER("Pan-Scan Rectangle"); - - ue(pan_scan_rect_id, 0, UINT32_MAX - 1); - flag(pan_scan_rect_cancel_flag); - - if (!current->pan_scan_rect_cancel_flag) { - ue(pan_scan_cnt_minus1, 0, 2); - - for (i = 0; i <= current->pan_scan_cnt_minus1; i++) { - ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i); - } - - flag(pan_scan_rect_persistence_flag); - } - - return 0; -} - -static int FUNC(sei_recovery_point) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIRecoveryPoint *current, SEIMessageState *sei) -{ - int err; - - HEADER("Recovery Point"); - - se(recovery_poc_cnt, -32768, 32767); - - flag(exact_match_flag); - flag(broken_link_flag); - - return 0; -} - -static int FUNC(film_grain_characteristics)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawFilmGrainCharacteristics *current, - SEIMessageState *state) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int err, c, i, j; - - HEADER("Film Grain Characteristics"); - - 
flag(film_grain_characteristics_cancel_flag); - if (!current->film_grain_characteristics_cancel_flag) { - int filmGrainBitDepth[3]; - - u(2, film_grain_model_id, 0, 1); - flag(separate_colour_description_present_flag); - if (current->separate_colour_description_present_flag) { - ub(3, film_grain_bit_depth_luma_minus8); - ub(3, film_grain_bit_depth_chroma_minus8); - flag(film_grain_full_range_flag); - ub(8, film_grain_colour_primaries); - ub(8, film_grain_transfer_characteristics); - ub(8, film_grain_matrix_coeffs); - } else { - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for film_grain_characteristics.\n"); - return AVERROR_INVALIDDATA; - } - infer(film_grain_bit_depth_luma_minus8, sps->bit_depth_luma_minus8); - infer(film_grain_bit_depth_chroma_minus8, sps->bit_depth_chroma_minus8); - infer(film_grain_full_range_flag, sps->vui.video_full_range_flag); - infer(film_grain_colour_primaries, sps->vui.colour_primaries); - infer(film_grain_transfer_characteristics, sps->vui.transfer_characteristics); - infer(film_grain_matrix_coeffs, sps->vui.matrix_coefficients); - } - - filmGrainBitDepth[0] = current->film_grain_bit_depth_luma_minus8 + 8; - filmGrainBitDepth[1] = - filmGrainBitDepth[2] = current->film_grain_bit_depth_chroma_minus8 + 8; - - u(2, blending_mode_id, 0, 1); - ub(4, log2_scale_factor); - for (c = 0; c < 3; c++) - flags(comp_model_present_flag[c], 1, c); - for (c = 0; c < 3; c++) { - if (current->comp_model_present_flag[c]) { - ubs(8, num_intensity_intervals_minus1[c], 1, c); - us(3, num_model_values_minus1[c], 0, 5, 1, c); - for (i = 0; i <= current->num_intensity_intervals_minus1[c]; i++) { - ubs(8, intensity_interval_lower_bound[c][i], 2, c, i); - ubs(8, intensity_interval_upper_bound[c][i], 2, c, i); - for (j = 0; j <= current->num_model_values_minus1[c]; j++) - ses(comp_model_value[c][i][j], 0 - current->film_grain_model_id * (1 << (filmGrainBitDepth[c] - 1)), - ((1 << filmGrainBitDepth[c]) - 1) - current->film_grain_model_id * (1 
<< (filmGrainBitDepth[c] - 1)), - 3, c, i, j); - } - } - } - flag(film_grain_characteristics_persistence_flag); - } - - return 0; -} - -static int FUNC(sei_display_orientation) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIDisplayOrientation *current, SEIMessageState *sei) -{ - int err; - - HEADER("Display Orientation"); - - flag(display_orientation_cancel_flag); - if (!current->display_orientation_cancel_flag) { - flag(hor_flip); - flag(ver_flip); - ub(16, anticlockwise_rotation); - flag(display_orientation_persistence_flag); - } - - return 0; -} - -static int FUNC(sei_active_parameter_sets) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIActiveParameterSets *current, SEIMessageState *sei) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawVPS *vps; - int err, i; - - HEADER("Active Parameter Sets"); - - u(4, active_video_parameter_set_id, 0, HEVC_MAX_VPS_COUNT); - vps = h265->vps[current->active_video_parameter_set_id]; - if (!vps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "VPS id %d not available for active " - "parameter sets.\n", current->active_video_parameter_set_id); - return AVERROR_INVALIDDATA; - } - h265->active_vps = vps; - - flag(self_contained_cvs_flag); - flag(no_parameter_set_update_flag); - - ue(num_sps_ids_minus1, 0, HEVC_MAX_SPS_COUNT - 1); - for (i = 0; i <= current->num_sps_ids_minus1; i++) - ues(active_seq_parameter_set_id[i], 0, HEVC_MAX_SPS_COUNT - 1, 1, i); - - for (i = vps->vps_base_layer_internal_flag; - i <= FFMIN(62, vps->vps_max_layers_minus1); i++) { - ues(layer_sps_idx[i], 0, current->num_sps_ids_minus1, 1, i); - - if (i == 0) - h265->active_sps = h265->sps[current->active_seq_parameter_set_id[current->layer_sps_idx[0]]]; - } - - return 0; -} - -static int FUNC(sei_decoded_picture_hash) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIDecodedPictureHash *current, SEIMessageState *sei) -{ - CodedBitstreamH265Context *h265 = ctx->priv_data; - const H265RawSPS *sps = h265->active_sps; - int 
err, c, i; - - HEADER("Decoded Picture Hash"); - - if (!sps) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "No active SPS for decoded picture hash.\n"); - return AVERROR_INVALIDDATA; - } - - u(8, hash_type, 0, 2); - - for (c = 0; c < (sps->chroma_format_idc == 0 ? 1 : 3); c++) { - if (current->hash_type == 0) { - for (i = 0; i < 16; i++) - us(8, picture_md5[c][i], 0x00, 0xff, 2, c, i); - } else if (current->hash_type == 1) { - us(16, picture_crc[c], 0x0000, 0xffff, 1, c); - } else if (current->hash_type == 2) { - us(32, picture_checksum[c], 0x00000000, 0xffffffff, 1, c); - } - } - - return 0; -} - -static int FUNC(sei_time_code) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEITimeCode *current, SEIMessageState *sei) -{ - int err, i; - - HEADER("Time Code"); - - u(2, num_clock_ts, 1, 3); - - for (i = 0; i < current->num_clock_ts; i++) { - flags(clock_timestamp_flag[i], 1, i); - - if (current->clock_timestamp_flag[i]) { - flags(units_field_based_flag[i], 1, i); - us(5, counting_type[i], 0, 6, 1, i); - flags(full_timestamp_flag[i], 1, i); - flags(discontinuity_flag[i], 1, i); - flags(cnt_dropped_flag[i], 1, i); - - ubs(9, n_frames[i], 1, i); - - if (current->full_timestamp_flag[i]) { - us(6, seconds_value[i], 0, 59, 1, i); - us(6, minutes_value[i], 0, 59, 1, i); - us(5, hours_value[i], 0, 23, 1, i); - } else { - flags(seconds_flag[i], 1, i); - if (current->seconds_flag[i]) { - us(6, seconds_value[i], 0, 59, 1, i); - flags(minutes_flag[i], 1, i); - if (current->minutes_flag[i]) { - us(6, minutes_value[i], 0, 59, 1, i); - flags(hours_flag[i], 1, i); - if (current->hours_flag[i]) - us(5, hours_value[i], 0, 23, 1, i); - } - } - } - - ubs(5, time_offset_length[i], 1, i); - if (current->time_offset_length[i] > 0) - ibs(current->time_offset_length[i], time_offset_value[i], 1, i); - else - infer(time_offset_value[i], 0); - } - } - - return 0; -} - -static int FUNC(sei_alpha_channel_info) - (CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEIAlphaChannelInfo *current, 
SEIMessageState *sei) -{ - int err, length; - - HEADER("Alpha Channel Information"); - - flag(alpha_channel_cancel_flag); - if (!current->alpha_channel_cancel_flag) { - ub(3, alpha_channel_use_idc); - ub(3, alpha_channel_bit_depth_minus8); - length = current->alpha_channel_bit_depth_minus8 + 9; - ub(length, alpha_transparent_value); - ub(length, alpha_opaque_value); - flag(alpha_channel_incr_flag); - flag(alpha_channel_clip_flag); - if (current->alpha_channel_clip_flag) - flag(alpha_channel_clip_type_flag); - } else { - infer(alpha_channel_use_idc, 2); - infer(alpha_channel_incr_flag, 0); - infer(alpha_channel_clip_flag, 0); - } - - return 0; -} - -static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw, - H265RawSEI *current, int prefix) -{ - int err; - - if (prefix) - HEADER("Prefix Supplemental Enhancement Information"); - else - HEADER("Suffix Supplemental Enhancement Information"); - - CHECK(FUNC(nal_unit_header)(ctx, rw, ¤t->nal_unit_header, - prefix ? HEVC_NAL_SEI_PREFIX - : HEVC_NAL_SEI_SUFFIX)); - - CHECK(FUNC_SEI(message_list)(ctx, rw, ¤t->message_list, prefix)); - - CHECK(FUNC(rbsp_trailing_bits)(ctx, rw)); - - return 0; -} diff --git a/third-party/cbs/cbs_internal.h b/third-party/cbs/cbs_internal.h deleted file mode 100644 index 58bad02d85e..00000000000 --- a/third-party/cbs/cbs_internal.h +++ /dev/null @@ -1,218 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_INTERNAL_H -#define AVCODEC_CBS_INTERNAL_H - -#include - -#include "libavutil/buffer.h" -#include "libavutil/log.h" - -// [sunshine] Changed include path -#include "cbs/cbs.h" -#include "cbs/codec_id.h" -#include "cbs/get_bits.h" -#include "put_bits.h" - - -enum CBSContentType { - // Unit content is a simple structure. - CBS_CONTENT_TYPE_POD, - // Unit content contains some references to other structures, but all - // managed via buffer reference counting. The descriptor defines the - // structure offsets of every buffer reference. - CBS_CONTENT_TYPE_INTERNAL_REFS, - // Unit content is something more complex. The descriptor defines - // special functions to manage the content. - CBS_CONTENT_TYPE_COMPLEX, -}; - -enum { - // Maximum number of unit types described by the same unit type - // descriptor. - CBS_MAX_UNIT_TYPES = 3, - // Maximum number of reference buffer offsets in any one unit. - CBS_MAX_REF_OFFSETS = 2, - // Special value used in a unit type descriptor to indicate that it - // applies to a large range of types rather than a set of discrete - // values. - CBS_UNIT_TYPE_RANGE = -1, -}; - -typedef const struct CodedBitstreamUnitTypeDescriptor { - // Number of entries in the unit_types array, or the special value - // CBS_UNIT_TYPE_RANGE to indicate that the range fields should be - // used instead. - int nb_unit_types; - - // Array of unit types that this entry describes. - const CodedBitstreamUnitType unit_types[CBS_MAX_UNIT_TYPES]; - - // Start and end of unit type range, used if nb_unit_types is - // CBS_UNIT_TYPE_RANGE. - const CodedBitstreamUnitType unit_type_range_start; - const CodedBitstreamUnitType unit_type_range_end; - - // The type of content described. 
- enum CBSContentType content_type; - // The size of the structure which should be allocated to contain - // the decomposed content of this type of unit. - size_t content_size; - - // Number of entries in the ref_offsets array. Only used if the - // content_type is CBS_CONTENT_TYPE_INTERNAL_REFS. - int nb_ref_offsets; - // The structure must contain two adjacent elements: - // type *field; - // AVBufferRef *field_ref; - // where field points to something in the buffer referred to by - // field_ref. This offset is then set to offsetof(struct, field). - size_t ref_offsets[CBS_MAX_REF_OFFSETS]; - - void (*content_free)(void *opaque, uint8_t *data); - int (*content_clone)(AVBufferRef **ref, CodedBitstreamUnit *unit); -} CodedBitstreamUnitTypeDescriptor; - -typedef struct CodedBitstreamType { - enum AVCodecID codec_id; - - // A class for the private data, used to declare private AVOptions. - // This field is NULL for types that do not declare any options. - // If this field is non-NULL, the first member of the filter private data - // must be a pointer to AVClass. - const AVClass *priv_class; - - size_t priv_data_size; - - // List of unit type descriptors for this codec. - // Terminated by a descriptor with nb_unit_types equal to zero. - const CodedBitstreamUnitTypeDescriptor *unit_types; - - // Split frag->data into coded bitstream units, creating the - // frag->units array. Fill data but not content on each unit. - // The header argument should be set if the fragment came from - // a header block, which may require different parsing for some - // codecs (e.g. the AVCC header in H.264). - int (*split_fragment)(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header); - - // Read the unit->data bitstream and decompose it, creating - // unit->content. - int (*read_unit)(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); - - // Write the data bitstream from unit->content into pbc. - // Return value AVERROR(ENOSPC) indicates that pbc was too small. 
- int (*write_unit)(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc); - - // Read the data from all of frag->units and assemble it into - // a bitstream for the whole fragment. - int (*assemble_fragment)(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag); - - // Reset the codec internal state. - void (*flush)(CodedBitstreamContext *ctx); - - // Free the codec internal state. - void (*close)(CodedBitstreamContext *ctx); -} CodedBitstreamType; - - -// Helper functions for trace output. - -void ff_cbs_trace_header(CodedBitstreamContext *ctx, - const char *name); - -void ff_cbs_trace_syntax_element(CodedBitstreamContext *ctx, int position, - const char *name, const int *subscripts, - const char *bitstring, int64_t value); - - -// Helper functions for read/write of common bitstream elements, including -// generation of trace output. - -int ff_cbs_read_unsigned(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, uint32_t *write_to, - uint32_t range_min, uint32_t range_max); - -int ff_cbs_write_unsigned(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, uint32_t value, - uint32_t range_min, uint32_t range_max); - -int ff_cbs_read_signed(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, int32_t *write_to, - int32_t range_min, int32_t range_max); - -int ff_cbs_write_signed(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, int32_t value, - int32_t range_min, int32_t range_max); - -// The largest unsigned value representable in N bits, suitable for use as -// range_max in the above functions. -#define MAX_UINT_BITS(length) ((UINT64_C(1) << (length)) - 1) - -// The largest signed value representable in N bits, suitable for use as -// range_max in the above functions. 
-#define MAX_INT_BITS(length) ((INT64_C(1) << ((length) - 1)) - 1) - -// The smallest signed value representable in N bits, suitable for use as -// range_min in the above functions. -#define MIN_INT_BITS(length) (-(INT64_C(1) << ((length) - 1))) - - -#define CBS_UNIT_TYPE_POD(type, structure) { \ - .nb_unit_types = 1, \ - .unit_types = { type }, \ - .content_type = CBS_CONTENT_TYPE_POD, \ - .content_size = sizeof(structure), \ - } -#define CBS_UNIT_TYPE_INTERNAL_REF(type, structure, ref_field) { \ - .nb_unit_types = 1, \ - .unit_types = { type }, \ - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, \ - .content_size = sizeof(structure), \ - .nb_ref_offsets = 1, \ - .ref_offsets = { offsetof(structure, ref_field) }, \ - } -#define CBS_UNIT_TYPE_COMPLEX(type, structure, free_func) { \ - .nb_unit_types = 1, \ - .unit_types = { type }, \ - .content_type = CBS_CONTENT_TYPE_COMPLEX, \ - .content_size = sizeof(structure), \ - .content_free = free_func, \ - } -#define CBS_UNIT_TYPE_END_OF_LIST { .nb_unit_types = 0 } - - -extern const CodedBitstreamType ff_cbs_type_av1; -extern const CodedBitstreamType ff_cbs_type_h264; -extern const CodedBitstreamType ff_cbs_type_h265; -extern const CodedBitstreamType ff_cbs_type_jpeg; -extern const CodedBitstreamType ff_cbs_type_mpeg2; -extern const CodedBitstreamType ff_cbs_type_vp9; - - -#endif /* AVCODEC_CBS_INTERNAL_H */ diff --git a/third-party/cbs/cbs_jpeg.c b/third-party/cbs/cbs_jpeg.c deleted file mode 100644 index d0345f50fed..00000000000 --- a/third-party/cbs/cbs_jpeg.c +++ /dev/null @@ -1,467 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -// [sunshine] Changed include path -#include "cbs/cbs.h" -#include "cbs_internal.h" -#include "cbs/cbs_jpeg.h" - - -#define HEADER(name) do { \ - ff_cbs_trace_header(ctx, name); \ - } while (0) - -#define CHECK(call) do { \ - err = (call); \ - if (err < 0) \ - return err; \ - } while (0) - -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) - -#define u(width, name, range_min, range_max) \ - xu(width, name, range_min, range_max, 0, ) -#define us(width, name, sub, range_min, range_max) \ - xu(width, name, range_min, range_max, 1, sub) - - -#define READ -#define READWRITE read -#define RWContext GetBitContext -#define FUNC(name) cbs_jpeg_read_ ## name - -#define xu(width, name, range_min, range_max, subs, ...) do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - current->name = value; \ - } while (0) - -#include "cbs_jpeg_syntax_template.c" - -#undef READ -#undef READWRITE -#undef RWContext -#undef FUNC -#undef xu - -#define WRITE -#define READWRITE write -#define RWContext PutBitContext -#define FUNC(name) cbs_jpeg_write_ ## name - -#define xu(width, name, range_min, range_max, subs, ...) 
do { \ - uint32_t value = current->name; \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - value, range_min, range_max)); \ - } while (0) - - -#include "cbs_jpeg_syntax_template.c" - -#undef WRITE -#undef READWRITE -#undef RWContext -#undef FUNC -#undef xu - - -static void cbs_jpeg_free_application_data(void *opaque, uint8_t *content) -{ - JPEGRawApplicationData *ad = (JPEGRawApplicationData*)content; - av_buffer_unref(&ad->Ap_ref); - av_freep(&content); -} - -static void cbs_jpeg_free_comment(void *opaque, uint8_t *content) -{ - JPEGRawComment *comment = (JPEGRawComment*)content; - av_buffer_unref(&comment->Cm_ref); - av_freep(&content); -} - -static void cbs_jpeg_free_scan(void *opaque, uint8_t *content) -{ - JPEGRawScan *scan = (JPEGRawScan*)content; - av_buffer_unref(&scan->data_ref); - av_freep(&content); -} - -static int cbs_jpeg_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) -{ - AVBufferRef *data_ref; - uint8_t *data; - size_t data_size; - int start, end, marker, next_start, next_marker; - int err, i, j, length; - - if (frag->data_size < 4) { - // Definitely too short to be meaningful. 
- return AVERROR_INVALIDDATA; - } - - for (i = 0; i + 1 < frag->data_size && frag->data[i] != 0xff; i++); - if (i > 0) { - av_log(ctx->log_ctx, AV_LOG_WARNING, "Discarding %d bytes at " - "beginning of image.\n", i); - } - for (++i; i + 1 < frag->data_size && frag->data[i] == 0xff; i++); - if (i + 1 >= frag->data_size && frag->data[i]) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "no SOI marker found.\n"); - return AVERROR_INVALIDDATA; - } - marker = frag->data[i]; - if (marker != JPEG_MARKER_SOI) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: first " - "marker is %02x, should be SOI.\n", marker); - return AVERROR_INVALIDDATA; - } - for (++i; i + 1 < frag->data_size && frag->data[i] == 0xff; i++); - if (i + 1 >= frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "no image content found.\n"); - return AVERROR_INVALIDDATA; - } - marker = frag->data[i]; - start = i + 1; - - do { - if (marker == JPEG_MARKER_EOI) { - break; - } else if (marker == JPEG_MARKER_SOS) { - next_marker = -1; - end = start; - for (i = start; i + 1 < frag->data_size; i++) { - if (frag->data[i] != 0xff) - continue; - end = i; - for (++i; i + 1 < frag->data_size && - frag->data[i] == 0xff; i++); - if (i + 1 < frag->data_size) { - if (frag->data[i] == 0x00) - continue; - next_marker = frag->data[i]; - next_start = i + 1; - } - break; - } - } else { - i = start; - if (i + 2 > frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "truncated at %02x marker.\n", marker); - return AVERROR_INVALIDDATA; - } - length = AV_RB16(frag->data + i); - if (i + length > frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid JPEG image: " - "truncated at %02x marker segment.\n", marker); - return AVERROR_INVALIDDATA; - } - end = start + length; - - i = end; - if (frag->data[i] != 0xff) { - next_marker = -1; - } else { - for (++i; i + 1 < frag->data_size && - frag->data[i] == 0xff; i++); - if (i + 1 >= frag->data_size) 
{ - next_marker = -1; - } else { - next_marker = frag->data[i]; - next_start = i + 1; - } - } - } - - if (marker == JPEG_MARKER_SOS) { - length = AV_RB16(frag->data + start); - - if (length > end - start) - return AVERROR_INVALIDDATA; - - data_ref = NULL; - data = av_malloc(end - start + - AV_INPUT_BUFFER_PADDING_SIZE); - if (!data) - return AVERROR(ENOMEM); - - memcpy(data, frag->data + start, length); - for (i = start + length, j = length; i < end; i++, j++) { - if (frag->data[i] == 0xff) { - while (frag->data[i] == 0xff) - ++i; - data[j] = 0xff; - } else { - data[j] = frag->data[i]; - } - } - data_size = j; - - memset(data + data_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - } else { - data = frag->data + start; - data_size = end - start; - data_ref = frag->data_ref; - } - - err = ff_cbs_append_unit_data(frag, marker, - data, data_size, data_ref); - if (err < 0) - return err; - - marker = next_marker; - start = next_start; - } while (next_marker != -1); - - return 0; -} - -static int cbs_jpeg_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if (err < 0) - return err; - - if (unit->type >= JPEG_MARKER_SOF0 && - unit->type <= JPEG_MARKER_SOF3) { - err = ff_cbs_alloc_unit_content(unit, - sizeof(JPEGRawFrameHeader), - NULL); - if (err < 0) - return err; - - err = cbs_jpeg_read_frame_header(ctx, &gbc, unit->content); - if (err < 0) - return err; - - } else if (unit->type >= JPEG_MARKER_APPN && - unit->type <= JPEG_MARKER_APPN + 15) { - err = ff_cbs_alloc_unit_content(unit, - sizeof(JPEGRawApplicationData), - &cbs_jpeg_free_application_data); - if (err < 0) - return err; - - err = cbs_jpeg_read_application_data(ctx, &gbc, unit->content); - if (err < 0) - return err; - - } else if (unit->type == JPEG_MARKER_SOS) { - JPEGRawScan *scan; - int pos; - - err = ff_cbs_alloc_unit_content(unit, - sizeof(JPEGRawScan), - &cbs_jpeg_free_scan); - if (err < 0) - 
return err; - scan = unit->content; - - err = cbs_jpeg_read_scan_header(ctx, &gbc, &scan->header); - if (err < 0) - return err; - - pos = get_bits_count(&gbc); - av_assert0(pos % 8 == 0); - if (pos > 0) { - scan->data_size = unit->data_size - pos / 8; - scan->data_ref = av_buffer_ref(unit->data_ref); - if (!scan->data_ref) - return AVERROR(ENOMEM); - scan->data = unit->data + pos / 8; - } - - } else { - switch (unit->type) { -#define SEGMENT(marker, type, func, free) \ - case JPEG_MARKER_ ## marker: \ - { \ - err = ff_cbs_alloc_unit_content(unit, \ - sizeof(type), free); \ - if (err < 0) \ - return err; \ - err = cbs_jpeg_read_ ## func(ctx, &gbc, unit->content); \ - if (err < 0) \ - return err; \ - } \ - break - SEGMENT(DQT, JPEGRawQuantisationTableSpecification, dqt, NULL); - SEGMENT(DHT, JPEGRawHuffmanTableSpecification, dht, NULL); - SEGMENT(COM, JPEGRawComment, comment, &cbs_jpeg_free_comment); -#undef SEGMENT - default: - return AVERROR(ENOSYS); - } - } - - return 0; -} - -static int cbs_jpeg_write_scan(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - JPEGRawScan *scan = unit->content; - int err; - - err = cbs_jpeg_write_scan_header(ctx, pbc, &scan->header); - if (err < 0) - return err; - - if (scan->data) { - if (scan->data_size * 8 > put_bits_left(pbc)) - return AVERROR(ENOSPC); - - av_assert0(put_bits_count(pbc) % 8 == 0); - - flush_put_bits(pbc); - - memcpy(put_bits_ptr(pbc), scan->data, scan->data_size); - skip_put_bytes(pbc, scan->data_size); - } - - return 0; -} - -static int cbs_jpeg_write_segment(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - int err; - - if (unit->type >= JPEG_MARKER_SOF0 && - unit->type <= JPEG_MARKER_SOF3) { - err = cbs_jpeg_write_frame_header(ctx, pbc, unit->content); - } else if (unit->type >= JPEG_MARKER_APPN && - unit->type <= JPEG_MARKER_APPN + 15) { - err = cbs_jpeg_write_application_data(ctx, pbc, unit->content); - } else { - switch (unit->type) { -#define 
SEGMENT(marker, func) \ - case JPEG_MARKER_ ## marker: \ - err = cbs_jpeg_write_ ## func(ctx, pbc, unit->content); \ - break; - SEGMENT(DQT, dqt); - SEGMENT(DHT, dht); - SEGMENT(COM, comment); - default: - return AVERROR_PATCHWELCOME; - } - } - - return err; -} - -static int cbs_jpeg_write_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - if (unit->type == JPEG_MARKER_SOS) - return cbs_jpeg_write_scan (ctx, unit, pbc); - else - return cbs_jpeg_write_segment(ctx, unit, pbc); -} - -static int cbs_jpeg_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) -{ - const CodedBitstreamUnit *unit; - uint8_t *data; - size_t size, dp, sp; - int i; - - size = 4; // SOI + EOI. - for (i = 0; i < frag->nb_units; i++) { - unit = &frag->units[i]; - size += 2 + unit->data_size; - if (unit->type == JPEG_MARKER_SOS) { - for (sp = 0; sp < unit->data_size; sp++) { - if (unit->data[sp] == 0xff) - ++size; - } - } - } - - frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!frag->data_ref) - return AVERROR(ENOMEM); - data = frag->data_ref->data; - - dp = 0; - - data[dp++] = 0xff; - data[dp++] = JPEG_MARKER_SOI; - - for (i = 0; i < frag->nb_units; i++) { - unit = &frag->units[i]; - - data[dp++] = 0xff; - data[dp++] = unit->type; - - if (unit->type != JPEG_MARKER_SOS) { - memcpy(data + dp, unit->data, unit->data_size); - dp += unit->data_size; - } else { - sp = AV_RB16(unit->data); - av_assert0(sp <= unit->data_size); - memcpy(data + dp, unit->data, sp); - dp += sp; - - for (; sp < unit->data_size; sp++) { - if (unit->data[sp] == 0xff) { - data[dp++] = 0xff; - data[dp++] = 0x00; - } else { - data[dp++] = unit->data[sp]; - } - } - } - } - - data[dp++] = 0xff; - data[dp++] = JPEG_MARKER_EOI; - - av_assert0(dp == size); - - memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - frag->data = data; - frag->data_size = size; - - return 0; -} - -const CodedBitstreamType ff_cbs_type_jpeg = { - .codec_id = 
AV_CODEC_ID_MJPEG, - - .split_fragment = &cbs_jpeg_split_fragment, - .read_unit = &cbs_jpeg_read_unit, - .write_unit = &cbs_jpeg_write_unit, - .assemble_fragment = &cbs_jpeg_assemble_fragment, -}; diff --git a/third-party/cbs/cbs_jpeg_syntax_template.c b/third-party/cbs/cbs_jpeg_syntax_template.c deleted file mode 100644 index e06abdc674b..00000000000 --- a/third-party/cbs/cbs_jpeg_syntax_template.c +++ /dev/null @@ -1,196 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(frame_header)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawFrameHeader *current) -{ - int err, i; - - HEADER("Frame Header"); - - u(16, Lf, 8, 8 + 3 * JPEG_MAX_COMPONENTS); - - u(8, P, 2, 16); - u(16, Y, 0, JPEG_MAX_HEIGHT); - u(16, X, 1, JPEG_MAX_WIDTH); - u(8, Nf, 1, JPEG_MAX_COMPONENTS); - - for (i = 0; i < current->Nf; i++) { - us(8, C[i], i, 0, JPEG_MAX_COMPONENTS); - us(4, H[i], i, 1, 4); - us(4, V[i], i, 1, 4); - us(8, Tq[i], i, 0, 3); - } - - return 0; -} - -static int FUNC(quantisation_table)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawQuantisationTable *current) -{ - int err, i; - - u(4, Pq, 0, 1); - u(4, Tq, 0, 3); - - if (current->Pq) { - for (i = 0; i < 64; i++) - us(16, Q[i], i, 1, 255); - } else { - for (i = 0; i < 64; i++) - 
us(8, Q[i], i, 1, 255); - } - - return 0; -} - -static int FUNC(dqt)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawQuantisationTableSpecification *current) -{ - int err, i, n; - - HEADER("Quantisation Tables"); - - u(16, Lq, 2, 2 + 4 * 65); - n = current->Lq / 65; - - for (i = 0; i < n; i++) - CHECK(FUNC(quantisation_table)(ctx, rw, ¤t->table[i])); - - return 0; -} - -static int FUNC(huffman_table)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawHuffmanTable *current) -{ - int err, i, j, ij; - - u(4, Tc, 0, 1); - u(4, Th, 0, 3); - - for (i = 0; i < 16; i++) - us(8, L[i], i, 0, 255); - - ij = 0; - for (i = 0; i < 16; i++) { - for (j = 0; j < current->L[i]; j++) { - if (ij >= FF_ARRAY_ELEMS(current->V)) - return AVERROR_INVALIDDATA; - us(8, V[ij], ij, 0, 255); - ++ij; - } - } - - return 0; -} - -static int FUNC(dht)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawHuffmanTableSpecification *current) -{ - int err, i, j, n; - - HEADER("Huffman Tables"); - - u(16, Lh, 2, 2 + 8 * (1 + 16 + 256)); - - n = 2; - for (i = 0; n < current->Lh; i++) { - if (i >= 8) - return AVERROR_INVALIDDATA; - - CHECK(FUNC(huffman_table)(ctx, rw, ¤t->table[i])); - - ++n; - for (j = 0; j < 16; j++) - n += 1 + current->table[i].L[j]; - } - - return 0; -} - -static int FUNC(scan_header)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawScanHeader *current) -{ - int err, j; - - HEADER("Scan"); - - u(16, Ls, 6, 6 + 2 * JPEG_MAX_COMPONENTS); - - u(8, Ns, 1, 4); - for (j = 0; j < current->Ns; j++) { - us(8, Cs[j], j, 0, JPEG_MAX_COMPONENTS); - us(4, Td[j], j, 0, 3); - us(4, Ta[j], j, 0, 3); - } - - u(8, Ss, 0, 63); - u(8, Se, 0, 63); - u(4, Ah, 0, 13); - u(4, Al, 0, 15); - - return 0; -} - -static int FUNC(application_data)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawApplicationData *current) -{ - int err, i; - - HEADER("Application Data"); - - u(16, Lp, 2, 65535); - - if (current->Lp > 2) { -#ifdef READ - current->Ap_ref = av_buffer_alloc(current->Lp - 2); - if 
(!current->Ap_ref) - return AVERROR(ENOMEM); - current->Ap = current->Ap_ref->data; -#endif - - for (i = 0; i < current->Lp - 2; i++) - us(8, Ap[i], i, 0, 255); - } - - return 0; -} - -static int FUNC(comment)(CodedBitstreamContext *ctx, RWContext *rw, - JPEGRawComment *current) -{ - int err, i; - - HEADER("Comment"); - - u(16, Lc, 2, 65535); - - if (current->Lc > 2) { -#ifdef READ - current->Cm_ref = av_buffer_alloc(current->Lc - 2); - if (!current->Cm_ref) - return AVERROR(ENOMEM); - current->Cm = current->Cm_ref->data; -#endif - - for (i = 0; i < current->Lc - 2; i++) - us(8, Cm[i], i, 0, 255); - } - - return 0; -} diff --git a/third-party/cbs/cbs_mpeg2.c b/third-party/cbs/cbs_mpeg2.c deleted file mode 100644 index 3ad012fe24b..00000000000 --- a/third-party/cbs/cbs_mpeg2.c +++ /dev/null @@ -1,428 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include "libavutil/avassert.h" - -// [sunshine] Changed include path -#include "cbs/cbs.h" -#include "cbs_internal.h" -#include "cbs/cbs_mpeg2.h" -#include "startcode.h" - - -#define HEADER(name) do { \ - ff_cbs_trace_header(ctx, name); \ - } while (0) - -#define CHECK(call) do { \ - err = (call); \ - if (err < 0) \ - return err; \ - } while (0) - -#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name -#define FUNC_MPEG2(rw, name) FUNC_NAME(rw, mpeg2, name) -#define FUNC(name) FUNC_MPEG2(READWRITE, name) - -#define SUBSCRIPTS(subs, ...) (subs > 0 ? ((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) - -#define ui(width, name) \ - xui(width, name, current->name, 0, MAX_UINT_BITS(width), 0, ) -#define uir(width, name) \ - xui(width, name, current->name, 1, MAX_UINT_BITS(width), 0, ) -#define uis(width, name, subs, ...) \ - xui(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__) -#define uirs(width, name, subs, ...) \ - xui(width, name, current->name, 1, MAX_UINT_BITS(width), subs, __VA_ARGS__) -#define xui(width, name, var, range_min, range_max, subs, ...) \ - xuia(width, #name, var, range_min, range_max, subs, __VA_ARGS__) -#define sis(width, name, subs, ...) \ - xsi(width, name, current->name, subs, __VA_ARGS__) - -#define marker_bit() \ - bit("marker_bit", 1) -#define bit(string, value) do { \ - av_unused uint32_t bit = value; \ - xuia(1, string, bit, value, value, 0, ); \ - } while (0) - - -#define READ -#define READWRITE read -#define RWContext GetBitContext - -#define xuia(width, string, var, range_min, range_max, subs, ...) 
do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, string, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, range_min, range_max)); \ - var = value; \ - } while (0) - -#define xsi(width, name, var, subs, ...) do { \ - int32_t value; \ - CHECK(ff_cbs_read_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - var = value; \ - } while (0) - -#define nextbits(width, compare, var) \ - (get_bits_left(rw) >= width && \ - (var = show_bits(rw, width)) == (compare)) - -#define infer(name, value) do { \ - current->name = value; \ - } while (0) - -#include "cbs_mpeg2_syntax_template.c" - -#undef READ -#undef READWRITE -#undef RWContext -#undef xuia -#undef xsi -#undef nextbits -#undef infer - - -#define WRITE -#define READWRITE write -#define RWContext PutBitContext - -#define xuia(width, string, var, range_min, range_max, subs, ...) do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, string, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - var, range_min, range_max)); \ - } while (0) - -#define xsi(width, name, var, subs, ...) 
do { \ - CHECK(ff_cbs_write_signed(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), var, \ - MIN_INT_BITS(width), \ - MAX_INT_BITS(width))); \ - } while (0) - -#define nextbits(width, compare, var) (var) - -#define infer(name, value) do { \ - if (current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \ - "%s does not match inferred value: " \ - "%"PRId64", but should be %"PRId64".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - } \ - } while (0) - -#include "cbs_mpeg2_syntax_template.c" - -#undef WRITE -#undef READWRITE -#undef RWContext -#undef xuia -#undef xsi -#undef nextbits -#undef infer - - -static int cbs_mpeg2_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) -{ - const uint8_t *start; - uint32_t start_code = -1; - int err; - - start = avpriv_find_start_code(frag->data, frag->data + frag->data_size, - &start_code); - if (start_code >> 8 != 0x000001) { - // No start code found. - return AVERROR_INVALIDDATA; - } - - do { - CodedBitstreamUnitType unit_type = start_code & 0xff; - const uint8_t *end; - size_t unit_size; - - // Reset start_code to ensure that avpriv_find_start_code() - // really reads a new start code and does not reuse the old - // start code in any way (as e.g. happens when there is a - // Sequence End unit at the very end of a packet). - start_code = UINT32_MAX; - end = avpriv_find_start_code(start--, frag->data + frag->data_size, - &start_code); - - // start points to the byte containing the start_code_identifier - // (may be the last byte of fragment->data); end points to the byte - // following the byte containing the start code identifier (or to - // the end of fragment->data). - if (start_code >> 8 == 0x000001) { - // Unit runs from start to the beginning of the start code - // pointed to by end (including any padding zeroes). - unit_size = (end - 4) - start; - } else { - // We didn't find a start code, so this is the final unit. 
- unit_size = end - start; - } - - err = ff_cbs_append_unit_data(frag, unit_type, (uint8_t*)start, - unit_size, frag->data_ref); - if (err < 0) - return err; - - start = end; - - // Do we have a further unit to add to the fragment? - } while ((start_code >> 8) == 0x000001); - - return 0; -} - -static int cbs_mpeg2_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - GetBitContext gbc; - int err; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if (err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if (err < 0) - return err; - - if (MPEG2_START_IS_SLICE(unit->type)) { - MPEG2RawSlice *slice = unit->content; - int pos, len; - - err = cbs_mpeg2_read_slice_header(ctx, &gbc, &slice->header); - if (err < 0) - return err; - - if (!get_bits_left(&gbc)) - return AVERROR_INVALIDDATA; - - pos = get_bits_count(&gbc); - len = unit->data_size; - - slice->data_size = len - pos / 8; - slice->data_ref = av_buffer_ref(unit->data_ref); - if (!slice->data_ref) - return AVERROR(ENOMEM); - slice->data = unit->data + pos / 8; - - slice->data_bit_start = pos % 8; - - } else { - switch (unit->type) { -#define START(start_code, type, read_func, free_func) \ - case start_code: \ - { \ - type *header = unit->content; \ - err = cbs_mpeg2_read_ ## read_func(ctx, &gbc, header); \ - if (err < 0) \ - return err; \ - } \ - break; - START(MPEG2_START_PICTURE, MPEG2RawPictureHeader, - picture_header, &cbs_mpeg2_free_picture_header); - START(MPEG2_START_USER_DATA, MPEG2RawUserData, - user_data, &cbs_mpeg2_free_user_data); - START(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader, - sequence_header, NULL); - START(MPEG2_START_EXTENSION, MPEG2RawExtensionData, - extension_data, NULL); - START(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader, - group_of_pictures_header, NULL); - START(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd, - sequence_end, NULL); -#undef START - default: - return AVERROR(ENOSYS); - } - } - - return 0; -} - -static int 
cbs_mpeg2_write_header(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - int err; - - switch (unit->type) { -#define START(start_code, type, func) \ - case start_code: \ - err = cbs_mpeg2_write_ ## func(ctx, pbc, unit->content); \ - break; - START(MPEG2_START_PICTURE, MPEG2RawPictureHeader, picture_header); - START(MPEG2_START_USER_DATA, MPEG2RawUserData, user_data); - START(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader, sequence_header); - START(MPEG2_START_EXTENSION, MPEG2RawExtensionData, extension_data); - START(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader, - group_of_pictures_header); - START(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd, sequence_end); -#undef START - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for start " - "code %02"PRIx32".\n", unit->type); - return AVERROR_PATCHWELCOME; - } - - return err; -} - -static int cbs_mpeg2_write_slice(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - MPEG2RawSlice *slice = unit->content; - int err; - - err = cbs_mpeg2_write_slice_header(ctx, pbc, &slice->header); - if (err < 0) - return err; - - if (slice->data) { - size_t rest = slice->data_size - (slice->data_bit_start + 7) / 8; - uint8_t *pos = slice->data + slice->data_bit_start / 8; - - av_assert0(slice->data_bit_start >= 0 && - slice->data_size > slice->data_bit_start / 8); - - if (slice->data_size * 8 + 8 > put_bits_left(pbc)) - return AVERROR(ENOSPC); - - // First copy the remaining bits of the first byte - if (slice->data_bit_start % 8) - put_bits(pbc, 8 - slice->data_bit_start % 8, - *pos++ & MAX_UINT_BITS(8 - slice->data_bit_start % 8)); - - if (put_bits_count(pbc) % 8 == 0) { - // If the writer is aligned at this point, - // memcpy can be used to improve performance. - // This is the normal case. 
- flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), pos, rest); - skip_put_bytes(pbc, rest); - } else { - // If not, we have to copy manually: - for (; rest > 3; rest -= 4, pos += 4) - put_bits32(pbc, AV_RB32(pos)); - - for (; rest; rest--, pos++) - put_bits(pbc, 8, *pos); - - // Align with zeros - put_bits(pbc, 8 - put_bits_count(pbc) % 8, 0); - } - } - - return 0; -} - -static int cbs_mpeg2_write_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - if (MPEG2_START_IS_SLICE(unit->type)) - return cbs_mpeg2_write_slice (ctx, unit, pbc); - else - return cbs_mpeg2_write_header(ctx, unit, pbc); -} - -static int cbs_mpeg2_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) -{ - uint8_t *data; - size_t size, dp; - int i; - - size = 0; - for (i = 0; i < frag->nb_units; i++) - size += 3 + frag->units[i].data_size; - - frag->data_ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!frag->data_ref) - return AVERROR(ENOMEM); - data = frag->data_ref->data; - - dp = 0; - for (i = 0; i < frag->nb_units; i++) { - CodedBitstreamUnit *unit = &frag->units[i]; - - data[dp++] = 0; - data[dp++] = 0; - data[dp++] = 1; - - memcpy(data + dp, unit->data, unit->data_size); - dp += unit->data_size; - } - - av_assert0(dp == size); - - memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - frag->data = data; - frag->data_size = size; - - return 0; -} - -static const CodedBitstreamUnitTypeDescriptor cbs_mpeg2_unit_types[] = { - CBS_UNIT_TYPE_INTERNAL_REF(MPEG2_START_PICTURE, MPEG2RawPictureHeader, - extra_information_picture.extra_information), - - { - .nb_unit_types = CBS_UNIT_TYPE_RANGE, - .unit_type_range_start = 0x01, - .unit_type_range_end = 0xaf, - - .content_type = CBS_CONTENT_TYPE_INTERNAL_REFS, - .content_size = sizeof(MPEG2RawSlice), - .nb_ref_offsets = 2, - .ref_offsets = { offsetof(MPEG2RawSlice, header.extra_information_slice.extra_information), - offsetof(MPEG2RawSlice, data) }, - }, - - 
CBS_UNIT_TYPE_INTERNAL_REF(MPEG2_START_USER_DATA, MPEG2RawUserData, - user_data), - - CBS_UNIT_TYPE_POD(MPEG2_START_SEQUENCE_HEADER, MPEG2RawSequenceHeader), - CBS_UNIT_TYPE_POD(MPEG2_START_EXTENSION, MPEG2RawExtensionData), - CBS_UNIT_TYPE_POD(MPEG2_START_SEQUENCE_END, MPEG2RawSequenceEnd), - CBS_UNIT_TYPE_POD(MPEG2_START_GROUP, MPEG2RawGroupOfPicturesHeader), - - CBS_UNIT_TYPE_END_OF_LIST -}; - -const CodedBitstreamType ff_cbs_type_mpeg2 = { - .codec_id = AV_CODEC_ID_MPEG2VIDEO, - - .priv_data_size = sizeof(CodedBitstreamMPEG2Context), - - .unit_types = cbs_mpeg2_unit_types, - - .split_fragment = &cbs_mpeg2_split_fragment, - .read_unit = &cbs_mpeg2_read_unit, - .write_unit = &cbs_mpeg2_write_unit, - .assemble_fragment = &cbs_mpeg2_assemble_fragment, -}; diff --git a/third-party/cbs/cbs_mpeg2_syntax_template.c b/third-party/cbs/cbs_mpeg2_syntax_template.c deleted file mode 100644 index 5165a14cd50..00000000000 --- a/third-party/cbs/cbs_mpeg2_syntax_template.c +++ /dev/null @@ -1,425 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(sequence_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceHeader *current) -{ - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err, i; - - HEADER("Sequence Header"); - - ui(8, sequence_header_code); - - uir(12, horizontal_size_value); - uir(12, vertical_size_value); - - mpeg2->horizontal_size = current->horizontal_size_value; - mpeg2->vertical_size = current->vertical_size_value; - - uir(4, aspect_ratio_information); - uir(4, frame_rate_code); - ui(18, bit_rate_value); - - marker_bit(); - - ui(10, vbv_buffer_size_value); - ui(1, constrained_parameters_flag); - - ui(1, load_intra_quantiser_matrix); - if (current->load_intra_quantiser_matrix) { - for (i = 0; i < 64; i++) - uirs(8, intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_non_intra_quantiser_matrix); - if (current->load_non_intra_quantiser_matrix) { - for (i = 0; i < 64; i++) - uirs(8, non_intra_quantiser_matrix[i], 1, i); - } - - return 0; -} - -static int FUNC(user_data)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawUserData *current) -{ - size_t k; - int err; - - HEADER("User Data"); - - ui(8, user_data_start_code); - -#ifdef READ - k = get_bits_left(rw); - av_assert0(k % 8 == 0); - current->user_data_length = k /= 8; - if (k > 0) { - current->user_data_ref = av_buffer_allocz(k + AV_INPUT_BUFFER_PADDING_SIZE); - if (!current->user_data_ref) - return AVERROR(ENOMEM); - current->user_data = current->user_data_ref->data; - } -#endif - - for (k = 0; k < current->user_data_length; k++) - uis(8, user_data[k], 1, k); - - return 0; -} - -static int FUNC(sequence_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceExtension *current) -{ - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err; - - 
HEADER("Sequence Extension"); - - ui(8, profile_and_level_indication); - ui(1, progressive_sequence); - ui(2, chroma_format); - ui(2, horizontal_size_extension); - ui(2, vertical_size_extension); - - mpeg2->horizontal_size = (mpeg2->horizontal_size & 0xfff) | - current->horizontal_size_extension << 12; - mpeg2->vertical_size = (mpeg2->vertical_size & 0xfff) | - current->vertical_size_extension << 12; - mpeg2->progressive_sequence = current->progressive_sequence; - - ui(12, bit_rate_extension); - marker_bit(); - ui(8, vbv_buffer_size_extension); - ui(1, low_delay); - ui(2, frame_rate_extension_n); - ui(5, frame_rate_extension_d); - - return 0; -} - -static int FUNC(sequence_display_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceDisplayExtension *current) -{ - int err; - - HEADER("Sequence Display Extension"); - - ui(3, video_format); - - ui(1, colour_description); - if (current->colour_description) { -#ifdef READ -#define READ_AND_PATCH(name) do { \ - ui(8, name); \ - if (current->name == 0) { \ - current->name = 2; \ - av_log(ctx->log_ctx, AV_LOG_WARNING, "%s in a sequence display " \ - "extension had the invalid value 0. 
Setting it to 2 " \ - "(meaning unknown) instead.\n", #name); \ - } \ - } while (0) - READ_AND_PATCH(colour_primaries); - READ_AND_PATCH(transfer_characteristics); - READ_AND_PATCH(matrix_coefficients); -#undef READ_AND_PATCH -#else - uir(8, colour_primaries); - uir(8, transfer_characteristics); - uir(8, matrix_coefficients); -#endif - } else { - infer(colour_primaries, 2); - infer(transfer_characteristics, 2); - infer(matrix_coefficients, 2); - } - - ui(14, display_horizontal_size); - marker_bit(); - ui(14, display_vertical_size); - - return 0; -} - -static int FUNC(group_of_pictures_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawGroupOfPicturesHeader *current) -{ - int err; - - HEADER("Group of Pictures Header"); - - ui(8, group_start_code); - - ui(25, time_code); - ui(1, closed_gop); - ui(1, broken_link); - - return 0; -} - -static int FUNC(extra_information)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawExtraInformation *current, - const char *element_name, const char *marker_name) -{ - int err; - size_t k; -#ifdef READ - GetBitContext start = *rw; - uint8_t bit; - - for (k = 0; nextbits(1, 1, bit); k++) - skip_bits(rw, 1 + 8); - current->extra_information_length = k; - if (k > 0) { - *rw = start; - current->extra_information_ref = - av_buffer_allocz(k + AV_INPUT_BUFFER_PADDING_SIZE); - if (!current->extra_information_ref) - return AVERROR(ENOMEM); - current->extra_information = current->extra_information_ref->data; - } -#endif - - for (k = 0; k < current->extra_information_length; k++) { - bit(marker_name, 1); - xuia(8, element_name, - current->extra_information[k], 0, 255, 1, k); - } - - bit(marker_name, 0); - - return 0; -} - -static int FUNC(picture_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawPictureHeader *current) -{ - int err; - - HEADER("Picture Header"); - - ui(8, picture_start_code); - - ui(10, temporal_reference); - uir(3, picture_coding_type); - ui(16, vbv_delay); - - if (current->picture_coding_type == 2 || - 
current->picture_coding_type == 3) { - ui(1, full_pel_forward_vector); - ui(3, forward_f_code); - } - - if (current->picture_coding_type == 3) { - ui(1, full_pel_backward_vector); - ui(3, backward_f_code); - } - - CHECK(FUNC(extra_information)(ctx, rw, ¤t->extra_information_picture, - "extra_information_picture[k]", "extra_bit_picture")); - - return 0; -} - -static int FUNC(picture_coding_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawPictureCodingExtension *current) -{ - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err; - - HEADER("Picture Coding Extension"); - - uir(4, f_code[0][0]); - uir(4, f_code[0][1]); - uir(4, f_code[1][0]); - uir(4, f_code[1][1]); - - ui(2, intra_dc_precision); - ui(2, picture_structure); - ui(1, top_field_first); - ui(1, frame_pred_frame_dct); - ui(1, concealment_motion_vectors); - ui(1, q_scale_type); - ui(1, intra_vlc_format); - ui(1, alternate_scan); - ui(1, repeat_first_field); - ui(1, chroma_420_type); - ui(1, progressive_frame); - - if (mpeg2->progressive_sequence) { - if (current->repeat_first_field) { - if (current->top_field_first) - mpeg2->number_of_frame_centre_offsets = 3; - else - mpeg2->number_of_frame_centre_offsets = 2; - } else { - mpeg2->number_of_frame_centre_offsets = 1; - } - } else { - if (current->picture_structure == 1 || // Top field. - current->picture_structure == 2) { // Bottom field. 
- mpeg2->number_of_frame_centre_offsets = 1; - } else { - if (current->repeat_first_field) - mpeg2->number_of_frame_centre_offsets = 3; - else - mpeg2->number_of_frame_centre_offsets = 2; - } - } - - ui(1, composite_display_flag); - if (current->composite_display_flag) { - ui(1, v_axis); - ui(3, field_sequence); - ui(1, sub_carrier); - ui(7, burst_amplitude); - ui(8, sub_carrier_phase); - } - - return 0; -} - -static int FUNC(quant_matrix_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawQuantMatrixExtension *current) -{ - int err, i; - - HEADER("Quant Matrix Extension"); - - ui(1, load_intra_quantiser_matrix); - if (current->load_intra_quantiser_matrix) { - for (i = 0; i < 64; i++) - uirs(8, intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_non_intra_quantiser_matrix); - if (current->load_non_intra_quantiser_matrix) { - for (i = 0; i < 64; i++) - uirs(8, non_intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_chroma_intra_quantiser_matrix); - if (current->load_chroma_intra_quantiser_matrix) { - for (i = 0; i < 64; i++) - uirs(8, intra_quantiser_matrix[i], 1, i); - } - - ui(1, load_chroma_non_intra_quantiser_matrix); - if (current->load_chroma_non_intra_quantiser_matrix) { - for (i = 0; i < 64; i++) - uirs(8, chroma_non_intra_quantiser_matrix[i], 1, i); - } - - return 0; -} - -static int FUNC(picture_display_extension)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawPictureDisplayExtension *current) -{ - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err, i; - - HEADER("Picture Display Extension"); - - for (i = 0; i < mpeg2->number_of_frame_centre_offsets; i++) { - sis(16, frame_centre_horizontal_offset[i], 1, i); - marker_bit(); - sis(16, frame_centre_vertical_offset[i], 1, i); - marker_bit(); - } - - return 0; -} - -static int FUNC(extension_data)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawExtensionData *current) -{ - int err; - - HEADER("Extension Data"); - - ui(8, extension_start_code); - ui(4, 
extension_start_code_identifier); - - switch (current->extension_start_code_identifier) { - case MPEG2_EXTENSION_SEQUENCE: - return FUNC(sequence_extension) - (ctx, rw, ¤t->data.sequence); - case MPEG2_EXTENSION_SEQUENCE_DISPLAY: - return FUNC(sequence_display_extension) - (ctx, rw, ¤t->data.sequence_display); - case MPEG2_EXTENSION_QUANT_MATRIX: - return FUNC(quant_matrix_extension) - (ctx, rw, ¤t->data.quant_matrix); - case MPEG2_EXTENSION_PICTURE_DISPLAY: - return FUNC(picture_display_extension) - (ctx, rw, ¤t->data.picture_display); - case MPEG2_EXTENSION_PICTURE_CODING: - return FUNC(picture_coding_extension) - (ctx, rw, ¤t->data.picture_coding); - default: - av_log(ctx->log_ctx, AV_LOG_ERROR, "Extension ID %d not supported.\n", - current->extension_start_code_identifier); - return AVERROR_PATCHWELCOME; - } -} - -static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSliceHeader *current) -{ - CodedBitstreamMPEG2Context *mpeg2 = ctx->priv_data; - int err; - - HEADER("Slice Header"); - - ui(8, slice_vertical_position); - - if (mpeg2->vertical_size > 2800) - ui(3, slice_vertical_position_extension); - if (mpeg2->scalable) { - if (mpeg2->scalable_mode == 0) - ui(7, priority_breakpoint); - } - - uir(5, quantiser_scale_code); - - if (nextbits(1, 1, current->slice_extension_flag)) { - ui(1, slice_extension_flag); - ui(1, intra_slice); - ui(1, slice_picture_id_enable); - ui(6, slice_picture_id); - } - - CHECK(FUNC(extra_information)(ctx, rw, ¤t->extra_information_slice, - "extra_information_slice[k]", "extra_bit_slice")); - - return 0; -} - -static int FUNC(sequence_end)(CodedBitstreamContext *ctx, RWContext *rw, - MPEG2RawSequenceEnd *current) -{ - int err; - - HEADER("Sequence End"); - - ui(8, sequence_end_code); - - return 0; -} diff --git a/third-party/cbs/cbs_sei.c b/third-party/cbs/cbs_sei.c deleted file mode 100644 index d18d3cb46fe..00000000000 --- a/third-party/cbs/cbs_sei.c +++ /dev/null @@ -1,370 +0,0 @@ -/* - * This file is 
part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -// [sunshine] Changed include path -#include "cbs/cbs.h" -#include "cbs_internal.h" -#include "cbs/cbs_h264.h" -#include "cbs/cbs_h265.h" -#include "cbs/cbs_sei.h" - -static void cbs_free_user_data_registered(void *opaque, uint8_t *data) -{ - SEIRawUserDataRegistered *udr = (SEIRawUserDataRegistered*)data; - av_buffer_unref(&udr->data_ref); - av_free(udr); -} - -static void cbs_free_user_data_unregistered(void *opaque, uint8_t *data) -{ - SEIRawUserDataUnregistered *udu = (SEIRawUserDataUnregistered*)data; - av_buffer_unref(&udu->data_ref); - av_free(udu); -} - -int ff_cbs_sei_alloc_message_payload(SEIRawMessage *message, - const SEIMessageTypeDescriptor *desc) -{ - void (*free_func)(void*, uint8_t*); - - av_assert0(message->payload == NULL && - message->payload_ref == NULL); - message->payload_type = desc->type; - - if (desc->type == SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35) - free_func = &cbs_free_user_data_registered; - else if (desc->type == SEI_TYPE_USER_DATA_UNREGISTERED) - free_func = &cbs_free_user_data_unregistered; - else - free_func = NULL; - - if (free_func) { - message->payload = av_mallocz(desc->size); - if (!message->payload) - return AVERROR(ENOMEM); - message->payload_ref = - 
av_buffer_create(message->payload, desc->size, - free_func, NULL, 0); - } else { - message->payload_ref = av_buffer_alloc(desc->size); - } - if (!message->payload_ref) { - av_freep(&message->payload); - return AVERROR(ENOMEM); - } - message->payload = message->payload_ref->data; - - return 0; -} - -int ff_cbs_sei_list_add(SEIRawMessageList *list) -{ - void *ptr; - int old_count = list->nb_messages_allocated; - - av_assert0(list->nb_messages <= old_count); - if (list->nb_messages + 1 > old_count) { - int new_count = 2 * old_count + 1; - - ptr = av_realloc_array(list->messages, - new_count, sizeof(*list->messages)); - if (!ptr) - return AVERROR(ENOMEM); - - list->messages = ptr; - list->nb_messages_allocated = new_count; - - // Zero the newly-added entries. - memset(list->messages + old_count, 0, - (new_count - old_count) * sizeof(*list->messages)); - } - ++list->nb_messages; - return 0; -} - -void ff_cbs_sei_free_message_list(SEIRawMessageList *list) -{ - for (int i = 0; i < list->nb_messages; i++) { - SEIRawMessage *message = &list->messages[i]; - av_buffer_unref(&message->payload_ref); - av_buffer_unref(&message->extension_data_ref); - } - av_free(list->messages); -} - -static int cbs_sei_get_unit(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - int prefix, - CodedBitstreamUnit **sei_unit) -{ - CodedBitstreamUnit *unit; - int sei_type, highest_vcl_type, err, i, position; - - switch (ctx->codec->codec_id) { - case AV_CODEC_ID_H264: - // (We can ignore auxiliary slices because we only have prefix - // SEI in H.264 and an auxiliary picture must always follow a - // primary picture.) 
- highest_vcl_type = H264_NAL_IDR_SLICE; - if (prefix) - sei_type = H264_NAL_SEI; - else - return AVERROR(EINVAL); - break; - case AV_CODEC_ID_H265: - highest_vcl_type = HEVC_NAL_RSV_VCL31; - if (prefix) - sei_type = HEVC_NAL_SEI_PREFIX; - else - sei_type = HEVC_NAL_SEI_SUFFIX; - break; - default: - return AVERROR(EINVAL); - } - - // Find an existing SEI NAL unit of the right type. - unit = NULL; - for (i = 0; i < au->nb_units; i++) { - if (au->units[i].type == sei_type) { - unit = &au->units[i]; - break; - } - } - - if (unit) { - *sei_unit = unit; - return 0; - } - - // Need to add a new SEI NAL unit ... - if (prefix) { - // ... before the first VCL NAL unit. - for (i = 0; i < au->nb_units; i++) { - if (au->units[i].type < highest_vcl_type) - break; - } - position = i; - } else { - // ... after the last VCL NAL unit. - for (i = au->nb_units - 1; i >= 0; i--) { - if (au->units[i].type < highest_vcl_type) - break; - } - if (i < 0) { - // No VCL units; just put it at the end. - position = au->nb_units; - } else { - position = i + 1; - } - } - - err = ff_cbs_insert_unit_content(au, position, sei_type, - NULL, NULL); - if (err < 0) - return err; - unit = &au->units[position]; - unit->type = sei_type; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if (err < 0) - return err; - - switch (ctx->codec->codec_id) { - case AV_CODEC_ID_H264: - { - H264RawSEI sei = { - .nal_unit_header = { - .nal_ref_idc = 0, - .nal_unit_type = sei_type, - }, - }; - memcpy(unit->content, &sei, sizeof(sei)); - } - break; - case AV_CODEC_ID_H265: - { - H265RawSEI sei = { - .nal_unit_header = { - .nal_unit_type = sei_type, - .nuh_layer_id = 0, - .nuh_temporal_id_plus1 = 1, - }, - }; - memcpy(unit->content, &sei, sizeof(sei)); - } - break; - default: - av_assert0(0); - } - - *sei_unit = unit; - return 0; -} - -static int cbs_sei_get_message_list(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - SEIRawMessageList **list) -{ - switch (ctx->codec->codec_id) { - case AV_CODEC_ID_H264: - { - 
H264RawSEI *sei = unit->content; - if (unit->type != H264_NAL_SEI) - return AVERROR(EINVAL); - *list = &sei->message_list; - } - break; - case AV_CODEC_ID_H265: - { - H265RawSEI *sei = unit->content; - if (unit->type != HEVC_NAL_SEI_PREFIX && - unit->type != HEVC_NAL_SEI_SUFFIX) - return AVERROR(EINVAL); - *list = &sei->message_list; - } - break; - default: - return AVERROR(EINVAL); - } - - return 0; -} - -int ff_cbs_sei_add_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - int prefix, - uint32_t payload_type, - void *payload_data, - AVBufferRef *payload_buf) -{ - const SEIMessageTypeDescriptor *desc; - CodedBitstreamUnit *unit; - SEIRawMessageList *list; - SEIRawMessage *message; - AVBufferRef *payload_ref; - int err; - - desc = ff_cbs_sei_find_type(ctx, payload_type); - if (!desc) - return AVERROR(EINVAL); - - // Find an existing SEI unit or make a new one to add to. - err = cbs_sei_get_unit(ctx, au, prefix, &unit); - if (err < 0) - return err; - - // Find the message list inside the codec-dependent unit. - err = cbs_sei_get_message_list(ctx, unit, &list); - if (err < 0) - return err; - - // Add a new message to the message list. 
- err = ff_cbs_sei_list_add(list); - if (err < 0) - return err; - - if (payload_buf) { - payload_ref = av_buffer_ref(payload_buf); - if (!payload_ref) - return AVERROR(ENOMEM); - } else { - payload_ref = NULL; - } - - message = &list->messages[list->nb_messages - 1]; - - message->payload_type = payload_type; - message->payload = payload_data; - message->payload_ref = payload_ref; - - return 0; -} - -int ff_cbs_sei_find_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type, - SEIRawMessage **iter) -{ - int err, i, j, found; - - found = 0; - for (i = 0; i < au->nb_units; i++) { - CodedBitstreamUnit *unit = &au->units[i]; - SEIRawMessageList *list; - - err = cbs_sei_get_message_list(ctx, unit, &list); - if (err < 0) - continue; - - for (j = 0; j < list->nb_messages; j++) { - SEIRawMessage *message = &list->messages[j]; - - if (message->payload_type == payload_type) { - if (!*iter || found) { - *iter = message; - return 0; - } - if (message == *iter) - found = 1; - } - } - } - - return AVERROR(ENOENT); -} - -static void cbs_sei_delete_message(SEIRawMessageList *list, - int position) -{ - SEIRawMessage *message; - - av_assert0(0 <= position && position < list->nb_messages); - - message = &list->messages[position]; - av_buffer_unref(&message->payload_ref); - av_buffer_unref(&message->extension_data_ref); - - --list->nb_messages; - - if (list->nb_messages > 0) { - memmove(list->messages + position, - list->messages + position + 1, - (list->nb_messages - position) * sizeof(*list->messages)); - } -} - -void ff_cbs_sei_delete_message_type(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type) -{ - int err, i, j; - - for (i = 0; i < au->nb_units; i++) { - CodedBitstreamUnit *unit = &au->units[i]; - SEIRawMessageList *list; - - err = cbs_sei_get_message_list(ctx, unit, &list); - if (err < 0) - continue; - - for (j = list->nb_messages - 1; j >= 0; j--) { - if (list->messages[j].payload_type == payload_type) - 
cbs_sei_delete_message(list, j); - } - } -} diff --git a/third-party/cbs/cbs_sei_syntax_template.c b/third-party/cbs/cbs_sei_syntax_template.c deleted file mode 100644 index 0ef7b42ed9c..00000000000 --- a/third-party/cbs/cbs_sei_syntax_template.c +++ /dev/null @@ -1,322 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(filler_payload) - (CodedBitstreamContext *ctx, RWContext *rw, - SEIRawFillerPayload *current, SEIMessageState *state) -{ - int err, i; - - HEADER("Filler Payload"); - -#ifdef READ - current->payload_size = state->payload_size; -#endif - - for (i = 0; i < current->payload_size; i++) - fixed(8, ff_byte, 0xff); - - return 0; -} - -static int FUNC(user_data_registered) - (CodedBitstreamContext *ctx, RWContext *rw, - SEIRawUserDataRegistered *current, SEIMessageState *state) -{ - int err, i, j; - - HEADER("User Data Registered ITU-T T.35"); - - u(8, itu_t_t35_country_code, 0x00, 0xff); - if (current->itu_t_t35_country_code != 0xff) - i = 1; - else { - u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff); - i = 2; - } - -#ifdef READ - if (state->payload_size < i) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Invalid SEI user data registered payload.\n"); - return AVERROR_INVALIDDATA; - } - current->data_length = 
state->payload_size - i; -#endif - - allocate(current->data, current->data_length); - for (j = 0; j < current->data_length; j++) - xu(8, itu_t_t35_payload_byte[], current->data[j], 0x00, 0xff, 1, i + j); - - return 0; -} - -static int FUNC(user_data_unregistered) - (CodedBitstreamContext *ctx, RWContext *rw, - SEIRawUserDataUnregistered *current, SEIMessageState *state) -{ - int err, i; - - HEADER("User Data Unregistered"); - -#ifdef READ - if (state->payload_size < 16) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Invalid SEI user data unregistered payload.\n"); - return AVERROR_INVALIDDATA; - } - current->data_length = state->payload_size - 16; -#endif - - for (i = 0; i < 16; i++) - us(8, uuid_iso_iec_11578[i], 0x00, 0xff, 1, i); - - allocate(current->data, current->data_length); - - for (i = 0; i < current->data_length; i++) - xu(8, user_data_payload_byte[i], current->data[i], 0x00, 0xff, 1, i); - - return 0; -} - -static int FUNC(mastering_display_colour_volume) - (CodedBitstreamContext *ctx, RWContext *rw, - SEIRawMasteringDisplayColourVolume *current, SEIMessageState *state) -{ - int err, c; - - HEADER("Mastering Display Colour Volume"); - - for (c = 0; c < 3; c++) { - ubs(16, display_primaries_x[c], 1, c); - ubs(16, display_primaries_y[c], 1, c); - } - - ub(16, white_point_x); - ub(16, white_point_y); - - ub(32, max_display_mastering_luminance); - ub(32, min_display_mastering_luminance); - - return 0; -} - -static int FUNC(content_light_level_info) - (CodedBitstreamContext *ctx, RWContext *rw, - SEIRawContentLightLevelInfo *current, SEIMessageState *state) -{ - int err; - - HEADER("Content Light Level Information"); - - ub(16, max_content_light_level); - ub(16, max_pic_average_light_level); - - return 0; -} - -static int FUNC(alternative_transfer_characteristics) - (CodedBitstreamContext *ctx, RWContext *rw, - SEIRawAlternativeTransferCharacteristics *current, - SEIMessageState *state) -{ - int err; - - HEADER("Alternative Transfer Characteristics"); - - ub(8, 
preferred_transfer_characteristics); - - return 0; -} - -static int FUNC(message)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawMessage *current) -{ - const SEIMessageTypeDescriptor *desc; - int err, i; - - desc = ff_cbs_sei_find_type(ctx, current->payload_type); - if (desc) { - SEIMessageState state = { - .payload_type = current->payload_type, - .payload_size = current->payload_size, - .extension_present = current->extension_bit_length > 0, - }; - int start_position, current_position, bits_written; - -#ifdef READ - CHECK(ff_cbs_sei_alloc_message_payload(current, desc)); -#endif - - start_position = bit_position(rw); - - CHECK(desc->READWRITE(ctx, rw, current->payload, &state)); - - current_position = bit_position(rw); - bits_written = current_position - start_position; - - if (byte_alignment(rw) || state.extension_present || - bits_written < 8 * current->payload_size) { - size_t bits_left; - -#ifdef READ - GetBitContext tmp = *rw; - int trailing_bits, trailing_zero_bits; - - bits_left = 8 * current->payload_size - bits_written; - if (bits_left > 8) - skip_bits_long(&tmp, bits_left - 8); - trailing_bits = get_bits(&tmp, FFMIN(bits_left, 8)); - if (trailing_bits == 0) { - // The trailing bits must contain a bit_equal_to_one, so - // they can't all be zero. 
- return AVERROR_INVALIDDATA; - } - trailing_zero_bits = ff_ctz(trailing_bits); - current->extension_bit_length = - bits_left - 1 - trailing_zero_bits; -#endif - - if (current->extension_bit_length > 0) { - allocate(current->extension_data, - (current->extension_bit_length + 7) / 8); - - bits_left = current->extension_bit_length; - for (i = 0; bits_left > 0; i++) { - int length = FFMIN(bits_left, 8); - xu(length, reserved_payload_extension_data, - current->extension_data[i], - 0, MAX_UINT_BITS(length), 0); - bits_left -= length; - } - } - - fixed(1, bit_equal_to_one, 1); - while (byte_alignment(rw)) - fixed(1, bit_equal_to_zero, 0); - } - -#ifdef WRITE - current->payload_size = (put_bits_count(rw) - start_position) / 8; -#endif - } else { - uint8_t *data; - - allocate(current->payload, current->payload_size); - data = current->payload; - - for (i = 0; i < current->payload_size; i++) - xu(8, payload_byte[i], data[i], 0, 255, 1, i); - } - - return 0; -} - -static int FUNC(message_list)(CodedBitstreamContext *ctx, RWContext *rw, - SEIRawMessageList *current, int prefix) -{ - SEIRawMessage *message; - int err, k; - -#ifdef READ - for (k = 0;; k++) { - uint32_t payload_type = 0; - uint32_t payload_size = 0; - uint32_t tmp; - GetBitContext payload_gbc; - - while (show_bits(rw, 8) == 0xff) { - fixed(8, ff_byte, 0xff); - payload_type += 255; - } - xu(8, last_payload_type_byte, tmp, 0, 254, 0); - payload_type += tmp; - - while (show_bits(rw, 8) == 0xff) { - fixed(8, ff_byte, 0xff); - payload_size += 255; - } - xu(8, last_payload_size_byte, tmp, 0, 254, 0); - payload_size += tmp; - - // There must be space remaining for both the payload and - // the trailing bits on the SEI NAL unit. 
- if (payload_size + 1 > get_bits_left(rw) / 8) { - av_log(ctx->log_ctx, AV_LOG_ERROR, - "Invalid SEI message: payload_size too large " - "(%"PRIu32" bytes).\n", payload_size); - return AVERROR_INVALIDDATA; - } - CHECK(init_get_bits(&payload_gbc, rw->buffer, - get_bits_count(rw) + 8 * payload_size)); - skip_bits_long(&payload_gbc, get_bits_count(rw)); - - CHECK(ff_cbs_sei_list_add(current)); - message = ¤t->messages[k]; - - message->payload_type = payload_type; - message->payload_size = payload_size; - - CHECK(FUNC(message)(ctx, &payload_gbc, message)); - - skip_bits_long(rw, 8 * payload_size); - - if (!cbs_h2645_read_more_rbsp_data(rw)) - break; - } -#else - for (k = 0; k < current->nb_messages; k++) { - PutBitContext start_state; - uint32_t tmp; - int trace, i; - - message = ¤t->messages[k]; - - // We write the payload twice in order to find the size. Trace - // output is switched off for the first write. - trace = ctx->trace_enable; - ctx->trace_enable = 0; - - start_state = *rw; - for (i = 0; i < 2; i++) { - *rw = start_state; - - tmp = message->payload_type; - while (tmp >= 255) { - fixed(8, ff_byte, 0xff); - tmp -= 255; - } - xu(8, last_payload_type_byte, tmp, 0, 254, 0); - - tmp = message->payload_size; - while (tmp >= 255) { - fixed(8, ff_byte, 0xff); - tmp -= 255; - } - xu(8, last_payload_size_byte, tmp, 0, 254, 0); - - err = FUNC(message)(ctx, rw, message); - ctx->trace_enable = trace; - if (err < 0) - return err; - } - } -#endif - - return 0; -} diff --git a/third-party/cbs/cbs_vp9.c b/third-party/cbs/cbs_vp9.c deleted file mode 100644 index 77f7aad5b80..00000000000 --- a/third-party/cbs/cbs_vp9.c +++ /dev/null @@ -1,662 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include "libavutil/avassert.h" - -#include "cbs/cbs.h" -#include "cbs_internal.h" -#include "cbs/cbs_vp9.h" - - -static int cbs_vp9_read_s(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, int32_t *write_to) -{ - uint32_t magnitude; - int position, sign; - int32_t value; - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - if (get_bits_left(gbc) < width + 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid signed value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - magnitude = get_bits(gbc, width); - sign = get_bits1(gbc); - value = sign ? -(int32_t)magnitude : magnitude; - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < width; i++) - bits[i] = magnitude >> (width - i - 1) & 1 ? '1' : '0'; - bits[i] = sign ? '1' : '0'; - bits[i + 1] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - *write_to = value; - return 0; -} - -static int cbs_vp9_write_s(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, int32_t value) -{ - uint32_t magnitude; - int sign; - - if (put_bits_left(pbc) < width + 1) - return AVERROR(ENOSPC); - - sign = value < 0; - magnitude = sign ? -value : value; - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (i = 0; i < width; i++) - bits[i] = magnitude >> (width - i - 1) & 1 ? '1' : '0'; - bits[i] = sign ? 
'1' : '0'; - bits[i + 1] = 0; - - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } - - put_bits(pbc, width, magnitude); - put_bits(pbc, 1, sign); - - return 0; -} - -static int cbs_vp9_read_increment(CodedBitstreamContext *ctx, GetBitContext *gbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t *write_to) -{ - uint32_t value; - int position, i; - char bits[8]; - - av_assert0(range_min <= range_max && range_max - range_min < sizeof(bits) - 1); - if (ctx->trace_enable) - position = get_bits_count(gbc); - - for (i = 0, value = range_min; value < range_max;) { - if (get_bits_left(gbc) < 1) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid increment value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - if (get_bits1(gbc)) { - bits[i++] = '1'; - ++value; - } else { - bits[i++] = '0'; - break; - } - } - - if (ctx->trace_enable) { - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, position, name, NULL, bits, value); - } - - *write_to = value; - return 0; -} - -static int cbs_vp9_write_increment(CodedBitstreamContext *ctx, PutBitContext *pbc, - uint32_t range_min, uint32_t range_max, - const char *name, uint32_t value) -{ - int len; - - av_assert0(range_min <= range_max && range_max - range_min < 8); - if (value < range_min || value > range_max) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: " - "%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n", - name, value, range_min, range_max); - return AVERROR_INVALIDDATA; - } - - if (value == range_max) - len = range_max - range_min; - else - len = value - range_min + 1; - if (put_bits_left(pbc) < len) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[8]; - int i; - for (i = 0; i < len; i++) { - if (range_min + i == value) - bits[i] = '0'; - else - bits[i] = '1'; - } - bits[i] = 0; - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, NULL, bits, value); - } - - if (len > 0) - put_bits(pbc, len, (1 << 
len) - 1 - (value != range_max)); - - return 0; -} - -static int cbs_vp9_read_le(CodedBitstreamContext *ctx, GetBitContext *gbc, - int width, const char *name, - const int *subscripts, uint32_t *write_to) -{ - uint32_t value; - int position, b; - - av_assert0(width % 8 == 0); - - if (ctx->trace_enable) - position = get_bits_count(gbc); - - if (get_bits_left(gbc) < width) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid le value at " - "%s: bitstream ended.\n", name); - return AVERROR_INVALIDDATA; - } - - value = 0; - for (b = 0; b < width; b += 8) - value |= get_bits(gbc, 8) << b; - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (b = 0; b < width; b += 8) - for (i = 0; i < 8; i++) - bits[b + i] = value >> (b + i) & 1 ? '1' : '0'; - bits[b] = 0; - - ff_cbs_trace_syntax_element(ctx, position, name, subscripts, - bits, value); - } - - *write_to = value; - return 0; -} - -static int cbs_vp9_write_le(CodedBitstreamContext *ctx, PutBitContext *pbc, - int width, const char *name, - const int *subscripts, uint32_t value) -{ - int b; - - av_assert0(width % 8 == 0); - - if (put_bits_left(pbc) < width) - return AVERROR(ENOSPC); - - if (ctx->trace_enable) { - char bits[33]; - int i; - for (b = 0; b < width; b += 8) - for (i = 0; i < 8; i++) - bits[b + i] = value >> (b + i) & 1 ? '1' : '0'; - bits[b] = 0; - - ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), - name, subscripts, bits, value); - } - - for (b = 0; b < width; b += 8) - put_bits(pbc, 8, value >> b & 0xff); - - return 0; -} - -#define HEADER(name) do { \ - ff_cbs_trace_header(ctx, name); \ - } while (0) - -#define CHECK(call) do { \ - err = (call); \ - if (err < 0) \ - return err; \ - } while (0) - -#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name -#define FUNC_VP9(rw, name) FUNC_NAME(rw, vp9, name) -#define FUNC(name) FUNC_VP9(READWRITE, name) - -#define SUBSCRIPTS(subs, ...) (subs > 0 ? 
((int[subs + 1]){ subs, __VA_ARGS__ }) : NULL) - -#define f(width, name) \ - xf(width, name, current->name, 0, ) -#define s(width, name) \ - xs(width, name, current->name, 0, ) -#define fs(width, name, subs, ...) \ - xf(width, name, current->name, subs, __VA_ARGS__) -#define ss(width, name, subs, ...) \ - xs(width, name, current->name, subs, __VA_ARGS__) - -#define READ -#define READWRITE read -#define RWContext GetBitContext - -#define xf(width, name, var, subs, ...) do { \ - uint32_t value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - &value, 0, (1 << width) - 1)); \ - var = value; \ - } while (0) -#define xs(width, name, var, subs, ...) do { \ - int32_t value; \ - CHECK(cbs_vp9_read_s(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), &value)); \ - var = value; \ - } while (0) - - -#define increment(name, min, max) do { \ - uint32_t value; \ - CHECK(cbs_vp9_read_increment(ctx, rw, min, max, #name, &value)); \ - current->name = value; \ - } while (0) - -#define fle(width, name, subs, ...) do { \ - CHECK(cbs_vp9_read_le(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), ¤t->name)); \ - } while (0) - -#define delta_q(name) do { \ - uint8_t delta_coded; \ - int8_t delta_q; \ - xf(1, name.delta_coded, delta_coded, 0, ); \ - if (delta_coded) \ - xs(4, name.delta_q, delta_q, 0, ); \ - else \ - delta_q = 0; \ - current->name = delta_q; \ - } while (0) - -#define prob(name, subs, ...) 
do { \ - uint8_t prob_coded; \ - uint8_t prob; \ - xf(1, name.prob_coded, prob_coded, subs, __VA_ARGS__); \ - if (prob_coded) \ - xf(8, name.prob, prob, subs, __VA_ARGS__); \ - else \ - prob = 255; \ - current->name = prob; \ - } while (0) - -#define fixed(width, name, value) do { \ - av_unused uint32_t fixed_value; \ - CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \ - 0, &fixed_value, value, value)); \ - } while (0) - -#define infer(name, value) do { \ - current->name = value; \ - } while (0) - -#define byte_alignment(rw) (get_bits_count(rw) % 8) - -#include "cbs_vp9_syntax_template.c" - -#undef READ -#undef READWRITE -#undef RWContext -#undef xf -#undef xs -#undef increment -#undef fle -#undef delta_q -#undef prob -#undef fixed -#undef infer -#undef byte_alignment - - -#define WRITE -#define READWRITE write -#define RWContext PutBitContext - -#define xf(width, name, var, subs, ...) do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), \ - var, 0, (1 << width) - 1)); \ - } while (0) -#define xs(width, name, var, subs, ...) do { \ - CHECK(cbs_vp9_write_s(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), var)); \ - } while (0) - -#define increment(name, min, max) do { \ - CHECK(cbs_vp9_write_increment(ctx, rw, min, max, #name, current->name)); \ - } while (0) - -#define fle(width, name, subs, ...) do { \ - CHECK(cbs_vp9_write_le(ctx, rw, width, #name, \ - SUBSCRIPTS(subs, __VA_ARGS__), current->name)); \ - } while (0) - -#define delta_q(name) do { \ - xf(1, name.delta_coded, !!current->name, 0, ); \ - if (current->name) \ - xs(4, name.delta_q, current->name, 0, ); \ - } while (0) - -#define prob(name, subs, ...) 
do { \ - xf(1, name.prob_coded, current->name != 255, subs, __VA_ARGS__); \ - if (current->name != 255) \ - xf(8, name.prob, current->name, subs, __VA_ARGS__); \ - } while (0) - -#define fixed(width, name, value) do { \ - CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \ - 0, value, value, value)); \ - } while (0) - -#define infer(name, value) do { \ - if (current->name != (value)) { \ - av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \ - "%s does not match inferred value: " \ - "%"PRId64", but should be %"PRId64".\n", \ - #name, (int64_t)current->name, (int64_t)(value)); \ - } \ - } while (0) - -#define byte_alignment(rw) (put_bits_count(rw) % 8) - -#include "cbs_vp9_syntax_template.c" - -#undef WRITE -#undef READWRITE -#undef RWContext -#undef xf -#undef xs -#undef increment -#undef fle -#undef delta_q -#undef prob -#undef fixed -#undef infer -#undef byte_alignment - - -static int cbs_vp9_split_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - int header) -{ - uint8_t superframe_header; - int err; - - if (frag->data_size == 0) - return AVERROR_INVALIDDATA; - - // Last byte in the packet. 
- superframe_header = frag->data[frag->data_size - 1]; - - if ((superframe_header & 0xe0) == 0xc0) { - VP9RawSuperframeIndex sfi; - GetBitContext gbc; - size_t index_size, pos; - int i; - - index_size = 2 + (((superframe_header & 0x18) >> 3) + 1) * - ((superframe_header & 0x07) + 1); - - if (index_size > frag->data_size) - return AVERROR_INVALIDDATA; - - err = init_get_bits(&gbc, frag->data + frag->data_size - index_size, - 8 * index_size); - if (err < 0) - return err; - - err = cbs_vp9_read_superframe_index(ctx, &gbc, &sfi); - if (err < 0) - return err; - - pos = 0; - for (i = 0; i <= sfi.frames_in_superframe_minus_1; i++) { - if (pos + sfi.frame_sizes[i] + index_size > frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Frame %d too large " - "in superframe: %"PRIu32" bytes.\n", - i, sfi.frame_sizes[i]); - return AVERROR_INVALIDDATA; - } - - err = ff_cbs_append_unit_data(frag, 0, - frag->data + pos, - sfi.frame_sizes[i], - frag->data_ref); - if (err < 0) - return err; - - pos += sfi.frame_sizes[i]; - } - if (pos + index_size != frag->data_size) { - av_log(ctx->log_ctx, AV_LOG_WARNING, "Extra padding at " - "end of superframe: %zu bytes.\n", - frag->data_size - (pos + index_size)); - } - - return 0; - - } else { - err = ff_cbs_append_unit_data(frag, 0, - frag->data, frag->data_size, - frag->data_ref); - if (err < 0) - return err; - } - - return 0; -} - -static int cbs_vp9_read_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit) -{ - VP9RawFrame *frame; - GetBitContext gbc; - int err, pos; - - err = init_get_bits(&gbc, unit->data, 8 * unit->data_size); - if (err < 0) - return err; - - err = ff_cbs_alloc_unit_content2(ctx, unit); - if (err < 0) - return err; - frame = unit->content; - - err = cbs_vp9_read_frame(ctx, &gbc, frame); - if (err < 0) - return err; - - pos = get_bits_count(&gbc); - av_assert0(pos % 8 == 0); - pos /= 8; - av_assert0(pos <= unit->data_size); - - if (pos == unit->data_size) { - // No data (e.g. a show-existing-frame frame). 
- } else { - frame->data_ref = av_buffer_ref(unit->data_ref); - if (!frame->data_ref) - return AVERROR(ENOMEM); - - frame->data = unit->data + pos; - frame->data_size = unit->data_size - pos; - } - - return 0; -} - -static int cbs_vp9_write_unit(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit, - PutBitContext *pbc) -{ - VP9RawFrame *frame = unit->content; - int err; - - err = cbs_vp9_write_frame(ctx, pbc, frame); - if (err < 0) - return err; - - // Frame must be byte-aligned. - av_assert0(put_bits_count(pbc) % 8 == 0); - - if (frame->data) { - if (frame->data_size > put_bits_left(pbc) / 8) - return AVERROR(ENOSPC); - - flush_put_bits(pbc); - memcpy(put_bits_ptr(pbc), frame->data, frame->data_size); - skip_put_bytes(pbc, frame->data_size); - } - - return 0; -} - -static int cbs_vp9_assemble_fragment(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag) -{ - int err; - - if (frag->nb_units == 1) { - // Output is just the content of the single frame. - - CodedBitstreamUnit *frame = &frag->units[0]; - - frag->data_ref = av_buffer_ref(frame->data_ref); - if (!frag->data_ref) - return AVERROR(ENOMEM); - - frag->data = frame->data; - frag->data_size = frame->data_size; - - } else { - // Build superframe out of frames. 
- - VP9RawSuperframeIndex sfi; - PutBitContext pbc; - AVBufferRef *ref; - uint8_t *data; - size_t size, max, pos; - int i, size_len; - - if (frag->nb_units > 8) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many frames to " - "make superframe: %d.\n", frag->nb_units); - return AVERROR(EINVAL); - } - - max = 0; - for (i = 0; i < frag->nb_units; i++) - if (max < frag->units[i].data_size) - max = frag->units[i].data_size; - - if (max < 2) - size_len = 1; - else - size_len = av_log2(max) / 8 + 1; - av_assert0(size_len <= 4); - - sfi.superframe_marker = VP9_SUPERFRAME_MARKER; - sfi.bytes_per_framesize_minus_1 = size_len - 1; - sfi.frames_in_superframe_minus_1 = frag->nb_units - 1; - - size = 2; - for (i = 0; i < frag->nb_units; i++) { - size += size_len + frag->units[i].data_size; - sfi.frame_sizes[i] = frag->units[i].data_size; - } - - ref = av_buffer_alloc(size + AV_INPUT_BUFFER_PADDING_SIZE); - if (!ref) - return AVERROR(ENOMEM); - data = ref->data; - memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - pos = 0; - for (i = 0; i < frag->nb_units; i++) { - av_assert0(size - pos > frag->units[i].data_size); - memcpy(data + pos, frag->units[i].data, - frag->units[i].data_size); - pos += frag->units[i].data_size; - } - av_assert0(size - pos == 2 + frag->nb_units * size_len); - - init_put_bits(&pbc, data + pos, size - pos); - - err = cbs_vp9_write_superframe_index(ctx, &pbc, &sfi); - if (err < 0) { - av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to write " - "superframe index.\n"); - av_buffer_unref(&ref); - return err; - } - - av_assert0(put_bits_left(&pbc) == 0); - flush_put_bits(&pbc); - - frag->data_ref = ref; - frag->data = data; - frag->data_size = size; - } - - return 0; -} - -static void cbs_vp9_flush(CodedBitstreamContext *ctx) -{ - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - - memset(vp9->ref, 0, sizeof(vp9->ref)); -} - -static const CodedBitstreamUnitTypeDescriptor cbs_vp9_unit_types[] = { - CBS_UNIT_TYPE_INTERNAL_REF(0, VP9RawFrame, data), - 
CBS_UNIT_TYPE_END_OF_LIST -}; - -const CodedBitstreamType ff_cbs_type_vp9 = { - .codec_id = AV_CODEC_ID_VP9, - - .priv_data_size = sizeof(CodedBitstreamVP9Context), - - .unit_types = cbs_vp9_unit_types, - - .split_fragment = &cbs_vp9_split_fragment, - .read_unit = &cbs_vp9_read_unit, - .write_unit = &cbs_vp9_write_unit, - - .flush = &cbs_vp9_flush, - - .assemble_fragment = &cbs_vp9_assemble_fragment, -}; diff --git a/third-party/cbs/cbs_vp9_syntax_template.c b/third-party/cbs/cbs_vp9_syntax_template.c deleted file mode 100644 index 2f08eccf180..00000000000 --- a/third-party/cbs/cbs_vp9_syntax_template.c +++ /dev/null @@ -1,429 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -static int FUNC(frame_sync_code)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - int err; - - fixed(8, frame_sync_byte_0, VP9_FRAME_SYNC_0); - fixed(8, frame_sync_byte_1, VP9_FRAME_SYNC_1); - fixed(8, frame_sync_byte_2, VP9_FRAME_SYNC_2); - - return 0; -} - -static int FUNC(color_config)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current, int profile) -{ - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err; - - if (profile >= 2) { - f(1, ten_or_twelve_bit); - vp9->bit_depth = current->ten_or_twelve_bit ? 
12 : 10; - } else - vp9->bit_depth = 8; - - f(3, color_space); - - if (current->color_space != VP9_CS_RGB) { - f(1, color_range); - if (profile == 1 || profile == 3) { - f(1, subsampling_x); - f(1, subsampling_y); - fixed(1, reserved_zero, 0); - } else { - infer(subsampling_x, 1); - infer(subsampling_y, 1); - } - } else { - infer(color_range, 1); - if (profile == 1 || profile == 3) { - infer(subsampling_x, 0); - infer(subsampling_y, 0); - fixed(1, reserved_zero, 0); - } - } - - vp9->subsampling_x = current->subsampling_x; - vp9->subsampling_y = current->subsampling_y; - - return 0; -} - -static int FUNC(frame_size)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err; - - f(16, frame_width_minus_1); - f(16, frame_height_minus_1); - - vp9->frame_width = current->frame_width_minus_1 + 1; - vp9->frame_height = current->frame_height_minus_1 + 1; - - vp9->mi_cols = (vp9->frame_width + 7) >> 3; - vp9->mi_rows = (vp9->frame_height + 7) >> 3; - vp9->sb64_cols = (vp9->mi_cols + 7) >> 3; - vp9->sb64_rows = (vp9->mi_rows + 7) >> 3; - - return 0; -} - -static int FUNC(render_size)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - int err; - - f(1, render_and_frame_size_different); - - if (current->render_and_frame_size_different) { - f(16, render_width_minus_1); - f(16, render_height_minus_1); - } - - return 0; -} - -static int FUNC(frame_size_with_refs)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err, i; - - for (i = 0; i < VP9_REFS_PER_FRAME; i++) { - fs(1, found_ref[i], 1, i); - if (current->found_ref[i]) { - VP9ReferenceFrameState *ref = - &vp9->ref[current->ref_frame_idx[i]]; - - vp9->frame_width = ref->frame_width; - vp9->frame_height = ref->frame_height; - - vp9->subsampling_x = ref->subsampling_x; - vp9->subsampling_y = ref->subsampling_y; - vp9->bit_depth = ref->bit_depth; 
- - break; - } - } - if (i >= VP9_REFS_PER_FRAME) - CHECK(FUNC(frame_size)(ctx, rw, current)); - else { - vp9->mi_cols = (vp9->frame_width + 7) >> 3; - vp9->mi_rows = (vp9->frame_height + 7) >> 3; - vp9->sb64_cols = (vp9->mi_cols + 7) >> 3; - vp9->sb64_rows = (vp9->mi_rows + 7) >> 3; - } - CHECK(FUNC(render_size)(ctx, rw, current)); - - return 0; -} - -static int FUNC(interpolation_filter)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - int err; - - f(1, is_filter_switchable); - if (!current->is_filter_switchable) - f(2, raw_interpolation_filter_type); - - return 0; -} - -static int FUNC(loop_filter_params)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - int err, i; - - f(6, loop_filter_level); - f(3, loop_filter_sharpness); - - f(1, loop_filter_delta_enabled); - if (current->loop_filter_delta_enabled) { - f(1, loop_filter_delta_update); - if (current->loop_filter_delta_update) { - for (i = 0; i < VP9_MAX_REF_FRAMES; i++) { - fs(1, update_ref_delta[i], 1, i); - if (current->update_ref_delta[i]) - ss(6, loop_filter_ref_deltas[i], 1, i); - } - for (i = 0; i < 2; i++) { - fs(1, update_mode_delta[i], 1, i); - if (current->update_mode_delta[i]) - ss(6, loop_filter_mode_deltas[i], 1, i); - } - } - } - - return 0; -} - -static int FUNC(quantization_params)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - int err; - - f(8, base_q_idx); - - delta_q(delta_q_y_dc); - delta_q(delta_q_uv_dc); - delta_q(delta_q_uv_ac); - - return 0; -} - -static int FUNC(segmentation_params)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - static const uint8_t segmentation_feature_bits[VP9_SEG_LVL_MAX] = { 8, 6, 2, 0 }; - static const uint8_t segmentation_feature_signed[VP9_SEG_LVL_MAX] = { 1, 1, 0, 0 }; - - int err, i, j; - - f(1, segmentation_enabled); - - if (current->segmentation_enabled) { - f(1, segmentation_update_map); - if (current->segmentation_update_map) { - for (i = 
0; i < 7; i++) - prob(segmentation_tree_probs[i], 1, i); - f(1, segmentation_temporal_update); - for (i = 0; i < 3; i++) { - if (current->segmentation_temporal_update) - prob(segmentation_pred_prob[i], 1, i); - else - infer(segmentation_pred_prob[i], 255); - } - } - - f(1, segmentation_update_data); - if (current->segmentation_update_data) { - f(1, segmentation_abs_or_delta_update); - for (i = 0; i < VP9_MAX_SEGMENTS; i++) { - for (j = 0; j < VP9_SEG_LVL_MAX; j++) { - fs(1, feature_enabled[i][j], 2, i, j); - if (current->feature_enabled[i][j] && - segmentation_feature_bits[j]) { - fs(segmentation_feature_bits[j], - feature_value[i][j], 2, i, j); - if (segmentation_feature_signed[j]) - fs(1, feature_sign[i][j], 2, i, j); - else - infer(feature_sign[i][j], 0); - } else { - infer(feature_value[i][j], 0); - infer(feature_sign[i][j], 0); - } - } - } - } - } - - return 0; -} - -static int FUNC(tile_info)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int min_log2_tile_cols, max_log2_tile_cols; - int err; - - min_log2_tile_cols = 0; - while ((VP9_MAX_TILE_WIDTH_B64 << min_log2_tile_cols) < vp9->sb64_cols) - ++min_log2_tile_cols; - max_log2_tile_cols = 0; - while ((vp9->sb64_cols >> (max_log2_tile_cols + 1)) >= VP9_MIN_TILE_WIDTH_B64) - ++max_log2_tile_cols; - - increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols); - - increment(tile_rows_log2, 0, 2); - - return 0; -} - -static int FUNC(uncompressed_header)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrameHeader *current) -{ - CodedBitstreamVP9Context *vp9 = ctx->priv_data; - int err, i; - - f(2, frame_marker); - - f(1, profile_low_bit); - f(1, profile_high_bit); - vp9->profile = (current->profile_high_bit << 1) + current->profile_low_bit; - if (vp9->profile == 3) - fixed(1, reserved_zero, 0); - - f(1, show_existing_frame); - if (current->show_existing_frame) { - f(3, frame_to_show_map_idx); - infer(header_size_in_bytes, 
0); - infer(refresh_frame_flags, 0x00); - infer(loop_filter_level, 0); - return 0; - } - - f(1, frame_type); - f(1, show_frame); - f(1, error_resilient_mode); - - if (current->frame_type == VP9_KEY_FRAME) { - CHECK(FUNC(frame_sync_code)(ctx, rw, current)); - CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile)); - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - - infer(refresh_frame_flags, 0xff); - - } else { - if (current->show_frame == 0) - f(1, intra_only); - else - infer(intra_only, 0); - - if (current->error_resilient_mode == 0) - f(2, reset_frame_context); - else - infer(reset_frame_context, 0); - - if (current->intra_only == 1) { - CHECK(FUNC(frame_sync_code)(ctx, rw, current)); - - if (vp9->profile > 0) { - CHECK(FUNC(color_config)(ctx, rw, current, vp9->profile)); - } else { - infer(color_space, 1); - infer(subsampling_x, 1); - infer(subsampling_y, 1); - vp9->bit_depth = 8; - - vp9->subsampling_x = current->subsampling_x; - vp9->subsampling_y = current->subsampling_y; - } - - f(8, refresh_frame_flags); - - CHECK(FUNC(frame_size)(ctx, rw, current)); - CHECK(FUNC(render_size)(ctx, rw, current)); - } else { - f(8, refresh_frame_flags); - - for (i = 0; i < VP9_REFS_PER_FRAME; i++) { - fs(3, ref_frame_idx[i], 1, i); - fs(1, ref_frame_sign_bias[VP9_LAST_FRAME + i], - 1, VP9_LAST_FRAME + i); - } - - CHECK(FUNC(frame_size_with_refs)(ctx, rw, current)); - f(1, allow_high_precision_mv); - CHECK(FUNC(interpolation_filter)(ctx, rw, current)); - } - } - - if (current->error_resilient_mode == 0) { - f(1, refresh_frame_context); - f(1, frame_parallel_decoding_mode); - } else { - infer(refresh_frame_context, 0); - infer(frame_parallel_decoding_mode, 1); - } - - f(2, frame_context_idx); - - CHECK(FUNC(loop_filter_params)(ctx, rw, current)); - CHECK(FUNC(quantization_params)(ctx, rw, current)); - CHECK(FUNC(segmentation_params)(ctx, rw, current)); - CHECK(FUNC(tile_info)(ctx, rw, current)); - - f(16, header_size_in_bytes); - - 
for (i = 0; i < VP9_NUM_REF_FRAMES; i++) { - if (current->refresh_frame_flags & (1 << i)) { - vp9->ref[i] = (VP9ReferenceFrameState) { - .frame_width = vp9->frame_width, - .frame_height = vp9->frame_height, - .subsampling_x = vp9->subsampling_x, - .subsampling_y = vp9->subsampling_y, - .bit_depth = vp9->bit_depth, - }; - } - } - - av_log(ctx->log_ctx, AV_LOG_DEBUG, "Frame: size %dx%d " - "subsample %dx%d bit_depth %d tiles %dx%d.\n", - vp9->frame_width, vp9->frame_height, - vp9->subsampling_x, vp9->subsampling_y, - vp9->bit_depth, 1 << current->tile_cols_log2, - 1 << current->tile_rows_log2); - - return 0; -} - -static int FUNC(trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw) -{ - int err; - while (byte_alignment(rw) != 0) - fixed(1, zero_bit, 0); - - return 0; -} - -static int FUNC(frame)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawFrame *current) -{ - int err; - - HEADER("Frame"); - - CHECK(FUNC(uncompressed_header)(ctx, rw, ¤t->header)); - - CHECK(FUNC(trailing_bits)(ctx, rw)); - - return 0; -} - -static int FUNC(superframe_index)(CodedBitstreamContext *ctx, RWContext *rw, - VP9RawSuperframeIndex *current) -{ - int err, i; - - HEADER("Superframe Index"); - - f(3, superframe_marker); - f(2, bytes_per_framesize_minus_1); - f(3, frames_in_superframe_minus_1); - - for (i = 0; i <= current->frames_in_superframe_minus_1; i++) { - // Surprise little-endian! - fle(8 * (current->bytes_per_framesize_minus_1 + 1), - frame_sizes[i], 1, i); - } - - f(3, superframe_marker); - f(2, bytes_per_framesize_minus_1); - f(3, frames_in_superframe_minus_1); - - return 0; -} diff --git a/third-party/cbs/codec.h b/third-party/cbs/codec.h deleted file mode 100644 index 03e8be90a2f..00000000000 --- a/third-party/cbs/codec.h +++ /dev/null @@ -1,387 +0,0 @@ -/* - * AVCodec public API - * - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CODEC_H -#define AVCODEC_CODEC_H - -#include - -#include "libavutil/avutil.h" -#include "libavutil/hwcontext.h" -#include "libavutil/log.h" -#include "libavutil/pixfmt.h" -#include "libavutil/rational.h" -#include "libavutil/samplefmt.h" - -#include "libavcodec/codec_id.h" -#include "libavcodec/version_major.h" - -/** - * @addtogroup lavc_core - * @{ - */ - -/** - * Decoder can use draw_horiz_band callback. - */ -#define AV_CODEC_CAP_DRAW_HORIZ_BAND (1 << 0) -/** - * Codec uses get_buffer() or get_encode_buffer() for allocating buffers and - * supports custom allocators. - * If not set, it might not use get_buffer() or get_encode_buffer() at all, or - * use operations that assume the buffer was allocated by - * avcodec_default_get_buffer2 or avcodec_default_get_encode_buffer. - */ -#define AV_CODEC_CAP_DR1 (1 << 1) -#if FF_API_FLAG_TRUNCATED -/** - * @deprecated Use parsers to always send proper frames. - */ -#define AV_CODEC_CAP_TRUNCATED (1 << 3) -#endif -/** - * Encoder or decoder requires flushing with NULL input at the end in order to - * give the complete and correct output. - * - * NOTE: If this flag is not set, the codec is guaranteed to never be fed with - * with NULL data. 
The user can still send NULL data to the public encode - * or decode function, but libavcodec will not pass it along to the codec - * unless this flag is set. - * - * Decoders: - * The decoder has a non-zero delay and needs to be fed with avpkt->data=NULL, - * avpkt->size=0 at the end to get the delayed data until the decoder no longer - * returns frames. - * - * Encoders: - * The encoder needs to be fed with NULL data at the end of encoding until the - * encoder no longer returns data. - * - * NOTE: For encoders implementing the AVCodec.encode2() function, setting this - * flag also means that the encoder must set the pts and duration for - * each output packet. If this flag is not set, the pts and duration will - * be determined by libavcodec from the input frame. - */ -#define AV_CODEC_CAP_DELAY (1 << 5) -/** - * Codec can be fed a final frame with a smaller size. - * This can be used to prevent truncation of the last audio samples. - */ -#define AV_CODEC_CAP_SMALL_LAST_FRAME (1 << 6) - -/** - * Codec can output multiple frames per AVPacket - * Normally demuxers return one frame at a time, demuxers which do not do - * are connected to a parser to split what they return into proper frames. - * This flag is reserved to the very rare category of codecs which have a - * bitstream that cannot be split into frames without timeconsuming - * operations like full decoding. Demuxers carrying such bitstreams thus - * may return multiple frames in a packet. This has many disadvantages like - * prohibiting stream copy in many cases thus it should only be considered - * as a last resort. - */ -#define AV_CODEC_CAP_SUBFRAMES (1 << 8) -/** - * Codec is experimental and is thus avoided in favor of non experimental - * encoders - */ -#define AV_CODEC_CAP_EXPERIMENTAL (1 << 9) -/** - * Codec should fill in channel configuration and samplerate instead of container - */ -#define AV_CODEC_CAP_CHANNEL_CONF (1 << 10) -/** - * Codec supports frame-level multithreading. 
- */ -#define AV_CODEC_CAP_FRAME_THREADS (1 << 12) -/** - * Codec supports slice-based (or partition-based) multithreading. - */ -#define AV_CODEC_CAP_SLICE_THREADS (1 << 13) -/** - * Codec supports changed parameters at any point. - */ -#define AV_CODEC_CAP_PARAM_CHANGE (1 << 14) -/** - * Codec supports multithreading through a method other than slice- or - * frame-level multithreading. Typically this marks wrappers around - * multithreading-capable external libraries. - */ -#define AV_CODEC_CAP_OTHER_THREADS (1 << 15) -#if FF_API_AUTO_THREADS -#define AV_CODEC_CAP_AUTO_THREADS AV_CODEC_CAP_OTHER_THREADS -#endif -/** - * Audio encoder supports receiving a different number of samples in each call. - */ -#define AV_CODEC_CAP_VARIABLE_FRAME_SIZE (1 << 16) -/** - * Decoder is not a preferred choice for probing. - * This indicates that the decoder is not a good choice for probing. - * It could for example be an expensive to spin up hardware decoder, - * or it could simply not provide a lot of useful information about - * the stream. - * A decoder marked with this flag should only be used as last resort - * choice for probing. - */ -#define AV_CODEC_CAP_AVOID_PROBING (1 << 17) - -#if FF_API_UNUSED_CODEC_CAPS -/** - * Deprecated and unused. Use AVCodecDescriptor.props instead - */ -#define AV_CODEC_CAP_INTRA_ONLY 0x40000000 -/** - * Deprecated and unused. Use AVCodecDescriptor.props instead - */ -#define AV_CODEC_CAP_LOSSLESS 0x80000000 -#endif - -/** - * Codec is backed by a hardware implementation. Typically used to - * identify a non-hwaccel hardware decoder. For information about hwaccels, use - * avcodec_get_hw_config() instead. - */ -#define AV_CODEC_CAP_HARDWARE (1 << 18) - -/** - * Codec is potentially backed by a hardware implementation, but not - * necessarily. This is used instead of AV_CODEC_CAP_HARDWARE, if the - * implementation provides some sort of internal fallback. 
- */ -#define AV_CODEC_CAP_HYBRID (1 << 19) - -/** - * This codec takes the reordered_opaque field from input AVFrames - * and returns it in the corresponding field in AVCodecContext after - * encoding. - */ -#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE (1 << 20) - -/** - * This encoder can be flushed using avcodec_flush_buffers(). If this flag is - * not set, the encoder must be closed and reopened to ensure that no frames - * remain pending. - */ -#define AV_CODEC_CAP_ENCODER_FLUSH (1 << 21) - -/** - * AVProfile. - */ -typedef struct AVProfile { - int profile; - const char *name; ///< short name for the profile -} AVProfile; - -/** - * AVCodec. - */ -typedef struct AVCodec { - /** - * Name of the codec implementation. - * The name is globally unique among encoders and among decoders (but an - * encoder and a decoder can share the same name). - * This is the primary way to find a codec from the user perspective. - */ - const char *name; - /** - * Descriptive name for the codec, meant to be more human readable than name. - * You should use the NULL_IF_CONFIG_SMALL() macro to define it. - */ - const char *long_name; - enum AVMediaType type; - enum AVCodecID id; - /** - * Codec capabilities. 
- * see AV_CODEC_CAP_* - */ - int capabilities; - uint8_t max_lowres; ///< maximum value for lowres supported by the decoder - const AVRational *supported_framerates; ///< array of supported framerates, or NULL if any, array is terminated by {0,0} - const enum AVPixelFormat *pix_fmts; ///< array of supported pixel formats, or NULL if unknown, array is terminated by -1 - const int *supported_samplerates; ///< array of supported audio samplerates, or NULL if unknown, array is terminated by 0 - const enum AVSampleFormat *sample_fmts; ///< array of supported sample formats, or NULL if unknown, array is terminated by -1 -#if FF_API_OLD_CHANNEL_LAYOUT - /** - * @deprecated use ch_layouts instead - */ - attribute_deprecated - const uint64_t *channel_layouts; ///< array of support channel layouts, or NULL if unknown. array is terminated by 0 -#endif - const AVClass *priv_class; ///< AVClass for the private context - const AVProfile *profiles; ///< array of recognized profiles, or NULL if unknown, array is terminated by {FF_PROFILE_UNKNOWN} - - /** - * Group name of the codec implementation. - * This is a short symbolic name of the wrapper backing this codec. A - * wrapper uses some kind of external implementation for the codec, such - * as an external library, or a codec implementation provided by the OS or - * the hardware. - * If this field is NULL, this is a builtin, libavcodec native codec. - * If non-NULL, this will be the suffix in AVCodec.name in most cases - * (usually AVCodec.name will be of the form "_"). - */ - const char *wrapper_name; - - /** - * Array of supported channel layouts, terminated with a zeroed layout. - */ - const AVChannelLayout *ch_layouts; -} AVCodec; - -/** - * Iterate over all registered codecs. - * - * @param opaque a pointer where libavcodec will store the iteration state. Must - * point to NULL to start the iteration. 
- * - * @return the next registered codec or NULL when the iteration is - * finished - */ -const AVCodec *av_codec_iterate(void **opaque); - -/** - * Find a registered decoder with a matching codec ID. - * - * @param id AVCodecID of the requested decoder - * @return A decoder if one was found, NULL otherwise. - */ -const AVCodec *avcodec_find_decoder(enum AVCodecID id); - -/** - * Find a registered decoder with the specified name. - * - * @param name name of the requested decoder - * @return A decoder if one was found, NULL otherwise. - */ -const AVCodec *avcodec_find_decoder_by_name(const char *name); - -/** - * Find a registered encoder with a matching codec ID. - * - * @param id AVCodecID of the requested encoder - * @return An encoder if one was found, NULL otherwise. - */ -const AVCodec *avcodec_find_encoder(enum AVCodecID id); - -/** - * Find a registered encoder with the specified name. - * - * @param name name of the requested encoder - * @return An encoder if one was found, NULL otherwise. - */ -const AVCodec *avcodec_find_encoder_by_name(const char *name); -/** - * @return a non-zero number if codec is an encoder, zero otherwise - */ -int av_codec_is_encoder(const AVCodec *codec); - -/** - * @return a non-zero number if codec is a decoder, zero otherwise - */ -int av_codec_is_decoder(const AVCodec *codec); - -/** - * Return a name for the specified profile, if available. - * - * @param codec the codec that is searched for the given profile - * @param profile the profile value for which a name is requested - * @return A name for the profile if found, NULL otherwise. - */ -const char *av_get_profile_name(const AVCodec *codec, int profile); - -enum { - /** - * The codec supports this format via the hw_device_ctx interface. - * - * When selecting this format, AVCodecContext.hw_device_ctx should - * have been set to a device of the specified type before calling - * avcodec_open2(). 
- */ - AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX = 0x01, - /** - * The codec supports this format via the hw_frames_ctx interface. - * - * When selecting this format for a decoder, - * AVCodecContext.hw_frames_ctx should be set to a suitable frames - * context inside the get_format() callback. The frames context - * must have been created on a device of the specified type. - * - * When selecting this format for an encoder, - * AVCodecContext.hw_frames_ctx should be set to the context which - * will be used for the input frames before calling avcodec_open2(). - */ - AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX = 0x02, - /** - * The codec supports this format by some internal method. - * - * This format can be selected without any additional configuration - - * no device or frames context is required. - */ - AV_CODEC_HW_CONFIG_METHOD_INTERNAL = 0x04, - /** - * The codec supports this format by some ad-hoc method. - * - * Additional settings and/or function calls are required. See the - * codec-specific documentation for details. (Methods requiring - * this sort of configuration are deprecated and others should be - * used in preference.) - */ - AV_CODEC_HW_CONFIG_METHOD_AD_HOC = 0x08, -}; - -typedef struct AVCodecHWConfig { - /** - * For decoders, a hardware pixel format which that decoder may be - * able to decode to if suitable hardware is available. - * - * For encoders, a pixel format which the encoder may be able to - * accept. If set to AV_PIX_FMT_NONE, this applies to all pixel - * formats supported by the codec. - */ - enum AVPixelFormat pix_fmt; - /** - * Bit set of AV_CODEC_HW_CONFIG_METHOD_* flags, describing the possible - * setup methods which can be used with this configuration. - */ - int methods; - /** - * The device type associated with the configuration. - * - * Must be set for AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX and - * AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX, otherwise unused. 
- */ - enum AVHWDeviceType device_type; -} AVCodecHWConfig; - -/** - * Retrieve supported hardware configurations for a codec. - * - * Values of index from zero to some maximum return the indexed configuration - * descriptor; all other values return NULL. If the codec does not support - * any hardware configurations then it will always return NULL. - */ -const AVCodecHWConfig *avcodec_get_hw_config(const AVCodec *codec, int index); - -/** - * @} - */ - -#endif /* AVCODEC_CODEC_H */ diff --git a/third-party/cbs/h2645_parse.c b/third-party/cbs/h2645_parse.c deleted file mode 100644 index 41f2d5876e1..00000000000 --- a/third-party/cbs/h2645_parse.c +++ /dev/null @@ -1,544 +0,0 @@ -/* - * H.264/HEVC common parsing code - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include - -// [sunshine] Changed include path -#include "cbs/config.h" - -#include "intmath.h" -#include "libavutil/intreadwrite.h" -#include "libavutil/mem.h" - -// [sunshine] Changed include path -#include "bytestream.h" -#include "cbs/hevc.h" -#include "cbs/h264.h" -#include "cbs/h2645_parse.h" - -int ff_h2645_extract_rbsp(const uint8_t *src, int length, - H2645RBSP *rbsp, H2645NAL *nal, int small_padding) -{ - int i, si, di; - uint8_t *dst; - - nal->skipped_bytes = 0; -#define STARTCODE_TEST \ - if (i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) { \ - if (src[i + 2] != 3 && src[i + 2] != 0) { \ - /* startcode, so we must be past the end */ \ - length = i; \ - } \ - break; \ - } -#if HAVE_FAST_UNALIGNED -#define FIND_FIRST_ZERO \ - if (i > 0 && !src[i]) \ - i--; \ - while (src[i]) \ - i++ -#if HAVE_FAST_64BIT - for (i = 0; i + 1 < length; i += 9) { - if (!((~AV_RN64(src + i) & - (AV_RN64(src + i) - 0x0100010001000101ULL)) & - 0x8000800080008080ULL)) - continue; - FIND_FIRST_ZERO; - STARTCODE_TEST; - i -= 7; - } -#else - for (i = 0; i + 1 < length; i += 5) { - if (!((~AV_RN32(src + i) & - (AV_RN32(src + i) - 0x01000101U)) & - 0x80008080U)) - continue; - FIND_FIRST_ZERO; - STARTCODE_TEST; - i -= 3; - } -#endif /* HAVE_FAST_64BIT */ -#else - for (i = 0; i + 1 < length; i += 2) { - if (src[i]) - continue; - if (i > 0 && src[i - 1] == 0) - i--; - STARTCODE_TEST; - } -#endif /* HAVE_FAST_UNALIGNED */ - - if (i >= length - 1 && small_padding) { // no escaped 0 - nal->data = - nal->raw_data = src; - nal->size = - nal->raw_size = length; - return length; - } else if (i > length) - i = length; - - dst = &rbsp->rbsp_buffer[rbsp->rbsp_buffer_size]; - - memcpy(dst, src, i); - si = di = i; - while (si + 2 < length) { - // remove escapes (very 
rare 1:2^22) - if (src[si + 2] > 3) { - dst[di++] = src[si++]; - dst[di++] = src[si++]; - } else if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] != 0) { - if (src[si + 2] == 3) { // escape - dst[di++] = 0; - dst[di++] = 0; - si += 3; - - if (nal->skipped_bytes_pos) { - nal->skipped_bytes++; - if (nal->skipped_bytes_pos_size < nal->skipped_bytes) { - nal->skipped_bytes_pos_size *= 2; - av_assert0(nal->skipped_bytes_pos_size >= nal->skipped_bytes); - av_reallocp_array(&nal->skipped_bytes_pos, - nal->skipped_bytes_pos_size, - sizeof(*nal->skipped_bytes_pos)); - if (!nal->skipped_bytes_pos) { - nal->skipped_bytes_pos_size = 0; - return AVERROR(ENOMEM); - } - } - if (nal->skipped_bytes_pos) - nal->skipped_bytes_pos[nal->skipped_bytes-1] = di - 1; - } - continue; - } else // next start code - goto nsc; - } - - dst[di++] = src[si++]; - } - while (si < length) - dst[di++] = src[si++]; - -nsc: - memset(dst + di, 0, AV_INPUT_BUFFER_PADDING_SIZE); - - nal->data = dst; - nal->size = di; - nal->raw_data = src; - nal->raw_size = si; - rbsp->rbsp_buffer_size += si; - - return si; -} - -static const char *const hevc_nal_type_name[64] = { - "TRAIL_N", // HEVC_NAL_TRAIL_N - "TRAIL_R", // HEVC_NAL_TRAIL_R - "TSA_N", // HEVC_NAL_TSA_N - "TSA_R", // HEVC_NAL_TSA_R - "STSA_N", // HEVC_NAL_STSA_N - "STSA_R", // HEVC_NAL_STSA_R - "RADL_N", // HEVC_NAL_RADL_N - "RADL_R", // HEVC_NAL_RADL_R - "RASL_N", // HEVC_NAL_RASL_N - "RASL_R", // HEVC_NAL_RASL_R - "RSV_VCL_N10", // HEVC_NAL_VCL_N10 - "RSV_VCL_R11", // HEVC_NAL_VCL_R11 - "RSV_VCL_N12", // HEVC_NAL_VCL_N12 - "RSV_VLC_R13", // HEVC_NAL_VCL_R13 - "RSV_VCL_N14", // HEVC_NAL_VCL_N14 - "RSV_VCL_R15", // HEVC_NAL_VCL_R15 - "BLA_W_LP", // HEVC_NAL_BLA_W_LP - "BLA_W_RADL", // HEVC_NAL_BLA_W_RADL - "BLA_N_LP", // HEVC_NAL_BLA_N_LP - "IDR_W_RADL", // HEVC_NAL_IDR_W_RADL - "IDR_N_LP", // HEVC_NAL_IDR_N_LP - "CRA_NUT", // HEVC_NAL_CRA_NUT - "RSV_IRAP_VCL22", // HEVC_NAL_RSV_IRAP_VCL22 - "RSV_IRAP_VCL23", // HEVC_NAL_RSV_IRAP_VCL23 - 
"RSV_VCL24", // HEVC_NAL_RSV_VCL24 - "RSV_VCL25", // HEVC_NAL_RSV_VCL25 - "RSV_VCL26", // HEVC_NAL_RSV_VCL26 - "RSV_VCL27", // HEVC_NAL_RSV_VCL27 - "RSV_VCL28", // HEVC_NAL_RSV_VCL28 - "RSV_VCL29", // HEVC_NAL_RSV_VCL29 - "RSV_VCL30", // HEVC_NAL_RSV_VCL30 - "RSV_VCL31", // HEVC_NAL_RSV_VCL31 - "VPS", // HEVC_NAL_VPS - "SPS", // HEVC_NAL_SPS - "PPS", // HEVC_NAL_PPS - "AUD", // HEVC_NAL_AUD - "EOS_NUT", // HEVC_NAL_EOS_NUT - "EOB_NUT", // HEVC_NAL_EOB_NUT - "FD_NUT", // HEVC_NAL_FD_NUT - "SEI_PREFIX", // HEVC_NAL_SEI_PREFIX - "SEI_SUFFIX", // HEVC_NAL_SEI_SUFFIX - "RSV_NVCL41", // HEVC_NAL_RSV_NVCL41 - "RSV_NVCL42", // HEVC_NAL_RSV_NVCL42 - "RSV_NVCL43", // HEVC_NAL_RSV_NVCL43 - "RSV_NVCL44", // HEVC_NAL_RSV_NVCL44 - "RSV_NVCL45", // HEVC_NAL_RSV_NVCL45 - "RSV_NVCL46", // HEVC_NAL_RSV_NVCL46 - "RSV_NVCL47", // HEVC_NAL_RSV_NVCL47 - "UNSPEC48", // HEVC_NAL_UNSPEC48 - "UNSPEC49", // HEVC_NAL_UNSPEC49 - "UNSPEC50", // HEVC_NAL_UNSPEC50 - "UNSPEC51", // HEVC_NAL_UNSPEC51 - "UNSPEC52", // HEVC_NAL_UNSPEC52 - "UNSPEC53", // HEVC_NAL_UNSPEC53 - "UNSPEC54", // HEVC_NAL_UNSPEC54 - "UNSPEC55", // HEVC_NAL_UNSPEC55 - "UNSPEC56", // HEVC_NAL_UNSPEC56 - "UNSPEC57", // HEVC_NAL_UNSPEC57 - "UNSPEC58", // HEVC_NAL_UNSPEC58 - "UNSPEC59", // HEVC_NAL_UNSPEC59 - "UNSPEC60", // HEVC_NAL_UNSPEC60 - "UNSPEC61", // HEVC_NAL_UNSPEC61 - "UNSPEC62", // HEVC_NAL_UNSPEC62 - "UNSPEC63", // HEVC_NAL_UNSPEC63 -}; - -static const char *hevc_nal_unit_name(int nal_type) -{ - av_assert0(nal_type >= 0 && nal_type < 64); - return hevc_nal_type_name[nal_type]; -} - -static const char *const h264_nal_type_name[32] = { - "Unspecified 0", //H264_NAL_UNSPECIFIED - "Coded slice of a non-IDR picture", // H264_NAL_SLICE - "Coded slice data partition A", // H264_NAL_DPA - "Coded slice data partition B", // H264_NAL_DPB - "Coded slice data partition C", // H264_NAL_DPC - "IDR", // H264_NAL_IDR_SLICE - "SEI", // H264_NAL_SEI - "SPS", // H264_NAL_SPS - "PPS", // H264_NAL_PPS - "AUD", // H264_NAL_AUD - "End of 
sequence", // H264_NAL_END_SEQUENCE - "End of stream", // H264_NAL_END_STREAM - "Filler data", // H264_NAL_FILLER_DATA - "SPS extension", // H264_NAL_SPS_EXT - "Prefix", // H264_NAL_PREFIX - "Subset SPS", // H264_NAL_SUB_SPS - "Depth parameter set", // H264_NAL_DPS - "Reserved 17", // H264_NAL_RESERVED17 - "Reserved 18", // H264_NAL_RESERVED18 - "Auxiliary coded picture without partitioning", // H264_NAL_AUXILIARY_SLICE - "Slice extension", // H264_NAL_EXTEN_SLICE - "Slice extension for a depth view or a 3D-AVC texture view", // H264_NAL_DEPTH_EXTEN_SLICE - "Reserved 22", // H264_NAL_RESERVED22 - "Reserved 23", // H264_NAL_RESERVED23 - "Unspecified 24", // H264_NAL_UNSPECIFIED24 - "Unspecified 25", // H264_NAL_UNSPECIFIED25 - "Unspecified 26", // H264_NAL_UNSPECIFIED26 - "Unspecified 27", // H264_NAL_UNSPECIFIED27 - "Unspecified 28", // H264_NAL_UNSPECIFIED28 - "Unspecified 29", // H264_NAL_UNSPECIFIED29 - "Unspecified 30", // H264_NAL_UNSPECIFIED30 - "Unspecified 31", // H264_NAL_UNSPECIFIED31 -}; - -static const char *h264_nal_unit_name(int nal_type) -{ - av_assert0(nal_type >= 0 && nal_type < 32); - return h264_nal_type_name[nal_type]; -} - -static int get_bit_length(H2645NAL *nal, int min_size, int skip_trailing_zeros) -{ - int size = nal->size; - int trailing_padding = 0; - - while (skip_trailing_zeros && size > 0 && nal->data[size - 1] == 0) - size--; - - if (!size) - return 0; - - if (size <= min_size) { - if (nal->size < min_size) - return AVERROR_INVALIDDATA; - size = min_size; - } else { - int v = nal->data[size - 1]; - /* remove the stop bit and following trailing zeros, - * or nothing for damaged bitstreams */ - if (v) - trailing_padding = ff_ctz(v) + 1; - } - - if (size > INT_MAX / 8) - return AVERROR(ERANGE); - size *= 8; - - return size - trailing_padding; -} - -/** - * @return AVERROR_INVALIDDATA if the packet is not a valid NAL unit, - * 0 otherwise - */ -static int hevc_parse_nal_header(H2645NAL *nal, void *logctx) -{ - GetBitContext *gb = 
&nal->gb; - - if (get_bits1(gb) != 0) - return AVERROR_INVALIDDATA; - - nal->type = get_bits(gb, 6); - - nal->nuh_layer_id = get_bits(gb, 6); - nal->temporal_id = get_bits(gb, 3) - 1; - if (nal->temporal_id < 0) - return AVERROR_INVALIDDATA; - - av_log(logctx, AV_LOG_DEBUG, - "nal_unit_type: %d(%s), nuh_layer_id: %d, temporal_id: %d\n", - nal->type, hevc_nal_unit_name(nal->type), nal->nuh_layer_id, nal->temporal_id); - - return 0; -} - -static int h264_parse_nal_header(H2645NAL *nal, void *logctx) -{ - GetBitContext *gb = &nal->gb; - - if (get_bits1(gb) != 0) - return AVERROR_INVALIDDATA; - - nal->ref_idc = get_bits(gb, 2); - nal->type = get_bits(gb, 5); - - av_log(logctx, AV_LOG_DEBUG, - "nal_unit_type: %d(%s), nal_ref_idc: %d\n", - nal->type, h264_nal_unit_name(nal->type), nal->ref_idc); - - return 0; -} - -static int find_next_start_code(const uint8_t *buf, const uint8_t *next_avc) -{ - int i = 0; - - if (buf + 3 >= next_avc) - return next_avc - buf; - - while (buf + i + 3 < next_avc) { - if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1) - break; - i++; - } - return i + 3; -} - -static void alloc_rbsp_buffer(H2645RBSP *rbsp, unsigned int size, int use_ref) -{ - int min_size = size; - - if (size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE) - goto fail; - size += AV_INPUT_BUFFER_PADDING_SIZE; - - if (rbsp->rbsp_buffer_alloc_size >= size && - (!rbsp->rbsp_buffer_ref || av_buffer_is_writable(rbsp->rbsp_buffer_ref))) { - av_assert0(rbsp->rbsp_buffer); - memset(rbsp->rbsp_buffer + min_size, 0, AV_INPUT_BUFFER_PADDING_SIZE); - return; - } - - size = FFMIN(size + size / 16 + 32, INT_MAX); - - if (rbsp->rbsp_buffer_ref) - av_buffer_unref(&rbsp->rbsp_buffer_ref); - else - av_free(rbsp->rbsp_buffer); - - rbsp->rbsp_buffer = av_mallocz(size); - if (!rbsp->rbsp_buffer) - goto fail; - rbsp->rbsp_buffer_alloc_size = size; - - if (use_ref) { - rbsp->rbsp_buffer_ref = av_buffer_create(rbsp->rbsp_buffer, size, - NULL, NULL, 0); - if (!rbsp->rbsp_buffer_ref) - goto fail; - } - - 
return; - -fail: - rbsp->rbsp_buffer_alloc_size = 0; - if (rbsp->rbsp_buffer_ref) { - av_buffer_unref(&rbsp->rbsp_buffer_ref); - rbsp->rbsp_buffer = NULL; - } else - av_freep(&rbsp->rbsp_buffer); - - return; -} - -int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, - void *logctx, int is_nalff, int nal_length_size, - enum AVCodecID codec_id, int small_padding, int use_ref) -{ - GetByteContext bc; - int consumed, ret = 0; - int next_avc = is_nalff ? 0 : length; - int64_t padding = small_padding ? 0 : MAX_MBPAIR_SIZE; - - bytestream2_init(&bc, buf, length); - alloc_rbsp_buffer(&pkt->rbsp, length + padding, use_ref); - - if (!pkt->rbsp.rbsp_buffer) - return AVERROR(ENOMEM); - - pkt->rbsp.rbsp_buffer_size = 0; - pkt->nb_nals = 0; - while (bytestream2_get_bytes_left(&bc) >= 4) { - H2645NAL *nal; - int extract_length = 0; - int skip_trailing_zeros = 1; - - if (bytestream2_tell(&bc) == next_avc) { - int i = 0; - extract_length = get_nalsize(nal_length_size, - bc.buffer, bytestream2_get_bytes_left(&bc), &i, logctx); - if (extract_length < 0) - return extract_length; - - bytestream2_skip(&bc, nal_length_size); - - next_avc = bytestream2_tell(&bc) + extract_length; - } else { - int buf_index; - - if (bytestream2_tell(&bc) > next_avc) - av_log(logctx, AV_LOG_WARNING, "Exceeded next NALFF position, re-syncing.\n"); - - /* search start code */ - buf_index = find_next_start_code(bc.buffer, buf + next_avc); - - bytestream2_skip(&bc, buf_index); - - if (!bytestream2_get_bytes_left(&bc)) { - if (pkt->nb_nals > 0) { - // No more start codes: we discarded some irrelevant - // bytes at the end of the packet. 
- return 0; - } else { - av_log(logctx, AV_LOG_ERROR, "No start code is found.\n"); - return AVERROR_INVALIDDATA; - } - } - - extract_length = FFMIN(bytestream2_get_bytes_left(&bc), next_avc - bytestream2_tell(&bc)); - - if (bytestream2_tell(&bc) >= next_avc) { - /* skip to the start of the next NAL */ - bytestream2_skip(&bc, next_avc - bytestream2_tell(&bc)); - continue; - } - } - - if (pkt->nals_allocated < pkt->nb_nals + 1) { - int new_size = pkt->nals_allocated + 1; - void *tmp; - - if (new_size >= INT_MAX / sizeof(*pkt->nals)) - return AVERROR(ENOMEM); - - tmp = av_fast_realloc(pkt->nals, &pkt->nal_buffer_size, new_size * sizeof(*pkt->nals)); - if (!tmp) - return AVERROR(ENOMEM); - - pkt->nals = tmp; - memset(pkt->nals + pkt->nals_allocated, 0, sizeof(*pkt->nals)); - - nal = &pkt->nals[pkt->nb_nals]; - nal->skipped_bytes_pos_size = FFMIN(1024, extract_length/3+1); // initial buffer size - nal->skipped_bytes_pos = av_malloc_array(nal->skipped_bytes_pos_size, sizeof(*nal->skipped_bytes_pos)); - if (!nal->skipped_bytes_pos) - return AVERROR(ENOMEM); - - pkt->nals_allocated = new_size; - } - nal = &pkt->nals[pkt->nb_nals]; - - consumed = ff_h2645_extract_rbsp(bc.buffer, extract_length, &pkt->rbsp, nal, small_padding); - if (consumed < 0) - return consumed; - - if (is_nalff && (extract_length != consumed) && extract_length) - av_log(logctx, AV_LOG_DEBUG, - "NALFF: Consumed only %d bytes instead of %d\n", - consumed, extract_length); - - bytestream2_skip(&bc, consumed); - - /* see commit 3566042a0 */ - if (bytestream2_get_bytes_left(&bc) >= 4 && - bytestream2_peek_be32(&bc) == 0x000001E0) - skip_trailing_zeros = 0; - - nal->size_bits = get_bit_length(nal, 1 + (codec_id == AV_CODEC_ID_HEVC), - skip_trailing_zeros); - - if (nal->size <= 0 || nal->size_bits <= 0) - continue; - - ret = init_get_bits(&nal->gb, nal->data, nal->size_bits); - if (ret < 0) - return ret; - - /* Reset type in case it contains a stale value from a previously parsed NAL */ - nal->type = 0; - - 
if (codec_id == AV_CODEC_ID_HEVC) - ret = hevc_parse_nal_header(nal, logctx); - else - ret = h264_parse_nal_header(nal, logctx); - if (ret < 0) { - av_log(logctx, AV_LOG_WARNING, "Invalid NAL unit %d, skipping.\n", - nal->type); - continue; - } - - pkt->nb_nals++; - } - - return 0; -} - -void ff_h2645_packet_uninit(H2645Packet *pkt) -{ - int i; - for (i = 0; i < pkt->nals_allocated; i++) { - av_freep(&pkt->nals[i].skipped_bytes_pos); - } - av_freep(&pkt->nals); - pkt->nals_allocated = pkt->nal_buffer_size = 0; - if (pkt->rbsp.rbsp_buffer_ref) { - av_buffer_unref(&pkt->rbsp.rbsp_buffer_ref); - pkt->rbsp.rbsp_buffer = NULL; - } else - av_freep(&pkt->rbsp.rbsp_buffer); - pkt->rbsp.rbsp_buffer_alloc_size = pkt->rbsp.rbsp_buffer_size = 0; -} diff --git a/third-party/cbs/h264_levels.c b/third-party/cbs/h264_levels.c deleted file mode 100644 index 172ef452a53..00000000000 --- a/third-party/cbs/h264_levels.c +++ /dev/null @@ -1,124 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#include -#include "libavutil/macros.h" -// [sunshine] Changed include path -#include "cbs/h264_levels.h" - -// H.264 table A-1. 
-static const H264LevelDescriptor h264_levels[] = { - // Name MaxMBPS MaxBR MinCR - // | level_idc | MaxFS | MaxCPB | MaxMvsPer2Mb - // | | cs3f | | MaxDpbMbs | | MaxVmvR | | - { "1", 10, 0, 1485, 99, 396, 64, 175, 64, 2, 0 }, - { "1b", 11, 1, 1485, 99, 396, 128, 350, 64, 2, 0 }, - { "1b", 9, 0, 1485, 99, 396, 128, 350, 64, 2, 0 }, - { "1.1", 11, 0, 3000, 396, 900, 192, 500, 128, 2, 0 }, - { "1.2", 12, 0, 6000, 396, 2376, 384, 1000, 128, 2, 0 }, - { "1.3", 13, 0, 11880, 396, 2376, 768, 2000, 128, 2, 0 }, - { "2", 20, 0, 11880, 396, 2376, 2000, 2000, 128, 2, 0 }, - { "2.1", 21, 0, 19800, 792, 4752, 4000, 4000, 256, 2, 0 }, - { "2.2", 22, 0, 20250, 1620, 8100, 4000, 4000, 256, 2, 0 }, - { "3", 30, 0, 40500, 1620, 8100, 10000, 10000, 256, 2, 32 }, - { "3.1", 31, 0, 108000, 3600, 18000, 14000, 14000, 512, 4, 16 }, - { "3.2", 32, 0, 216000, 5120, 20480, 20000, 20000, 512, 4, 16 }, - { "4", 40, 0, 245760, 8192, 32768, 20000, 25000, 512, 4, 16 }, - { "4.1", 41, 0, 245760, 8192, 32768, 50000, 62500, 512, 2, 16 }, - { "4.2", 42, 0, 522240, 8704, 34816, 50000, 62500, 512, 2, 16 }, - { "5", 50, 0, 589824, 22080, 110400, 135000, 135000, 512, 2, 16 }, - { "5.1", 51, 0, 983040, 36864, 184320, 240000, 240000, 512, 2, 16 }, - { "5.2", 52, 0, 2073600, 36864, 184320, 240000, 240000, 512, 2, 16 }, - { "6", 60, 0, 4177920, 139264, 696320, 240000, 240000, 8192, 2, 16 }, - { "6.1", 61, 0, 8355840, 139264, 696320, 480000, 480000, 8192, 2, 16 }, - { "6.2", 62, 0, 16711680, 139264, 696320, 800000, 800000, 8192, 2, 16 }, -}; - -// H.264 table A-2 plus values from A-1. -static const struct { - int profile_idc; - int cpb_br_vcl_factor; - int cpb_br_nal_factor; -} h264_br_factors[] = { - { 66, 1000, 1200 }, - { 77, 1000, 1200 }, - { 88, 1000, 1200 }, - { 100, 1250, 1500 }, - { 110, 3000, 3600 }, - { 122, 4000, 4800 }, - { 244, 4000, 4800 }, - { 44, 4000, 4800 }, -}; - -// We are only ever interested in the NAL bitrate factor. 
-static int h264_get_br_factor(int profile_idc) -{ - int i; - for (i = 0; i < FF_ARRAY_ELEMS(h264_br_factors); i++) { - if (h264_br_factors[i].profile_idc == profile_idc) - return h264_br_factors[i].cpb_br_nal_factor; - } - // Default to the non-high profile value if not specified. - return 1200; -} - -const H264LevelDescriptor *ff_h264_guess_level(int profile_idc, - int64_t bitrate, - int framerate, - int width, int height, - int max_dec_frame_buffering) -{ - int width_mbs = (width + 15) / 16; - int height_mbs = (height + 15) / 16; - int no_cs3f = !(profile_idc == 66 || - profile_idc == 77 || - profile_idc == 88); - int i; - - for (i = 0; i < FF_ARRAY_ELEMS(h264_levels); i++) { - const H264LevelDescriptor *level = &h264_levels[i]; - - if (level->constraint_set3_flag && no_cs3f) - continue; - - if (bitrate > (int64_t)level->max_br * h264_get_br_factor(profile_idc)) - continue; - - if (width_mbs * height_mbs > level->max_fs) - continue; - if (width_mbs * width_mbs > 8 * level->max_fs) - continue; - if (height_mbs * height_mbs > 8 * level->max_fs) - continue; - - if (width_mbs && height_mbs) { - int max_dpb_frames = - FFMIN(level->max_dpb_mbs / (width_mbs * height_mbs), 16); - if (max_dec_frame_buffering > max_dpb_frames) - continue; - - if (framerate > (level->max_mbps / (width_mbs * height_mbs))) - continue; - } - - return level; - } - - // No usable levels found - frame is too big or bitrate is too high. - return NULL; -} diff --git a/third-party/cbs/h264_ps.h b/third-party/cbs/h264_ps.h deleted file mode 100644 index dc52835ed4c..00000000000 --- a/third-party/cbs/h264_ps.h +++ /dev/null @@ -1,177 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * H.264 parameter set handling - */ - -#ifndef AVCODEC_H264_PS_H -#define AVCODEC_H264_PS_H - -#include - -#include "libavutil/buffer.h" -#include "libavutil/pixfmt.h" -#include "libavutil/rational.h" - -#include "avcodec.h" -#include "get_bits.h" -#include "h264.h" - -#define MAX_SPS_COUNT 32 -#define MAX_PPS_COUNT 256 -#define MAX_LOG2_MAX_FRAME_NUM (12 + 4) - -/** - * Sequence parameter set - */ -typedef struct SPS { - unsigned int sps_id; - int profile_idc; - int level_idc; - int chroma_format_idc; - int transform_bypass; ///< qpprime_y_zero_transform_bypass_flag - int log2_max_frame_num; ///< log2_max_frame_num_minus4 + 4 - int poc_type; ///< pic_order_cnt_type - int log2_max_poc_lsb; ///< log2_max_pic_order_cnt_lsb_minus4 - int delta_pic_order_always_zero_flag; - int offset_for_non_ref_pic; - int offset_for_top_to_bottom_field; - int poc_cycle_length; ///< num_ref_frames_in_pic_order_cnt_cycle - int ref_frame_count; ///< num_ref_frames - int gaps_in_frame_num_allowed_flag; - int mb_width; ///< pic_width_in_mbs_minus1 + 1 - ///< (pic_height_in_map_units_minus1 + 1) * (2 - frame_mbs_only_flag) - int mb_height; - int frame_mbs_only_flag; - int mb_aff; ///< mb_adaptive_frame_field_flag - int direct_8x8_inference_flag; - int crop; ///< frame_cropping_flag - - /* those 4 are already in luma samples */ - unsigned int crop_left; ///< frame_cropping_rect_left_offset - unsigned int crop_right; ///< frame_cropping_rect_right_offset - unsigned int crop_top; ///< 
frame_cropping_rect_top_offset - unsigned int crop_bottom; ///< frame_cropping_rect_bottom_offset - int vui_parameters_present_flag; - AVRational sar; - int video_signal_type_present_flag; - int full_range; - int colour_description_present_flag; - enum AVColorPrimaries color_primaries; - enum AVColorTransferCharacteristic color_trc; - enum AVColorSpace colorspace; - enum AVChromaLocation chroma_location; - - int timing_info_present_flag; - uint32_t num_units_in_tick; - uint32_t time_scale; - int fixed_frame_rate_flag; - int32_t offset_for_ref_frame[256]; - int bitstream_restriction_flag; - int num_reorder_frames; - int scaling_matrix_present; - uint8_t scaling_matrix4[6][16]; - uint8_t scaling_matrix8[6][64]; - int nal_hrd_parameters_present_flag; - int vcl_hrd_parameters_present_flag; - int pic_struct_present_flag; - int time_offset_length; - int cpb_cnt; ///< See H.264 E.1.2 - int initial_cpb_removal_delay_length; ///< initial_cpb_removal_delay_length_minus1 + 1 - int cpb_removal_delay_length; ///< cpb_removal_delay_length_minus1 + 1 - int dpb_output_delay_length; ///< dpb_output_delay_length_minus1 + 1 - int bit_depth_luma; ///< bit_depth_luma_minus8 + 8 - int bit_depth_chroma; ///< bit_depth_chroma_minus8 + 8 - int residual_color_transform_flag; ///< residual_colour_transform_flag - int constraint_set_flags; ///< constraint_set[0-3]_flag - uint8_t data[4096]; - size_t data_size; -} SPS; - -/** - * Picture parameter set - */ -typedef struct PPS { - unsigned int sps_id; - int cabac; ///< entropy_coding_mode_flag - int pic_order_present; ///< pic_order_present_flag - int slice_group_count; ///< num_slice_groups_minus1 + 1 - int mb_slice_group_map_type; - unsigned int ref_count[2]; ///< num_ref_idx_l0/1_active_minus1 + 1 - int weighted_pred; ///< weighted_pred_flag - int weighted_bipred_idc; - int init_qp; ///< pic_init_qp_minus26 + 26 - int init_qs; ///< pic_init_qs_minus26 + 26 - int chroma_qp_index_offset[2]; - int deblocking_filter_parameters_present; ///< 
deblocking_filter_parameters_present_flag - int constrained_intra_pred; ///< constrained_intra_pred_flag - int redundant_pic_cnt_present; ///< redundant_pic_cnt_present_flag - int transform_8x8_mode; ///< transform_8x8_mode_flag - uint8_t scaling_matrix4[6][16]; - uint8_t scaling_matrix8[6][64]; - uint8_t chroma_qp_table[2][QP_MAX_NUM+1]; ///< pre-scaled (with chroma_qp_index_offset) version of qp_table - int chroma_qp_diff; - uint8_t data[4096]; - size_t data_size; - - uint32_t dequant4_buffer[6][QP_MAX_NUM + 1][16]; - uint32_t dequant8_buffer[6][QP_MAX_NUM + 1][64]; - uint32_t(*dequant4_coeff[6])[16]; - uint32_t(*dequant8_coeff[6])[64]; - - AVBufferRef *sps_ref; - const SPS *sps; -} PPS; - -typedef struct H264ParamSets { - AVBufferRef *sps_list[MAX_SPS_COUNT]; - AVBufferRef *pps_list[MAX_PPS_COUNT]; - - AVBufferRef *pps_ref; - /* currently active parameters sets */ - const PPS *pps; - const SPS *sps; - - int overread_warning_printed[2]; -} H264ParamSets; - -/** - * compute profile from sps - */ -int ff_h264_get_profile(const SPS *sps); - -/** - * Decode SPS - */ -int ff_h264_decode_seq_parameter_set(GetBitContext *gb, AVCodecContext *avctx, - H264ParamSets *ps, int ignore_truncation); - -/** - * Decode PPS - */ -int ff_h264_decode_picture_parameter_set(GetBitContext *gb, AVCodecContext *avctx, - H264ParamSets *ps, int bit_length); - -/** - * Uninit H264 param sets structure. - */ -void ff_h264_ps_uninit(H264ParamSets *ps); - -#endif /* AVCODEC_H264_PS_H */ diff --git a/third-party/cbs/h264_sei.h b/third-party/cbs/h264_sei.h deleted file mode 100644 index f9166b45dff..00000000000 --- a/third-party/cbs/h264_sei.h +++ /dev/null @@ -1,224 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_H264_SEI_H -#define AVCODEC_H264_SEI_H - -#include "get_bits.h" -#include "h264_ps.h" -#include "sei.h" - - -/** - * pic_struct in picture timing SEI message - */ -typedef enum { - H264_SEI_PIC_STRUCT_FRAME = 0, ///< 0: %frame - H264_SEI_PIC_STRUCT_TOP_FIELD = 1, ///< 1: top field - H264_SEI_PIC_STRUCT_BOTTOM_FIELD = 2, ///< 2: bottom field - H264_SEI_PIC_STRUCT_TOP_BOTTOM = 3, ///< 3: top field, bottom field, in that order - H264_SEI_PIC_STRUCT_BOTTOM_TOP = 4, ///< 4: bottom field, top field, in that order - H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP = 5, ///< 5: top field, bottom field, top field repeated, in that order - H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM = 6, ///< 6: bottom field, top field, bottom field repeated, in that order - H264_SEI_PIC_STRUCT_FRAME_DOUBLING = 7, ///< 7: %frame doubling - H264_SEI_PIC_STRUCT_FRAME_TRIPLING = 8 ///< 8: %frame tripling -} H264_SEI_PicStructType; - -/** - * frame_packing_arrangement types - */ -typedef enum { - H264_SEI_FPA_TYPE_CHECKERBOARD = 0, - H264_SEI_FPA_TYPE_INTERLEAVE_COLUMN = 1, - H264_SEI_FPA_TYPE_INTERLEAVE_ROW = 2, - H264_SEI_FPA_TYPE_SIDE_BY_SIDE = 3, - H264_SEI_FPA_TYPE_TOP_BOTTOM = 4, - H264_SEI_FPA_TYPE_INTERLEAVE_TEMPORAL = 5, - H264_SEI_FPA_TYPE_2D = 6, -} H264_SEI_FpaType; - -typedef struct H264SEITimeCode { - /* When not continuously receiving full timecodes, we have to reference - the previous timecode received */ - int full; - int frame; - int seconds; - int minutes; - int hours; - int dropframe; -} H264SEITimeCode; - 
-typedef struct H264SEIPictureTiming { - // maximum size of pic_timing according to the spec should be 274 bits - uint8_t payload[40]; - int payload_size_bits; - - int present; - H264_SEI_PicStructType pic_struct; - - /** - * Bit set of clock types for fields/frames in picture timing SEI message. - * For each found ct_type, appropriate bit is set (e.g., bit 1 for - * interlaced). - */ - int ct_type; - - /** - * dpb_output_delay in picture timing SEI message, see H.264 C.2.2 - */ - int dpb_output_delay; - - /** - * cpb_removal_delay in picture timing SEI message, see H.264 C.1.2 - */ - int cpb_removal_delay; - - /** - * Maximum three timecodes in a pic_timing SEI. - */ - H264SEITimeCode timecode[3]; - - /** - * Number of timecode in use - */ - int timecode_cnt; -} H264SEIPictureTiming; - -typedef struct H264SEIAFD { - int present; - uint8_t active_format_description; -} H264SEIAFD; - -typedef struct H264SEIA53Caption { - AVBufferRef *buf_ref; -} H264SEIA53Caption; - -typedef struct H264SEIUnregistered { - int x264_build; - AVBufferRef **buf_ref; - int nb_buf_ref; -} H264SEIUnregistered; - -typedef struct H264SEIRecoveryPoint { - /** - * recovery_frame_cnt - * - * Set to -1 if no recovery point SEI message found or to number of frames - * before playback synchronizes. Frames having recovery point are key - * frames. 
- */ - int recovery_frame_cnt; -} H264SEIRecoveryPoint; - -typedef struct H264SEIBufferingPeriod { - int present; ///< Buffering period SEI flag - int initial_cpb_removal_delay[32]; ///< Initial timestamps for CPBs -} H264SEIBufferingPeriod; - -typedef struct H264SEIFramePacking { - int present; - int arrangement_id; - int arrangement_cancel_flag; ///< is previous arrangement canceled, -1 if never received - H264_SEI_FpaType arrangement_type; - int arrangement_repetition_period; - int content_interpretation_type; - int quincunx_sampling_flag; - int current_frame_is_frame0_flag; -} H264SEIFramePacking; - -typedef struct H264SEIDisplayOrientation { - int present; - int anticlockwise_rotation; - int hflip, vflip; -} H264SEIDisplayOrientation; - -typedef struct H264SEIGreenMetaData { - uint8_t green_metadata_type; - uint8_t period_type; - uint16_t num_seconds; - uint16_t num_pictures; - uint8_t percent_non_zero_macroblocks; - uint8_t percent_intra_coded_macroblocks; - uint8_t percent_six_tap_filtering; - uint8_t percent_alpha_point_deblocking_instance; - uint8_t xsd_metric_type; - uint16_t xsd_metric_value; -} H264SEIGreenMetaData; - -typedef struct H264SEIAlternativeTransfer { - int present; - int preferred_transfer_characteristics; -} H264SEIAlternativeTransfer; - -typedef struct H264SEIFilmGrainCharacteristics { - int present; - int model_id; - int separate_colour_description_present_flag; - int bit_depth_luma; - int bit_depth_chroma; - int full_range; - int color_primaries; - int transfer_characteristics; - int matrix_coeffs; - int blending_mode_id; - int log2_scale_factor; - int comp_model_present_flag[3]; - uint16_t num_intensity_intervals[3]; - uint8_t num_model_values[3]; - uint8_t intensity_interval_lower_bound[3][256]; - uint8_t intensity_interval_upper_bound[3][256]; - int16_t comp_model_value[3][256][6]; - int repetition_period; -} H264SEIFilmGrainCharacteristics; - -typedef struct H264SEIContext { - H264SEIPictureTiming picture_timing; - H264SEIAFD afd; - 
H264SEIA53Caption a53_caption; - H264SEIUnregistered unregistered; - H264SEIRecoveryPoint recovery_point; - H264SEIBufferingPeriod buffering_period; - H264SEIFramePacking frame_packing; - H264SEIDisplayOrientation display_orientation; - H264SEIGreenMetaData green_metadata; - H264SEIAlternativeTransfer alternative_transfer; - H264SEIFilmGrainCharacteristics film_grain_characteristics; -} H264SEIContext; - -struct H264ParamSets; - -int ff_h264_sei_decode(H264SEIContext *h, GetBitContext *gb, - const struct H264ParamSets *ps, void *logctx); - -/** - * Reset SEI values at the beginning of the frame. - */ -void ff_h264_sei_uninit(H264SEIContext *h); - -/** - * Get stereo_mode string from the h264 frame_packing_arrangement - */ -const char *ff_h264_sei_stereo_mode(const H264SEIFramePacking *h); - -/** - * Parse the contents of a picture timing message given an active SPS. - */ -int ff_h264_sei_process_picture_timing(H264SEIPictureTiming *h, const SPS *sps, - void *logctx); - -#endif /* AVCODEC_H264_SEI_H */ diff --git a/third-party/cbs/hevc_sei.h b/third-party/cbs/hevc_sei.h deleted file mode 100644 index ef987f67818..00000000000 --- a/third-party/cbs/hevc_sei.h +++ /dev/null @@ -1,169 +0,0 @@ -/* - * HEVC Supplementary Enhancement Information messages - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_HEVC_SEI_H -#define AVCODEC_HEVC_SEI_H - -#include - -#include "libavutil/buffer.h" - -#include "get_bits.h" -#include "hevc.h" -#include "sei.h" - - -typedef enum { - HEVC_SEI_PIC_STRUCT_FRAME_DOUBLING = 7, - HEVC_SEI_PIC_STRUCT_FRAME_TRIPLING = 8 -} HEVC_SEI_PicStructType; - -typedef struct HEVCSEIPictureHash { - uint8_t md5[3][16]; - uint8_t is_md5; -} HEVCSEIPictureHash; - -typedef struct HEVCSEIFramePacking { - int present; - int arrangement_type; - int content_interpretation_type; - int quincunx_subsampling; - int current_frame_is_frame0_flag; -} HEVCSEIFramePacking; - -typedef struct HEVCSEIDisplayOrientation { - int present; - int anticlockwise_rotation; - int hflip, vflip; -} HEVCSEIDisplayOrientation; - -typedef struct HEVCSEIPictureTiming { - int picture_struct; -} HEVCSEIPictureTiming; - -typedef struct HEVCSEIA53Caption { - AVBufferRef *buf_ref; -} HEVCSEIA53Caption; - -typedef struct HEVCSEIUnregistered { - AVBufferRef **buf_ref; - int nb_buf_ref; -} HEVCSEIUnregistered; - -typedef struct HEVCSEIMasteringDisplay { - int present; - uint16_t display_primaries[3][2]; - uint16_t white_point[2]; - uint32_t max_luminance; - uint32_t min_luminance; -} HEVCSEIMasteringDisplay; - -typedef struct HEVCSEIDynamicHDRPlus { - AVBufferRef *info; -} HEVCSEIDynamicHDRPlus; - -typedef struct HEVCSEIDynamicHDRVivid { - AVBufferRef *info; -} HEVCSEIDynamicHDRVivid; - -typedef struct HEVCSEIContentLight { - int present; - uint16_t max_content_light_level; - uint16_t max_pic_average_light_level; -} HEVCSEIContentLight; - -typedef struct HEVCSEIAlternativeTransfer { - int present; - int preferred_transfer_characteristics; -} HEVCSEIAlternativeTransfer; - -typedef struct HEVCSEITimeCode { - int present; - uint8_t num_clock_ts; - 
uint8_t clock_timestamp_flag[3]; - uint8_t units_field_based_flag[3]; - uint8_t counting_type[3]; - uint8_t full_timestamp_flag[3]; - uint8_t discontinuity_flag[3]; - uint8_t cnt_dropped_flag[3]; - uint16_t n_frames[3]; - uint8_t seconds_value[3]; - uint8_t minutes_value[3]; - uint8_t hours_value[3]; - uint8_t seconds_flag[3]; - uint8_t minutes_flag[3]; - uint8_t hours_flag[3]; - uint8_t time_offset_length[3]; - int32_t time_offset_value[3]; -} HEVCSEITimeCode; - -typedef struct HEVCSEIFilmGrainCharacteristics { - int present; - int model_id; - int separate_colour_description_present_flag; - int bit_depth_luma; - int bit_depth_chroma; - int full_range; - int color_primaries; - int transfer_characteristics; - int matrix_coeffs; - int blending_mode_id; - int log2_scale_factor; - int comp_model_present_flag[3]; - uint16_t num_intensity_intervals[3]; - uint8_t num_model_values[3]; - uint8_t intensity_interval_lower_bound[3][256]; - uint8_t intensity_interval_upper_bound[3][256]; - int16_t comp_model_value[3][256][6]; - int persistence_flag; -} HEVCSEIFilmGrainCharacteristics; - -typedef struct HEVCSEI { - HEVCSEIPictureHash picture_hash; - HEVCSEIFramePacking frame_packing; - HEVCSEIDisplayOrientation display_orientation; - HEVCSEIPictureTiming picture_timing; - HEVCSEIA53Caption a53_caption; - HEVCSEIUnregistered unregistered; - HEVCSEIMasteringDisplay mastering_display; - HEVCSEIDynamicHDRPlus dynamic_hdr_plus; - HEVCSEIDynamicHDRVivid dynamic_hdr_vivid; - HEVCSEIContentLight content_light; - int active_seq_parameter_set_id; - HEVCSEIAlternativeTransfer alternative_transfer; - HEVCSEITimeCode timecode; - HEVCSEIFilmGrainCharacteristics film_grain_characteristics; -} HEVCSEI; - -struct HEVCParamSets; - -int ff_hevc_decode_nal_sei(GetBitContext *gb, void *logctx, HEVCSEI *s, - const struct HEVCParamSets *ps, enum HEVCNALUnitType type); - -/** - * Reset SEI values that are stored on the Context. - * e.g. Caption data that was extracted during NAL - * parsing. 
- * - * @param s HEVCContext. - */ -void ff_hevc_reset_sei(HEVCSEI *s); - -#endif /* AVCODEC_HEVC_SEI_H */ diff --git a/third-party/cbs/include/cbs/attributes.h b/third-party/cbs/include/cbs/attributes.h deleted file mode 100644 index 04c615c952c..00000000000 --- a/third-party/cbs/include/cbs/attributes.h +++ /dev/null @@ -1,173 +0,0 @@ -/* - * copyright (c) 2006 Michael Niedermayer - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * Macro definitions for various function/variable attributes - */ - -#ifndef AVUTIL_ATTRIBUTES_H -#define AVUTIL_ATTRIBUTES_H - -#ifdef __GNUC__ -# define AV_GCC_VERSION_AT_LEAST(x,y) (__GNUC__ > (x) || __GNUC__ == (x) && __GNUC_MINOR__ >= (y)) -# define AV_GCC_VERSION_AT_MOST(x,y) (__GNUC__ < (x) || __GNUC__ == (x) && __GNUC_MINOR__ <= (y)) -#else -# define AV_GCC_VERSION_AT_LEAST(x,y) 0 -# define AV_GCC_VERSION_AT_MOST(x,y) 0 -#endif - -#ifdef __has_builtin -# define AV_HAS_BUILTIN(x) __has_builtin(x) -#else -# define AV_HAS_BUILTIN(x) 0 -#endif - -#ifndef av_always_inline -#if AV_GCC_VERSION_AT_LEAST(3,1) -# define av_always_inline __attribute__((always_inline)) inline -#elif defined(_MSC_VER) -# define av_always_inline __forceinline -#else -# define av_always_inline inline -#endif -#endif - -#ifndef 
av_extern_inline -#if defined(__ICL) && __ICL >= 1210 || defined(__GNUC_STDC_INLINE__) -# define av_extern_inline extern inline -#else -# define av_extern_inline inline -#endif -#endif - -#if AV_GCC_VERSION_AT_LEAST(3,4) -# define av_warn_unused_result __attribute__((warn_unused_result)) -#else -# define av_warn_unused_result -#endif - -#if AV_GCC_VERSION_AT_LEAST(3,1) -# define av_noinline __attribute__((noinline)) -#elif defined(_MSC_VER) -# define av_noinline __declspec(noinline) -#else -# define av_noinline -#endif - -#if AV_GCC_VERSION_AT_LEAST(3,1) || defined(__clang__) -# define av_pure __attribute__((pure)) -#else -# define av_pure -#endif - -#if AV_GCC_VERSION_AT_LEAST(2,6) || defined(__clang__) -# define av_const __attribute__((const)) -#else -# define av_const -#endif - -#if AV_GCC_VERSION_AT_LEAST(4,3) || defined(__clang__) -# define av_cold __attribute__((cold)) -#else -# define av_cold -#endif - -#if AV_GCC_VERSION_AT_LEAST(4,1) && !defined(__llvm__) -# define av_flatten __attribute__((flatten)) -#else -# define av_flatten -#endif - -#if AV_GCC_VERSION_AT_LEAST(3,1) -# define attribute_deprecated __attribute__((deprecated)) -#elif defined(_MSC_VER) -# define attribute_deprecated __declspec(deprecated) -#else -# define attribute_deprecated -#endif - -/** - * Disable warnings about deprecated features - * This is useful for sections of code kept for backward compatibility and - * scheduled for removal. 
- */ -#ifndef AV_NOWARN_DEPRECATED -#if AV_GCC_VERSION_AT_LEAST(4,6) || defined(__clang__) -# define AV_NOWARN_DEPRECATED(code) \ - _Pragma("GCC diagnostic push") \ - _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") \ - code \ - _Pragma("GCC diagnostic pop") -#elif defined(_MSC_VER) -# define AV_NOWARN_DEPRECATED(code) \ - __pragma(warning(push)) \ - __pragma(warning(disable : 4996)) \ - code; \ - __pragma(warning(pop)) -#else -# define AV_NOWARN_DEPRECATED(code) code -#endif -#endif - -#if defined(__GNUC__) || defined(__clang__) -# define av_unused __attribute__((unused)) -#else -# define av_unused -#endif - -/** - * Mark a variable as used and prevent the compiler from optimizing it - * away. This is useful for variables accessed only from inline - * assembler without the compiler being aware. - */ -#if AV_GCC_VERSION_AT_LEAST(3,1) || defined(__clang__) -# define av_used __attribute__((used)) -#else -# define av_used -#endif - -#if AV_GCC_VERSION_AT_LEAST(3,3) || defined(__clang__) -# define av_alias __attribute__((may_alias)) -#else -# define av_alias -#endif - -#if (defined(__GNUC__) || defined(__clang__)) && !defined(__INTEL_COMPILER) -# define av_uninit(x) x=x -#else -# define av_uninit(x) x -#endif - -#if defined(__GNUC__) || defined(__clang__) -# define av_builtin_constant_p __builtin_constant_p -# define av_printf_format(fmtpos, attrpos) __attribute__((__format__(__printf__, fmtpos, attrpos))) -#else -# define av_builtin_constant_p(x) 0 -# define av_printf_format(fmtpos, attrpos) -#endif - -#if AV_GCC_VERSION_AT_LEAST(2,5) || defined(__clang__) -# define av_noreturn __attribute__((noreturn)) -#else -# define av_noreturn -#endif - -#endif /* AVUTIL_ATTRIBUTES_H */ diff --git a/third-party/cbs/include/cbs/av1.h b/third-party/cbs/include/cbs/av1.h deleted file mode 100644 index 384f7cddc7e..00000000000 --- a/third-party/cbs/include/cbs/av1.h +++ /dev/null @@ -1,184 +0,0 @@ -/* - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * AV1 common definitions - */ - -#ifndef AVCODEC_AV1_H -#define AVCODEC_AV1_H - -// OBU types (section 6.2.2). -typedef enum { - // 0 reserved. - AV1_OBU_SEQUENCE_HEADER = 1, - AV1_OBU_TEMPORAL_DELIMITER = 2, - AV1_OBU_FRAME_HEADER = 3, - AV1_OBU_TILE_GROUP = 4, - AV1_OBU_METADATA = 5, - AV1_OBU_FRAME = 6, - AV1_OBU_REDUNDANT_FRAME_HEADER = 7, - AV1_OBU_TILE_LIST = 8, - // 9-14 reserved. - AV1_OBU_PADDING = 15, -} AV1_OBU_Type; - -// Metadata types (section 6.7.1). -enum { - AV1_METADATA_TYPE_HDR_CLL = 1, - AV1_METADATA_TYPE_HDR_MDCV = 2, - AV1_METADATA_TYPE_SCALABILITY = 3, - AV1_METADATA_TYPE_ITUT_T35 = 4, - AV1_METADATA_TYPE_TIMECODE = 5, -}; - -// Frame types (section 6.8.2). -enum { - AV1_FRAME_KEY = 0, - AV1_FRAME_INTER = 1, - AV1_FRAME_INTRA_ONLY = 2, - AV1_FRAME_SWITCH = 3, -}; - -// Reference frames (section 6.10.24). -enum { - AV1_REF_FRAME_INTRA = 0, - AV1_REF_FRAME_LAST = 1, - AV1_REF_FRAME_LAST2 = 2, - AV1_REF_FRAME_LAST3 = 3, - AV1_REF_FRAME_GOLDEN = 4, - AV1_REF_FRAME_BWDREF = 5, - AV1_REF_FRAME_ALTREF2 = 6, - AV1_REF_FRAME_ALTREF = 7, -}; - -// Constants (section 3). 
-enum { - AV1_MAX_OPERATING_POINTS = 32, - - AV1_MAX_SB_SIZE = 128, - AV1_MI_SIZE = 4, - - AV1_MAX_TILE_WIDTH = 4096, - AV1_MAX_TILE_AREA = 4096 * 2304, - AV1_MAX_TILE_ROWS = 64, - AV1_MAX_TILE_COLS = 64, - - AV1_NUM_REF_FRAMES = 8, - AV1_REFS_PER_FRAME = 7, - AV1_TOTAL_REFS_PER_FRAME = 8, - AV1_PRIMARY_REF_NONE = 7, - - AV1_MAX_SEGMENTS = 8, - AV1_SEG_LVL_MAX = 8, - - AV1_SEG_LVL_ALT_Q = 0, - AV1_SEG_LVL_ALT_LF_Y_V = 1, - AV1_SEG_LVL_REF_FRAME = 5, - AV1_SEG_LVL_SKIP = 6, - AV1_SEG_LVL_GLOBAL_MV = 7, - - AV1_SELECT_SCREEN_CONTENT_TOOLS = 2, - AV1_SELECT_INTEGER_MV = 2, - - AV1_SUPERRES_NUM = 8, - AV1_SUPERRES_DENOM_MIN = 9, - - AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, - - AV1_GM_ABS_ALPHA_BITS = 12, - AV1_GM_ALPHA_PREC_BITS = 15, - AV1_GM_ABS_TRANS_ONLY_BITS = 9, - AV1_GM_TRANS_ONLY_PREC_BITS = 3, - AV1_GM_ABS_TRANS_BITS = 12, - AV1_GM_TRANS_PREC_BITS = 6, - AV1_WARPEDMODEL_PREC_BITS = 16, - - AV1_WARP_MODEL_IDENTITY = 0, - AV1_WARP_MODEL_TRANSLATION = 1, - AV1_WARP_MODEL_ROTZOOM = 2, - AV1_WARP_MODEL_AFFINE = 3, - AV1_WARP_PARAM_REDUCE_BITS = 6, - - AV1_DIV_LUT_BITS = 8, - AV1_DIV_LUT_PREC_BITS = 14, - AV1_DIV_LUT_NUM = 257, - - AV1_MAX_LOOP_FILTER = 63, -}; - - -// The main colour configuration information uses the same ISO/IEC 23001-8 -// (H.273) enums as FFmpeg does, so separate definitions are not required. - -// Chroma sample position. -enum { - AV1_CSP_UNKNOWN = 0, - AV1_CSP_VERTICAL = 1, // -> AVCHROMA_LOC_LEFT. - AV1_CSP_COLOCATED = 2, // -> AVCHROMA_LOC_TOPLEFT. 
-}; - -// Scalability modes (section 6.7.5) -enum { - AV1_SCALABILITY_L1T2 = 0, - AV1_SCALABILITY_L1T3 = 1, - AV1_SCALABILITY_L2T1 = 2, - AV1_SCALABILITY_L2T2 = 3, - AV1_SCALABILITY_L2T3 = 4, - AV1_SCALABILITY_S2T1 = 5, - AV1_SCALABILITY_S2T2 = 6, - AV1_SCALABILITY_S2T3 = 7, - AV1_SCALABILITY_L2T1h = 8, - AV1_SCALABILITY_L2T2h = 9, - AV1_SCALABILITY_L2T3h = 10, - AV1_SCALABILITY_S2T1h = 11, - AV1_SCALABILITY_S2T2h = 12, - AV1_SCALABILITY_S2T3h = 13, - AV1_SCALABILITY_SS = 14, - AV1_SCALABILITY_L3T1 = 15, - AV1_SCALABILITY_L3T2 = 16, - AV1_SCALABILITY_L3T3 = 17, - AV1_SCALABILITY_S3T1 = 18, - AV1_SCALABILITY_S3T2 = 19, - AV1_SCALABILITY_S3T3 = 20, - AV1_SCALABILITY_L3T2_KEY = 21, - AV1_SCALABILITY_L3T3_KEY = 22, - AV1_SCALABILITY_L4T5_KEY = 23, - AV1_SCALABILITY_L4T7_KEY = 24, - AV1_SCALABILITY_L3T2_KEY_SHIFT = 25, - AV1_SCALABILITY_L3T3_KEY_SHIFT = 26, - AV1_SCALABILITY_L4T5_KEY_SHIFT = 27, - AV1_SCALABILITY_L4T7_KEY_SHIFT = 28, -}; - -// Frame Restoration types (section 6.10.15) -enum { - AV1_RESTORE_NONE = 0, - AV1_RESTORE_WIENER = 1, - AV1_RESTORE_SGRPROJ = 2, - AV1_RESTORE_SWITCHABLE = 3, -}; - -// Sequence Headers are actually unbounded because one can use -// an arbitrary number of leading zeroes when encoding via uvlc. -// The following estimate is based around using the lowest number -// of bits for uvlc encoding. -#define AV1_SANE_SEQUENCE_HEADER_MAX_BITS 3138 - -#endif /* AVCODEC_AV1_H */ diff --git a/third-party/cbs/include/cbs/cbs.h b/third-party/cbs/include/cbs/cbs.h deleted file mode 100644 index 5583063b5e9..00000000000 --- a/third-party/cbs/include/cbs/cbs.h +++ /dev/null @@ -1,445 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. 
- * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_H -#define AVCODEC_CBS_H - -#include -#include - -#include "libavutil/buffer.h" - -#include "codec_id.h" -#include "codec_par.h" -#include "packet.h" - - -/* - * This defines a framework for converting between a coded bitstream - * and structures defining all individual syntax elements found in - * such a stream. - * - * Conversion in both directions is possible. Given a coded bitstream - * (any meaningful fragment), it can be parsed and decomposed into - * syntax elements stored in a set of codec-specific structures. - * Similarly, given a set of those same codec-specific structures the - * syntax elements can be serialised and combined to create a coded - * bitstream. - */ - -struct AVCodecContext; -struct CodedBitstreamType; - -/** - * The codec-specific type of a bitstream unit. - * - * AV1: obu_type - * H.264 / AVC: nal_unit_type - * H.265 / HEVC: nal_unit_type - * JPEG: marker value (without 0xff prefix) - * MPEG-2: start code value (without prefix) - * VP9: unused, set to zero (every unit is a frame) - */ -typedef uint32_t CodedBitstreamUnitType; - -/** - * Coded bitstream unit structure. - * - * A bitstream unit the smallest element of a bitstream which - * is meaningful on its own. For example, an H.264 NAL unit. - * - * See the codec-specific header for the meaning of this for any - * particular codec. - */ -typedef struct CodedBitstreamUnit { - /** - * Codec-specific type of this unit. 
- */ - CodedBitstreamUnitType type; - - /** - * Pointer to the directly-parsable bitstream form of this unit. - * - * May be NULL if the unit currently only exists in decomposed form. - */ - uint8_t *data; - /** - * The number of bytes in the bitstream (including any padding bits - * in the final byte). - */ - size_t data_size; - /** - * The number of bits which should be ignored in the final byte. - * - * This supports non-byte-aligned bitstreams. - */ - size_t data_bit_padding; - /** - * A reference to the buffer containing data. - * - * Must be set if data is not NULL. - */ - AVBufferRef *data_ref; - - /** - * Pointer to the decomposed form of this unit. - * - * The type of this structure depends on both the codec and the - * type of this unit. May be NULL if the unit only exists in - * bitstream form. - */ - void *content; - /** - * If content is reference counted, a reference to the buffer containing - * content. Null if content is not reference counted. - */ - AVBufferRef *content_ref; -} CodedBitstreamUnit; - -/** - * Coded bitstream fragment structure, combining one or more units. - * - * This is any sequence of units. It need not form some greater whole, - * though in many cases it will. For example, an H.264 access unit, - * which is composed of a sequence of H.264 NAL units. - */ -typedef struct CodedBitstreamFragment { - /** - * Pointer to the bitstream form of this fragment. - * - * May be NULL if the fragment only exists as component units. - */ - uint8_t *data; - /** - * The number of bytes in the bitstream. - * - * The number of bytes in the bitstream (including any padding bits - * in the final byte). - */ - size_t data_size; - /** - * The number of bits which should be ignored in the final byte. - */ - size_t data_bit_padding; - /** - * A reference to the buffer containing data. - * - * Must be set if data is not NULL. - */ - AVBufferRef *data_ref; - - /** - * Number of units in this fragment. 
- * - * This may be zero if the fragment only exists in bitstream form - * and has not been decomposed. - */ - int nb_units; - - /** - * Number of allocated units. - * - * Must always be >= nb_units; designed for internal use by cbs. - */ - int nb_units_allocated; - - /** - * Pointer to an array of units of length nb_units_allocated. - * Only the first nb_units are valid. - * - * Must be NULL if nb_units_allocated is zero. - */ - CodedBitstreamUnit *units; -} CodedBitstreamFragment; - -/** - * Context structure for coded bitstream operations. - */ -typedef struct CodedBitstreamContext { - /** - * Logging context to be passed to all av_log() calls associated - * with this context. - */ - void *log_ctx; - - /** - * Internal codec-specific hooks. - */ - const struct CodedBitstreamType *codec; - - /** - * Internal codec-specific data. - * - * This contains any information needed when reading/writing - * bitsteams which will not necessarily be present in a fragment. - * For example, for H.264 it contains all currently visible - * parameter sets - they are required to determine the bitstream - * syntax but need not be present in every access unit. - */ - void *priv_data; - - /** - * Array of unit types which should be decomposed when reading. - * - * Types not in this list will be available in bitstream form only. - * If NULL, all supported types will be decomposed. - */ - const CodedBitstreamUnitType *decompose_unit_types; - /** - * Length of the decompose_unit_types array. - */ - int nb_decompose_unit_types; - - /** - * Enable trace output during read/write operations. - */ - int trace_enable; - /** - * Log level to use for trace output. - * - * From AV_LOG_*; defaults to AV_LOG_TRACE. - */ - int trace_level; - - /** - * Write buffer. Used as intermediate buffer when writing units. - * For internal use of cbs only. - */ - uint8_t *write_buffer; - size_t write_buffer_size; -} CodedBitstreamContext; - - -/** - * Table of all supported codec IDs. 
- * - * Terminated by AV_CODEC_ID_NONE. - */ -extern const enum AVCodecID ff_cbs_all_codec_ids[]; - - -/** - * Create and initialise a new context for the given codec. - */ -int ff_cbs_init(CodedBitstreamContext **ctx, - enum AVCodecID codec_id, void *log_ctx); - -/** - * Reset all internal state in a context. - */ -void ff_cbs_flush(CodedBitstreamContext *ctx); - -/** - * Close a context and free all internal state. - */ -void ff_cbs_close(CodedBitstreamContext **ctx); - - -/** - * Read the extradata bitstream found in codec parameters into a - * fragment, then split into units and decompose. - * - * This also updates the internal state, so will need to be called for - * codecs with extradata to read parameter sets necessary for further - * parsing even if the fragment itself is not desired. - * - * The fragment must have been zeroed or reset via ff_cbs_fragment_reset - * before use. - */ -int ff_cbs_read_extradata(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVCodecParameters *par); - -/** - * Read the extradata bitstream found in a codec context into a - * fragment, then split into units and decompose. - * - * This acts identical to ff_cbs_read_extradata() for the case where - * you already have a codec context. - */ -int ff_cbs_read_extradata_from_codec(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const struct AVCodecContext *avctx); - -int ff_cbs_read_packet_side_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVPacket *pkt); - -/** - * Read the data bitstream from a packet into a fragment, then - * split into units and decompose. - * - * This also updates the internal state of the coded bitstream context - * with any persistent data from the fragment which may be required to - * read following fragments (e.g. parameter sets). - * - * The fragment must have been zeroed or reset via ff_cbs_fragment_reset - * before use. 
- */ -int ff_cbs_read_packet(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const AVPacket *pkt); - -/** - * Read a bitstream from a memory region into a fragment, then - * split into units and decompose. - * - * This also updates the internal state of the coded bitstream context - * with any persistent data from the fragment which may be required to - * read following fragments (e.g. parameter sets). - * - * The fragment must have been zeroed or reset via ff_cbs_fragment_reset - * before use. - */ -int ff_cbs_read(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag, - const uint8_t *data, size_t size); - - -/** - * Write the content of the fragment to its own internal buffer. - * - * Writes the content of all units and then assembles them into a new - * data buffer. When modifying the content of decomposed units, this - * can be used to regenerate the bitstream form of units or the whole - * fragment so that it can be extracted for other use. - * - * This also updates the internal state of the coded bitstream context - * with any persistent data from the fragment which may be required to - * write following fragments (e.g. parameter sets). - */ -int ff_cbs_write_fragment_data(CodedBitstreamContext *ctx, - CodedBitstreamFragment *frag); - -/** - * Write the bitstream of a fragment to the extradata in codec parameters. - * - * Modifies context and fragment as ff_cbs_write_fragment_data does and - * replaces any existing extradata in the structure. - */ -int ff_cbs_write_extradata(CodedBitstreamContext *ctx, - AVCodecParameters *par, - CodedBitstreamFragment *frag); - -/** - * Write the bitstream of a fragment to a packet. - * - * Modifies context and fragment as ff_cbs_write_fragment_data does. - * - * On success, the packet's buf is unreferenced and its buf, data and - * size fields are set to the corresponding values from the newly updated - * fragment; other fields are not touched. On failure, the packet is not - * touched at all. 
- */ -int ff_cbs_write_packet(CodedBitstreamContext *ctx, - AVPacket *pkt, - CodedBitstreamFragment *frag); - - -/** - * Free the units contained in a fragment as well as the fragment's - * own data buffer, but not the units array itself. - */ -void ff_cbs_fragment_reset(CodedBitstreamFragment *frag); - -/** - * Free the units array of a fragment in addition to what - * ff_cbs_fragment_reset does. - */ -void ff_cbs_fragment_free(CodedBitstreamFragment *frag); - -/** - * Allocate a new internal content buffer of the given size in the unit. - * - * The content will be zeroed. - */ -int ff_cbs_alloc_unit_content(CodedBitstreamUnit *unit, - size_t size, - void (*free)(void *opaque, uint8_t *content)); - -/** - * Allocate a new internal content buffer matching the type of the unit. - * - * The content will be zeroed. - */ -int ff_cbs_alloc_unit_content2(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); - -/** - * Insert a new unit into a fragment with the given content. - * - * The content structure continues to be owned by the caller if - * content_buf is not supplied. - */ -int ff_cbs_insert_unit_content(CodedBitstreamFragment *frag, - int position, - CodedBitstreamUnitType type, - void *content, - AVBufferRef *content_buf); - -/** - * Add a new unit to a fragment with the given data bitstream. - * - * If data_buf is not supplied then data must have been allocated with - * av_malloc() and will on success become owned by the unit after this - * call or freed on error. - */ -int ff_cbs_append_unit_data(CodedBitstreamFragment *frag, - CodedBitstreamUnitType type, - uint8_t *data, size_t data_size, - AVBufferRef *data_buf); - -/** - * Delete a unit from a fragment and free all memory it uses. - * - * Requires position to be >= 0 and < frag->nb_units. - */ -void ff_cbs_delete_unit(CodedBitstreamFragment *frag, - int position); - - -/** - * Make the content of a unit refcounted. 
- * - * If the unit is not refcounted, this will do a deep copy of the unit - * content to new refcounted buffers. - * - * It is not valid to call this function on a unit which does not have - * decomposed content. - */ -int ff_cbs_make_unit_refcounted(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); - -/** - * Make the content of a unit writable so that internal fields can be - * modified. - * - * If it is known that there are no other references to the content of - * the unit, does nothing and returns success. Otherwise (including the - * case where the unit content is not refcounted), it does a full clone - * of the content (including any internal buffers) to make a new copy, - * and replaces the existing references inside the unit with that. - * - * It is not valid to call this function on a unit which does not have - * decomposed content. - */ -int ff_cbs_make_unit_writable(CodedBitstreamContext *ctx, - CodedBitstreamUnit *unit); - - -#endif /* AVCODEC_CBS_H */ diff --git a/third-party/cbs/include/cbs/cbs_av1.h b/third-party/cbs/include/cbs/cbs_av1.h deleted file mode 100644 index 1fc80dcfa05..00000000000 --- a/third-party/cbs/include/cbs/cbs_av1.h +++ /dev/null @@ -1,464 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_AV1_H -#define AVCODEC_CBS_AV1_H - -#include -#include - -#include "av1.h" -#include "cbs.h" - - -typedef struct AV1RawOBUHeader { - uint8_t obu_forbidden_bit; - uint8_t obu_type; - uint8_t obu_extension_flag; - uint8_t obu_has_size_field; - uint8_t obu_reserved_1bit; - - uint8_t temporal_id; - uint8_t spatial_id; - uint8_t extension_header_reserved_3bits; -} AV1RawOBUHeader; - -typedef struct AV1RawColorConfig { - uint8_t high_bitdepth; - uint8_t twelve_bit; - uint8_t mono_chrome; - - uint8_t color_description_present_flag; - uint8_t color_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint8_t color_range; - uint8_t subsampling_x; - uint8_t subsampling_y; - uint8_t chroma_sample_position; - uint8_t separate_uv_delta_q; -} AV1RawColorConfig; - -typedef struct AV1RawTimingInfo { - uint32_t num_units_in_display_tick; - uint32_t time_scale; - - uint8_t equal_picture_interval; - uint32_t num_ticks_per_picture_minus_1; -} AV1RawTimingInfo; - -typedef struct AV1RawDecoderModelInfo { - uint8_t buffer_delay_length_minus_1; - uint32_t num_units_in_decoding_tick; - uint8_t buffer_removal_time_length_minus_1; - uint8_t frame_presentation_time_length_minus_1; -} AV1RawDecoderModelInfo; - -typedef struct AV1RawSequenceHeader { - uint8_t seq_profile; - uint8_t still_picture; - uint8_t reduced_still_picture_header; - - uint8_t timing_info_present_flag; - uint8_t decoder_model_info_present_flag; - uint8_t initial_display_delay_present_flag; - uint8_t operating_points_cnt_minus_1; - - AV1RawTimingInfo timing_info; - AV1RawDecoderModelInfo decoder_model_info; - - uint16_t operating_point_idc[AV1_MAX_OPERATING_POINTS]; - uint8_t seq_level_idx[AV1_MAX_OPERATING_POINTS]; - uint8_t 
seq_tier[AV1_MAX_OPERATING_POINTS]; - uint8_t decoder_model_present_for_this_op[AV1_MAX_OPERATING_POINTS]; - uint32_t decoder_buffer_delay[AV1_MAX_OPERATING_POINTS]; - uint32_t encoder_buffer_delay[AV1_MAX_OPERATING_POINTS]; - uint8_t low_delay_mode_flag[AV1_MAX_OPERATING_POINTS]; - uint8_t initial_display_delay_present_for_this_op[AV1_MAX_OPERATING_POINTS]; - uint8_t initial_display_delay_minus_1[AV1_MAX_OPERATING_POINTS]; - - uint8_t frame_width_bits_minus_1; - uint8_t frame_height_bits_minus_1; - uint16_t max_frame_width_minus_1; - uint16_t max_frame_height_minus_1; - - uint8_t frame_id_numbers_present_flag; - uint8_t delta_frame_id_length_minus_2; - uint8_t additional_frame_id_length_minus_1; - - uint8_t use_128x128_superblock; - uint8_t enable_filter_intra; - uint8_t enable_intra_edge_filter; - uint8_t enable_interintra_compound; - uint8_t enable_masked_compound; - uint8_t enable_warped_motion; - uint8_t enable_dual_filter; - - uint8_t enable_order_hint; - uint8_t enable_jnt_comp; - uint8_t enable_ref_frame_mvs; - - uint8_t seq_choose_screen_content_tools; - uint8_t seq_force_screen_content_tools; - uint8_t seq_choose_integer_mv; - uint8_t seq_force_integer_mv; - - uint8_t order_hint_bits_minus_1; - - uint8_t enable_superres; - uint8_t enable_cdef; - uint8_t enable_restoration; - - AV1RawColorConfig color_config; - - uint8_t film_grain_params_present; -} AV1RawSequenceHeader; - -typedef struct AV1RawFilmGrainParams { - uint8_t apply_grain; - uint16_t grain_seed; - uint8_t update_grain; - uint8_t film_grain_params_ref_idx; - uint8_t num_y_points; - uint8_t point_y_value[14]; - uint8_t point_y_scaling[14]; - uint8_t chroma_scaling_from_luma; - uint8_t num_cb_points; - uint8_t point_cb_value[10]; - uint8_t point_cb_scaling[10]; - uint8_t num_cr_points; - uint8_t point_cr_value[10]; - uint8_t point_cr_scaling[10]; - uint8_t grain_scaling_minus_8; - uint8_t ar_coeff_lag; - uint8_t ar_coeffs_y_plus_128[24]; - uint8_t ar_coeffs_cb_plus_128[25]; - uint8_t 
ar_coeffs_cr_plus_128[25]; - uint8_t ar_coeff_shift_minus_6; - uint8_t grain_scale_shift; - uint8_t cb_mult; - uint8_t cb_luma_mult; - uint16_t cb_offset; - uint8_t cr_mult; - uint8_t cr_luma_mult; - uint16_t cr_offset; - uint8_t overlap_flag; - uint8_t clip_to_restricted_range; -} AV1RawFilmGrainParams; - -typedef struct AV1RawFrameHeader { - uint8_t show_existing_frame; - uint8_t frame_to_show_map_idx; - uint32_t frame_presentation_time; - uint32_t display_frame_id; - - uint8_t frame_type; - uint8_t show_frame; - uint8_t showable_frame; - - uint8_t error_resilient_mode; - uint8_t disable_cdf_update; - uint8_t allow_screen_content_tools; - uint8_t force_integer_mv; - - uint32_t current_frame_id; - uint8_t frame_size_override_flag; - uint8_t order_hint; - - uint8_t buffer_removal_time_present_flag; - uint32_t buffer_removal_time[AV1_MAX_OPERATING_POINTS]; - - uint8_t primary_ref_frame; - uint16_t frame_width_minus_1; - uint16_t frame_height_minus_1; - uint8_t use_superres; - uint8_t coded_denom; - uint8_t render_and_frame_size_different; - uint16_t render_width_minus_1; - uint16_t render_height_minus_1; - - uint8_t found_ref[AV1_REFS_PER_FRAME]; - - uint8_t refresh_frame_flags; - uint8_t allow_intrabc; - uint8_t ref_order_hint[AV1_NUM_REF_FRAMES]; - uint8_t frame_refs_short_signaling; - uint8_t last_frame_idx; - uint8_t golden_frame_idx; - int8_t ref_frame_idx[AV1_REFS_PER_FRAME]; - uint32_t delta_frame_id_minus1[AV1_REFS_PER_FRAME]; - - uint8_t allow_high_precision_mv; - uint8_t is_filter_switchable; - uint8_t interpolation_filter; - uint8_t is_motion_mode_switchable; - uint8_t use_ref_frame_mvs; - - uint8_t disable_frame_end_update_cdf; - - uint8_t uniform_tile_spacing_flag; - uint8_t tile_cols_log2; - uint8_t tile_rows_log2; - uint8_t width_in_sbs_minus_1[AV1_MAX_TILE_COLS]; - uint8_t height_in_sbs_minus_1[AV1_MAX_TILE_ROWS]; - uint16_t context_update_tile_id; - uint8_t tile_size_bytes_minus1; - - // These are derived values, but it's very unhelpful to have to - 
// recalculate them all the time so we store them here. - uint16_t tile_cols; - uint16_t tile_rows; - - uint8_t base_q_idx; - int8_t delta_q_y_dc; - uint8_t diff_uv_delta; - int8_t delta_q_u_dc; - int8_t delta_q_u_ac; - int8_t delta_q_v_dc; - int8_t delta_q_v_ac; - uint8_t using_qmatrix; - uint8_t qm_y; - uint8_t qm_u; - uint8_t qm_v; - - uint8_t segmentation_enabled; - uint8_t segmentation_update_map; - uint8_t segmentation_temporal_update; - uint8_t segmentation_update_data; - uint8_t feature_enabled[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; - int16_t feature_value[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; - - uint8_t delta_q_present; - uint8_t delta_q_res; - uint8_t delta_lf_present; - uint8_t delta_lf_res; - uint8_t delta_lf_multi; - - uint8_t loop_filter_level[4]; - uint8_t loop_filter_sharpness; - uint8_t loop_filter_delta_enabled; - uint8_t loop_filter_delta_update; - uint8_t update_ref_delta[AV1_TOTAL_REFS_PER_FRAME]; - int8_t loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME]; - uint8_t update_mode_delta[2]; - int8_t loop_filter_mode_deltas[2]; - - uint8_t cdef_damping_minus_3; - uint8_t cdef_bits; - uint8_t cdef_y_pri_strength[8]; - uint8_t cdef_y_sec_strength[8]; - uint8_t cdef_uv_pri_strength[8]; - uint8_t cdef_uv_sec_strength[8]; - - uint8_t lr_type[3]; - uint8_t lr_unit_shift; - uint8_t lr_uv_shift; - - uint8_t tx_mode; - uint8_t reference_select; - uint8_t skip_mode_present; - - uint8_t allow_warped_motion; - uint8_t reduced_tx_set; - - uint8_t is_global[AV1_TOTAL_REFS_PER_FRAME]; - uint8_t is_rot_zoom[AV1_TOTAL_REFS_PER_FRAME]; - uint8_t is_translation[AV1_TOTAL_REFS_PER_FRAME]; - //AV1RawSubexp gm_params[AV1_TOTAL_REFS_PER_FRAME][6]; - uint32_t gm_params[AV1_TOTAL_REFS_PER_FRAME][6]; - - AV1RawFilmGrainParams film_grain; -} AV1RawFrameHeader; - -typedef struct AV1RawTileData { - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; -} AV1RawTileData; - -typedef struct AV1RawTileGroup { - uint8_t tile_start_and_end_present_flag; - uint16_t tg_start; - 
uint16_t tg_end; - - AV1RawTileData tile_data; -} AV1RawTileGroup; - -typedef struct AV1RawFrame { - AV1RawFrameHeader header; - AV1RawTileGroup tile_group; -} AV1RawFrame; - -typedef struct AV1RawTileList { - uint8_t output_frame_width_in_tiles_minus_1; - uint8_t output_frame_height_in_tiles_minus_1; - uint16_t tile_count_minus_1; - - AV1RawTileData tile_data; -} AV1RawTileList; - -typedef struct AV1RawMetadataHDRCLL { - uint16_t max_cll; - uint16_t max_fall; -} AV1RawMetadataHDRCLL; - -typedef struct AV1RawMetadataHDRMDCV { - uint16_t primary_chromaticity_x[3]; - uint16_t primary_chromaticity_y[3]; - uint16_t white_point_chromaticity_x; - uint16_t white_point_chromaticity_y; - uint32_t luminance_max; - uint32_t luminance_min; -} AV1RawMetadataHDRMDCV; - -typedef struct AV1RawMetadataScalability { - uint8_t scalability_mode_idc; - uint8_t spatial_layers_cnt_minus_1; - uint8_t spatial_layer_dimensions_present_flag; - uint8_t spatial_layer_description_present_flag; - uint8_t temporal_group_description_present_flag; - uint8_t scalability_structure_reserved_3bits; - uint16_t spatial_layer_max_width[4]; - uint16_t spatial_layer_max_height[4]; - uint8_t spatial_layer_ref_id[4]; - uint8_t temporal_group_size; - uint8_t temporal_group_temporal_id[255]; - uint8_t temporal_group_temporal_switching_up_point_flag[255]; - uint8_t temporal_group_spatial_switching_up_point_flag[255]; - uint8_t temporal_group_ref_cnt[255]; - uint8_t temporal_group_ref_pic_diff[255][7]; -} AV1RawMetadataScalability; - -typedef struct AV1RawMetadataITUTT35 { - uint8_t itu_t_t35_country_code; - uint8_t itu_t_t35_country_code_extension_byte; - - uint8_t *payload; - AVBufferRef *payload_ref; - size_t payload_size; -} AV1RawMetadataITUTT35; - -typedef struct AV1RawMetadataTimecode { - uint8_t counting_type; - uint8_t full_timestamp_flag; - uint8_t discontinuity_flag; - uint8_t cnt_dropped_flag; - uint16_t n_frames; - uint8_t seconds_value; - uint8_t minutes_value; - uint8_t hours_value; - uint8_t 
seconds_flag; - uint8_t minutes_flag; - uint8_t hours_flag; - uint8_t time_offset_length; - uint32_t time_offset_value; -} AV1RawMetadataTimecode; - -typedef struct AV1RawMetadata { - uint64_t metadata_type; - union { - AV1RawMetadataHDRCLL hdr_cll; - AV1RawMetadataHDRMDCV hdr_mdcv; - AV1RawMetadataScalability scalability; - AV1RawMetadataITUTT35 itut_t35; - AV1RawMetadataTimecode timecode; - } metadata; -} AV1RawMetadata; - -typedef struct AV1RawPadding { - uint8_t *payload; - AVBufferRef *payload_ref; - size_t payload_size; -} AV1RawPadding; - - -typedef struct AV1RawOBU { - AV1RawOBUHeader header; - - size_t obu_size; - - union { - AV1RawSequenceHeader sequence_header; - AV1RawFrameHeader frame_header; - AV1RawFrame frame; - AV1RawTileGroup tile_group; - AV1RawTileList tile_list; - AV1RawMetadata metadata; - AV1RawPadding padding; - } obu; -} AV1RawOBU; - -typedef struct AV1ReferenceFrameState { - int valid; // RefValid - int frame_id; // RefFrameId - int upscaled_width; // RefUpscaledWidth - int frame_width; // RefFrameWidth - int frame_height; // RefFrameHeight - int render_width; // RefRenderWidth - int render_height; // RefRenderHeight - int frame_type; // RefFrameType - int subsampling_x; // RefSubsamplingX - int subsampling_y; // RefSubsamplingY - int bit_depth; // RefBitDepth - int order_hint; // RefOrderHint - - int8_t loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME]; - int8_t loop_filter_mode_deltas[2]; - uint8_t feature_enabled[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; - int16_t feature_value[AV1_MAX_SEGMENTS][AV1_SEG_LVL_MAX]; -} AV1ReferenceFrameState; - -typedef struct CodedBitstreamAV1Context { - const AVClass *class; - - AV1RawSequenceHeader *sequence_header; - AVBufferRef *sequence_header_ref; - - int seen_frame_header; - AVBufferRef *frame_header_ref; - uint8_t *frame_header; - size_t frame_header_size; - - int temporal_id; - int spatial_id; - int operating_point_idc; - - int bit_depth; - int order_hint; - int frame_width; - int frame_height; - int 
upscaled_width; - int render_width; - int render_height; - - int num_planes; - int coded_lossless; - int all_lossless; - int tile_cols; - int tile_rows; - int tile_num; - - AV1ReferenceFrameState ref[AV1_NUM_REF_FRAMES]; - - // AVOptions - int operating_point; -} CodedBitstreamAV1Context; - - -#endif /* AVCODEC_CBS_AV1_H */ diff --git a/third-party/cbs/include/cbs/cbs_bsf.h b/third-party/cbs/include/cbs/cbs_bsf.h deleted file mode 100644 index aa7385c8f22..00000000000 --- a/third-party/cbs/include/cbs/cbs_bsf.h +++ /dev/null @@ -1,137 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_BSF_H -#define AVCODEC_CBS_BSF_H - -#include "libavutil/log.h" -#include "libavutil/opt.h" - -#include "bsf.h" -#include "codec_id.h" -#include "cbs.h" -#include "packet.h" - - -typedef struct CBSBSFType { - enum AVCodecID codec_id; - - // Name of a frame fragment in this codec (e.g. "access unit", - // "temporal unit"). - const char *fragment_name; - - // Name of a unit for this BSF, for use in error messages (e.g. - // "NAL unit", "OBU"). - const char *unit_name; - - // Update the content of a fragment with whatever metadata changes - // are desired. 
The associated AVPacket is provided so that any side - // data associated with the fragment can be inspected or edited. If - // pkt is NULL, then an extradata header fragment is being updated. - int (*update_fragment)(AVBSFContext *bsf, AVPacket *pkt, - CodedBitstreamFragment *frag); -} CBSBSFType; - -// Common structure for all generic CBS BSF users. An instance of this -// structure must be the first member of the BSF private context (to be -// pointed to by AVBSFContext.priv_data). -typedef struct CBSBSFContext { - const AVClass *class; - const CBSBSFType *type; - - CodedBitstreamContext *input; - CodedBitstreamContext *output; - CodedBitstreamFragment fragment; -} CBSBSFContext; - -/** - * Initialise generic CBS BSF setup. - * - * Creates the input and output CBS instances, and applies the filter to - * the extradata on the input codecpar if any is present. - * - * Since it calls the update_fragment() function immediately to deal with - * extradata, this should be called after any codec-specific setup is done - * (probably at the end of the FFBitStreamFilter.init function). - */ -int ff_cbs_bsf_generic_init(AVBSFContext *bsf, const CBSBSFType *type); - -/** - * Close a generic CBS BSF instance. - * - * If no other deinitialisation is required then this function can be used - * directly as FFBitStreamFilter.close. - */ -void ff_cbs_bsf_generic_close(AVBSFContext *bsf); - -/** - * Filter operation for CBS BSF. - * - * Reads the input packet into a CBS fragment, calls update_fragment() on - * it, then writes the result to an output packet. If the input packet - * has AV_PKT_DATA_NEW_EXTRADATA side-data associated with it then it does - * the same thing to that new extradata to form the output side-data first. - * - * If the BSF does not do anything else then this function can be used - * directly as FFBitStreamFilter.filter. - */ -int ff_cbs_bsf_generic_filter(AVBSFContext *bsf, AVPacket *pkt); - - -// Options for element manipulation. 
-enum { - // Pass this element through unchanged. - BSF_ELEMENT_PASS, - // Insert this element, replacing any existing instances of it. - // Associated values may be provided explicitly (as addtional options) - // or implicitly (either as side data or deduced from other parts of - // the stream). - BSF_ELEMENT_INSERT, - // Remove this element if it appears in the stream. - BSF_ELEMENT_REMOVE, - // Extract this element to side data, so that further manipulation - // can happen elsewhere. - BSF_ELEMENT_EXTRACT, -}; - -#define BSF_ELEMENT_OPTIONS_PIR(name, help, field, opt_flags) \ - { name, help, OFFSET(field), AV_OPT_TYPE_INT, \ - { .i64 = BSF_ELEMENT_PASS }, \ - BSF_ELEMENT_PASS, BSF_ELEMENT_REMOVE, opt_flags, name }, \ - { "pass", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_PASS }, .flags = opt_flags, .unit = name }, \ - { "insert", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_INSERT }, .flags = opt_flags, .unit = name }, \ - { "remove", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_REMOVE }, .flags = opt_flags, .unit = name } - -#define BSF_ELEMENT_OPTIONS_PIRE(name, help, field, opt_flags) \ - { name, help, OFFSET(field), AV_OPT_TYPE_INT, \ - { .i64 = BSF_ELEMENT_PASS }, \ - BSF_ELEMENT_PASS, BSF_ELEMENT_EXTRACT, opt_flags, name }, \ - { "pass", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_PASS }, .flags = opt_flags, .unit = name }, \ - { "insert", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_INSERT }, .flags = opt_flags, .unit = name }, \ - { "remove", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_REMOVE }, .flags = opt_flags, .unit = name }, \ - { "extract", NULL, 0, AV_OPT_TYPE_CONST, \ - { .i64 = BSF_ELEMENT_EXTRACT }, .flags = opt_flags, .unit = name } \ - - -#endif /* AVCODEC_CBS_BSF_H */ diff --git a/third-party/cbs/include/cbs/cbs_h264.h b/third-party/cbs/include/cbs/cbs_h264.h deleted file mode 100644 index ca9b688c057..00000000000 --- a/third-party/cbs/include/cbs/cbs_h264.h +++ /dev/null @@ -1,427 +0,0 @@ 
-/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_H264_H -#define AVCODEC_CBS_H264_H - -#include -#include - -#include "cbs.h" -#include "cbs_h2645.h" -#include "cbs_sei.h" -#include "h264.h" - - -typedef struct H264RawNALUnitHeader { - uint8_t nal_ref_idc; - uint8_t nal_unit_type; - - uint8_t svc_extension_flag; - uint8_t avc_3d_extension_flag; -} H264RawNALUnitHeader; - -typedef struct H264RawScalingList { - int8_t delta_scale[64]; -} H264RawScalingList; - -typedef struct H264RawHRD { - uint8_t cpb_cnt_minus1; - uint8_t bit_rate_scale; - uint8_t cpb_size_scale; - - uint32_t bit_rate_value_minus1[H264_MAX_CPB_CNT]; - uint32_t cpb_size_value_minus1[H264_MAX_CPB_CNT]; - uint8_t cbr_flag[H264_MAX_CPB_CNT]; - - uint8_t initial_cpb_removal_delay_length_minus1; - uint8_t cpb_removal_delay_length_minus1; - uint8_t dpb_output_delay_length_minus1; - uint8_t time_offset_length; -} H264RawHRD; - -typedef struct H264RawVUI { - uint8_t aspect_ratio_info_present_flag; - uint8_t aspect_ratio_idc; - uint16_t sar_width; - uint16_t sar_height; - - uint8_t overscan_info_present_flag; - uint8_t overscan_appropriate_flag; - - uint8_t video_signal_type_present_flag; - uint8_t video_format; - uint8_t video_full_range_flag; - uint8_t 
colour_description_present_flag; - uint8_t colour_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint8_t chroma_loc_info_present_flag; - uint8_t chroma_sample_loc_type_top_field; - uint8_t chroma_sample_loc_type_bottom_field; - - uint8_t timing_info_present_flag; - uint32_t num_units_in_tick; - uint32_t time_scale; - uint8_t fixed_frame_rate_flag; - - uint8_t nal_hrd_parameters_present_flag; - H264RawHRD nal_hrd_parameters; - uint8_t vcl_hrd_parameters_present_flag; - H264RawHRD vcl_hrd_parameters; - uint8_t low_delay_hrd_flag; - - uint8_t pic_struct_present_flag; - - uint8_t bitstream_restriction_flag; - uint8_t motion_vectors_over_pic_boundaries_flag; - uint8_t max_bytes_per_pic_denom; - uint8_t max_bits_per_mb_denom; - uint8_t log2_max_mv_length_horizontal; - uint8_t log2_max_mv_length_vertical; - uint8_t max_num_reorder_frames; - uint8_t max_dec_frame_buffering; -} H264RawVUI; - -typedef struct H264RawSPS { - H264RawNALUnitHeader nal_unit_header; - - uint8_t profile_idc; - uint8_t constraint_set0_flag; - uint8_t constraint_set1_flag; - uint8_t constraint_set2_flag; - uint8_t constraint_set3_flag; - uint8_t constraint_set4_flag; - uint8_t constraint_set5_flag; - uint8_t reserved_zero_2bits; - uint8_t level_idc; - - uint8_t seq_parameter_set_id; - - uint8_t chroma_format_idc; - uint8_t separate_colour_plane_flag; - uint8_t bit_depth_luma_minus8; - uint8_t bit_depth_chroma_minus8; - uint8_t qpprime_y_zero_transform_bypass_flag; - - uint8_t seq_scaling_matrix_present_flag; - uint8_t seq_scaling_list_present_flag[12]; - H264RawScalingList scaling_list_4x4[6]; - H264RawScalingList scaling_list_8x8[6]; - - uint8_t log2_max_frame_num_minus4; - uint8_t pic_order_cnt_type; - uint8_t log2_max_pic_order_cnt_lsb_minus4; - uint8_t delta_pic_order_always_zero_flag; - int32_t offset_for_non_ref_pic; - int32_t offset_for_top_to_bottom_field; - uint8_t num_ref_frames_in_pic_order_cnt_cycle; - int32_t offset_for_ref_frame[256]; - - uint8_t 
max_num_ref_frames; - uint8_t gaps_in_frame_num_allowed_flag; - - uint16_t pic_width_in_mbs_minus1; - uint16_t pic_height_in_map_units_minus1; - - uint8_t frame_mbs_only_flag; - uint8_t mb_adaptive_frame_field_flag; - uint8_t direct_8x8_inference_flag; - - uint8_t frame_cropping_flag; - uint16_t frame_crop_left_offset; - uint16_t frame_crop_right_offset; - uint16_t frame_crop_top_offset; - uint16_t frame_crop_bottom_offset; - - uint8_t vui_parameters_present_flag; - H264RawVUI vui; -} H264RawSPS; - -typedef struct H264RawSPSExtension { - H264RawNALUnitHeader nal_unit_header; - - uint8_t seq_parameter_set_id; - - uint8_t aux_format_idc; - uint8_t bit_depth_aux_minus8; - uint8_t alpha_incr_flag; - uint16_t alpha_opaque_value; - uint16_t alpha_transparent_value; - - uint8_t additional_extension_flag; -} H264RawSPSExtension; - -typedef struct H264RawPPS { - H264RawNALUnitHeader nal_unit_header; - - uint8_t pic_parameter_set_id; - uint8_t seq_parameter_set_id; - - uint8_t entropy_coding_mode_flag; - uint8_t bottom_field_pic_order_in_frame_present_flag; - - uint8_t num_slice_groups_minus1; - uint8_t slice_group_map_type; - uint16_t run_length_minus1[H264_MAX_SLICE_GROUPS]; - uint16_t top_left[H264_MAX_SLICE_GROUPS]; - uint16_t bottom_right[H264_MAX_SLICE_GROUPS]; - uint8_t slice_group_change_direction_flag; - uint16_t slice_group_change_rate_minus1; - uint16_t pic_size_in_map_units_minus1; - - uint8_t *slice_group_id; - AVBufferRef *slice_group_id_ref; - - uint8_t num_ref_idx_l0_default_active_minus1; - uint8_t num_ref_idx_l1_default_active_minus1; - - uint8_t weighted_pred_flag; - uint8_t weighted_bipred_idc; - - int8_t pic_init_qp_minus26; - int8_t pic_init_qs_minus26; - int8_t chroma_qp_index_offset; - - uint8_t deblocking_filter_control_present_flag; - uint8_t constrained_intra_pred_flag; - - uint8_t more_rbsp_data; - - uint8_t redundant_pic_cnt_present_flag; - uint8_t transform_8x8_mode_flag; - - uint8_t pic_scaling_matrix_present_flag; - uint8_t 
pic_scaling_list_present_flag[12]; - H264RawScalingList scaling_list_4x4[6]; - H264RawScalingList scaling_list_8x8[6]; - - int8_t second_chroma_qp_index_offset; -} H264RawPPS; - -typedef struct H264RawAUD { - H264RawNALUnitHeader nal_unit_header; - - uint8_t primary_pic_type; -} H264RawAUD; - -typedef struct H264RawSEIBufferingPeriod { - uint8_t seq_parameter_set_id; - struct { - uint32_t initial_cpb_removal_delay[H264_MAX_CPB_CNT]; - uint32_t initial_cpb_removal_delay_offset[H264_MAX_CPB_CNT]; - } nal, vcl; -} H264RawSEIBufferingPeriod; - -typedef struct H264RawSEIPicTimestamp { - uint8_t ct_type; - uint8_t nuit_field_based_flag; - uint8_t counting_type; - uint8_t full_timestamp_flag; - uint8_t discontinuity_flag; - uint8_t cnt_dropped_flag; - uint8_t n_frames; - uint8_t seconds_flag; - uint8_t seconds_value; - uint8_t minutes_flag; - uint8_t minutes_value; - uint8_t hours_flag; - uint8_t hours_value; - int32_t time_offset; -} H264RawSEIPicTimestamp; - -typedef struct H264RawSEIPicTiming { - uint32_t cpb_removal_delay; - uint32_t dpb_output_delay; - uint8_t pic_struct; - uint8_t clock_timestamp_flag[3]; - H264RawSEIPicTimestamp timestamp[3]; -} H264RawSEIPicTiming; - -typedef struct H264RawSEIPanScanRect { - uint32_t pan_scan_rect_id; - uint8_t pan_scan_rect_cancel_flag; - uint8_t pan_scan_cnt_minus1; - int32_t pan_scan_rect_left_offset[3]; - int32_t pan_scan_rect_right_offset[3]; - int32_t pan_scan_rect_top_offset[3]; - int32_t pan_scan_rect_bottom_offset[3]; - uint16_t pan_scan_rect_repetition_period; -} H264RawSEIPanScanRect; - -typedef struct H264RawSEIRecoveryPoint { - uint16_t recovery_frame_cnt; - uint8_t exact_match_flag; - uint8_t broken_link_flag; - uint8_t changing_slice_group_idc; -} H264RawSEIRecoveryPoint; - -typedef struct H264RawFilmGrainCharacteristics { - uint8_t film_grain_characteristics_cancel_flag; - uint8_t film_grain_model_id; - uint8_t separate_colour_description_present_flag; - uint8_t film_grain_bit_depth_luma_minus8; - uint8_t 
film_grain_bit_depth_chroma_minus8; - uint8_t film_grain_full_range_flag; - uint8_t film_grain_colour_primaries; - uint8_t film_grain_transfer_characteristics; - uint8_t film_grain_matrix_coefficients; - uint8_t blending_mode_id; - uint8_t log2_scale_factor; - uint8_t comp_model_present_flag[3]; - uint8_t num_intensity_intervals_minus1[3]; - uint8_t num_model_values_minus1[3]; - uint8_t intensity_interval_lower_bound[3][256]; - uint8_t intensity_interval_upper_bound[3][256]; - int16_t comp_model_value[3][256][6]; - uint8_t film_grain_characteristics_repetition_period; -} H264RawFilmGrainCharacteristics; - -typedef struct H264RawSEIDisplayOrientation { - uint8_t display_orientation_cancel_flag; - uint8_t hor_flip; - uint8_t ver_flip; - uint16_t anticlockwise_rotation; - uint16_t display_orientation_repetition_period; - uint8_t display_orientation_extension_flag; -} H264RawSEIDisplayOrientation; - -typedef struct H264RawSEI { - H264RawNALUnitHeader nal_unit_header; - SEIRawMessageList message_list; -} H264RawSEI; - -typedef struct H264RawSliceHeader { - H264RawNALUnitHeader nal_unit_header; - - uint32_t first_mb_in_slice; - uint8_t slice_type; - - uint8_t pic_parameter_set_id; - - uint8_t colour_plane_id; - - uint16_t frame_num; - uint8_t field_pic_flag; - uint8_t bottom_field_flag; - - uint16_t idr_pic_id; - - uint16_t pic_order_cnt_lsb; - int32_t delta_pic_order_cnt_bottom; - int32_t delta_pic_order_cnt[2]; - - uint8_t redundant_pic_cnt; - uint8_t direct_spatial_mv_pred_flag; - - uint8_t num_ref_idx_active_override_flag; - uint8_t num_ref_idx_l0_active_minus1; - uint8_t num_ref_idx_l1_active_minus1; - - uint8_t ref_pic_list_modification_flag_l0; - uint8_t ref_pic_list_modification_flag_l1; - struct { - uint8_t modification_of_pic_nums_idc; - int32_t abs_diff_pic_num_minus1; - uint8_t long_term_pic_num; - } rplm_l0[H264_MAX_RPLM_COUNT], rplm_l1[H264_MAX_RPLM_COUNT]; - - uint8_t luma_log2_weight_denom; - uint8_t chroma_log2_weight_denom; - - uint8_t 
luma_weight_l0_flag[H264_MAX_REFS]; - int8_t luma_weight_l0[H264_MAX_REFS]; - int8_t luma_offset_l0[H264_MAX_REFS]; - uint8_t chroma_weight_l0_flag[H264_MAX_REFS]; - int8_t chroma_weight_l0[H264_MAX_REFS][2]; - int8_t chroma_offset_l0[H264_MAX_REFS][2]; - - uint8_t luma_weight_l1_flag[H264_MAX_REFS]; - int8_t luma_weight_l1[H264_MAX_REFS]; - int8_t luma_offset_l1[H264_MAX_REFS]; - uint8_t chroma_weight_l1_flag[H264_MAX_REFS]; - int8_t chroma_weight_l1[H264_MAX_REFS][2]; - int8_t chroma_offset_l1[H264_MAX_REFS][2]; - - uint8_t no_output_of_prior_pics_flag; - uint8_t long_term_reference_flag; - - uint8_t adaptive_ref_pic_marking_mode_flag; - struct { - uint8_t memory_management_control_operation; - int32_t difference_of_pic_nums_minus1; - uint8_t long_term_pic_num; - uint8_t long_term_frame_idx; - uint8_t max_long_term_frame_idx_plus1; - } mmco[H264_MAX_MMCO_COUNT]; - - uint8_t cabac_init_idc; - - int8_t slice_qp_delta; - - uint8_t sp_for_switch_flag; - int8_t slice_qs_delta; - - uint8_t disable_deblocking_filter_idc; - int8_t slice_alpha_c0_offset_div2; - int8_t slice_beta_offset_div2; - - uint16_t slice_group_change_cycle; -} H264RawSliceHeader; - -typedef struct H264RawSlice { - H264RawSliceHeader header; - - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; - int data_bit_start; -} H264RawSlice; - -typedef struct H264RawFiller { - H264RawNALUnitHeader nal_unit_header; - - uint32_t filler_size; -} H264RawFiller; - - -typedef struct CodedBitstreamH264Context { - // Reader/writer context in common with the H.265 implementation. - CodedBitstreamH2645Context common; - - // All currently available parameter sets. These are updated when - // any parameter set NAL unit is read/written with this context. - AVBufferRef *sps_ref[H264_MAX_SPS_COUNT]; - AVBufferRef *pps_ref[H264_MAX_PPS_COUNT]; - H264RawSPS *sps[H264_MAX_SPS_COUNT]; - H264RawPPS *pps[H264_MAX_PPS_COUNT]; - - // The currently active parameter sets. 
These are updated when any - // NAL unit refers to the relevant parameter set. These pointers - // must also be present in the arrays above. - const H264RawSPS *active_sps; - const H264RawPPS *active_pps; - - // The NAL unit type of the most recent normal slice. This is required - // to be able to read/write auxiliary slices, because IdrPicFlag is - // otherwise unknown. - uint8_t last_slice_nal_unit_type; -} CodedBitstreamH264Context; - -#endif /* AVCODEC_CBS_H264_H */ diff --git a/third-party/cbs/include/cbs/cbs_h2645.h b/third-party/cbs/include/cbs/cbs_h2645.h deleted file mode 100644 index f4c987a5119..00000000000 --- a/third-party/cbs/include/cbs/cbs_h2645.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_H2645_H -#define AVCODEC_CBS_H2645_H - -#include "h2645_parse.h" - - -typedef struct CodedBitstreamH2645Context { - // If set, the stream being read is in MP4 (AVCC/HVCC) format. If not - // set, the stream is assumed to be in annex B format. - int mp4; - // Size in bytes of the NAL length field for MP4 format. - int nal_length_size; - // Packet reader. 
- H2645Packet read_packet; -} CodedBitstreamH2645Context; - - -#endif /* AVCODEC_CBS_H2645_H */ diff --git a/third-party/cbs/include/cbs/cbs_h265.h b/third-party/cbs/include/cbs/cbs_h265.h deleted file mode 100644 index f7cbd4970d0..00000000000 --- a/third-party/cbs/include/cbs/cbs_h265.h +++ /dev/null @@ -1,700 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_H265_H -#define AVCODEC_CBS_H265_H - -#include -#include - -#include "cbs_h2645.h" -#include "cbs_sei.h" -#include "hevc.h" - -typedef struct H265RawNALUnitHeader { - uint8_t nal_unit_type; - uint8_t nuh_layer_id; - uint8_t nuh_temporal_id_plus1; -} H265RawNALUnitHeader; - -typedef struct H265RawProfileTierLevel { - uint8_t general_profile_space; - uint8_t general_tier_flag; - uint8_t general_profile_idc; - - uint8_t general_profile_compatibility_flag[32]; - - uint8_t general_progressive_source_flag; - uint8_t general_interlaced_source_flag; - uint8_t general_non_packed_constraint_flag; - uint8_t general_frame_only_constraint_flag; - - uint8_t general_max_12bit_constraint_flag; - uint8_t general_max_10bit_constraint_flag; - uint8_t general_max_8bit_constraint_flag; - uint8_t general_max_422chroma_constraint_flag; - uint8_t general_max_420chroma_constraint_flag; - 
uint8_t general_max_monochrome_constraint_flag; - uint8_t general_intra_constraint_flag; - uint8_t general_one_picture_only_constraint_flag; - uint8_t general_lower_bit_rate_constraint_flag; - uint8_t general_max_14bit_constraint_flag; - - uint8_t general_inbld_flag; - - uint8_t general_level_idc; - - uint8_t sub_layer_profile_present_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_level_present_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_profile_space[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_tier_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_profile_idc[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_profile_compatibility_flag[HEVC_MAX_SUB_LAYERS][32]; - - uint8_t sub_layer_progressive_source_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_interlaced_source_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_non_packed_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_frame_only_constraint_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_max_12bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_10bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_8bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_422chroma_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_420chroma_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_monochrome_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_intra_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_one_picture_only_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_lower_bit_rate_constraint_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t sub_layer_max_14bit_constraint_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_inbld_flag[HEVC_MAX_SUB_LAYERS]; - - uint8_t sub_layer_level_idc[HEVC_MAX_SUB_LAYERS]; -} H265RawProfileTierLevel; - -typedef struct H265RawSubLayerHRDParameters { - uint32_t bit_rate_value_minus1[HEVC_MAX_CPB_CNT]; - uint32_t cpb_size_value_minus1[HEVC_MAX_CPB_CNT]; - uint32_t cpb_size_du_value_minus1[HEVC_MAX_CPB_CNT]; - uint32_t 
bit_rate_du_value_minus1[HEVC_MAX_CPB_CNT]; - uint8_t cbr_flag[HEVC_MAX_CPB_CNT]; -} H265RawSubLayerHRDParameters; - -typedef struct H265RawHRDParameters { - uint8_t nal_hrd_parameters_present_flag; - uint8_t vcl_hrd_parameters_present_flag; - - uint8_t sub_pic_hrd_params_present_flag; - uint8_t tick_divisor_minus2; - uint8_t du_cpb_removal_delay_increment_length_minus1; - uint8_t sub_pic_cpb_params_in_pic_timing_sei_flag; - uint8_t dpb_output_delay_du_length_minus1; - - uint8_t bit_rate_scale; - uint8_t cpb_size_scale; - uint8_t cpb_size_du_scale; - - uint8_t initial_cpb_removal_delay_length_minus1; - uint8_t au_cpb_removal_delay_length_minus1; - uint8_t dpb_output_delay_length_minus1; - - uint8_t fixed_pic_rate_general_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t fixed_pic_rate_within_cvs_flag[HEVC_MAX_SUB_LAYERS]; - uint16_t elemental_duration_in_tc_minus1[HEVC_MAX_SUB_LAYERS]; - uint8_t low_delay_hrd_flag[HEVC_MAX_SUB_LAYERS]; - uint8_t cpb_cnt_minus1[HEVC_MAX_SUB_LAYERS]; - H265RawSubLayerHRDParameters nal_sub_layer_hrd_parameters[HEVC_MAX_SUB_LAYERS]; - H265RawSubLayerHRDParameters vcl_sub_layer_hrd_parameters[HEVC_MAX_SUB_LAYERS]; -} H265RawHRDParameters; - -typedef struct H265RawVUI { - uint8_t aspect_ratio_info_present_flag; - uint8_t aspect_ratio_idc; - uint16_t sar_width; - uint16_t sar_height; - - uint8_t overscan_info_present_flag; - uint8_t overscan_appropriate_flag; - - uint8_t video_signal_type_present_flag; - uint8_t video_format; - uint8_t video_full_range_flag; - uint8_t colour_description_present_flag; - uint8_t colour_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint8_t chroma_loc_info_present_flag; - uint8_t chroma_sample_loc_type_top_field; - uint8_t chroma_sample_loc_type_bottom_field; - - uint8_t neutral_chroma_indication_flag; - uint8_t field_seq_flag; - uint8_t frame_field_info_present_flag; - - uint8_t default_display_window_flag; - uint16_t def_disp_win_left_offset; - uint16_t def_disp_win_right_offset; - 
uint16_t def_disp_win_top_offset; - uint16_t def_disp_win_bottom_offset; - - uint8_t vui_timing_info_present_flag; - uint32_t vui_num_units_in_tick; - uint32_t vui_time_scale; - uint8_t vui_poc_proportional_to_timing_flag; - uint32_t vui_num_ticks_poc_diff_one_minus1; - uint8_t vui_hrd_parameters_present_flag; - H265RawHRDParameters hrd_parameters; - - uint8_t bitstream_restriction_flag; - uint8_t tiles_fixed_structure_flag; - uint8_t motion_vectors_over_pic_boundaries_flag; - uint8_t restricted_ref_pic_lists_flag; - uint16_t min_spatial_segmentation_idc; - uint8_t max_bytes_per_pic_denom; - uint8_t max_bits_per_min_cu_denom; - uint8_t log2_max_mv_length_horizontal; - uint8_t log2_max_mv_length_vertical; -} H265RawVUI; - -typedef struct H265RawExtensionData { - uint8_t *data; - AVBufferRef *data_ref; - size_t bit_length; -} H265RawExtensionData; - -typedef struct H265RawVPS { - H265RawNALUnitHeader nal_unit_header; - - uint8_t vps_video_parameter_set_id; - - uint8_t vps_base_layer_internal_flag; - uint8_t vps_base_layer_available_flag; - uint8_t vps_max_layers_minus1; - uint8_t vps_max_sub_layers_minus1; - uint8_t vps_temporal_id_nesting_flag; - - H265RawProfileTierLevel profile_tier_level; - - uint8_t vps_sub_layer_ordering_info_present_flag; - uint8_t vps_max_dec_pic_buffering_minus1[HEVC_MAX_SUB_LAYERS]; - uint8_t vps_max_num_reorder_pics[HEVC_MAX_SUB_LAYERS]; - uint32_t vps_max_latency_increase_plus1[HEVC_MAX_SUB_LAYERS]; - - uint8_t vps_max_layer_id; - uint16_t vps_num_layer_sets_minus1; - uint8_t layer_id_included_flag[HEVC_MAX_LAYER_SETS][HEVC_MAX_LAYERS]; - - uint8_t vps_timing_info_present_flag; - uint32_t vps_num_units_in_tick; - uint32_t vps_time_scale; - uint8_t vps_poc_proportional_to_timing_flag; - uint32_t vps_num_ticks_poc_diff_one_minus1; - uint16_t vps_num_hrd_parameters; - uint16_t hrd_layer_set_idx[HEVC_MAX_LAYER_SETS]; - uint8_t cprms_present_flag[HEVC_MAX_LAYER_SETS]; - H265RawHRDParameters hrd_parameters[HEVC_MAX_LAYER_SETS]; - - uint8_t 
vps_extension_flag; - H265RawExtensionData extension_data; -} H265RawVPS; - -typedef struct H265RawSTRefPicSet { - uint8_t inter_ref_pic_set_prediction_flag; - - uint8_t delta_idx_minus1; - uint8_t delta_rps_sign; - uint16_t abs_delta_rps_minus1; - - uint8_t used_by_curr_pic_flag[HEVC_MAX_REFS]; - uint8_t use_delta_flag[HEVC_MAX_REFS]; - - uint8_t num_negative_pics; - uint8_t num_positive_pics; - uint16_t delta_poc_s0_minus1[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_s0_flag[HEVC_MAX_REFS]; - uint16_t delta_poc_s1_minus1[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_s1_flag[HEVC_MAX_REFS]; -} H265RawSTRefPicSet; - -typedef struct H265RawScalingList { - uint8_t scaling_list_pred_mode_flag[4][6]; - uint8_t scaling_list_pred_matrix_id_delta[4][6]; - int16_t scaling_list_dc_coef_minus8[4][6]; - int8_t scaling_list_delta_coeff[4][6][64]; -} H265RawScalingList; - -typedef struct H265RawSPS { - H265RawNALUnitHeader nal_unit_header; - - uint8_t sps_video_parameter_set_id; - - uint8_t sps_max_sub_layers_minus1; - uint8_t sps_temporal_id_nesting_flag; - - H265RawProfileTierLevel profile_tier_level; - - uint8_t sps_seq_parameter_set_id; - - uint8_t chroma_format_idc; - uint8_t separate_colour_plane_flag; - - uint16_t pic_width_in_luma_samples; - uint16_t pic_height_in_luma_samples; - - uint8_t conformance_window_flag; - uint16_t conf_win_left_offset; - uint16_t conf_win_right_offset; - uint16_t conf_win_top_offset; - uint16_t conf_win_bottom_offset; - - uint8_t bit_depth_luma_minus8; - uint8_t bit_depth_chroma_minus8; - - uint8_t log2_max_pic_order_cnt_lsb_minus4; - - uint8_t sps_sub_layer_ordering_info_present_flag; - uint8_t sps_max_dec_pic_buffering_minus1[HEVC_MAX_SUB_LAYERS]; - uint8_t sps_max_num_reorder_pics[HEVC_MAX_SUB_LAYERS]; - uint32_t sps_max_latency_increase_plus1[HEVC_MAX_SUB_LAYERS]; - - uint8_t log2_min_luma_coding_block_size_minus3; - uint8_t log2_diff_max_min_luma_coding_block_size; - uint8_t log2_min_luma_transform_block_size_minus2; - uint8_t 
log2_diff_max_min_luma_transform_block_size; - uint8_t max_transform_hierarchy_depth_inter; - uint8_t max_transform_hierarchy_depth_intra; - - uint8_t scaling_list_enabled_flag; - uint8_t sps_scaling_list_data_present_flag; - H265RawScalingList scaling_list; - - uint8_t amp_enabled_flag; - uint8_t sample_adaptive_offset_enabled_flag; - - uint8_t pcm_enabled_flag; - uint8_t pcm_sample_bit_depth_luma_minus1; - uint8_t pcm_sample_bit_depth_chroma_minus1; - uint8_t log2_min_pcm_luma_coding_block_size_minus3; - uint8_t log2_diff_max_min_pcm_luma_coding_block_size; - uint8_t pcm_loop_filter_disabled_flag; - - uint8_t num_short_term_ref_pic_sets; - H265RawSTRefPicSet st_ref_pic_set[HEVC_MAX_SHORT_TERM_REF_PIC_SETS]; - - uint8_t long_term_ref_pics_present_flag; - uint8_t num_long_term_ref_pics_sps; - uint16_t lt_ref_pic_poc_lsb_sps[HEVC_MAX_LONG_TERM_REF_PICS]; - uint8_t used_by_curr_pic_lt_sps_flag[HEVC_MAX_LONG_TERM_REF_PICS]; - - uint8_t sps_temporal_mvp_enabled_flag; - uint8_t strong_intra_smoothing_enabled_flag; - - uint8_t vui_parameters_present_flag; - H265RawVUI vui; - - uint8_t sps_extension_present_flag; - uint8_t sps_range_extension_flag; - uint8_t sps_multilayer_extension_flag; - uint8_t sps_3d_extension_flag; - uint8_t sps_scc_extension_flag; - uint8_t sps_extension_4bits; - - H265RawExtensionData extension_data; - - // Range extension. - uint8_t transform_skip_rotation_enabled_flag; - uint8_t transform_skip_context_enabled_flag; - uint8_t implicit_rdpcm_enabled_flag; - uint8_t explicit_rdpcm_enabled_flag; - uint8_t extended_precision_processing_flag; - uint8_t intra_smoothing_disabled_flag; - uint8_t high_precision_offsets_enabled_flag; - uint8_t persistent_rice_adaptation_enabled_flag; - uint8_t cabac_bypass_alignment_enabled_flag; - - // Screen content coding extension. 
- uint8_t sps_curr_pic_ref_enabled_flag; - uint8_t palette_mode_enabled_flag; - uint8_t palette_max_size; - uint8_t delta_palette_max_predictor_size; - uint8_t sps_palette_predictor_initializer_present_flag; - uint8_t sps_num_palette_predictor_initializer_minus1; - uint16_t sps_palette_predictor_initializers[3][128]; - - uint8_t motion_vector_resolution_control_idc; - uint8_t intra_boundary_filtering_disable_flag; -} H265RawSPS; - -typedef struct H265RawPPS { - H265RawNALUnitHeader nal_unit_header; - - uint8_t pps_pic_parameter_set_id; - uint8_t pps_seq_parameter_set_id; - - uint8_t dependent_slice_segments_enabled_flag; - uint8_t output_flag_present_flag; - uint8_t num_extra_slice_header_bits; - uint8_t sign_data_hiding_enabled_flag; - uint8_t cabac_init_present_flag; - - uint8_t num_ref_idx_l0_default_active_minus1; - uint8_t num_ref_idx_l1_default_active_minus1; - - int8_t init_qp_minus26; - - uint8_t constrained_intra_pred_flag; - uint8_t transform_skip_enabled_flag; - uint8_t cu_qp_delta_enabled_flag; - uint8_t diff_cu_qp_delta_depth; - - int8_t pps_cb_qp_offset; - int8_t pps_cr_qp_offset; - uint8_t pps_slice_chroma_qp_offsets_present_flag; - - uint8_t weighted_pred_flag; - uint8_t weighted_bipred_flag; - - uint8_t transquant_bypass_enabled_flag; - uint8_t tiles_enabled_flag; - uint8_t entropy_coding_sync_enabled_flag; - - uint8_t num_tile_columns_minus1; - uint8_t num_tile_rows_minus1; - uint8_t uniform_spacing_flag; - uint16_t column_width_minus1[HEVC_MAX_TILE_COLUMNS]; - uint16_t row_height_minus1[HEVC_MAX_TILE_ROWS]; - uint8_t loop_filter_across_tiles_enabled_flag; - - uint8_t pps_loop_filter_across_slices_enabled_flag; - uint8_t deblocking_filter_control_present_flag; - uint8_t deblocking_filter_override_enabled_flag; - uint8_t pps_deblocking_filter_disabled_flag; - int8_t pps_beta_offset_div2; - int8_t pps_tc_offset_div2; - - uint8_t pps_scaling_list_data_present_flag; - H265RawScalingList scaling_list; - - uint8_t lists_modification_present_flag; - 
uint8_t log2_parallel_merge_level_minus2; - - uint8_t slice_segment_header_extension_present_flag; - - uint8_t pps_extension_present_flag; - uint8_t pps_range_extension_flag; - uint8_t pps_multilayer_extension_flag; - uint8_t pps_3d_extension_flag; - uint8_t pps_scc_extension_flag; - uint8_t pps_extension_4bits; - - H265RawExtensionData extension_data; - - // Range extension. - uint8_t log2_max_transform_skip_block_size_minus2; - uint8_t cross_component_prediction_enabled_flag; - uint8_t chroma_qp_offset_list_enabled_flag; - uint8_t diff_cu_chroma_qp_offset_depth; - uint8_t chroma_qp_offset_list_len_minus1; - int8_t cb_qp_offset_list[6]; - int8_t cr_qp_offset_list[6]; - uint8_t log2_sao_offset_scale_luma; - uint8_t log2_sao_offset_scale_chroma; - - // Screen content coding extension. - uint8_t pps_curr_pic_ref_enabled_flag; - uint8_t residual_adaptive_colour_transform_enabled_flag; - uint8_t pps_slice_act_qp_offsets_present_flag; - int8_t pps_act_y_qp_offset_plus5; - int8_t pps_act_cb_qp_offset_plus5; - int8_t pps_act_cr_qp_offset_plus3; - - uint8_t pps_palette_predictor_initializer_present_flag; - uint8_t pps_num_palette_predictor_initializer; - uint8_t monochrome_palette_flag; - uint8_t luma_bit_depth_entry_minus8; - uint8_t chroma_bit_depth_entry_minus8; - uint16_t pps_palette_predictor_initializers[3][128]; -} H265RawPPS; - -typedef struct H265RawAUD { - H265RawNALUnitHeader nal_unit_header; - - uint8_t pic_type; -} H265RawAUD; - -typedef struct H265RawSliceHeader { - H265RawNALUnitHeader nal_unit_header; - - uint8_t first_slice_segment_in_pic_flag; - uint8_t no_output_of_prior_pics_flag; - uint8_t slice_pic_parameter_set_id; - - uint8_t dependent_slice_segment_flag; - uint16_t slice_segment_address; - - uint8_t slice_reserved_flag[8]; - uint8_t slice_type; - - uint8_t pic_output_flag; - uint8_t colour_plane_id; - - uint16_t slice_pic_order_cnt_lsb; - - uint8_t short_term_ref_pic_set_sps_flag; - H265RawSTRefPicSet short_term_ref_pic_set; - uint8_t 
short_term_ref_pic_set_idx; - - uint8_t num_long_term_sps; - uint8_t num_long_term_pics; - uint8_t lt_idx_sps[HEVC_MAX_REFS]; - uint8_t poc_lsb_lt[HEVC_MAX_REFS]; - uint8_t used_by_curr_pic_lt_flag[HEVC_MAX_REFS]; - uint8_t delta_poc_msb_present_flag[HEVC_MAX_REFS]; - uint32_t delta_poc_msb_cycle_lt[HEVC_MAX_REFS]; - - uint8_t slice_temporal_mvp_enabled_flag; - - uint8_t slice_sao_luma_flag; - uint8_t slice_sao_chroma_flag; - - uint8_t num_ref_idx_active_override_flag; - uint8_t num_ref_idx_l0_active_minus1; - uint8_t num_ref_idx_l1_active_minus1; - - uint8_t ref_pic_list_modification_flag_l0; - uint8_t list_entry_l0[HEVC_MAX_REFS]; - uint8_t ref_pic_list_modification_flag_l1; - uint8_t list_entry_l1[HEVC_MAX_REFS]; - - uint8_t mvd_l1_zero_flag; - uint8_t cabac_init_flag; - uint8_t collocated_from_l0_flag; - uint8_t collocated_ref_idx; - - uint8_t luma_log2_weight_denom; - int8_t delta_chroma_log2_weight_denom; - uint8_t luma_weight_l0_flag[HEVC_MAX_REFS]; - uint8_t chroma_weight_l0_flag[HEVC_MAX_REFS]; - int8_t delta_luma_weight_l0[HEVC_MAX_REFS]; - int16_t luma_offset_l0[HEVC_MAX_REFS]; - int8_t delta_chroma_weight_l0[HEVC_MAX_REFS][2]; - int16_t chroma_offset_l0[HEVC_MAX_REFS][2]; - uint8_t luma_weight_l1_flag[HEVC_MAX_REFS]; - uint8_t chroma_weight_l1_flag[HEVC_MAX_REFS]; - int8_t delta_luma_weight_l1[HEVC_MAX_REFS]; - int16_t luma_offset_l1[HEVC_MAX_REFS]; - int8_t delta_chroma_weight_l1[HEVC_MAX_REFS][2]; - int16_t chroma_offset_l1[HEVC_MAX_REFS][2]; - - uint8_t five_minus_max_num_merge_cand; - uint8_t use_integer_mv_flag; - - int8_t slice_qp_delta; - int8_t slice_cb_qp_offset; - int8_t slice_cr_qp_offset; - int8_t slice_act_y_qp_offset; - int8_t slice_act_cb_qp_offset; - int8_t slice_act_cr_qp_offset; - uint8_t cu_chroma_qp_offset_enabled_flag; - - uint8_t deblocking_filter_override_flag; - uint8_t slice_deblocking_filter_disabled_flag; - int8_t slice_beta_offset_div2; - int8_t slice_tc_offset_div2; - uint8_t slice_loop_filter_across_slices_enabled_flag; - - 
uint16_t num_entry_point_offsets; - uint8_t offset_len_minus1; - uint32_t entry_point_offset_minus1[HEVC_MAX_ENTRY_POINT_OFFSETS]; - - uint16_t slice_segment_header_extension_length; - uint8_t slice_segment_header_extension_data_byte[256]; -} H265RawSliceHeader; - - -typedef struct H265RawSlice { - H265RawSliceHeader header; - - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; - int data_bit_start; -} H265RawSlice; - - -typedef struct H265RawSEIBufferingPeriod { - uint8_t bp_seq_parameter_set_id; - uint8_t irap_cpb_params_present_flag; - uint32_t cpb_delay_offset; - uint32_t dpb_delay_offset; - uint8_t concatenation_flag; - uint32_t au_cpb_removal_delay_delta_minus1; - - uint32_t nal_initial_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t nal_initial_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - uint32_t nal_initial_alt_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t nal_initial_alt_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - - uint32_t vcl_initial_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t vcl_initial_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - uint32_t vcl_initial_alt_cpb_removal_delay[HEVC_MAX_CPB_CNT]; - uint32_t vcl_initial_alt_cpb_removal_offset[HEVC_MAX_CPB_CNT]; - - uint8_t use_alt_cpb_params_flag; -} H265RawSEIBufferingPeriod; - -typedef struct H265RawSEIPicTiming { - uint8_t pic_struct; - uint8_t source_scan_type; - uint8_t duplicate_flag; - - uint32_t au_cpb_removal_delay_minus1; - uint32_t pic_dpb_output_delay; - uint32_t pic_dpb_output_du_delay; - - uint16_t num_decoding_units_minus1; - uint8_t du_common_cpb_removal_delay_flag; - uint32_t du_common_cpb_removal_delay_increment_minus1; - uint16_t num_nalus_in_du_minus1[HEVC_MAX_SLICE_SEGMENTS]; - uint32_t du_cpb_removal_delay_increment_minus1[HEVC_MAX_SLICE_SEGMENTS]; -} H265RawSEIPicTiming; - -typedef struct H265RawSEIPanScanRect { - uint32_t pan_scan_rect_id; - uint8_t pan_scan_rect_cancel_flag; - uint8_t pan_scan_cnt_minus1; - int32_t pan_scan_rect_left_offset[3]; - int32_t 
pan_scan_rect_right_offset[3]; - int32_t pan_scan_rect_top_offset[3]; - int32_t pan_scan_rect_bottom_offset[3]; - uint16_t pan_scan_rect_persistence_flag; -} H265RawSEIPanScanRect; - -typedef struct H265RawSEIRecoveryPoint { - int16_t recovery_poc_cnt; - uint8_t exact_match_flag; - uint8_t broken_link_flag; -} H265RawSEIRecoveryPoint; - -typedef struct H265RawFilmGrainCharacteristics { - uint8_t film_grain_characteristics_cancel_flag; - uint8_t film_grain_model_id; - uint8_t separate_colour_description_present_flag; - uint8_t film_grain_bit_depth_luma_minus8; - uint8_t film_grain_bit_depth_chroma_minus8; - uint8_t film_grain_full_range_flag; - uint8_t film_grain_colour_primaries; - uint8_t film_grain_transfer_characteristics; - uint8_t film_grain_matrix_coeffs; - uint8_t blending_mode_id; - uint8_t log2_scale_factor; - uint8_t comp_model_present_flag[3]; - uint8_t num_intensity_intervals_minus1[3]; - uint8_t num_model_values_minus1[3]; - uint8_t intensity_interval_lower_bound[3][256]; - uint8_t intensity_interval_upper_bound[3][256]; - int16_t comp_model_value[3][256][6]; - uint8_t film_grain_characteristics_persistence_flag; -} H265RawFilmGrainCharacteristics; - -typedef struct H265RawSEIDisplayOrientation { - uint8_t display_orientation_cancel_flag; - uint8_t hor_flip; - uint8_t ver_flip; - uint16_t anticlockwise_rotation; - uint16_t display_orientation_repetition_period; - uint8_t display_orientation_persistence_flag; -} H265RawSEIDisplayOrientation; - -typedef struct H265RawSEIActiveParameterSets { - uint8_t active_video_parameter_set_id; - uint8_t self_contained_cvs_flag; - uint8_t no_parameter_set_update_flag; - uint8_t num_sps_ids_minus1; - uint8_t active_seq_parameter_set_id[HEVC_MAX_SPS_COUNT]; - uint8_t layer_sps_idx[HEVC_MAX_LAYERS]; -} H265RawSEIActiveParameterSets; - -typedef struct H265RawSEIDecodedPictureHash { - uint8_t hash_type; - uint8_t picture_md5[3][16]; - uint16_t picture_crc[3]; - uint32_t picture_checksum[3]; -} 
H265RawSEIDecodedPictureHash; - -typedef struct H265RawSEITimeCode { - uint8_t num_clock_ts; - uint8_t clock_timestamp_flag[3]; - uint8_t units_field_based_flag[3]; - uint8_t counting_type[3]; - uint8_t full_timestamp_flag[3]; - uint8_t discontinuity_flag[3]; - uint8_t cnt_dropped_flag[3]; - uint16_t n_frames[3]; - uint8_t seconds_value[3]; - uint8_t minutes_value[3]; - uint8_t hours_value[3]; - uint8_t seconds_flag[3]; - uint8_t minutes_flag[3]; - uint8_t hours_flag[3]; - uint8_t time_offset_length[3]; - int32_t time_offset_value[3]; -} H265RawSEITimeCode; - -typedef struct H265RawSEIAlphaChannelInfo { - uint8_t alpha_channel_cancel_flag; - uint8_t alpha_channel_use_idc; - uint8_t alpha_channel_bit_depth_minus8; - uint16_t alpha_transparent_value; - uint16_t alpha_opaque_value; - uint8_t alpha_channel_incr_flag; - uint8_t alpha_channel_clip_flag; - uint8_t alpha_channel_clip_type_flag; -} H265RawSEIAlphaChannelInfo; - -typedef struct H265RawSEI { - H265RawNALUnitHeader nal_unit_header; - SEIRawMessageList message_list; -} H265RawSEI; - -typedef struct CodedBitstreamH265Context { - // Reader/writer context in common with the H.264 implementation. - CodedBitstreamH2645Context common; - - // All currently available parameter sets. These are updated when - // any parameter set NAL unit is read/written with this context. - AVBufferRef *vps_ref[HEVC_MAX_VPS_COUNT]; - AVBufferRef *sps_ref[HEVC_MAX_SPS_COUNT]; - AVBufferRef *pps_ref[HEVC_MAX_PPS_COUNT]; - H265RawVPS *vps[HEVC_MAX_VPS_COUNT]; - H265RawSPS *sps[HEVC_MAX_SPS_COUNT]; - H265RawPPS *pps[HEVC_MAX_PPS_COUNT]; - - // The currently active parameter sets. These are updated when any - // NAL unit refers to the relevant parameter set. These pointers - // must also be present in the arrays above. 
- const H265RawVPS *active_vps; - const H265RawSPS *active_sps; - const H265RawPPS *active_pps; -} CodedBitstreamH265Context; - - -#endif /* AVCODEC_CBS_H265_H */ diff --git a/third-party/cbs/include/cbs/cbs_jpeg.h b/third-party/cbs/include/cbs/cbs_jpeg.h deleted file mode 100644 index 9dbebd259fd..00000000000 --- a/third-party/cbs/include/cbs/cbs_jpeg.h +++ /dev/null @@ -1,123 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_JPEG_H -#define AVCODEC_CBS_JPEG_H - -#include -#include - -#include "libavutil/buffer.h" - - -enum { - JPEG_MARKER_SOF0 = 0xc0, - JPEG_MARKER_SOF1 = 0xc1, - JPEG_MARKER_SOF2 = 0xc2, - JPEG_MARKER_SOF3 = 0xc3, - - JPEG_MARKER_DHT = 0xc4, - JPEG_MARKER_SOI = 0xd8, - JPEG_MARKER_EOI = 0xd9, - JPEG_MARKER_SOS = 0xda, - JPEG_MARKER_DQT = 0xdb, - - JPEG_MARKER_APPN = 0xe0, - JPEG_MARKER_JPGN = 0xf0, - JPEG_MARKER_COM = 0xfe, -}; - -enum { - JPEG_MAX_COMPONENTS = 255, - - JPEG_MAX_HEIGHT = 65535, - JPEG_MAX_WIDTH = 65535, -}; - - -typedef struct JPEGRawFrameHeader { - uint16_t Lf; - uint8_t P; - uint16_t Y; - uint16_t X; - uint16_t Nf; - - uint8_t C [JPEG_MAX_COMPONENTS]; - uint8_t H [JPEG_MAX_COMPONENTS]; - uint8_t V [JPEG_MAX_COMPONENTS]; - uint8_t Tq[JPEG_MAX_COMPONENTS]; -} JPEGRawFrameHeader; - 
-typedef struct JPEGRawScanHeader { - uint16_t Ls; - uint8_t Ns; - - uint8_t Cs[JPEG_MAX_COMPONENTS]; - uint8_t Td[JPEG_MAX_COMPONENTS]; - uint8_t Ta[JPEG_MAX_COMPONENTS]; - - uint8_t Ss; - uint8_t Se; - uint8_t Ah; - uint8_t Al; -} JPEGRawScanHeader; - -typedef struct JPEGRawScan { - JPEGRawScanHeader header; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; -} JPEGRawScan; - -typedef struct JPEGRawQuantisationTable { - uint8_t Pq; - uint8_t Tq; - uint16_t Q[64]; -} JPEGRawQuantisationTable; - -typedef struct JPEGRawQuantisationTableSpecification { - uint16_t Lq; - JPEGRawQuantisationTable table[4]; -} JPEGRawQuantisationTableSpecification; - -typedef struct JPEGRawHuffmanTable { - uint8_t Tc; - uint8_t Th; - uint8_t L[16]; - uint8_t V[256]; -} JPEGRawHuffmanTable; - -typedef struct JPEGRawHuffmanTableSpecification { - uint16_t Lh; - JPEGRawHuffmanTable table[8]; -} JPEGRawHuffmanTableSpecification; - -typedef struct JPEGRawApplicationData { - uint16_t Lp; - uint8_t *Ap; - AVBufferRef *Ap_ref; -} JPEGRawApplicationData; - -typedef struct JPEGRawComment { - uint16_t Lc; - uint8_t *Cm; - AVBufferRef *Cm_ref; -} JPEGRawComment; - - -#endif /* AVCODEC_CBS_JPEG_H */ diff --git a/third-party/cbs/include/cbs/cbs_mpeg2.h b/third-party/cbs/include/cbs/cbs_mpeg2.h deleted file mode 100644 index f7075a460dc..00000000000 --- a/third-party/cbs/include/cbs/cbs_mpeg2.h +++ /dev/null @@ -1,231 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_MPEG2_H -#define AVCODEC_CBS_MPEG2_H - -#include -#include - -#include "libavutil/buffer.h" - - -enum { - MPEG2_START_PICTURE = 0x00, - MPEG2_START_SLICE_MIN = 0x01, - MPEG2_START_SLICE_MAX = 0xaf, - MPEG2_START_USER_DATA = 0xb2, - MPEG2_START_SEQUENCE_HEADER = 0xb3, - MPEG2_START_SEQUENCE_ERROR = 0xb4, - MPEG2_START_EXTENSION = 0xb5, - MPEG2_START_SEQUENCE_END = 0xb7, - MPEG2_START_GROUP = 0xb8, -}; - -#define MPEG2_START_IS_SLICE(type) \ - ((type) >= MPEG2_START_SLICE_MIN && \ - (type) <= MPEG2_START_SLICE_MAX) - -enum { - MPEG2_EXTENSION_SEQUENCE = 0x1, - MPEG2_EXTENSION_SEQUENCE_DISPLAY = 0x2, - MPEG2_EXTENSION_QUANT_MATRIX = 0x3, - MPEG2_EXTENSION_COPYRIGHT = 0x4, - MPEG2_EXTENSION_SEQUENCE_SCALABLE = 0x5, - MPEG2_EXTENSION_PICTURE_DISPLAY = 0x7, - MPEG2_EXTENSION_PICTURE_CODING = 0x8, - MPEG2_EXTENSION_PICTURE_SPATIAL_SCALABLE = 0x9, - MPEG2_EXTENSION_PICTURE_TEMPORAL_SCALABLE = 0xa, - MPEG2_EXTENSION_CAMERA_PARAMETERS = 0xb, - MPEG2_EXTENSION_ITU_T = 0xc, -}; - - -typedef struct MPEG2RawSequenceHeader { - uint8_t sequence_header_code; - - uint16_t horizontal_size_value; - uint16_t vertical_size_value; - uint8_t aspect_ratio_information; - uint8_t frame_rate_code; - uint32_t bit_rate_value; - uint16_t vbv_buffer_size_value; - uint8_t constrained_parameters_flag; - - uint8_t load_intra_quantiser_matrix; - uint8_t intra_quantiser_matrix[64]; - uint8_t load_non_intra_quantiser_matrix; - uint8_t non_intra_quantiser_matrix[64]; -} MPEG2RawSequenceHeader; - -typedef struct MPEG2RawUserData { - uint8_t user_data_start_code; - - uint8_t *user_data; - AVBufferRef *user_data_ref; - size_t user_data_length; -} MPEG2RawUserData; - -typedef struct MPEG2RawSequenceExtension { - uint8_t profile_and_level_indication; - uint8_t 
progressive_sequence; - uint8_t chroma_format; - uint8_t horizontal_size_extension; - uint8_t vertical_size_extension; - uint16_t bit_rate_extension; - uint8_t vbv_buffer_size_extension; - uint8_t low_delay; - uint8_t frame_rate_extension_n; - uint8_t frame_rate_extension_d; -} MPEG2RawSequenceExtension; - -typedef struct MPEG2RawSequenceDisplayExtension { - uint8_t video_format; - - uint8_t colour_description; - uint8_t colour_primaries; - uint8_t transfer_characteristics; - uint8_t matrix_coefficients; - - uint16_t display_horizontal_size; - uint16_t display_vertical_size; -} MPEG2RawSequenceDisplayExtension; - -typedef struct MPEG2RawGroupOfPicturesHeader { - uint8_t group_start_code; - - uint32_t time_code; - uint8_t closed_gop; - uint8_t broken_link; -} MPEG2RawGroupOfPicturesHeader; - -typedef struct MPEG2RawExtraInformation { - uint8_t *extra_information; - AVBufferRef *extra_information_ref; - size_t extra_information_length; -} MPEG2RawExtraInformation; - -typedef struct MPEG2RawPictureHeader { - uint8_t picture_start_code; - - uint16_t temporal_reference; - uint8_t picture_coding_type; - uint16_t vbv_delay; - - uint8_t full_pel_forward_vector; - uint8_t forward_f_code; - uint8_t full_pel_backward_vector; - uint8_t backward_f_code; - - MPEG2RawExtraInformation extra_information_picture; -} MPEG2RawPictureHeader; - -typedef struct MPEG2RawPictureCodingExtension { - uint8_t f_code[2][2]; - - uint8_t intra_dc_precision; - uint8_t picture_structure; - uint8_t top_field_first; - uint8_t frame_pred_frame_dct; - uint8_t concealment_motion_vectors; - uint8_t q_scale_type; - uint8_t intra_vlc_format; - uint8_t alternate_scan; - uint8_t repeat_first_field; - uint8_t chroma_420_type; - uint8_t progressive_frame; - - uint8_t composite_display_flag; - uint8_t v_axis; - uint8_t field_sequence; - uint8_t sub_carrier; - uint8_t burst_amplitude; - uint8_t sub_carrier_phase; -} MPEG2RawPictureCodingExtension; - -typedef struct MPEG2RawQuantMatrixExtension { - uint8_t 
load_intra_quantiser_matrix; - uint8_t intra_quantiser_matrix[64]; - uint8_t load_non_intra_quantiser_matrix; - uint8_t non_intra_quantiser_matrix[64]; - uint8_t load_chroma_intra_quantiser_matrix; - uint8_t chroma_intra_quantiser_matrix[64]; - uint8_t load_chroma_non_intra_quantiser_matrix; - uint8_t chroma_non_intra_quantiser_matrix[64]; -} MPEG2RawQuantMatrixExtension; - -typedef struct MPEG2RawPictureDisplayExtension { - int16_t frame_centre_horizontal_offset[3]; - int16_t frame_centre_vertical_offset[3]; -} MPEG2RawPictureDisplayExtension; - -typedef struct MPEG2RawExtensionData { - uint8_t extension_start_code; - uint8_t extension_start_code_identifier; - - union { - MPEG2RawSequenceExtension sequence; - MPEG2RawSequenceDisplayExtension sequence_display; - MPEG2RawQuantMatrixExtension quant_matrix; - MPEG2RawPictureCodingExtension picture_coding; - MPEG2RawPictureDisplayExtension picture_display; - } data; -} MPEG2RawExtensionData; - -typedef struct MPEG2RawSliceHeader { - uint8_t slice_vertical_position; - - uint8_t slice_vertical_position_extension; - uint8_t priority_breakpoint; - - uint8_t quantiser_scale_code; - - uint8_t slice_extension_flag; - uint8_t intra_slice; - uint8_t slice_picture_id_enable; - uint8_t slice_picture_id; - - MPEG2RawExtraInformation extra_information_slice; -} MPEG2RawSliceHeader; - -typedef struct MPEG2RawSlice { - MPEG2RawSliceHeader header; - - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; - int data_bit_start; -} MPEG2RawSlice; - -typedef struct MPEG2RawSequenceEnd { - uint8_t sequence_end_code; -} MPEG2RawSequenceEnd; - - -typedef struct CodedBitstreamMPEG2Context { - // Elements stored in headers which are required for other decoding. 
- uint16_t horizontal_size; - uint16_t vertical_size; - uint8_t scalable; - uint8_t scalable_mode; - uint8_t progressive_sequence; - uint8_t number_of_frame_centre_offsets; -} CodedBitstreamMPEG2Context; - - -#endif /* AVCODEC_CBS_MPEG2_H */ diff --git a/third-party/cbs/include/cbs/cbs_sei.h b/third-party/cbs/include/cbs/cbs_sei.h deleted file mode 100644 index c7a7a95be09..00000000000 --- a/third-party/cbs/include/cbs/cbs_sei.h +++ /dev/null @@ -1,199 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_SEI_H -#define AVCODEC_CBS_SEI_H - -#include -#include - -#include "libavutil/buffer.h" - -#include "cbs.h" -#include "sei.h" - - -typedef struct SEIRawFillerPayload { - uint32_t payload_size; -} SEIRawFillerPayload; - -typedef struct SEIRawUserDataRegistered { - uint8_t itu_t_t35_country_code; - uint8_t itu_t_t35_country_code_extension_byte; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_length; -} SEIRawUserDataRegistered; - -typedef struct SEIRawUserDataUnregistered { - uint8_t uuid_iso_iec_11578[16]; - uint8_t *data; - AVBufferRef *data_ref; - size_t data_length; -} SEIRawUserDataUnregistered; - -typedef struct SEIRawMasteringDisplayColourVolume { - uint16_t display_primaries_x[3]; - uint16_t display_primaries_y[3]; - uint16_t white_point_x; - uint16_t white_point_y; - uint32_t max_display_mastering_luminance; - uint32_t min_display_mastering_luminance; -} SEIRawMasteringDisplayColourVolume; - -typedef struct SEIRawContentLightLevelInfo { - uint16_t max_content_light_level; - uint16_t max_pic_average_light_level; -} SEIRawContentLightLevelInfo; - -typedef struct SEIRawAlternativeTransferCharacteristics { - uint8_t preferred_transfer_characteristics; -} SEIRawAlternativeTransferCharacteristics; - -typedef struct SEIRawMessage { - uint32_t payload_type; - uint32_t payload_size; - void *payload; - AVBufferRef *payload_ref; - uint8_t *extension_data; - AVBufferRef *extension_data_ref; - size_t extension_bit_length; -} SEIRawMessage; - -typedef struct SEIRawMessageList { - SEIRawMessage *messages; - int nb_messages; - int nb_messages_allocated; -} SEIRawMessageList; - - -typedef struct SEIMessageState { - // The type of the payload being written. 
- uint32_t payload_type; - // When reading, contains the size of the payload to allow finding the - // end of variable-length fields (such as user_data_payload_byte[]). - // (When writing, the size will be derived from the total number of - // bytes actually written.) - uint32_t payload_size; - // When writing, indicates that payload extension data is present so - // all extended fields must be written. May be updated by the writer - // to indicate that extended fields have been written, so the extension - // end bits must be written too. - uint8_t extension_present; -} SEIMessageState; - -struct GetBitContext; -struct PutBitContext; - -typedef int (*SEIMessageReadFunction)(CodedBitstreamContext *ctx, - struct GetBitContext *rw, - void *current, - SEIMessageState *sei); - -typedef int (*SEIMessageWriteFunction)(CodedBitstreamContext *ctx, - struct PutBitContext *rw, - void *current, - SEIMessageState *sei); - -typedef struct SEIMessageTypeDescriptor { - // Payload type for the message. (-1 in this field ends a list.) - int type; - // Valid in a prefix SEI NAL unit (always for H.264). - uint8_t prefix; - // Valid in a suffix SEI NAL unit (never for H.264). - uint8_t suffix; - // Size of the decomposed structure. - size_t size; - // Read bitstream into SEI message. - SEIMessageReadFunction read; - // Write bitstream from SEI message. - SEIMessageWriteFunction write; -} SEIMessageTypeDescriptor; - -// Macro for the read/write pair. The clumsy cast is needed because the -// current pointer is typed in all of the read/write functions but has to -// be void here to fit all cases. -#define SEI_MESSAGE_RW(codec, name) \ - .read = (SEIMessageReadFunction) cbs_ ## codec ## _read_ ## name, \ - .write = (SEIMessageWriteFunction)cbs_ ## codec ## _write_ ## name - -// End-of-list sentinel element. -#define SEI_MESSAGE_TYPE_END { .type = -1 } - - -/** - * Find the type descriptor for the given payload type. - * - * Returns NULL if the payload type is not known. 
- */ -const SEIMessageTypeDescriptor *ff_cbs_sei_find_type(CodedBitstreamContext *ctx, - int payload_type); - -/** - * Allocate a new payload for the given SEI message. - */ -int ff_cbs_sei_alloc_message_payload(SEIRawMessage *message, - const SEIMessageTypeDescriptor *desc); - -/** - * Allocate a new empty SEI message in a message list. - * - * The new message is in place nb_messages - 1. - */ -int ff_cbs_sei_list_add(SEIRawMessageList *list); - -/** - * Free all SEI messages in a message list. - */ -void ff_cbs_sei_free_message_list(SEIRawMessageList *list); - -/** - * Add an SEI message to an access unit. - * - * Will add to an existing SEI NAL unit, or create a new one for the - * message if there is no suitable existing one. - * - * Takes a new reference to payload_buf, if set. If payload_buf is - * NULL then the new message will not be reference counted. - */ -int ff_cbs_sei_add_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - int prefix, - uint32_t payload_type, - void *payload_data, - AVBufferRef *payload_buf); - -/** - * Iterate over messages with the given payload type in an access unit. - * - * Set message to NULL in the first call. Returns 0 while more messages - * are available, AVERROR(ENOENT) when all messages have been found. - */ -int ff_cbs_sei_find_message(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type, - SEIRawMessage **message); - -/** - * Delete all messages with the given payload type from an access unit. - */ -void ff_cbs_sei_delete_message_type(CodedBitstreamContext *ctx, - CodedBitstreamFragment *au, - uint32_t payload_type); - -#endif /* AVCODEC_CBS_SEI_H */ diff --git a/third-party/cbs/include/cbs/cbs_vp9.h b/third-party/cbs/include/cbs/cbs_vp9.h deleted file mode 100644 index af15eb4bace..00000000000 --- a/third-party/cbs/include/cbs/cbs_vp9.h +++ /dev/null @@ -1,213 +0,0 @@ -/* - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CBS_VP9_H -#define AVCODEC_CBS_VP9_H - -#include -#include - -#include "cbs.h" - - -// Miscellaneous constants (section 3). -enum { - VP9_REFS_PER_FRAME = 3, - - VP9_MIN_TILE_WIDTH_B64 = 4, - VP9_MAX_TILE_WIDTH_B64 = 64, - - VP9_NUM_REF_FRAMES = 8, - VP9_MAX_REF_FRAMES = 4, - - VP9_MAX_SEGMENTS = 8, - VP9_SEG_LVL_MAX = 4, -}; - -// Frame types (section 7.2). -enum { - VP9_KEY_FRAME = 0, - VP9_NON_KEY_FRAME = 1, -}; - -// Frame sync bytes (section 7.2.1). -enum { - VP9_FRAME_SYNC_0 = 0x49, - VP9_FRAME_SYNC_1 = 0x83, - VP9_FRAME_SYNC_2 = 0x42, -}; - -// Color space values (section 7.2.2). -enum { - VP9_CS_UNKNOWN = 0, - VP9_CS_BT_601 = 1, - VP9_CS_BT_709 = 2, - VP9_CS_SMPTE_170 = 3, - VP9_CS_SMPTE_240 = 4, - VP9_CS_BT_2020 = 5, - VP9_CS_RESERVED = 6, - VP9_CS_RGB = 7, -}; - -// Reference frame types (section 7.4.12). -enum { - VP9_INTRA_FRAME = 0, - VP9_LAST_FRAME = 1, - VP9_GOLDEN_FRAME = 2, - VP9_ALTREF_FRAME = 3, -}; - -// Superframe properties (section B.3). 
-enum { - VP9_MAX_FRAMES_IN_SUPERFRAME = 8, - - VP9_SUPERFRAME_MARKER = 6, -}; - - -typedef struct VP9RawFrameHeader { - uint8_t frame_marker; - uint8_t profile_low_bit; - uint8_t profile_high_bit; - - uint8_t show_existing_frame; - uint8_t frame_to_show_map_idx; - - uint8_t frame_type; - uint8_t show_frame; - uint8_t error_resilient_mode; - - // Color config. - uint8_t ten_or_twelve_bit; - uint8_t color_space; - uint8_t color_range; - uint8_t subsampling_x; - uint8_t subsampling_y; - - uint8_t refresh_frame_flags; - - uint8_t intra_only; - uint8_t reset_frame_context; - - uint8_t ref_frame_idx[VP9_REFS_PER_FRAME]; - uint8_t ref_frame_sign_bias[VP9_MAX_REF_FRAMES]; - - uint8_t allow_high_precision_mv; - - uint8_t refresh_frame_context; - uint8_t frame_parallel_decoding_mode; - - uint8_t frame_context_idx; - - // Frame/render size. - uint8_t found_ref[VP9_REFS_PER_FRAME]; - uint16_t frame_width_minus_1; - uint16_t frame_height_minus_1; - uint8_t render_and_frame_size_different; - uint16_t render_width_minus_1; - uint16_t render_height_minus_1; - - // Interpolation filter. - uint8_t is_filter_switchable; - uint8_t raw_interpolation_filter_type; - - // Loop filter params. - uint8_t loop_filter_level; - uint8_t loop_filter_sharpness; - uint8_t loop_filter_delta_enabled; - uint8_t loop_filter_delta_update; - uint8_t update_ref_delta[VP9_MAX_REF_FRAMES]; - int8_t loop_filter_ref_deltas[VP9_MAX_REF_FRAMES]; - uint8_t update_mode_delta[2]; - int8_t loop_filter_mode_deltas[2]; - - // Quantization params. - uint8_t base_q_idx; - int8_t delta_q_y_dc; - int8_t delta_q_uv_dc; - int8_t delta_q_uv_ac; - - // Segmentation params. 
- uint8_t segmentation_enabled; - uint8_t segmentation_update_map; - uint8_t segmentation_tree_probs[7]; - uint8_t segmentation_temporal_update; - uint8_t segmentation_pred_prob[3]; - uint8_t segmentation_update_data; - uint8_t segmentation_abs_or_delta_update; - uint8_t feature_enabled[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; - uint8_t feature_value[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; - uint8_t feature_sign[VP9_MAX_SEGMENTS][VP9_SEG_LVL_MAX]; - - // Tile info. - uint8_t tile_cols_log2; - uint8_t tile_rows_log2; - - uint16_t header_size_in_bytes; -} VP9RawFrameHeader; - -typedef struct VP9RawFrame { - VP9RawFrameHeader header; - - uint8_t *data; - AVBufferRef *data_ref; - size_t data_size; -} VP9RawFrame; - -typedef struct VP9RawSuperframeIndex { - uint8_t superframe_marker; - uint8_t bytes_per_framesize_minus_1; - uint8_t frames_in_superframe_minus_1; - uint32_t frame_sizes[VP9_MAX_FRAMES_IN_SUPERFRAME]; -} VP9RawSuperframeIndex; - -typedef struct VP9RawSuperframe { - VP9RawFrame frames[VP9_MAX_FRAMES_IN_SUPERFRAME]; - VP9RawSuperframeIndex index; -} VP9RawSuperframe; - -typedef struct VP9ReferenceFrameState { - int frame_width; // RefFrameWidth - int frame_height; // RefFrameHeight - int subsampling_x; // RefSubsamplingX - int subsampling_y; // RefSubsamplingY - int bit_depth; // RefBitDepth -} VP9ReferenceFrameState; - -typedef struct CodedBitstreamVP9Context { - int profile; - - // Frame dimensions in 8x8 mode info blocks. - uint16_t mi_cols; - uint16_t mi_rows; - // Frame dimensions in 64x64 superblocks. 
- uint16_t sb64_cols; - uint16_t sb64_rows; - - int frame_width; - int frame_height; - - uint8_t subsampling_x; - uint8_t subsampling_y; - int bit_depth; - - VP9ReferenceFrameState ref[VP9_NUM_REF_FRAMES]; -} CodedBitstreamVP9Context; - - -#endif /* AVCODEC_CBS_VP9_H */ diff --git a/third-party/cbs/include/cbs/codec_desc.h b/third-party/cbs/include/cbs/codec_desc.h deleted file mode 100644 index 126b52df476..00000000000 --- a/third-party/cbs/include/cbs/codec_desc.h +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Codec descriptors public API - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CODEC_DESC_H -#define AVCODEC_CODEC_DESC_H - -#include "libavutil/avutil.h" - -#include "codec_id.h" - -/** - * @addtogroup lavc_core - * @{ - */ - -/** - * This struct describes the properties of a single codec described by an - * AVCodecID. - * @see avcodec_descriptor_get() - */ -typedef struct AVCodecDescriptor { - enum AVCodecID id; - enum AVMediaType type; - /** - * Name of the codec described by this descriptor. It is non-empty and - * unique for each codec descriptor. It should contain alphanumeric - * characters and '_' only. - */ - const char *name; - /** - * A more descriptive name for this codec. May be NULL. 
- */ - const char *long_name; - /** - * Codec properties, a combination of AV_CODEC_PROP_* flags. - */ - int props; - /** - * MIME type(s) associated with the codec. - * May be NULL; if not, a NULL-terminated array of MIME types. - * The first item is always non-NULL and is the preferred MIME type. - */ - const char *const *mime_types; - /** - * If non-NULL, an array of profiles recognized for this codec. - * Terminated with FF_PROFILE_UNKNOWN. - */ - const struct AVProfile *profiles; -} AVCodecDescriptor; - -/** - * Codec uses only intra compression. - * Video and audio codecs only. - */ -#define AV_CODEC_PROP_INTRA_ONLY (1 << 0) -/** - * Codec supports lossy compression. Audio and video codecs only. - * @note a codec may support both lossy and lossless - * compression modes - */ -#define AV_CODEC_PROP_LOSSY (1 << 1) -/** - * Codec supports lossless compression. Audio and video codecs only. - */ -#define AV_CODEC_PROP_LOSSLESS (1 << 2) -/** - * Codec supports frame reordering. That is, the coded order (the order in which - * the encoded packets are output by the encoders / stored / input to the - * decoders) may be different from the presentation order of the corresponding - * frames. - * - * For codecs that do not have this property set, PTS and DTS should always be - * equal. - */ -#define AV_CODEC_PROP_REORDER (1 << 3) -/** - * Subtitle codec is bitmap based - * Decoded AVSubtitle data can be read from the AVSubtitleRect->pict field. - */ -#define AV_CODEC_PROP_BITMAP_SUB (1 << 16) -/** - * Subtitle codec is text based. - * Decoded AVSubtitle data can be read from the AVSubtitleRect->ass field. - */ -#define AV_CODEC_PROP_TEXT_SUB (1 << 17) - -/** - * @return descriptor for given codec ID or NULL if no descriptor exists. - */ -const AVCodecDescriptor *avcodec_descriptor_get(enum AVCodecID id); - -/** - * Iterate over all codec descriptors known to libavcodec. - * - * @param prev previous descriptor. NULL to get the first descriptor. 
- * - * @return next descriptor or NULL after the last descriptor - */ -const AVCodecDescriptor *avcodec_descriptor_next(const AVCodecDescriptor *prev); - -/** - * @return codec descriptor with the given name or NULL if no such descriptor - * exists. - */ -const AVCodecDescriptor *avcodec_descriptor_get_by_name(const char *name); - -/** - * @} - */ - -#endif // AVCODEC_CODEC_DESC_H diff --git a/third-party/cbs/include/cbs/codec_id.h b/third-party/cbs/include/cbs/codec_id.h deleted file mode 100644 index 81fb316cff6..00000000000 --- a/third-party/cbs/include/cbs/codec_id.h +++ /dev/null @@ -1,634 +0,0 @@ -/* - * Codec IDs - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CODEC_ID_H -#define AVCODEC_CODEC_ID_H - -#include "libavutil/avutil.h" -#include "libavutil/samplefmt.h" - -/** - * @addtogroup lavc_core - * @{ - */ - -/** - * Identify the syntax and semantics of the bitstream. - * The principle is roughly: - * Two decoders with the same ID can decode the same streams. - * Two encoders with the same ID can encode compatible streams. - * There may be slight deviations from the principle due to implementation - * details. - * - * If you add a codec ID to this list, add it so that - * 1. 
no value of an existing codec ID changes (that would break ABI), - * 2. it is as close as possible to similar codecs - * - * After adding new codec IDs, do not forget to add an entry to the codec - * descriptor list and bump libavcodec minor version. - */ -enum AVCodecID { - AV_CODEC_ID_NONE, - - /* video codecs */ - AV_CODEC_ID_MPEG1VIDEO, - AV_CODEC_ID_MPEG2VIDEO, ///< preferred ID for MPEG-1/2 video decoding - AV_CODEC_ID_H261, - AV_CODEC_ID_H263, - AV_CODEC_ID_RV10, - AV_CODEC_ID_RV20, - AV_CODEC_ID_MJPEG, - AV_CODEC_ID_MJPEGB, - AV_CODEC_ID_LJPEG, - AV_CODEC_ID_SP5X, - AV_CODEC_ID_JPEGLS, - AV_CODEC_ID_MPEG4, - AV_CODEC_ID_RAWVIDEO, - AV_CODEC_ID_MSMPEG4V1, - AV_CODEC_ID_MSMPEG4V2, - AV_CODEC_ID_MSMPEG4V3, - AV_CODEC_ID_WMV1, - AV_CODEC_ID_WMV2, - AV_CODEC_ID_H263P, - AV_CODEC_ID_H263I, - AV_CODEC_ID_FLV1, - AV_CODEC_ID_SVQ1, - AV_CODEC_ID_SVQ3, - AV_CODEC_ID_DVVIDEO, - AV_CODEC_ID_HUFFYUV, - AV_CODEC_ID_CYUV, - AV_CODEC_ID_H264, - AV_CODEC_ID_INDEO3, - AV_CODEC_ID_VP3, - AV_CODEC_ID_THEORA, - AV_CODEC_ID_ASV1, - AV_CODEC_ID_ASV2, - AV_CODEC_ID_FFV1, - AV_CODEC_ID_4XM, - AV_CODEC_ID_VCR1, - AV_CODEC_ID_CLJR, - AV_CODEC_ID_MDEC, - AV_CODEC_ID_ROQ, - AV_CODEC_ID_INTERPLAY_VIDEO, - AV_CODEC_ID_XAN_WC3, - AV_CODEC_ID_XAN_WC4, - AV_CODEC_ID_RPZA, - AV_CODEC_ID_CINEPAK, - AV_CODEC_ID_WS_VQA, - AV_CODEC_ID_MSRLE, - AV_CODEC_ID_MSVIDEO1, - AV_CODEC_ID_IDCIN, - AV_CODEC_ID_8BPS, - AV_CODEC_ID_SMC, - AV_CODEC_ID_FLIC, - AV_CODEC_ID_TRUEMOTION1, - AV_CODEC_ID_VMDVIDEO, - AV_CODEC_ID_MSZH, - AV_CODEC_ID_ZLIB, - AV_CODEC_ID_QTRLE, - AV_CODEC_ID_TSCC, - AV_CODEC_ID_ULTI, - AV_CODEC_ID_QDRAW, - AV_CODEC_ID_VIXL, - AV_CODEC_ID_QPEG, - AV_CODEC_ID_PNG, - AV_CODEC_ID_PPM, - AV_CODEC_ID_PBM, - AV_CODEC_ID_PGM, - AV_CODEC_ID_PGMYUV, - AV_CODEC_ID_PAM, - AV_CODEC_ID_FFVHUFF, - AV_CODEC_ID_RV30, - AV_CODEC_ID_RV40, - AV_CODEC_ID_VC1, - AV_CODEC_ID_WMV3, - AV_CODEC_ID_LOCO, - AV_CODEC_ID_WNV1, - AV_CODEC_ID_AASC, - AV_CODEC_ID_INDEO2, - AV_CODEC_ID_FRAPS, - AV_CODEC_ID_TRUEMOTION2, 
- AV_CODEC_ID_BMP, - AV_CODEC_ID_CSCD, - AV_CODEC_ID_MMVIDEO, - AV_CODEC_ID_ZMBV, - AV_CODEC_ID_AVS, - AV_CODEC_ID_SMACKVIDEO, - AV_CODEC_ID_NUV, - AV_CODEC_ID_KMVC, - AV_CODEC_ID_FLASHSV, - AV_CODEC_ID_CAVS, - AV_CODEC_ID_JPEG2000, - AV_CODEC_ID_VMNC, - AV_CODEC_ID_VP5, - AV_CODEC_ID_VP6, - AV_CODEC_ID_VP6F, - AV_CODEC_ID_TARGA, - AV_CODEC_ID_DSICINVIDEO, - AV_CODEC_ID_TIERTEXSEQVIDEO, - AV_CODEC_ID_TIFF, - AV_CODEC_ID_GIF, - AV_CODEC_ID_DXA, - AV_CODEC_ID_DNXHD, - AV_CODEC_ID_THP, - AV_CODEC_ID_SGI, - AV_CODEC_ID_C93, - AV_CODEC_ID_BETHSOFTVID, - AV_CODEC_ID_PTX, - AV_CODEC_ID_TXD, - AV_CODEC_ID_VP6A, - AV_CODEC_ID_AMV, - AV_CODEC_ID_VB, - AV_CODEC_ID_PCX, - AV_CODEC_ID_SUNRAST, - AV_CODEC_ID_INDEO4, - AV_CODEC_ID_INDEO5, - AV_CODEC_ID_MIMIC, - AV_CODEC_ID_RL2, - AV_CODEC_ID_ESCAPE124, - AV_CODEC_ID_DIRAC, - AV_CODEC_ID_BFI, - AV_CODEC_ID_CMV, - AV_CODEC_ID_MOTIONPIXELS, - AV_CODEC_ID_TGV, - AV_CODEC_ID_TGQ, - AV_CODEC_ID_TQI, - AV_CODEC_ID_AURA, - AV_CODEC_ID_AURA2, - AV_CODEC_ID_V210X, - AV_CODEC_ID_TMV, - AV_CODEC_ID_V210, - AV_CODEC_ID_DPX, - AV_CODEC_ID_MAD, - AV_CODEC_ID_FRWU, - AV_CODEC_ID_FLASHSV2, - AV_CODEC_ID_CDGRAPHICS, - AV_CODEC_ID_R210, - AV_CODEC_ID_ANM, - AV_CODEC_ID_BINKVIDEO, - AV_CODEC_ID_IFF_ILBM, -#define AV_CODEC_ID_IFF_BYTERUN1 AV_CODEC_ID_IFF_ILBM - AV_CODEC_ID_KGV1, - AV_CODEC_ID_YOP, - AV_CODEC_ID_VP8, - AV_CODEC_ID_PICTOR, - AV_CODEC_ID_ANSI, - AV_CODEC_ID_A64_MULTI, - AV_CODEC_ID_A64_MULTI5, - AV_CODEC_ID_R10K, - AV_CODEC_ID_MXPEG, - AV_CODEC_ID_LAGARITH, - AV_CODEC_ID_PRORES, - AV_CODEC_ID_JV, - AV_CODEC_ID_DFA, - AV_CODEC_ID_WMV3IMAGE, - AV_CODEC_ID_VC1IMAGE, - AV_CODEC_ID_UTVIDEO, - AV_CODEC_ID_BMV_VIDEO, - AV_CODEC_ID_VBLE, - AV_CODEC_ID_DXTORY, - AV_CODEC_ID_V410, - AV_CODEC_ID_XWD, - AV_CODEC_ID_CDXL, - AV_CODEC_ID_XBM, - AV_CODEC_ID_ZEROCODEC, - AV_CODEC_ID_MSS1, - AV_CODEC_ID_MSA1, - AV_CODEC_ID_TSCC2, - AV_CODEC_ID_MTS2, - AV_CODEC_ID_CLLC, - AV_CODEC_ID_MSS2, - AV_CODEC_ID_VP9, - AV_CODEC_ID_AIC, - AV_CODEC_ID_ESCAPE130, - 
AV_CODEC_ID_G2M, - AV_CODEC_ID_WEBP, - AV_CODEC_ID_HNM4_VIDEO, - AV_CODEC_ID_HEVC, -#define AV_CODEC_ID_H265 AV_CODEC_ID_HEVC - AV_CODEC_ID_FIC, - AV_CODEC_ID_ALIAS_PIX, - AV_CODEC_ID_BRENDER_PIX, - AV_CODEC_ID_PAF_VIDEO, - AV_CODEC_ID_EXR, - AV_CODEC_ID_VP7, - AV_CODEC_ID_SANM, - AV_CODEC_ID_SGIRLE, - AV_CODEC_ID_MVC1, - AV_CODEC_ID_MVC2, - AV_CODEC_ID_HQX, - AV_CODEC_ID_TDSC, - AV_CODEC_ID_HQ_HQA, - AV_CODEC_ID_HAP, - AV_CODEC_ID_DDS, - AV_CODEC_ID_DXV, - AV_CODEC_ID_SCREENPRESSO, - AV_CODEC_ID_RSCC, - AV_CODEC_ID_AVS2, - AV_CODEC_ID_PGX, - AV_CODEC_ID_AVS3, - AV_CODEC_ID_MSP2, - AV_CODEC_ID_VVC, -#define AV_CODEC_ID_H266 AV_CODEC_ID_VVC - AV_CODEC_ID_Y41P, - AV_CODEC_ID_AVRP, - AV_CODEC_ID_012V, - AV_CODEC_ID_AVUI, - AV_CODEC_ID_AYUV, - AV_CODEC_ID_TARGA_Y216, - AV_CODEC_ID_V308, - AV_CODEC_ID_V408, - AV_CODEC_ID_YUV4, - AV_CODEC_ID_AVRN, - AV_CODEC_ID_CPIA, - AV_CODEC_ID_XFACE, - AV_CODEC_ID_SNOW, - AV_CODEC_ID_SMVJPEG, - AV_CODEC_ID_APNG, - AV_CODEC_ID_DAALA, - AV_CODEC_ID_CFHD, - AV_CODEC_ID_TRUEMOTION2RT, - AV_CODEC_ID_M101, - AV_CODEC_ID_MAGICYUV, - AV_CODEC_ID_SHEERVIDEO, - AV_CODEC_ID_YLC, - AV_CODEC_ID_PSD, - AV_CODEC_ID_PIXLET, - AV_CODEC_ID_SPEEDHQ, - AV_CODEC_ID_FMVC, - AV_CODEC_ID_SCPR, - AV_CODEC_ID_CLEARVIDEO, - AV_CODEC_ID_XPM, - AV_CODEC_ID_AV1, - AV_CODEC_ID_BITPACKED, - AV_CODEC_ID_MSCC, - AV_CODEC_ID_SRGC, - AV_CODEC_ID_SVG, - AV_CODEC_ID_GDV, - AV_CODEC_ID_FITS, - AV_CODEC_ID_IMM4, - AV_CODEC_ID_PROSUMER, - AV_CODEC_ID_MWSC, - AV_CODEC_ID_WCMV, - AV_CODEC_ID_RASC, - AV_CODEC_ID_HYMT, - AV_CODEC_ID_ARBC, - AV_CODEC_ID_AGM, - AV_CODEC_ID_LSCR, - AV_CODEC_ID_VP4, - AV_CODEC_ID_IMM5, - AV_CODEC_ID_MVDV, - AV_CODEC_ID_MVHA, - AV_CODEC_ID_CDTOONS, - AV_CODEC_ID_MV30, - AV_CODEC_ID_NOTCHLC, - AV_CODEC_ID_PFM, - AV_CODEC_ID_MOBICLIP, - AV_CODEC_ID_PHOTOCD, - AV_CODEC_ID_IPU, - AV_CODEC_ID_ARGO, - AV_CODEC_ID_CRI, - AV_CODEC_ID_SIMBIOSIS_IMX, - AV_CODEC_ID_SGA_VIDEO, - AV_CODEC_ID_GEM, - AV_CODEC_ID_VBN, - AV_CODEC_ID_JPEGXL, - AV_CODEC_ID_QOI, - 
AV_CODEC_ID_PHM, - - /* various PCM "codecs" */ - AV_CODEC_ID_FIRST_AUDIO = 0x10000, ///< A dummy id pointing at the start of audio codecs - AV_CODEC_ID_PCM_S16LE = 0x10000, - AV_CODEC_ID_PCM_S16BE, - AV_CODEC_ID_PCM_U16LE, - AV_CODEC_ID_PCM_U16BE, - AV_CODEC_ID_PCM_S8, - AV_CODEC_ID_PCM_U8, - AV_CODEC_ID_PCM_MULAW, - AV_CODEC_ID_PCM_ALAW, - AV_CODEC_ID_PCM_S32LE, - AV_CODEC_ID_PCM_S32BE, - AV_CODEC_ID_PCM_U32LE, - AV_CODEC_ID_PCM_U32BE, - AV_CODEC_ID_PCM_S24LE, - AV_CODEC_ID_PCM_S24BE, - AV_CODEC_ID_PCM_U24LE, - AV_CODEC_ID_PCM_U24BE, - AV_CODEC_ID_PCM_S24DAUD, - AV_CODEC_ID_PCM_ZORK, - AV_CODEC_ID_PCM_S16LE_PLANAR, - AV_CODEC_ID_PCM_DVD, - AV_CODEC_ID_PCM_F32BE, - AV_CODEC_ID_PCM_F32LE, - AV_CODEC_ID_PCM_F64BE, - AV_CODEC_ID_PCM_F64LE, - AV_CODEC_ID_PCM_BLURAY, - AV_CODEC_ID_PCM_LXF, - AV_CODEC_ID_S302M, - AV_CODEC_ID_PCM_S8_PLANAR, - AV_CODEC_ID_PCM_S24LE_PLANAR, - AV_CODEC_ID_PCM_S32LE_PLANAR, - AV_CODEC_ID_PCM_S16BE_PLANAR, - AV_CODEC_ID_PCM_S64LE, - AV_CODEC_ID_PCM_S64BE, - AV_CODEC_ID_PCM_F16LE, - AV_CODEC_ID_PCM_F24LE, - AV_CODEC_ID_PCM_VIDC, - AV_CODEC_ID_PCM_SGA, - - /* various ADPCM codecs */ - AV_CODEC_ID_ADPCM_IMA_QT = 0x11000, - AV_CODEC_ID_ADPCM_IMA_WAV, - AV_CODEC_ID_ADPCM_IMA_DK3, - AV_CODEC_ID_ADPCM_IMA_DK4, - AV_CODEC_ID_ADPCM_IMA_WS, - AV_CODEC_ID_ADPCM_IMA_SMJPEG, - AV_CODEC_ID_ADPCM_MS, - AV_CODEC_ID_ADPCM_4XM, - AV_CODEC_ID_ADPCM_XA, - AV_CODEC_ID_ADPCM_ADX, - AV_CODEC_ID_ADPCM_EA, - AV_CODEC_ID_ADPCM_G726, - AV_CODEC_ID_ADPCM_CT, - AV_CODEC_ID_ADPCM_SWF, - AV_CODEC_ID_ADPCM_YAMAHA, - AV_CODEC_ID_ADPCM_SBPRO_4, - AV_CODEC_ID_ADPCM_SBPRO_3, - AV_CODEC_ID_ADPCM_SBPRO_2, - AV_CODEC_ID_ADPCM_THP, - AV_CODEC_ID_ADPCM_IMA_AMV, - AV_CODEC_ID_ADPCM_EA_R1, - AV_CODEC_ID_ADPCM_EA_R3, - AV_CODEC_ID_ADPCM_EA_R2, - AV_CODEC_ID_ADPCM_IMA_EA_SEAD, - AV_CODEC_ID_ADPCM_IMA_EA_EACS, - AV_CODEC_ID_ADPCM_EA_XAS, - AV_CODEC_ID_ADPCM_EA_MAXIS_XA, - AV_CODEC_ID_ADPCM_IMA_ISS, - AV_CODEC_ID_ADPCM_G722, - AV_CODEC_ID_ADPCM_IMA_APC, - AV_CODEC_ID_ADPCM_VIMA, - 
AV_CODEC_ID_ADPCM_AFC, - AV_CODEC_ID_ADPCM_IMA_OKI, - AV_CODEC_ID_ADPCM_DTK, - AV_CODEC_ID_ADPCM_IMA_RAD, - AV_CODEC_ID_ADPCM_G726LE, - AV_CODEC_ID_ADPCM_THP_LE, - AV_CODEC_ID_ADPCM_PSX, - AV_CODEC_ID_ADPCM_AICA, - AV_CODEC_ID_ADPCM_IMA_DAT4, - AV_CODEC_ID_ADPCM_MTAF, - AV_CODEC_ID_ADPCM_AGM, - AV_CODEC_ID_ADPCM_ARGO, - AV_CODEC_ID_ADPCM_IMA_SSI, - AV_CODEC_ID_ADPCM_ZORK, - AV_CODEC_ID_ADPCM_IMA_APM, - AV_CODEC_ID_ADPCM_IMA_ALP, - AV_CODEC_ID_ADPCM_IMA_MTF, - AV_CODEC_ID_ADPCM_IMA_CUNNING, - AV_CODEC_ID_ADPCM_IMA_MOFLEX, - AV_CODEC_ID_ADPCM_IMA_ACORN, - - /* AMR */ - AV_CODEC_ID_AMR_NB = 0x12000, - AV_CODEC_ID_AMR_WB, - - /* RealAudio codecs*/ - AV_CODEC_ID_RA_144 = 0x13000, - AV_CODEC_ID_RA_288, - - /* various DPCM codecs */ - AV_CODEC_ID_ROQ_DPCM = 0x14000, - AV_CODEC_ID_INTERPLAY_DPCM, - AV_CODEC_ID_XAN_DPCM, - AV_CODEC_ID_SOL_DPCM, - AV_CODEC_ID_SDX2_DPCM, - AV_CODEC_ID_GREMLIN_DPCM, - AV_CODEC_ID_DERF_DPCM, - - /* audio codecs */ - AV_CODEC_ID_MP2 = 0x15000, - AV_CODEC_ID_MP3, ///< preferred ID for decoding MPEG audio layer 1, 2 or 3 - AV_CODEC_ID_AAC, - AV_CODEC_ID_AC3, - AV_CODEC_ID_DTS, - AV_CODEC_ID_VORBIS, - AV_CODEC_ID_DVAUDIO, - AV_CODEC_ID_WMAV1, - AV_CODEC_ID_WMAV2, - AV_CODEC_ID_MACE3, - AV_CODEC_ID_MACE6, - AV_CODEC_ID_VMDAUDIO, - AV_CODEC_ID_FLAC, - AV_CODEC_ID_MP3ADU, - AV_CODEC_ID_MP3ON4, - AV_CODEC_ID_SHORTEN, - AV_CODEC_ID_ALAC, - AV_CODEC_ID_WESTWOOD_SND1, - AV_CODEC_ID_GSM, ///< as in Berlin toast format - AV_CODEC_ID_QDM2, - AV_CODEC_ID_COOK, - AV_CODEC_ID_TRUESPEECH, - AV_CODEC_ID_TTA, - AV_CODEC_ID_SMACKAUDIO, - AV_CODEC_ID_QCELP, - AV_CODEC_ID_WAVPACK, - AV_CODEC_ID_DSICINAUDIO, - AV_CODEC_ID_IMC, - AV_CODEC_ID_MUSEPACK7, - AV_CODEC_ID_MLP, - AV_CODEC_ID_GSM_MS, /* as found in WAV */ - AV_CODEC_ID_ATRAC3, - AV_CODEC_ID_APE, - AV_CODEC_ID_NELLYMOSER, - AV_CODEC_ID_MUSEPACK8, - AV_CODEC_ID_SPEEX, - AV_CODEC_ID_WMAVOICE, - AV_CODEC_ID_WMAPRO, - AV_CODEC_ID_WMALOSSLESS, - AV_CODEC_ID_ATRAC3P, - AV_CODEC_ID_EAC3, - AV_CODEC_ID_SIPR, - 
AV_CODEC_ID_MP1, - AV_CODEC_ID_TWINVQ, - AV_CODEC_ID_TRUEHD, - AV_CODEC_ID_MP4ALS, - AV_CODEC_ID_ATRAC1, - AV_CODEC_ID_BINKAUDIO_RDFT, - AV_CODEC_ID_BINKAUDIO_DCT, - AV_CODEC_ID_AAC_LATM, - AV_CODEC_ID_QDMC, - AV_CODEC_ID_CELT, - AV_CODEC_ID_G723_1, - AV_CODEC_ID_G729, - AV_CODEC_ID_8SVX_EXP, - AV_CODEC_ID_8SVX_FIB, - AV_CODEC_ID_BMV_AUDIO, - AV_CODEC_ID_RALF, - AV_CODEC_ID_IAC, - AV_CODEC_ID_ILBC, - AV_CODEC_ID_OPUS, - AV_CODEC_ID_COMFORT_NOISE, - AV_CODEC_ID_TAK, - AV_CODEC_ID_METASOUND, - AV_CODEC_ID_PAF_AUDIO, - AV_CODEC_ID_ON2AVC, - AV_CODEC_ID_DSS_SP, - AV_CODEC_ID_CODEC2, - AV_CODEC_ID_FFWAVESYNTH, - AV_CODEC_ID_SONIC, - AV_CODEC_ID_SONIC_LS, - AV_CODEC_ID_EVRC, - AV_CODEC_ID_SMV, - AV_CODEC_ID_DSD_LSBF, - AV_CODEC_ID_DSD_MSBF, - AV_CODEC_ID_DSD_LSBF_PLANAR, - AV_CODEC_ID_DSD_MSBF_PLANAR, - AV_CODEC_ID_4GV, - AV_CODEC_ID_INTERPLAY_ACM, - AV_CODEC_ID_XMA1, - AV_CODEC_ID_XMA2, - AV_CODEC_ID_DST, - AV_CODEC_ID_ATRAC3AL, - AV_CODEC_ID_ATRAC3PAL, - AV_CODEC_ID_DOLBY_E, - AV_CODEC_ID_APTX, - AV_CODEC_ID_APTX_HD, - AV_CODEC_ID_SBC, - AV_CODEC_ID_ATRAC9, - AV_CODEC_ID_HCOM, - AV_CODEC_ID_ACELP_KELVIN, - AV_CODEC_ID_MPEGH_3D_AUDIO, - AV_CODEC_ID_SIREN, - AV_CODEC_ID_HCA, - AV_CODEC_ID_FASTAUDIO, - AV_CODEC_ID_MSNSIREN, - AV_CODEC_ID_DFPWM, - - /* subtitle codecs */ - AV_CODEC_ID_FIRST_SUBTITLE = 0x17000, ///< A dummy ID pointing at the start of subtitle codecs. 
- AV_CODEC_ID_DVD_SUBTITLE = 0x17000, - AV_CODEC_ID_DVB_SUBTITLE, - AV_CODEC_ID_TEXT, ///< raw UTF-8 text - AV_CODEC_ID_XSUB, - AV_CODEC_ID_SSA, - AV_CODEC_ID_MOV_TEXT, - AV_CODEC_ID_HDMV_PGS_SUBTITLE, - AV_CODEC_ID_DVB_TELETEXT, - AV_CODEC_ID_SRT, - AV_CODEC_ID_MICRODVD, - AV_CODEC_ID_EIA_608, - AV_CODEC_ID_JACOSUB, - AV_CODEC_ID_SAMI, - AV_CODEC_ID_REALTEXT, - AV_CODEC_ID_STL, - AV_CODEC_ID_SUBVIEWER1, - AV_CODEC_ID_SUBVIEWER, - AV_CODEC_ID_SUBRIP, - AV_CODEC_ID_WEBVTT, - AV_CODEC_ID_MPL2, - AV_CODEC_ID_VPLAYER, - AV_CODEC_ID_PJS, - AV_CODEC_ID_ASS, - AV_CODEC_ID_HDMV_TEXT_SUBTITLE, - AV_CODEC_ID_TTML, - AV_CODEC_ID_ARIB_CAPTION, - - /* other specific kind of codecs (generally used for attachments) */ - AV_CODEC_ID_FIRST_UNKNOWN = 0x18000, ///< A dummy ID pointing at the start of various fake codecs. - AV_CODEC_ID_TTF = 0x18000, - - AV_CODEC_ID_SCTE_35, ///< Contain timestamp estimated through PCR of program stream. - AV_CODEC_ID_EPG, - AV_CODEC_ID_BINTEXT, - AV_CODEC_ID_XBIN, - AV_CODEC_ID_IDF, - AV_CODEC_ID_OTF, - AV_CODEC_ID_SMPTE_KLV, - AV_CODEC_ID_DVD_NAV, - AV_CODEC_ID_TIMED_ID3, - AV_CODEC_ID_BIN_DATA, - - - AV_CODEC_ID_PROBE = 0x19000, ///< codec_id is not known (like AV_CODEC_ID_NONE) but lavf should attempt to identify it - - AV_CODEC_ID_MPEG2TS = 0x20000, /**< _FAKE_ codec to indicate a raw MPEG-2 TS - * stream (only used by libavformat) */ - AV_CODEC_ID_MPEG4SYSTEMS = 0x20001, /**< _FAKE_ codec to indicate a MPEG-4 Systems - * stream (only used by libavformat) */ - AV_CODEC_ID_FFMETADATA = 0x21000, ///< Dummy codec for streams containing only metadata information. - AV_CODEC_ID_WRAPPED_AVFRAME = 0x21001, ///< Passthrough codec, AVFrames wrapped in AVPacket -}; - -/** - * Get the type of the given codec. - */ -enum AVMediaType avcodec_get_type(enum AVCodecID codec_id); - -/** - * Get the name of a codec. 
- * @return a static string identifying the codec; never NULL - */ -const char *avcodec_get_name(enum AVCodecID id); - -/** - * Return codec bits per sample. - * - * @param[in] codec_id the codec - * @return Number of bits per sample or zero if unknown for the given codec. - */ -int av_get_bits_per_sample(enum AVCodecID codec_id); - -/** - * Return codec bits per sample. - * Only return non-zero if the bits per sample is exactly correct, not an - * approximation. - * - * @param[in] codec_id the codec - * @return Number of bits per sample or zero if unknown for the given codec. - */ -int av_get_exact_bits_per_sample(enum AVCodecID codec_id); - -/** - * Return a name for the specified profile, if available. - * - * @param codec_id the ID of the codec to which the requested profile belongs - * @param profile the profile value for which a name is requested - * @return A name for the profile if found, NULL otherwise. - * - * @note unlike av_get_profile_name(), which searches a list of profiles - * supported by a specific decoder or encoder implementation, this - * function searches the list of profiles from the AVCodecDescriptor - */ -const char *avcodec_profile_name(enum AVCodecID codec_id, int profile); - -/** - * Return the PCM codec associated with a sample format. - * @param be endianness, 0 for little, 1 for big, - * -1 (or anything else) for native - * @return AV_CODEC_ID_PCM_* or AV_CODEC_ID_NONE - */ -enum AVCodecID av_get_pcm_codec(enum AVSampleFormat fmt, int be); - -/** - * @} - */ - -#endif // AVCODEC_CODEC_ID_H diff --git a/third-party/cbs/include/cbs/codec_par.h b/third-party/cbs/include/cbs/codec_par.h deleted file mode 100644 index 7660791a12e..00000000000 --- a/third-party/cbs/include/cbs/codec_par.h +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Codec parameters public API - * - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_CODEC_PAR_H -#define AVCODEC_CODEC_PAR_H - -#include - -#include "libavutil/avutil.h" -#include "libavutil/channel_layout.h" -#include "libavutil/rational.h" -#include "libavutil/pixfmt.h" - -#include "codec_id.h" - -/** - * @addtogroup lavc_core - */ - -enum AVFieldOrder { - AV_FIELD_UNKNOWN, - AV_FIELD_PROGRESSIVE, - AV_FIELD_TT, //< Top coded_first, top displayed first - AV_FIELD_BB, //< Bottom coded first, bottom displayed first - AV_FIELD_TB, //< Top coded first, bottom displayed first - AV_FIELD_BT, //< Bottom coded first, top displayed first -}; - -/** - * This struct describes the properties of an encoded stream. - * - * sizeof(AVCodecParameters) is not a part of the public ABI, this struct must - * be allocated with avcodec_parameters_alloc() and freed with - * avcodec_parameters_free(). - */ -typedef struct AVCodecParameters { - /** - * General type of the encoded data. - */ - enum AVMediaType codec_type; - /** - * Specific type of the encoded data (the codec used). - */ - enum AVCodecID codec_id; - /** - * Additional information about the codec (corresponds to the AVI FOURCC). - */ - uint32_t codec_tag; - - /** - * Extra binary data needed for initializing the decoder, codec-dependent. 
- * - * Must be allocated with av_malloc() and will be freed by - * avcodec_parameters_free(). The allocated size of extradata must be at - * least extradata_size + AV_INPUT_BUFFER_PADDING_SIZE, with the padding - * bytes zeroed. - */ - uint8_t *extradata; - /** - * Size of the extradata content in bytes. - */ - int extradata_size; - - /** - * - video: the pixel format, the value corresponds to enum AVPixelFormat. - * - audio: the sample format, the value corresponds to enum AVSampleFormat. - */ - int format; - - /** - * The average bitrate of the encoded data (in bits per second). - */ - int64_t bit_rate; - - /** - * The number of bits per sample in the codedwords. - * - * This is basically the bitrate per sample. It is mandatory for a bunch of - * formats to actually decode them. It's the number of bits for one sample in - * the actual coded bitstream. - * - * This could be for example 4 for ADPCM - * For PCM formats this matches bits_per_raw_sample - * Can be 0 - */ - int bits_per_coded_sample; - - /** - * This is the number of valid bits in each output sample. If the - * sample format has more bits, the least significant bits are additional - * padding bits, which are always 0. Use right shifts to reduce the sample - * to its actual size. For example, audio formats with 24 bit samples will - * have bits_per_raw_sample set to 24, and format set to AV_SAMPLE_FMT_S32. - * To get the original sample use "(int32_t)sample >> 8"." - * - * For ADPCM this might be 12 or 16 or similar - * Can be 0 - */ - int bits_per_raw_sample; - - /** - * Codec-specific bitstream restrictions that the stream conforms to. - */ - int profile; - int level; - - /** - * Video only. The dimensions of the video frame in pixels. - */ - int width; - int height; - - /** - * Video only. The aspect ratio (width / height) which a single pixel - * should have when displayed. 
- * - * When the aspect ratio is unknown / undefined, the numerator should be - * set to 0 (the denominator may have any value). - */ - AVRational sample_aspect_ratio; - - /** - * Video only. The order of the fields in interlaced video. - */ - enum AVFieldOrder field_order; - - /** - * Video only. Additional colorspace characteristics. - */ - enum AVColorRange color_range; - enum AVColorPrimaries color_primaries; - enum AVColorTransferCharacteristic color_trc; - enum AVColorSpace color_space; - enum AVChromaLocation chroma_location; - - /** - * Video only. Number of delayed frames. - */ - int video_delay; - -#if FF_API_OLD_CHANNEL_LAYOUT - /** - * Audio only. The channel layout bitmask. May be 0 if the channel layout is - * unknown or unspecified, otherwise the number of bits set must be equal to - * the channels field. - * @deprecated use ch_layout - */ - attribute_deprecated - uint64_t channel_layout; - /** - * Audio only. The number of audio channels. - * @deprecated use ch_layout.nb_channels - */ - attribute_deprecated - int channels; -#endif - /** - * Audio only. The number of audio samples per second. - */ - int sample_rate; - /** - * Audio only. The number of bytes per coded audio frame, required by some - * formats. - * - * Corresponds to nBlockAlign in WAVEFORMATEX. - */ - int block_align; - /** - * Audio only. Audio frame size, if known. Required by some formats to be static. - */ - int frame_size; - - /** - * Audio only. The amount of padding (in samples) inserted by the encoder at - * the beginning of the audio. I.e. this number of leading decoded samples - * must be discarded by the caller to get the original audio without leading - * padding. - */ - int initial_padding; - /** - * Audio only. The amount of padding (in samples) appended by the encoder to - * the end of the audio. I.e. this number of decoded samples must be - * discarded by the caller from the end of the stream to get the original - * audio without any trailing padding. 
- */ - int trailing_padding; - /** - * Audio only. Number of samples to skip after a discontinuity. - */ - int seek_preroll; - - /** - * Audio only. The channel layout and number of channels. - */ - AVChannelLayout ch_layout; -} AVCodecParameters; - -/** - * Allocate a new AVCodecParameters and set its fields to default values - * (unknown/invalid/0). The returned struct must be freed with - * avcodec_parameters_free(). - */ -AVCodecParameters *avcodec_parameters_alloc(void); - -/** - * Free an AVCodecParameters instance and everything associated with it and - * write NULL to the supplied pointer. - */ -void avcodec_parameters_free(AVCodecParameters **par); - -/** - * Copy the contents of src to dst. Any allocated fields in dst are freed and - * replaced with newly allocated duplicates of the corresponding fields in src. - * - * @return >= 0 on success, a negative AVERROR code on failure. - */ -int avcodec_parameters_copy(AVCodecParameters *dst, const AVCodecParameters *src); - -/** - * This function is the same as av_get_audio_frame_duration(), except it works - * with AVCodecParameters instead of an AVCodecContext. 
- */ -int av_get_audio_frame_duration2(AVCodecParameters *par, int frame_bytes); - -/** - * @} - */ - -#endif // AVCODEC_CODEC_PAR_H diff --git a/third-party/cbs/include/cbs/config.h b/third-party/cbs/include/cbs/config.h deleted file mode 100644 index a778bdd9f20..00000000000 --- a/third-party/cbs/include/cbs/config.h +++ /dev/null @@ -1,34 +0,0 @@ -// [sunshine] Copied, generated file -#ifndef CBS_CONFIG_H -#define CBS_CONFIG_H - -#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || \ - defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ || \ - defined(__FLOAT_WORD_ORDER__) && __FLOAT_WORD_ORDER__ == __ORDER_BIG_ENDIAN__ || \ - defined(__BIG_ENDIAN__) || \ - defined(__ARMEB__) || \ - defined(__THUMBEB__) || \ - defined(__AARCH64EB__) || \ - defined(_MIBSEB) || defined(__MIBSEB) || defined(__MIBSEB__) -// It's a big-endian target architecture -#define AV_HAVE_BIGENDIAN 1 - -#elif defined(__BYTE_ORDER) && __BYTE_ORDER == __LITTLE_ENDIAN || \ - defined(__BYTE_ORDER) && __BYTE_ORDER == __PDP_ENDIAN || \ - defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ || \ - defined(__FLOAT_WORD_ORDER__) && __FLOAT_WORD_ORDER__ == __ORDER_LITTLE_ENDIAN__ || \ - defined(__LITTLE_ENDIAN__) || \ - defined(__ARMEL__) || \ - defined(__THUMBEL__) || \ - defined(__AARCH64EL__) || \ - defined(_MIPSEL) || defined(__MIPSEL) || defined(__MIPSEL__) || \ - defined(_WIN32) -// It's a little-endian target architecture -#define AV_HAVE_BIGENDIAN 0 - -#else -// https://manhnt.github.io/programming_technique/2018/08/15/oneline-macro-endian-check.html -#define AV_HAVE_BIGENDIAN (*(uint16_t *)"\0\xff" < 0x0100) -#endif - -#endif \ No newline at end of file diff --git a/third-party/cbs/include/cbs/defs.h b/third-party/cbs/include/cbs/defs.h deleted file mode 100644 index 420a042b8ff..00000000000 --- a/third-party/cbs/include/cbs/defs.h +++ /dev/null @@ -1,170 +0,0 @@ -/* - * - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_DEFS_H -#define AVCODEC_DEFS_H - -/** - * @file - * @ingroup libavc - * Misc types and constants that do not belong anywhere else. - */ - -#include -#include - -/** - * @ingroup lavc_decoding - * Required number of additionally allocated bytes at the end of the input bitstream for decoding. - * This is mainly needed because some optimized bitstream readers read - * 32 or 64 bit at once and could read over the end.
- * Note: If the first 23 bits of the additional bytes are not 0, then damaged - * MPEG bitstreams could cause overread and segfault. - */ -#define AV_INPUT_BUFFER_PADDING_SIZE 64 - -/** - * @ingroup lavc_decoding - */ -enum AVDiscard{ - /* We leave some space between them for extensions (drop some - * keyframes for intra-only or drop just some bidir frames). */ - AVDISCARD_NONE =-16, ///< discard nothing - AVDISCARD_DEFAULT = 0, ///< discard useless packets like 0 size packets in avi - AVDISCARD_NONREF = 8, ///< discard all non reference - AVDISCARD_BIDIR = 16, ///< discard all bidirectional frames - AVDISCARD_NONINTRA= 24, ///< discard all non intra frames - AVDISCARD_NONKEY = 32, ///< discard all frames except keyframes - AVDISCARD_ALL = 48, ///< discard all -}; - -enum AVAudioServiceType { - AV_AUDIO_SERVICE_TYPE_MAIN = 0, - AV_AUDIO_SERVICE_TYPE_EFFECTS = 1, - AV_AUDIO_SERVICE_TYPE_VISUALLY_IMPAIRED = 2, - AV_AUDIO_SERVICE_TYPE_HEARING_IMPAIRED = 3, - AV_AUDIO_SERVICE_TYPE_DIALOGUE = 4, - AV_AUDIO_SERVICE_TYPE_COMMENTARY = 5, - AV_AUDIO_SERVICE_TYPE_EMERGENCY = 6, - AV_AUDIO_SERVICE_TYPE_VOICE_OVER = 7, - AV_AUDIO_SERVICE_TYPE_KARAOKE = 8, - AV_AUDIO_SERVICE_TYPE_NB , ///< Not part of ABI -}; - -/** - * Pan Scan area. - * This specifies the area which should be displayed. - * Note there may be multiple such areas for one frame. - */ -typedef struct AVPanScan { - /** - * id - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - int id; - - /** - * width and height in 1/16 pel - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - int width; - int height; - - /** - * position of the top left corner in 1/16 pel for up to 3 fields/frames - * - encoding: Set by user. - * - decoding: Set by libavcodec. - */ - int16_t position[3][2]; -} AVPanScan; - -/** - * This structure describes the bitrate properties of an encoded bitstream. It - * roughly corresponds to a subset the VBV parameters for MPEG-2 or HRD - * parameters for H.264/HEVC. 
- */ -typedef struct AVCPBProperties { - /** - * Maximum bitrate of the stream, in bits per second. - * Zero if unknown or unspecified. - */ - int64_t max_bitrate; - /** - * Minimum bitrate of the stream, in bits per second. - * Zero if unknown or unspecified. - */ - int64_t min_bitrate; - /** - * Average bitrate of the stream, in bits per second. - * Zero if unknown or unspecified. - */ - int64_t avg_bitrate; - - /** - * The size of the buffer to which the ratecontrol is applied, in bits. - * Zero if unknown or unspecified. - */ - int64_t buffer_size; - - /** - * The delay between the time the packet this structure is associated with - * is received and the time when it should be decoded, in periods of a 27MHz - * clock. - * - * UINT64_MAX when unknown or unspecified. - */ - uint64_t vbv_delay; -} AVCPBProperties; - -/** - * Allocate a CPB properties structure and initialize its fields to default - * values. - * - * @param size if non-NULL, the size of the allocated struct will be written - * here. This is useful for embedding it in side data. - * - * @return the newly allocated struct or NULL on failure - */ -AVCPBProperties *av_cpb_properties_alloc(size_t *size); - -/** - * This structure supplies correlation between a packet timestamp and a wall clock - * production time. The definition follows the Producer Reference Time ('prft') - * as defined in ISO/IEC 14496-12 - */ -typedef struct AVProducerReferenceTime { - /** - * A UTC timestamp, in microseconds, since Unix epoch (e.g, av_gettime()). - */ - int64_t wallclock; - int flags; -} AVProducerReferenceTime; - -/** - * Encode extradata length to a buffer. Used by xiph codecs. - * - * @param s buffer to write to; must be at least (v/255+1) bytes long - * @param v size of extradata in bytes - * @return number of bytes written to the buffer. 
- */ -unsigned int av_xiphlacing(unsigned char *s, unsigned int v); - -#endif // AVCODEC_DEFS_H diff --git a/third-party/cbs/include/cbs/get_bits.h b/third-party/cbs/include/cbs/get_bits.h deleted file mode 100644 index 045c8c595cb..00000000000 --- a/third-party/cbs/include/cbs/get_bits.h +++ /dev/null @@ -1,864 +0,0 @@ -/* - * Copyright (c) 2004 Michael Niedermayer - * Copyright (c) 2016 Alexandra Hájková - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * bitstream reader API header. - */ - -#ifndef AVCODEC_GET_BITS_H -#define AVCODEC_GET_BITS_H - -#include - -#include "libavutil/common.h" -#include "libavutil/intreadwrite.h" -#include "libavutil/avassert.h" - -#include "defs.h" -#include "mathops.h" -#include "vlc.h" - -/* - * Safe bitstream reading: - * optionally, the get_bits API can check to ensure that we - * don't read past input buffer boundaries. This is protected - * with CONFIG_SAFE_BITSTREAM_READER at the global level, and - * then below that with UNCHECKED_BITSTREAM_READER at the per- - * decoder level. This means that decoders that check internally - * can "#define UNCHECKED_BITSTREAM_READER 1" to disable - * overread checks. 
- * Boundary checking causes a minor performance penalty so for - * applications that won't want/need this, it can be disabled - * globally using "#define CONFIG_SAFE_BITSTREAM_READER 0". - */ -#ifndef UNCHECKED_BITSTREAM_READER -#define UNCHECKED_BITSTREAM_READER !CONFIG_SAFE_BITSTREAM_READER -#endif - -#ifndef CACHED_BITSTREAM_READER -#define CACHED_BITSTREAM_READER 0 -#endif - -typedef struct GetBitContext { - const uint8_t *buffer, *buffer_end; -#if CACHED_BITSTREAM_READER - uint64_t cache; - unsigned bits_left; -#endif - int index; - int size_in_bits; - int size_in_bits_plus8; -} GetBitContext; - -static inline unsigned int get_bits(GetBitContext *s, int n); -static inline void skip_bits(GetBitContext *s, int n); -static inline unsigned int show_bits(GetBitContext *s, int n); - -/* Bitstream reader API docs: - * name - * arbitrary name which is used as prefix for the internal variables - * - * gb - * getbitcontext - * - * OPEN_READER(name, gb) - * load gb into local variables - * - * CLOSE_READER(name, gb) - * store local vars in gb - * - * UPDATE_CACHE(name, gb) - * Refill the internal cache from the bitstream. - * After this call at least MIN_CACHE_BITS will be available. - * - * GET_CACHE(name, gb) - * Will output the contents of the internal cache, - * next bit is MSB of 32 or 64 bits (FIXME 64 bits). - * - * SHOW_UBITS(name, gb, num) - * Will return the next num bits. - * - * SHOW_SBITS(name, gb, num) - * Will return the next num bits and do sign extension. - * - * SKIP_BITS(name, gb, num) - * Will skip over the next num bits. - * Note, this is equivalent to SKIP_CACHE; SKIP_COUNTER. - * - * SKIP_CACHE(name, gb, num) - * Will remove the next num bits from the cache (note SKIP_COUNTER - * MUST be called before UPDATE_CACHE / CLOSE_READER). - * - * SKIP_COUNTER(name, gb, num) - * Will increment the internal bit counter (see SKIP_CACHE & SKIP_BITS). 
- * - * LAST_SKIP_BITS(name, gb, num) - * Like SKIP_BITS, to be used if next call is UPDATE_CACHE or CLOSE_READER. - * - * BITS_LEFT(name, gb) - * Return the number of bits left - * - * For examples see get_bits, show_bits, skip_bits, get_vlc. - */ - -#if CACHED_BITSTREAM_READER -# define MIN_CACHE_BITS 64 -#elif defined LONG_BITSTREAM_READER -# define MIN_CACHE_BITS 32 -#else -# define MIN_CACHE_BITS 25 -#endif - -#if !CACHED_BITSTREAM_READER - -#define OPEN_READER_NOSIZE(name, gb) \ - unsigned int name ## _index = (gb)->index; \ - unsigned int av_unused name ## _cache - -#if UNCHECKED_BITSTREAM_READER -#define OPEN_READER(name, gb) OPEN_READER_NOSIZE(name, gb) - -#define BITS_AVAILABLE(name, gb) 1 -#else -#define OPEN_READER(name, gb) \ - OPEN_READER_NOSIZE(name, gb); \ - unsigned int name ## _size_plus8 = (gb)->size_in_bits_plus8 - -#define BITS_AVAILABLE(name, gb) name ## _index < name ## _size_plus8 -#endif - -#define CLOSE_READER(name, gb) (gb)->index = name ## _index - -# ifdef LONG_BITSTREAM_READER - -# define UPDATE_CACHE_LE(name, gb) name ## _cache = \ - AV_RL64((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7) - -# define UPDATE_CACHE_BE(name, gb) name ## _cache = \ - AV_RB64((gb)->buffer + (name ## _index >> 3)) >> (32 - (name ## _index & 7)) - -#else - -# define UPDATE_CACHE_LE(name, gb) name ## _cache = \ - AV_RL32((gb)->buffer + (name ## _index >> 3)) >> (name ## _index & 7) - -# define UPDATE_CACHE_BE(name, gb) name ## _cache = \ - AV_RB32((gb)->buffer + (name ## _index >> 3)) << (name ## _index & 7) - -#endif - - -#ifdef BITSTREAM_READER_LE - -# define UPDATE_CACHE(name, gb) UPDATE_CACHE_LE(name, gb) - -# define SKIP_CACHE(name, gb, num) name ## _cache >>= (num) - -#else - -# define UPDATE_CACHE(name, gb) UPDATE_CACHE_BE(name, gb) - -# define SKIP_CACHE(name, gb, num) name ## _cache <<= (num) - -#endif - -#if UNCHECKED_BITSTREAM_READER -# define SKIP_COUNTER(name, gb, num) name ## _index += (num) -#else -# define SKIP_COUNTER(name, gb, 
num) \ - name ## _index = FFMIN(name ## _size_plus8, name ## _index + (num)) -#endif - -#define BITS_LEFT(name, gb) ((int)((gb)->size_in_bits - name ## _index)) - -#define SKIP_BITS(name, gb, num) \ - do { \ - SKIP_CACHE(name, gb, num); \ - SKIP_COUNTER(name, gb, num); \ - } while (0) - -#define LAST_SKIP_BITS(name, gb, num) SKIP_COUNTER(name, gb, num) - -#define SHOW_UBITS_LE(name, gb, num) zero_extend(name ## _cache, num) -#define SHOW_SBITS_LE(name, gb, num) sign_extend(name ## _cache, num) - -#define SHOW_UBITS_BE(name, gb, num) NEG_USR32(name ## _cache, num) -#define SHOW_SBITS_BE(name, gb, num) NEG_SSR32(name ## _cache, num) - -#ifdef BITSTREAM_READER_LE -# define SHOW_UBITS(name, gb, num) SHOW_UBITS_LE(name, gb, num) -# define SHOW_SBITS(name, gb, num) SHOW_SBITS_LE(name, gb, num) -#else -# define SHOW_UBITS(name, gb, num) SHOW_UBITS_BE(name, gb, num) -# define SHOW_SBITS(name, gb, num) SHOW_SBITS_BE(name, gb, num) -#endif - -#define GET_CACHE(name, gb) ((uint32_t) name ## _cache) - -#endif - -static inline int get_bits_count(const GetBitContext *s) -{ -#if CACHED_BITSTREAM_READER - return s->index - s->bits_left; -#else - return s->index; -#endif -} - -#if CACHED_BITSTREAM_READER -static inline void refill_32(GetBitContext *s, int is_le) -{ -#if !UNCHECKED_BITSTREAM_READER - if (s->index >> 3 >= s->buffer_end - s->buffer) - return; -#endif - - if (is_le) - s->cache = (uint64_t)AV_RL32(s->buffer + (s->index >> 3)) << s->bits_left | s->cache; - else - s->cache = s->cache | (uint64_t)AV_RB32(s->buffer + (s->index >> 3)) << (32 - s->bits_left); - s->index += 32; - s->bits_left += 32; -} - -static inline void refill_64(GetBitContext *s, int is_le) -{ -#if !UNCHECKED_BITSTREAM_READER - if (s->index >> 3 >= s->buffer_end - s->buffer) - return; -#endif - - if (is_le) - s->cache = AV_RL64(s->buffer + (s->index >> 3)); - else - s->cache = AV_RB64(s->buffer + (s->index >> 3)); - s->index += 64; - s->bits_left = 64; -} - -static inline uint64_t get_val(GetBitContext 
*s, unsigned n, int is_le) -{ - uint64_t ret; - av_assert2(n>0 && n<=63); - if (is_le) { - ret = s->cache & ((UINT64_C(1) << n) - 1); - s->cache >>= n; - } else { - ret = s->cache >> (64 - n); - s->cache <<= n; - } - s->bits_left -= n; - return ret; -} - -static inline unsigned show_val(const GetBitContext *s, unsigned n) -{ -#ifdef BITSTREAM_READER_LE - return s->cache & ((UINT64_C(1) << n) - 1); -#else - return s->cache >> (64 - n); -#endif -} -#endif - -/** - * Skips the specified number of bits. - * @param n the number of bits to skip, - * For the UNCHECKED_BITSTREAM_READER this must not cause the distance - * from the start to overflow int32_t. Staying within the bitstream + padding - * is sufficient, too. - */ -static inline void skip_bits_long(GetBitContext *s, int n) -{ -#if CACHED_BITSTREAM_READER - skip_bits(s, n); -#else -#if UNCHECKED_BITSTREAM_READER - s->index += n; -#else - s->index += av_clip(n, -s->index, s->size_in_bits_plus8 - s->index); -#endif -#endif -} - -#if CACHED_BITSTREAM_READER -static inline void skip_remaining(GetBitContext *s, unsigned n) -{ -#ifdef BITSTREAM_READER_LE - s->cache >>= n; -#else - s->cache <<= n; -#endif - s->bits_left -= n; -} -#endif - -/** - * Read MPEG-1 dc-style VLC (sign bit + mantissa with no MSB). 
- * if MSB not set it is negative - * @param n length in bits - */ -static inline int get_xbits(GetBitContext *s, int n) -{ -#if CACHED_BITSTREAM_READER - int32_t cache = show_bits(s, 32); - int sign = ~cache >> 31; - skip_remaining(s, n); - - return ((((uint32_t)(sign ^ cache)) >> (32 - n)) ^ sign) - sign; -#else - // [sunshine] Removed register specifier, incompatible with C++17 - int sign; - int32_t cache; - OPEN_READER(re, s); - av_assert2(n>0 && n<=25); - UPDATE_CACHE(re, s); - cache = GET_CACHE(re, s); - sign = ~cache >> 31; - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); - return (NEG_USR32(sign ^ cache, n) ^ sign) - sign; -#endif -} - -#if !CACHED_BITSTREAM_READER -static inline int get_xbits_le(GetBitContext *s, int n) -{ - // [sunshine] Removed register specifier, incompatible with C++17 - int sign; - int32_t cache; - OPEN_READER(re, s); - av_assert2(n>0 && n<=25); - UPDATE_CACHE_LE(re, s); - cache = GET_CACHE(re, s); - sign = sign_extend(~cache, n) >> 31; - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); - return (zero_extend(sign ^ cache, n) ^ sign) - sign; -} -#endif - -static inline int get_sbits(GetBitContext *s, int n) -{ - // [sunshine] Removed register specifier, incompatible with C++17 - int tmp; -#if CACHED_BITSTREAM_READER - av_assert2(n>0 && n<=25); - tmp = sign_extend(get_bits(s, n), n); -#else - OPEN_READER(re, s); - av_assert2(n>0 && n<=25); - UPDATE_CACHE(re, s); - tmp = SHOW_SBITS(re, s, n); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); -#endif - return tmp; -} - -/** - * Read 1-25 bits. 
- */ -static inline unsigned int get_bits(GetBitContext *s, int n) -{ - // [sunshine] Removed register specifier, incompatible with C++17 - unsigned int tmp; -#if CACHED_BITSTREAM_READER - - av_assert2(n>0 && n<=32); - if (n > s->bits_left) { -#ifdef BITSTREAM_READER_LE - refill_32(s, 1); -#else - refill_32(s, 0); -#endif - if (s->bits_left < 32) - s->bits_left = n; - } - -#ifdef BITSTREAM_READER_LE - tmp = get_val(s, n, 1); -#else - tmp = get_val(s, n, 0); -#endif -#else - OPEN_READER(re, s); - av_assert2(n>0 && n<=25); - UPDATE_CACHE(re, s); - tmp = SHOW_UBITS(re, s, n); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); -#endif - av_assert2(tmp < UINT64_C(1) << n); - return tmp; -} - -/** - * Read 0-25 bits. - */ -static av_always_inline int get_bitsz(GetBitContext *s, int n) -{ - return n ? get_bits(s, n) : 0; -} - -static inline unsigned int get_bits_le(GetBitContext *s, int n) -{ -#if CACHED_BITSTREAM_READER - av_assert2(n>0 && n<=32); - if (n > s->bits_left) { - refill_32(s, 1); - if (s->bits_left < 32) - s->bits_left = n; - } - - return get_val(s, n, 1); -#else - // [sunshine] Removed register specifier, incompatible with C++17 - int tmp; - OPEN_READER(re, s); - av_assert2(n>0 && n<=25); - UPDATE_CACHE_LE(re, s); - tmp = SHOW_UBITS_LE(re, s, n); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); - return tmp; -#endif -} - -/** - * Show 1-25 bits. 
- */ -static inline unsigned int show_bits(GetBitContext *s, int n) -{ - // [sunshine] Removed register specifier, incompatible with C++17 - unsigned int tmp; -#if CACHED_BITSTREAM_READER - if (n > s->bits_left) -#ifdef BITSTREAM_READER_LE - refill_32(s, 1); -#else - refill_32(s, 0); -#endif - - tmp = show_val(s, n); -#else - OPEN_READER_NOSIZE(re, s); - av_assert2(n>0 && n<=25); - UPDATE_CACHE(re, s); - tmp = SHOW_UBITS(re, s, n); -#endif - return tmp; -} - -static inline void skip_bits(GetBitContext *s, int n) -{ -#if CACHED_BITSTREAM_READER - if (n < s->bits_left) - skip_remaining(s, n); - else { - n -= s->bits_left; - s->cache = 0; - s->bits_left = 0; - - if (n >= 64) { - unsigned skip = (n / 8) * 8; - - n -= skip; - s->index += skip; - } -#ifdef BITSTREAM_READER_LE - refill_64(s, 1); -#else - refill_64(s, 0); -#endif - if (n) - skip_remaining(s, n); - } -#else - OPEN_READER(re, s); - LAST_SKIP_BITS(re, s, n); - CLOSE_READER(re, s); -#endif -} - -static inline unsigned int get_bits1(GetBitContext *s) -{ -#if CACHED_BITSTREAM_READER - if (!s->bits_left) -#ifdef BITSTREAM_READER_LE - refill_64(s, 1); -#else - refill_64(s, 0); -#endif - -#ifdef BITSTREAM_READER_LE - return get_val(s, 1, 1); -#else - return get_val(s, 1, 0); -#endif -#else - unsigned int index = s->index; - uint8_t result = s->buffer[index >> 3]; -#ifdef BITSTREAM_READER_LE - result >>= index & 7; - result &= 1; -#else - result <<= index & 7; - result >>= 8 - 1; -#endif -#if !UNCHECKED_BITSTREAM_READER - if (s->index < s->size_in_bits_plus8) -#endif - index++; - s->index = index; - - return result; -#endif -} - -static inline unsigned int show_bits1(GetBitContext *s) -{ - return show_bits(s, 1); -} - -static inline void skip_bits1(GetBitContext *s) -{ - skip_bits(s, 1); -} - -/** - * Read 0-32 bits. 
- */ -static inline unsigned int get_bits_long(GetBitContext *s, int n) -{ - av_assert2(n>=0 && n<=32); - if (!n) { - return 0; -#if CACHED_BITSTREAM_READER - } - return get_bits(s, n); -#else - } else if (n <= MIN_CACHE_BITS) { - return get_bits(s, n); - } else { -#ifdef BITSTREAM_READER_LE - unsigned ret = get_bits(s, 16); - return ret | (get_bits(s, n - 16) << 16); -#else - unsigned ret = get_bits(s, 16) << (n - 16); - return ret | get_bits(s, n - 16); -#endif - } -#endif -} - -/** - * Read 0-64 bits. - */ -static inline uint64_t get_bits64(GetBitContext *s, int n) -{ - if (n <= 32) { - return get_bits_long(s, n); - } else { -#ifdef BITSTREAM_READER_LE - uint64_t ret = get_bits_long(s, 32); - return ret | (uint64_t) get_bits_long(s, n - 32) << 32; -#else - uint64_t ret = (uint64_t) get_bits_long(s, n - 32) << 32; - return ret | get_bits_long(s, 32); -#endif - } -} - -/** - * Read 0-32 bits as a signed integer. - */ -static inline int get_sbits_long(GetBitContext *s, int n) -{ - // sign_extend(x, 0) is undefined - if (!n) - return 0; - - return sign_extend(get_bits_long(s, n), n); -} - -/** - * Show 0-32 bits. - */ -static inline unsigned int show_bits_long(GetBitContext *s, int n) -{ - if (n <= MIN_CACHE_BITS) { - return show_bits(s, n); - } else { - GetBitContext gb = *s; - return get_bits_long(&gb, n); - } -} - -static inline int init_get_bits_xe(GetBitContext *s, const uint8_t *buffer, - int bit_size, int is_le) -{ - int buffer_size; - int ret = 0; - - if (bit_size >= INT_MAX - FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE*8) || bit_size < 0 || !buffer) { - bit_size = 0; - buffer = NULL; - ret = AVERROR_INVALIDDATA; - } - - buffer_size = (bit_size + 7) >> 3; - - s->buffer = buffer; - s->size_in_bits = bit_size; - s->size_in_bits_plus8 = bit_size + 8; - s->buffer_end = buffer + buffer_size; - s->index = 0; - -#if CACHED_BITSTREAM_READER - s->cache = 0; - s->bits_left = 0; - refill_64(s, is_le); -#endif - - return ret; -} - -/** - * Initialize GetBitContext. 
- * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes - * larger than the actual read bits because some optimized bitstream - * readers read 32 or 64 bit at once and could read over the end - * @param bit_size the size of the buffer in bits - * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. - */ -static inline int init_get_bits(GetBitContext *s, const uint8_t *buffer, - int bit_size) -{ -#ifdef BITSTREAM_READER_LE - return init_get_bits_xe(s, buffer, bit_size, 1); -#else - return init_get_bits_xe(s, buffer, bit_size, 0); -#endif -} - -/** - * Initialize GetBitContext. - * @param buffer bitstream buffer, must be AV_INPUT_BUFFER_PADDING_SIZE bytes - * larger than the actual read bits because some optimized bitstream - * readers read 32 or 64 bit at once and could read over the end - * @param byte_size the size of the buffer in bytes - * @return 0 on success, AVERROR_INVALIDDATA if the buffer_size would overflow. - */ -static inline int init_get_bits8(GetBitContext *s, const uint8_t *buffer, - int byte_size) -{ - if (byte_size > INT_MAX / 8 || byte_size < 0) - byte_size = -1; - return init_get_bits(s, buffer, byte_size * 8); -} - -static inline int init_get_bits8_le(GetBitContext *s, const uint8_t *buffer, - int byte_size) -{ - if (byte_size > INT_MAX / 8 || byte_size < 0) - byte_size = -1; - return init_get_bits_xe(s, buffer, byte_size * 8, 1); -} - -static inline const uint8_t *align_get_bits(GetBitContext *s) -{ - int n = -get_bits_count(s) & 7; - if (n) - skip_bits(s, n); - return s->buffer + (s->index >> 3); -} - -/** - * If the vlc code is invalid and max_depth=1, then no bits will be removed. - * If the vlc code is invalid and max_depth>1, then the number of bits removed - * is undefined. 
- */ -#define GET_VLC(code, name, gb, table, bits, max_depth) \ - do { \ - int n, nb_bits; \ - unsigned int index; \ - \ - index = SHOW_UBITS(name, gb, bits); \ - code = table[index].sym; \ - n = table[index].len; \ - \ - if (max_depth > 1 && n < 0) { \ - LAST_SKIP_BITS(name, gb, bits); \ - UPDATE_CACHE(name, gb); \ - \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + code; \ - code = table[index].sym; \ - n = table[index].len; \ - if (max_depth > 2 && n < 0) { \ - LAST_SKIP_BITS(name, gb, nb_bits); \ - UPDATE_CACHE(name, gb); \ - \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + code; \ - code = table[index].sym; \ - n = table[index].len; \ - } \ - } \ - SKIP_BITS(name, gb, n); \ - } while (0) - -#define GET_RL_VLC(level, run, name, gb, table, bits, \ - max_depth, need_update) \ - do { \ - int n, nb_bits; \ - unsigned int index; \ - \ - index = SHOW_UBITS(name, gb, bits); \ - level = table[index].level; \ - n = table[index].len; \ - \ - if (max_depth > 1 && n < 0) { \ - SKIP_BITS(name, gb, bits); \ - if (need_update) { \ - UPDATE_CACHE(name, gb); \ - } \ - \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + level; \ - level = table[index].level; \ - n = table[index].len; \ - if (max_depth > 2 && n < 0) { \ - LAST_SKIP_BITS(name, gb, nb_bits); \ - if (need_update) { \ - UPDATE_CACHE(name, gb); \ - } \ - nb_bits = -n; \ - \ - index = SHOW_UBITS(name, gb, nb_bits) + level; \ - level = table[index].level; \ - n = table[index].len; \ - } \ - } \ - run = table[index].run; \ - SKIP_BITS(name, gb, n); \ - } while (0) - -/* Return the LUT element for the given bitstream configuration. */ -static inline int set_idx(GetBitContext *s, int code, int *n, int *nb_bits, - const VLCElem *table) -{ - unsigned idx; - - *nb_bits = -*n; - idx = show_bits(s, *nb_bits) + code; - *n = table[idx].len; - - return table[idx].sym; -} - -/** - * Parse a vlc code. 
- * @param bits is the number of bits which will be read at once, must be - * identical to nb_bits in init_vlc() - * @param max_depth is the number of times bits bits must be read to completely - * read the longest vlc code - * = (max_vlc_length + bits - 1) / bits - * @returns the code parsed or -1 if no vlc matches - */ -static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, - int bits, int max_depth) -{ -#if CACHED_BITSTREAM_READER - int nb_bits; - unsigned idx = show_bits(s, bits); - int code = table[idx].sym; - int n = table[idx].len; - - if (max_depth > 1 && n < 0) { - skip_remaining(s, bits); - code = set_idx(s, code, &n, &nb_bits, table); - if (max_depth > 2 && n < 0) { - skip_remaining(s, nb_bits); - code = set_idx(s, code, &n, &nb_bits, table); - } - } - skip_remaining(s, n); - - return code; -#else - int code; - - OPEN_READER(re, s); - UPDATE_CACHE(re, s); - - GET_VLC(code, re, s, table, bits, max_depth); - - CLOSE_READER(re, s); - - return code; -#endif -} - -static inline int decode012(GetBitContext *gb) -{ - int n; - n = get_bits1(gb); - if (n == 0) - return 0; - else - return get_bits1(gb) + 1; -} - -static inline int decode210(GetBitContext *gb) -{ - if (get_bits1(gb)) - return 0; - else - return 2 - get_bits1(gb); -} - -static inline int get_bits_left(GetBitContext *gb) -{ - return gb->size_in_bits - get_bits_count(gb); -} - -static inline int skip_1stop_8data_bits(GetBitContext *gb) -{ - if (get_bits_left(gb) <= 0) - return AVERROR_INVALIDDATA; - - while (get_bits1(gb)) { - skip_bits(gb, 8); - if (get_bits_left(gb) <= 0) - return AVERROR_INVALIDDATA; - } - - return 0; -} - -#endif /* AVCODEC_GET_BITS_H */ diff --git a/third-party/cbs/include/cbs/h264.h b/third-party/cbs/include/cbs/h264.h deleted file mode 100644 index 7a1fb6d6879..00000000000 --- a/third-party/cbs/include/cbs/h264.h +++ /dev/null @@ -1,113 +0,0 @@ -/* - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * H.264 common definitions - */ - -#ifndef AVCODEC_H264_H -#define AVCODEC_H264_H - -#define QP_MAX_NUM (51 + 6*6) // The maximum supported qp - -/* - * Table 7-1 – NAL unit type codes, syntax element categories, and NAL unit type classes in - * T-REC-H.264-201704 - */ -enum { - H264_NAL_UNSPECIFIED = 0, - H264_NAL_SLICE = 1, - H264_NAL_DPA = 2, - H264_NAL_DPB = 3, - H264_NAL_DPC = 4, - H264_NAL_IDR_SLICE = 5, - H264_NAL_SEI = 6, - H264_NAL_SPS = 7, - H264_NAL_PPS = 8, - H264_NAL_AUD = 9, - H264_NAL_END_SEQUENCE = 10, - H264_NAL_END_STREAM = 11, - H264_NAL_FILLER_DATA = 12, - H264_NAL_SPS_EXT = 13, - H264_NAL_PREFIX = 14, - H264_NAL_SUB_SPS = 15, - H264_NAL_DPS = 16, - H264_NAL_RESERVED17 = 17, - H264_NAL_RESERVED18 = 18, - H264_NAL_AUXILIARY_SLICE = 19, - H264_NAL_EXTEN_SLICE = 20, - H264_NAL_DEPTH_EXTEN_SLICE = 21, - H264_NAL_RESERVED22 = 22, - H264_NAL_RESERVED23 = 23, - H264_NAL_UNSPECIFIED24 = 24, - H264_NAL_UNSPECIFIED25 = 25, - H264_NAL_UNSPECIFIED26 = 26, - H264_NAL_UNSPECIFIED27 = 27, - H264_NAL_UNSPECIFIED28 = 28, - H264_NAL_UNSPECIFIED29 = 29, - H264_NAL_UNSPECIFIED30 = 30, - H264_NAL_UNSPECIFIED31 = 31, -}; - - -enum { - // 7.4.2.1.1: seq_parameter_set_id is in [0, 31]. 
- H264_MAX_SPS_COUNT = 32, - // 7.4.2.2: pic_parameter_set_id is in [0, 255]. - H264_MAX_PPS_COUNT = 256, - - // A.3: MaxDpbFrames is bounded above by 16. - H264_MAX_DPB_FRAMES = 16, - // 7.4.2.1.1: max_num_ref_frames is in [0, MaxDpbFrames], and - // each reference frame can have two fields. - H264_MAX_REFS = 2 * H264_MAX_DPB_FRAMES, - - // 7.4.3.1: modification_of_pic_nums_idc is not equal to 3 at most - // num_ref_idx_lN_active_minus1 + 1 times (that is, once for each - // possible reference), then equal to 3 once. - H264_MAX_RPLM_COUNT = H264_MAX_REFS + 1, - - // 7.4.3.3: in the worst case, we begin with a full short-term - // reference picture list. Each picture in turn is moved to the - // long-term list (type 3) and then discarded from there (type 2). - // Then, we set the length of the long-term list (type 4), mark - // the current picture as long-term (type 6) and terminate the - // process (type 0). - H264_MAX_MMCO_COUNT = H264_MAX_REFS * 2 + 3, - - // A.2.1, A.2.3: profiles supporting FMO constrain - // num_slice_groups_minus1 to be in [0, 7]. - H264_MAX_SLICE_GROUPS = 8, - - // E.2.2: cpb_cnt_minus1 is in [0, 31]. - H264_MAX_CPB_CNT = 32, - - // A.3: in table A-1 the highest level allows a MaxFS of 139264. - H264_MAX_MB_PIC_SIZE = 139264, - // A.3.1, A.3.2: PicWidthInMbs and PicHeightInMbs are constrained - // to be not greater than sqrt(MaxFS * 8). Hence height/width are - // bounded above by sqrt(139264 * 8) = 1055.5 macroblocks. - H264_MAX_MB_WIDTH = 1055, - H264_MAX_MB_HEIGHT = 1055, - H264_MAX_WIDTH = H264_MAX_MB_WIDTH * 16, - H264_MAX_HEIGHT = H264_MAX_MB_HEIGHT * 16, -}; - - -#endif /* AVCODEC_H264_H */ diff --git a/third-party/cbs/include/cbs/h2645_parse.h b/third-party/cbs/include/cbs/h2645_parse.h deleted file mode 100644 index 787ce971ee4..00000000000 --- a/third-party/cbs/include/cbs/h2645_parse.h +++ /dev/null @@ -1,139 +0,0 @@ -/* - * H.264/HEVC common parsing code - * - * This file is part of FFmpeg. 
- * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_H2645_PARSE_H -#define AVCODEC_H2645_PARSE_H - -#include - -#include "libavutil/buffer.h" -#include "libavutil/error.h" -#include "libavutil/log.h" -#include "codec_id.h" -#include "get_bits.h" - -#define MAX_MBPAIR_SIZE (256*1024) // a tighter bound could be calculated if someone cares about a few bytes - -typedef struct H2645NAL { - const uint8_t *data; - int size; - - /** - * Size, in bits, of just the data, excluding the stop bit and any trailing - * padding. I.e. what HEVC calls SODB. 
- */ - int size_bits; - - int raw_size; - const uint8_t *raw_data; - - GetBitContext gb; - - /** - * NAL unit type - */ - int type; - - /** - * H.264 only, nal_ref_idc - */ - int ref_idc; - - /** - * HEVC only, nuh_temporal_id_plus_1 - 1 - */ - int temporal_id; - - /* - * HEVC only, identifier of layer to which nal unit belongs - */ - int nuh_layer_id; - - int skipped_bytes; - int skipped_bytes_pos_size; - int *skipped_bytes_pos; -} H2645NAL; - -typedef struct H2645RBSP { - uint8_t *rbsp_buffer; - AVBufferRef *rbsp_buffer_ref; - int rbsp_buffer_alloc_size; - int rbsp_buffer_size; -} H2645RBSP; - -/* an input packet split into unescaped NAL units */ -typedef struct H2645Packet { - H2645NAL *nals; - H2645RBSP rbsp; - int nb_nals; - int nals_allocated; - unsigned nal_buffer_size; -} H2645Packet; - -/** - * Extract the raw (unescaped) bitstream. - */ -int ff_h2645_extract_rbsp(const uint8_t *src, int length, H2645RBSP *rbsp, - H2645NAL *nal, int small_padding); - -/** - * Split an input packet into NAL units. - * - * If data == raw_data holds true for a NAL unit of the returned pkt, then - * said NAL unit does not contain any emulation_prevention_three_byte and - * the data is contained in the input buffer pointed to by buf. - * Otherwise, the unescaped data is part of the rbsp_buffer described by the - * packet's H2645RBSP. - * - * If the packet's rbsp_buffer_ref is not NULL, the underlying AVBuffer must - * own rbsp_buffer. If not and rbsp_buffer is not NULL, use_ref must be 0. - * If use_ref is set, rbsp_buffer will be reference-counted and owned by - * the underlying AVBuffer of rbsp_buffer_ref. - */ -int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, - void *logctx, int is_nalff, int nal_length_size, - enum AVCodecID codec_id, int small_padding, int use_ref); - -/** - * Free all the allocated memory in the packet. 
- */ -void ff_h2645_packet_uninit(H2645Packet *pkt); - -static inline int get_nalsize(int nal_length_size, const uint8_t *buf, - int buf_size, int *buf_index, void *logctx) -{ - int i, nalsize = 0; - - if (*buf_index >= buf_size - nal_length_size) { - // the end of the buffer is reached, refill it - return AVERROR(EAGAIN); - } - - for (i = 0; i < nal_length_size; i++) - nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++]; - if (nalsize <= 0 || nalsize > buf_size - *buf_index) { - av_log(logctx, AV_LOG_ERROR, - "Invalid NAL unit size (%d > %d).\n", nalsize, buf_size - *buf_index); - return AVERROR_INVALIDDATA; - } - return nalsize; -} - -#endif /* AVCODEC_H2645_PARSE_H */ diff --git a/third-party/cbs/include/cbs/h264_levels.h b/third-party/cbs/include/cbs/h264_levels.h deleted file mode 100644 index 310d79e51a2..00000000000 --- a/third-party/cbs/include/cbs/h264_levels.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_H264_LEVELS_H -#define AVCODEC_H264_LEVELS_H - - -#include - -typedef struct H264LevelDescriptor { - char name[4]; // Large enough for all current levels like "4.1" - uint8_t level_idc; - uint8_t constraint_set3_flag; - uint32_t max_mbps; - uint32_t max_fs; - uint32_t max_dpb_mbs; - uint32_t max_br; - uint32_t max_cpb; - uint16_t max_v_mv_r; - uint8_t min_cr; - uint8_t max_mvs_per_2mb; -} H264LevelDescriptor; - -/** - * Guess the level of a stream from some parameters. - * - * Unknown parameters may be zero, in which case they are ignored. - */ -const H264LevelDescriptor *ff_h264_guess_level(int profile_idc, - int64_t bitrate, - int framerate, - int width, int height, - int max_dec_frame_buffering); - - -#endif /* AVCODEC_H264_LEVELS_H */ diff --git a/third-party/cbs/include/cbs/hevc.h b/third-party/cbs/include/cbs/hevc.h deleted file mode 100644 index 1804755327e..00000000000 --- a/third-party/cbs/include/cbs/hevc.h +++ /dev/null @@ -1,160 +0,0 @@ -/* - * HEVC shared code - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_HEVC_H -#define AVCODEC_HEVC_H - -/** - * Table 7-1 – NAL unit type codes and NAL unit type classes in - * T-REC-H.265-201802 - */ -enum HEVCNALUnitType { - HEVC_NAL_TRAIL_N = 0, - HEVC_NAL_TRAIL_R = 1, - HEVC_NAL_TSA_N = 2, - HEVC_NAL_TSA_R = 3, - HEVC_NAL_STSA_N = 4, - HEVC_NAL_STSA_R = 5, - HEVC_NAL_RADL_N = 6, - HEVC_NAL_RADL_R = 7, - HEVC_NAL_RASL_N = 8, - HEVC_NAL_RASL_R = 9, - HEVC_NAL_VCL_N10 = 10, - HEVC_NAL_VCL_R11 = 11, - HEVC_NAL_VCL_N12 = 12, - HEVC_NAL_VCL_R13 = 13, - HEVC_NAL_VCL_N14 = 14, - HEVC_NAL_VCL_R15 = 15, - HEVC_NAL_BLA_W_LP = 16, - HEVC_NAL_BLA_W_RADL = 17, - HEVC_NAL_BLA_N_LP = 18, - HEVC_NAL_IDR_W_RADL = 19, - HEVC_NAL_IDR_N_LP = 20, - HEVC_NAL_CRA_NUT = 21, - HEVC_NAL_RSV_IRAP_VCL22 = 22, - HEVC_NAL_RSV_IRAP_VCL23 = 23, - HEVC_NAL_RSV_VCL24 = 24, - HEVC_NAL_RSV_VCL25 = 25, - HEVC_NAL_RSV_VCL26 = 26, - HEVC_NAL_RSV_VCL27 = 27, - HEVC_NAL_RSV_VCL28 = 28, - HEVC_NAL_RSV_VCL29 = 29, - HEVC_NAL_RSV_VCL30 = 30, - HEVC_NAL_RSV_VCL31 = 31, - HEVC_NAL_VPS = 32, - HEVC_NAL_SPS = 33, - HEVC_NAL_PPS = 34, - HEVC_NAL_AUD = 35, - HEVC_NAL_EOS_NUT = 36, - HEVC_NAL_EOB_NUT = 37, - HEVC_NAL_FD_NUT = 38, - HEVC_NAL_SEI_PREFIX = 39, - HEVC_NAL_SEI_SUFFIX = 40, - HEVC_NAL_RSV_NVCL41 = 41, - HEVC_NAL_RSV_NVCL42 = 42, - HEVC_NAL_RSV_NVCL43 = 43, - HEVC_NAL_RSV_NVCL44 = 44, - HEVC_NAL_RSV_NVCL45 = 45, - HEVC_NAL_RSV_NVCL46 = 46, - HEVC_NAL_RSV_NVCL47 = 47, - HEVC_NAL_UNSPEC48 = 48, - HEVC_NAL_UNSPEC49 = 49, - HEVC_NAL_UNSPEC50 = 50, - HEVC_NAL_UNSPEC51 = 51, - HEVC_NAL_UNSPEC52 = 52, - HEVC_NAL_UNSPEC53 = 53, - HEVC_NAL_UNSPEC54 = 54, - HEVC_NAL_UNSPEC55 = 55, - HEVC_NAL_UNSPEC56 = 56, - HEVC_NAL_UNSPEC57 = 57, - HEVC_NAL_UNSPEC58 = 58, - HEVC_NAL_UNSPEC59 = 59, - HEVC_NAL_UNSPEC60 = 60, - HEVC_NAL_UNSPEC61 = 61, - 
HEVC_NAL_UNSPEC62 = 62, - HEVC_NAL_UNSPEC63 = 63, -}; - -enum HEVCSliceType { - HEVC_SLICE_B = 0, - HEVC_SLICE_P = 1, - HEVC_SLICE_I = 2, -}; - -enum { - // 7.4.3.1: vps_max_layers_minus1 is in [0, 62]. - HEVC_MAX_LAYERS = 63, - // 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6]. - HEVC_MAX_SUB_LAYERS = 7, - // 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023]. - HEVC_MAX_LAYER_SETS = 1024, - - // 7.4.2.1: vps_video_parameter_set_id is u(4). - HEVC_MAX_VPS_COUNT = 16, - // 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15]. - HEVC_MAX_SPS_COUNT = 16, - // 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63]. - HEVC_MAX_PPS_COUNT = 64, - - // A.4.2: MaxDpbSize is bounded above by 16. - HEVC_MAX_DPB_SIZE = 16, - // 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1]. - HEVC_MAX_REFS = HEVC_MAX_DPB_SIZE, - - // 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64]. - HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64, - // 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32]. - HEVC_MAX_LONG_TERM_REF_PICS = 32, - - // A.3: all profiles require that CtbLog2SizeY is in [4, 6]. - HEVC_MIN_LOG2_CTB_SIZE = 4, - HEVC_MAX_LOG2_CTB_SIZE = 6, - - // E.3.2: cpb_cnt_minus1[i] is in [0, 31]. - HEVC_MAX_CPB_CNT = 32, - - // A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584. - HEVC_MAX_LUMA_PS = 35651584, - // A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are - // constrained to be not greater than sqrt(MaxLumaPs * 8). Hence height/ - // width are bounded above by sqrt(8 * 35651584) = 16888.2 samples. - HEVC_MAX_WIDTH = 16888, - HEVC_MAX_HEIGHT = 16888, - - // A.4.1: table A.6 allows at most 22 tile rows for any level. - HEVC_MAX_TILE_ROWS = 22, - // A.4.1: table A.6 allows at most 20 tile columns for any level. - HEVC_MAX_TILE_COLUMNS = 20, - - // A.4.2: table A.6 allows at most 600 slice segments for any level. 
- HEVC_MAX_SLICE_SEGMENTS = 600, - - // 7.4.7.1: in the worst case (tiles_enabled_flag and - // entropy_coding_sync_enabled_flag are both set), entry points can be - // placed at the beginning of every Ctb row in every tile, giving an - // upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1. - // Only a stream with very high resolution and perverse parameters could - // get near that, though, so set a lower limit here with the maximum - // possible value for 4K video (at most 135 16x16 Ctb rows). - HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135, -}; - - -#endif /* AVCODEC_HEVC_H */ diff --git a/third-party/cbs/include/cbs/mathops.h b/third-party/cbs/include/cbs/mathops.h deleted file mode 100644 index f81d21f9c47..00000000000 --- a/third-party/cbs/include/cbs/mathops.h +++ /dev/null @@ -1,245 +0,0 @@ -/* - * simple math operations - * Copyright (c) 2001, 2002 Fabrice Bellard - * Copyright (c) 2006 Michael Niedermayer et al - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ -#ifndef AVCODEC_MATHOPS_H -#define AVCODEC_MATHOPS_H - -#include - -#include "libavutil/common.h" -#include "config.h" - -#define MAX_NEG_CROP 1024 - -extern const uint32_t ff_inverse[257]; -extern const uint8_t ff_log2_run[41]; -extern const uint8_t ff_sqrt_tab[256]; -extern const uint8_t ff_crop_tab[256 + 2 * MAX_NEG_CROP]; -extern const uint8_t ff_zigzag_direct[64]; -extern const uint8_t ff_zigzag_scan[16+1]; - -#if ARCH_ARM -# include "arm/mathops.h" -#elif ARCH_AVR32 -# include "avr32/mathops.h" -#elif ARCH_MIPS -# include "mips/mathops.h" -#elif ARCH_PPC -# include "ppc/mathops.h" -#elif ARCH_X86 -# include "x86/mathops.h" -#endif - -/* generic implementation */ - -#ifndef MUL64 -# define MUL64(a,b) ((int64_t)(a) * (int64_t)(b)) -#endif - -#ifndef MULL -# define MULL(a,b,s) (MUL64(a, b) >> (s)) -#endif - -#ifndef MULH -static av_always_inline int MULH(int a, int b){ - return MUL64(a, b) >> 32; -} -#endif - -#ifndef UMULH -static av_always_inline unsigned UMULH(unsigned a, unsigned b){ - return ((uint64_t)(a) * (uint64_t)(b))>>32; -} -#endif - -#ifndef MAC64 -# define MAC64(d, a, b) ((d) += MUL64(a, b)) -#endif - -#ifndef MLS64 -# define MLS64(d, a, b) ((d) -= MUL64(a, b)) -#endif - -/* signed 16x16 -> 32 multiply add accumulate */ -#ifndef MAC16 -# define MAC16(rt, ra, rb) rt += (ra) * (rb) -#endif - -/* signed 16x16 -> 32 multiply */ -#ifndef MUL16 -# define MUL16(ra, rb) ((ra) * (rb)) -#endif - -#ifndef MLS16 -# define MLS16(rt, ra, rb) ((rt) -= (ra) * (rb)) -#endif - -/* median of 3 */ -#ifndef mid_pred -#define mid_pred mid_pred -static inline av_const int mid_pred(int a, int b, int c) -{ - if(a>b){ - if(c>b){ - if(c>a) b=a; - else b=c; - } - }else{ - if(b>c){ - if(c>a) b=c; - else b=a; - } - } - return b; -} -#endif - -#ifndef 
median4 -#define median4 median4 -static inline av_const int median4(int a, int b, int c, int d) -{ - if (a < b) { - if (c < d) return (FFMIN(b, d) + FFMAX(a, c)) / 2; - else return (FFMIN(b, c) + FFMAX(a, d)) / 2; - } else { - if (c < d) return (FFMIN(a, d) + FFMAX(b, c)) / 2; - else return (FFMIN(a, c) + FFMAX(b, d)) / 2; - } -} -#endif - -#define FF_SIGNBIT(x) ((x) >> CHAR_BIT * sizeof(x) - 1) - -#ifndef sign_extend -static inline av_const int sign_extend(int val, unsigned bits) -{ - unsigned shift = 8 * sizeof(int) - bits; - union { unsigned u; int s; } v = { (unsigned) val << shift }; - return v.s >> shift; -} -#endif - -#ifndef zero_extend -static inline av_const unsigned zero_extend(unsigned val, unsigned bits) -{ - return (val << ((8 * sizeof(int)) - bits)) >> ((8 * sizeof(int)) - bits); -} -#endif - -#ifndef COPY3_IF_LT -#define COPY3_IF_LT(x, y, a, b, c, d)\ -if ((y) < (x)) {\ - (x) = (y);\ - (a) = (b);\ - (c) = (d);\ -} -#endif - -#ifndef MASK_ABS -#define MASK_ABS(mask, level) do { \ - mask = level >> 31; \ - level = (level ^ mask) - mask; \ - } while (0) -#endif - -#ifndef NEG_SSR32 -# define NEG_SSR32(a,s) ((( int32_t)(a))>>(32-(s))) -#endif - -#ifndef NEG_USR32 -# define NEG_USR32(a,s) (((uint32_t)(a))>>(32-(s))) -#endif - -#if HAVE_BIGENDIAN -# ifndef PACK_2U8 -# define PACK_2U8(a,b) (((a) << 8) | (b)) -# endif -# ifndef PACK_4U8 -# define PACK_4U8(a,b,c,d) (((a) << 24) | ((b) << 16) | ((c) << 8) | (d)) -# endif -# ifndef PACK_2U16 -# define PACK_2U16(a,b) (((a) << 16) | (b)) -# endif -#else -# ifndef PACK_2U8 -# define PACK_2U8(a,b) (((b) << 8) | (a)) -# endif -# ifndef PACK_4U2 -# define PACK_4U8(a,b,c,d) (((d) << 24) | ((c) << 16) | ((b) << 8) | (a)) -# endif -# ifndef PACK_2U16 -# define PACK_2U16(a,b) (((b) << 16) | (a)) -# endif -#endif - -#ifndef PACK_2S8 -# define PACK_2S8(a,b) PACK_2U8((a)&255, (b)&255) -#endif -#ifndef PACK_4S8 -# define PACK_4S8(a,b,c,d) PACK_4U8((a)&255, (b)&255, (c)&255, (d)&255) -#endif -#ifndef PACK_2S16 -# define 
PACK_2S16(a,b) PACK_2U16((a)&0xffff, (b)&0xffff) -#endif - -#ifndef FASTDIV -# define FASTDIV(a,b) ((uint32_t)((((uint64_t)a) * ff_inverse[b]) >> 32)) -#endif /* FASTDIV */ - -#ifndef ff_sqrt -#define ff_sqrt ff_sqrt -static inline av_const unsigned int ff_sqrt(unsigned int a) -{ - unsigned int b; - - if (a < 255) return (ff_sqrt_tab[a + 1] - 1) >> 4; - else if (a < (1 << 12)) b = ff_sqrt_tab[a >> 4] >> 2; -#if !CONFIG_SMALL - else if (a < (1 << 14)) b = ff_sqrt_tab[a >> 6] >> 1; - else if (a < (1 << 16)) b = ff_sqrt_tab[a >> 8] ; -#endif - else { - int s = av_log2_16bit(a >> 16) >> 1; - unsigned int c = a >> (s + 2); - b = ff_sqrt_tab[c >> (s + 8)]; - b = FASTDIV(c,b) + (b << s); - } - - return b - (a < b * b); -} -#endif - -static inline av_const float ff_sqrf(float a) -{ - return a*a; -} - -static inline int8_t ff_u8_to_s8(uint8_t a) -{ - union { - uint8_t u8; - int8_t s8; - } b; - b.u8 = a; - return b.s8; -} - -#endif /* AVCODEC_MATHOPS_H */ diff --git a/third-party/cbs/include/cbs/packet.h b/third-party/cbs/include/cbs/packet.h deleted file mode 100644 index 404d520071e..00000000000 --- a/third-party/cbs/include/cbs/packet.h +++ /dev/null @@ -1,731 +0,0 @@ -/* - * AVPacket public API - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_PACKET_H -#define AVCODEC_PACKET_H - -#include -#include - -#include "libavutil/attributes.h" -#include "libavutil/buffer.h" -#include "libavutil/dict.h" -#include "libavutil/rational.h" -#include "libavutil/version.h" - -#include "libavcodec/version_major.h" - -/** - * @defgroup lavc_packet AVPacket - * - * Types and functions for working with AVPacket. - * @{ - */ -enum AVPacketSideDataType { - /** - * An AV_PKT_DATA_PALETTE side data packet contains exactly AVPALETTE_SIZE - * bytes worth of palette. This side data signals that a new palette is - * present. - */ - AV_PKT_DATA_PALETTE, - - /** - * The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format - * that the extradata buffer was changed and the receiving side should - * act upon it appropriately. The new extradata is embedded in the side - * data buffer and should be immediately used for processing the current - * frame or packet. - */ - AV_PKT_DATA_NEW_EXTRADATA, - - /** - * An AV_PKT_DATA_PARAM_CHANGE side data packet is laid out as follows: - * @code - * u32le param_flags - * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT) - * s32le channel_count - * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT) - * u64le channel_layout - * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE) - * s32le sample_rate - * if (param_flags & AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS) - * s32le width - * s32le height - * @endcode - */ - AV_PKT_DATA_PARAM_CHANGE, - - /** - * An AV_PKT_DATA_H263_MB_INFO side data packet contains a number of - * structures with info about macroblocks relevant to splitting the - * packet into smaller packets on macroblock edges (e.g. as for RFC 2190). 
- * That is, it does not necessarily contain info about all macroblocks, - * as long as the distance between macroblocks in the info is smaller - * than the target payload size. - * Each MB info structure is 12 bytes, and is laid out as follows: - * @code - * u32le bit offset from the start of the packet - * u8 current quantizer at the start of the macroblock - * u8 GOB number - * u16le macroblock address within the GOB - * u8 horizontal MV predictor - * u8 vertical MV predictor - * u8 horizontal MV predictor for block number 3 - * u8 vertical MV predictor for block number 3 - * @endcode - */ - AV_PKT_DATA_H263_MB_INFO, - - /** - * This side data should be associated with an audio stream and contains - * ReplayGain information in form of the AVReplayGain struct. - */ - AV_PKT_DATA_REPLAYGAIN, - - /** - * This side data contains a 3x3 transformation matrix describing an affine - * transformation that needs to be applied to the decoded video frames for - * correct presentation. - * - * See libavutil/display.h for a detailed description of the data. - */ - AV_PKT_DATA_DISPLAYMATRIX, - - /** - * This side data should be associated with a video stream and contains - * Stereoscopic 3D information in form of the AVStereo3D struct. - */ - AV_PKT_DATA_STEREO3D, - - /** - * This side data should be associated with an audio stream and corresponds - * to enum AVAudioServiceType. - */ - AV_PKT_DATA_AUDIO_SERVICE_TYPE, - - /** - * This side data contains quality related information from the encoder. - * @code - * u32le quality factor of the compressed frame. Allowed range is between 1 (good) and FF_LAMBDA_MAX (bad). - * u8 picture type - * u8 error count - * u16 reserved - * u64le[error count] sum of squared differences between encoder in and output - * @endcode - */ - AV_PKT_DATA_QUALITY_STATS, - - /** - * This side data contains an integer value representing the stream index - * of a "fallback" track. 
A fallback track indicates an alternate - * track to use when the current track can not be decoded for some reason. - * e.g. no decoder available for codec. - */ - AV_PKT_DATA_FALLBACK_TRACK, - - /** - * This side data corresponds to the AVCPBProperties struct. - */ - AV_PKT_DATA_CPB_PROPERTIES, - - /** - * Recommmends skipping the specified number of samples - * @code - * u32le number of samples to skip from start of this packet - * u32le number of samples to skip from end of this packet - * u8 reason for start skip - * u8 reason for end skip (0=padding silence, 1=convergence) - * @endcode - */ - AV_PKT_DATA_SKIP_SAMPLES, - - /** - * An AV_PKT_DATA_JP_DUALMONO side data packet indicates that - * the packet may contain "dual mono" audio specific to Japanese DTV - * and if it is true, recommends only the selected channel to be used. - * @code - * u8 selected channels (0=mail/left, 1=sub/right, 2=both) - * @endcode - */ - AV_PKT_DATA_JP_DUALMONO, - - /** - * A list of zero terminated key/value strings. There is no end marker for - * the list, so it is required to rely on the side data size to stop. - */ - AV_PKT_DATA_STRINGS_METADATA, - - /** - * Subtitle event position - * @code - * u32le x1 - * u32le y1 - * u32le x2 - * u32le y2 - * @endcode - */ - AV_PKT_DATA_SUBTITLE_POSITION, - - /** - * Data found in BlockAdditional element of matroska container. There is - * no end marker for the data, so it is required to rely on the side data - * size to recognize the end. 8 byte id (as found in BlockAddId) followed - * by data. - */ - AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL, - - /** - * The optional first identifier line of a WebVTT cue. - */ - AV_PKT_DATA_WEBVTT_IDENTIFIER, - - /** - * The optional settings (rendering instructions) that immediately - * follow the timestamp specifier of a WebVTT cue. - */ - AV_PKT_DATA_WEBVTT_SETTINGS, - - /** - * A list of zero terminated key/value strings. 
There is no end marker for - * the list, so it is required to rely on the side data size to stop. This - * side data includes updated metadata which appeared in the stream. - */ - AV_PKT_DATA_METADATA_UPDATE, - - /** - * MPEGTS stream ID as uint8_t, this is required to pass the stream ID - * information from the demuxer to the corresponding muxer. - */ - AV_PKT_DATA_MPEGTS_STREAM_ID, - - /** - * Mastering display metadata (based on SMPTE-2086:2014). This metadata - * should be associated with a video stream and contains data in the form - * of the AVMasteringDisplayMetadata struct. - */ - AV_PKT_DATA_MASTERING_DISPLAY_METADATA, - - /** - * This side data should be associated with a video stream and corresponds - * to the AVSphericalMapping structure. - */ - AV_PKT_DATA_SPHERICAL, - - /** - * Content light level (based on CTA-861.3). This metadata should be - * associated with a video stream and contains data in the form of the - * AVContentLightMetadata struct. - */ - AV_PKT_DATA_CONTENT_LIGHT_LEVEL, - - /** - * ATSC A53 Part 4 Closed Captions. This metadata should be associated with - * a video stream. A53 CC bitstream is stored as uint8_t in AVPacketSideData.data. - * The number of bytes of CC data is AVPacketSideData.size. - */ - AV_PKT_DATA_A53_CC, - - /** - * This side data is encryption initialization data. - * The format is not part of ABI, use av_encryption_init_info_* methods to - * access. - */ - AV_PKT_DATA_ENCRYPTION_INIT_INFO, - - /** - * This side data contains encryption info for how to decrypt the packet. - * The format is not part of ABI, use av_encryption_info_* methods to access. - */ - AV_PKT_DATA_ENCRYPTION_INFO, - - /** - * Active Format Description data consisting of a single byte as specified - * in ETSI TS 101 154 using AVActiveFormatDescription enum. 
- */ - AV_PKT_DATA_AFD, - - /** - * Producer Reference Time data corresponding to the AVProducerReferenceTime struct, - * usually exported by some encoders (on demand through the prft flag set in the - * AVCodecContext export_side_data field). - */ - AV_PKT_DATA_PRFT, - - /** - * ICC profile data consisting of an opaque octet buffer following the - * format described by ISO 15076-1. - */ - AV_PKT_DATA_ICC_PROFILE, - - /** - * DOVI configuration - * ref: - * dolby-vision-bitstreams-within-the-iso-base-media-file-format-v2.1.2, section 2.2 - * dolby-vision-bitstreams-in-mpeg-2-transport-stream-multiplex-v1.2, section 3.3 - * Tags are stored in struct AVDOVIDecoderConfigurationRecord. - */ - AV_PKT_DATA_DOVI_CONF, - - /** - * Timecode which conforms to SMPTE ST 12-1:2014. The data is an array of 4 uint32_t - * where the first uint32_t describes how many (1-3) of the other timecodes are used. - * The timecode format is described in the documentation of av_timecode_get_smpte_from_framenum() - * function in libavutil/timecode.h. - */ - AV_PKT_DATA_S12M_TIMECODE, - - /** - * HDR10+ dynamic metadata associated with a video frame. The metadata is in - * the form of the AVDynamicHDRPlus struct and contains - * information for color volume transform - application 4 of - * SMPTE 2094-40:2016 standard. - */ - AV_PKT_DATA_DYNAMIC_HDR10_PLUS, - - /** - * The number of side data types. - * This is not part of the public API/ABI in the sense that it may - * change when new side data types are added. - * This must stay the last enum value. - * If its value becomes huge, some code using it - * needs to be updated as it assumes it to be smaller than other limits. - */ - AV_PKT_DATA_NB -}; - -#define AV_PKT_DATA_QUALITY_FACTOR AV_PKT_DATA_QUALITY_STATS //DEPRECATED - -typedef struct AVPacketSideData { - uint8_t *data; - size_t size; - enum AVPacketSideDataType type; -} AVPacketSideData; - -/** - * This structure stores compressed data. 
It is typically exported by demuxers - * and then passed as input to decoders, or received as output from encoders and - * then passed to muxers. - * - * For video, it should typically contain one compressed frame. For audio it may - * contain several compressed frames. Encoders are allowed to output empty - * packets, with no compressed data, containing only side data - * (e.g. to update some stream parameters at the end of encoding). - * - * The semantics of data ownership depends on the buf field. - * If it is set, the packet data is dynamically allocated and is - * valid indefinitely until a call to av_packet_unref() reduces the - * reference count to 0. - * - * If the buf field is not set av_packet_ref() would make a copy instead - * of increasing the reference count. - * - * The side data is always allocated with av_malloc(), copied by - * av_packet_ref() and freed by av_packet_unref(). - * - * sizeof(AVPacket) being a part of the public ABI is deprecated. once - * av_init_packet() is removed, new packets will only be able to be allocated - * with av_packet_alloc(), and new fields may be added to the end of the struct - * with a minor bump. - * - * @see av_packet_alloc - * @see av_packet_ref - * @see av_packet_unref - */ -typedef struct AVPacket { - /** - * A reference to the reference-counted buffer where the packet data is - * stored. - * May be NULL, then the packet data is not reference-counted. - */ - AVBufferRef *buf; - /** - * Presentation timestamp in AVStream->time_base units; the time at which - * the decompressed packet will be presented to the user. - * Can be AV_NOPTS_VALUE if it is not stored in the file. - * pts MUST be larger or equal to dts as presentation cannot happen before - * decompression, unless one wants to view hex dumps. Some formats misuse - * the terms dts and pts/cts to mean something different. Such timestamps - * must be converted to true pts/dts before they are stored in AVPacket. 
- */ - int64_t pts; - /** - * Decompression timestamp in AVStream->time_base units; the time at which - * the packet is decompressed. - * Can be AV_NOPTS_VALUE if it is not stored in the file. - */ - int64_t dts; - uint8_t *data; - int size; - int stream_index; - /** - * A combination of AV_PKT_FLAG values - */ - int flags; - /** - * Additional packet data that can be provided by the container. - * Packet can contain several types of side information. - */ - AVPacketSideData *side_data; - int side_data_elems; - - /** - * Duration of this packet in AVStream->time_base units, 0 if unknown. - * Equals next_pts - this_pts in presentation order. - */ - int64_t duration; - - int64_t pos; ///< byte position in stream, -1 if unknown - - /** - * for some private data of the user - */ - void *opaque; - - /** - * AVBufferRef for free use by the API user. FFmpeg will never check the - * contents of the buffer ref. FFmpeg calls av_buffer_unref() on it when - * the packet is unreferenced. av_packet_copy_props() calls create a new - * reference with av_buffer_ref() for the target packet's opaque_ref field. - * - * This is unrelated to the opaque field, although it serves a similar - * purpose. - */ - AVBufferRef *opaque_ref; - - /** - * Time base of the packet's timestamps. - * In the future, this field may be set on packets output by encoders or - * demuxers, but its value will be by default ignored on input to decoders - * or muxers. - */ - AVRational time_base; -} AVPacket; - -#if FF_API_INIT_PACKET -attribute_deprecated -typedef struct AVPacketList { - AVPacket pkt; - struct AVPacketList *next; -} AVPacketList; -#endif - -#define AV_PKT_FLAG_KEY 0x0001 ///< The packet contains a keyframe -#define AV_PKT_FLAG_CORRUPT 0x0002 ///< The packet content is corrupted -/** - * Flag is used to discard packets which are required to maintain valid - * decoder state but are not required for output and should be dropped - * after decoding. 
- **/ -#define AV_PKT_FLAG_DISCARD 0x0004 -/** - * The packet comes from a trusted source. - * - * Otherwise-unsafe constructs such as arbitrary pointers to data - * outside the packet may be followed. - */ -#define AV_PKT_FLAG_TRUSTED 0x0008 -/** - * Flag is used to indicate packets that contain frames that can - * be discarded by the decoder. I.e. Non-reference frames. - */ -#define AV_PKT_FLAG_DISPOSABLE 0x0010 - -enum AVSideDataParamChangeFlags { -#if FF_API_OLD_CHANNEL_LAYOUT - /** - * @deprecated those are not used by any decoder - */ - AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_COUNT = 0x0001, - AV_SIDE_DATA_PARAM_CHANGE_CHANNEL_LAYOUT = 0x0002, -#endif - AV_SIDE_DATA_PARAM_CHANGE_SAMPLE_RATE = 0x0004, - AV_SIDE_DATA_PARAM_CHANGE_DIMENSIONS = 0x0008, -}; - -/** - * Allocate an AVPacket and set its fields to default values. The resulting - * struct must be freed using av_packet_free(). - * - * @return An AVPacket filled with default values or NULL on failure. - * - * @note this only allocates the AVPacket itself, not the data buffers. Those - * must be allocated through other means such as av_new_packet. - * - * @see av_new_packet - */ -AVPacket *av_packet_alloc(void); - -/** - * Create a new packet that references the same data as src. - * - * This is a shortcut for av_packet_alloc()+av_packet_ref(). - * - * @return newly created AVPacket on success, NULL on error. - * - * @see av_packet_alloc - * @see av_packet_ref - */ -AVPacket *av_packet_clone(const AVPacket *src); - -/** - * Free the packet, if the packet is reference counted, it will be - * unreferenced first. - * - * @param pkt packet to be freed. The pointer will be set to NULL. - * @note passing NULL is a no-op. - */ -void av_packet_free(AVPacket **pkt); - -#if FF_API_INIT_PACKET -/** - * Initialize optional fields of a packet with default values. - * - * Note, this does not touch the data and size members, which have to be - * initialized separately. 
- * - * @param pkt packet - * - * @see av_packet_alloc - * @see av_packet_unref - * - * @deprecated This function is deprecated. Once it's removed, - sizeof(AVPacket) will not be a part of the ABI anymore. - */ -attribute_deprecated -void av_init_packet(AVPacket *pkt); -#endif - -/** - * Allocate the payload of a packet and initialize its fields with - * default values. - * - * @param pkt packet - * @param size wanted payload size - * @return 0 if OK, AVERROR_xxx otherwise - */ -int av_new_packet(AVPacket *pkt, int size); - -/** - * Reduce packet size, correctly zeroing padding - * - * @param pkt packet - * @param size new size - */ -void av_shrink_packet(AVPacket *pkt, int size); - -/** - * Increase packet size, correctly zeroing padding - * - * @param pkt packet - * @param grow_by number of bytes by which to increase the size of the packet - */ -int av_grow_packet(AVPacket *pkt, int grow_by); - -/** - * Initialize a reference-counted packet from av_malloc()ed data. - * - * @param pkt packet to be initialized. This function will set the data, size, - * and buf fields, all others are left untouched. - * @param data Data allocated by av_malloc() to be used as packet data. If this - * function returns successfully, the data is owned by the underlying AVBuffer. - * The caller may not access the data through other means. - * @param size size of data in bytes, without the padding. I.e. the full buffer - * size is assumed to be size + AV_INPUT_BUFFER_PADDING_SIZE. - * - * @return 0 on success, a negative AVERROR on error - */ -int av_packet_from_data(AVPacket *pkt, uint8_t *data, int size); - -/** - * Allocate new information of a packet. - * - * @param pkt packet - * @param type side information type - * @param size side information size - * @return pointer to fresh allocated data or NULL otherwise - */ -uint8_t* av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, - size_t size); - -/** - * Wrap an existing array as a packet side data. 
- * - * @param pkt packet - * @param type side information type - * @param data the side data array. It must be allocated with the av_malloc() - * family of functions. The ownership of the data is transferred to - * pkt. - * @param size side information size - * @return a non-negative number on success, a negative AVERROR code on - * failure. On failure, the packet is unchanged and the data remains - * owned by the caller. - */ -int av_packet_add_side_data(AVPacket *pkt, enum AVPacketSideDataType type, - uint8_t *data, size_t size); - -/** - * Shrink the already allocated side data buffer - * - * @param pkt packet - * @param type side information type - * @param size new side information size - * @return 0 on success, < 0 on failure - */ -int av_packet_shrink_side_data(AVPacket *pkt, enum AVPacketSideDataType type, - size_t size); - -/** - * Get side information from packet. - * - * @param pkt packet - * @param type desired side information type - * @param size If supplied, *size will be set to the size of the side data - * or to zero if the desired side data is not present. - * @return pointer to data if present or NULL otherwise - */ -uint8_t* av_packet_get_side_data(const AVPacket *pkt, enum AVPacketSideDataType type, - size_t *size); - -const char *av_packet_side_data_name(enum AVPacketSideDataType type); - -/** - * Pack a dictionary for use in side_data. - * - * @param dict The dictionary to pack. - * @param size pointer to store the size of the returned data - * @return pointer to data if successful, NULL otherwise - */ -uint8_t *av_packet_pack_dictionary(AVDictionary *dict, size_t *size); -/** - * Unpack a dictionary from side_data. - * - * @param data data from side_data - * @param size size of the data - * @param dict the metadata storage dictionary - * @return 0 on success, < 0 on failure - */ -int av_packet_unpack_dictionary(const uint8_t *data, size_t size, - AVDictionary **dict); - -/** - * Convenience function to free all the side data stored. 
- * All the other fields stay untouched. - * - * @param pkt packet - */ -void av_packet_free_side_data(AVPacket *pkt); - -/** - * Setup a new reference to the data described by a given packet - * - * If src is reference-counted, setup dst as a new reference to the - * buffer in src. Otherwise allocate a new buffer in dst and copy the - * data from src into it. - * - * All the other fields are copied from src. - * - * @see av_packet_unref - * - * @param dst Destination packet. Will be completely overwritten. - * @param src Source packet - * - * @return 0 on success, a negative AVERROR on error. On error, dst - * will be blank (as if returned by av_packet_alloc()). - */ -int av_packet_ref(AVPacket *dst, const AVPacket *src); - -/** - * Wipe the packet. - * - * Unreference the buffer referenced by the packet and reset the - * remaining packet fields to their default values. - * - * @param pkt The packet to be unreferenced. - */ -void av_packet_unref(AVPacket *pkt); - -/** - * Move every field in src to dst and reset src. - * - * @see av_packet_unref - * - * @param src Source packet, will be reset - * @param dst Destination packet - */ -void av_packet_move_ref(AVPacket *dst, AVPacket *src); - -/** - * Copy only "properties" fields from src to dst. - * - * Properties for the purpose of this function are all the fields - * beside those related to the packet data (buf, data, size) - * - * @param dst Destination packet - * @param src Source packet - * - * @return 0 on success AVERROR on failure. - */ -int av_packet_copy_props(AVPacket *dst, const AVPacket *src); - -/** - * Ensure the data described by a given packet is reference counted. - * - * @note This function does not ensure that the reference will be writable. - * Use av_packet_make_writable instead for that purpose. - * - * @see av_packet_ref - * @see av_packet_make_writable - * - * @param pkt packet whose data should be made reference counted. - * - * @return 0 on success, a negative AVERROR on error. 
On failure, the - * packet is unchanged. - */ -int av_packet_make_refcounted(AVPacket *pkt); - -/** - * Create a writable reference for the data described by a given packet, - * avoiding data copy if possible. - * - * @param pkt Packet whose data should be made writable. - * - * @return 0 on success, a negative AVERROR on failure. On failure, the - * packet is unchanged. - */ -int av_packet_make_writable(AVPacket *pkt); - -/** - * Convert valid timing fields (timestamps / durations) in a packet from one - * timebase to another. Timestamps with unknown values (AV_NOPTS_VALUE) will be - * ignored. - * - * @param pkt packet on which the conversion will be performed - * @param tb_src source timebase, in which the timing fields in pkt are - * expressed - * @param tb_dst destination timebase, to which the timing fields will be - * converted - */ -void av_packet_rescale_ts(AVPacket *pkt, AVRational tb_src, AVRational tb_dst); - -/** - * @} - */ - -#endif // AVCODEC_PACKET_H diff --git a/third-party/cbs/include/cbs/sei.h b/third-party/cbs/include/cbs/sei.h deleted file mode 100644 index 5513590b51e..00000000000 --- a/third-party/cbs/include/cbs/sei.h +++ /dev/null @@ -1,140 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_SEI_H -#define AVCODEC_SEI_H - -// SEI payload types form a common namespace between the H.264, H.265 -// and H.266 standards. A given payload type always has the same -// meaning, but some names have different payload types in different -// standards (e.g. scalable-nesting is 30 in H.264 but 133 in H.265). -// The content of the payload data depends on the standard, though -// many generic parts have the same interpretation everywhere (such as -// mastering-display-colour-volume and user-data-unregistered). -enum { - SEI_TYPE_BUFFERING_PERIOD = 0, - SEI_TYPE_PIC_TIMING = 1, - SEI_TYPE_PAN_SCAN_RECT = 2, - SEI_TYPE_FILLER_PAYLOAD = 3, - SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35 = 4, - SEI_TYPE_USER_DATA_UNREGISTERED = 5, - SEI_TYPE_RECOVERY_POINT = 6, - SEI_TYPE_DEC_REF_PIC_MARKING_REPETITION = 7, - SEI_TYPE_SPARE_PIC = 8, - SEI_TYPE_SCENE_INFO = 9, - SEI_TYPE_SUB_SEQ_INFO = 10, - SEI_TYPE_SUB_SEQ_LAYER_CHARACTERISTICS = 11, - SEI_TYPE_SUB_SEQ_CHARACTERISTICS = 12, - SEI_TYPE_FULL_FRAME_FREEZE = 13, - SEI_TYPE_FULL_FRAME_FREEZE_RELEASE = 14, - SEI_TYPE_FULL_FRAME_SNAPSHOT = 15, - SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_START = 16, - SEI_TYPE_PROGRESSIVE_REFINEMENT_SEGMENT_END = 17, - SEI_TYPE_MOTION_CONSTRAINED_SLICE_GROUP_SET = 18, - SEI_TYPE_FILM_GRAIN_CHARACTERISTICS = 19, - SEI_TYPE_DEBLOCKING_FILTER_DISPLAY_PREFERENCE = 20, - SEI_TYPE_STEREO_VIDEO_INFO = 21, - SEI_TYPE_POST_FILTER_HINT = 22, - SEI_TYPE_TONE_MAPPING_INFO = 23, - SEI_TYPE_SCALABILITY_INFO = 24, - SEI_TYPE_SUB_PIC_SCALABLE_LAYER = 25, - SEI_TYPE_NON_REQUIRED_LAYER_REP = 26, - SEI_TYPE_PRIORITY_LAYER_INFO = 27, - SEI_TYPE_LAYERS_NOT_PRESENT_4 = 28, - SEI_TYPE_LAYER_DEPENDENCY_CHANGE = 29, - SEI_TYPE_SCALABLE_NESTING_4 = 30, - SEI_TYPE_BASE_LAYER_TEMPORAL_HRD = 
31, - SEI_TYPE_QUALITY_LAYER_INTEGRITY_CHECK = 32, - SEI_TYPE_REDUNDANT_PIC_PROPERTY = 33, - SEI_TYPE_TL0_DEP_REP_INDEX = 34, - SEI_TYPE_TL_SWITCHING_POINT = 35, - SEI_TYPE_PARALLEL_DECODING_INFO = 36, - SEI_TYPE_MVC_SCALABLE_NESTING = 37, - SEI_TYPE_VIEW_SCALABILITY_INFO = 38, - SEI_TYPE_MULTIVIEW_SCENE_INFO_4 = 39, - SEI_TYPE_MULTIVIEW_ACQUISITION_INFO_4 = 40, - SEI_TYPE_NON_REQUIRED_VIEW_COMPONENT = 41, - SEI_TYPE_VIEW_DEPENDENCY_CHANGE = 42, - SEI_TYPE_OPERATION_POINTS_NOT_PRESENT = 43, - SEI_TYPE_BASE_VIEW_TEMPORAL_HRD = 44, - SEI_TYPE_FRAME_PACKING_ARRANGEMENT = 45, - SEI_TYPE_MULTIVIEW_VIEW_POSITION_4 = 46, - SEI_TYPE_DISPLAY_ORIENTATION = 47, - SEI_TYPE_MVCD_SCALABLE_NESTING = 48, - SEI_TYPE_MVCD_VIEW_SCALABILITY_INFO = 49, - SEI_TYPE_DEPTH_REPRESENTATION_INFO_4 = 50, - SEI_TYPE_THREE_DIMENSIONAL_REFERENCE_DISPLAYS_INFO_4 = 51, - SEI_TYPE_DEPTH_TIMING = 52, - SEI_TYPE_DEPTH_SAMPLING_INFO = 53, - SEI_TYPE_CONSTRAINED_DEPTH_PARAMETER_SET_IDENTIFIER = 54, - SEI_TYPE_GREEN_METADATA = 56, - SEI_TYPE_STRUCTURE_OF_PICTURES_INFO = 128, - SEI_TYPE_ACTIVE_PARAMETER_SETS = 129, - SEI_TYPE_PARAMETER_SETS_INCLUSION_INDICATION = SEI_TYPE_ACTIVE_PARAMETER_SETS, - SEI_TYPE_DECODING_UNIT_INFO = 130, - SEI_TYPE_TEMPORAL_SUB_LAYER_ZERO_IDX = 131, - SEI_TYPE_DECODED_PICTURE_HASH = 132, - SEI_TYPE_SCALABLE_NESTING_5 = 133, - SEI_TYPE_REGION_REFRESH_INFO = 134, - SEI_TYPE_NO_DISPLAY = 135, - SEI_TYPE_TIME_CODE = 136, - SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME = 137, - SEI_TYPE_SEGMENTED_RECT_FRAME_PACKING_ARRANGEMENT = 138, - SEI_TYPE_TEMPORAL_MOTION_CONSTRAINED_TILE_SETS = 139, - SEI_TYPE_CHROMA_RESAMPLING_FILTER_HINT = 140, - SEI_TYPE_KNEE_FUNCTION_INFO = 141, - SEI_TYPE_COLOUR_REMAPPING_INFO = 142, - SEI_TYPE_DEINTERLACED_FIELD_IDENTIFICATION = 143, - SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO = 144, - SEI_TYPE_DEPENDENT_RAP_INDICATION = 145, - SEI_TYPE_CODED_REGION_COMPLETION = 146, - SEI_TYPE_ALTERNATIVE_TRANSFER_CHARACTERISTICS = 147, - SEI_TYPE_AMBIENT_VIEWING_ENVIRONMENT = 148, - 
SEI_TYPE_CONTENT_COLOUR_VOLUME = 149, - SEI_TYPE_EQUIRECTANGULAR_PROJECTION = 150, - SEI_TYPE_CUBEMAP_PROJECTION = 151, - SEI_TYPE_FISHEYE_VIDEO_INFO = 152, - SEI_TYPE_SPHERE_ROTATION = 154, - SEI_TYPE_REGIONWISE_PACKING = 155, - SEI_TYPE_OMNI_VIEWPORT = 156, - SEI_TYPE_REGIONAL_NESTING = 157, - SEI_TYPE_MCTS_EXTRACTION_INFO_SETS = 158, - SEI_TYPE_MCTS_EXTRACTION_INFO_NESTING = 159, - SEI_TYPE_LAYERS_NOT_PRESENT_5 = 160, - SEI_TYPE_INTER_LAYER_CONSTRAINED_TILE_SETS = 161, - SEI_TYPE_BSP_NESTING = 162, - SEI_TYPE_BSP_INITIAL_ARRIVAL_TIME = 163, - SEI_TYPE_SUB_BITSTREAM_PROPERTY = 164, - SEI_TYPE_ALPHA_CHANNEL_INFO = 165, - SEI_TYPE_OVERLAY_INFO = 166, - SEI_TYPE_TEMPORAL_MV_PREDICTION_CONSTRAINTS = 167, - SEI_TYPE_FRAME_FIELD_INFO = 168, - SEI_TYPE_THREE_DIMENSIONAL_REFERENCE_DISPLAYS_INFO = 176, - SEI_TYPE_DEPTH_REPRESENTATION_INFO_5 = 177, - SEI_TYPE_MULTIVIEW_SCENE_INFO_5 = 178, - SEI_TYPE_MULTIVIEW_ACQUISITION_INFO_5 = 179, - SEI_TYPE_MULTIVIEW_VIEW_POSITION_5 = 180, - SEI_TYPE_ALTERNATIVE_DEPTH_INFO = 181, - SEI_TYPE_SEI_MANIFEST = 200, - SEI_TYPE_SEI_PREFIX_INDICATION = 201, - SEI_TYPE_ANNOTATED_REGIONS = 202, - SEI_TYPE_SUBPIC_LEVEL_INFO = 203, - SEI_TYPE_SAMPLE_ASPECT_RATIO_INFO = 204, -}; - -#endif /* AVCODEC_SEI_H */ diff --git a/third-party/cbs/include/cbs/vlc.h b/third-party/cbs/include/cbs/vlc.h deleted file mode 100644 index e63c484755a..00000000000 --- a/third-party/cbs/include/cbs/vlc.h +++ /dev/null @@ -1,144 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_VLC_H -#define AVCODEC_VLC_H - -#include - -// When changing this, be sure to also update tableprint_vlc.h accordingly. -typedef int16_t VLCBaseType; - -typedef struct VLCElem { - VLCBaseType sym, len; -} VLCElem; - -typedef struct VLC { - int bits; - VLCElem *table; - int table_size, table_allocated; -} VLC; - -typedef struct RL_VLC_ELEM { - int16_t level; - int8_t len; - uint8_t run; -} RL_VLC_ELEM; - -#define init_vlc(vlc, nb_bits, nb_codes, \ - bits, bits_wrap, bits_size, \ - codes, codes_wrap, codes_size, \ - flags) \ - ff_init_vlc_sparse(vlc, nb_bits, nb_codes, \ - bits, bits_wrap, bits_size, \ - codes, codes_wrap, codes_size, \ - NULL, 0, 0, flags) - -int ff_init_vlc_sparse(VLC *vlc, int nb_bits, int nb_codes, - const void *bits, int bits_wrap, int bits_size, - const void *codes, int codes_wrap, int codes_size, - const void *symbols, int symbols_wrap, int symbols_size, - int flags); - -/** - * Build VLC decoding tables suitable for use with get_vlc2() - * - * This function takes lengths and symbols and calculates the codes from them. - * For this the input lengths and symbols have to be sorted according to "left - * nodes in the corresponding tree first". - * - * @param[in,out] vlc The VLC to be initialized; table and table_allocated - * must have been set when initializing a static VLC, - * otherwise this will be treated as uninitialized. - * @param[in] nb_bits The number of bits to use for the VLC table; - * higher values take up more memory and cache, but - * allow to read codes with fewer reads. - * @param[in] nb_codes The number of provided length and (if supplied) symbol - * entries. - * @param[in] lens The lengths of the codes. 
Entries > 0 correspond to - * valid codes; entries == 0 will be skipped and entries - * with len < 0 indicate that the tree is incomplete and - * has an open end of length -len at this position. - * @param[in] lens_wrap Stride (in bytes) of the lengths. - * @param[in] symbols The symbols, i.e. what is returned from get_vlc2() - * when the corresponding code is encountered. - * May be NULL, then 0, 1, 2, 3, 4,... will be used. - * @param[in] symbols_wrap Stride (in bytes) of the symbols. - * @param[in] symbols_size Size of the symbols. 1 and 2 are supported. - * @param[in] offset An offset to apply to all the valid symbols. - * @param[in] flags A combination of the INIT_VLC_* flags; notice that - * INIT_VLC_INPUT_LE is pointless and ignored. - */ -int ff_init_vlc_from_lengths(VLC *vlc, int nb_bits, int nb_codes, - const int8_t *lens, int lens_wrap, - const void *symbols, int symbols_wrap, int symbols_size, - int offset, int flags, void *logctx); - -void ff_free_vlc(VLC *vlc); - -/* If INIT_VLC_INPUT_LE is set, the LSB bit of the codes used to - * initialize the VLC table is the first bit to be read. */ -#define INIT_VLC_INPUT_LE 2 -/* If set the VLC is intended for a little endian bitstream reader. 
*/ -#define INIT_VLC_OUTPUT_LE 8 -#define INIT_VLC_LE (INIT_VLC_INPUT_LE | INIT_VLC_OUTPUT_LE) -#define INIT_VLC_USE_NEW_STATIC 4 -#define INIT_VLC_STATIC_OVERLONG (1 | INIT_VLC_USE_NEW_STATIC) - -#define INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - h, i, j, flags, static_size) \ - do { \ - static VLCElem table[static_size]; \ - (vlc)->table = table; \ - (vlc)->table_allocated = static_size; \ - ff_init_vlc_sparse(vlc, bits, a, b, c, d, e, f, g, h, i, j, \ - flags | INIT_VLC_USE_NEW_STATIC); \ - } while (0) - -#define INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \ - INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - h, i, j, 0, static_size) - -#define INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, h, i, j, static_size) \ - INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - h, i, j, INIT_VLC_LE, static_size) - -#define INIT_CUSTOM_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, flags, static_size) \ - INIT_CUSTOM_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, \ - NULL, 0, 0, flags, static_size) - -#define INIT_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ - INIT_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) - -#define INIT_LE_VLC_STATIC(vlc, bits, a, b, c, d, e, f, g, static_size) \ - INIT_LE_VLC_SPARSE_STATIC(vlc, bits, a, b, c, d, e, f, g, NULL, 0, 0, static_size) - -#define INIT_VLC_STATIC_FROM_LENGTHS(vlc, bits, nb_codes, lens, len_wrap, \ - symbols, symbols_wrap, symbols_size, \ - offset, flags, static_size) \ - do { \ - static VLCElem table[static_size]; \ - (vlc)->table = table; \ - (vlc)->table_allocated = static_size; \ - ff_init_vlc_from_lengths(vlc, bits, nb_codes, lens, len_wrap, \ - symbols, symbols_wrap, symbols_size, \ - offset, flags | INIT_VLC_USE_NEW_STATIC, \ - NULL); \ - } while (0) - -#endif /* AVCODEC_VLC_H */ diff --git a/third-party/cbs/intmath.h b/third-party/cbs/intmath.h deleted file mode 100644 index 
50ba735d56d..00000000000 --- a/third-party/cbs/intmath.h +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright (c) 2010 Mans Rullgard - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ -#ifndef AVUTIL_INTMATH_H -#define AVUTIL_INTMATH_H - -#include - -// [sunshine] Changed include path -#include "cbs/config.h" -#include "cbs/attributes.h" - -#if ARCH_ARM -# include "arm/intmath.h" -#endif -#if ARCH_X86 -# include "x86/intmath.h" -#endif - -#if HAVE_FAST_CLZ -#if AV_GCC_VERSION_AT_LEAST(3,4) -#ifndef ff_log2 -# define ff_log2(x) (31 - __builtin_clz((x)|1)) -# ifndef ff_log2_16bit -# define ff_log2_16bit av_log2 -# endif -#endif /* ff_log2 */ -#endif /* AV_GCC_VERSION_AT_LEAST(3,4) */ -#endif - -extern const uint8_t ff_log2_tab[256]; - -#ifndef ff_log2 -#define ff_log2 ff_log2_c -static av_always_inline av_const int ff_log2_c(unsigned int v) -{ - int n = 0; - if (v & 0xffff0000) { - v >>= 16; - n += 16; - } - if (v & 0xff00) { - v >>= 8; - n += 8; - } - n += ff_log2_tab[v]; - - return n; -} -#endif - -#ifndef ff_log2_16bit -#define ff_log2_16bit ff_log2_16bit_c -static av_always_inline av_const int ff_log2_16bit_c(unsigned int v) -{ - int n = 0; - if (v & 0xff00) { - v >>= 8; - n += 8; - } - n += ff_log2_tab[v]; - - return n; -} -#endif - -#define av_log2 
ff_log2 -#define av_log2_16bit ff_log2_16bit - -/** - * @addtogroup lavu_math - * @{ - */ - -#if HAVE_FAST_CLZ -#if AV_GCC_VERSION_AT_LEAST(3,4) -#ifndef ff_ctz -#define ff_ctz(v) __builtin_ctz(v) -#endif -#ifndef ff_ctzll -#define ff_ctzll(v) __builtin_ctzll(v) -#endif -#ifndef ff_clz -#define ff_clz(v) __builtin_clz(v) -#endif -#endif -#endif - -#ifndef ff_ctz -#define ff_ctz ff_ctz_c -/** - * Trailing zero bit count. - * - * @param v input value. If v is 0, the result is undefined. - * @return the number of trailing 0-bits - */ -/* We use the De-Bruijn method outlined in: - * http://supertech.csail.mit.edu/papers/debruijn.pdf. */ -static av_always_inline av_const int ff_ctz_c(int v) -{ - static const uint8_t debruijn_ctz32[32] = { - 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, - 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9 - }; - return debruijn_ctz32[(uint32_t)((v & -v) * 0x077CB531U) >> 27]; -} -#endif - -#ifndef ff_ctzll -#define ff_ctzll ff_ctzll_c -/* We use the De-Bruijn method outlined in: - * http://supertech.csail.mit.edu/papers/debruijn.pdf. 
*/ -static av_always_inline av_const int ff_ctzll_c(long long v) -{ - static const uint8_t debruijn_ctz64[64] = { - 0, 1, 2, 53, 3, 7, 54, 27, 4, 38, 41, 8, 34, 55, 48, 28, - 62, 5, 39, 46, 44, 42, 22, 9, 24, 35, 59, 56, 49, 18, 29, 11, - 63, 52, 6, 26, 37, 40, 33, 47, 61, 45, 43, 21, 23, 58, 17, 10, - 51, 25, 36, 32, 60, 20, 57, 16, 50, 31, 19, 15, 30, 14, 13, 12 - }; - return debruijn_ctz64[(uint64_t)((v & -v) * 0x022FDD63CC95386DU) >> 58]; -} -#endif - -#ifndef ff_clz -#define ff_clz ff_clz_c -static av_always_inline av_const unsigned ff_clz_c(unsigned x) -{ - unsigned i = sizeof(x) * 8; - - while (x) { - x >>= 1; - i--; - } - - return i; -} -#endif - -#if AV_GCC_VERSION_AT_LEAST(3,4) -#ifndef av_parity -#define av_parity __builtin_parity -#endif -#endif - -/** - * @} - */ -#endif /* AVUTIL_INTMATH_H */ diff --git a/third-party/cbs/log2_tab.c b/third-party/cbs/log2_tab.c deleted file mode 100644 index 3431401c745..00000000000 --- a/third-party/cbs/log2_tab.c +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2003-2012 Michael Niedermayer - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ -// [sunshine] Copied from libavutil - -#include - -const uint8_t ff_log2_tab[256]={ - 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4, - 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5, - 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, - 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7 -}; diff --git a/third-party/cbs/put_bits.h b/third-party/cbs/put_bits.h deleted file mode 100644 index fcb839a7f72..00000000000 --- a/third-party/cbs/put_bits.h +++ /dev/null @@ -1,422 +0,0 @@ -/* - * copyright (c) 2004 Michael Niedermayer - * - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * bitstream writer API - */ - -#ifndef AVCODEC_PUT_BITS_H -#define AVCODEC_PUT_BITS_H - -#include -#include - -// [sunshine] Changed include path -#include "cbs/config.h" -#include "libavutil/intreadwrite.h" -#include "libavutil/avassert.h" -#include "libavutil/common.h" - -#if ARCH_X86_64 -// TODO: Benchmark and optionally enable on other 64-bit architectures. -typedef uint64_t BitBuf; -#define AV_WBBUF AV_WB64 -#define AV_WLBUF AV_WL64 -#else -typedef uint32_t BitBuf; -#define AV_WBBUF AV_WB32 -#define AV_WLBUF AV_WL32 -#endif - -static const int BUF_BITS = 8 * sizeof(BitBuf); - -typedef struct PutBitContext { - BitBuf bit_buf; - int bit_left; - uint8_t *buf, *buf_ptr, *buf_end; -} PutBitContext; - -/** - * Initialize the PutBitContext s. - * - * @param buffer the buffer where to put bits - * @param buffer_size the size in bytes of buffer - */ -static inline void init_put_bits(PutBitContext *s, uint8_t *buffer, - int buffer_size) -{ - if (buffer_size < 0) { - buffer_size = 0; - buffer = NULL; - } - - s->buf = buffer; - s->buf_end = s->buf + buffer_size; - s->buf_ptr = s->buf; - s->bit_left = BUF_BITS; - s->bit_buf = 0; -} - -/** - * @return the total number of bits written to the bitstream. - */ -static inline int put_bits_count(PutBitContext *s) -{ - return (s->buf_ptr - s->buf) * 8 + BUF_BITS - s->bit_left; -} - -/** - * @return the number of bytes output so far; may only be called - * when the PutBitContext is freshly initialized or flushed. - */ -static inline int put_bytes_output(const PutBitContext *s) -{ - av_assert2(s->bit_left == BUF_BITS); - return s->buf_ptr - s->buf; -} - -/** - * @param round_up When set, the number of bits written so far will be - * rounded up to the next byte. 
- * @return the number of bytes output so far. - */ -static inline int put_bytes_count(const PutBitContext *s, int round_up) -{ - return s->buf_ptr - s->buf + ((BUF_BITS - s->bit_left + (round_up ? 7 : 0)) >> 3); -} - -/** - * Rebase the bit writer onto a reallocated buffer. - * - * @param buffer the buffer where to put bits - * @param buffer_size the size in bytes of buffer, - * must be large enough to hold everything written so far - */ -static inline void rebase_put_bits(PutBitContext *s, uint8_t *buffer, - int buffer_size) -{ - av_assert0(8*buffer_size >= put_bits_count(s)); - - s->buf_end = buffer + buffer_size; - s->buf_ptr = buffer + (s->buf_ptr - s->buf); - s->buf = buffer; -} - -/** - * @return the number of bits available in the bitstream. - */ -static inline int put_bits_left(PutBitContext* s) -{ - return (s->buf_end - s->buf_ptr) * 8 - BUF_BITS + s->bit_left; -} - -/** - * @param round_up When set, the number of bits written will be - * rounded up to the next byte. - * @return the number of bytes left. - */ -static inline int put_bytes_left(const PutBitContext *s, int round_up) -{ - return s->buf_end - s->buf_ptr - ((BUF_BITS - s->bit_left + (round_up ? 7 : 0)) >> 3); -} - -/** - * Pad the end of the output stream with zeros. 
- */ -static inline void flush_put_bits(PutBitContext *s) -{ -#ifndef BITSTREAM_WRITER_LE - if (s->bit_left < BUF_BITS) - s->bit_buf <<= s->bit_left; -#endif - while (s->bit_left < BUF_BITS) { - av_assert0(s->buf_ptr < s->buf_end); -#ifdef BITSTREAM_WRITER_LE - *s->buf_ptr++ = s->bit_buf; - s->bit_buf >>= 8; -#else - *s->buf_ptr++ = s->bit_buf >> (BUF_BITS - 8); - s->bit_buf <<= 8; -#endif - s->bit_left += 8; - } - s->bit_left = BUF_BITS; - s->bit_buf = 0; -} - -static inline void flush_put_bits_le(PutBitContext *s) -{ - while (s->bit_left < BUF_BITS) { - av_assert0(s->buf_ptr < s->buf_end); - *s->buf_ptr++ = s->bit_buf; - s->bit_buf >>= 8; - s->bit_left += 8; - } - s->bit_left = BUF_BITS; - s->bit_buf = 0; -} - -#ifdef BITSTREAM_WRITER_LE -#define ff_put_string ff_put_string_unsupported_here -#define ff_copy_bits ff_copy_bits_unsupported_here -#else - -/** - * Put the string string in the bitstream. - * - * @param terminate_string 0-terminates the written string if value is 1 - */ -void ff_put_string(PutBitContext *pb, const char *string, - int terminate_string); - -/** - * Copy the content of src to the bitstream. 
- * - * @param length the number of bits of src to copy - */ -void ff_copy_bits(PutBitContext *pb, const uint8_t *src, int length); -#endif - -static inline void put_bits_no_assert(PutBitContext *s, int n, BitBuf value) -{ - BitBuf bit_buf; - int bit_left; - - bit_buf = s->bit_buf; - bit_left = s->bit_left; - - /* XXX: optimize */ -#ifdef BITSTREAM_WRITER_LE - bit_buf |= value << (BUF_BITS - bit_left); - if (n >= bit_left) { - if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WLBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = value >> bit_left; - bit_left += BUF_BITS; - } - bit_left -= n; -#else - if (n < bit_left) { - bit_buf = (bit_buf << n) | value; - bit_left -= n; - } else { - bit_buf <<= bit_left; - bit_buf |= value >> (n - bit_left); - if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WBBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_left += BUF_BITS - n; - bit_buf = value; - } -#endif - - s->bit_buf = bit_buf; - s->bit_left = bit_left; -} - -/** - * Write up to 31 bits into a bitstream. - * Use put_bits32 to write 32 bits. 
- */ -static inline void put_bits(PutBitContext *s, int n, BitBuf value) -{ - av_assert2(n <= 31 && value < (1UL << n)); - put_bits_no_assert(s, n, value); -} - -static inline void put_bits_le(PutBitContext *s, int n, BitBuf value) -{ - BitBuf bit_buf; - int bit_left; - - av_assert2(n <= 31 && value < (1UL << n)); - - bit_buf = s->bit_buf; - bit_left = s->bit_left; - - bit_buf |= value << (BUF_BITS - bit_left); - if (n >= bit_left) { - if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WLBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = value >> bit_left; - bit_left += BUF_BITS; - } - bit_left -= n; - - s->bit_buf = bit_buf; - s->bit_left = bit_left; -} - -static inline void put_sbits(PutBitContext *pb, int n, int32_t value) -{ - av_assert2(n >= 0 && n <= 31); - - put_bits(pb, n, av_mod_uintp2(value, n)); -} - -/** - * Write exactly 32 bits into a bitstream. 
- */ -static void av_unused put_bits32(PutBitContext *s, uint32_t value) -{ - BitBuf bit_buf; - int bit_left; - - if (BUF_BITS > 32) { - put_bits_no_assert(s, 32, value); - return; - } - - bit_buf = s->bit_buf; - bit_left = s->bit_left; - -#ifdef BITSTREAM_WRITER_LE - bit_buf |= (BitBuf)value << (BUF_BITS - bit_left); - if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WLBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = (uint64_t)value >> bit_left; -#else - bit_buf = (uint64_t)bit_buf << bit_left; - bit_buf |= (BitBuf)value >> (BUF_BITS - bit_left); - if (s->buf_end - s->buf_ptr >= sizeof(BitBuf)) { - AV_WBBUF(s->buf_ptr, bit_buf); - s->buf_ptr += sizeof(BitBuf); - } else { - av_log(NULL, AV_LOG_ERROR, "Internal error, put_bits buffer too small\n"); - av_assert2(0); - } - bit_buf = value; -#endif - - s->bit_buf = bit_buf; - s->bit_left = bit_left; -} - -/** - * Write up to 64 bits into a bitstream. - */ -static inline void put_bits64(PutBitContext *s, int n, uint64_t value) -{ - av_assert2((n == 64) || (n < 64 && value < (UINT64_C(1) << n))); - - if (n < 32) - put_bits(s, n, value); - else if (n == 32) - put_bits32(s, value); - else if (n < 64) { - uint32_t lo = value & 0xffffffff; - uint32_t hi = value >> 32; -#ifdef BITSTREAM_WRITER_LE - put_bits32(s, lo); - put_bits(s, n - 32, hi); -#else - put_bits(s, n - 32, hi); - put_bits32(s, lo); -#endif - } else { - uint32_t lo = value & 0xffffffff; - uint32_t hi = value >> 32; -#ifdef BITSTREAM_WRITER_LE - put_bits32(s, lo); - put_bits32(s, hi); -#else - put_bits32(s, hi); - put_bits32(s, lo); -#endif - - } -} - -/** - * Return the pointer to the byte where the bitstream writer will put - * the next bit. - */ -static inline uint8_t *put_bits_ptr(PutBitContext *s) -{ - return s->buf_ptr; -} - -/** - * Skip the given number of bytes. 
- * PutBitContext must be flushed & aligned to a byte boundary before calling this. - */ -static inline void skip_put_bytes(PutBitContext *s, int n) -{ - av_assert2((put_bits_count(s) & 7) == 0); - av_assert2(s->bit_left == BUF_BITS); - av_assert0(n <= s->buf_end - s->buf_ptr); - s->buf_ptr += n; -} - -/** - * Skip the given number of bits. - * Must only be used if the actual values in the bitstream do not matter. - * If n is < 0 the behavior is undefined. - */ -static inline void skip_put_bits(PutBitContext *s, int n) -{ - unsigned bits = BUF_BITS - s->bit_left + n; - s->buf_ptr += sizeof(BitBuf) * (bits / BUF_BITS); - s->bit_left = BUF_BITS - (bits & (BUF_BITS - 1)); -} - -/** - * Change the end of the buffer. - * - * @param size the new size in bytes of the buffer where to put bits - */ -static inline void set_put_bits_buffer_size(PutBitContext *s, int size) -{ - av_assert0(size <= INT_MAX/8 - BUF_BITS); - s->buf_end = s->buf + size; -} - -/** - * Pad the bitstream with zeros up to the next byte boundary. - */ -static inline void align_put_bits(PutBitContext *s) -{ - put_bits(s, s->bit_left & 7, 0); -} - -#undef AV_WBBUF -#undef AV_WLBUF - -#endif /* AVCODEC_PUT_BITS_H */ diff --git a/third-party/cbs/startcode.h b/third-party/cbs/startcode.h deleted file mode 100644 index 8b75832aaf7..00000000000 --- a/third-party/cbs/startcode.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -/** - * @file - * Accelerated start code search function for start codes common to - * MPEG-1/2/4 video, VC-1, H.264/5 - */ - -#ifndef AVCODEC_STARTCODE_H -#define AVCODEC_STARTCODE_H - -#include - -const uint8_t *avpriv_find_start_code(const uint8_t *p, - const uint8_t *end, - uint32_t *state); - -int ff_startcode_find_candidate_c(const uint8_t *buf, int size); - -#endif /* AVCODEC_STARTCODE_H */ diff --git a/third-party/cbs/version_major.h b/third-party/cbs/version_major.h deleted file mode 100644 index 1e23ed5e03e..00000000000 --- a/third-party/cbs/version_major.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * This file is part of FFmpeg. - * - * FFmpeg is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * FFmpeg is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public - * License along with FFmpeg; if not, write to the Free Software - * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - */ - -#ifndef AVCODEC_VERSION_MAJOR_H -#define AVCODEC_VERSION_MAJOR_H - -/** - * @file - * @ingroup libavc - * Libavcodec version macros. - */ - -#define LIBAVCODEC_VERSION_MAJOR 59 - -/** - * FF_API_* defines may be placed below to indicate public API that will be - * dropped at a future version bump. 
The defines themselves are not part of - * the public API and may change, break or disappear at any time. - * - * @note, when bumping the major version it is recommended to manually - * disable each FF_API_* in its own commit instead of disabling them all - * at once through the bump. This improves the git bisect-ability of the change. - */ - -#define FF_API_OPENH264_SLICE_MODE (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_OPENH264_CABAC (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_UNUSED_CODEC_CAPS (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_THREAD_SAFE_CALLBACKS (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_DEBUG_MV (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_GET_FRAME_CLASS (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_AUTO_THREADS (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_INIT_PACKET (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_AVCTX_TIMEBASE (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_FLAG_TRUNCATED (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_SUB_TEXT_FORMAT (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_IDCT_NONE (LIBAVCODEC_VERSION_MAJOR < 60) -#define FF_API_SVTAV1_OPTS (LIBAVCODEC_VERSION_MAJOR < 60) - -#endif /* AVCODEC_VERSION_MAJOR_H */ From fc1b4a0c4472cc2e5997ad45b93b900a83770fa4 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Thu, 22 Dec 2022 22:02:03 -0500 Subject: [PATCH 07/11] Add AV1 encoder library --- CMakeLists.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index fdae6de7e68..27c25d64305 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -401,10 +401,11 @@ endif() set(FFMPEG_LIBRARIES ${FFMPEG_PREPARED_BINARIES}/lib/libavcodec.a ${FFMPEG_PREPARED_BINARIES}/lib/libavutil.a + ${FFMPEG_PREPARED_BINARIES}/lib/libcbs.a + ${FFMPEG_PREPARED_BINARIES}/lib/libSvtAv1Enc.a ${FFMPEG_PREPARED_BINARIES}/lib/libswscale.a ${FFMPEG_PREPARED_BINARIES}/lib/libx264.a ${FFMPEG_PREPARED_BINARIES}/lib/libx265.a - ${FFMPEG_PREPARED_BINARIES}/lib/libcbs.a 
${HDR10_PLUS_LIBRARY} ${FFMPEG_PLATFORM_LIBRARIES}) From e128e8f63b559cf030cab7bf0c2de88c084ff9f2 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Thu, 22 Dec 2022 23:57:14 -0500 Subject: [PATCH 08/11] Update docs --- docs/source/about/advanced_usage.rst | 4 ++-- src_assets/common/assets/web/config.html | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/source/about/advanced_usage.rst b/docs/source/about/advanced_usage.rst index 2ab43a856c6..7e4c87e9589 100644 --- a/docs/source/about/advanced_usage.rst +++ b/docs/source/about/advanced_usage.rst @@ -865,7 +865,7 @@ nv_rc Value Description ========== =========== auto let ffmpeg decide - constqp constant QP mode + cqp constant QP mode cbr constant bitrate cbr_hq constant bitrate, high quality cbr_ld_hq constant bitrate, low delay, high quality @@ -958,7 +958,7 @@ amd_rc Value Description =========== =========== auto let ffmpeg decide - constqp constant QP mode + cqp constant QP mode cbr constant bitrate vbr_latency variable bitrate, latency constrained vbr_peak variable bitrate, peak constrained diff --git a/src_assets/common/assets/web/config.html b/src_assets/common/assets/web/config.html index 01954e76e7b..c8159b267a4 100644 --- a/src_assets/common/assets/web/config.html +++ b/src_assets/common/assets/web/config.html @@ -664,7 +664,7 @@

Configuration

- + From 65dea8593baeee5aa9d53243e6c4651433fbe762 Mon Sep 17 00:00:00 2001 From: Brad Richardson Date: Sun, 25 Dec 2022 22:14:16 -0500 Subject: [PATCH 09/11] Revert nvidia constqp changes --- docs/source/about/advanced_usage.rst | 2 +- src/config.cpp | 4 ++-- src_assets/common/assets/web/config.html | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/source/about/advanced_usage.rst b/docs/source/about/advanced_usage.rst index 7e4c87e9589..bbf12e9748d 100644 --- a/docs/source/about/advanced_usage.rst +++ b/docs/source/about/advanced_usage.rst @@ -958,7 +958,7 @@ amd_rc Value Description =========== =========== auto let ffmpeg decide - cqp constant QP mode + constqp constant QP mode cbr constant bitrate vbr_latency variable bitrate, latency constrained vbr_peak variable bitrate, peak constrained diff --git a/src/config.cpp b/src/config.cpp index b888a5937c5..00e4768120a 100644 --- a/src/config.cpp +++ b/src/config.cpp @@ -41,7 +41,7 @@ enum preset_e : int { }; enum rc_e : int { - cqp = 0x0, /**< Constant QP mode */ + constqp = 0x0, /**< Constant QP mode */ vbr = 0x1, /**< Variable bitrate mode */ cbr = 0x2, /**< Constant bitrate mode */ cbr_ld_hq = 0x8, /**< low-delay CBR, high quality */ @@ -76,7 +76,7 @@ std::optional preset_from_view(const std::string_view &preset) { std::optional rc_from_view(const std::string_view &rc) { #define _CONVERT_(x) \ if(rc == #x##sv) return x - _CONVERT_(cqp); + _CONVERT_(constqp); _CONVERT_(vbr); _CONVERT_(cbr); _CONVERT_(cbr_hq); diff --git a/src_assets/common/assets/web/config.html b/src_assets/common/assets/web/config.html index c8159b267a4..95d17f0da07 100644 --- a/src_assets/common/assets/web/config.html +++ b/src_assets/common/assets/web/config.html @@ -664,7 +664,7 @@

Configuration