   24 #define X265_API_IMPORTS 1 
   45 #if FF_API_REORDERED_OPAQUE 
   46     int64_t reordered_opaque;
 
   89     case NAL_UNIT_CODED_SLICE_BLA_W_LP:
 
   90     case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
 
   91     case NAL_UNIT_CODED_SLICE_BLA_N_LP:
 
   92     case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
 
   93     case NAL_UNIT_CODED_SLICE_IDR_N_LP:
 
   94     case NAL_UNIT_CODED_SLICE_CRA:
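The case labels above form the body of is_keyframe(); a hedged, stand-alone sketch of the same classification (return 1 for BLA/IDR/CRA slice NAL types, 0 otherwise), assuming only the NalUnitType enum from x265.h:

#include <x265.h>

/* Sketch: an access unit is a keyframe when its slice NAL is an IRAP type. */
static int is_irap_nal(NalUnitType naltype)
{
    switch (naltype) {
    case NAL_UNIT_CODED_SLICE_BLA_W_LP:
    case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
    case NAL_UNIT_CODED_SLICE_BLA_N_LP:
    case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
    case NAL_UNIT_CODED_SLICE_IDR_N_LP:
    case NAL_UNIT_CODED_SLICE_CRA:
        return 1;
    default:
        return 0;
    }
}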
 
  108     for (int i = 0; i < ctx->nb_rd; i++)

  109         if (!ctx->rd[i].in_use) {

  110             ctx->rd[i].in_use = 1;
 
  123     ctx->rd[idx].in_use = 1;
 
  132     memset(&ctx->rd[idx], 0, sizeof(ctx->rd[idx]));
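For context, a minimal sketch of the slot-pool pattern that rd_get()/rd_release() implement above. The Slot type and fixed-size pool are simplifications introduced here; the real ReorderedData array lives in libx265Context and can be grown:

#include <string.h>

typedef struct Slot { int in_use; } Slot;

/* Find a free slot and mark it used; return its index, or -1 if none is free. */
static int slot_get(Slot *pool, int nb_slots)
{
    for (int i = 0; i < nb_slots; i++) {
        if (!pool[i].in_use) {
            pool[i].in_use = 1;
            return i;
        }
    }
    return -1;
}

/* Return a slot to the pool by clearing it, which also resets in_use. */
static void slot_release(Slot *pool, int idx)
{
    memset(&pool[idx], 0, sizeof(pool[idx]));
}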
 
  139     ctx->api->param_free(ctx->params);

  142     for (int i = 0; i < ctx->nb_rd; i++)

  147         ctx->api->encoder_close(ctx->encoder);
 
  159     if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {

  174     if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {
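Both param-parse helpers above funnel numeric values through x265's string-based param_parse(). A hedged, generic sketch of that convert-then-parse step; the setter callback is a stand-in for ctx->api->param_parse and is used only for illustration:

#include <stdio.h>

/* Format an integer as text and hand it to a string-based option setter. */
static int set_int_option(int (*setter)(void *opaque, const char *key, const char *value),
                          void *opaque, const char *key, int value)
{
    char buf[32];
    snprintf(buf, sizeof(buf), "%d", value);
    return setter(opaque, key, buf);
}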
 
  189     ctx->api = x265_api_get(desc->comp[0].depth);

  191         ctx->api = x265_api_get(0);

  193     ctx->params = ctx->api->param_alloc();

  199     if (ctx->api->param_default_preset(ctx->params, ctx->preset, ctx->tune) < 0) {

  204         for (i = 0; x265_preset_names[i]; i++)

  209         for (i = 0; x265_tune_names[i]; i++)
 
  225     ctx->params->sourceWidth     = avctx->width;

  226     ctx->params->sourceHeight    = avctx->height;

  231     if (ctx->params->sourceWidth < 64 || ctx->params->sourceHeight < 64)
 
  232         ctx->params->maxCUSize = 32;
 
  233     if (ctx->params->sourceWidth < 32 || ctx->params->sourceHeight < 32)

  234         ctx->params->maxCUSize = 16;

  235     if (ctx->params->sourceWidth < 16 || ctx->params->sourceHeight < 16) {

  237                ctx->params->sourceWidth, ctx->params->sourceHeight);
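A hedged sketch of the size checks above: the CTU size is stepped down for very small frames and anything under 16x16 is rejected. Starting from 64 reflects x265's usual default maxCUSize; treat that value as an assumption here:

/* Returns the CTU size to use, or 0 if the frame is too small to encode. */
static int pick_max_cu_size(int width, int height)
{
    int max_cu = 64;
    if (width < 64 || height < 64)
        max_cu = 32;
    if (width < 32 || height < 32)
        max_cu = 16;
    if (width < 16 || height < 16)
        return 0;
    return max_cu;
}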
 
  242     ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1;
 
  245         ctx->params->vui.bEnableVideoFullRangeFlag =
 
  248         ctx->params->vui.bEnableVideoFullRangeFlag =
 
  261         ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
 
  265         ctx->params->vui.transferCharacteristics = avctx->color_trc;
 
  266 #if X265_BUILD >= 159 
  268             ctx->params->preferredTransferCharacteristics = ctx->params->vui.transferCharacteristics;
 
  276     ctx->params->vui.bEnableChromaLocInfoPresentFlag =
 
  278         desc->log2_chroma_w == 1 && desc->log2_chroma_h == 1;

  280     if (ctx->params->vui.bEnableChromaLocInfoPresentFlag) {
 
  281         ctx->params->vui.chromaSampleLocTypeTopField =
 
  282         ctx->params->vui.chromaSampleLocTypeBottomField =
 
  288         int sar_num, sar_den;
 
  293         snprintf(sar, sizeof(sar), "%d:%d", sar_num, sar_den);

  294         if (ctx->api->param_parse(ctx->params, "sar", sar) == X265_PARAM_BAD_VALUE) {
 
  300     switch (desc->log2_chroma_w) {

  304         if (desc->nb_components == 1) {

  305             if (ctx->api->api_build_number < 85) {
 
  307                        "libx265 version is %d, must be at least 85 for gray encoding.\n",
 
  308                        ctx->api->api_build_number);
 
  311             ctx->params->internalCsp = X265_CSP_I400;
 
  318             ctx->params->vui.bEnableVideoSignalTypePresentFlag  = 1;
 
  319             ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
 
  322         ctx->params->internalCsp = X265_CSP_I444;
 
  326         ctx->params->internalCsp = desc->log2_chroma_h == 1 ?
 
  327             X265_CSP_I420 : X265_CSP_I422;
 
  331                "Pixel format '%s' cannot be mapped to a libx265 CSP!\n",
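A hedged sketch of the chroma-subsampling-to-CSP mapping the switch above performs; gray input additionally needs a sufficiently new libx265 build, per the api_build_number check at line 305:

#include <x265.h>

/* Map log2 chroma shifts to an x265 color space; returns -1 if unsupported. */
static int map_subsampling_to_csp(int log2_chroma_w, int log2_chroma_h, int nb_components)
{
    if (nb_components == 1)
        return X265_CSP_I400;                      /* grayscale */
    if (log2_chroma_w == 0)
        return X265_CSP_I444;                      /* no horizontal subsampling */
    if (log2_chroma_w == 1)
        return log2_chroma_h == 1 ? X265_CSP_I420  /* 2x2 subsampling */
                                  : X265_CSP_I422; /* 2x1 subsampling */
    return -1;
}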
 
  340         if (ctx->api->param_parse(ctx->params, "crf", crf) == X265_PARAM_BAD_VALUE) {
 
  346         ctx->params->rc.rateControlMode = X265_RC_ABR;
 
  347     } else if (ctx->cqp >= 0) {

  353     if (avctx->qmin >= 0) {

  358     if (avctx->qmax >= 0) {

  368     if (avctx->qblur >= 0) {
 
  400         ctx->params->bRepeatHeaders = 1;
 
  417     if (avctx->refs >= 0) {

  426             int parse_ret = ctx->api->param_parse(ctx->params, en->key, en->value);
 
  429             case X265_PARAM_BAD_NAME:
 
  431                       "Unknown option: %s.\n", en->key);
 
  433             case X265_PARAM_BAD_VALUE:
 
  435                       "Invalid value for %s: %s.\n", en->key, en->value);
 
  444         ctx->params->rc.vbvBufferInit == 0.9) {
 
  449         if (ctx->api->param_apply_profile(ctx->params, ctx->profile) < 0) {

  453             for (i = 0; x265_profile_names[i]; i++)

  460     ctx->encoder = ctx->api->encoder_open(ctx->params);
 
  481                    "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size);
 
  497         if (ctx->params->rc.aqMode == X265_AQ_NONE) {

  498             if (!ctx->roi_warned) {

  504             int mb_size = (ctx->params->rc.qgSize == 8) ? 8 : 16;

  505             int mbx = (frame->width + mb_size - 1) / mb_size;

  506             int mby = (frame->height + mb_size - 1) / mb_size;
 
  507             int qp_range = 51 + 6 * (pic->bitDepth - 8);
 
  515             if (!roi_size || sd->size % roi_size != 0) {

  519             nb_rois = sd->size / roi_size;

  521             qoffsets = av_calloc(mbx * mby, sizeof(*qoffsets));

  527             for (int i = nb_rois - 1; i >= 0; i--) {
 
  528                 int startx, endx, starty, endy;
 
  533                 starty = FFMIN(mby, roi->top / mb_size);

  534                 endy   = FFMIN(mby, (roi->bottom + mb_size - 1) / mb_size);

  535                 startx = FFMIN(mbx, roi->left / mb_size);

  536                 endx   = FFMIN(mbx, (roi->right + mb_size - 1) / mb_size);

  544                 qoffset = av_clipf(qoffset * qp_range, -qp_range, +qp_range);

  546                 for (int y = starty; y < endy; y++)

  547                     for (int x = startx; x < endx; x++)
 
  548                         qoffsets[x + y*mbx] = qoffset;
 
  551             pic->quantOffsets = qoffsets;
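The loop above is easier to follow in isolation. A hedged sketch of the same rasterisation, assuming the ROI side data has already been validated and qp_range computed as 51 + 6 * (bitDepth - 8); regions are walked last-to-first so earlier entries in the array take precedence, matching the AVRegionOfInterest contract:

#include "libavutil/common.h"
#include "libavutil/frame.h"

static void rois_to_qoffsets(float *map, int mbx, int mby, int mb_size, int qp_range,
                             const AVRegionOfInterest *rois, int nb_rois)
{
    for (int i = nb_rois - 1; i >= 0; i--) {
        const AVRegionOfInterest *roi = &rois[i];
        int starty = FFMIN(mby, roi->top / mb_size);
        int endy   = FFMIN(mby, (roi->bottom + mb_size - 1) / mb_size);
        int startx = FFMIN(mbx, roi->left / mb_size);
        int endx   = FFMIN(mbx, (roi->right + mb_size - 1) / mb_size);
        /* qoffset is a rational in [-1, 1]; scale it to the QP range and clip. */
        float qoffset = av_clipf((float)roi->qoffset.num / roi->qoffset.den * qp_range,
                                 -qp_range, +qp_range);

        for (int y = starty; y < endy; y++)
            for (int x = startx; x < endx; x++)
                map[x + y * mbx] = qoffset;
    }
}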
 
  559     x265_sei *sei = &pic->userSEI;

  560     for (int i = 0; i < sei->numPayloads; i++)

  564         int idx = (int)(intptr_t)pic->userData - 1;

  566         pic->userData = NULL;
 
  570     sei->numPayloads = 0;
 
  574                                 const AVFrame *pic, int *got_packet)
 
  577     x265_picture x265pic;
 
  578     x265_picture x265pic_out = { 0 };
 
  588     ctx->api->picture_init(ctx->params, &x265pic);
 
  590     sei = &x265pic.userSEI;
 
  591     sei->numPayloads = 0;
 
  597         for (i = 0; i < 3; i++) {

  598            x265pic.planes[i] = pic->data[i];

  602         x265pic.pts      = pic->pts;

  606                                               (ctx->forced_idr ? X265_TYPE_IDR : X265_TYPE_I) :

  620         rd = &ctx->rd[rd_idx];
 
  623 #if FF_API_REORDERED_OPAQUE 
  638         x265pic.userData = (void*)(intptr_t)(rd_idx + 1);
 
  647             } else if (sei_data) {
 
  649                 x265_sei_payload *sei_payload;
 
  653                         (sei->numPayloads + 1) * sizeof(*sei_payload));

  660                 sei->payloads = ctx->sei_data;

  661                 sei_payload = &sei->payloads[sei->numPayloads];
 
  662                 sei_payload->payload = sei_data;
 
  663                 sei_payload->payloadSize = sei_size;
 
  673                 x265_sei_payload *sei_payload;
 
  680                         (sei->numPayloads + 1) * sizeof(*sei_payload));

  686                 sei->payloads = ctx->sei_data;

  687                 sei_payload = &sei->payloads[sei->numPayloads];

  689                 if (!sei_payload->payload) {

  693                 sei_payload->payloadSize = side_data->size;
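Both SEI branches above share the same grow-and-append step: enlarge the payload array with av_fast_realloc(), then fill in the next x265_sei_payload. A generic, hedged sketch of that pattern with a simplified element type (the names below are illustrative, not the file's):

#include <errno.h>
#include "libavutil/error.h"
#include "libavutil/mem.h"

typedef struct PayloadBuf { void *data; size_t size; } PayloadBuf;

/* Append one payload, growing the array as needed; returns 0 or AVERROR(ENOMEM). */
static int append_payload(PayloadBuf **array, unsigned int *alloc_size, int *count,
                          void *data, size_t size)
{
    PayloadBuf *tmp = av_fast_realloc(*array, alloc_size,
                                      (*count + 1) * sizeof(**array));
    if (!tmp)
        return AVERROR(ENOMEM);
    *array = tmp;
    tmp[*count].data = data;
    tmp[*count].size = size;
    (*count)++;
    return 0;
}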
 
  701     ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal,
  702                                    pic ? &x265pic : NULL, &x265pic_out);

  704     for (i = 0; i < sei->numPayloads; i++)
 
  714     for (i = 0; i < nnal; i++)

  715         payload += nal[i].sizeBytes;

  724     for (i = 0; i < nnal; i++) {

  725         memcpy(dst, nal[i].payload, nal[i].sizeBytes);

  726         dst += nal[i].sizeBytes;

  732     pkt->pts = x265pic_out.pts;

  733     pkt->dts = x265pic_out.dts;
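A hedged sketch of the packet assembly steps above: size the packet from the NAL list, copy the NALs back to back, then take pts/dts from x265's output picture. ff_get_encode_buffer() is the libavcodec-internal helper declared in encode.h:

#include <string.h>
#include <x265.h>
#include "avcodec.h"
#include "encode.h"   /* ff_get_encode_buffer(), libavcodec-internal */

static int nals_to_packet(AVCodecContext *avctx, AVPacket *pkt,
                          const x265_nal *nal, uint32_t nnal,
                          const x265_picture *pic_out)
{
    uint64_t payload = 0;
    uint8_t *dst;
    int ret;

    for (uint32_t i = 0; i < nnal; i++)
        payload += nal[i].sizeBytes;
    if (!payload)
        return 0;

    ret = ff_get_encode_buffer(avctx, pkt, payload, 0);
    if (ret < 0)
        return ret;

    dst = pkt->data;
    for (uint32_t i = 0; i < nnal; i++) {
        memcpy(dst, nal[i].payload, nal[i].sizeBytes);
        dst += nal[i].sizeBytes;
    }

    pkt->pts = pic_out->pts;
    pkt->dts = pic_out->dts;
    return 0;
}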
 
  735     switch (x265pic_out.sliceType) {
 
  752 #if X265_BUILD >= 130 
  753     if (x265pic_out.sliceType == X265_TYPE_B)
 
  755     if (x265pic_out.frameData.sliceType == 'b')
 
  761     if (x265pic_out.userData) {
 
  762         int idx = (int)(intptr_t)x265pic_out.userData - 1;
 
  765 #if FF_API_REORDERED_OPAQUE 
  767         avctx->reordered_opaque = rd->reordered_opaque;
 
  780 #if FF_API_REORDERED_OPAQUE 
  783         avctx->reordered_opaque = 0;
 
  845     if (x265_api_get(12))
 
  847     else if (x265_api_get(10))
 
  849     else if (x265_api_get(8))
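A hedged sketch of the probing above: x265_api_get(bitDepth) returns NULL when the linked libx265 cannot handle that depth, so the first successful probe selects the widest of the file's static pixel-format tables (x265_csp_twelve/ten/eight):

#include <stddef.h>
#include <x265.h>
#include "libavutil/pixfmt.h"

/* Pick the richest supported pixel-format list based on available bit depths. */
static const enum AVPixelFormat *pick_pix_fmts(const enum AVPixelFormat *eight,
                                               const enum AVPixelFormat *ten,
                                               const enum AVPixelFormat *twelve)
{
    if (x265_api_get(12))
        return twelve;
    if (x265_api_get(10))
        return ten;
    if (x265_api_get(8))
        return eight;
    return NULL;
}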
 
  853 #define OFFSET(x) offsetof(libx265Context, x) 
  854 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM 
  858     { "forced-idr",  "if forcing keyframes, force them as IDR frames",                              OFFSET(forced_idr), AV_OPT_TYPE_BOOL,   { .i64 =  0 },  0,       1, VE },

  862     { "udu_sei",     "Use user data unregistered SEI if available",                                 OFFSET(udu_sei),    AV_OPT_TYPE_BOOL,   { .i64 = 0 }, 0, 1, VE },

  864     { "x265-params", "set the x265 configuration using a :-separated list of key=value parameters", OFFSET(x265_opts),  AV_OPT_TYPE_DICT,   { 0 }, 0, 0, VE },
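As a usage note, the x265-params dictionary option above can be filled through the AVOptions API before opening the encoder; a hedged example where the option string itself is purely illustrative:

#include "libavcodec/avcodec.h"
#include "libavutil/opt.h"

/* Pass a colon-separated key=value list straight through to x265's own parser. */
static int set_x265_params(AVCodecContext *avctx)
{
    return av_opt_set(avctx->priv_data, "x265-params",
                      "keyint=60:min-keyint=60", 0);
}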
 
  879     { "keyint_min", "-1" },

  886     { "i_qfactor", "-1" },

  887     { "b_qfactor", "-1" },
 
  899     .p.priv_class     = &class,

  900     .p.wrapper_name   = "libx265",
 
  
#define FF_ENABLE_DEPRECATION_WARNINGS
int ff_alloc_a53_sei(const AVFrame *frame, size_t prefix_len, void **data, size_t *sei_size)
Check AVFrame for A53 side data and allocate and fill SEI message with A53 info.
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
int keyint_min
minimum GOP size
static av_cold int libx265_param_parse_int(AVCodecContext *avctx, const char *key, int value)
static av_cold int libx265_param_parse_float(AVCodecContext *avctx, const char *key, float value)
enum AVColorSpace colorspace
YUV colorspace type.
static const FFCodecDefault defaults[]
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
int64_t duration
Duration of the frame, in the same units as pts.
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
int ff_side_data_set_encoder_stats(AVPacket *pkt, int quality, int64_t *error, int error_count, int pict_type)
static const AVOption options[]
enum AVPixelFormat * pix_fmts
array of supported pixel formats, or NULL if unknown, array is terminated by -1
void * opaque
for some private data of the user
This structure describes decoded (raw) audio or video data.
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
@ AVCOL_RANGE_JPEG
Full range content.
int depth
Number of bits in the component.
static void free_picture(libx265Context *ctx, x265_picture *pic)
#define AV_PIX_FMT_YUV420P10
#define AV_DICT_IGNORE_SUFFIX
Return first entry in a dictionary whose first part corresponds to the search key,...
#define FF_CODEC_CAP_NOT_INIT_THREADSAFE
The codec is not known to be init-threadsafe (i.e.
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
int64_t duration
Duration of this packet in AVStream->time_base units, 0 if unknown.
#define AV_PKT_FLAG_DISPOSABLE
Flag is used to indicate packets that contain frames that can be discarded by the decoder.
#define AV_CODEC_FLAG_PSNR
error[] variables will be set during encoding.
AVCPBProperties * ff_add_cpb_side_data(AVCodecContext *avctx)
Add a CPB properties side data to an encoding context.
int qmax
maximum quantizer
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
#define AV_CODEC_FLAG_GLOBAL_HEADER
Place global headers in extradata instead of every keyframe.
int roi_warned
If the encoder does not support ROI then warn the first time we encounter a frame with ROI side data.
void * av_memdup(const void *p, size_t size)
Duplicate a buffer with av_malloc().
AVBufferRef * opaque_ref
AVBufferRef for free use by the API user.
static int libx265_encode_frame(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pic, int *got_packet)
#define AV_CODEC_FLAG_COPY_OPAQUE
float i_quant_factor
qscale factor between P- and I-frames If > 0 then the last P-frame quantizer will be used (q = lastp_...
AVCodec p
The public AVCodec.
AVBufferRef * opaque_ref
AVBufferRef for free use by the API user.
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
#define AV_PIX_FMT_GBRP10
static av_cold void libx265_encode_init_csp(FFCodec *codec)
int refs
number of reference frames
int flags
AV_CODEC_FLAG_*.
#define FF_CODEC_ENCODE_CB(func)
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
#define AV_PIX_FMT_YUV444P10
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Structure describing a single Region Of Interest.
int rc_initial_buffer_occupancy
Number of bits which should be loaded into the rc buffer before decoding starts.
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
attribute_deprecated int64_t reordered_opaque
reordered opaque 64 bits (generally an integer or a double precision float PTS but can be anything).
void * av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
Reallocate the given buffer if it is not large enough, otherwise do nothing.
void * av_realloc_array(void *ptr, size_t nmemb, size_t size)
int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
static void rd_release(libx265Context *ctx, int idx)
#define AV_CODEC_CAP_OTHER_THREADS
Codec supports multithreading through a method other than slice- or frame-level multithreading.
#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
This encoder can reorder user opaque values from input AVFrames and return them with corresponding ou...
#define av_assert0(cond)
assert() equivalent, that is always enabled.
@ SEI_TYPE_USER_DATA_REGISTERED_ITU_T_T35
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
int64_t rc_max_rate
maximum bitrate
void * opaque
for some private data of the user
This structure describes the bitrate properties of an encoded bitstream.
#define CODEC_LONG_NAME(str)
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
#define AV_PIX_FMT_GRAY10
int rc_buffer_size
decoder bitstream buffer size
#define LIBAVUTIL_VERSION_INT
Describe the class of an AVClass context structure.
enum AVColorRange color_range
MPEG vs JPEG YUV range.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
float qblur
amount of qscale smoothing over time (0.0-1.0)
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
int64_t bit_rate
the average bitrate
static av_cold int libx265_encode_init(AVCodecContext *avctx)
uint32_t self_size
Must be set to the size of this data structure (that is, sizeof(AVRegionOfInterest)).
const char * av_default_item_name(void *ptr)
Return the context name.
@ AV_PICTURE_TYPE_I
Intra.
#define AV_PIX_FMT_YUV422P10
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
static int FUNC() sei(CodedBitstreamContext *ctx, RWContext *rw, H264RawSEI *current)
@ AVCOL_RANGE_UNSPECIFIED
@ AV_FRAME_DATA_SEI_UNREGISTERED
User data unregistered metadata associated with a video frame.
float qcompress
amount of qscale change between easy & hard scenes (0.0-1.0)
AVRational time_base
This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented.
enum AVPictureType pict_type
Picture type of the frame.
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
int gop_size
the number of pictures in a group of pictures, or 0 for intra_only
AVBufferRef * frame_opaque_ref
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
#define AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV444P12
@ AVCHROMA_LOC_UNSPECIFIED
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
#define AVERROR_EXTERNAL
Generic error in an external library.
int flags
A combination of AV_PKT_FLAG values.
int64_t avg_bitrate
Average bitrate of the stream, in bits per second.
#define AV_LOG_INFO
Standard information.
float b_quant_factor
qscale factor between IP and B-frames If > 0 then the last P-frame quantizer will be used (q= lastp_q...
#define i(width, name, range_min, range_max)
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
int top
Distance in pixels from the top edge of the frame to the top and bottom edges and from the left edge ...
#define AV_PIX_FMT_GBRP12
int64_t max_bitrate
Maximum bitrate of the stream, in bits per second.
const char * name
Name of the codec implementation.
int av_buffer_replace(AVBufferRef **pdst, const AVBufferRef *src)
Ensure dst refers to the same data as src.
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
AVFrameSideData ** side_data
static av_cold int libx265_encode_set_roi(libx265Context *ctx, const AVFrame *frame, x265_picture *pic)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
void * av_calloc(size_t nmemb, size_t size)
#define AV_CODEC_FLAG_CLOSED_GOP
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
int64_t buffer_size
The size of the buffer to which the ratecontrol is applied, in bits.
#define AV_PIX_FMT_YUV420P12
#define AV_INPUT_BUFFER_PADDING_SIZE
static int rd_get(libx265Context *ctx)
int max_qdiff
maximum quantizer difference between frames
main external API structure.
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
@ SEI_TYPE_USER_DATA_UNREGISTERED
int qmin
minimum quantizer
enum AVFrameSideDataType type
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
static enum AVPixelFormat x265_csp_ten[]
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
static av_cold int libx265_encode_close(AVCodecContext *avctx)
FFCodec ff_libx265_encoder
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
#define FF_DISABLE_DEPRECATION_WARNINGS
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
@ AV_PICTURE_TYPE_P
Predicted.
static float add(float src0, float src1)
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
A reference to a data buffer.
#define FF_CODEC_CAP_AUTO_THREADS
Codec handles avctx->thread_count == 0 (auto) internally.
Structure to hold side data for an AVFrame.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
@ AVCOL_PRI_SMPTE432
SMPTE ST 432-1 (2010) / P3 D65 / Display P3.
This structure stores compressed data.
int width
picture width / height.
@ AV_FRAME_DATA_REGIONS_OF_INTEREST
Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of array element is ...
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
static const FFCodecDefault x265_defaults[]
#define AV_PIX_FMT_GRAY12
#define FF_QP2LAMBDA
factor to convert from H.263 QP to lambda
AVRational qoffset
Quantisation offset.
@ AVCOL_SPC_ICTCP
ITU-R BT.2100-0, ICtCp.
static enum AVPixelFormat x265_csp_eight[]
static enum AVPixelFormat x265_csp_twelve[]
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
static int is_keyframe(NalUnitType naltype)