24 #define X265_API_IMPORTS 1
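X265_API_IMPORTS is presumably defined ahead of the x265.h include so that MSVC builds linking against an x265 DLL see the API symbols as dllimport; on other toolchains the macro has no effect (an assumption about x265.h, not shown in this listing).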
70 case NAL_UNIT_CODED_SLICE_BLA_W_LP:
71 case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
72 case NAL_UNIT_CODED_SLICE_BLA_N_LP:
73 case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
74 case NAL_UNIT_CODED_SLICE_IDR_N_LP:
75 case NAL_UNIT_CODED_SLICE_CRA:
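These case labels belong to is_keyframe() (its declaration appears near the end of this page); every NAL type listed is an HEVC IRAP slice, so the function presumably just collapses them to a boolean. A minimal sketch, assuming the cases fall through to return 1 and every other NAL type returns 0:

    static int is_keyframe(NalUnitType naltype)
    {
        switch (naltype) {
        case NAL_UNIT_CODED_SLICE_BLA_W_LP:
        case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
        case NAL_UNIT_CODED_SLICE_BLA_N_LP:
        case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
        case NAL_UNIT_CODED_SLICE_IDR_N_LP:
        case NAL_UNIT_CODED_SLICE_CRA:
            return 1; /* IRAP slices start a new decodable sequence */
        default:
            return 0;
        }
    }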
86 ctx->api->param_free(ctx->params);
90 ctx->api->encoder_close(ctx->encoder);
102 if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {
117 if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {
132 ctx->api = x265_api_get(desc->comp[0].depth);
134 ctx->api = x265_api_get(0);
136 ctx->params = ctx->api->param_alloc();
142 if (ctx->api->param_default_preset(ctx->params, ctx->preset, ctx->tune) < 0) {
147 for (i = 0; x265_preset_names[i]; i++)
152 for (i = 0; x265_tune_names[i]; i++)
168 ctx->params->sourceWidth = avctx->width;
169 ctx->params->sourceHeight = avctx->height;
174 if (ctx->params->sourceWidth < 64 || ctx->params->sourceHeight < 64)
175 ctx->params->maxCUSize = 32;
176 if (ctx->params->sourceWidth < 32 || ctx->params->sourceHeight < 32)
177 ctx->params->maxCUSize = 16;
178 if (ctx->params->sourceWidth < 16 || ctx->params->sourceHeight < 16) {
180 ctx->params->sourceWidth, ctx->params->sourceHeight);
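In effect the wrapper shrinks the maximum CU size until the picture is at least one CU in each dimension: a 48x30 input, for example, first gets maxCUSize 32 (width below 64) and then 16 (height below 32), while a width or height below 16 is rejected with the error logged at line 180.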
185 ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1;
188 ctx->params->vui.bEnableVideoFullRangeFlag =
191 ctx->params->vui.bEnableVideoFullRangeFlag =
204 ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
208 ctx->params->vui.transferCharacteristics = avctx->color_trc;
209 #if X265_BUILD >= 159
211 ctx->params->preferredTransferCharacteristics = ctx->params->vui.transferCharacteristics;
219 ctx->params->vui.bEnableChromaLocInfoPresentFlag =
221 desc->log2_chroma_w == 1 && desc->log2_chroma_h == 1;
223 if (ctx->params->vui.bEnableChromaLocInfoPresentFlag) {
224 ctx->params->vui.chromaSampleLocTypeTopField =
225 ctx->params->vui.chromaSampleLocTypeBottomField =
231 int sar_num, sar_den;
236 snprintf(sar, sizeof(sar), "%d:%d", sar_num, sar_den);
237 if (ctx->api->param_parse(ctx->params, "sar", sar) == X265_PARAM_BAD_VALUE) {
243 switch (desc->log2_chroma_w) {
247 if (desc->nb_components == 1) {
248 if (ctx->api->api_build_number < 85) {
250 "libx265 version is %d, must be at least 85 for gray encoding.\n",
251 ctx->api->api_build_number);
254 ctx->params->internalCsp = X265_CSP_I400;
261 ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1;
262 ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
265 ctx->params->internalCsp = X265_CSP_I444;
269 ctx->params->internalCsp = desc->log2_chroma_h == 1 ?
270 X265_CSP_I420 : X265_CSP_I422;
274 "Pixel format '%s' cannot be mapped to a libx265 CSP!\n",
283 if (ctx->api->param_parse(ctx->params, "crf", crf) == X265_PARAM_BAD_VALUE) {
289 ctx->params->rc.rateControlMode = X265_RC_ABR;
290 } else if (ctx->cqp >= 0) {
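Rate control appears to be chosen in priority order: a non-negative crf option is handed to x265 as the "crf" string parameter, otherwise a target bit_rate switches the encoder to X265_RC_ABR, and otherwise a non-negative cqp presumably selects constant-QP mode (X265_RC_CQP) with rc.qp taken from it.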
297 if (avctx->qmin >= 0) {
302 if (avctx->qmax >= 0) {
313 if (avctx->qblur >= 0) {
345 ctx->params->bRepeatHeaders = 1;
362 if (avctx->refs >= 0) {
371 int parse_ret = ctx->api->param_parse(ctx->params, en->key, en->value);
374 case X265_PARAM_BAD_NAME:
376 "Unknown option: %s.\n", en->
key);
378 case X265_PARAM_BAD_VALUE:
380 "Invalid value for %s: %s.\n", en->
key, en->
value);
389 ctx->params->rc.vbvBufferInit == 0.9) {
394 if (ctx->api->param_apply_profile(ctx->params, ctx->profile) < 0) {
398 for (i = 0; x265_profile_names[i]; i++)
405 ctx->encoder = ctx->api->encoder_open(ctx->params);
426 "Cannot allocate HEVC header of size %d.\n", avctx->
extradata_size);
442 if (ctx->params->rc.aqMode == X265_AQ_NONE) {
443 if (!ctx->roi_warned) {
449 int mb_size = (ctx->params->rc.qgSize == 8) ? 8 : 16;
450 int mbx = (frame->width + mb_size - 1) / mb_size;
451 int mby = (frame->height + mb_size - 1) / mb_size;
452 int qp_range = 51 + 6 * (pic->bitDepth - 8);
460 if (!roi_size || sd->size % roi_size != 0) {
464 nb_rois = sd->size / roi_size;
466 qoffsets = av_calloc(mbx * mby, sizeof(*qoffsets));
472 for (int i = nb_rois - 1; i >= 0; i--) {
473 int startx, endx, starty, endy;
478 starty = FFMIN(mby, roi->top / mb_size);
479 endy = FFMIN(mby, (roi->bottom + mb_size - 1) / mb_size);
480 startx = FFMIN(mbx, roi->left / mb_size);
481 endx = FFMIN(mbx, (roi->right + mb_size - 1) / mb_size);
489 qoffset = av_clipf(qoffset * qp_range, -qp_range, +qp_range);
491 for (int y = starty; y < endy; y++)
492 for (int x = startx; x < endx; x++)
493 qoffsets[x + y*mbx] = qoffset;
496 pic->quantOffsets = qoffsets;
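The ROI path translates each AVRegionOfInterest into a per-block quantiser-offset grid: the frame is split into mb_size x mb_size cells (8 or 16 depending on rc.qgSize), the region's pixel rectangle becomes a half-open cell range (start rounded down, end rounded up, both clamped to the grid), and qoffset, a rational nominally in [-1, 1], is scaled by qp_range and clipped before being written into every covered cell. Iterating from the last region to the first lets earlier, higher-priority regions overwrite later ones. As a worked example, assuming 8-bit input (qp_range = 51) and 16-pixel cells, a region covering pixels (0,0)-(63,31) with qoffset = -1/2 fills cells x = 0..3, y = 0..1 with an offset of about -25.5.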
503 const AVFrame *pic, int *got_packet)
506 x265_picture x265pic;
507 x265_picture x265pic_out = { 0 };
516 ctx->api->picture_init(ctx->params, &x265pic);
519 x265_sei *sei = &x265pic.userSEI;
520 sei->numPayloads = 0;
521 for (i = 0; i < 3; i++) {
522 x265pic.planes[i] = pic->data[i];
526 x265pic.pts = pic->pts;
530 (ctx->forced_idr ? X265_TYPE_IDR : X265_TYPE_I) :
541 if (!x265pic.userData) {
553 x265_sei_payload *sei_payload;
560 (sei->numPayloads + 1) * sizeof(*sei_payload));
567 sei->payloads = ctx->sei_data;
568 sei_payload = &sei->payloads[sei->numPayloads];
569 sei_payload->payload = side_data->data;
570 sei_payload->payloadSize = side_data->size;
578 ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal,
579 pic ? &x265pic : NULL, &x265pic_out);
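Since the encoder is flushed with NULL input at the end (see AV_CODEC_CAP_DELAY below), pic may be NULL here: passing NULL drains x265's lookahead and keeps returning delayed NAL units, while x265pic_out carries the pts/dts and slice type of whichever input frame the output actually belongs to (copied into the packet at lines 607-608).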
589 for (i = 0; i < nnal; i++)
590 payload += nal[i].sizeBytes;
599 for (i = 0; i < nnal; i++) {
600 memcpy(dst, nal[i].payload, nal[i].sizeBytes);
601 dst += nal[i].sizeBytes;
607 pkt->pts = x265pic_out.pts;
608 pkt->dts = x265pic_out.dts;
610 switch (x265pic_out.sliceType) {
627 #if X265_BUILD >= 130
628 if (x265pic_out.sliceType == X265_TYPE_B)
630 if (x265pic_out.frameData.sliceType == 'b')
636 if (x265pic_out.userData) {
699 if (x265_api_get(12))
701 else if (x265_api_get(10))
703 else if (x265_api_get(8))
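libx265_encode_init_csp() probes which bit depths the linked libx265 supports and publishes the matching pixel-format list on the codec. A minimal sketch, assuming the three x265_csp_* tables referenced at the end of this page are assigned in this order (the assignments themselves are not shown in the listing):

    static av_cold void libx265_encode_init_csp(FFCodec *codec)
    {
        if (x265_api_get(12))          /* 12-bit build available */
            codec->p.pix_fmts = x265_csp_twelve;
        else if (x265_api_get(10))     /* 10-bit build available */
            codec->p.pix_fmts = x265_csp_ten;
        else if (x265_api_get(8))      /* 8-bit only */
            codec->p.pix_fmts = x265_csp_eight;
    }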
707 #define OFFSET(x) offsetof(libx265Context, x)
708 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
712 { "forced-idr", "if forcing keyframes, force them as IDR frames", OFFSET(forced_idr), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
716 { "udu_sei", "Use user data unregistered SEI if available", OFFSET(udu_sei), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, VE },
717 { "x265-params", "set the x265 configuration using a :-separated list of key=value parameters", OFFSET(x265_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
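Because x265-params is an AV_OPT_TYPE_DICT option, its entries are walked with av_dict_get() and handed one by one to param_parse() (the loop around line 371 above); on the ffmpeg command line this corresponds to something like -x265-params "bframes=8:keyint=240" (hypothetical values), with colon-separated key=value pairs exactly as the help text describes.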
732 { "keyint_min", "-1" },
739 { "i_qfactor", "-1" },
740 { "b_qfactor", "-1" },
752 .p.priv_class = &class,
753 .p.wrapper_name = "libx265",
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
int keyint_min
minimum GOP size
static av_cold int libx265_param_parse_int(AVCodecContext *avctx, const char *key, int value)
static av_cold int libx265_param_parse_float(AVCodecContext *avctx, const char *key, float value)
enum AVColorSpace colorspace
YUV colorspace type.
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
int ff_side_data_set_encoder_stats(AVPacket *pkt, int quality, int64_t *error, int error_count, int pict_type)
static const AVOption options[]
enum AVPixelFormat * pix_fmts
array of supported pixel formats, or NULL if unknown, array is terminated by -1
This structure describes decoded (raw) audio or video data.
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
@ AVCOL_RANGE_JPEG
Full range content.
int depth
Number of bits in the component.
#define AV_PIX_FMT_YUV420P10
#define AV_DICT_IGNORE_SUFFIX
Return first entry in a dictionary whose first part corresponds to the search key,...
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
#define AV_PKT_FLAG_DISPOSABLE
Flag is used to indicate packets that contain frames that can be discarded by the decoder.
#define AV_CODEC_FLAG_PSNR
error[?] variables will be set during encoding.
AVCPBProperties * ff_add_cpb_side_data(AVCodecContext *avctx)
Add a CPB properties side data to an encoding context.
int qmax
maximum quantizer
@ SEI_TYPE_USER_DATA_UNREGISTERED
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
#define AV_CODEC_FLAG_GLOBAL_HEADER
Place global headers in extradata instead of every keyframe.
int roi_warned
If the encoder does not support ROI then warn the first time we encounter a frame with ROI side data.
static int libx265_encode_frame(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pic, int *got_packet)
float i_quant_factor
qscale factor between P- and I-frames If > 0 then the last P-frame quantizer will be used (q = lastp_...
AVCodec p
The public AVCodec.
int thread_count
thread count is used to decide how many independent tasks should be passed to execute()
#define AV_PIX_FMT_GBRP10
static av_cold void libx265_encode_init_csp(FFCodec *codec)
int refs
number of reference frames
int flags
AV_CODEC_FLAG_*.
#define FF_CODEC_ENCODE_CB(func)
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
#define AV_PIX_FMT_YUV444P10
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Structure describing a single Region Of Interest.
int rc_initial_buffer_occupancy
Number of bits which should be loaded into the rc buffer before decoding starts.
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting col...
void * av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
Reallocate the given buffer if it is not large enough, otherwise do nothing.
int ticks_per_frame
For some codecs, the time base is closer to the field rate than the frame rate.
#define AV_CODEC_CAP_OTHER_THREADS
Codec supports multithreading through a method other than slice- or frame-level multithreading.
#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
This codec takes the reordered_opaque field from input AVFrames and returns it in the corresponding f...
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
int64_t rc_max_rate
maximum bitrate
This structure describes the bitrate properties of an encoded bitstream.
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting col...
#define AV_PIX_FMT_GRAY10
int rc_buffer_size
decoder bitstream buffer size
#define LIBAVUTIL_VERSION_INT
Describe the class of an AVClass context structure.
enum AVColorRange color_range
MPEG vs JPEG YUV range.
float qblur
amount of qscale smoothing over time (0.0-1.0)
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting col...
int64_t bit_rate
the average bitrate
static av_cold int libx265_encode_init(AVCodecContext *avctx)
uint32_t self_size
Must be set to the size of this data structure (that is, sizeof(AVRegionOfInterest)).
const char * av_default_item_name(void *ptr)
Return the context name.
@ AV_PICTURE_TYPE_I
Intra.
#define AV_PIX_FMT_YUV422P10
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
static int FUNC() sei(CodedBitstreamContext *ctx, RWContext *rw, H264RawSEI *current)
@ AVCOL_RANGE_UNSPECIFIED
@ AV_FRAME_DATA_SEI_UNREGISTERED
User data unregistered metadata associated with a video frame.
float qcompress
amount of qscale change between easy & hard scenes (0.0-1.0)
AVRational time_base
This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented.
enum AVPictureType pict_type
Picture type of the frame.
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
static const FFCodecDefault defaults[]
int gop_size
the number of pictures in a group of pictures, or 0 for intra_only
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
#define AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV444P12
@ AVCHROMA_LOC_UNSPECIFIED
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
#define AVERROR_EXTERNAL
Generic error in an external library.
int flags
A combination of AV_PKT_FLAG values.
int64_t avg_bitrate
Average bitrate of the stream, in bits per second.
#define AV_LOG_INFO
Standard information.
float b_quant_factor
qscale factor between IP and B-frames If > 0 then the last P-frame quantizer will be used (q= lastp_q...
#define i(width, name, range_min, range_max)
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
int top
Distance in pixels from the top edge of the frame to the top and bottom edges and from the left edge ...
#define AV_PIX_FMT_GBRP12
int64_t max_bitrate
Maximum bitrate of the stream, in bits per second.
const char * name
Name of the codec implementation.
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
AVFrameSideData ** side_data
static av_cold int libx265_encode_set_roi(libx265Context *ctx, const AVFrame *frame, x265_picture *pic)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
void * av_calloc(size_t nmemb, size_t size)
#define AV_CODEC_FLAG_CLOSED_GOP
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
int64_t buffer_size
The size of the buffer to which the ratecontrol is applied, in bits.
#define AV_PIX_FMT_YUV420P12
#define AV_INPUT_BUFFER_PADDING_SIZE
int max_qdiff
maximum quantizer difference between frames
main external API structure.
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
int qmin
minimum quantizer
enum AVFrameSideDataType type
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
static enum AVPixelFormat x265_csp_ten[]
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
static av_cold int libx265_encode_close(AVCodecContext *avctx)
FFCodec ff_libx265_encoder
int64_t reordered_opaque
reordered opaque 64 bits (generally an integer or a double precision float PTS but can be anything).
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
@ AV_PIX_FMT_GBRP
planar GBR 4:4:4 24bpp
@ AV_PICTURE_TYPE_P
Predicted.
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
#define FF_CODEC_CAP_AUTO_THREADS
Codec handles avctx->thread_count == 0 (auto) internally.
Structure to hold side data for an AVFrame.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
@ AVCOL_PRI_SMPTE432
SMPTE ST 432-1 (2010) / P3 D65 / Display P3.
This structure stores compressed data.
int64_t reordered_opaque
opaque 64-bit number (generally a PTS) that will be reordered and output in AVFrame....
int width
picture width / height.
@ AV_FRAME_DATA_REGIONS_OF_INTEREST
Regions Of Interest, the data is an array of AVRegionOfInterest type, the number of array element is ...
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
static const FFCodecDefault x265_defaults[]
#define AV_PIX_FMT_GRAY12
#define FF_QP2LAMBDA
factor to convert from H.263 QP to lambda
AVRational qoffset
Quantisation offset.
@ AVCOL_SPC_ICTCP
ITU-R BT.2100-0, ICtCp.
static enum AVPixelFormat x265_csp_eight[]
static enum AVPixelFormat x265_csp_twelve[]
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
static int is_keyframe(NalUnitType naltype)