42 #include <jxl/decode.h>
43 #include <jxl/thread_parallel_runner.h>
51 #if JPEGXL_NUMERIC_VERSION >= JPEGXL_COMPUTE_NUMERIC_VERSION(0, 8, 0)
68 ctx->events = JXL_DEC_BASIC_INFO | JXL_DEC_FULL_IMAGE
69 | JXL_DEC_COLOR_ENCODING | JXL_DEC_FRAME;
70 if (JxlDecoderSubscribeEvents(ctx->decoder, ctx->events) != JXL_DEC_SUCCESS) {
75 if (JxlDecoderSetParallelRunner(ctx->decoder, JxlThreadParallelRunner, ctx->runner) != JXL_DEC_SUCCESS) {
80 memset(&ctx->basic_info, 0, sizeof(JxlBasicInfo));
81 memset(&ctx->jxl_pixfmt, 0, sizeof(JxlPixelFormat));
82 ctx->prev_is_last = 1;
90 JxlMemoryManager manager;
93 ctx->decoder = JxlDecoderCreate(&manager);
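The fragment above (source lines 68–93) subscribes to the decoder events the codec cares about, installs the thread-pool runner and creates the decoder with a custom memory manager. Below is a minimal, self-contained sketch of the same sequence using only the public libjxl API; the NULL memory manager, the caller-supplied thread count and the bare error returns are simplifications of what the listing does via ff_libjxl_init_memory_manager() and ff_libjxl_get_threadcount().

#include <jxl/decode.h>
#include <jxl/thread_parallel_runner.h>
#include <stddef.h>

/* Create a decoder, subscribe to the same events as the listing, and attach
 * a thread-parallel runner. Returns 0 on success, -1 on failure. */
static int init_jxl_decoder(JxlDecoder **dec, void **runner, size_t threads)
{
    *dec    = JxlDecoderCreate(NULL);                      /* NULL: default allocator */
    *runner = JxlThreadParallelRunnerCreate(NULL, threads);
    if (!*dec || !*runner)
        return -1;

    int events = JXL_DEC_BASIC_INFO | JXL_DEC_FULL_IMAGE
               | JXL_DEC_COLOR_ENCODING | JXL_DEC_FRAME;
    if (JxlDecoderSubscribeEvents(*dec, events) != JXL_DEC_SUCCESS)
        return -1;
    if (JxlDecoderSetParallelRunner(*dec, JxlThreadParallelRunner, *runner) != JXL_DEC_SUCCESS)
        return -1;
    return 0;
}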
115 const JxlBasicInfo *basic_info = &ctx->basic_info;
116 JxlPixelFormat *format = &ctx->jxl_pixfmt;
117 format->endianness = JXL_NATIVE_ENDIAN;
118 format->num_channels = basic_info->num_color_channels + (basic_info->alpha_bits > 0);
119 #if JPEGXL_NUMERIC_VERSION >= JPEGXL_COMPUTE_NUMERIC_VERSION(0, 8, 0)
121 ctx->jxl_bit_depth.type = JXL_BIT_DEPTH_FROM_PIXEL_FORMAT;
122 ctx->jxl_bit_depth.exponent_bits_per_sample = basic_info->exponent_bits_per_sample;
125 if (basic_info->num_color_channels == 1) {
126 if (basic_info->bits_per_sample <= 8) {
127 format->data_type = JXL_TYPE_UINT8;
130 if (basic_info->exponent_bits_per_sample || basic_info->bits_per_sample > 16) {
131 if (!basic_info->alpha_bits) {
132 format->data_type = JXL_TYPE_FLOAT;
137 format->data_type = JXL_TYPE_UINT16;
142 if (basic_info->num_color_channels == 3) {
143 if (basic_info->bits_per_sample <= 8) {
144 format->data_type = JXL_TYPE_UINT8;
147 if (basic_info->exponent_bits_per_sample || basic_info->bits_per_sample > 16) {
148 format->data_type = JXL_TYPE_FLOAT;
151 format->data_type = JXL_TYPE_UINT16;
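Lines 125–151 are shown only partially, so the full branch structure is elided. The rule they implement can be summarized by the sketch below: float output when the samples are floating point or wider than 16 bits, otherwise 8- or 16-bit unsigned integers. This is an illustration of the rule rather than a verbatim copy; the listing also checks alpha_bits in the single-channel float case (line 131), a detail omitted here.

#include <jxl/decode.h>

/* Pick the libjxl output data type from the basic info, mirroring the
 * decision points visible at lines 126, 130, 143 and 147. */
static JxlDataType pick_data_type(const JxlBasicInfo *bi)
{
    if (bi->exponent_bits_per_sample || bi->bits_per_sample > 16)
        return JXL_TYPE_FLOAT;   /* float samples, or more than 16 bits */
    if (bi->bits_per_sample <= 8)
        return JXL_TYPE_UINT8;   /* fits in 8 bits per sample */
    return JXL_TYPE_UINT16;      /* 9-16 bit integer samples */
}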
164 desc.prim.r.x = av_d2q(jxl_color->primaries_red_xy[0], 300000);
165 desc.prim.r.y = av_d2q(jxl_color->primaries_red_xy[1], 300000);
166 desc.prim.g.x = av_d2q(jxl_color->primaries_green_xy[0], 300000);
167 desc.prim.g.y = av_d2q(jxl_color->primaries_green_xy[1], 300000);
168 desc.prim.b.x = av_d2q(jxl_color->primaries_blue_xy[0], 300000);
169 desc.prim.b.y = av_d2q(jxl_color->primaries_blue_xy[1], 300000);
170 desc.wp.x = av_d2q(jxl_color->white_point_xy[0], 300000);
171 desc.wp.y = av_d2q(jxl_color->white_point_xy[1], 300000);
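The assignments above convert the custom chromaticities reported by libjxl into the rational representation that libavutil's colorspace helpers expect. A hedged sketch of how such a description can be matched back to a known enum AVColorPrimaries value follows; the function name and parameter layout are illustrative.

#include <libavutil/csp.h>
#include <libavutil/rational.h>

/* Build an AVColorPrimariesDesc from xy chromaticities and look up the
 * closest known primaries constant. */
static enum AVColorPrimaries primaries_from_xy(const double red[2], const double green[2],
                                               const double blue[2], const double wp[2])
{
    AVColorPrimariesDesc desc;
    desc.prim.r.x = av_d2q(red[0],   300000); desc.prim.r.y = av_d2q(red[1],   300000);
    desc.prim.g.x = av_d2q(green[0], 300000); desc.prim.g.y = av_d2q(green[1], 300000);
    desc.prim.b.x = av_d2q(blue[0],  300000); desc.prim.b.y = av_d2q(blue[1],  300000);
    desc.wp.x     = av_d2q(wp[0],    300000); desc.wp.y     = av_d2q(wp[1],    300000);
    /* Returns AVCOL_PRI_UNSPECIFIED when no known set of primaries matches. */
    return av_csp_primaries_id_from_desc(&desc);
}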
187 switch (jxl_color->transfer_function) {
194 case JXL_TRANSFER_FUNCTION_GAMMA:
195 if (jxl_color->gamma > 0.45355 && jxl_color->gamma < 0.45555)
197 else if (jxl_color->gamma > 0.35614 && jxl_color->gamma < 0.35814)
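The two gamma windows checked at lines 195 and 197 correspond to encoding exponents of roughly 1/2.2 ≈ 0.4545 and 1/2.8 ≈ 0.3571, i.e. the BT.470-style gamma-2.2 and gamma-2.8 curves. A small sketch of that mapping, with the unmatched case left unspecified:

#include <libavutil/pixfmt.h>

/* Map a libjxl gamma value (the encoding exponent) onto an FFmpeg transfer
 * characteristic; the ranges match the constants used in the listing. */
static enum AVColorTransferCharacteristic trc_from_jxl_gamma(double gamma)
{
    if (gamma > 0.45355 && gamma < 0.45555)
        return AVCOL_TRC_GAMMA22;    /* exponent ~= 1/2.2 */
    if (gamma > 0.35614 && gamma < 0.35814)
        return AVCOL_TRC_GAMMA28;    /* exponent ~= 1/2.8 */
    return AVCOL_TRC_UNSPECIFIED;    /* no close standard match */
}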
213 JxlDecoderStatus jret;
216 #if JPEGXL_NUMERIC_VERSION < JPEGXL_COMPUTE_NUMERIC_VERSION(0, 9, 0)
217 jret = JxlDecoderGetICCProfileSize(ctx->decoder, &ctx->jxl_pixfmt, JXL_COLOR_PROFILE_TARGET_DATA, &icc_len);
219 jret = JxlDecoderGetICCProfileSize(ctx->decoder, JXL_COLOR_PROFILE_TARGET_DATA, &icc_len);
221 if (jret == JXL_DEC_SUCCESS && icc_len > 0) {
226 #if JPEGXL_NUMERIC_VERSION < JPEGXL_COMPUTE_NUMERIC_VERSION(0, 9, 0)
227 jret = JxlDecoderGetColorAsICCProfile(ctx->decoder, &ctx->jxl_pixfmt, JXL_COLOR_PROFILE_TARGET_DATA,
228                                       ctx->iccp->data, icc_len);
230 jret = JxlDecoderGetColorAsICCProfile(ctx->decoder, JXL_COLOR_PROFILE_TARGET_DATA, ctx->iccp->data, icc_len);
232 if (jret != JXL_DEC_SUCCESS) {
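Lines 216–231 show the two-step ICC read, with a version guard because libjxl 0.9 removed the JxlPixelFormat argument from both calls. A self-contained sketch of the same pattern, using a plain malloc() buffer where the listing writes into ctx->iccp->data:

#include <jxl/decode.h>
#include <stdint.h>
#include <stdlib.h>

/* Query the ICC profile size, then read the profile into a freshly
 * allocated buffer. Returns 0 on success, -1 otherwise. */
static int read_icc(JxlDecoder *dec, const JxlPixelFormat *fmt,
                    uint8_t **icc, size_t *icc_len)
{
    JxlDecoderStatus jret;
#if JPEGXL_NUMERIC_VERSION < JPEGXL_COMPUTE_NUMERIC_VERSION(0, 9, 0)
    jret = JxlDecoderGetICCProfileSize(dec, fmt, JXL_COLOR_PROFILE_TARGET_DATA, icc_len);
#else
    jret = JxlDecoderGetICCProfileSize(dec, JXL_COLOR_PROFILE_TARGET_DATA, icc_len);
#endif
    if (jret != JXL_DEC_SUCCESS || !*icc_len)
        return -1;
    *icc = malloc(*icc_len);
    if (!*icc)
        return -1;
#if JPEGXL_NUMERIC_VERSION < JPEGXL_COMPUTE_NUMERIC_VERSION(0, 9, 0)
    jret = JxlDecoderGetColorAsICCProfile(dec, fmt, JXL_COLOR_PROFILE_TARGET_DATA, *icc, *icc_len);
#else
    jret = JxlDecoderGetColorAsICCProfile(dec, JXL_COLOR_PROFILE_TARGET_DATA, *icc, *icc_len);
#endif
    return jret == JXL_DEC_SUCCESS ? 0 : -1;
}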
261 JxlDecoderStatus jret;
263 JxlColorEncoding jxl_color;
267 #if JPEGXL_NUMERIC_VERSION < JPEGXL_COMPUTE_NUMERIC_VERSION(0, 9, 0)
268 jret = JxlDecoderGetColorAsEncodedProfile(ctx->decoder, NULL, JXL_COLOR_PROFILE_TARGET_ORIGINAL, &jxl_color);
270 jret = JxlDecoderGetColorAsEncodedProfile(ctx->decoder, JXL_COLOR_PROFILE_TARGET_ORIGINAL, &jxl_color);
272 if (jret == JXL_DEC_SUCCESS) {
274 jret = JxlDecoderSetPreferredColorProfile(ctx->decoder, &jxl_color);
275 if (jret == JXL_DEC_SUCCESS)
276 #if JPEGXL_NUMERIC_VERSION < JPEGXL_COMPUTE_NUMERIC_VERSION(0, 9, 0)
277 jret = JxlDecoderGetColorAsEncodedProfile(ctx->decoder, &ctx->jxl_pixfmt,
278                                           JXL_COLOR_PROFILE_TARGET_DATA, &jxl_color);
280 jret = JxlDecoderGetColorAsEncodedProfile(ctx->decoder, JXL_COLOR_PROFILE_TARGET_DATA, &jxl_color);
284 if (jret != JXL_DEC_SUCCESS)
288 if (ctx->basic_info.uses_original_profile) {
304 if (ctx->basic_info.num_color_channels > 1)
320 jxl_color.primaries = JXL_PRIMARIES_2100;
324 jxl_color.white_point = JXL_WHITE_POINT_D65;
327 if (ctx->jxl_pixfmt.data_type == JXL_TYPE_FLOAT
328     || ctx->jxl_pixfmt.data_type == JXL_TYPE_FLOAT16) {
330 jxl_color.transfer_function = JXL_TRANSFER_FUNCTION_LINEAR;
334 jxl_color.transfer_function = JXL_TRANSFER_FUNCTION_SRGB;
339 jxl_color.rendering_intent = JXL_RENDERING_INTENT_RELATIVE;
340 jxl_color.color_space = ctx->basic_info.num_color_channels > 1 ? JXL_COLOR_SPACE_RGB : JXL_COLOR_SPACE_GRAY;
341 jret = JxlDecoderSetPreferredColorProfile(ctx->decoder, &jxl_color);
342 if (jret != JXL_DEC_SUCCESS) {
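When the embedded profile cannot be used as-is, the code builds a JxlColorEncoding by hand (primaries, white point, transfer function, rendering intent) and asks libjxl to convert the output to it. The sketch below shows only a plain SDR fallback under the assumption of sRGB primaries; the listing instead derives those fields from context (for example JXL_PRIMARIES_2100 at line 320 for wide-gamut content), and the helper name here is illustrative.

#include <jxl/decode.h>

/* Request a simple fallback output profile: linear for float output,
 * the sRGB curve otherwise, with relative rendering intent. */
static JxlDecoderStatus request_fallback_profile(JxlDecoder *dec, int is_rgb, int is_float)
{
    JxlColorEncoding c = {0};
    c.color_space       = is_rgb ? JXL_COLOR_SPACE_RGB : JXL_COLOR_SPACE_GRAY;
    c.primaries         = JXL_PRIMARIES_SRGB;
    c.white_point       = JXL_WHITE_POINT_D65;
    c.transfer_function = is_float ? JXL_TRANSFER_FUNCTION_LINEAR : JXL_TRANSFER_FUNCTION_SRGB;
    c.rendering_intent  = JXL_RENDERING_INTENT_RELATIVE;
    return JxlDecoderSetPreferredColorProfile(dec, &c);
}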
366 JxlDecoderStatus jret = JXL_DEC_SUCCESS;
379 ctx->accumulated_pts = 0;
380 ctx->frame_duration = 0;
383 if (jret == JXL_DEC_NEED_MORE_INPUT) {
393 if (jret == JXL_DEC_ERROR) {
399 jret = JxlDecoderProcessInput(ctx->decoder);
405 remaining = JxlDecoderReleaseInput(ctx->decoder);
413 case JXL_DEC_NEED_MORE_INPUT:
416 case JXL_DEC_BASIC_INFO:
418 if (JxlDecoderGetBasicInfo(ctx->decoder, &ctx->basic_info) != JXL_DEC_SUCCESS) {
433 if (ctx->basic_info.have_animation)
434     ctx->anim_timebase = av_make_q(ctx->basic_info.animation.tps_denominator,
435                                    ctx->basic_info.animation.tps_numerator);
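libjxl expresses animation timing in ticks, with tps_numerator/tps_denominator ticks per second, so one tick lasts tps_denominator/tps_numerator seconds; that is why the timebase at line 434 is built with the denominator first. A sketch of how a tick-based frame duration can be rescaled into a packet timebase (the function name is illustrative):

#include <libavutil/mathematics.h>
#include <libavutil/rational.h>
#include <stdint.h>

/* Convert a duration in animation ticks into pts units of pkt_timebase. */
static int64_t ticks_to_pts(uint32_t duration_ticks,
                            uint32_t tps_num, uint32_t tps_den,
                            AVRational pkt_timebase)
{
    AVRational anim_timebase = av_make_q(tps_den, tps_num); /* seconds per tick */
    return av_rescale_q(duration_ticks, anim_timebase, pkt_timebase);
}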
437 case JXL_DEC_COLOR_ENCODING:
443 case JXL_DEC_NEED_IMAGE_OUT_BUFFER:
448 ctx->jxl_pixfmt.align = ctx->frame->linesize[0];
449 if (JxlDecoderSetImageOutBuffer(ctx->decoder, &ctx->jxl_pixfmt,
450                                 ctx->frame->data[0], ctx->frame->buf[0]->size)
451     != JXL_DEC_SUCCESS) {
455 #if JPEGXL_NUMERIC_VERSION >= JPEGXL_COMPUTE_NUMERIC_VERSION(0, 8, 0)
456 if (JxlDecoderSetImageOutBitDepth(ctx->decoder, &ctx->jxl_bit_depth) != JXL_DEC_SUCCESS) {
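Lines 448–456 hand the AVFrame's buffer to libjxl: the row stride goes into JxlPixelFormat.align and, on libjxl 0.8 or newer, the output bit depth is pinned to the pixel format. A condensed sketch with a generic buffer in place of the AVFrame plumbing:

#include <jxl/decode.h>
#include <stdint.h>
#include <stddef.h>

/* Attach an output buffer with an explicit row stride and, where available,
 * request samples at the depth implied by the pixel format. */
static int attach_output_buffer(JxlDecoder *dec, JxlPixelFormat *fmt,
                                uint8_t *data, size_t size, size_t linesize)
{
    fmt->align = linesize;   /* bytes per row, including any padding */
    if (JxlDecoderSetImageOutBuffer(dec, fmt, data, size) != JXL_DEC_SUCCESS)
        return -1;
#if JPEGXL_NUMERIC_VERSION >= JPEGXL_COMPUTE_NUMERIC_VERSION(0, 8, 0)
    JxlBitDepth bd = { .type = JXL_BIT_DEPTH_FROM_PIXEL_FORMAT };
    if (JxlDecoderSetImageOutBitDepth(dec, &bd) != JXL_DEC_SUCCESS)
        return -1;
#endif
    return 0;
}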
465 if (ctx->prev_is_last) {
473 if (JxlDecoderGetFrameHeader(ctx->decoder, &header) != JXL_DEC_SUCCESS) {
479 if (ctx->basic_info.have_animation && header.duration)
482 case JXL_DEC_FULL_IMAGE:
490 if (ctx->basic_info.have_animation) {
499 ctx->accumulated_pts += ctx->frame_duration;
503 case JXL_DEC_SUCCESS:
511 JxlDecoderReset(ctx->decoder);
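The statuses handled between lines 413 and 511 form the standard libjxl pull loop: feed input, call JxlDecoderProcessInput() until it needs more data or finishes, and react to each event. A condensed, self-contained sketch of that loop, with the FFmpeg frame handling reduced to comments:

#include <jxl/decode.h>
#include <stdint.h>
#include <stddef.h>

/* Drive the decoder over one buffer. Returns 1 when the codestream is done,
 * 0 when more input is needed, -1 on error. */
static int drive_decoder(JxlDecoder *dec, const uint8_t *buf, size_t len)
{
    if (JxlDecoderSetInput(dec, buf, len) != JXL_DEC_SUCCESS)
        return -1;
    for (;;) {
        switch (JxlDecoderProcessInput(dec)) {
        case JXL_DEC_NEED_MORE_INPUT:       /* caller must supply more bytes      */
            JxlDecoderReleaseInput(dec);
            return 0;
        case JXL_DEC_BASIC_INFO:            /* dimensions, bit depth, animation   */
        case JXL_DEC_COLOR_ENCODING:        /* ICC or encoded color profile ready */
        case JXL_DEC_FRAME:                 /* per-frame header (duration etc.)   */
        case JXL_DEC_NEED_IMAGE_OUT_BUFFER: /* attach the destination buffer      */
        case JXL_DEC_FULL_IMAGE:            /* one frame fully decoded            */
            continue;                       /* handled as in the listing above    */
        case JXL_DEC_SUCCESS:               /* end of codestream                  */
            JxlDecoderReleaseInput(dec);
            JxlDecoderReset(dec);           /* ready for the next image           */
            return 1;
        default:
            return -1;                      /* JXL_DEC_ERROR or unexpected status */
        }
    }
}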
526 JxlThreadParallelRunnerDestroy(ctx->runner);
529 JxlDecoderDestroy(ctx->decoder);
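Teardown mirrors initialization: the thread runner and the decoder created in libjxl_decode_init() are destroyed in libjxl_decode_close(). A matching sketch for the standalone variant shown earlier:

#include <jxl/decode.h>
#include <jxl/thread_parallel_runner.h>

/* Free the runner first, then the decoder, and clear both pointers. */
static void free_jxl_decoder(JxlDecoder **dec, void **runner)
{
    if (*runner)
        JxlThreadParallelRunnerDestroy(*runner);
    *runner = NULL;
    if (*dec)
        JxlDecoderDestroy(*dec);
    *dec = NULL;
}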
550 .p.wrapper_name = "libjxl",