/*
 * MJPEG encoder
 * Copyright (c) 2000, 2001 Fabrice Bellard
 * Copyright (c) 2003 Alex Beregszaszi
 * Copyright (c) 2003-2004 Michael Niedermayer
 *
 * Support for external huffman table, various fixes (AVID workaround),
 * aspecting, new decode_frame mechanism and apple mjpeg-b support
 *                                  by Alex Beregszaszi
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * MJPEG encoder.
 */

#include "libavutil/pixdesc.h"

#include "avcodec.h"
#include "jpegtables.h"
#include "mjpegenc_common.h"
#include "mjpegenc_huffman.h"
#include "mpegvideo.h"
#include "mjpeg.h"
#include "mjpegenc.h"
#include "profiles.h"

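/**
 * Build a table of code lengths for every (run, level) pair of AC
 * coefficients, using the given Huffman code sizes.
 *
 * Each entry accounts for the ZRL (0xF0) codes needed when the run is 16 or
 * longer, plus the (run, size) code and the extra bits of the level itself.
 * The resulting lengths are used for bit-cost estimation (e.g. by the
 * trellis quantizer) via s->intra_ac_vlc_length.
 *
 * @param huff_size_ac Huffman code sizes, indexed by (run, size) code.
 * @param uni_ac_vlc_len Output table, indexed via UNI_AC_ENC_INDEX(run, level + 64).
 */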
static av_cold void init_uni_ac_vlc(const uint8_t huff_size_ac[256],
                                    uint8_t *uni_ac_vlc_len)
{
    for (int i = 0; i < 128; i++) {
        int level = i - 64;
        if (!level)
            continue;
        for (int run = 0; run < 64; run++) {
            int len, code, nbits;
            int alevel = FFABS(level);

            len = (run >> 4) * huff_size_ac[0xf0];

            nbits= av_log2_16bit(alevel) + 1;
            code = ((15&run) << 4) | nbits;

            len += huff_size_ac[code] + nbits;

            uni_ac_vlc_len[UNI_AC_ENC_INDEX(run, i)] = len;
            // We ignore EOB, as it is just a constant which generally does not change.
        }
    }
}

/**
 * Encodes and outputs the entire frame in the JPEG format.
 *
 * @param s The MpegEncContext.
 */
static void mjpeg_encode_picture_frame(MpegEncContext *s)
{
    int nbits, code, table_id;
    MJpegContext *m = s->mjpeg_ctx;
    uint8_t  *huff_size[4] = { m->huff_size_dc_luminance,
                               m->huff_size_dc_chrominance,
                               m->huff_size_ac_luminance,
                               m->huff_size_ac_chrominance };
    uint16_t *huff_code[4] = { m->huff_code_dc_luminance,
                               m->huff_code_dc_chrominance,
                               m->huff_code_ac_luminance,
                               m->huff_code_ac_chrominance };
    size_t total_bits = 0;
    size_t bytes_needed;

    s->header_bits = get_bits_diff(s);
    // Estimate the total size first
    for (int i = 0; i < m->huff_ncode; i++) {
        table_id = m->huff_buffer[i].table_id;
        code = m->huff_buffer[i].code;
        nbits = code & 0xf;

        total_bits += huff_size[table_id][code] + nbits;
    }

    bytes_needed = (total_bits + 7) / 8;
    ff_mpv_reallocate_putbitbuffer(s, bytes_needed, bytes_needed);

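    /* Second pass: write the buffered codes to the bitstream. */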
    for (int i = 0; i < m->huff_ncode; i++) {
        table_id = m->huff_buffer[i].table_id;
        code = m->huff_buffer[i].code;
        nbits = code & 0xf;

        put_bits(&s->pb, huff_size[table_id][code], huff_code[table_id][code]);
        if (nbits != 0) {
            put_sbits(&s->pb, nbits, m->huff_buffer[i].mant);
        }
    }

    m->huff_ncode = 0;
    s->i_tex_bits = get_bits_diff(s);
}

/**
 * Builds all 4 optimal Huffman tables.
 *
 * Uses the data stored in the JPEG buffer to compute the tables.
 * Stores the Huffman tables in the bits_* and val_* arrays in the MJpegContext.
 *
 * @param m MJpegContext containing the JPEG buffer.
 */
static void mjpeg_build_optimal_huffman(MJpegContext *m)
{
    MJpegEncHuffmanContext dc_luminance_ctx;
    MJpegEncHuffmanContext dc_chrominance_ctx;
    MJpegEncHuffmanContext ac_luminance_ctx;
    MJpegEncHuffmanContext ac_chrominance_ctx;
    MJpegEncHuffmanContext *ctx[4] = { &dc_luminance_ctx,
                                       &dc_chrominance_ctx,
                                       &ac_luminance_ctx,
                                       &ac_chrominance_ctx };
    for (int i = 0; i < 4; i++)
        ff_mjpeg_encode_huffman_init(ctx[i]);

    for (int i = 0; i < m->huff_ncode; i++) {
        int table_id = m->huff_buffer[i].table_id;
        int code     = m->huff_buffer[i].code;

        ff_mjpeg_encode_huffman_increment(ctx[table_id], code);
    }

    ff_mjpeg_encode_huffman_close(&dc_luminance_ctx,
                                  m->bits_dc_luminance,
                                  m->val_dc_luminance, 12);
    ff_mjpeg_encode_huffman_close(&dc_chrominance_ctx,
                                  m->bits_dc_chrominance,
                                  m->val_dc_chrominance, 12);
    ff_mjpeg_encode_huffman_close(&ac_luminance_ctx,
                                  m->bits_ac_luminance,
                                  m->val_ac_luminance, 256);
    ff_mjpeg_encode_huffman_close(&ac_chrominance_ctx,
                                  m->bits_ac_chrominance,
                                  m->val_ac_chrominance, 256);

    ff_mjpeg_build_huffman_codes(m->huff_size_dc_luminance,
                                 m->huff_code_dc_luminance,
                                 m->bits_dc_luminance,
                                 m->val_dc_luminance);
    ff_mjpeg_build_huffman_codes(m->huff_size_dc_chrominance,
                                 m->huff_code_dc_chrominance,
                                 m->bits_dc_chrominance,
                                 m->val_dc_chrominance);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_luminance,
                                 m->huff_code_ac_luminance,
                                 m->bits_ac_luminance,
                                 m->val_ac_luminance);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_chrominance,
                                 m->huff_code_ac_chrominance,
                                 m->bits_ac_chrominance,
                                 m->val_ac_chrominance);
}

/**
 * Writes the complete JPEG frame when optimal Huffman tables are enabled,
 * otherwise writes the stuffing.
 *
 * Header + values + stuffing.
 *
 * @param s The MpegEncContext.
 * @return int Error code, 0 if successful.
 */
int ff_mjpeg_encode_stuffing(MpegEncContext *s)
{
    PutBitContext *pbc = &s->pb;
    int mb_y = s->mb_y - !s->mb_x;
    int ret;
    MJpegContext *m;

    m = s->mjpeg_ctx;

    if (s->huffman == HUFFMAN_TABLE_OPTIMAL) {
        mjpeg_build_optimal_huffman(m);

        // Replace the VLCs with the optimal ones.
        // The default ones may be used for trellis during quantization.
        init_uni_ac_vlc(m->huff_size_ac_luminance,   m->uni_ac_vlc_len);
        init_uni_ac_vlc(m->huff_size_ac_chrominance, m->uni_chroma_ac_vlc_len);
        s->intra_ac_vlc_length      =
        s->intra_ac_vlc_last_length = m->uni_ac_vlc_len;
        s->intra_chroma_ac_vlc_length      =
        s->intra_chroma_ac_vlc_last_length = m->uni_chroma_ac_vlc_len;

        ff_mjpeg_encode_picture_header(s->avctx, &s->pb, &s->intra_scantable,
                                       s->pred, s->intra_matrix, s->chroma_intra_matrix);
        mjpeg_encode_picture_frame(s);
    }

    ret = ff_mpv_reallocate_putbitbuffer(s, put_bits_count(&s->pb) / 8 + 100,
                                            put_bits_count(&s->pb) / 4 + 1000);
    if (ret < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "Buffer reallocation failed\n");
        goto fail;
    }

    ff_mjpeg_escape_FF(pbc, s->esc_pos);

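    /* Under slice threading, end each slice with a restart marker
     * (cycling through RST0..RST7), except after the last macroblock row. */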
    if ((s->avctx->active_thread_type & FF_THREAD_SLICE) && mb_y < s->mb_height - 1)
        put_marker(pbc, RST0 + (mb_y&7));
    s->esc_pos = put_bits_count(pbc) >> 3;

fail:
    for (int i = 0; i < 3; i++)
        s->last_dc[i] = 128 << s->intra_dc_precision;

    return ret;
}

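/**
 * Allocate the buffer used to record a whole frame worth of Huffman codes
 * when optimal Huffman tables are enabled.
 *
 * The buffer is sized for the worst case: one code per coefficient, i.e.
 * 64 codes per block for every block of every macroblock in the frame.
 *
 * @param s The MpegEncContext.
 * @return int Error code, 0 if successful.
 */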
static int alloc_huffman(MpegEncContext *s)
{
    MJpegContext *m = s->mjpeg_ctx;
    size_t num_mbs, num_blocks, num_codes;
    int blocks_per_mb;

    // We need to init this here, as the MJPEG init is called before the common init.
    s->mb_width  = (s->width  + 15) / 16;
    s->mb_height = (s->height + 15) / 16;

    switch (s->chroma_format) {
    case CHROMA_420: blocks_per_mb =  6; break;
    case CHROMA_422: blocks_per_mb =  8; break;
    case CHROMA_444: blocks_per_mb = 12; break;
    default: av_assert0(0);
    };

    // Make sure we have enough space to hold this frame.
    num_mbs = s->mb_width * s->mb_height;
    num_blocks = num_mbs * blocks_per_mb;
    num_codes = num_blocks * 64;

    m->huff_buffer = av_malloc_array(num_codes, sizeof(MJpegHuffmanCode));
    if (!m->huff_buffer)
        return AVERROR(ENOMEM);
    return 0;
}

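/**
 * Initialize the MJPEG encoder state.
 *
 * Builds the default (standard) Huffman tables and the derived AC VLC length
 * tables needed by the rest of the encoder, and allocates the per-frame code
 * buffer when the optimal Huffman table strategy is selected.
 *
 * @param s The MpegEncContext.
 * @return int Error code, 0 if successful.
 */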
av_cold int ff_mjpeg_encode_init(MpegEncContext *s)
{
    MJpegContext *m;

    av_assert0(s->slice_context_count == 1);

    if (s->width > 65500 || s->height > 65500) {
        av_log(s, AV_LOG_ERROR, "JPEG does not support resolutions above 65500x65500\n");
        return AVERROR(EINVAL);
    }

    m = av_mallocz(sizeof(MJpegContext));
    if (!m)
        return AVERROR(ENOMEM);

    s->min_qcoeff=-1023;
    s->max_qcoeff= 1023;

    // Build default Huffman tables.
    // These may be overwritten later with more optimal Huffman tables, but
    // they are needed at least right now for some processes like trellis.
    ff_mjpeg_build_huffman_codes(m->huff_size_dc_luminance,
                                 m->huff_code_dc_luminance,
                                 avpriv_mjpeg_bits_dc_luminance,
                                 avpriv_mjpeg_val_dc);
    ff_mjpeg_build_huffman_codes(m->huff_size_dc_chrominance,
                                 m->huff_code_dc_chrominance,
                                 avpriv_mjpeg_bits_dc_chrominance,
                                 avpriv_mjpeg_val_dc);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_luminance,
                                 m->huff_code_ac_luminance,
                                 avpriv_mjpeg_bits_ac_luminance,
                                 avpriv_mjpeg_val_ac_luminance);
    ff_mjpeg_build_huffman_codes(m->huff_size_ac_chrominance,
                                 m->huff_code_ac_chrominance,
                                 avpriv_mjpeg_bits_ac_chrominance,
                                 avpriv_mjpeg_val_ac_chrominance);

    init_uni_ac_vlc(m->huff_size_ac_luminance,   m->uni_ac_vlc_len);
    init_uni_ac_vlc(m->huff_size_ac_chrominance, m->uni_chroma_ac_vlc_len);
    s->intra_ac_vlc_length      =
    s->intra_ac_vlc_last_length = m->uni_ac_vlc_len;
    s->intra_chroma_ac_vlc_length      =
    s->intra_chroma_ac_vlc_last_length = m->uni_chroma_ac_vlc_len;

    // Buffers start out empty.
    m->huff_ncode = 0;
    s->mjpeg_ctx = m;

    if(s->huffman == HUFFMAN_TABLE_OPTIMAL)
        return alloc_huffman(s);

    return 0;
}

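/**
 * Free all MJPEG-specific encoder state, including the Huffman code buffer.
 *
 * @param s The MpegEncContext.
 */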
av_cold void ff_mjpeg_encode_close(MpegEncContext *s)
{
    if (s->mjpeg_ctx) {
        av_freep(&s->mjpeg_ctx->huff_buffer);
        av_freep(&s->mjpeg_ctx);
    }
}

/**
 * Add code and table_id to the JPEG buffer.
 *
 * @param s The MJpegContext which contains the JPEG buffer.
 * @param table_id Which Huffman table the code belongs to.
 * @param code The encoded exponent of the coefficients and the run-bits.
 */
static inline void ff_mjpeg_encode_code(MJpegContext *s, uint8_t table_id, int code)
{
    MJpegHuffmanCode *c = &s->huff_buffer[s->huff_ncode++];
    c->table_id = table_id;
    c->code = code;
}

/**
 * Add the coefficient's data to the JPEG buffer.
 *
 * @param s The MJpegContext which contains the JPEG buffer.
 * @param table_id Which Huffman table the code belongs to.
 * @param val The coefficient.
 * @param run The run-bits.
 */
static void ff_mjpeg_encode_coef(MJpegContext *s, uint8_t table_id, int val, int run)
{
    int mant, code;

    if (val == 0) {
        av_assert0(run == 0);
        ff_mjpeg_encode_code(s, table_id, 0);
    } else {
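        /* A negative level is stored as level - 1 in its extra bits,
         * i.e. the one's complement of its magnitude. */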
        mant = val;
        if (val < 0) {
            val = -val;
            mant--;
        }

        code = (run << 4) | (av_log2_16bit(val) + 1);

        s->huff_buffer[s->huff_ncode].mant = mant;
        ff_mjpeg_encode_code(s, table_id, code);
    }
}

/**
 * Add the block's data into the JPEG buffer.
 *
 * @param s The MpegEncContext that contains the JPEG buffer.
 * @param block The block.
 * @param n The block's index or number.
 */
static void record_block(MpegEncContext *s, int16_t *block, int n)
{
    int i, j, table_id;
    int component, dc, last_index, val, run;
    MJpegContext *m = s->mjpeg_ctx;

    /* DC coef */
    component = (n <= 3 ? 0 : (n&1) + 1);
    table_id = (n <= 3 ? 0 : 1);
    dc = block[0]; /* overflow is impossible */
    val = dc - s->last_dc[component];

    ff_mjpeg_encode_coef(m, table_id, val, 0);

    s->last_dc[component] = dc;

    /* AC coefs */

    run = 0;
    last_index = s->block_last_index[n];
    table_id |= 2;

    for(i=1;i<=last_index;i++) {
        j = s->intra_scantable.permutated[i];
        val = block[j];

        if (val == 0) {
            run++;
        } else {
            while (run >= 16) {
                ff_mjpeg_encode_code(m, table_id, 0xf0);
                run -= 16;
            }
            ff_mjpeg_encode_coef(m, table_id, val, run);
            run = 0;
        }
    }

    /* output EOB only if not already 64 values */
    if (last_index < 63 || run != 0)
        ff_mjpeg_encode_code(m, table_id, 0);
}

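/**
 * Encode a block's coefficients directly into the bitstream.
 *
 * Writes the DC difference and the run-length coded AC coefficients using the
 * currently selected Huffman tables, ending with an EOB code if needed.
 *
 * @param s The MpegEncContext that contains the bitstream writer.
 * @param block The block.
 * @param n The block's index or number.
 */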
static void encode_block(MpegEncContext *s, int16_t *block, int n)
{
    int mant, nbits, code, i, j;
    int component, dc, run, last_index, val;
    MJpegContext *m = s->mjpeg_ctx;
    uint8_t *huff_size_ac;
    uint16_t *huff_code_ac;

    /* DC coef */
    component = (n <= 3 ? 0 : (n&1) + 1);
    dc = block[0]; /* overflow is impossible */
    val = dc - s->last_dc[component];
    if (n < 4) {
        ff_mjpeg_encode_dc(&s->pb, val, m->huff_size_dc_luminance, m->huff_code_dc_luminance);
        huff_size_ac = m->huff_size_ac_luminance;
        huff_code_ac = m->huff_code_ac_luminance;
    } else {
        ff_mjpeg_encode_dc(&s->pb, val, m->huff_size_dc_chrominance, m->huff_code_dc_chrominance);
        huff_size_ac = m->huff_size_ac_chrominance;
        huff_code_ac = m->huff_code_ac_chrominance;
    }
    s->last_dc[component] = dc;

    /* AC coefs */

    run = 0;
    last_index = s->block_last_index[n];
    for(i=1;i<=last_index;i++) {
        j = s->intra_scantable.permutated[i];
        val = block[j];
        if (val == 0) {
            run++;
        } else {
            while (run >= 16) {
                put_bits(&s->pb, huff_size_ac[0xf0], huff_code_ac[0xf0]);
                run -= 16;
            }
            mant = val;
            if (val < 0) {
                val = -val;
                mant--;
            }

            nbits= av_log2_16bit(val) + 1;
            code = (run << 4) | nbits;

            put_bits(&s->pb, huff_size_ac[code], huff_code_ac[code]);

            put_sbits(&s->pb, nbits, mant);
            run = 0;
        }
    }

    /* output EOB only if not already 64 values */
    if (last_index < 63 || run != 0)
        put_bits(&s->pb, huff_size_ac[0], huff_code_ac[0]);
}

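/**
 * Encode all blocks of one macroblock.
 *
 * With the optimal Huffman table strategy the codes are only recorded into
 * the JPEG buffer, to be written later by ff_mjpeg_encode_stuffing();
 * otherwise they are written to the bitstream immediately. The set of blocks
 * depends on the chroma subsampling; for 4:4:4, the right half of the
 * macroblock is skipped when it lies beyond the picture width.
 *
 * @param s The MpegEncContext.
 * @param block The macroblock's blocks.
 */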
void ff_mjpeg_encode_mb(MpegEncContext *s, int16_t block[12][64])
{
    int i;
    if (s->huffman == HUFFMAN_TABLE_OPTIMAL) {
        if (s->chroma_format == CHROMA_444) {
            record_block(s, block[0], 0);
            record_block(s, block[2], 2);
            record_block(s, block[4], 4);
            record_block(s, block[8], 8);
            record_block(s, block[5], 5);
            record_block(s, block[9], 9);

            if (16*s->mb_x+8 < s->width) {
                record_block(s, block[1], 1);
                record_block(s, block[3], 3);
                record_block(s, block[6], 6);
                record_block(s, block[10], 10);
                record_block(s, block[7], 7);
                record_block(s, block[11], 11);
            }
        } else {
            for(i=0;i<5;i++) {
                record_block(s, block[i], i);
            }
            if (s->chroma_format == CHROMA_420) {
                record_block(s, block[5], 5);
            } else {
                record_block(s, block[6], 6);
                record_block(s, block[5], 5);
                record_block(s, block[7], 7);
            }
        }
    } else {
        if (s->chroma_format == CHROMA_444) {
            encode_block(s, block[0], 0);
            encode_block(s, block[2], 2);
            encode_block(s, block[4], 4);
            encode_block(s, block[8], 8);
            encode_block(s, block[5], 5);
            encode_block(s, block[9], 9);

            if (16*s->mb_x+8 < s->width) {
                encode_block(s, block[1], 1);
                encode_block(s, block[3], 3);
                encode_block(s, block[6], 6);
                encode_block(s, block[10], 10);
                encode_block(s, block[7], 7);
                encode_block(s, block[11], 11);
            }
        } else {
            for(i=0;i<5;i++) {
                encode_block(s, block[i], i);
            }
            if (s->chroma_format == CHROMA_420) {
                encode_block(s, block[5], 5);
            } else {
                encode_block(s, block[6], 6);
                encode_block(s, block[5], 5);
                encode_block(s, block[7], 7);
            }
        }

        s->i_tex_bits += get_bits_diff(s);
    }
}

#if CONFIG_AMV_ENCODER
// maximum over s->mjpeg_vsample[i]
#define V_MAX 2
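/**
 * Encode one AMV frame.
 *
 * AMV stores pictures bottom-up, so the input frame is flipped vertically
 * (negative linesizes) before being handed to the regular encode path.
 */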
static int amv_encode_picture(AVCodecContext *avctx, AVPacket *pkt,
                              const AVFrame *pic_arg, int *got_packet)
{
    MpegEncContext *s = avctx->priv_data;
    AVFrame *pic;
    int i, ret;
    int chroma_h_shift, chroma_v_shift;

    av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &chroma_h_shift, &chroma_v_shift);

    if ((avctx->height & 15) && avctx->strict_std_compliance > FF_COMPLIANCE_UNOFFICIAL) {
        av_log(avctx, AV_LOG_ERROR,
               "Heights which are not a multiple of 16 might fail with some decoders, "
               "use vstrict=-1 / -strict -1 to use %d anyway.\n", avctx->height);
        av_log(avctx, AV_LOG_WARNING, "If you have a device that plays AMV videos, please test if videos "
               "with such heights work with it and report your findings to ffmpeg-devel@ffmpeg.org\n");
        return AVERROR_EXPERIMENTAL;
    }

    pic = av_frame_clone(pic_arg);
    if (!pic)
        return AVERROR(ENOMEM);
    // picture should be flipped upside-down
    for(i=0; i < 3; i++) {
        int vsample = i ? 2 >> chroma_v_shift : 2;
        pic->data[i] += pic->linesize[i] * (vsample * s->height / V_MAX - 1);
        pic->linesize[i] *= -1;
    }
    ret = ff_mpv_encode_picture(avctx, pkt, pic, got_packet);
    av_frame_free(&pic);
    return ret;
}
#endif

#define OFFSET(x) offsetof(MpegEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
FF_MPV_COMMON_OPTS
{ "pred", "Prediction method", OFFSET(pred), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 3, VE, "pred" },
    { "left",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, INT_MIN, INT_MAX, VE, "pred" },
    { "plane",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 2 }, INT_MIN, INT_MAX, VE, "pred" },
    { "median", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 3 }, INT_MIN, INT_MAX, VE, "pred" },
{ "huffman", "Huffman table strategy", OFFSET(huffman), AV_OPT_TYPE_INT, { .i64 = HUFFMAN_TABLE_OPTIMAL }, 0, NB_HUFFMAN_TABLE_OPTION - 1, VE, "huffman" },
    { "default", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = HUFFMAN_TABLE_DEFAULT }, INT_MIN, INT_MAX, VE, "huffman" },
    { "optimal", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = HUFFMAN_TABLE_OPTIMAL }, INT_MIN, INT_MAX, VE, "huffman" },
{ NULL},
};

#if CONFIG_MJPEG_ENCODER
static const AVClass mjpeg_class = {
    .class_name = "mjpeg encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_mjpeg_encoder = {
    .name           = "mjpeg",
    .long_name      = NULL_IF_CONFIG_SMALL("MJPEG (Motion JPEG)"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MJPEG,
    .priv_data_size = sizeof(MpegEncContext),
    .init           = ff_mpv_encode_init,
    .encode2        = ff_mpv_encode_picture,
    .close          = ff_mpv_encode_end,
    .capabilities   = AV_CODEC_CAP_SLICE_THREADS | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_NONE
    },
    .priv_class     = &mjpeg_class,
    .profiles       = NULL_IF_CONFIG_SMALL(ff_mjpeg_profiles),
};
#endif

#if CONFIG_AMV_ENCODER
static const AVClass amv_class = {
    .class_name = "amv encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_amv_encoder = {
    .name           = "amv",
    .long_name      = NULL_IF_CONFIG_SMALL("AMV Video"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_AMV,
    .priv_data_size = sizeof(MpegEncContext),
    .init           = ff_mpv_encode_init,
    .encode2        = amv_encode_picture,
    .close          = ff_mpv_encode_end,
    .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_NONE
    },
    .priv_class     = &amv_class,
};
#endif