/*
 * Copyright (c) 2019, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <stdint.h>

#include "av1/common/blockd.h"
#include "config/aom_config.h"
#include "config/aom_scale_rtcd.h"

#include "aom/aom_codec.h"
#include "aom/aom_encoder.h"

#if CONFIG_MISMATCH_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_MISMATCH_DEBUG

#include "av1/common/av1_common_int.h"
#include "av1/common/reconinter.h"

#include "av1/encoder/encoder.h"
#include "av1/encoder/encode_strategy.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/firstpass.h"
#include "av1/encoder/pass2_strategy.h"
#include "av1/encoder/temporal_filter.h"
#include "av1/encoder/tpl_model.h"

#if CONFIG_TUNE_VMAF
#include "av1/encoder/tune_vmaf.h"
#endif

#define TEMPORAL_FILTER_KEY_FRAME (CONFIG_REALTIME_ONLY ? 0 : 1)

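// Set the refresh flags for the golden, bwdref and altref frame buffers.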
static INLINE void set_refresh_frame_flags(
    RefreshFrameFlagsInfo *const refresh_frame_flags, bool refresh_gf,
    bool refresh_bwdref, bool refresh_arf) {
  refresh_frame_flags->golden_frame = refresh_gf;
  refresh_frame_flags->bwd_ref_frame = refresh_bwdref;
  refresh_frame_flags->alt_ref_frame = refresh_arf;
}

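// Configure which reference buffers are refreshed by the current frame, and
// whether the source frame is an altref overlay, based on the frame update
// type. External refresh overrides and force_refresh_all are applied last.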
void av1_configure_buffer_updates(
    AV1_COMP *const cpi, RefreshFrameFlagsInfo *const refresh_frame_flags,
    const FRAME_UPDATE_TYPE type, const REFBUF_STATE refbuf_state,
    int force_refresh_all) {
  // NOTE(weitinglin): Should we define another function to take care of
  // cpi->rc.is_$Source_Type to make this function as it is in the comment?
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;
  cpi->rc.is_src_frame_alt_ref = 0;

  switch (type) {
    case KF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, true, true, true);
      break;

    case LF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, false, false, false);
      break;

    case GF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, true, false, false);
      break;

    case OVERLAY_UPDATE:
      if (refbuf_state == REFBUF_RESET)
        set_refresh_frame_flags(refresh_frame_flags, true, true, true);
      else
        set_refresh_frame_flags(refresh_frame_flags, true, false, false);

      cpi->rc.is_src_frame_alt_ref = 1;
      break;

    case ARF_UPDATE:
      // NOTE: BWDREF does not get updated along with ALTREF_FRAME.
      if (refbuf_state == REFBUF_RESET)
        set_refresh_frame_flags(refresh_frame_flags, true, true, true);
      else
        set_refresh_frame_flags(refresh_frame_flags, false, false, true);

      break;

    case INTNL_OVERLAY_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, false, false, false);
      cpi->rc.is_src_frame_alt_ref = 1;
      break;

    case INTNL_ARF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, false, true, false);
      break;

    default: assert(0); break;
  }

  if (ext_refresh_frame_flags->update_pending &&
      (!is_stat_generation_stage(cpi))) {
    set_refresh_frame_flags(refresh_frame_flags,
                            ext_refresh_frame_flags->golden_frame,
                            ext_refresh_frame_flags->bwd_ref_frame,
                            ext_refresh_frame_flags->alt_ref_frame);
    GF_GROUP *gf_group = &cpi->ppi->gf_group;
    if (ext_refresh_frame_flags->golden_frame)
      gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE;
    if (ext_refresh_frame_flags->alt_ref_frame)
      gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE;
    if (ext_refresh_frame_flags->bwd_ref_frame)
      gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE;
  }

  if (force_refresh_all)
    set_refresh_frame_flags(refresh_frame_flags, true, true, true);
}

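// Translate properties of the current frame into the frame flags reported to
// the encoder's caller (intra-only, S-frame, error resilient).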
static void set_additional_frame_flags(const AV1_COMMON *const cm,
                                       unsigned int *const frame_flags) {
  if (frame_is_intra_only(cm)) {
    *frame_flags |= FRAMEFLAGS_INTRAONLY;
  }
  if (frame_is_sframe(cm)) {
    *frame_flags |= FRAMEFLAGS_SWITCH;
  }
  if (cm->features.error_resilient_mode) {
    *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
  }
}

static void set_ext_overrides(AV1_COMMON *const cm,
                              EncodeFrameParams *const frame_params,
                              ExternalFlags *const ext_flags) {
  // Override the defaults with the externally supplied values set via the
  // av1_update_reference() and av1_update_entropy() calls.
  // Note: The overrides are valid only for the next frame passed
  // to av1_encode_lowlevel()

  if (ext_flags->use_s_frame) {
    frame_params->frame_type = S_FRAME;
  }

  if (ext_flags->refresh_frame_context_pending) {
    cm->features.refresh_frame_context = ext_flags->refresh_frame_context;
    ext_flags->refresh_frame_context_pending = 0;
  }
  cm->features.allow_ref_frame_mvs = ext_flags->use_ref_frame_mvs;

  frame_params->error_resilient_mode = ext_flags->use_error_resilient;
  // A keyframe is already error resilient, and keyframes with
  // error_resilient_mode set interfere with the use of show_existing_frame
  // when forward reference keyframes are enabled.
  frame_params->error_resilient_mode &= frame_params->frame_type != KEY_FRAME;
  // For bitstream conformance, s-frames must be error-resilient.
  frame_params->error_resilient_mode |= frame_params->frame_type == S_FRAME;
}

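// Choose the primary reference frame, i.e. the reference whose frame context
// is used to initialize probabilities for the current frame, or return
// PRIMARY_REF_NONE if no suitable reference is available.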
static int choose_primary_ref_frame(
    const AV1_COMP *const cpi, const EncodeFrameParams *const frame_params) {
  const AV1_COMMON *const cm = &cpi->common;

  const int intra_only = frame_params->frame_type == KEY_FRAME ||
                         frame_params->frame_type == INTRA_ONLY_FRAME;
  if (intra_only || frame_params->error_resilient_mode ||
      cpi->ext_flags.use_primary_ref_none) {
    return PRIMARY_REF_NONE;
  }

  // In the large-scale case, always use the LAST frame's frame contexts.
  // Note(yunqing): In other cases, primary_ref_frame is chosen based on
  // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls
  // frame bit allocation.
  if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME);

  if (cpi->ppi->use_svc) return av1_svc_primary_ref_frame(cpi);

  // Find the most recent reference frame with the same reference type as the
  // current frame.
  const int current_ref_type = get_current_frame_ref_type(cpi);
  int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type];

  int primary_ref_frame = PRIMARY_REF_NONE;
  for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
    if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb) {
      primary_ref_frame = ref_frame - LAST_FRAME;
    }
  }

  return primary_ref_frame;
}

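// Update the encoder's frame-rate estimate from the time stamps of the
// current source frame.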
static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) {
  TimeStamps *time_stamps = &cpi->time_stamps;
  int64_t this_duration;
  int step = 0;

  // Clear down mmx registers

  if (cpi->ppi->use_svc && cpi->svc.spatial_layer_id > 0) {
    cpi->framerate = cpi->svc.base_framerate;
    av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
    return;
  }

  if (ts_start == time_stamps->first_ts_start) {
    this_duration = ts_end - ts_start;
    step = 1;
  } else {
    int64_t last_duration =
        time_stamps->prev_ts_end - time_stamps->prev_ts_start;

    this_duration = ts_end - time_stamps->prev_ts_end;

    // Do a step update if the duration changes by more than 10%.
    if (last_duration)
      step = (int)((this_duration - last_duration) * 10 / last_duration);
  }

  if (this_duration) {
    if (step) {
      av1_new_framerate(cpi, 10000000.0 / this_duration);
    } else {
      // Average this frame's rate into the last second's average
      // frame rate. If we haven't seen 1 second yet, then average
      // over the whole interval seen.
      const double interval =
          AOMMIN((double)(ts_end - time_stamps->first_ts_start), 10000000.0);
      double avg_duration = 10000000.0 / cpi->framerate;
      avg_duration *= (interval - avg_duration + this_duration);
      avg_duration /= interval;

      av1_new_framerate(cpi, 10000000.0 / avg_duration);
    }
  }
  time_stamps->prev_ts_start = ts_start;
  time_stamps->prev_ts_end = ts_end;
}

// Determine whether there is a forced keyframe pending in the lookahead buffer.
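// Returns the lookahead index of the first entry flagged with
// AOM_EFLAG_FORCE_KF, or -1 if no forced key-frame is found up to up_to_index.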
int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
                               const int up_to_index,
                               const COMPRESSOR_STAGE compressor_stage) {
  for (int i = 0; i <= up_to_index; i++) {
    const struct lookahead_entry *e =
        av1_lookahead_peek(lookahead, i, compressor_stage);
    if (e == NULL) {
      // We have reached the end of the lookahead buffer and not early-returned
      // so there isn't a forced key-frame pending.
      return -1;
    } else if (e->flags == AOM_EFLAG_FORCE_KF) {
      return i;
    } else {
      continue;
    }
  }
  return -1;  // No forced key-frame found within the search range.
}

// Check if we should encode an ARF or internal ARF. If not, try a LAST frame.
// Do some setup associated with the chosen source.
// flush, pop_lookahead and last_source are outputs.
// Return the frame source, or NULL if we couldn't find one.
static struct lookahead_entry *choose_frame_source(
    AV1_COMP *const cpi, int *const flush, int *pop_lookahead,
    struct lookahead_entry **last_source,
    EncodeFrameParams *const frame_params) {
  AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  struct lookahead_entry *source = NULL;

  // Source index in lookahead buffer.
  int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];

  // TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q
  if (src_index &&
      (is_forced_keyframe_pending(cpi->ppi->lookahead, src_index,
                                  cpi->compressor_stage) != -1) &&
      cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) {
    src_index = 0;
    *flush = 1;
  }

  // If the current frame is an ARF, we should not pop from the lookahead
  // buffer; if it is not an ARF, pop it. This assumes the first frame in the
  // GF group is not an ARF, and may need to change if that assumption does
  // not hold.
  *pop_lookahead = (src_index == 0);
  // If this is a key frame and keyframe filtering is enabled with overlay,
  // then do not pop.
  if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 &&
      gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
      !is_stat_generation_stage(cpi) && cpi->ppi->lookahead) {
    if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz &&
        (*flush ||
         cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz ==
             cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].pop_sz)) {
      *pop_lookahead = 0;
    }
  }

  // The LAP stage does not have ARFs or forward key-frames,
  // hence always pop the lookahead here.
  if (is_stat_generation_stage(cpi)) {
    *pop_lookahead = 1;
    src_index = 0;
  }

  frame_params->show_frame = *pop_lookahead;

#if CONFIG_FRAME_PARALLEL_ENCODE
  // Future frame in parallel encode set
  if (gf_group->src_offset[cpi->gf_frame_index] != 0 &&
      !is_stat_generation_stage(cpi)) {
    src_index = gf_group->src_offset[cpi->gf_frame_index];
  }
#endif
  if (frame_params->show_frame) {
    // Show frame: pop from the buffer.
    // Get the last frame source.
    if (cm->current_frame.frame_number > 0) {
      *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1,
                                        cpi->compressor_stage);
    }
    // Read in the source frame.
    source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
                                cpi->compressor_stage);
  } else {
    // No-show frames are ARF frames.
    source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
                                cpi->compressor_stage);
    if (source != NULL) {
      cm->showable_frame = 1;
    }
  }
  return source;
}

// Don't allow a show_existing_frame to coincide with an error resilient or
// S-Frame. An exception can be made in the case of a keyframe, since it does
// not depend on any previous frames.
static int allow_show_existing(const AV1_COMP *const cpi,
                               unsigned int frame_flags) {
  if (cpi->common.current_frame.frame_number == 0) return 0;

  const struct lookahead_entry *lookahead_src =
      av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
  if (lookahead_src == NULL) return 1;

  const int is_error_resilient =
      cpi->oxcf.tool_cfg.error_resilient_mode ||
      (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
  const int is_s_frame = cpi->oxcf.kf_cfg.enable_sframe ||
                         (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
  const int is_key_frame =
      (cpi->rc.frames_to_key == 0) || (frame_flags & FRAMEFLAGS_KEY);
  return !(is_error_resilient || is_s_frame) || is_key_frame;
}

// Update frame_flags to tell the encoder's caller what sort of frame was
// encoded.
static void update_frame_flags(
    const AV1_COMMON *const cm,
    const RefreshFrameFlagsInfo *const refresh_frame_flags,
    unsigned int *frame_flags) {
  if (encode_show_existing_frame(cm)) {
    *frame_flags &= ~FRAMEFLAGS_GOLDEN;
    *frame_flags &= ~FRAMEFLAGS_BWDREF;
    *frame_flags &= ~FRAMEFLAGS_ALTREF;
    *frame_flags &= ~FRAMEFLAGS_KEY;
    return;
  }

  if (refresh_frame_flags->golden_frame) {
    *frame_flags |= FRAMEFLAGS_GOLDEN;
  } else {
    *frame_flags &= ~FRAMEFLAGS_GOLDEN;
  }

  if (refresh_frame_flags->alt_ref_frame) {
    *frame_flags |= FRAMEFLAGS_ALTREF;
  } else {
    *frame_flags &= ~FRAMEFLAGS_ALTREF;
  }

  if (refresh_frame_flags->bwd_ref_frame) {
    *frame_flags |= FRAMEFLAGS_BWDREF;
  } else {
    *frame_flags &= ~FRAMEFLAGS_BWDREF;
  }

  if (cm->current_frame.frame_type == KEY_FRAME) {
    *frame_flags |= FRAMEFLAGS_KEY;
  } else {
    *frame_flags &= ~FRAMEFLAGS_KEY;
  }
}

#define DUMP_REF_FRAME_IMAGES 0

#if DUMP_REF_FRAME_IMAGES == 1
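// Write the Y, U and V planes of a reference frame buffer to file_name as raw
// YUV data (debug helper used when DUMP_REF_FRAME_IMAGES is enabled).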
static int dump_one_image(AV1_COMMON *cm,
                          const YV12_BUFFER_CONFIG *const ref_buf,
                          char *file_name) {
  int h;
  FILE *f_ref = NULL;

  if (ref_buf == NULL) {
    printf("Frame data buffer is NULL.\n");
    return AOM_CODEC_MEM_ERROR;
  }

  if ((f_ref = fopen(file_name, "wb")) == NULL) {
    printf("Unable to open file %s to write.\n", file_name);
    return AOM_CODEC_MEM_ERROR;
  }

  // --- Y ---
  for (h = 0; h < cm->height; ++h) {
    fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
  }
  // --- U ---
  for (h = 0; h < (cm->height >> 1); ++h) {
    fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
           f_ref);
  }
  // --- V ---
  for (h = 0; h < (cm->height >> 1); ++h) {
    fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
           f_ref);
  }

  fclose(f_ref);

  return AOM_CODEC_OK;
}

static void dump_ref_frame_images(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MV_REFERENCE_FRAME ref_frame;

  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    char file_name[256] = "";
    snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
             cm->current_frame.frame_number, ref_frame);
    dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
  }
}
#endif  // DUMP_REF_FRAME_IMAGES == 1

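// Return the index of the first reference buffer slot marked for refresh in
// refresh_frame_flags, or INVALID_IDX if no slot is marked.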
int av1_get_refresh_ref_frame_map(int refresh_frame_flags) {
  int ref_map_index;

  for (ref_map_index = 0; ref_map_index < REF_FRAMES; ++ref_map_index)
    if ((refresh_frame_flags >> ref_map_index) & 1) break;

  if (ref_map_index == REF_FRAMES) ref_map_index = INVALID_IDX;
  return ref_map_index;
}

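// Remove ref_map_index from the reference buffer stacks: pop it if it is at
// the top of the ARF stack, and drop any matching entries from the LAST and
// GOLDEN stacks.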
static void update_arf_stack(int ref_map_index,
                             RefBufferStack *ref_buffer_stack) {
  if (ref_buffer_stack->arf_stack_size >= 0) {
    if (ref_buffer_stack->arf_stack[0] == ref_map_index)
      stack_pop(ref_buffer_stack->arf_stack, &ref_buffer_stack->arf_stack_size);
  }

  if (ref_buffer_stack->lst_stack_size) {
    for (int i = ref_buffer_stack->lst_stack_size - 1; i >= 0; --i) {
      if (ref_buffer_stack->lst_stack[i] == ref_map_index) {
        for (int idx = i; idx < ref_buffer_stack->lst_stack_size - 1; ++idx)
          ref_buffer_stack->lst_stack[idx] =
              ref_buffer_stack->lst_stack[idx + 1];
        ref_buffer_stack->lst_stack[ref_buffer_stack->lst_stack_size - 1] =
            INVALID_IDX;
        --ref_buffer_stack->lst_stack_size;
      }
    }
  }

  if (ref_buffer_stack->gld_stack_size) {
    for (int i = ref_buffer_stack->gld_stack_size - 1; i >= 0; --i) {
      if (ref_buffer_stack->gld_stack[i] == ref_map_index) {
        for (int idx = i; idx < ref_buffer_stack->gld_stack_size - 1; ++idx)
          ref_buffer_stack->gld_stack[idx] =
              ref_buffer_stack->gld_stack[idx + 1];
        ref_buffer_stack->gld_stack[ref_buffer_stack->gld_stack_size - 1] =
            INVALID_IDX;
        --ref_buffer_stack->gld_stack_size;
      }
    }
  }
}

// Update reference frame stack info.
void av1_update_ref_frame_map(const AV1_COMP *cpi,
                              FRAME_UPDATE_TYPE frame_update_type,
                              REFBUF_STATE refbuf_state, int ref_map_index,
                              RefBufferStack *ref_buffer_stack) {
  const AV1_COMMON *const cm = &cpi->common;

  // TODO(jingning): Consider the S-frame same as key frame for the
  // reference frame tracking purpose. The logic might be better
  // expressed than converting the frame update type.
  if (frame_is_sframe(cm)) frame_update_type = KF_UPDATE;
  if (is_frame_droppable(&cpi->svc, &cpi->ext_flags.refresh_frame)) return;

  switch (frame_update_type) {
    case KF_UPDATE:
      stack_reset(ref_buffer_stack->lst_stack,
                  &ref_buffer_stack->lst_stack_size);
      stack_reset(ref_buffer_stack->gld_stack,
                  &ref_buffer_stack->gld_stack_size);
      stack_reset(ref_buffer_stack->arf_stack,
                  &ref_buffer_stack->arf_stack_size);
      stack_push(ref_buffer_stack->gld_stack, &ref_buffer_stack->gld_stack_size,
                 ref_map_index);
      break;
    case GF_UPDATE:
      update_arf_stack(ref_map_index, ref_buffer_stack);
      stack_push(ref_buffer_stack->gld_stack, &ref_buffer_stack->gld_stack_size,
                 ref_map_index);
      // For nonrd_mode: update LAST as well on GF_UPDATE frame.
      // TODO(jingning, marpan): Why replace both reference frames with the
      // same decoded frame?
      if (cpi->sf.rt_sf.use_nonrd_pick_mode)
        stack_push(ref_buffer_stack->lst_stack,
                   &ref_buffer_stack->lst_stack_size, ref_map_index);
      break;
    case LF_UPDATE:
      update_arf_stack(ref_map_index, ref_buffer_stack);
      stack_push(ref_buffer_stack->lst_stack, &ref_buffer_stack->lst_stack_size,
                 ref_map_index);
      break;
    case ARF_UPDATE:
    case INTNL_ARF_UPDATE:
      if (refbuf_state == REFBUF_RESET) {
        stack_reset(ref_buffer_stack->lst_stack,
                    &ref_buffer_stack->lst_stack_size);
        stack_reset(ref_buffer_stack->gld_stack,
                    &ref_buffer_stack->gld_stack_size);
        stack_reset(ref_buffer_stack->arf_stack,
                    &ref_buffer_stack->arf_stack_size);
      } else {
        update_arf_stack(ref_map_index, ref_buffer_stack);
      }
      stack_push(ref_buffer_stack->arf_stack, &ref_buffer_stack->arf_stack_size,
                 ref_map_index);
      break;
    case OVERLAY_UPDATE:
      if (refbuf_state == REFBUF_RESET) {
        ref_map_index = stack_pop(ref_buffer_stack->arf_stack,
                                  &ref_buffer_stack->arf_stack_size);
        stack_reset(ref_buffer_stack->lst_stack,
                    &ref_buffer_stack->lst_stack_size);
        stack_reset(ref_buffer_stack->gld_stack,
                    &ref_buffer_stack->gld_stack_size);
        stack_reset(ref_buffer_stack->arf_stack,
                    &ref_buffer_stack->arf_stack_size);
        stack_push(ref_buffer_stack->gld_stack,
                   &ref_buffer_stack->gld_stack_size, ref_map_index);
      } else {
        if (ref_map_index != INVALID_IDX) {
          update_arf_stack(ref_map_index, ref_buffer_stack);
          stack_push(ref_buffer_stack->lst_stack,
                     &ref_buffer_stack->lst_stack_size, ref_map_index);
        }
        ref_map_index = stack_pop(ref_buffer_stack->arf_stack,
                                  &ref_buffer_stack->arf_stack_size);
        stack_push(ref_buffer_stack->gld_stack,
                   &ref_buffer_stack->gld_stack_size, ref_map_index);
      }
      break;
    case INTNL_OVERLAY_UPDATE:
      ref_map_index = stack_pop(ref_buffer_stack->arf_stack,
                                &ref_buffer_stack->arf_stack_size);
      stack_push(ref_buffer_stack->lst_stack, &ref_buffer_stack->lst_stack_size,
                 ref_map_index);
      break;
    default: assert(0 && "unknown type");
  }
  return;
}

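// Find a reference buffer slot that is not currently in use (not present in
// any reference stack or, with CONFIG_FRAME_PARALLEL_ENCODE, not holding a
// mapped frame). Returns INVALID_IDX if every slot is occupied.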
static int get_free_ref_map_index(
#if CONFIG_FRAME_PARALLEL_ENCODE
    RefFrameMapPair ref_map_pairs[REF_FRAMES],
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
    const RefBufferStack *ref_buffer_stack) {
#if CONFIG_FRAME_PARALLEL_ENCODE
  (void)ref_buffer_stack;
  for (int idx = 0; idx < REF_FRAMES; ++idx)
    if (ref_map_pairs[idx].disp_order == -1) return idx;
  return INVALID_IDX;
#else
  for (int idx = 0; idx < REF_FRAMES; ++idx) {
    int is_free = 1;
    for (int i = 0; i < ref_buffer_stack->arf_stack_size; ++i) {
      if (ref_buffer_stack->arf_stack[i] == idx) {
        is_free = 0;
        break;
      }
    }

    for (int i = 0; i < ref_buffer_stack->lst_stack_size; ++i) {
      if (ref_buffer_stack->lst_stack[i] == idx) {
        is_free = 0;
        break;
      }
    }

    for (int i = 0; i < ref_buffer_stack->gld_stack_size; ++i) {
      if (ref_buffer_stack->gld_stack[i] == idx) {
        is_free = 0;
        break;
      }
    }

    if (is_free) return idx;
  }
  return INVALID_IDX;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
}

#if CONFIG_FRAME_PARALLEL_ENCODE
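// Choose which reference buffer slot to overwrite: if updating an ARF and more
// than two level-1 frames are present, refresh the oldest level-1 frame;
// otherwise refresh the oldest non-level-1 past frame, falling back to the
// oldest level-1 frame.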
static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
                           int update_arf,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                           GF_GROUP *gf_group, int gf_index,
                           int enable_refresh_skip,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                           int cur_frame_disp) {
  int arf_count = 0;
  int oldest_arf_order = INT32_MAX;
  int oldest_arf_idx = -1;

  int oldest_frame_order = INT32_MAX;
  int oldest_idx = -1;

  for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
    RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
    if (ref_pair.disp_order == -1) continue;
    const int frame_order = ref_pair.disp_order;
    const int reference_frame_level = ref_pair.pyr_level;
    // Do not refresh a future frame.
    if (frame_order > cur_frame_disp) continue;

#if CONFIG_FRAME_PARALLEL_ENCODE_2
    if (enable_refresh_skip) {
      int skip_frame = 0;
      // Prevent refreshing a frame in gf_group->skip_frame_refresh.
      for (int i = 0; i < REF_FRAMES; i++) {
        int frame_to_skip = gf_group->skip_frame_refresh[gf_index][i];
        if (frame_to_skip == INVALID_IDX) break;
        if (frame_order == frame_to_skip) {
          skip_frame = 1;
          break;
        }
      }
      if (skip_frame) continue;
    }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2

    // Keep track of the oldest level 1 frame if the current frame is also level
    // 1.
    if (reference_frame_level == 1) {
      // If there are more than 2 level 1 frames in the reference list,
      // discard the oldest.
      if (frame_order < oldest_arf_order) {
        oldest_arf_order = frame_order;
        oldest_arf_idx = map_idx;
      }
      arf_count++;
      continue;
    }

    // Update the overall oldest reference frame.
    if (frame_order < oldest_frame_order) {
      oldest_frame_order = frame_order;
      oldest_idx = map_idx;
    }
  }
  if (update_arf && arf_count > 2) return oldest_arf_idx;
  if (oldest_idx >= 0) return oldest_idx;
  if (oldest_arf_idx >= 0) return oldest_arf_idx;
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  if (oldest_idx == -1) {
    assert(arf_count > 2 && enable_refresh_skip);
    return oldest_arf_idx;
  }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
  assert(0 && "No valid refresh index found");
  return -1;
}

#if CONFIG_FRAME_PARALLEL_ENCODE_2
// Computes the reference refresh index for INTNL_ARF_UPDATE frame.
int av1_calc_refresh_idx_for_intnl_arf(
    AV1_COMP *cpi, RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
    int gf_index) {
  GF_GROUP *const gf_group = &cpi->ppi->gf_group;

  // Search for the open slot to store the current frame.
  int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs, NULL);

  // Use a free slot if available.
  if (free_fb_index != INVALID_IDX) {
    return free_fb_index;
  } else {
    int enable_refresh_skip = !is_one_pass_rt_params(cpi);
    int refresh_idx =
        get_refresh_idx(ref_frame_map_pairs, 0, gf_group, gf_index,
                        enable_refresh_skip, gf_group->display_idx[gf_index]);
    return refresh_idx;
  }
}
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

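// Compute the refresh_frame_flags bitmask indicating which reference buffer
// slots will be overwritten with the current frame.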
int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
                                const EncodeFrameParams *const frame_params,
                                FRAME_UPDATE_TYPE frame_update_type,
                                int gf_index,
#if CONFIG_FRAME_PARALLEL_ENCODE
                                int cur_disp_order,
                                RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
                                const RefBufferStack *const ref_buffer_stack) {
  const AV1_COMMON *const cm = &cpi->common;
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;

  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  if (gf_group->refbuf_state[gf_index] == REFBUF_RESET)
    return SELECT_ALL_BUF_SLOTS;

  // TODO(jingning): Deprecate the following operations.
  // Switch frames and shown key-frames overwrite all reference slots
  if (frame_params->frame_type == S_FRAME) return SELECT_ALL_BUF_SLOTS;

  // show_existing_frames don't actually send refresh_frame_flags so set the
  // flags to 0 to keep things consistent.
  if (frame_params->show_existing_frame) return 0;

  const SVC *const svc = &cpi->svc;
  if (is_frame_droppable(svc, ext_refresh_frame_flags)) return 0;

  int refresh_mask = 0;

  if (ext_refresh_frame_flags->update_pending) {
    if (svc->set_ref_frame_config) {
      for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
        int ref_frame_map_idx = svc->ref_idx[i];
        refresh_mask |= svc->refresh[ref_frame_map_idx] << ref_frame_map_idx;
      }
      return refresh_mask;
    }
    // Unfortunately the encoder interface reflects the old refresh_*_frame
    // flags so we have to replicate the old refresh_frame_flags logic here in
    // order to preserve the behaviour of the flag overrides.
    int ref_frame_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->last_frame << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, EXTREF_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->bwd_ref_frame
                      << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF2_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->alt2_ref_frame
                      << ref_frame_map_idx;

    if (frame_update_type == OVERLAY_UPDATE) {
      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;
    } else {
      ref_frame_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;

      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->alt_ref_frame
                        << ref_frame_map_idx;
    }
    return refresh_mask;
  }

  // Search for the open slot to store the current frame.
  int free_fb_index = get_free_ref_map_index(
#if CONFIG_FRAME_PARALLEL_ENCODE
      ref_frame_map_pairs,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
      ref_buffer_stack);

#if CONFIG_FRAME_PARALLEL_ENCODE
  // No refresh necessary for these frame types.
  if (frame_update_type == OVERLAY_UPDATE ||
      frame_update_type == INTNL_OVERLAY_UPDATE)
    return refresh_mask;

  // If there is an open slot, refresh that one instead of replacing a
  // reference.
  if (free_fb_index != INVALID_IDX) {
    refresh_mask = 1 << free_fb_index;
    return refresh_mask;
  }
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  const int enable_refresh_skip = !is_one_pass_rt_params(cpi);
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
  const int update_arf = frame_update_type == ARF_UPDATE;
  const int refresh_idx =
      get_refresh_idx(ref_frame_map_pairs, update_arf,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                      &cpi->ppi->gf_group, gf_index, enable_refresh_skip,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                      cur_disp_order);
  return 1 << refresh_idx;
#else
  switch (frame_update_type) {
    case KF_UPDATE:
    case GF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->gld_stack_size)
          refresh_mask =
              1 << ref_buffer_stack
                       ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else
          refresh_mask =
              1 << ref_buffer_stack
                       ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
      }
      break;
    case LF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->lst_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                       ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
        else if (ref_buffer_stack->gld_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                       ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else
          assert(0 && "No ref map index found");
      }
      break;
    case ARF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->gld_stack_size >= 3)
          refresh_mask =
              1 << ref_buffer_stack
                       ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else if (ref_buffer_stack->lst_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                       ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
        else
          assert(0 && "No ref map index found");
      }
      break;
    case INTNL_ARF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        refresh_mask =
            1 << ref_buffer_stack
                     ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
      }
      break;
    case OVERLAY_UPDATE:
      if (free_fb_index != INVALID_IDX) refresh_mask = 1 << free_fb_index;
      break;
    case INTNL_OVERLAY_UPDATE: break;
    default: assert(0); break;
  }

  return refresh_mask;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
}

#if !CONFIG_REALTIME_ONLY
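// Set up the source planes, block planes and mode info offsets for encoding
// the given source frame.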
void setup_mi(AV1_COMP *const cpi, YV12_BUFFER_CONFIG *src) {
  AV1_COMMON *const cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);
  MACROBLOCK *const x = &cpi->td.mb;
  MACROBLOCKD *const xd = &x->e_mbd;

  av1_setup_src_planes(x, src, 0, 0, num_planes, cm->seq_params->sb_size);

  av1_setup_block_planes(xd, cm->seq_params->subsampling_x,
                         cm->seq_params->subsampling_y, num_planes);

  set_mi_offsets(&cm->mi_params, xd, 0, 0);
}

// Apply temporal filtering to source frames and encode the filtered frame.
// If the current frame does not require filtering, this function is identical
// to av1_encode() except that tpl is not performed.
static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
                              EncodeFrameInput *const frame_input,
                              EncodeFrameParams *const frame_params,
                              EncodeFrameResults *const frame_results) {
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, denoise_and_encode_time);
#endif
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  AV1_COMMON *const cm = &cpi->common;
  GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  FRAME_UPDATE_TYPE update_type =
      get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);

  // Decide whether to apply temporal filtering to the source frame.
  int apply_filtering = 0;
  if (frame_params->frame_type == KEY_FRAME) {
    // Decide whether it is allowed to perform key frame filtering
    int allow_kf_filtering =
        oxcf->kf_cfg.enable_keyframe_filtering &&
        !is_stat_generation_stage(cpi) && !frame_params->show_existing_frame &&
        cpi->rc.frames_to_key > cpi->oxcf.algo_cfg.arnr_max_frames &&
        !is_lossless_requested(&oxcf->rc_cfg) &&
        oxcf->algo_cfg.arnr_max_frames > 0 && oxcf->gf_cfg.lag_in_frames > 1;
    if (allow_kf_filtering) {
      const double y_noise_level = av1_estimate_noise_from_single_plane(
          frame_input->source, 0, cm->seq_params->bit_depth);
      apply_filtering = y_noise_level > 0;
    } else {
      apply_filtering = 0;
    }
    // If we are doing kf filtering, set up a few things.
    if (apply_filtering) {
      av1_setup_past_independence(cm);
    }
  } else if (update_type == ARF_UPDATE || update_type == INTNL_ARF_UPDATE) {
    // ARF
    apply_filtering = oxcf->algo_cfg.arnr_max_frames > 0;
  }
  if (is_stat_generation_stage(cpi)) {
    apply_filtering = 0;
  }

#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
#endif
  // Save the pointer to the original source image.
  YV12_BUFFER_CONFIG *source_buffer = frame_input->source;
  // Apply filtering to the frame.
  if (apply_filtering) {
    int show_existing_alt_ref = 0;
    // TODO(bohanli): figure out why we need frame_type in cm here.
    cm->current_frame.frame_type = frame_params->frame_type;
    int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
    int is_forward_keyframe = 0;
    if (gf_group->frame_type[cpi->gf_frame_index] == KEY_FRAME &&
        gf_group->refbuf_state[cpi->gf_frame_index] == REFBUF_UPDATE)
      is_forward_keyframe = 1;

    const int code_arf =
        av1_temporal_filter(cpi, arf_src_index, update_type,
                            is_forward_keyframe, &show_existing_alt_ref);
    if (code_arf) {
      aom_extend_frame_borders(&cpi->ppi->alt_ref_buffer, av1_num_planes(cm));
      frame_input->source = &cpi->ppi->alt_ref_buffer;
      aom_copy_metadata_to_frame_buffer(frame_input->source,
                                        source_buffer->metadata);
    }
    // Currently INTNL_ARF_UPDATE only does show_existing.
    if (update_type == ARF_UPDATE &&
        gf_group->frame_type[cpi->gf_frame_index] != KEY_FRAME) {
      cpi->ppi->show_existing_alt_ref = show_existing_alt_ref;
    }
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, apply_filtering_time);
#endif

  // Perform tpl after filtering.
  int allow_tpl = oxcf->gf_cfg.lag_in_frames > 1 &&
                  !is_stat_generation_stage(cpi) &&
                  oxcf->algo_cfg.enable_tpl_model;

  if (gf_group->size > MAX_LENGTH_TPL_FRAME_STATS) {
    allow_tpl = 0;
  }
  if (frame_params->frame_type == KEY_FRAME) {
    // Don't do tpl for fwd key frames or fwd key frame overlays.
    allow_tpl = allow_tpl && !cpi->sf.tpl_sf.disable_filtered_key_tpl &&
                gf_group->update_type[cpi->gf_frame_index] != OVERLAY_UPDATE;
  } else {
    // Do tpl after ARF is filtered, or if no ARF, at the second frame of GF
    // group.
    // TODO(bohanli): if no ARF, just do it at the first frame.
    int gf_index = cpi->gf_frame_index;
    allow_tpl = allow_tpl && (gf_group->update_type[gf_index] == ARF_UPDATE ||
                              gf_group->update_type[gf_index] == GF_UPDATE);
    if (allow_tpl) {
      // Need to set the size for TPL for ARF.
      // TODO(bohanli): Why is this? What part of it is necessary?
      av1_set_frame_size(cpi, cm->superres_upscaled_width,
                         cm->superres_upscaled_height);
    }
  }

#if CONFIG_RD_COMMAND
  if (frame_params->frame_type == KEY_FRAME) {
    char filepath[] = "rd_command.txt";
    av1_read_rd_command(filepath, &cpi->rd_command);
  }
#endif  // CONFIG_RD_COMMAND
  if (allow_tpl == 0) {
    // Avoid the use of unintended TPL stats from previous GOP's results.
    if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi))
      av1_init_tpl_stats(&cpi->ppi->tpl_data);
  } else {
    if (!cpi->skip_tpl_setup_stats) {
      av1_tpl_preload_rc_estimate(cpi, frame_params);
      av1_tpl_setup_stats(cpi, 0, frame_params, frame_input);
#if CONFIG_BITRATE_ACCURACY
      av1_vbr_rc_update_q_index_list(&cpi->vbr_rc_info, &cpi->ppi->tpl_data,
                                     gf_group, cpi->gf_frame_index,
                                     cm->seq_params->bit_depth);
#endif
    }
  }

  if (av1_encode(cpi, dest, frame_input, frame_params, frame_results) !=
      AOM_CODEC_OK) {
    return AOM_CODEC_ERROR;
  }

  // Set frame_input source to true source for psnr calculation.
  if (apply_filtering && is_psnr_calc_enabled(cpi)) {
    cpi->source =
        av1_scale_if_required(cm, source_buffer, &cpi->scaled_source,
                              cm->features.interp_filter, 0, false, true);
    cpi->unscaled_source = source_buffer;
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, denoise_and_encode_time);
#endif
  return AOM_CODEC_OK;
}
#endif  // !CONFIG_REALTIME_ONLY

#if !CONFIG_FRAME_PARALLEL_ENCODE
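// Return the first buffer index in the given stack that does not appear in
// used_ref_frames, or INVALID_IDX if all stack entries are in use.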
static INLINE int find_unused_ref_frame(const int *used_ref_frames,
                                        const int *stack, int stack_size) {
  for (int i = 0; i < stack_size; ++i) {
    const int this_ref = stack[i];
    int ref_idx = 0;
    for (ref_idx = 0; ref_idx <= ALTREF_FRAME - LAST_FRAME; ++ref_idx) {
      if (this_ref == used_ref_frames[ref_idx]) break;
    }

    // not in use
    if (ref_idx > ALTREF_FRAME - LAST_FRAME) return this_ref;
  }

  return INVALID_IDX;
}
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

#if CONFIG_FRAME_PARALLEL_ENCODE
/*!\cond */
// Struct to keep track of relevant reference frame data.
typedef struct {
  int map_idx;
  int disp_order;
  int pyr_level;
  int used;
} RefBufMapData;
/*!\endcond */

// Comparison function to sort reference frames in ascending display order.
static int compare_map_idx_pair_asc(const void *a, const void *b) {
  if (((RefBufMapData *)a)->disp_order == ((RefBufMapData *)b)->disp_order) {
    return 0;
  } else if (((const RefBufMapData *)a)->disp_order >
             ((const RefBufMapData *)b)->disp_order) {
    return 1;
  } else {
    return -1;
  }
}

// Checks to see if a particular reference frame is already in the reference
// frame map.
static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) {
  for (int i = 0; i < n_frames; i++) {
    if (disp_order == map[i].disp_order) return 1;
  }
  return 0;
}

// Add a reference buffer index to a named reference slot.
static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx,
                            int frame) {
  remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx;
  ref->used = 1;
}

// Threshold dictating when we are allowed to start considering
// leaving lowest level frames unmapped.
#define LOW_LEVEL_FRAMES_TR 5

// Find which reference buffer should be left out of the named mapping.
// This is because there are 8 reference buffers and only 7 named slots.
static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs,
                             int n_min_level_refs, int min_level,
                             int cur_frame_disp) {
  int max_dist = 0;
  int unmapped_idx = -1;
  if (n_bufs <= ALTREF_FRAME) return;
  for (int i = 0; i < n_bufs; i++) {
    if (buffer_map[i].used) continue;
    if (buffer_map[i].pyr_level != min_level ||
        n_min_level_refs >= LOW_LEVEL_FRAMES_TR) {
      int dist = abs(cur_frame_disp - buffer_map[i].disp_order);
      if (dist > max_dist) {
        max_dist = dist;
        unmapped_idx = i;
      }
    }
  }
  assert(unmapped_idx >= 0 && "Unmapped reference not found");
  buffer_map[unmapped_idx].used = 1;
}

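// Assign the available reference buffers to the named reference slots
// (LAST_FRAME .. ALTREF_FRAME) for the current frame, based on display order
// and pyramid level.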
get_ref_frames(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],const AV1_COMP * const cpi,int gf_index,int is_parallel_encode,int cur_frame_disp,int remapped_ref_idx[REF_FRAMES])1143 static void get_ref_frames(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
1144 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1145                            const AV1_COMP *const cpi, int gf_index,
1146                            int is_parallel_encode,
1147 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1148                            int cur_frame_disp,
1149                            int remapped_ref_idx[REF_FRAMES]) {
1150   int buf_map_idx = 0;
1151 
1152   // Initialize reference frame mappings.
1153   for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
1154 
1155   RefBufMapData buffer_map[REF_FRAMES];
1156   int n_bufs = 0;
1157   memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
1158   int min_level = MAX_ARF_LAYERS;
1159   int max_level = 0;
1160 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1161   GF_GROUP *gf_group = &cpi->ppi->gf_group;
1162   int skip_ref_unmapping = 0;
1163   int is_one_pass_rt = is_one_pass_rt_params(cpi);
1164 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1165 
1166   // Go through current reference buffers and store display order, pyr level,
1167   // and map index.
1168   for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
1169     // Get reference frame buffer.
1170     RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
1171     if (ref_pair.disp_order == -1) continue;
1172     const int frame_order = ref_pair.disp_order;
1173     // Avoid duplicates.
1174     if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
1175     const int reference_frame_level = ref_pair.pyr_level;
1176 
1177     // Keep track of the lowest and highest levels that currently exist.
1178     if (reference_frame_level < min_level) min_level = reference_frame_level;
1179     if (reference_frame_level > max_level) max_level = reference_frame_level;
1180 
1181     buffer_map[n_bufs].map_idx = map_idx;
1182     buffer_map[n_bufs].disp_order = frame_order;
1183     buffer_map[n_bufs].pyr_level = reference_frame_level;
1184     buffer_map[n_bufs].used = 0;
1185     n_bufs++;
1186   }
1187 
1188   // Sort frames in ascending display order.
1189   qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);
1190 
1191   int n_min_level_refs = 0;
1192   int n_past_high_level = 0;
1193   int closest_past_ref = -1;
1194   int golden_idx = -1;
1195   int altref_idx = -1;
1196 
1197   // Find the GOLDEN_FRAME and BWDREF_FRAME.
1198   // Also collect various stats about the reference frames for the remaining
1199   // mappings.
1200   for (int i = n_bufs - 1; i >= 0; i--) {
1201     if (buffer_map[i].pyr_level == min_level) {
1202       // Keep track of the number of lowest level frames.
1203       n_min_level_refs++;
1204       if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
1205           remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
1206         // Save index for GOLDEN.
1207         golden_idx = i;
1208       } else if (buffer_map[i].disp_order > cur_frame_disp &&
1209                  altref_idx == -1 &&
1210                  remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
1211         // Save index for ALTREF.
1212         altref_idx = i;
1213       }
1214     } else if (buffer_map[i].disp_order == cur_frame_disp) {
1215       // Map the BWDREF_FRAME if this is the show_existing_frame.
1216       add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
1217     }
1218 
1219     // Keep track of the number of past frames that are not at the lowest level.
1220     if (buffer_map[i].disp_order < cur_frame_disp &&
1221         buffer_map[i].pyr_level != min_level)
1222       n_past_high_level++;
1223 
1224 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1225     // During parallel encodes of lower layer frames, exclude the first frame
1226     // (frame_parallel_level 1) from being used for the reference assignment of
1227     // the second frame (frame_parallel_level 2).
1228     if (!is_one_pass_rt && gf_group->frame_parallel_level[gf_index] == 2 &&
1229         gf_group->frame_parallel_level[gf_index - 1] == 1 &&
1230         gf_group->update_type[gf_index - 1] == INTNL_ARF_UPDATE) {
1231       assert(gf_group->update_type[gf_index] == INTNL_ARF_UPDATE);
1232       // TODO(Remya): Use original value of is_parallel_encode when FPMT is
1233       // enabled.
1234       is_parallel_encode = 0;
1235       // If parallel cpis are active, use ref_idx_to_skip, else, use display
1236       // index.
1237       assert(IMPLIES(is_parallel_encode, cpi->ref_idx_to_skip != INVALID_IDX));
1238       assert(IMPLIES(!is_parallel_encode,
1239                      gf_group->skip_frame_as_ref[gf_index] != INVALID_IDX));
1240       buffer_map[i].used = is_parallel_encode
1241                                ? (buffer_map[i].map_idx == cpi->ref_idx_to_skip)
1242                                : (buffer_map[i].disp_order ==
1243                                   gf_group->skip_frame_as_ref[gf_index]);
1244       // In case a ref frame is excluded from being used during assignment,
1245       // skip the call to set_unmapped_ref(). Applicable in steady state.
1246       if (buffer_map[i].used) skip_ref_unmapping = 1;
1247     }
1248 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1249 
1250     // Keep track of where the frames change from being past frames to future
1251     // frames.
1252     if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
1253       closest_past_ref = i;
1254   }
1255 
1256   // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
1257   // frames have the same level.
1258   if (n_min_level_refs <= n_bufs) {
1259     // Map the GOLDEN_FRAME.
1260     if (golden_idx > -1)
1261       add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
1262     // Map the ALTREF_FRAME.
1263     if (altref_idx > -1)
1264       add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
1265   }
1266 
1267   // Find the buffer to be excluded from the mapping.
1268 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1269   if (!skip_ref_unmapping)
1270 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1271     set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
1272                      cur_frame_disp);
1273 
1274   // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
1275   for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
1276     // Continue if the current ref slot is already full.
1277     if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1278     // Find the next unmapped reference buffer
1279     // in decreasing ouptut order relative to current picture.
1280     int next_buf_max = 0;
1281     int next_disp_order = INT_MIN;
1282     for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1283       if (!buffer_map[buf_map_idx].used &&
1284           buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
1285           buffer_map[buf_map_idx].disp_order > next_disp_order) {
1286         next_disp_order = buffer_map[buf_map_idx].disp_order;
1287         next_buf_max = buf_map_idx;
1288       }
1289     }
1290     buf_map_idx = next_buf_max;
1291     if (buf_map_idx < 0) break;
1292     if (buffer_map[buf_map_idx].used) break;
1293     add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1294   }
1295 
1296   // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
1297   for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
1298     // Continue if the current ref slot is already full.
1299     if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1300     // Find the next unmapped reference buffer
1301     // in increasing ouptut order relative to current picture.
1302     int next_buf_max = 0;
1303     int next_disp_order = INT_MAX;
1304     for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1305       if (!buffer_map[buf_map_idx].used &&
1306           buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
1307           buffer_map[buf_map_idx].disp_order < next_disp_order) {
1308         next_disp_order = buffer_map[buf_map_idx].disp_order;
1309         next_buf_max = buf_map_idx;
1310       }
1311     }
1312     buf_map_idx = next_buf_max;
1313     if (buf_map_idx < 0) break;
1314     if (buffer_map[buf_map_idx].used) break;
1315     add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1316   }
1317 
1318   // Place remaining past frames.
1319   buf_map_idx = closest_past_ref;
1320   for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
1321     // Continue if the current ref slot is already full.
1322     if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1323     // Find the next unmapped reference buffer.
1324     for (; buf_map_idx >= 0; buf_map_idx--) {
1325       if (!buffer_map[buf_map_idx].used) break;
1326     }
1327     if (buf_map_idx < 0) break;
1328     if (buffer_map[buf_map_idx].used) break;
1329     add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1330   }
1331 
1332   // Place remaining future frames.
1333   buf_map_idx = n_bufs - 1;
1334   for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
1335     // Continue if the current ref slot is already full.
1336     if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1337     // Find the next unmapped reference buffer.
1338     for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
1339       if (!buffer_map[buf_map_idx].used) break;
1340     }
1341     if (buf_map_idx < 0) break;
1342     if (buffer_map[buf_map_idx].used) break;
1343     add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1344   }
1345 
1346   // Fill any slots that are empty (should only happen for the first 7 frames).
1347   for (int i = 0; i < REF_FRAMES; ++i)
1348     if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
1349 }
1350 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
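
// Illustrative sketch (not called anywhere in the encoder): the past/future
// slot assignment above repeatedly picks, among the still-unused buffers, the
// one whose display order is closest to the current frame on the required
// side. The helper below shows that selection on plain arrays; its name and
// parameters are hypothetical and exist only for illustration.
static INLINE int example_closest_past_buffer(const int *disp_order,
                                              const int *used, int n_bufs,
                                              int cur_frame_disp) {
  int best_idx = -1;
  int best_disp = -1;
  for (int i = 0; i < n_bufs; ++i) {
    // A candidate must be unused and strictly in the past of the current
    // frame; among candidates keep the largest (closest) display order.
    if (!used[i] && disp_order[i] < cur_frame_disp &&
        disp_order[i] > best_disp) {
      best_disp = disp_order[i];
      best_idx = i;
    }
  }
  return best_idx;  // -1 if no unused past buffer exists.
}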
1351 
av1_get_ref_frames(const RefBufferStack * ref_buffer_stack,RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],int cur_frame_disp,const AV1_COMP * cpi,int gf_index,int is_parallel_encode,int remapped_ref_idx[REF_FRAMES])1352 void av1_get_ref_frames(const RefBufferStack *ref_buffer_stack,
1353 #if CONFIG_FRAME_PARALLEL_ENCODE
1354                         RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
1355                         int cur_frame_disp,
1356 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1357                         const AV1_COMP *cpi, int gf_index,
1358                         int is_parallel_encode,
1359 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1360 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1361                         int remapped_ref_idx[REF_FRAMES]) {
1362 #if CONFIG_FRAME_PARALLEL_ENCODE
1363   (void)ref_buffer_stack;
1364   get_ref_frames(ref_frame_map_pairs,
1365 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1366                  cpi, gf_index, is_parallel_encode,
1367 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1368                  cur_frame_disp, remapped_ref_idx);
1369   return;
1370 #else
1371   const int *const arf_stack = ref_buffer_stack->arf_stack;
1372   const int *const lst_stack = ref_buffer_stack->lst_stack;
1373   const int *const gld_stack = ref_buffer_stack->gld_stack;
1374   const int arf_stack_size = ref_buffer_stack->arf_stack_size;
1375   const int lst_stack_size = ref_buffer_stack->lst_stack_size;
1376   const int gld_stack_size = ref_buffer_stack->gld_stack_size;
1377 
1378   // Initialization
1379   for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
1380 
1381   if (arf_stack_size) {
1382     remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] = arf_stack[arf_stack_size - 1];
1383 
1384     if (arf_stack_size > 1)
1385       remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = arf_stack[0];
1386 
1387     if (arf_stack_size > 2)
1388       remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = arf_stack[1];
1389   }
1390 
1391   if (lst_stack_size) {
1392     remapped_ref_idx[LAST_FRAME - LAST_FRAME] = lst_stack[0];
1393 
1394     if (lst_stack_size > 1)
1395       remapped_ref_idx[LAST2_FRAME - LAST_FRAME] = lst_stack[1];
1396   }
1397 
1398   if (gld_stack_size) {
1399     remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] = gld_stack[0];
1400 
1401     // If there are more frames in the golden stack, assign them to BWDREF,
1402     // ALTREF2, or LAST3.
1403     if (gld_stack_size > 1) {
1404       if (arf_stack_size <= 2) {
1405         if (arf_stack_size <= 1) {
1406           remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = gld_stack[1];
1407           if (gld_stack_size > 2)
1408             remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = gld_stack[2];
1409         } else {
1410           remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = gld_stack[1];
1411         }
1412       } else {
1413         remapped_ref_idx[LAST3_FRAME - LAST_FRAME] = gld_stack[1];
1414       }
1415     }
1416   }
1417 
1418   for (int idx = ALTREF_FRAME - LAST_FRAME; idx >= 0; --idx) {
1419     int ref_map_index = remapped_ref_idx[idx];
1420 
1421     if (ref_map_index != INVALID_IDX) continue;
1422 
1423     ref_map_index =
1424         find_unused_ref_frame(remapped_ref_idx, arf_stack, arf_stack_size);
1425 
1426     if (ref_map_index == INVALID_IDX) {
1427       ref_map_index =
1428           find_unused_ref_frame(remapped_ref_idx, gld_stack, gld_stack_size);
1429     }
1430 
1431     if (ref_map_index == INVALID_IDX) {
1432       ref_map_index =
1433           find_unused_ref_frame(remapped_ref_idx, lst_stack, lst_stack_size);
1434     }
1435 
1436     if (ref_map_index != INVALID_IDX) {
1437       remapped_ref_idx[idx] = ref_map_index;
1438     } else if (!gld_stack_size && arf_stack_size) {
1439       remapped_ref_idx[idx] = ref_buffer_stack->arf_stack[0];
1440     } else {
1441       remapped_ref_idx[idx] = ref_buffer_stack->gld_stack[0];
1442     }
1443   }
1444 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1445 }
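
// Worked example for the non-FPMT branch above (hypothetical buffer indices,
// illustration only): with arf_stack = {7, 3, 5}, lst_stack = {1, 2} and
// gld_stack = {0}, the mapping becomes
//   ALTREF_FRAME  -> arf_stack[2] = 5  (top of the ARF stack)
//   BWDREF_FRAME  -> arf_stack[0] = 7
//   ALTREF2_FRAME -> arf_stack[1] = 3
//   LAST_FRAME    -> lst_stack[0] = 1
//   LAST2_FRAME   -> lst_stack[1] = 2
//   GOLDEN_FRAME  -> gld_stack[0] = 0
// The only remaining slot, LAST3_FRAME, finds no unused stack entry (assuming
// find_unused_ref_frame() returns INVALID_IDX when every entry is already
// mapped), so it falls back to gld_stack[0] = 0 and shares the golden buffer.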
1446 
av1_encode_strategy(AV1_COMP * const cpi,size_t * const size,uint8_t * const dest,unsigned int * frame_flags,int64_t * const time_stamp,int64_t * const time_end,const aom_rational64_t * const timestamp_ratio,int * const pop_lookahead,int flush)1447 int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
1448                         uint8_t *const dest, unsigned int *frame_flags,
1449                         int64_t *const time_stamp, int64_t *const time_end,
1450                         const aom_rational64_t *const timestamp_ratio,
1451                         int *const pop_lookahead, int flush) {
1452   AV1EncoderConfig *const oxcf = &cpi->oxcf;
1453   AV1_COMMON *const cm = &cpi->common;
1454   GF_GROUP *gf_group = &cpi->ppi->gf_group;
1455   ExternalFlags *const ext_flags = &cpi->ext_flags;
1456   GFConfig *const gf_cfg = &oxcf->gf_cfg;
1457 
1458   EncodeFrameInput frame_input;
1459   EncodeFrameParams frame_params;
1460   EncodeFrameResults frame_results;
1461   memset(&frame_input, 0, sizeof(frame_input));
1462   memset(&frame_params, 0, sizeof(frame_params));
1463   memset(&frame_results, 0, sizeof(frame_results));
1464 
1465   // Check whether more source frames need to be buffered.
1466   if (flush == 0) {
1467     int srcbuf_size =
1468         av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
1469     int pop_size =
1470         av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage);
1471 
1472     // Keep filling the lookahead buffer; not enough source frames yet.
1473     if (srcbuf_size < pop_size) return -1;
1474   }
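  // For example (illustration only): if the current stage needs to pop
  // pop_size frames at once but only srcbuf_size < pop_size source frames
  // have been buffered so far, the block above returns -1 so the caller
  // feeds more input before encoding continues.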
1475 
1476   if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) {
1477 #if !CONFIG_REALTIME_ONLY
1478     if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1479         !cpi->ppi->twopass.first_pass_done) {
1480       av1_end_first_pass(cpi); /* get last stats packet */
1481       cpi->ppi->twopass.first_pass_done = 1;
1482     }
1483 #endif
1484     return -1;
1485   }
1486 
1487   // TODO(sarahparker) finish bit allocation for one pass pyramid
1488   if (has_no_stats_stage(cpi)) {
1489     gf_cfg->gf_max_pyr_height =
1490         AOMMIN(gf_cfg->gf_max_pyr_height, USE_ALTREF_FOR_ONE_PASS);
1491     gf_cfg->gf_min_pyr_height =
1492         AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height);
1493   }
1494 
1495   cpi->skip_tpl_setup_stats = 0;
1496 #if !CONFIG_REALTIME_ONLY
1497   cpi->twopass_frame.this_frame = NULL;
1498   const int use_one_pass_rt_params = is_one_pass_rt_params(cpi);
1499   if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) {
1500 #if CONFIG_COLLECT_COMPONENT_TIMING
1501     start_timing(cpi, av1_get_second_pass_params_time);
1502 #endif
1503 
1504 #if CONFIG_FRAME_PARALLEL_ENCODE
1505     // Initialize frame_level_rate_correction_factors with the values in
1506     // effect before the parallel frames.
1507     if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
1508       for (int i = 0; i < RATE_FACTOR_LEVELS; i++)
1509         cpi->rc.frame_level_rate_correction_factors[i] =
1510             cpi->ppi->p_rc.rate_correction_factors[i];
1511     }
1512     // Copy mv_stats from ppi to the frame-level cpi.
1513     cpi->mv_stats = cpi->ppi->mv_stats;
1514 #endif
1515     av1_get_second_pass_params(cpi, &frame_params, &frame_input, *frame_flags);
1516 #if CONFIG_COLLECT_COMPONENT_TIMING
1517     end_timing(cpi, av1_get_second_pass_params_time);
1518 #endif
1519   }
1520 #endif
1521 
1522   if (!is_stat_generation_stage(cpi)) {
1523     // TODO(jingning): fwd key frame always uses show existing frame?
1524     if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE &&
1525         gf_group->refbuf_state[cpi->gf_frame_index] == REFBUF_RESET) {
1526       frame_params.show_existing_frame = 1;
1527     } else {
1528       frame_params.show_existing_frame =
1529           (cpi->ppi->show_existing_alt_ref &&
1530            gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) ||
1531           gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE;
1532     }
1533     frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags);
1534 
1535     // Reset show_existing_alt_ref decision to 0 after it is used.
1536     if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) {
1537       cpi->ppi->show_existing_alt_ref = 0;
1538     }
1539   } else {
1540     frame_params.show_existing_frame = 0;
1541   }
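  // For example (illustration only): when an overlay for a temporally
  // filtered ALTREF arrives and the filter stage decided the filtered frame
  // can be displayed directly (cpi->ppi->show_existing_alt_ref), the overlay
  // is coded as a show-existing frame instead of being re-encoded.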
1542 
1543   struct lookahead_entry *source = NULL;
1544   struct lookahead_entry *last_source = NULL;
1545   if (frame_params.show_existing_frame) {
1546     source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
1547     *pop_lookahead = 1;
1548     frame_params.show_frame = 1;
1549   } else {
1550     source = choose_frame_source(cpi, &flush, pop_lookahead, &last_source,
1551                                  &frame_params);
1552   }
1553 
1554   if (source == NULL) {  // If no source was found, we can't encode a frame.
1555 #if !CONFIG_REALTIME_ONLY
1556     if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1557         !cpi->ppi->twopass.first_pass_done) {
1558       av1_end_first_pass(cpi); /* get last stats packet */
1559       cpi->ppi->twopass.first_pass_done = 1;
1560     }
1561 #endif
1562     return -1;
1563   }
1564 
1565 #if CONFIG_FRAME_PARALLEL_ENCODE
1566   // Reset src_offset so that the actual encode call for this frame gets its
1567   // source.
1568   gf_group->src_offset[cpi->gf_frame_index] = 0;
1569 #endif
1570 
1571   // The source may be changed if it is temporally filtered later.
1572   frame_input.source = &source->img;
1573   frame_input.last_source = last_source != NULL ? &last_source->img : NULL;
1574   frame_input.ts_duration = source->ts_end - source->ts_start;
1575   // Save unfiltered source. It is used in av1_get_second_pass_params().
1576   cpi->unfiltered_source = frame_input.source;
1577 
1578   *time_stamp = source->ts_start;
1579   *time_end = source->ts_end;
1580   if (source->ts_start < cpi->time_stamps.first_ts_start) {
1581     cpi->time_stamps.first_ts_start = source->ts_start;
1582     cpi->time_stamps.prev_ts_end = source->ts_start;
1583   }
1584 
1585   av1_apply_encoding_flags(cpi, source->flags);
1586   *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
1587 
1588   // Shown frames and arf-overlay frames need frame-rate consideration.
1589   if (frame_params.show_frame)
1590     adjust_frame_rate(cpi, source->ts_start, source->ts_end);
1591 
1592   if (!frame_params.show_existing_frame) {
1593     if (cpi->film_grain_table) {
1594       cm->cur_frame->film_grain_params_present = aom_film_grain_table_lookup(
1595           cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
1596           &cm->film_grain_params);
1597     } else {
1598       cm->cur_frame->film_grain_params_present =
1599           cm->seq_params->film_grain_params_present;
1600     }
1601     // Only one operating point is supported for now.
1602     const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp);
1603     if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
1604     cm->frame_presentation_time = (uint32_t)pts64;
1605   }
1606 
1607 #if CONFIG_COLLECT_COMPONENT_TIMING
1608   start_timing(cpi, av1_get_one_pass_rt_params_time);
1609 #endif
1610 #if CONFIG_REALTIME_ONLY
1611   av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
1612   if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
1613       cpi->ppi->number_temporal_layers == 1)
1614     av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
1615 #else
1616   if (use_one_pass_rt_params) {
1617     av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
1618     if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
1619         cpi->ppi->number_temporal_layers == 1)
1620       av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
1621   }
1622 #endif
1623 #if CONFIG_COLLECT_COMPONENT_TIMING
1624   end_timing(cpi, av1_get_one_pass_rt_params_time);
1625 #endif
1626 
1627   FRAME_UPDATE_TYPE frame_update_type =
1628       get_frame_update_type(gf_group, cpi->gf_frame_index);
1629 
1630   if (frame_params.show_existing_frame &&
1631       frame_params.frame_type != KEY_FRAME) {
1632     // Force show-existing frames to be INTER, except for forward key frames.
1633     frame_params.frame_type = INTER_FRAME;
1634   }
1635 
1636   // TODO(david.turner@argondesign.com): Move all the encode strategy
1637   // (largely near av1_get_compressed_data) in here
1638 
1639   // TODO(david.turner@argondesign.com): Change all the encode strategy to
1640   // modify frame_params instead of cm or cpi.
1641 
1642   // Per-frame encode speed.  In theory this can vary, but things may have
1643   // been written assuming speed-level will not change within a sequence, so
1644   // this parameter should be used with caution.
1645   frame_params.speed = oxcf->speed;
1646 
1647   // Work out some encoding parameters specific to the pass:
1648   if (has_no_stats_stage(cpi) && oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
1649     av1_cyclic_refresh_update_parameters(cpi);
1650   } else if (is_stat_generation_stage(cpi)) {
1651     cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(&oxcf->rc_cfg);
1652     // The current frame is coded as a key frame in any of these cases:
1653     // 1) It is the first frame of the video.
1654     // 2) All-intra encoding is requested (key_freq_max == 0).
1655     // 3) A key frame is forced via the frame flags.
1656     const int kf_requested =
1657         (cm->current_frame.frame_number == 0 ||
1658          oxcf->kf_cfg.key_freq_max == 0 || (*frame_flags & FRAMEFLAGS_KEY));
1659     if (kf_requested && frame_update_type != OVERLAY_UPDATE &&
1660         frame_update_type != INTNL_OVERLAY_UPDATE) {
1661       frame_params.frame_type = KEY_FRAME;
1662     } else {
1663       frame_params.frame_type = INTER_FRAME;
1664     }
1665   } else if (is_stat_consumption_stage(cpi)) {
1666 #if CONFIG_MISMATCH_DEBUG
1667     mismatch_move_frame_idx_w();
1668 #endif
1669 #if TXCOEFF_COST_TIMER
1670     cm->txcoeff_cost_timer = 0;
1671     cm->txcoeff_cost_count = 0;
1672 #endif
1673   }
1674 
1675   if (!is_stat_generation_stage(cpi))
1676     set_ext_overrides(cm, &frame_params, ext_flags);
1677 
1678   // Shown keyframes and S frames refresh all reference buffers
1679   const int force_refresh_all =
1680       ((frame_params.frame_type == KEY_FRAME && frame_params.show_frame) ||
1681        frame_params.frame_type == S_FRAME) &&
1682       !frame_params.show_existing_frame;
1683 
1684   av1_configure_buffer_updates(
1685       cpi, &frame_params.refresh_frame, frame_update_type,
1686       gf_group->refbuf_state[cpi->gf_frame_index], force_refresh_all);
1687 
1688   if (!is_stat_generation_stage(cpi)) {
1689     const RefCntBuffer *ref_frames[INTER_REFS_PER_FRAME];
1690     const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME];
1691 
1692 #if CONFIG_FRAME_PARALLEL_ENCODE
1693     RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
1694     init_ref_map_pair(cpi, ref_frame_map_pairs);
1695     const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
1696     const int cur_frame_disp =
1697         cpi->common.current_frame.frame_number + order_offset;
1698 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1699 
1700 #if CONFIG_FRAME_PARALLEL_ENCODE
1701     if (gf_group->frame_parallel_level[cpi->gf_frame_index] == 0) {
1702 #else
1703     {
1704 #endif
1705       if (!ext_flags->refresh_frame.update_pending) {
1706         av1_get_ref_frames(&cpi->ref_buffer_stack,
1707 #if CONFIG_FRAME_PARALLEL_ENCODE
1708                            ref_frame_map_pairs, cur_frame_disp,
1709 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1710                            cpi, cpi->gf_frame_index, 1,
1711 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1712 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1713                            cm->remapped_ref_idx);
1714       } else if (cpi->svc.set_ref_frame_config) {
1715         for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++)
1716           cm->remapped_ref_idx[i] = cpi->svc.ref_idx[i];
1717       }
1718     }
1719 
1720     // Get the reference frames
1721     for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1722       ref_frames[i] = get_ref_frame_buf(cm, ref_frame_priority_order[i]);
1723       ref_frame_buf[i] = ref_frames[i] != NULL ? &ref_frames[i]->buf : NULL;
1724     }
1725 
1726     // Work out which reference frame slots may be used.
1727     frame_params.ref_frame_flags =
1728         get_ref_frame_flags(&cpi->sf, is_one_pass_rt_params(cpi), ref_frame_buf,
1729                             ext_flags->ref_frame_flags);
1730 
1731 #if CONFIG_FRAME_PARALLEL_ENCODE
1732     // Set primary_ref_frame of non-reference frames to PRIMARY_REF_NONE.
1733     if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) {
1734       frame_params.primary_ref_frame = PRIMARY_REF_NONE;
1735     } else {
1736       frame_params.primary_ref_frame =
1737           choose_primary_ref_frame(cpi, &frame_params);
1738     }
1739 #else
1740     frame_params.primary_ref_frame =
1741         choose_primary_ref_frame(cpi, &frame_params);
1742 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1743 
1744     frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
1745 
1746 #if CONFIG_FRAME_PARALLEL_ENCODE
1747 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1748     // Call av1_get_refresh_frame_flags() if the refresh index is not available.
1749     if (!cpi->refresh_idx_available) {
1750 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1751 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1752       frame_params.refresh_frame_flags = av1_get_refresh_frame_flags(
1753           cpi, &frame_params, frame_update_type, cpi->gf_frame_index,
1754 #if CONFIG_FRAME_PARALLEL_ENCODE
1755           cur_frame_disp, ref_frame_map_pairs,
1756 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1757           &cpi->ref_buffer_stack);
1758 #if CONFIG_FRAME_PARALLEL_ENCODE
1759 #if CONFIG_FRAME_PARALLEL_ENCODE_2
1760     } else {
1761       assert(cpi->ref_refresh_index != INVALID_IDX);
1762       frame_params.refresh_frame_flags = (1 << cpi->ref_refresh_index);
1763     }
1764 #endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
1765 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1766 
1767 #if CONFIG_FRAME_PARALLEL_ENCODE
1768     // Make frames marked as is_frame_non_ref non-reference frames.
1769     if (gf_group->is_frame_non_ref[cpi->gf_frame_index])
1770       frame_params.refresh_frame_flags = 0;
1771 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1772 
1773 #if CONFIG_FRAME_PARALLEL_ENCODE
1774     frame_params.existing_fb_idx_to_show = INVALID_IDX;
1775     // Find the frame buffer to show based on display order.
1776     if (frame_params.show_existing_frame) {
1777       for (int frame = 0; frame < REF_FRAMES; frame++) {
1778         const RefCntBuffer *const buf = cm->ref_frame_map[frame];
1779         if (buf == NULL) continue;
1780         const int frame_order = (int)buf->display_order_hint;
1781         if (frame_order == cur_frame_disp)
1782           frame_params.existing_fb_idx_to_show = frame;
1783       }
1784     }
1785 #else
1786     frame_params.existing_fb_idx_to_show =
1787         frame_params.show_existing_frame
1788             ? (frame_update_type == INTNL_OVERLAY_UPDATE
1789                    ? get_ref_frame_map_idx(cm, BWDREF_FRAME)
1790                    : get_ref_frame_map_idx(cm, ALTREF_FRAME))
1791             : INVALID_IDX;
1792 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1793   }
1794 
1795   // The way frame_params->remapped_ref_idx is set up here is a placeholder.
1796   // Currently, reference buffer assignment is done by update_ref_frame_map(),
1797   // which is called by the high-level strategy AFTER encoding a frame.  It
1798   // modifies cm->remapped_ref_idx.  If you want to use an alternative method
1799   // to determine reference buffer assignment, just put your assignments into
1800   // frame_params->remapped_ref_idx here and they will be used when encoding
1801   // this frame.  If frame_params->remapped_ref_idx is set up independently of
1802   // cm->remapped_ref_idx then update_ref_frame_map() will have no effect.
1803   memcpy(frame_params.remapped_ref_idx, cm->remapped_ref_idx,
1804          REF_FRAMES * sizeof(*cm->remapped_ref_idx));
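  // A hypothetical override (illustration only, left commented out so the
  // behavior is unchanged): pin LAST_FRAME to physical buffer 3 and keep the
  // remaining slots as copied from cm->remapped_ref_idx above.
  //   frame_params.remapped_ref_idx[LAST_FRAME - LAST_FRAME] = 3;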
1805 
1806   cpi->td.mb.delta_qindex = 0;
1807 
1808   if (!frame_params.show_existing_frame) {
1809     cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm;
1810   }
1811 
1812 #if CONFIG_REALTIME_ONLY
1813   if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
1814       AOM_CODEC_OK) {
1815     return AOM_CODEC_ERROR;
1816   }
1817 #else
1818   if (has_no_stats_stage(cpi) && oxcf->mode == REALTIME &&
1819       gf_cfg->lag_in_frames == 0) {
1820     if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
1821         AOM_CODEC_OK) {
1822       return AOM_CODEC_ERROR;
1823     }
1824   } else if (denoise_and_encode(cpi, dest, &frame_input, &frame_params,
1825                                 &frame_results) != AOM_CODEC_OK) {
1826     return AOM_CODEC_ERROR;
1827   }
1828 #endif  // CONFIG_REALTIME_ONLY
1829 
1830   if (!is_stat_generation_stage(cpi)) {
1831     // The first pass doesn't modify the reference buffer assignment or
1832     // produce frame flags.
1833     update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags);
1834     set_additional_frame_flags(cm, frame_flags);
1835 #if !CONFIG_FRAME_PARALLEL_ENCODE
1836     if (!ext_flags->refresh_frame.update_pending) {
1837       int ref_map_index =
1838           av1_get_refresh_ref_frame_map(cm->current_frame.refresh_frame_flags);
1839       av1_update_ref_frame_map(cpi, frame_update_type,
1840                                gf_group->refbuf_state[cpi->gf_frame_index],
1841                                ref_map_index, &cpi->ref_buffer_stack);
1842     }
1843 #endif  // CONFIG_FRAME_PARALLEL_ENCODE
1844   }
1845 
1846 #if !CONFIG_REALTIME_ONLY
1847 #if TXCOEFF_COST_TIMER
1848   if (!is_stat_generation_stage(cpi)) {
1849     cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
1850     fprintf(stderr,
1851             "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
1852             "in us\n",
1853             cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
1854             cm->cum_txcoeff_cost_timer);
1855   }
1856 #endif
1857 #endif  // !CONFIG_REALTIME_ONLY
1858 
1859 #if CONFIG_TUNE_VMAF
1860   if (!is_stat_generation_stage(cpi) &&
1861       (oxcf->tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
1862        oxcf->tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) {
1863     av1_update_vmaf_curve(cpi);
1864   }
1865 #endif
1866 
1867   // Unpack frame_results:
1868   *size = frame_results.size;
1869 
1870   // Signal to a higher-level caller whether this frame is droppable.
1871   if (*size > 0) {
1872     cpi->droppable = is_frame_droppable(&cpi->svc, &ext_flags->refresh_frame);
1873   }
1874 
1875   return AOM_CODEC_OK;
1876 }
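
// Illustrative sketch (not part of the encoder): one way a caller might drive
// av1_encode_strategy() until it stops producing output. Everything here
// except av1_encode_strategy() and its parameter types is hypothetical, and
// real callers also pop the lookahead when *pop_lookahead is set and handle
// errors more carefully.
static INLINE void example_drain_frames(AV1_COMP *cpi, uint8_t *dest,
                                        const aom_rational64_t *timestamp_ratio,
                                        int flush) {
  size_t frame_size = 0;
  unsigned int frame_flags = 0;
  int64_t ts_start = 0, ts_end = 0;
  int pop_lookahead = 0;
  // A return value of -1 means no frame was produced (e.g. the lookahead
  // needs more source frames); AOM_CODEC_OK means dest holds frame_size
  // bytes with timestamps [ts_start, ts_end).
  while (av1_encode_strategy(cpi, &frame_size, dest, &frame_flags, &ts_start,
                             &ts_end, timestamp_ratio, &pop_lookahead,
                             flush) == AOM_CODEC_OK) {
    if (frame_size == 0) continue;  // e.g. a frame that produced no packet
    // ... hand (dest, frame_size, ts_start, ts_end) to the output stage ...
  }
}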
1877