1 /*
2 * GStreamer
3 * Copyright (C) 2011 Robert Swain <robert.swain@collabora.co.uk>
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 *
23 * Alternatively, the contents of this file may be used under the
24 * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
25 * which case the following provisions apply instead of the ones
26 * mentioned above:
27 *
28 * This library is free software; you can redistribute it and/or
29 * modify it under the terms of the GNU Library General Public
30 * License as published by the Free Software Foundation; either
31 * version 2 of the License, or (at your option) any later version.
32 *
33 * This library is distributed in the hope that it will be useful,
34 * but WITHOUT ANY WARRANTY; without even the implied warranty of
35 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
36 * Library General Public License for more details.
37 *
38 * You should have received a copy of the GNU Library General Public
39 * License along with this library; if not, write to the
40 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
41 * Boston, MA 02110-1301, USA.
42 */
43
44 /**
45 * SECTION:element-fieldanalysis
46 * @title: fieldanalysis
47 *
48 * Analyse fields from video buffers to identify whether the buffers are
49 * progressive/telecined/interlaced and, if telecined, the telecine pattern
50 * used.
51 *
52 * ## Example launch line
53 * |[
54 * gst-launch-1.0 -v uridecodebin uri=/path/to/foo.bar ! fieldanalysis ! deinterlace ! videoconvert ! autovideosink
55 * ]| This pipeline will analyse a video stream with default metrics and thresholds and output progressive frames.
56 *
57 */
58
59 #ifdef HAVE_CONFIG_H
60 # include <config.h>
61 #endif
62
63 #include <gst/gst.h>
64 #include <gst/video/video.h>
65 #include <string.h>
66 #include <stdlib.h> /* for abs() */
67
68 #include "gstfieldanalysis.h"
69 #include "gstfieldanalysisorc.h"
70
71 GST_DEBUG_CATEGORY_STATIC (gst_field_analysis_debug);
72 #define GST_CAT_DEFAULT gst_field_analysis_debug
73
74 #define DEFAULT_FIELD_METRIC GST_FIELDANALYSIS_SSD
75 #define DEFAULT_FRAME_METRIC GST_FIELDANALYSIS_5_TAP
76 #define DEFAULT_NOISE_FLOOR 16
77 #define DEFAULT_FIELD_THRESH 0.08f
78 #define DEFAULT_FRAME_THRESH 0.002f
79 #define DEFAULT_COMB_METHOD METHOD_5_TAP
80 #define DEFAULT_SPATIAL_THRESH 9
81 #define DEFAULT_BLOCK_WIDTH 16
82 #define DEFAULT_BLOCK_HEIGHT 16
83 #define DEFAULT_BLOCK_THRESH 80
84 #define DEFAULT_IGNORED_LINES 2
85
86 enum
87 {
88 PROP_0,
89 PROP_FIELD_METRIC,
90 PROP_FRAME_METRIC,
91 PROP_NOISE_FLOOR,
92 PROP_FIELD_THRESH,
93 PROP_FRAME_THRESH,
94 PROP_COMB_METHOD,
95 PROP_SPATIAL_THRESH,
96 PROP_BLOCK_WIDTH,
97 PROP_BLOCK_HEIGHT,
98 PROP_BLOCK_THRESH,
99 PROP_IGNORED_LINES
100 };
101
102 static GstStaticPadTemplate sink_factory =
103 GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
104 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{YUY2,UYVY,Y42B,I420,YV12}")));
105
106 static GstStaticPadTemplate src_factory =
107 GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
108 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{YUY2,UYVY,Y42B,I420,YV12}")));
109
110 G_DEFINE_TYPE (GstFieldAnalysis, gst_field_analysis, GST_TYPE_ELEMENT);
111 #define parent_class gst_field_analysis_parent_class
112
113 static void gst_field_analysis_set_property (GObject * object, guint prop_id,
114 const GValue * value, GParamSpec * pspec);
115 static void gst_field_analysis_get_property (GObject * object, guint prop_id,
116 GValue * value, GParamSpec * pspec);
117
118 static gboolean gst_field_analysis_sink_event (GstPad * pad, GstObject * parent,
119 GstEvent * event);
120 static GstFlowReturn gst_field_analysis_chain (GstPad * pad, GstObject * parent,
121 GstBuffer * buf);
122 static GstStateChangeReturn gst_field_analysis_change_state (GstElement *
123 element, GstStateChange transition);
124 static void gst_field_analysis_finalize (GObject * self);
125
126 static GQueue *gst_field_analysis_flush_frames (GstFieldAnalysis * filter);
127
128 typedef enum
129 {
130 GST_FIELDANALYSIS_SAD,
131 GST_FIELDANALYSIS_SSD,
132 GST_FIELDANALYSIS_3_TAP
133 } GstFieldAnalysisFieldMetric;
134
135 #define GST_TYPE_FIELDANALYSIS_FIELD_METRIC (gst_fieldanalysis_field_metric_get_type())
136 static GType
gst_fieldanalysis_field_metric_get_type(void)137 gst_fieldanalysis_field_metric_get_type (void)
138 {
139 static GType fieldanalysis_field_metric_type = 0;
140
141 if (!fieldanalysis_field_metric_type) {
142 static const GEnumValue fieldanalysis_field_metrics[] = {
143 {GST_FIELDANALYSIS_SAD, "Sum of Absolute Differences", "sad"},
144 {GST_FIELDANALYSIS_SSD, "Sum of Squared Differences", "ssd"},
145 {GST_FIELDANALYSIS_3_TAP, "Difference of 3-tap [1,4,1] Horizontal Filter",
146 "3-tap"},
147 {0, NULL, NULL},
148 };
149
150 fieldanalysis_field_metric_type =
151 g_enum_register_static ("GstFieldAnalysisFieldMetric",
152 fieldanalysis_field_metrics);
153 }
154
155 return fieldanalysis_field_metric_type;
156 }
157
158 typedef enum
159 {
160 GST_FIELDANALYSIS_5_TAP,
161 GST_FIELDANALYSIS_WINDOWED_COMB
162 } GstFieldAnalysisFrameMetric;
163
164 #define GST_TYPE_FIELDANALYSIS_FRAME_METRIC (gst_fieldanalysis_frame_metric_get_type())
165 static GType
gst_fieldanalysis_frame_metric_get_type(void)166 gst_fieldanalysis_frame_metric_get_type (void)
167 {
168 static GType fieldanalysis_frame_metric_type = 0;
169
170 if (!fieldanalysis_frame_metric_type) {
171 static const GEnumValue fieldanalyis_frame_metrics[] = {
172 {GST_FIELDANALYSIS_5_TAP, "5-tap [1,-3,4,-3,1] Vertical Filter", "5-tap"},
173 {GST_FIELDANALYSIS_WINDOWED_COMB,
174 "Windowed Comb Detection (not optimised)",
175 "windowed-comb"},
176 {0, NULL, NULL},
177 };
178
179 fieldanalysis_frame_metric_type =
180 g_enum_register_static ("GstFieldAnalysisFrameMetric",
181 fieldanalyis_frame_metrics);
182 }
183
184 return fieldanalysis_frame_metric_type;
185 }
186
187 #define GST_TYPE_FIELDANALYSIS_COMB_METHOD (gst_fieldanalysis_comb_method_get_type())
188 static GType
gst_fieldanalysis_comb_method_get_type(void)189 gst_fieldanalysis_comb_method_get_type (void)
190 {
191 static GType fieldanalysis_comb_method_type = 0;
192
193 if (!fieldanalysis_comb_method_type) {
194 static const GEnumValue fieldanalyis_comb_methods[] = {
195 {METHOD_32DETECT,
196 "Difference to above sample in same field small and difference to sample in other field large",
197 "32-detect"},
198 {METHOD_IS_COMBED,
199 "Differences between current sample and the above/below samples in other field multiplied together, larger than squared spatial threshold (from Tritical's isCombed)",
200 "isCombed"},
201 {METHOD_5_TAP,
202 "5-tap [1,-3,4,-3,1] vertical filter result is larger than spatial threshold*6",
203 "5-tap"},
204 {0, NULL, NULL},
205 };
206
207 fieldanalysis_comb_method_type =
208 g_enum_register_static ("FieldAnalysisCombMethod",
209 fieldanalyis_comb_methods);
210 }
211
212 return fieldanalysis_comb_method_type;
213 }
214
/* Class initialisation: wires up GObject property handling, installs the
 * element's configuration properties, hooks the state-change handler and
 * registers the static pad templates plus the element metadata. */
static void
gst_field_analysis_class_init (GstFieldAnalysisClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  gobject_class->set_property = gst_field_analysis_set_property;
  gobject_class->get_property = gst_field_analysis_get_property;
  gobject_class->finalize = gst_field_analysis_finalize;

  /* Metric selection and thresholds for the field/frame comparisons. */
  g_object_class_install_property (gobject_class, PROP_FIELD_METRIC,
      g_param_spec_enum ("field-metric", "Field Metric",
          "Metric to be used for comparing same parity fields to decide if they are a repeated field for telecine",
          GST_TYPE_FIELDANALYSIS_FIELD_METRIC, DEFAULT_FIELD_METRIC,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_FRAME_METRIC,
      g_param_spec_enum ("frame-metric", "Frame Metric",
          "Metric to be used for comparing opposite parity fields to decide if they are a progressive frame",
          GST_TYPE_FIELDANALYSIS_FRAME_METRIC, DEFAULT_FRAME_METRIC,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_NOISE_FLOOR,
      g_param_spec_uint ("noise-floor", "Noise Floor",
          "Noise floor for appropriate metrics (per-pixel metric values with a score less than this will be ignored)",
          0, G_MAXUINT32,
          DEFAULT_NOISE_FLOOR, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_FIELD_THRESH,
      g_param_spec_float ("field-threshold", "Field Threshold",
          "Threshold for field metric decisions", 0.0f, G_MAXFLOAT,
          DEFAULT_FIELD_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_FRAME_THRESH,
      g_param_spec_float ("frame-threshold", "Frame Threshold",
          "Threshold for frame metric decisions", 0.0f, G_MAXFLOAT,
          DEFAULT_FRAME_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* Parameters specific to windowed comb detection. */
  g_object_class_install_property (gobject_class, PROP_COMB_METHOD,
      g_param_spec_enum ("comb-method", "Comb-detection Method",
          "Metric to be used for identifying comb artifacts if using windowed comb detection",
          GST_TYPE_FIELDANALYSIS_COMB_METHOD, DEFAULT_COMB_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_SPATIAL_THRESH,
      g_param_spec_int64 ("spatial-threshold", "Spatial Combing Threshold",
          "Threshold for combing metric decisions", 0, G_MAXINT64,
          DEFAULT_SPATIAL_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_BLOCK_WIDTH,
      g_param_spec_uint64 ("block-width", "Block width",
          "Block width for windowed comb detection", 1, G_MAXUINT64,
          DEFAULT_BLOCK_WIDTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_BLOCK_HEIGHT,
      g_param_spec_uint64 ("block-height", "Block height",
          "Block height for windowed comb detection", 0, G_MAXUINT64,
          DEFAULT_BLOCK_HEIGHT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_BLOCK_THRESH,
      g_param_spec_uint64 ("block-threshold", "Block threshold",
          "Block threshold for windowed comb detection", 0, G_MAXUINT64,
          DEFAULT_BLOCK_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_IGNORED_LINES,
      g_param_spec_uint64 ("ignored-lines", "Ignored lines",
          "Ignore this many lines from the top and bottom for windowed comb detection",
          2, G_MAXUINT64, DEFAULT_IGNORED_LINES,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_field_analysis_change_state);

  gst_element_class_set_static_metadata (gstelement_class,
      "Video field analysis",
      "Filter/Analysis/Video",
      "Analyse fields from video frames to identify if they are progressive/telecined/interlaced",
      "Robert Swain <robert.swain@collabora.co.uk>");

  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
  gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);

}
291
292 static gfloat same_parity_sad (GstFieldAnalysis * filter,
293 FieldAnalysisFields (*history)[2]);
294 static gfloat same_parity_ssd (GstFieldAnalysis * filter,
295 FieldAnalysisFields (*history)[2]);
296 static gfloat same_parity_3_tap (GstFieldAnalysis * filter,
297 FieldAnalysisFields (*history)[2]);
298 static gfloat opposite_parity_5_tap (GstFieldAnalysis * filter,
299 FieldAnalysisFields (*history)[2]);
300 static guint64 block_score_for_row_32detect (GstFieldAnalysis * filter,
301 FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1);
302 static guint64 block_score_for_row_iscombed (GstFieldAnalysis * filter,
303 FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1);
304 static guint64 block_score_for_row_5_tap (GstFieldAnalysis * filter,
305 FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1);
306 static gfloat opposite_parity_windowed_comb (GstFieldAnalysis * filter,
307 FieldAnalysisFields (*history)[2]);
308
309 static void
gst_field_analysis_clear_frames(GstFieldAnalysis * filter)310 gst_field_analysis_clear_frames (GstFieldAnalysis * filter)
311 {
312 GST_DEBUG_OBJECT (filter, "Clearing %d frames", filter->nframes);
313 while (filter->nframes) {
314 gst_video_frame_unmap (&filter->frames[filter->nframes - 1].frame);
315 filter->nframes--;
316 }
317 }
318
319 static void
gst_field_analysis_reset(GstFieldAnalysis * filter)320 gst_field_analysis_reset (GstFieldAnalysis * filter)
321 {
322 gst_field_analysis_clear_frames (filter);
323 GST_DEBUG_OBJECT (filter, "Resetting context");
324 memset (filter->frames, 0, 2 * sizeof (FieldAnalysisHistory));
325 filter->is_telecine = FALSE;
326 filter->first_buffer = TRUE;
327 gst_video_info_init (&filter->vinfo);
328 g_free (filter->comb_mask);
329 filter->comb_mask = NULL;
330 g_free (filter->block_scores);
331 filter->block_scores = NULL;
332 }
333
/* Instance initialisation: create and add the sink/src pads, reset the
 * analysis context and load the default metrics and thresholds. */
static void
gst_field_analysis_init (GstFieldAnalysis * filter)
{
  filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
  gst_pad_set_event_function (filter->sinkpad,
      GST_DEBUG_FUNCPTR (gst_field_analysis_sink_event));
  gst_pad_set_chain_function (filter->sinkpad,
      GST_DEBUG_FUNCPTR (gst_field_analysis_chain));

  filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");

  gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
  gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);

  /* nframes must be zeroed before _reset () so its clear loop is a no-op
   * on a fresh instance */
  filter->nframes = 0;
  gst_field_analysis_reset (filter);
  /* default metrics are selected by storing the matching function pointer */
  filter->same_field = &same_parity_ssd;
  filter->field_thresh = DEFAULT_FIELD_THRESH;
  filter->same_frame = &opposite_parity_5_tap;
  filter->frame_thresh = DEFAULT_FRAME_THRESH;
  filter->noise_floor = DEFAULT_NOISE_FLOOR;
  filter->block_score_for_row = &block_score_for_row_5_tap;
  filter->spatial_thresh = DEFAULT_SPATIAL_THRESH;
  filter->block_width = DEFAULT_BLOCK_WIDTH;
  filter->block_height = DEFAULT_BLOCK_HEIGHT;
  filter->block_thresh = DEFAULT_BLOCK_THRESH;
  filter->ignored_lines = DEFAULT_IGNORED_LINES;
}
362
/* GObject property setter.  The metric/method enum properties are stored by
 * swapping the corresponding function pointer on the filter rather than
 * storing the enum value itself. */
static void
gst_field_analysis_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstFieldAnalysis *filter = GST_FIELDANALYSIS (object);

  switch (prop_id) {
    case PROP_FIELD_METRIC:
      /* map the enum value onto the same-parity field comparison function */
      switch (g_value_get_enum (value)) {
        case GST_FIELDANALYSIS_SAD:
          filter->same_field = &same_parity_sad;
          break;
        case GST_FIELDANALYSIS_SSD:
          filter->same_field = &same_parity_ssd;
          break;
        case GST_FIELDANALYSIS_3_TAP:
          filter->same_field = &same_parity_3_tap;
          break;
        default:
          break;
      }
      break;
    case PROP_FRAME_METRIC:
      /* map the enum value onto the opposite-parity frame comparison */
      switch (g_value_get_enum (value)) {
        case GST_FIELDANALYSIS_5_TAP:
          filter->same_frame = &opposite_parity_5_tap;
          break;
        case GST_FIELDANALYSIS_WINDOWED_COMB:
          filter->same_frame = &opposite_parity_windowed_comb;
          break;
        default:
          break;
      }
      break;
    case PROP_NOISE_FLOOR:
      filter->noise_floor = g_value_get_uint (value);
      break;
    case PROP_FIELD_THRESH:
      filter->field_thresh = g_value_get_float (value);
      break;
    case PROP_FRAME_THRESH:
      filter->frame_thresh = g_value_get_float (value);
      break;
    case PROP_COMB_METHOD:
      /* map the enum value onto the per-row block scoring function */
      switch (g_value_get_enum (value)) {
        case METHOD_32DETECT:
          filter->block_score_for_row = &block_score_for_row_32detect;
          break;
        case METHOD_IS_COMBED:
          filter->block_score_for_row = &block_score_for_row_iscombed;
          break;
        case METHOD_5_TAP:
          filter->block_score_for_row = &block_score_for_row_5_tap;
          break;
        default:
          break;
      }
      break;
    case PROP_SPATIAL_THRESH:
      filter->spatial_thresh = g_value_get_int64 (value);
      break;
    case PROP_BLOCK_WIDTH:
      filter->block_width = g_value_get_uint64 (value);
      /* block_scores holds one score per block across the frame width, so
       * it must be resized whenever the block width changes and a frame
       * width is already known */
      if (GST_VIDEO_FRAME_WIDTH (&filter->frames[0].frame)) {
        const gint frame_width =
            GST_VIDEO_FRAME_WIDTH (&filter->frames[0].frame);
        if (filter->block_scores) {
          gsize nbytes = (frame_width / filter->block_width) * sizeof (guint);
          filter->block_scores = g_realloc (filter->block_scores, nbytes);
          memset (filter->block_scores, 0, nbytes);
        } else {
          filter->block_scores =
              g_malloc0 ((frame_width / filter->block_width) * sizeof (guint));
        }
      }
      break;
    case PROP_BLOCK_HEIGHT:
      filter->block_height = g_value_get_uint64 (value);
      break;
    case PROP_BLOCK_THRESH:
      filter->block_thresh = g_value_get_uint64 (value);
      break;
    case PROP_IGNORED_LINES:
      filter->ignored_lines = g_value_get_uint64 (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
453
454 static void
gst_field_analysis_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)455 gst_field_analysis_get_property (GObject * object, guint prop_id,
456 GValue * value, GParamSpec * pspec)
457 {
458 GstFieldAnalysis *filter = GST_FIELDANALYSIS (object);
459
460 switch (prop_id) {
461 case PROP_FIELD_METRIC:
462 {
463 GstFieldAnalysisFieldMetric metric = DEFAULT_FIELD_METRIC;
464 if (filter->same_field == &same_parity_sad) {
465 metric = GST_FIELDANALYSIS_SAD;
466 } else if (filter->same_field == &same_parity_ssd) {
467 metric = GST_FIELDANALYSIS_SSD;
468 } else if (filter->same_field == &same_parity_3_tap) {
469 metric = GST_FIELDANALYSIS_3_TAP;
470 }
471 g_value_set_enum (value, metric);
472 break;
473 }
474 case PROP_FRAME_METRIC:
475 {
476 GstFieldAnalysisFrameMetric metric = DEFAULT_FRAME_METRIC;
477 if (filter->same_frame == &opposite_parity_5_tap) {
478 metric = GST_FIELDANALYSIS_5_TAP;
479 } else if (filter->same_frame == &opposite_parity_windowed_comb) {
480 metric = GST_FIELDANALYSIS_WINDOWED_COMB;
481 }
482 g_value_set_enum (value, metric);
483 break;
484 }
485 case PROP_NOISE_FLOOR:
486 g_value_set_uint (value, filter->noise_floor);
487 break;
488 case PROP_FIELD_THRESH:
489 g_value_set_float (value, filter->field_thresh);
490 break;
491 case PROP_FRAME_THRESH:
492 g_value_set_float (value, filter->frame_thresh);
493 break;
494 case PROP_COMB_METHOD:
495 {
496 FieldAnalysisCombMethod method = DEFAULT_COMB_METHOD;
497 if (filter->block_score_for_row == &block_score_for_row_32detect) {
498 method = METHOD_32DETECT;
499 } else if (filter->block_score_for_row == &block_score_for_row_iscombed) {
500 method = METHOD_IS_COMBED;
501 } else if (filter->block_score_for_row == &block_score_for_row_5_tap) {
502 method = METHOD_5_TAP;
503 }
504 g_value_set_enum (value, method);
505 break;
506 }
507 case PROP_SPATIAL_THRESH:
508 g_value_set_int64 (value, filter->spatial_thresh);
509 break;
510 case PROP_BLOCK_WIDTH:
511 g_value_set_uint64 (value, filter->block_width);
512 break;
513 case PROP_BLOCK_HEIGHT:
514 g_value_set_uint64 (value, filter->block_height);
515 break;
516 case PROP_BLOCK_THRESH:
517 g_value_set_uint64 (value, filter->block_thresh);
518 break;
519 case PROP_IGNORED_LINES:
520 g_value_set_uint64 (value, filter->ignored_lines);
521 break;
522 default:
523 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
524 break;
525 }
526 }
527
528 static void
gst_field_analysis_update_format(GstFieldAnalysis * filter,GstCaps * caps)529 gst_field_analysis_update_format (GstFieldAnalysis * filter, GstCaps * caps)
530 {
531 gint width;
532 GQueue *outbufs;
533 GstVideoInfo vinfo;
534
535 if (!gst_video_info_from_caps (&vinfo, caps)) {
536 GST_ERROR_OBJECT (filter, "Invalid caps: %" GST_PTR_FORMAT, caps);
537 return;
538 }
539
540 /* if format is unchanged in our eyes, don't update the context */
541 if ((GST_VIDEO_INFO_WIDTH (&filter->vinfo) == GST_VIDEO_INFO_WIDTH (&vinfo))
542 && (GST_VIDEO_INFO_HEIGHT (&filter->vinfo) ==
543 GST_VIDEO_INFO_HEIGHT (&vinfo))
544 && (GST_VIDEO_INFO_COMP_OFFSET (&filter->vinfo, 0) ==
545 GST_VIDEO_INFO_COMP_OFFSET (&vinfo, 0))
546 && (GST_VIDEO_INFO_COMP_PSTRIDE (&filter->vinfo, 0) ==
547 GST_VIDEO_INFO_COMP_PSTRIDE (&vinfo, 0))
548 && (GST_VIDEO_INFO_COMP_STRIDE (&filter->vinfo, 0) ==
549 GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0)))
550 return;
551
552 /* format changed - process and push buffers before updating context */
553
554 GST_OBJECT_LOCK (filter);
555 filter->flushing = TRUE;
556 outbufs = gst_field_analysis_flush_frames (filter);
557 GST_OBJECT_UNLOCK (filter);
558
559 if (outbufs) {
560 while (g_queue_get_length (outbufs))
561 gst_pad_push (filter->srcpad, g_queue_pop_head (outbufs));
562 }
563
564 GST_OBJECT_LOCK (filter);
565 filter->flushing = FALSE;
566
567 filter->vinfo = vinfo;
568 width = GST_VIDEO_INFO_WIDTH (&filter->vinfo);
569
570 /* update allocations for metric scores */
571 if (filter->comb_mask) {
572 filter->comb_mask = g_realloc (filter->comb_mask, width);
573 } else {
574 filter->comb_mask = g_malloc (width);
575 }
576 if (filter->block_scores) {
577 gsize nbytes = (width / filter->block_width) * sizeof (guint);
578 filter->block_scores = g_realloc (filter->block_scores, nbytes);
579 memset (filter->block_scores, 0, nbytes);
580 } else {
581 filter->block_scores =
582 g_malloc0 ((width / filter->block_width) * sizeof (guint));
583 }
584
585 GST_OBJECT_UNLOCK (filter);
586 return;
587 }
588
589 #define FIELD_ANALYSIS_TOP_BOTTOM (1 << 0)
590 #define FIELD_ANALYSIS_BOTTOM_TOP (1 << 1)
591 #define FIELD_ANALYSIS_TOP_MATCH (1 << 2)
592 #define FIELD_ANALYSIS_BOTTOM_MATCH (1 << 3)
593
594 /* decorate removes a buffer from the internal queue, on which we have a ref,
595 * then makes its metadata writable (could be the same buffer, could be a new
596 * buffer, but either way we have a ref on it), decorates this buffer and
597 * returns it */
/* Pop the oldest queued frame, update downstream caps if the interlacing
 * conclusion changed and set the buffer flags that describe the analysis
 * result.
 *
 * @tff: tri-state despite the gboolean type: 0 clears the TFF flag,
 *       1 sets it, and -1 keeps whatever flag the buffer already carries.
 * @onefield: whether only one field of the frame is valid.
 * @conclusion: the analysis verdict for this frame.
 * @drop: sets the RFF flag (repeat/droppable field).
 *
 * NOTE(review): this releases and re-takes the object lock around
 * gst_pad_set_caps (), which implies the caller holds GST_OBJECT_LOCK on
 * @filter when calling — confirm against the call sites.
 * Returns the decorated buffer (caller owns a ref), or NULL if setting
 * caps failed (in which case the frame is NOT popped from the history). */
static GstBuffer *
gst_field_analysis_decorate (GstFieldAnalysis * filter, gboolean tff,
    gboolean onefield, FieldAnalysisConclusion conclusion, gboolean drop)
{
  GstBuffer *buf = NULL;
  GstCaps *caps;
  GstVideoInfo srcpadvinfo, vinfo = filter->vinfo;

  /* deal with incoming buffer */
  if (conclusion > FIELD_ANALYSIS_PROGRESSIVE || filter->is_telecine == TRUE) {
    filter->is_telecine = conclusion != FIELD_ANALYSIS_INTERLACED;
    if (conclusion >= FIELD_ANALYSIS_TELECINE_PROGRESSIVE
        || filter->is_telecine == TRUE) {
      /* telecined content: per-buffer flags decide field handling */
      GST_VIDEO_INFO_INTERLACE_MODE (&vinfo) = GST_VIDEO_INTERLACE_MODE_MIXED;
    } else {
      GST_VIDEO_INFO_INTERLACE_MODE (&vinfo) =
          GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
    }
  } else {
    GST_VIDEO_INFO_INTERLACE_MODE (&vinfo) =
        GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
  }

  caps = gst_pad_get_current_caps (filter->srcpad);
  gst_video_info_from_caps (&srcpadvinfo, caps);
  gst_caps_unref (caps);
  /* push a caps event on the src pad before pushing the buffer */
  if (!gst_video_info_is_equal (&vinfo, &srcpadvinfo)) {
    gboolean ret = TRUE;

    caps = gst_video_info_to_caps (&vinfo);
    /* drop the lock while talking to the pad to avoid deadlocking on
     * downstream activity */
    GST_OBJECT_UNLOCK (filter);
    ret = gst_pad_set_caps (filter->srcpad, caps);
    GST_OBJECT_LOCK (filter);
    gst_caps_unref (caps);

    if (!ret) {
      GST_ERROR_OBJECT (filter, "Could not set pad caps");
      return NULL;
    }
  }

  /* take the oldest frame (end of the history array) out of the queue */
  buf = filter->frames[filter->nframes - 1].frame.buffer;
  gst_video_frame_unmap (&filter->frames[filter->nframes - 1].frame);
  filter->nframes--;

  /* set buffer flags */
  if (!tff) {
    GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_TFF);
  } else if (tff == 1 || (tff == -1
          && GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF))) {
    GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  if (onefield) {
    GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
  } else {
    GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
  }

  if (drop) {
    GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_RFF);
  } else {
    GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_RFF);
  }

  if (conclusion == FIELD_ANALYSIS_PROGRESSIVE
      || conclusion == FIELD_ANALYSIS_TELECINE_PROGRESSIVE) {
    GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
  }

  GST_DEBUG_OBJECT (filter,
      "Pushing buffer with flags: %p, i %d, tff %d, 1f %d, drop %d; conc %d",
      buf, GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED),
      GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF),
      GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD),
      GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_RFF), conclusion);

  return buf;
}
680
681 /* _flush_one does not touch the buffer ref counts directly but _decorate ()
682 * has some influence on ref counts - see its annotation for details */
/* Decorate and remove the oldest queued frame.  If @outbufs is non-NULL the
 * decorated buffer is appended to it; in either case the buffer is returned
 * (NULL when the history is empty or decoration failed).
 *
 * results.holding encodes which field(s) are still needed: 1 + TOP_FIELD or
 * 1 + BOTTOM_FIELD means exactly one field is wanted (pushed as ONEFIELD
 * with the matching TFF flag); any other value means both fields may be
 * needed, so the existing TFF flag is kept (tff == -1) and RFF is set when
 * nothing is held (!results.holding). */
static GstBuffer *
gst_field_analysis_flush_one (GstFieldAnalysis * filter, GQueue * outbufs)
{
  GstBuffer *buf = NULL;
  FieldAnalysis results;

  if (!filter->nframes)
    return NULL;

  GST_DEBUG_OBJECT (filter, "Flushing last frame (nframes %d)",
      filter->nframes);
  results = filter->frames[filter->nframes - 1].results;
  if (results.holding == 1 + TOP_FIELD || results.holding == 1 + BOTTOM_FIELD) {
    /* should be only one field needed */
    buf =
        gst_field_analysis_decorate (filter, results.holding == 1 + TOP_FIELD,
        TRUE, results.conclusion, FALSE);
  } else {
    /* possibility that both fields are needed */
    buf =
        gst_field_analysis_decorate (filter, -1, FALSE, results.conclusion,
        !results.holding);
  }
  if (buf) {
    /* NOTE(review): _decorate () already decremented nframes for the popped
     * frame; this second decrement skips a further entry — confirm this
     * matches how the chain function accounts for held frames */
    filter->nframes--;
    if (outbufs)
      g_queue_push_tail (outbufs, buf);
  } else {
    GST_DEBUG_OBJECT (filter, "Error occurred during decoration");
  }
  return buf;
}
715
716 /* _flush_frames () has no direct influence on refcounts and nor does _flush_one,
717 * but _decorate () does and so this function does indirectly */
718 static GQueue *
gst_field_analysis_flush_frames(GstFieldAnalysis * filter)719 gst_field_analysis_flush_frames (GstFieldAnalysis * filter)
720 {
721 GQueue *outbufs;
722
723 if (filter->nframes < 2)
724 return NULL;
725
726 outbufs = g_queue_new ();
727
728 while (filter->nframes)
729 gst_field_analysis_flush_one (filter, outbufs);
730
731 return outbufs;
732 }
733
734 static gboolean
gst_field_analysis_sink_event(GstPad * pad,GstObject * parent,GstEvent * event)735 gst_field_analysis_sink_event (GstPad * pad, GstObject * parent,
736 GstEvent * event)
737 {
738 GstFieldAnalysis *filter = GST_FIELDANALYSIS (parent);
739 gboolean forward; /* should we forward the event? */
740 gboolean ret = TRUE;
741
742 GST_LOG_OBJECT (pad, "received %s event: %" GST_PTR_FORMAT,
743 GST_EVENT_TYPE_NAME (event), event);
744
745 switch (GST_EVENT_TYPE (event)) {
746 case GST_EVENT_SEGMENT:
747 case GST_EVENT_EOS:
748 {
749 /* for both SEGMENT and EOS it is safest to process and push queued
750 * buffers */
751 GQueue *outbufs;
752
753 forward = TRUE;
754
755 GST_OBJECT_LOCK (filter);
756 filter->flushing = TRUE;
757 outbufs = gst_field_analysis_flush_frames (filter);
758 GST_OBJECT_UNLOCK (filter);
759
760 if (outbufs) {
761 while (g_queue_get_length (outbufs))
762 gst_pad_push (filter->srcpad, g_queue_pop_head (outbufs));
763 }
764
765 GST_OBJECT_LOCK (filter);
766 filter->flushing = FALSE;
767 GST_OBJECT_UNLOCK (filter);
768 break;
769 }
770 case GST_EVENT_FLUSH_STOP:
771 /* if we have any buffers left in the queue, unref them until the queue
772 * is empty */
773
774 forward = TRUE;
775
776 GST_OBJECT_LOCK (filter);
777 gst_field_analysis_reset (filter);
778 GST_OBJECT_UNLOCK (filter);
779 break;
780 case GST_EVENT_CAPS:
781 {
782 GstCaps *caps;
783
784 forward = FALSE;
785
786 gst_event_parse_caps (event, &caps);
787 gst_field_analysis_update_format (filter, caps);
788 ret = gst_pad_set_caps (filter->srcpad, caps);
789 gst_event_unref (event);
790 break;
791 }
792 default:
793 forward = TRUE;
794 break;
795 }
796
797 if (forward) {
798 ret = gst_pad_event_default (pad, parent, event);
799 }
800
801 return ret;
802 }
803
804
805 static gfloat
same_parity_sad(GstFieldAnalysis * filter,FieldAnalysisFields (* history)[2])806 same_parity_sad (GstFieldAnalysis * filter, FieldAnalysisFields (*history)[2])
807 {
808 gint j;
809 gfloat sum;
810 guint8 *f1j, *f2j;
811
812 const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
813 const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
814 const gint stride0x2 =
815 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
816 const gint stride1x2 =
817 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
818 const guint32 noise_floor = filter->noise_floor;
819
820 f1j =
821 GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
822 0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
823 0) +
824 (*history)[0].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame,
825 0);
826 f2j =
827 GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
828 0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
829 0) +
830 (*history)[1].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame,
831 0);
832
833 sum = 0.0f;
834 for (j = 0; j < (height >> 1); j++) {
835 guint32 tempsum = 0;
836 fieldanalysis_orc_same_parity_sad_planar_yuv (&tempsum, f1j, f2j,
837 noise_floor, width);
838 sum += tempsum;
839 f1j += stride0x2;
840 f2j += stride1x2;
841 }
842
843 return sum / (0.5f * width * height);
844 }
845
/* Field metric: mean sum-of-squared-differences between the two most recent
 * fields of the same parity.  Squared per-pixel differences below the
 * (squared) noise floor are ignored inside the Orc kernel.  Only the
 * luma/first component is examined. */
static gfloat
same_parity_ssd (GstFieldAnalysis * filter, FieldAnalysisFields (*history)[2])
{
  gint j;
  gfloat sum;
  guint8 *f1j, *f2j;

  const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
  const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
  /* double stride steps over the opposite-parity lines */
  const gint stride0x2 =
      GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
  const gint stride1x2 =
      GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
  /* noise floor needs to be squared for SSD */
  const guint32 noise_floor = filter->noise_floor * filter->noise_floor;

  /* point at the first line of the requested field parity in each frame */
  f1j =
      GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
      0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
      0) +
      (*history)[0].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame,
      0);
  f2j =
      GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
      0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
      0) +
      (*history)[1].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame,
      0);

  sum = 0.0f;
  for (j = 0; j < (height >> 1); j++) {
    guint32 tempsum = 0;
    fieldanalysis_orc_same_parity_ssd_planar_yuv (&tempsum, f1j, f2j,
        noise_floor, width);
    sum += tempsum;
    f1j += stride0x2;
    f2j += stride1x2;
  }

  return sum / (0.5f * width * height);  /* field is half height */
}
887
888 /* horizontal [1,4,1] diff between fields - is this a good idea or should the
889 * current sample be emphasised more or less? */
890 static gfloat
same_parity_3_tap(GstFieldAnalysis * filter,FieldAnalysisFields (* history)[2])891 same_parity_3_tap (GstFieldAnalysis * filter, FieldAnalysisFields (*history)[2])
892 {
893 gint i, j;
894 gfloat sum;
895 guint8 *f1j, *f2j;
896
897 const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
898 const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
899 const gint stride0x2 =
900 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
901 const gint stride1x2 =
902 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
903 const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
904 /* noise floor needs to be *6 for [1,4,1] */
905 const guint32 noise_floor = filter->noise_floor * 6;
906
907 f1j = GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame, 0) +
908 GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame, 0) +
909 (*history)[0].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame,
910 0);
911 f2j =
912 GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
913 0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
914 0) +
915 (*history)[1].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame,
916 0);
917
918 sum = 0.0f;
919 for (j = 0; j < (height >> 1); j++) {
920 guint32 tempsum = 0;
921 guint32 diff;
922
923 /* unroll first as it is a special case */
924 diff = abs (((f1j[0] << 2) + (f1j[incr] << 1))
925 - ((f2j[0] << 2) + (f2j[incr] << 1)));
926 if (diff > noise_floor)
927 sum += diff;
928
929 fieldanalysis_orc_same_parity_3_tap_planar_yuv (&tempsum, f1j, &f1j[incr],
930 &f1j[incr << 1], f2j, &f2j[incr], &f2j[incr << 1], noise_floor,
931 width - 1);
932 sum += tempsum;
933
934 /* unroll last as it is a special case */
935 i = width - 1;
936 diff = abs (((f1j[i - incr] << 1) + (f1j[i] << 2))
937 - ((f2j[i - incr] << 1) + (f2j[i] << 2)));
938 if (diff > noise_floor)
939 sum += diff;
940
941 f1j += stride0x2;
942 f2j += stride1x2;
943 }
944
945 return sum / ((6.0f / 2.0f) * width * height); /* 1 + 4 + 1 = 6; field is half height */
946 }
947
/* vertical [1,-3,4,-3,1] - same as is used in FieldDiff from TIVTC,
 * tritical's AVISynth IVTC filter */
/* 0th field's parity defines operation */
/* Computes a vertical 5-tap comb metric over the frame that would result from
 * weaving the 0th field of history[0] with the opposite-parity field of
 * history[1]. The two line walkers therefore advance with different strides
 * (stride0x2 vs stride1x2) depending on which source frame a given frame line
 * comes from. Returns the accumulated score normalised by the number of
 * samples; the /6 accounts for the tap weights summing to 6 in magnitude. */
static gfloat
opposite_parity_5_tap (GstFieldAnalysis * filter,
    FieldAnalysisFields (*history)[2])
{
  gint j;
  gfloat sum;
  guint8 *fjm2, *fjm1, *fj, *fjp1, *fjp2;
  guint32 tempsum;

  const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
  const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
  const gint stride0x2 =
      GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
  const gint stride1x2 =
      GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
  /* noise floor needs to be *6 for [1,-3,4,-3,1] */
  const guint32 noise_floor = filter->noise_floor * 6;

  sum = 0.0f;

  /* fj is line j of the combined frame made from the top field even lines of
   * field 0 and the bottom field odd lines from field 1
   * fjp1 is one line down from fj
   * fjm2 is two lines up from fj
   * fj with j == 0 is the 0th line of the top field
   * fj with j == 1 is the 0th line of the bottom field or the 1st field of
   * the frame*/

  /* unroll first line as it is a special case */
  /* which frame supplies the even (fj) vs odd (fjp1) lines depends on the
   * parity of the 0th history entry */
  if ((*history)[0].parity == TOP_FIELD) {
    fj = GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame, 0);
    fjp1 =
        GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
        0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0);
    fjp2 = fj + stride0x2;
  } else {
    fj = GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame, 0);
    fjp1 =
        GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
        0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0);
    fjp2 = fj + stride1x2;
  }

  /* at the top edge there are no lines above fj, so mirror the lines below
   * (fjp2, fjp1) into the fjm2/fjm1 positions */
  tempsum = 0;
  fieldanalysis_orc_opposite_parity_5_tap_planar_yuv (&tempsum, fjp2, fjp1, fj,
      fjp1, fjp2, noise_floor, width);
  sum += tempsum;

  for (j = 1; j < (height >> 1) - 1; j++) {
    /* shift everything down a line in the field of interest (means += stridex2) */
    fjm2 = fj;
    fjm1 = fjp1;
    fj = fjp2;
    /* fjp1/fjp2 come from alternating source frames, so each advances by its
     * own frame's stride */
    if ((*history)[0].parity == TOP_FIELD) {
      fjp1 += stride1x2;
      fjp2 += stride0x2;
    } else {
      fjp1 += stride0x2;
      fjp2 += stride1x2;
    }

    tempsum = 0;
    fieldanalysis_orc_opposite_parity_5_tap_planar_yuv (&tempsum, fjm2, fjm1,
        fj, fjp1, fjp2, noise_floor, width);
    sum += tempsum;
  }

  /* unroll the last line as it is a special case */
  /* shift everything down a line in the field of interest (means += stridex2) */
  fjm2 = fj;
  fjm1 = fjp1;
  fj = fjp2;

  /* at the bottom edge there are no lines below fj, so mirror fjm1/fjm2 into
   * the fjp1/fjp2 positions */
  tempsum = 0;
  fieldanalysis_orc_opposite_parity_5_tap_planar_yuv (&tempsum, fjm2, fjm1, fj,
      fjm1, fjm2, noise_floor, width);
  sum += tempsum;

  return sum / ((6.0f / 2.0f) * width * height);        /* 1 + 4 + 1 == 3 + 3 == 6; field is half height */
}
1035
1036 /* this metric was sourced from HandBrake but originally from transcode
1037 * the return value is the highest block score for the row of blocks */
1038 static inline guint64
block_score_for_row_32detect(GstFieldAnalysis * filter,FieldAnalysisFields (* history)[2],guint8 * base_fj,guint8 * base_fjp1)1039 block_score_for_row_32detect (GstFieldAnalysis * filter,
1040 FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1)
1041 {
1042 guint64 i, j;
1043 guint8 *comb_mask = filter->comb_mask;
1044 guint *block_scores = filter->block_scores;
1045 guint64 block_score;
1046 guint8 *fjm2, *fjm1, *fj, *fjp1;
1047 const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
1048 const gint stridex2 =
1049 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
1050 const guint64 block_width = filter->block_width;
1051 const guint64 block_height = filter->block_height;
1052 const gint64 spatial_thresh = filter->spatial_thresh;
1053 const gint width =
1054 GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) -
1055 (GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) % block_width);
1056
1057 fjm2 = base_fj - stridex2;
1058 fjm1 = base_fjp1 - stridex2;
1059 fj = base_fj;
1060 fjp1 = base_fjp1;
1061
1062 for (j = 0; j < block_height; j++) {
1063 /* we have to work one result ahead of ourselves which results in some small
1064 * peculiarities below */
1065 gint diff1, diff2;
1066
1067 diff1 = fj[0] - fjm1[0];
1068 diff2 = fj[0] - fjp1[0];
1069 /* change in the same direction */
1070 if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1071 || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1072 comb_mask[0] = abs (fj[0] - fjm2[0]) < 10 && abs (fj[0] - fjm1[0]) > 15;
1073 } else {
1074 comb_mask[0] = FALSE;
1075 }
1076
1077 for (i = 1; i < width; i++) {
1078 const guint64 idx = i * incr;
1079 const guint64 res_idx = (i - 1) / block_width;
1080
1081 diff1 = fj[idx] - fjm1[idx];
1082 diff2 = fj[idx] - fjp1[idx];
1083 if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1084 || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1085 comb_mask[i] = abs (fj[idx] - fjm2[idx]) < 10
1086 && abs (fj[idx] - fjm1[idx]) > 15;
1087 } else {
1088 comb_mask[i] = FALSE;
1089 }
1090
1091 if (i == 1 && comb_mask[i - 1] && comb_mask[i]) {
1092 /* left edge */
1093 block_scores[res_idx]++;
1094 } else if (i == width - 1) {
1095 /* right edge */
1096 if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i])
1097 block_scores[res_idx]++;
1098 if (comb_mask[i - 1] && comb_mask[i])
1099 block_scores[i / block_width]++;
1100 } else if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i]) {
1101 block_scores[res_idx]++;
1102 }
1103 }
1104 /* advance down a line */
1105 fjm2 = fjm1;
1106 fjm1 = fj;
1107 fj = fjp1;
1108 fjp1 = fjm1 + stridex2;
1109 }
1110
1111 block_score = 0;
1112 for (i = 0; i < width / block_width; i++) {
1113 if (block_scores[i] > block_score)
1114 block_score = block_scores[i];
1115 }
1116
1117 g_free (block_scores);
1118 g_free (comb_mask);
1119 return block_score;
1120 }
1121
1122 /* this metric was sourced from HandBrake but originally from
1123 * tritical's isCombedT Avisynth function
1124 * the return value is the highest block score for the row of blocks */
1125 static inline guint64
block_score_for_row_iscombed(GstFieldAnalysis * filter,FieldAnalysisFields (* history)[2],guint8 * base_fj,guint8 * base_fjp1)1126 block_score_for_row_iscombed (GstFieldAnalysis * filter,
1127 FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1)
1128 {
1129 guint64 i, j;
1130 guint8 *comb_mask = filter->comb_mask;
1131 guint *block_scores = filter->block_scores;
1132 guint64 block_score;
1133 guint8 *fjm1, *fj, *fjp1;
1134 const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
1135 const gint stridex2 =
1136 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
1137 const guint64 block_width = filter->block_width;
1138 const guint64 block_height = filter->block_height;
1139 const gint64 spatial_thresh = filter->spatial_thresh;
1140 const gint64 spatial_thresh_squared = spatial_thresh * spatial_thresh;
1141 const gint width =
1142 GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) -
1143 (GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) % block_width);
1144
1145 fjm1 = base_fjp1 - stridex2;
1146 fj = base_fj;
1147 fjp1 = base_fjp1;
1148
1149 for (j = 0; j < block_height; j++) {
1150 /* we have to work one result ahead of ourselves which results in some small
1151 * peculiarities below */
1152 gint diff1, diff2;
1153
1154 diff1 = fj[0] - fjm1[0];
1155 diff2 = fj[0] - fjp1[0];
1156 /* change in the same direction */
1157 if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1158 || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1159 comb_mask[0] =
1160 (fjm1[0] - fj[0]) * (fjp1[0] - fj[0]) > spatial_thresh_squared;
1161 } else {
1162 comb_mask[0] = FALSE;
1163 }
1164
1165 for (i = 1; i < width; i++) {
1166 const guint64 idx = i * incr;
1167 const guint64 res_idx = (i - 1) / block_width;
1168
1169 diff1 = fj[idx] - fjm1[idx];
1170 diff2 = fj[idx] - fjp1[idx];
1171 if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1172 || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1173 comb_mask[i] =
1174 (fjm1[idx] - fj[idx]) * (fjp1[idx] - fj[idx]) >
1175 spatial_thresh_squared;
1176 } else {
1177 comb_mask[i] = FALSE;
1178 }
1179
1180 if (i == 1 && comb_mask[i - 1] && comb_mask[i]) {
1181 /* left edge */
1182 block_scores[res_idx]++;
1183 } else if (i == width - 1) {
1184 /* right edge */
1185 if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i])
1186 block_scores[res_idx]++;
1187 if (comb_mask[i - 1] && comb_mask[i])
1188 block_scores[i / block_width]++;
1189 } else if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i]) {
1190 block_scores[res_idx]++;
1191 }
1192 }
1193 /* advance down a line */
1194 fjm1 = fj;
1195 fj = fjp1;
1196 fjp1 = fjm1 + stridex2;
1197 }
1198
1199 block_score = 0;
1200 for (i = 0; i < width / block_width; i++) {
1201 if (block_scores[i] > block_score)
1202 block_score = block_scores[i];
1203 }
1204
1205 g_free (block_scores);
1206 g_free (comb_mask);
1207 return block_score;
1208 }
1209
1210 /* this metric was sourced from HandBrake but originally from
1211 * tritical's isCombedT Avisynth function
1212 * the return value is the highest block score for the row of blocks */
1213 static inline guint64
block_score_for_row_5_tap(GstFieldAnalysis * filter,FieldAnalysisFields (* history)[2],guint8 * base_fj,guint8 * base_fjp1)1214 block_score_for_row_5_tap (GstFieldAnalysis * filter,
1215 FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1)
1216 {
1217 guint64 i, j;
1218 guint8 *comb_mask = filter->comb_mask;
1219 guint *block_scores = filter->block_scores;
1220 guint64 block_score;
1221 guint8 *fjm2, *fjm1, *fj, *fjp1, *fjp2;
1222 const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
1223 const gint stridex2 =
1224 GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
1225 const guint64 block_width = filter->block_width;
1226 const guint64 block_height = filter->block_height;
1227 const gint64 spatial_thresh = filter->spatial_thresh;
1228 const gint64 spatial_threshx6 = 6 * spatial_thresh;
1229 const gint width =
1230 GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) -
1231 (GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) % block_width);
1232
1233
1234 fjm2 = base_fj - stridex2;
1235 fjm1 = base_fjp1 - stridex2;
1236 fj = base_fj;
1237 fjp1 = base_fjp1;
1238 fjp2 = fj + stridex2;
1239
1240 for (j = 0; j < block_height; j++) {
1241 /* we have to work one result ahead of ourselves which results in some small
1242 * peculiarities below */
1243 gint diff1, diff2;
1244
1245 diff1 = fj[0] - fjm1[0];
1246 diff2 = fj[0] - fjp1[0];
1247 /* change in the same direction */
1248 if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1249 || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1250 comb_mask[0] =
1251 abs (fjm2[0] + (fj[0] << 2) + fjp2[0] - 3 * (fjm1[0] + fjp1[0])) >
1252 spatial_threshx6;
1253
1254 /* motion detection that needs previous and next frames
1255 this isn't really necessary, but acts as an optimisation if the
1256 additional delay isn't a problem
1257 if (motion_detection) {
1258 if (abs(fpj[idx] - fj[idx] ) > motion_thresh &&
1259 abs( fjm1[idx] - fnjm1[idx]) > motion_thresh &&
1260 abs( fjp1[idx] - fnjp1[idx]) > motion_thresh)
1261 motion++;
1262 if (abs( fj[idx] - fnj[idx]) > motion_thresh &&
1263 abs(fpjm1[idx] - fjm1[idx] ) > motion_thresh &&
1264 abs(fpjp1[idx] - fjp1[idx] ) > motion_thresh)
1265 motion++;
1266 } else {
1267 motion = 1;
1268 }
1269 */
1270 } else {
1271 comb_mask[0] = FALSE;
1272 }
1273
1274 for (i = 1; i < width; i++) {
1275 const guint64 idx = i * incr;
1276 const guint64 res_idx = (i - 1) / block_width;
1277
1278 diff1 = fj[idx] - fjm1[idx];
1279 diff2 = fj[idx] - fjp1[idx];
1280 if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1281 || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1282 comb_mask[i] =
1283 abs (fjm2[idx] + (fj[idx] << 2) + fjp2[idx] - 3 * (fjm1[idx] +
1284 fjp1[idx])) > spatial_threshx6;
1285 } else {
1286 comb_mask[i] = FALSE;
1287 }
1288
1289 if (i == 1 && comb_mask[i - 1] && comb_mask[i]) {
1290 /* left edge */
1291 block_scores[res_idx]++;
1292 } else if (i == width - 1) {
1293 /* right edge */
1294 if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i])
1295 block_scores[res_idx]++;
1296 if (comb_mask[i - 1] && comb_mask[i])
1297 block_scores[i / block_width]++;
1298 } else if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i]) {
1299 block_scores[res_idx]++;
1300 }
1301 }
1302 /* advance down a line */
1303 fjm2 = fjm1;
1304 fjm1 = fj;
1305 fj = fjp1;
1306 fjp1 = fjp2;
1307 fjp2 = fj + stridex2;
1308 }
1309
1310 block_score = 0;
1311 for (i = 0; i < width / block_width; i++) {
1312 if (block_scores[i] > block_score)
1313 block_score = block_scores[i];
1314 }
1315
1316 g_free (block_scores);
1317 g_free (comb_mask);
1318 return block_score;
1319 }
1320
/* a pass is made over the field using one of three comb-detection metrics
   and the results are then analysed block-wise. if the samples to the left
   and right are combed, they contribute to the block score. if the block
   score is above the given threshold, the frame is combed. if the block
   score is between half the threshold and the threshold, the block is
   slightly combed. if when analysis is complete, slight combing is detected
   that is returned. if any results are observed that are above the threshold,
   the function returns immediately */
/* 0th field's parity defines operation */
/* Returns 0.0 (not combed), 1.0 (slightly combed / interleaved => blend) or
 * 2.0 (combed, non-interleaved => deinterlace). */
static gfloat
opposite_parity_windowed_comb (GstFieldAnalysis * filter,
    FieldAnalysisFields (*history)[2])
{
  gint j;
  gboolean slightly_combed;

  const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
  const gint stride = GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0);
  const guint64 block_thresh = filter->block_thresh;
  const guint64 block_height = filter->block_height;
  guint8 *base_fj, *base_fjp1;

  /* select which frame supplies the even (fj) and odd (fjp1) lines of the
   * woven frame, based on the parity of the 0th history entry */
  if ((*history)[0].parity == TOP_FIELD) {
    base_fj =
        GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame, 0);
    base_fjp1 =
        GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
        0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0);
  } else {
    base_fj =
        GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame, 0);
    base_fjp1 =
        GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
        0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
        0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0);
  }

  /* we operate on a row of blocks of height block_height through each iteration */
  /* NOTE(review): block_height is guint64, so the loop bound promotes to
   * unsigned; if ignored_lines + block_height > height this would underflow
   * and over-run - presumably the caller guarantees sane sizes, confirm */
  slightly_combed = FALSE;
  for (j = 0; j <= height - filter->ignored_lines - block_height;
      j += block_height) {
    guint64 line_offset = (filter->ignored_lines + j) * stride;
    /* NOTE(review): block_score_for_row returns guint64 but is stored in a
     * guint here; truncation is only possible for implausibly large scores */
    guint block_score =
        filter->block_score_for_row (filter, history, base_fj + line_offset,
        base_fjp1 + line_offset);

    if (block_score > (block_thresh >> 1)
        && block_score <= block_thresh) {
      /* blend if nothing more combed comes along */
      slightly_combed = TRUE;
    } else if (block_score > block_thresh) {
      /* definitely combed: decision depends on whether the content is
       * flagged as interleaved */
      if (GST_VIDEO_INFO_INTERLACE_MODE (&(*history)[0].frame.info) ==
          GST_VIDEO_INTERLACE_MODE_INTERLEAVED) {
        return 1.0f;            /* blend */
      } else {
        return 2.0f;            /* deinterlace */
      }
    }
  }

  return (gfloat) slightly_combed;      /* TRUE means blend, else don't */
}
1386
/* this is where the magic happens
 *
 * the buffer incoming to the chain function (buf_to_queue) is added to the
 * internal queue and then should no longer be used until it is popped from the
 * queue.
 *
 * analysis is performed on the incoming buffer (peeked from the queue) and the
 * previous buffer using two classes of metrics making up five individual
 * scores.
 *
 * there are two same-parity comparisons: top of current with top of previous
 * and bottom of current with bottom of previous
 *
 * there are three opposing parity comparisons: top of current with bottom of
 * _current_, top of current with bottom of previous and bottom of current with
 * top of previous.
 *
 * from the results of these comparisons we can use some rather complex logic to
 * identify the state of the previous buffer, decorate and return it and
 * identify some preliminary state of the current buffer.
 *
 * the returned buffer has a ref on it (it has come from _make_metadata_writable
 * that was called on an incoming buffer that was queued and then popped) */
static GstBuffer *
gst_field_analysis_process_buffer (GstFieldAnalysis * filter,
    GstBuffer ** buf_to_queue)
{
  /* res0/1 correspond to f0/1 */
  FieldAnalysis *res0, *res1;
  FieldAnalysisFields history[2];
  GstBuffer *outbuf = NULL;

  /* move previous result to index 1 */
  filter->frames[1] = filter->frames[0];

  if (!gst_video_frame_map (&filter->frames[0].frame, &filter->vinfo,
          *buf_to_queue, GST_MAP_READ)) {
    GST_ERROR_OBJECT (filter, "Failed to map buffer: %" GST_PTR_FORMAT,
        *buf_to_queue);
    return NULL;
  }
  filter->nframes++;
  /* note that we have a ref and mapping the buffer takes a ref so to destroy a
   * buffer we need to unmap it and unref it */

  res0 = &filter->frames[0].results;    /* results for current frame */
  res1 = &filter->frames[1].results;    /* results for previous frame */

  history[0].frame = filter->frames[0].frame;
  /* we do it like this because the first frame has no predecessor so this is
   * the only result we can get for it */
  /* (nframes was just incremented, so this branch always runs; it is kept as
   * a guard symmetric with the >= 2 case below) */
  if (filter->nframes >= 1) {
    history[1].frame = filter->frames[0].frame;
    history[0].parity = TOP_FIELD;
    history[1].parity = BOTTOM_FIELD;
    /* compare the fields within the buffer, if the buffer exhibits combing it
     * could be interlaced or a mixed telecine frame */
    res0->f = filter->same_frame (filter, &history);
    /* the cross-frame scores are unknown until a second frame arrives */
    res0->t = res0->b = res0->t_b = res0->b_t = G_MAXFLOAT;
    if (filter->nframes == 1)
      GST_DEBUG_OBJECT (filter, "Scores: f %f, t , b , t_b , b_t ", res0->f);
    if (res0->f <= filter->frame_thresh) {
      res0->conclusion = FIELD_ANALYSIS_PROGRESSIVE;
    } else {
      res0->conclusion = FIELD_ANALYSIS_INTERLACED;
    }
    res0->holding = -1;         /* needed fields unknown */
    res0->drop = FALSE;
  }
  if (filter->nframes >= 2) {
    guint telecine_matches;
    gboolean first_buffer = filter->first_buffer;

    filter->first_buffer = FALSE;

    history[1].frame = filter->frames[1].frame;

    /* compare the top and bottom fields to the previous frame */
    history[0].parity = TOP_FIELD;
    history[1].parity = TOP_FIELD;
    res0->t = filter->same_field (filter, &history);
    history[0].parity = BOTTOM_FIELD;
    history[1].parity = BOTTOM_FIELD;
    res0->b = filter->same_field (filter, &history);

    /* compare the top field from this frame to the bottom of the previous for
     * for combing (and vice versa) */
    history[0].parity = TOP_FIELD;
    history[1].parity = BOTTOM_FIELD;
    res0->t_b = filter->same_frame (filter, &history);
    history[0].parity = BOTTOM_FIELD;
    history[1].parity = TOP_FIELD;
    res0->b_t = filter->same_frame (filter, &history);

    GST_DEBUG_OBJECT (filter,
        "Scores: f %f, t %f, b %f, t_b %f, b_t %f", res0->f,
        res0->t, res0->b, res0->t_b, res0->b_t);

    /* analysis */
    /* build a bitmask of which field combinations look like matches */
    telecine_matches = 0;
    if (res0->t_b <= filter->frame_thresh)
      telecine_matches |= FIELD_ANALYSIS_TOP_BOTTOM;
    if (res0->b_t <= filter->frame_thresh)
      telecine_matches |= FIELD_ANALYSIS_BOTTOM_TOP;
    /* normally if there is a top or bottom field match, it is significantly
     * smaller than the other match - try 10% */
    if (res0->t <= filter->field_thresh || res0->t * (100 / 10) < res0->b)
      telecine_matches |= FIELD_ANALYSIS_TOP_MATCH;
    if (res0->b <= filter->field_thresh || res0->b * (100 / 10) < res0->t)
      telecine_matches |= FIELD_ANALYSIS_BOTTOM_MATCH;

    if (telecine_matches & (FIELD_ANALYSIS_TOP_MATCH |
            FIELD_ANALYSIS_BOTTOM_MATCH)) {
      /* we have a repeated field => some kind of telecine */
      if (res1->f <= filter->frame_thresh) {
        /* prev P */
        if ((telecine_matches & FIELD_ANALYSIS_TOP_MATCH)
            && (telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH)) {
          /* prev P, cur repeated => cur P */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_PROGRESSIVE;
          res0->holding = 1 + BOTH_FIELDS;
          /* push prev P, RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else {
          /* prev P, cur t xor b matches => cur TCM */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          /* hold non-repeated: if bottom match, hold top = 1 + 0 */
          res0->holding = 1 + !(telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH);
          /* push prev P */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      } else {
        /* prev !P */
        gboolean b, t;

        if (res0->f <= filter->frame_thresh) {
          /* cur P */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_PROGRESSIVE;
          res0->holding = 1 + BOTH_FIELDS;
        } else {
          /* cur !P */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          if (telecine_matches & FIELD_ANALYSIS_TOP_MATCH
              && telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH) {
            /* cur t && b */
            res0->holding = 0;
          } else {
            /* cur t xor b; hold non-repeated */
            res0->holding =
                1 + !(telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH);
          }
        }

        /* decode what the previous frame is currently holding: -1 means
         * unknown, so treat both fields as candidates */
        if (res1->holding == -1) {
          b = t = TRUE;
        } else {
          b = res1->holding == 1 + BOTTOM_FIELD;
          t = res1->holding == 1 + TOP_FIELD;
        }

        if ((t && telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH) || (b
                && telecine_matches & FIELD_ANALYSIS_TOP_MATCH)) {
          if (t && telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH) {
            res1->holding = 1 + TOP_FIELD;
          } else if (b && telecine_matches & FIELD_ANALYSIS_TOP_MATCH) {
            res1->holding = 1 + BOTTOM_FIELD;
          }
          /* push 1F held field */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else if (res0->f > filter->frame_thresh && ((t
                    && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP) || (b
                    && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM))) {
          if (t && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP) {
            res1->holding = 1 + TOP_FIELD;
          } else if (b && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM) {
            res1->holding = 1 + BOTTOM_FIELD;
          }
          res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          /* hold the opposite field to the one held in the last frame */
          res0->holding = 1 + (res1->holding == 1 + TOP_FIELD);
          /* push 1F held field */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else if (first_buffer && (telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP
                || telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM)) {
          /* non-matched field is an orphan in the first buffer - push orphan as 1F */
          res1->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          /* if prev b matched, prev t is orphan */
          res1->holding = 1 + !(telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM);
          /* push 1F held field */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else if (res1->holding == 1 + BOTH_FIELDS || res1->holding == -1) {
          /* holding both fields, push prev as is */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else {
          /* push prev as is with RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      }
    } else if (res0->f <= filter->frame_thresh) {
      /* cur P */
      res0->conclusion = FIELD_ANALYSIS_PROGRESSIVE;
      res0->holding = 1 + BOTH_FIELDS;
      if (res1->holding == 1 + BOTH_FIELDS || res1->holding == -1) {
        /* holding both fields, push prev as is */
        outbuf =
            gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
            res1->drop);
      } else if (res1->holding > 0) {
        /* holding one field, push prev 1F held */
        outbuf =
            gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
            res1->conclusion, res1->drop);
      } else {
        /* unknown or no fields held, push prev as is with RFF */
        /* this will push unknown as drop - should be pushed as not drop? */
        res1->drop = TRUE;
        outbuf =
            gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
            res1->drop);
      }
    } else {
      /* cur !P */
      if (telecine_matches & (FIELD_ANALYSIS_TOP_BOTTOM |
              FIELD_ANALYSIS_BOTTOM_TOP)) {
        /* cross-parity match => TCM */
        gboolean b, t;

        if (res1->holding == -1) {
          b = t = TRUE;
        } else {
          b = res1->holding == 1 + BOTTOM_FIELD;
          t = res1->holding == 1 + TOP_FIELD;
        }

        res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
        /* leave holding as unknown */
        if (res1->holding == 1 + BOTH_FIELDS) {
          /* prev P/TCP/I [or TCM repeated (weird case)] */
          /* push prev as is */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else if ((t && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM) || (b
                && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP)) {
          /* held is opposite to matched => need both field from prev */
          /* if t_b, hold bottom from prev and top from current, else vice-versa */
          res1->holding = 1 + ! !(telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM);
          res0->holding = 1 + !(telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM);
          /* push prev TCM */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else if ((res1->holding > 0 && res1->holding != 1 + BOTH_FIELDS) || (t
                && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP) || (b
                && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM)) {
          /* held field is needed, push prev 1F held */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else {
          /* holding none or unknown */
          /* push prev as is with RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      } else {
        /* cur I */
        res0->conclusion = FIELD_ANALYSIS_INTERLACED;
        res0->holding = 1 + BOTH_FIELDS;
        /* push prev appropriately */
        res1->drop = res1->holding <= 0;
        if (res1->holding != 0) {
          res1->drop = FALSE;
          if (res1->holding == 1 + BOTH_FIELDS || res1->holding == -1) {
            /* push prev as is */
            outbuf =
                gst_field_analysis_decorate (filter, -1, FALSE,
                res1->conclusion, res1->drop);
          } else {
            /* push prev 1F held */
            outbuf =
                gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
                res1->conclusion, res1->drop);
          }
        } else {
          /* push prev as is with RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      }
    }
  }

  /* log the conclusion reached for the current frame */
  switch (res0->conclusion) {
    case FIELD_ANALYSIS_PROGRESSIVE:
      GST_DEBUG_OBJECT (filter, "Conclusion: PROGRESSIVE");
      break;
    case FIELD_ANALYSIS_INTERLACED:
      GST_DEBUG_OBJECT (filter, "Conclusion: INTERLACED");
      break;
    case FIELD_ANALYSIS_TELECINE_PROGRESSIVE:
      GST_DEBUG_OBJECT (filter, "Conclusion: TC PROGRESSIVE");
      break;
    case FIELD_ANALYSIS_TELECINE_MIXED:
      GST_DEBUG_OBJECT (filter, "Conclusion: TC MIXED %s",
          res0->holding ==
          1 + BOTH_FIELDS ? "top and bottom" : res0->holding ==
          1 + BOTTOM_FIELD ? "bottom" : "top");
      break;
    default:
      GST_DEBUG_OBJECT (filter, "Invalid conclusion! This is a bug!");
      break;
  }

  return outbuf;
}
1723
/* Sink-pad chain function: feeds each incoming buffer into the analysis
 * queue and pushes at most one decorated output buffer downstream.
 *
 * Ownership/locking contract (see why-comments below):
 *   - we hold a ref on @buf on entry; it is either handed to the internal
 *     queue by _process_buffer () or unreffed on the early-out paths;
 *   - the object lock guards all filter state, but must NOT be held across
 *     gst_pad_push (), so it is dropped for every push and re-acquired,
 *     re-checking filter->flushing each time. */
static GstFlowReturn
gst_field_analysis_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstFieldAnalysis *filter;
  GstBuffer *outbuf = NULL;

  filter = GST_FIELDANALYSIS (parent);

  GST_OBJECT_LOCK (filter);
  if (filter->flushing) {
    GST_DEBUG_OBJECT (filter, "We are flushing.");
    /* we have a ref on buf so it must be unreffed */
    goto unref_unlock_ret;
  }

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GST_DEBUG_OBJECT (filter, "Discont: flushing");
    /* we should have a ref on outbuf, either because we had one when it entered
     * the queue and _make_metadata_writable () inside _decorate () returned
     * the same buffer or because it returned a new buffer on which we have one
     * ref */
    outbuf = gst_field_analysis_flush_one (filter, NULL);

    if (outbuf) {
      /* we give away our ref on outbuf here */
      GST_OBJECT_UNLOCK (filter);
      ret = gst_pad_push (filter->srcpad, outbuf);
      GST_OBJECT_LOCK (filter);
      /* state may have changed while the lock was released — a flush could
       * have started during the push, so check again before continuing */
      if (filter->flushing) {
        GST_DEBUG_OBJECT (filter, "We are flushing. outbuf already pushed.");
        /* we have a ref on buf so it must be unreffed */
        goto unref_unlock_ret;
      }
    }

    /* discard any remaining queued frames; analysis restarts at the discont */
    gst_field_analysis_clear_frames (filter);

    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (filter,
          "Pushing of flushed buffer failed with return %d", ret);
      /* we have a ref on buf so it must be unreffed */
      goto unref_unlock_ret;
    } else {
      /* the flushed buffer was pushed successfully; reset outbuf so the
       * normal processing path below starts clean */
      outbuf = NULL;
    }
  }

  /* after this function, buf has been pushed to the internal queue and its ref
   * retained there and we have a ref on outbuf */
  outbuf = gst_field_analysis_process_buffer (filter, &buf);

  GST_OBJECT_UNLOCK (filter);

  /* here we give up our ref on outbuf */
  if (outbuf)
    ret = gst_pad_push (filter->srcpad, outbuf);

  return ret;

unref_unlock_ret:
  /* we must unref the input buffer here */
  gst_buffer_unref (buf);
  GST_OBJECT_UNLOCK (filter);
  return ret;
}
1791
1792 static GstStateChangeReturn
gst_field_analysis_change_state(GstElement * element,GstStateChange transition)1793 gst_field_analysis_change_state (GstElement * element,
1794 GstStateChange transition)
1795 {
1796 GstStateChangeReturn ret;
1797 GstFieldAnalysis *filter = GST_FIELDANALYSIS (element);
1798
1799 switch (transition) {
1800 case GST_STATE_CHANGE_NULL_TO_READY:
1801 break;
1802 case GST_STATE_CHANGE_READY_TO_PAUSED:
1803 break;
1804 case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
1805 break;
1806 default:
1807 break;
1808 }
1809
1810 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1811 if (ret != GST_STATE_CHANGE_SUCCESS)
1812 return ret;
1813
1814 switch (transition) {
1815 case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
1816 break;
1817 case GST_STATE_CHANGE_PAUSED_TO_READY:
1818 gst_field_analysis_reset (filter);
1819 break;
1820 case GST_STATE_CHANGE_READY_TO_NULL:
1821 default:
1822 break;
1823 }
1824
1825 return ret;
1826 }
1827
1828 static void
gst_field_analysis_finalize(GObject * object)1829 gst_field_analysis_finalize (GObject * object)
1830 {
1831 GstFieldAnalysis *filter = GST_FIELDANALYSIS (object);
1832
1833 gst_field_analysis_reset (filter);
1834
1835 G_OBJECT_CLASS (parent_class)->finalize (object);
1836 }
1837
1838
1839 static gboolean
fieldanalysis_init(GstPlugin * fieldanalysis)1840 fieldanalysis_init (GstPlugin * fieldanalysis)
1841 {
1842 GST_DEBUG_CATEGORY_INIT (gst_field_analysis_debug, "fieldanalysis",
1843 0, "Video field analysis");
1844
1845 return gst_element_register (fieldanalysis, "fieldanalysis", GST_RANK_NONE,
1846 GST_TYPE_FIELDANALYSIS);
1847 }
1848
/* Plugin descriptor: exports the entry point that makes this plugin (and
 * thereby the "fieldanalysis" element registered in fieldanalysis_init)
 * discoverable by the GStreamer plugin loader. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    fieldanalysis,
    "Video field analysis",
    fieldanalysis_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)