1 /* GStreamer
2  * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3  * Copyright (C) <2003> David Schleef <ds@schleef.org>
4  * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Library General Public
8  * License as published by the Free Software Foundation; either
9  * version 2 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Library General Public License for more details.
15  *
16  * You should have received a copy of the GNU Library General Public
17  * License along with this library; if not, write to the
18  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
19  * Boston, MA 02110-1301, USA.
20  */
21 
22 /*
23  * This file was (probably) generated from gstvideobalance.c,
24  * gstvideobalance.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
25  */
26 
27 /**
28  * SECTION:element-videobalance
29  *
30  * Adjusts brightness, contrast, hue, saturation on a video stream.
31  *
32  * <refsect2>
33  * <title>Example launch line</title>
34  * |[
35  * gst-launch-1.0 videotestsrc ! videobalance saturation=0.0 ! videoconvert ! ximagesink
36  * ]| This pipeline converts the image to black and white by setting the
37  * saturation to 0.0.
38  * </refsect2>
39  */
40 
41 #ifdef HAVE_CONFIG_H
42 #include "config.h"
43 #endif
44 
45 #include <gst/math-compat.h>
46 
47 #include "gstvideobalance.h"
48 #include <string.h>
49 
50 #include <gst/video/colorbalance.h>
51 
GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
#define GST_CAT_DEFAULT videobalance_debug

/* GstVideoBalance properties */
/* Neutral defaults: with exactly these values the element is passthrough
 * (see gst_video_balance_is_passthrough). */
#define DEFAULT_PROP_CONTRAST		1.0
#define DEFAULT_PROP_BRIGHTNESS		0.0
#define DEFAULT_PROP_HUE		0.0
#define DEFAULT_PROP_SATURATION		1.0

/* GObject property ids. */
enum
{
  PROP_0,
  PROP_CONTRAST,
  PROP_BRIGHTNESS,
  PROP_HUE,
  PROP_SATURATION
};

/* Formats that have a process function in set_info(); any other raw
 * video format is only accepted while the element is passthrough. */
#define PROCESSING_CAPS \
  "{ AYUV, ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, " \
  "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, " \
  "I420, YV12, IYUV, Y41B, NV12, NV21 }"

static GstStaticPadTemplate gst_video_balance_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
        "video/x-raw(ANY)")
    );

static GstStaticPadTemplate gst_video_balance_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
        "video/x-raw(ANY)")
    );

static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
    iface);

static void gst_video_balance_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_video_balance_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

#define gst_video_balance_parent_class parent_class
/* Register the type, deriving from GstVideoFilter and implementing the
 * GstColorBalance interface. */
G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
    GST_TYPE_VIDEO_FILTER,
    G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
        gst_video_balance_colorbalance_init));
104 
105 /*
106  * look-up tables (LUT).
107  */
108 static void
gst_video_balance_update_tables(GstVideoBalance * vb)109 gst_video_balance_update_tables (GstVideoBalance * vb)
110 {
111   gint i, j;
112   gdouble y, u, v, hue_cos, hue_sin;
113 
114   /* Y */
115   for (i = 0; i < 256; i++) {
116     y = 16 + ((i - 16) * vb->contrast + vb->brightness * 255);
117     if (y < 0)
118       y = 0;
119     else if (y > 255)
120       y = 255;
121     vb->tabley[i] = rint (y);
122   }
123 
124   hue_cos = cos (G_PI * vb->hue);
125   hue_sin = sin (G_PI * vb->hue);
126 
127   /* U/V lookup tables are 2D, since we need both U/V for each table
128    * separately. */
129   for (i = -128; i < 128; i++) {
130     for (j = -128; j < 128; j++) {
131       u = 128 + ((i * hue_cos + j * hue_sin) * vb->saturation);
132       v = 128 + ((-i * hue_sin + j * hue_cos) * vb->saturation);
133       if (u < 0)
134         u = 0;
135       else if (u > 255)
136         u = 255;
137       if (v < 0)
138         v = 0;
139       else if (v > 255)
140         v = 255;
141       vb->tableu[i + 128][j + 128] = rint (u);
142       vb->tablev[i + 128][j + 128] = rint (v);
143     }
144   }
145 }
146 
147 static gboolean
gst_video_balance_is_passthrough(GstVideoBalance * videobalance)148 gst_video_balance_is_passthrough (GstVideoBalance * videobalance)
149 {
150   return videobalance->contrast == 1.0 &&
151       videobalance->brightness == 0.0 &&
152       videobalance->hue == 0.0 && videobalance->saturation == 1.0;
153 }
154 
155 static void
gst_video_balance_update_properties(GstVideoBalance * videobalance)156 gst_video_balance_update_properties (GstVideoBalance * videobalance)
157 {
158   gboolean passthrough;
159   GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
160 
161   GST_OBJECT_LOCK (videobalance);
162   passthrough = gst_video_balance_is_passthrough (videobalance);
163   if (!passthrough)
164     gst_video_balance_update_tables (videobalance);
165   GST_OBJECT_UNLOCK (videobalance);
166 
167   gst_base_transform_set_passthrough (base, passthrough);
168 }
169 
170 static void
gst_video_balance_planar_yuv(GstVideoBalance * videobalance,GstVideoFrame * frame)171 gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
172     GstVideoFrame * frame)
173 {
174   gint x, y;
175   guint8 *ydata;
176   guint8 *udata, *vdata;
177   gint ystride, ustride, vstride;
178   gint width, height;
179   gint width2, height2;
180   guint8 *tabley = videobalance->tabley;
181   guint8 **tableu = videobalance->tableu;
182   guint8 **tablev = videobalance->tablev;
183 
184   width = GST_VIDEO_FRAME_WIDTH (frame);
185   height = GST_VIDEO_FRAME_HEIGHT (frame);
186 
187   ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
188   ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
189 
190   for (y = 0; y < height; y++) {
191     guint8 *yptr;
192 
193     yptr = ydata + y * ystride;
194     for (x = 0; x < width; x++) {
195       *yptr = tabley[*yptr];
196       yptr++;
197     }
198   }
199 
200   width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
201   height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
202 
203   udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
204   vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
205   ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
206   vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
207 
208   for (y = 0; y < height2; y++) {
209     guint8 *uptr, *vptr;
210     guint8 u1, v1;
211 
212     uptr = udata + y * ustride;
213     vptr = vdata + y * vstride;
214 
215     for (x = 0; x < width2; x++) {
216       u1 = *uptr;
217       v1 = *vptr;
218 
219       *uptr++ = tableu[u1][v1];
220       *vptr++ = tablev[u1][v1];
221     }
222   }
223 }
224 
225 static void
gst_video_balance_semiplanar_yuv(GstVideoBalance * videobalance,GstVideoFrame * frame)226 gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
227     GstVideoFrame * frame)
228 {
229   gint x, y;
230   guint8 *ydata;
231   guint8 *uvdata;
232   gint ystride, uvstride;
233   gint width, height;
234   gint width2, height2;
235   guint8 *tabley = videobalance->tabley;
236   guint8 **tableu = videobalance->tableu;
237   guint8 **tablev = videobalance->tablev;
238   gint upos, vpos;
239 
240   width = GST_VIDEO_FRAME_WIDTH (frame);
241   height = GST_VIDEO_FRAME_HEIGHT (frame);
242 
243   ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
244   ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
245 
246   for (y = 0; y < height; y++) {
247     guint8 *yptr;
248 
249     yptr = ydata + y * ystride;
250     for (x = 0; x < width; x++) {
251       *yptr = tabley[*yptr];
252       yptr++;
253     }
254   }
255 
256   width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
257   height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
258 
259   uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
260   uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
261 
262   upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
263   vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;
264 
265   for (y = 0; y < height2; y++) {
266     guint8 *uvptr;
267     guint8 u1, v1;
268 
269     uvptr = uvdata + y * uvstride;
270 
271     for (x = 0; x < width2; x++) {
272       u1 = uvptr[upos];
273       v1 = uvptr[vpos];
274 
275       uvptr[upos] = tableu[u1][v1];
276       uvptr[vpos] = tablev[u1][v1];
277       uvptr += 2;
278     }
279   }
280 }
281 
282 static void
gst_video_balance_packed_yuv(GstVideoBalance * videobalance,GstVideoFrame * frame)283 gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
284     GstVideoFrame * frame)
285 {
286   gint x, y, stride;
287   guint8 *ydata, *udata, *vdata;
288   gint yoff, uoff, voff;
289   gint width, height;
290   gint width2, height2;
291   guint8 *tabley = videobalance->tabley;
292   guint8 **tableu = videobalance->tableu;
293   guint8 **tablev = videobalance->tablev;
294 
295   width = GST_VIDEO_FRAME_WIDTH (frame);
296   height = GST_VIDEO_FRAME_HEIGHT (frame);
297 
298   stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
299   ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
300   yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
301 
302   for (y = 0; y < height; y++) {
303     guint8 *yptr;
304 
305     yptr = ydata + y * stride;
306     for (x = 0; x < width; x++) {
307       *yptr = tabley[*yptr];
308       yptr += yoff;
309     }
310   }
311 
312   width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
313   height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
314 
315   udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
316   vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
317   uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
318   voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
319 
320   for (y = 0; y < height2; y++) {
321     guint8 *uptr, *vptr;
322     guint8 u1, v1;
323 
324     uptr = udata + y * stride;
325     vptr = vdata + y * stride;
326 
327     for (x = 0; x < width2; x++) {
328       u1 = *uptr;
329       v1 = *vptr;
330 
331       *uptr = tableu[u1][v1];
332       *vptr = tablev[u1][v1];
333 
334       uptr += uoff;
335       vptr += voff;
336     }
337   }
338 }
339 
/* Fixed-point YCbCr -> RGB conversion coefficients, scaled by 256 (the
 * APPLY_MATRIX macro below shifts the result right by 8). Each row is
 * { c1, c2, c3, offset }. The "8bit_sdtv" naming suggests BT.601/SDTV
 * coefficients — NOTE(review): confirm against the colorimetry spec. */
static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
  298, 0, 409, -57068,
  298, -100, -208, 34707,
  298, 516, 0, -70870,
};

/* The matching RGB -> YCbCr direction, same fixed-point layout. */
static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
  66, 129, 25, 4096,
  -38, -74, 112, 32768,
  112, -94, -18, 32768,
};

/* Dot product of (v1, v2, v3, 1) with row o of matrix m, dropping the
 * 8 fractional bits. Arguments are evaluated multiple times. */
#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
353 
354 static void
gst_video_balance_packed_rgb(GstVideoBalance * videobalance,GstVideoFrame * frame)355 gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
356     GstVideoFrame * frame)
357 {
358   gint i, j, height;
359   gint width, stride, row_wrap;
360   gint pixel_stride;
361   guint8 *data;
362   gint offsets[3];
363   gint r, g, b;
364   gint y, u, v;
365   gint u_tmp, v_tmp;
366   guint8 *tabley = videobalance->tabley;
367   guint8 **tableu = videobalance->tableu;
368   guint8 **tablev = videobalance->tablev;
369 
370   width = GST_VIDEO_FRAME_WIDTH (frame);
371   height = GST_VIDEO_FRAME_HEIGHT (frame);
372 
373   offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
374   offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
375   offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
376 
377   data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
378   stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
379 
380   pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
381   row_wrap = stride - pixel_stride * width;
382 
383   for (i = 0; i < height; i++) {
384     for (j = 0; j < width; j++) {
385       r = data[offsets[0]];
386       g = data[offsets[1]];
387       b = data[offsets[2]];
388 
389       y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
390       u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
391       v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
392 
393       y = CLAMP (y, 0, 255);
394       u_tmp = CLAMP (u_tmp, 0, 255);
395       v_tmp = CLAMP (v_tmp, 0, 255);
396 
397       y = tabley[y];
398       u = tableu[u_tmp][v_tmp];
399       v = tablev[u_tmp][v_tmp];
400 
401       r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
402       g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
403       b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
404 
405       data[offsets[0]] = CLAMP (r, 0, 255);
406       data[offsets[1]] = CLAMP (g, 0, 255);
407       data[offsets[2]] = CLAMP (b, 0, 255);
408       data += pixel_stride;
409     }
410     data += row_wrap;
411   }
412 }
413 
414 /* get notified of caps and plug in the correct process function */
415 static gboolean
gst_video_balance_set_info(GstVideoFilter * vfilter,GstCaps * incaps,GstVideoInfo * in_info,GstCaps * outcaps,GstVideoInfo * out_info)416 gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
417     GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
418 {
419   GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
420 
421   GST_DEBUG_OBJECT (videobalance,
422       "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
423 
424   videobalance->process = NULL;
425 
426   switch (GST_VIDEO_INFO_FORMAT (in_info)) {
427     case GST_VIDEO_FORMAT_I420:
428     case GST_VIDEO_FORMAT_YV12:
429     case GST_VIDEO_FORMAT_Y41B:
430     case GST_VIDEO_FORMAT_Y42B:
431     case GST_VIDEO_FORMAT_Y444:
432       videobalance->process = gst_video_balance_planar_yuv;
433       break;
434     case GST_VIDEO_FORMAT_YUY2:
435     case GST_VIDEO_FORMAT_UYVY:
436     case GST_VIDEO_FORMAT_AYUV:
437     case GST_VIDEO_FORMAT_YVYU:
438       videobalance->process = gst_video_balance_packed_yuv;
439       break;
440     case GST_VIDEO_FORMAT_NV12:
441     case GST_VIDEO_FORMAT_NV21:
442       videobalance->process = gst_video_balance_semiplanar_yuv;
443       break;
444     case GST_VIDEO_FORMAT_ARGB:
445     case GST_VIDEO_FORMAT_ABGR:
446     case GST_VIDEO_FORMAT_RGBA:
447     case GST_VIDEO_FORMAT_BGRA:
448     case GST_VIDEO_FORMAT_xRGB:
449     case GST_VIDEO_FORMAT_xBGR:
450     case GST_VIDEO_FORMAT_RGBx:
451     case GST_VIDEO_FORMAT_BGRx:
452     case GST_VIDEO_FORMAT_RGB:
453     case GST_VIDEO_FORMAT_BGR:
454       videobalance->process = gst_video_balance_packed_rgb;
455       break;
456     default:
457       if (!gst_video_balance_is_passthrough (videobalance))
458         goto unknown_format;
459       break;
460   }
461 
462   return TRUE;
463 
464   /* ERRORS */
465 unknown_format:
466   {
467     GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
468     return FALSE;
469   }
470 }
471 
472 static void
gst_video_balance_before_transform(GstBaseTransform * base,GstBuffer * buf)473 gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
474 {
475   GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
476   GstClockTime timestamp, stream_time;
477 
478   timestamp = GST_BUFFER_TIMESTAMP (buf);
479   stream_time =
480       gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
481 
482   GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
483       GST_TIME_ARGS (timestamp));
484 
485   if (GST_CLOCK_TIME_IS_VALID (stream_time))
486     gst_object_sync_values (GST_OBJECT (balance), stream_time);
487 }
488 
489 static GstCaps *
gst_video_balance_transform_caps(GstBaseTransform * trans,GstPadDirection direction,GstCaps * caps,GstCaps * filter)490 gst_video_balance_transform_caps (GstBaseTransform * trans,
491     GstPadDirection direction, GstCaps * caps, GstCaps * filter)
492 {
493   GstVideoBalance *balance = GST_VIDEO_BALANCE (trans);
494   GstCaps *ret;
495 
496   if (!gst_video_balance_is_passthrough (balance)) {
497     static GstStaticCaps raw_caps =
498         GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS));
499 
500     caps = gst_caps_intersect (caps, gst_static_caps_get (&raw_caps));
501 
502     if (filter) {
503       ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
504       gst_caps_unref (caps);
505     } else {
506       ret = caps;
507     }
508   } else {
509     if (filter) {
510       ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
511     } else {
512       ret = gst_caps_ref (caps);
513     }
514   }
515 
516   return ret;
517 }
518 
519 static GstFlowReturn
gst_video_balance_transform_frame_ip(GstVideoFilter * vfilter,GstVideoFrame * frame)520 gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
521     GstVideoFrame * frame)
522 {
523   GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
524 
525   if (!videobalance->process)
526     goto not_negotiated;
527 
528   GST_OBJECT_LOCK (videobalance);
529   videobalance->process (videobalance, frame);
530   GST_OBJECT_UNLOCK (videobalance);
531 
532   return GST_FLOW_OK;
533 
534   /* ERRORS */
535 not_negotiated:
536   {
537     GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
538     return GST_FLOW_NOT_NEGOTIATED;
539   }
540 }
541 
542 static void
gst_video_balance_finalize(GObject * object)543 gst_video_balance_finalize (GObject * object)
544 {
545   GList *channels = NULL;
546   GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
547 
548   g_free (balance->tableu[0]);
549 
550   channels = balance->channels;
551   while (channels) {
552     GstColorBalanceChannel *channel = channels->data;
553 
554     g_object_unref (channel);
555     channels->data = NULL;
556     channels = g_list_next (channels);
557   }
558 
559   if (balance->channels)
560     g_list_free (balance->channels);
561 
562   G_OBJECT_CLASS (parent_class)->finalize (object);
563 }
564 
/* Class init: install properties, element metadata, pad templates and
 * the base-transform / video-filter vfuncs. */
static void
gst_video_balance_class_init (GstVideoBalanceClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

  GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
      "videobalance");

  gobject_class->finalize = gst_video_balance_finalize;
  gobject_class->set_property = gst_video_balance_set_property;
  gobject_class->get_property = gst_video_balance_get_property;

  /* All four properties are GST_PARAM_CONTROLLABLE so they can be
   * animated; before_transform syncs them to stream time. */
  g_object_class_install_property (gobject_class, PROP_CONTRAST,
      g_param_spec_double ("contrast", "Contrast", "contrast",
          0.0, 2.0, DEFAULT_PROP_CONTRAST,
          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
      g_param_spec_double ("brightness", "Brightness", "brightness", -1.0, 1.0,
          DEFAULT_PROP_BRIGHTNESS,
          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_HUE,
      g_param_spec_double ("hue", "Hue", "hue", -1.0, 1.0, DEFAULT_PROP_HUE,
          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_SATURATION,
      g_param_spec_double ("saturation", "Saturation", "saturation", 0.0, 2.0,
          DEFAULT_PROP_SATURATION,
          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_set_static_metadata (gstelement_class, "Video balance",
      "Filter/Effect/Video",
      "Adjusts brightness, contrast, hue, saturation on a video stream",
      "David Schleef <ds@schleef.org>");

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_video_balance_sink_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_video_balance_src_template);

  trans_class->before_transform =
      GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
  /* Skip the in-place transform entirely while passthrough is enabled. */
  trans_class->transform_ip_on_passthrough = FALSE;
  trans_class->transform_caps =
      GST_DEBUG_FUNCPTR (gst_video_balance_transform_caps);

  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
  vfilter_class->transform_frame_ip =
      GST_DEBUG_FUNCPTR (gst_video_balance_transform_frame_ip);
}
616 
617 static void
gst_video_balance_init(GstVideoBalance * videobalance)618 gst_video_balance_init (GstVideoBalance * videobalance)
619 {
620   const gchar *channels[4] = { "HUE", "SATURATION",
621     "BRIGHTNESS", "CONTRAST"
622   };
623   gint i;
624 
625   /* Initialize propertiews */
626   videobalance->contrast = DEFAULT_PROP_CONTRAST;
627   videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
628   videobalance->hue = DEFAULT_PROP_HUE;
629   videobalance->saturation = DEFAULT_PROP_SATURATION;
630 
631   videobalance->tableu[0] = g_new (guint8, 256 * 256 * 2);
632   for (i = 0; i < 256; i++) {
633     videobalance->tableu[i] =
634         videobalance->tableu[0] + i * 256 * sizeof (guint8);
635     videobalance->tablev[i] =
636         videobalance->tableu[0] + 256 * 256 * sizeof (guint8) +
637         i * 256 * sizeof (guint8);
638   }
639 
640   gst_video_balance_update_properties (videobalance);
641 
642   /* Generate the channels list */
643   for (i = 0; i < G_N_ELEMENTS (channels); i++) {
644     GstColorBalanceChannel *channel;
645 
646     channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
647     channel->label = g_strdup (channels[i]);
648     channel->min_value = -1000;
649     channel->max_value = 1000;
650 
651     videobalance->channels = g_list_append (videobalance->channels, channel);
652   }
653 }
654 
655 static const GList *
gst_video_balance_colorbalance_list_channels(GstColorBalance * balance)656 gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
657 {
658   GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);
659 
660   g_return_val_if_fail (videobalance != NULL, NULL);
661   g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);
662 
663   return videobalance->channels;
664 }
665 
666 static void
gst_video_balance_colorbalance_set_value(GstColorBalance * balance,GstColorBalanceChannel * channel,gint value)667 gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
668     GstColorBalanceChannel * channel, gint value)
669 {
670   GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
671   gdouble new_val;
672   gboolean changed = FALSE;
673 
674   g_return_if_fail (vb != NULL);
675   g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
676   g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
677   g_return_if_fail (channel->label != NULL);
678 
679   GST_OBJECT_LOCK (vb);
680   if (!g_ascii_strcasecmp (channel->label, "HUE")) {
681     new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
682     changed = new_val != vb->hue;
683     vb->hue = new_val;
684   } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
685     new_val = (value + 1000.0) * 2.0 / 2000.0;
686     changed = new_val != vb->saturation;
687     vb->saturation = new_val;
688   } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
689     new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
690     changed = new_val != vb->brightness;
691     vb->brightness = new_val;
692   } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
693     new_val = (value + 1000.0) * 2.0 / 2000.0;
694     changed = new_val != vb->contrast;
695     vb->contrast = new_val;
696   }
697   GST_OBJECT_UNLOCK (vb);
698 
699   if (changed)
700     gst_video_balance_update_properties (vb);
701 
702   if (changed) {
703     gst_color_balance_value_changed (balance, channel,
704         gst_color_balance_get_value (balance, channel));
705   }
706 }
707 
708 static gint
gst_video_balance_colorbalance_get_value(GstColorBalance * balance,GstColorBalanceChannel * channel)709 gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
710     GstColorBalanceChannel * channel)
711 {
712   GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
713   gint value = 0;
714 
715   g_return_val_if_fail (vb != NULL, 0);
716   g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
717   g_return_val_if_fail (channel->label != NULL, 0);
718 
719   if (!g_ascii_strcasecmp (channel->label, "HUE")) {
720     value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
721   } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
722     value = vb->saturation * 2000.0 / 2.0 - 1000.0;
723   } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
724     value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
725   } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
726     value = vb->contrast * 2000.0 / 2.0 - 1000.0;
727   }
728 
729   return value;
730 }
731 
/* GstColorBalance iface: all processing here is done in software. */
static GstColorBalanceType
gst_video_balance_colorbalance_get_balance_type (GstColorBalance * balance)
{
  return GST_COLOR_BALANCE_SOFTWARE;
}
737 
/* Wire up the GstColorBalance interface vtable. */
static void
gst_video_balance_colorbalance_init (GstColorBalanceInterface * iface)
{
  iface->list_channels = gst_video_balance_colorbalance_list_channels;
  iface->set_value = gst_video_balance_colorbalance_set_value;
  iface->get_value = gst_video_balance_colorbalance_get_value;
  iface->get_balance_type = gst_video_balance_colorbalance_get_balance_type;
}
746 
747 static GstColorBalanceChannel *
gst_video_balance_find_channel(GstVideoBalance * balance,const gchar * label)748 gst_video_balance_find_channel (GstVideoBalance * balance, const gchar * label)
749 {
750   GList *l;
751 
752   for (l = balance->channels; l; l = l->next) {
753     GstColorBalanceChannel *channel = l->data;
754 
755     if (g_ascii_strcasecmp (channel->label, label) == 0)
756       return channel;
757   }
758   return NULL;
759 }
760 
/* GObject set_property: store the new value under the object lock,
 * refresh passthrough/LUT state, and mirror the change through the
 * GstColorBalance interface when the value actually changed. */
static void
gst_video_balance_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
  gdouble d;
  /* Label of the color-balance channel to notify; NULL when unchanged. */
  const gchar *label = NULL;

  GST_OBJECT_LOCK (balance);
  switch (prop_id) {
    case PROP_CONTRAST:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
          balance->contrast, d);
      if (d != balance->contrast)
        label = "CONTRAST";
      balance->contrast = d;
      break;
    case PROP_BRIGHTNESS:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
          balance->brightness, d);
      if (d != balance->brightness)
        label = "BRIGHTNESS";
      balance->brightness = d;
      break;
    case PROP_HUE:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
          d);
      if (d != balance->hue)
        label = "HUE";
      balance->hue = d;
      break;
    case PROP_SATURATION:
      d = g_value_get_double (value);
      GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
          balance->saturation, d);
      if (d != balance->saturation)
        label = "SATURATION";
      balance->saturation = d;
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  GST_OBJECT_UNLOCK (balance);
  /* Recompute passthrough and (if needed) the LUTs outside the lock. */
  gst_video_balance_update_properties (balance);

  if (label) {
    GstColorBalanceChannel *channel =
        gst_video_balance_find_channel (balance, label);
    gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
        gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
  }
}
818 
/* GObject get_property: plain reads of the four double properties.
 * NOTE(review): reads are done without the object lock — presumably
 * relying on atomic-enough double reads; confirm against project
 * conventions. */
static void
gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideoBalance *balance = GST_VIDEO_BALANCE (object);

  switch (prop_id) {
    case PROP_CONTRAST:
      g_value_set_double (value, balance->contrast);
      break;
    case PROP_BRIGHTNESS:
      g_value_set_double (value, balance->brightness);
      break;
    case PROP_HUE:
      g_value_set_double (value, balance->hue);
      break;
    case PROP_SATURATION:
      g_value_set_double (value, balance->saturation);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
843