/* GStreamer
 * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#  include "config.h"
#endif

#include <string.h>
#include <stdlib.h>
#include <stdio.h>

#include <gst/rtp/gstrtpbuffer.h>
#include <gst/video/video.h>

#include "gstrtph263ppay.h"
#include "gstrtputils.h"

#define DEFAULT_FRAGMENTATION_MODE   GST_FRAGMENTATION_MODE_NORMAL

enum
{
  PROP_0,
  PROP_FRAGMENTATION_MODE
};

#define GST_TYPE_FRAGMENTATION_MODE (gst_fragmentation_mode_get_type())
static GType
gst_fragmentation_mode_get_type (void)
{
  static GType fragmentation_mode_type = 0;
  static const GEnumValue fragmentation_mode[] = {
    {GST_FRAGMENTATION_MODE_NORMAL, "Normal", "normal"},
    {GST_FRAGMENTATION_MODE_SYNC, "Fragment at sync points", "sync"},
    {0, NULL, NULL},
  };

  if (!fragmentation_mode_type) {
    fragmentation_mode_type =
        g_enum_register_static ("GstFragmentationMode", fragmentation_mode);
  }
  return fragmentation_mode_type;
}


GST_DEBUG_CATEGORY_STATIC (rtph263ppay_debug);
#define GST_CAT_DEFAULT rtph263ppay_debug

static GstStaticPadTemplate gst_rtp_h263p_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h263, variant = (string) itu")
    );

/*
 * We also return these in getcaps() as required by the SDP caps
 *
 * width = (int) [16, 4096]
 * height = (int) [16, 4096]
 * "annex-f = (boolean) {true, false},"
 * "annex-i = (boolean) {true, false},"
 * "annex-j = (boolean) {true, false},"
 * "annex-l = (boolean) {true, false},"
 * "annex-t = (boolean) {true, false},"
 * "annex-v = (boolean) {true, false}")
 */


static GstStaticPadTemplate gst_rtp_h263p_pay_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-1998\"; "
        "application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-2000\"")
    );

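/* A typical sender pipeline might look like this (illustrative only; it
 * assumes an H.263+ encoder such as avenc_h263p from gst-libav is available):
 *
 *   gst-launch-1.0 videotestsrc ! avenc_h263p ! rtph263ppay ! udpsink port=5000
 */
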
static void gst_rtp_h263p_pay_finalize (GObject * object);

static void gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static gboolean gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload,
    GstCaps * caps);
static GstCaps *gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload,
    GstPad * pad, GstCaps * filter);
static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload *
    payload, GstBuffer * buffer);

#define gst_rtp_h263p_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpH263PPay, gst_rtp_h263p_pay, GST_TYPE_RTP_BASE_PAYLOAD);

static void
gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstRTPBasePayloadClass *gstrtpbasepayload_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;

  gobject_class->finalize = gst_rtp_h263p_pay_finalize;
  gobject_class->set_property = gst_rtp_h263p_pay_set_property;
  gobject_class->get_property = gst_rtp_h263p_pay_get_property;

  gstrtpbasepayload_class->set_caps = gst_rtp_h263p_pay_setcaps;
  gstrtpbasepayload_class->get_caps = gst_rtp_h263p_pay_sink_getcaps;
  gstrtpbasepayload_class->handle_buffer = gst_rtp_h263p_pay_handle_buffer;

  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_FRAGMENTATION_MODE, g_param_spec_enum ("fragmentation-mode",
          "Fragmentation Mode",
          "Packet Fragmentation Mode", GST_TYPE_FRAGMENTATION_MODE,
          DEFAULT_FRAGMENTATION_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h263p_pay_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h263p_pay_sink_template);

  gst_element_class_set_static_metadata (gstelement_class, "RTP H263 payloader",
      "Codec/Payloader/Network/RTP",
      "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
      "Wim Taymans <wim.taymans@gmail.com>");

  GST_DEBUG_CATEGORY_INIT (rtph263ppay_debug, "rtph263ppay",
      0, "rtph263ppay (RFC 4629)");
}

static void
gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
{
  rtph263ppay->adapter = gst_adapter_new ();

  rtph263ppay->fragmentation_mode = DEFAULT_FRAGMENTATION_MODE;
}

static void
gst_rtp_h263p_pay_finalize (GObject * object)
{
  GstRtpH263PPay *rtph263ppay;

  rtph263ppay = GST_RTP_H263P_PAY (object);

  g_object_unref (rtph263ppay->adapter);
  rtph263ppay->adapter = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static gboolean
gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
  gboolean res;
  GstCaps *peercaps;
  gchar *encoding_name = NULL;

  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

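  /* See which encoding-name downstream can accept: if the peer explicitly
   * offers H263-2000 we use that, otherwise fall back to H263-1998
   * (RFC 4629). */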
  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
  if (peercaps) {
    GstCaps *tcaps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
    GstCaps *intersect = gst_caps_intersect (peercaps, tcaps);
    gst_caps_unref (tcaps);

    gst_caps_unref (peercaps);
    if (!gst_caps_is_empty (intersect)) {
      GstStructure *s = gst_caps_get_structure (intersect, 0);
      encoding_name = g_strdup (gst_structure_get_string (s, "encoding-name"));
    }
    gst_caps_unref (intersect);
  }

  if (!encoding_name)
    encoding_name = g_strdup ("H263-1998");

  gst_rtp_base_payload_set_options (payload, "video", TRUE,
      (gchar *) encoding_name, 90000);
  res = gst_rtp_base_payload_set_outcaps (payload, NULL);
  g_free (encoding_name);

  return res;
}

static GstCaps *
caps_append (GstCaps * caps, GstStructure * in_s, guint x, guint y, guint mpi)
{
  GstStructure *s;

  if (!in_s)
    return caps;

  if (mpi < 1 || mpi > 32)
    return caps;

  s = gst_structure_copy (in_s);

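  /* The MPI (Minimum Picture Interval) from the SDP limits the frame rate to
   * 30000 / (1001 * MPI), i.e. roughly 29.97 / MPI frames per second. */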
  gst_structure_set (s,
      "width", GST_TYPE_INT_RANGE, 1, x,
      "height", GST_TYPE_INT_RANGE, 1, y,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001 * mpi, NULL);

  caps = gst_caps_merge_structure (caps, s);

  return caps;
}


static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL, *templ;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);

  /* if we're just outputting to udpsink or fakesink or so, we should also
   * accept any input compatible with our sink template caps */
  if (!peercaps || gst_caps_is_any (peercaps)) {
    if (peercaps)
      gst_caps_unref (peercaps);
    caps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
    goto done;
  }

  /* We basically need to differentiate two use-cases here: One where there's
   * a capsfilter after the payloader with caps created from an SDP; in this
   * case the filter caps are fixed and we want to signal to an encoder what
   * we want it to produce. The second case is simply payloader ! depayloader
   * where we are dealing with the depayloader's template caps. In this case
   * we should accept any input compatible with our sink template caps. */
  if (!gst_caps_is_fixed (peercaps)) {
    gst_caps_unref (peercaps);
    caps =
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
    goto done;
  }

  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  intersect = gst_caps_intersect (peercaps, templ);
  gst_caps_unref (peercaps);
  gst_caps_unref (templ);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
            v = FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
             * (Profile 1)
             * Baseline + Annexes I, J, L.4 and T
             */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2)
             * Baseline + Annex F
             */
            i = j = l = t = f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile
             * Baseline + Annexes I, J, T
             */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
             * Baseline + Annexes I, J, T, V, W.6.3.8
             */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5)
             * Baseline + Annexes F, I, J, L.4, T, D, U
             */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6)
             * Baseline + Annexes F, I, J, L.4, T, D, U and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, K with arbitrary slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7)
             * Baseline + Annexes F, I, J, L.4, T, D, U, W.6.3.11
             */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8)
             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
             * K with arbitrary slice ordering
             */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }


        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);

          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }


        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);


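        /* Map the H.263 Annex X level to the maximum picture size and frame
         * rate it allows (roughly QCIF/CIF/4CIF based limits); higher levels
         * permit larger pictures and higher rates. */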
        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else {
          caps = gst_caps_merge_structure (caps, new_s);
        }

      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        caps = gst_caps_merge_structure (caps, new_s);
      }
    } else {
      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
      /* FIXME: ffmpeg supports Appendix K too, how do we express it?
       *   guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;

      str = gst_structure_get_string (s, "f");
      if (str && !strcmp (str, "1"))
        f = TRUE;

      str = gst_structure_get_string (s, "i");
      if (str && !strcmp (str, "1"))
        i = TRUE;

      str = gst_structure_get_string (s, "j");
      if (str && !strcmp (str, "1"))
        j = TRUE;

      str = gst_structure_get_string (s, "t");
      if (str && !strcmp (str, "1"))
        t = TRUE;

      if (f || i || j || t) {
        GValue list = { 0 };
        GValue vstr = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&vstr, G_TYPE_STRING);

        g_value_set_static_string (&vstr, "h263");
        gst_value_list_append_value (&list, &vstr);
        g_value_set_static_string (&vstr, "h263p");
        gst_value_list_append_value (&list, &vstr);
        g_value_unset (&vstr);

        gst_structure_set_value (new_s, "h263version", &list);
        g_value_unset (&list);
      } else {
        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
      }

      if (!f)
        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!i)
        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!j)
        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!t)
        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);


      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          if (xmax % 4 && ymax % 4 && mpi >= 1 && mpi <= 32) {
            caps = caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }

      if (added)
        gst_structure_free (new_s);
      else
        caps = gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

done:

  if (filter) {
    GstCaps *tmp;

    GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
        GST_PTR_FORMAT, caps, filter);
    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }

  return caps;
}


static void
gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstRtpH263PPay *rtph263ppay;

  rtph263ppay = GST_RTP_H263P_PAY (object);

  switch (prop_id) {
    case PROP_FRAGMENTATION_MODE:
      rtph263ppay->fragmentation_mode = g_value_get_enum (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstRtpH263PPay *rtph263ppay;

  rtph263ppay = GST_RTP_H263P_PAY (object);

  switch (prop_id) {
    case PROP_FRAGMENTATION_MODE:
      g_value_set_enum (value, rtph263ppay->fragmentation_mode);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static GstFlowReturn
gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
{
  guint avail;
  GstBufferList *list = NULL;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret;
  gboolean fragmented = FALSE;

  avail = gst_adapter_available (rtph263ppay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;

  fragmented = FALSE;
  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
   * buffer */
  /* With fragmentation mode GST_FRAGMENTATION_MODE_NORMAL:
   *  This algorithm implements the follow-on packets method of packetization,
   *  which assumes a low packet loss network.
   * With fragmentation mode GST_FRAGMENTATION_MODE_SYNC:
   *  This algorithm splits large frames at synchronisation points (segments)
   *  (see RFC 4629 section 6). It would be interesting to have a property such
   *  as network quality to select between both packetization methods */
  /* TODO Add VRC support (see RFC 4629 section 5.2) */

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    gint header_len;
    guint next_gop = 0;
    gboolean found_gob = FALSE;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *payload_buf;

    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
      /* start after the first possible GOB */

      /* Check if we have a GOB, EOS or EOSBS */
      /* FIXME EOS and EOSBS packets should never contain any GOBs and vice-versa */
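      /* The mask/pattern pair matches a byte-aligned H.263 start code prefix:
       * 16 zero bits followed by a 1 (PSC, GBSC, EOS and EOSBS all start with
       * this prefix). */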
      next_gop =
          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
          0x00008000, 0, avail);
      if (next_gop == 0) {
        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
        found_gob = TRUE;
      }

      /* Find the next GOB and cut the packet accordingly */
      /* TODO we should get as many GOBs as possible until MTU is reached, this
       * code seems to just get one GOB per packet */
      if (next_gop == 0 && avail > 3)
        next_gop =
            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
            0x00008000, 3, avail - 3);
      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d", next_gop);
      if (next_gop == -1)
        next_gop = 0;
    }

    /* for picture start frames (non-fragmented), we need to remove the first
     * two 0x00 bytes and set P=1 */
    if (!fragmented || found_gob) {
      gst_adapter_flush (rtph263ppay->adapter, 2);
      avail -= 2;
    }
    header_len = 2;

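    /* Clamp the payload size to what still fits in one MTU after the RTP
     * header and the 2-byte H.263+ payload header. */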
    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));

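    /* In sync fragmentation mode, also cut at the next GOB start code so that
     * every packet begins at a resynchronisation point. */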
    if (next_gop > 0)
      towrite = MIN (next_gop, towrite);

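    /* Allocate an RTP buffer with room for the 2-byte payload header only;
     * the H.263 data taken from the adapter is appended below as a separate
     * buffer. */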
    outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
    /* last fragment gets the marker bit set */
    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);

    payload = gst_rtp_buffer_get_payload (&rtp);

    /*  0                   1
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |   RR    |P|V|   PLEN    |PEBIT|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    /* set P=1 when the payload starts at a picture or GOB start code (whose
     * two leading zero bytes were stripped above); continuation fragments
     * get P=0 */
    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;
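    /* V, PLEN and PEBIT are all zero: no VRC byte and no extra picture header
     * are included in this payload header */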

    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
    gst_rtp_buffer_unmap (&rtp);

    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
    gst_rtp_copy_video_meta (rtph263ppay, outbuf, payload_buf);
    outbuf = gst_buffer_append (outbuf, payload_buf);
    avail -= towrite;

    /* If more data is available and this is our first iteration,
     * we create a buffer list and remember that we're fragmented.
     *
     * If we're fragmented already, add buffers to the previously
     * created buffer list.
     *
     * Otherwise fragmented will be FALSE and we just push the single output
     * buffer, and no list is allocated.
     */
    if (avail && !fragmented) {
      fragmented = TRUE;
      list = gst_buffer_list_new ();
      gst_buffer_list_add (list, outbuf);
    } else if (fragmented) {
      gst_buffer_list_add (list, outbuf);
    }
  }

  if (fragmented) {
    ret =
        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
        list);
  } else {
    ret =
        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
  }

  return ret;
}

static GstFlowReturn
gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload * payload,
    GstBuffer * buffer)
{
  GstRtpH263PPay *rtph263ppay;
  GstFlowReturn ret;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  rtph263ppay->first_timestamp = GST_BUFFER_PTS (buffer);
  rtph263ppay->first_duration = GST_BUFFER_DURATION (buffer);

  /* we always encode and flush a full picture */
  gst_adapter_push (rtph263ppay->adapter, buffer);
  ret = gst_rtp_h263p_pay_flush (rtph263ppay);

  return ret;
}

gboolean
gst_rtp_h263p_pay_plugin_init (GstPlugin * plugin)
{
  return gst_element_register (plugin, "rtph263ppay",
      GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_PAY);
}