1 /*
2 Copyright 2008-2013 David Robillard <http://drobilla.net>
3
4 Permission to use, copy, modify, and/or distribute this software for any
5 purpose with or without fee is hereby granted, provided that the above
6 copyright notice and this permission notice appear in all copies.
7
8 THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
/**
   @file util.h Helper functions for the LV2 Atom extension.

   Note that these functions are all static inline; do not take their address.

   This header is non-normative; it is provided for convenience.
*/
24
25 #ifndef LV2_ATOM_UTIL_H
26 #define LV2_ATOM_UTIL_H
27
28 #include <stdarg.h>
29 #include <stdint.h>
30 #include <string.h>
31
32 #include "atom.h"
33
34 #ifdef __cplusplus
35 extern "C" {
36 #else
37 # include <stdbool.h>
38 #endif
39
/** Round @p size up to the next multiple of 8 bytes (64-bit alignment). */
static inline uint32_t
lv2_atom_pad_size(uint32_t size)
{
	const uint32_t mask = 7U;
	return (size + mask) & ~mask;
}
46
47 /** Return the total size of @p atom, including the header. */
48 static inline uint32_t
lv2_atom_total_size(const LV2_Atom * atom)49 lv2_atom_total_size(const LV2_Atom* atom)
50 {
51 return (uint32_t)sizeof(LV2_Atom) + atom->size;
52 }
53
54 /** Return true iff @p atom is null. */
55 static inline bool
lv2_atom_is_null(const LV2_Atom * atom)56 lv2_atom_is_null(const LV2_Atom* atom)
57 {
58 return !atom || (atom->type == 0 && atom->size == 0);
59 }
60
61 /** Return true iff @p a is equal to @p b. */
62 static inline bool
lv2_atom_equals(const LV2_Atom * a,const LV2_Atom * b)63 lv2_atom_equals(const LV2_Atom* a, const LV2_Atom* b)
64 {
65 return (a == b) || ((a->type == b->type) &&
66 (a->size == b->size) &&
67 !memcmp(a + 1, b + 1, a->size));
68 }
69
70 /**
71 @name Sequence Iterator
72 @{
73 */
74
75 /** Get an iterator pointing to the first event in a Sequence body. */
76 static inline const LV2_Atom_Event*
lv2_atom_sequence_begin(const LV2_Atom_Sequence_Body * body)77 lv2_atom_sequence_begin(const LV2_Atom_Sequence_Body* body)
78 {
79 return (const LV2_Atom_Event*)(body + 1);
80 }
81
82 /** Get an iterator pointing to the end of a Sequence body. */
83 static inline LV2_Atom_Event*
lv2_atom_sequence_end(LV2_Atom_Sequence_Body * body,uint32_t size)84 lv2_atom_sequence_end(LV2_Atom_Sequence_Body* body, uint32_t size)
85 {
86 return (LV2_Atom_Event*)((uint8_t*)body + lv2_atom_pad_size(size));
87 }
88
89 /** Return true iff @p i has reached the end of @p body. */
90 static inline bool
lv2_atom_sequence_is_end(const LV2_Atom_Sequence_Body * body,uint32_t size,const LV2_Atom_Event * i)91 lv2_atom_sequence_is_end(const LV2_Atom_Sequence_Body* body,
92 uint32_t size,
93 const LV2_Atom_Event* i)
94 {
95 return (const uint8_t*)i >= ((const uint8_t*)body + size);
96 }
97
98 /** Return an iterator to the element following @p i. */
99 static inline const LV2_Atom_Event*
lv2_atom_sequence_next(const LV2_Atom_Event * i)100 lv2_atom_sequence_next(const LV2_Atom_Event* i)
101 {
102 return (const LV2_Atom_Event*)((const uint8_t*)i
103 + sizeof(LV2_Atom_Event)
104 + lv2_atom_pad_size(i->body.size));
105 }
106
/**
   A macro for iterating over all events in a Sequence.
   @param seq The sequence to iterate over
   @param iter The name of the iterator (declared as a const LV2_Atom_Event*)

   This macro is used similarly to a for loop (which it expands to), e.g.:
   @code
   LV2_ATOM_SEQUENCE_FOREACH(sequence, ev) {
       // Do something with ev (an LV2_Atom_Event*) here...
   }
   @endcode
*/
#define LV2_ATOM_SEQUENCE_FOREACH(seq, iter) \
	for (const LV2_Atom_Event* iter = lv2_atom_sequence_begin(&(seq)->body); \
	     !lv2_atom_sequence_is_end(&(seq)->body, (seq)->atom.size, (iter)); \
	     (iter) = lv2_atom_sequence_next(iter))
123
/** Like LV2_ATOM_SEQUENCE_FOREACH but for a headerless sequence body of @p size bytes. */
#define LV2_ATOM_SEQUENCE_BODY_FOREACH(body, size, iter) \
	for (const LV2_Atom_Event* iter = lv2_atom_sequence_begin(body); \
	     !lv2_atom_sequence_is_end(body, size, (iter)); \
	     (iter) = lv2_atom_sequence_next(iter))
129
130 /**
131 @}
132 @name Sequence Utilities
133 @{
134 */
135
136 /**
137 Clear all events from @p sequence.
138
139 This simply resets the size field, the other fields are left untouched.
140 */
141 static inline void
lv2_atom_sequence_clear(LV2_Atom_Sequence * seq)142 lv2_atom_sequence_clear(LV2_Atom_Sequence* seq)
143 {
144 seq->atom.size = sizeof(LV2_Atom_Sequence_Body);
145 }
146
147 /**
148 Append an event at the end of @p sequence.
149
150 @param seq Sequence to append to.
151 @param capacity Total capacity of the sequence atom
152 (e.g. as set by the host for sequence output ports).
153 @param event Event to write.
154
155 @return A pointer to the newly written event in @p seq,
156 or NULL on failure (insufficient space).
157 */
158 static inline LV2_Atom_Event*
lv2_atom_sequence_append_event(LV2_Atom_Sequence * seq,uint32_t capacity,const LV2_Atom_Event * event)159 lv2_atom_sequence_append_event(LV2_Atom_Sequence* seq,
160 uint32_t capacity,
161 const LV2_Atom_Event* event)
162 {
163 const uint32_t total_size = (uint32_t)sizeof(*event) + event->body.size;
164 if (capacity - seq->atom.size < total_size) {
165 return NULL;
166 }
167
168 LV2_Atom_Event* e = lv2_atom_sequence_end(&seq->body, seq->atom.size);
169 memcpy(e, event, total_size);
170
171 seq->atom.size += lv2_atom_pad_size(total_size);
172
173 return e;
174 }
175
176 /**
177 @}
178 @name Tuple Iterator
179 @{
180 */
181
182 /** Get an iterator pointing to the first element in @p tup. */
183 static inline const LV2_Atom*
lv2_atom_tuple_begin(const LV2_Atom_Tuple * tup)184 lv2_atom_tuple_begin(const LV2_Atom_Tuple* tup)
185 {
186 return (const LV2_Atom*)(LV2_ATOM_BODY_CONST(tup));
187 }
188
189 /** Return true iff @p i has reached the end of @p body. */
190 static inline bool
lv2_atom_tuple_is_end(const void * body,uint32_t size,const LV2_Atom * i)191 lv2_atom_tuple_is_end(const void* body, uint32_t size, const LV2_Atom* i)
192 {
193 return (const uint8_t*)i >= ((const uint8_t*)body + size);
194 }
195
196 /** Return an iterator to the element following @p i. */
197 static inline const LV2_Atom*
lv2_atom_tuple_next(const LV2_Atom * i)198 lv2_atom_tuple_next(const LV2_Atom* i)
199 {
200 return (const LV2_Atom*)(
201 (const uint8_t*)i + sizeof(LV2_Atom) + lv2_atom_pad_size(i->size));
202 }
203
/**
   A macro for iterating over all elements of a Tuple.
   @param tuple The tuple to iterate over
   @param iter The name of the iterator (declared as a const LV2_Atom*)

   This macro is used similarly to a for loop (which it expands to), e.g.:
   @code
   LV2_ATOM_TUPLE_FOREACH(tuple, elem) {
       // Do something with elem (an LV2_Atom*) here...
   }
   @endcode
*/
#define LV2_ATOM_TUPLE_FOREACH(tuple, iter) \
	for (const LV2_Atom* iter = lv2_atom_tuple_begin(tuple); \
	     !lv2_atom_tuple_is_end(LV2_ATOM_BODY_CONST(tuple), (tuple)->size, (iter)); \
	     (iter) = lv2_atom_tuple_next(iter))
220
/** Like LV2_ATOM_TUPLE_FOREACH but for a headerless tuple body of @p size bytes. */
#define LV2_ATOM_TUPLE_BODY_FOREACH(body, size, iter) \
	for (const LV2_Atom* iter = (const LV2_Atom*)body; \
	     !lv2_atom_tuple_is_end(body, size, (iter)); \
	     (iter) = lv2_atom_tuple_next(iter))
226
227 /**
228 @}
229 @name Object Iterator
230 @{
231 */
232
233 /** Return a pointer to the first property in @p body. */
234 static inline const LV2_Atom_Property_Body*
lv2_atom_object_begin(const LV2_Atom_Object_Body * body)235 lv2_atom_object_begin(const LV2_Atom_Object_Body* body)
236 {
237 return (const LV2_Atom_Property_Body*)(body + 1);
238 }
239
240 /** Return true iff @p i has reached the end of @p obj. */
241 static inline bool
lv2_atom_object_is_end(const LV2_Atom_Object_Body * body,uint32_t size,const LV2_Atom_Property_Body * i)242 lv2_atom_object_is_end(const LV2_Atom_Object_Body* body,
243 uint32_t size,
244 const LV2_Atom_Property_Body* i)
245 {
246 return (const uint8_t*)i >= ((const uint8_t*)body + size);
247 }
248
249 /** Return an iterator to the property following @p i. */
250 static inline const LV2_Atom_Property_Body*
lv2_atom_object_next(const LV2_Atom_Property_Body * i)251 lv2_atom_object_next(const LV2_Atom_Property_Body* i)
252 {
253 const LV2_Atom* const value = (const LV2_Atom*)(
254 (const uint8_t*)i + 2 * sizeof(uint32_t));
255 return (const LV2_Atom_Property_Body*)(
256 (const uint8_t*)i + lv2_atom_pad_size(
257 (uint32_t)sizeof(LV2_Atom_Property_Body) + value->size));
258 }
259
/**
   A macro for iterating over all properties of an Object.
   @param obj The object to iterate over
   @param iter The name of the iterator (declared as a const LV2_Atom_Property_Body*)

   This macro is used similarly to a for loop (which it expands to), e.g.:
   @code
   LV2_ATOM_OBJECT_FOREACH(object, prop) {
       // Do something with prop (an LV2_Atom_Property_Body*) here...
   }
   @endcode
*/
#define LV2_ATOM_OBJECT_FOREACH(obj, iter) \
	for (const LV2_Atom_Property_Body* iter = lv2_atom_object_begin(&(obj)->body); \
	     !lv2_atom_object_is_end(&(obj)->body, (obj)->atom.size, (iter)); \
	     (iter) = lv2_atom_object_next(iter))
276
/** Like LV2_ATOM_OBJECT_FOREACH but for a headerless object body of @p size bytes. */
#define LV2_ATOM_OBJECT_BODY_FOREACH(body, size, iter) \
	for (const LV2_Atom_Property_Body* iter = lv2_atom_object_begin(body); \
	     !lv2_atom_object_is_end(body, size, (iter)); \
	     (iter) = lv2_atom_object_next(iter))
282
283 /**
284 @}
285 @name Object Query
286 @{
287 */
288
/** A single entry in an Object query. */
typedef struct {
	uint32_t         key;   /**< Key to query (input set by user) */
	const LV2_Atom** value; /**< Found value (output set by query function) */
} LV2_Atom_Object_Query;

/** Sentinel (zero key) that terminates an array of LV2_Atom_Object_Query. */
static const LV2_Atom_Object_Query LV2_ATOM_OBJECT_QUERY_END = { 0, NULL };
296
297 /**
298 Get an object's values for various keys.
299
300 The value pointer of each item in @p query will be set to the location of
301 the corresponding value in @p object. Every value pointer in @p query MUST
302 be initialised to NULL. This function reads @p object in a single linear
303 sweep. By allocating @p query on the stack, objects can be "queried"
304 quickly without allocating any memory. This function is realtime safe.
305
306 This function can only do "flat" queries, it is not smart enough to match
307 variables in nested objects.
308
309 For example:
310 @code
311 const LV2_Atom* name = NULL;
312 const LV2_Atom* age = NULL;
313 LV2_Atom_Object_Query q[] = {
314 { urids.eg_name, &name },
315 { urids.eg_age, &age },
316 LV2_ATOM_OBJECT_QUERY_END
317 };
318 lv2_atom_object_query(obj, q);
319 // name and age are now set to the appropriate values in obj, or NULL.
320 @endcode
321 */
322 static inline int
lv2_atom_object_query(const LV2_Atom_Object * object,LV2_Atom_Object_Query * query)323 lv2_atom_object_query(const LV2_Atom_Object* object,
324 LV2_Atom_Object_Query* query)
325 {
326 int matches = 0;
327 int n_queries = 0;
328
329 /* Count number of query keys so we can short-circuit when done */
330 for (LV2_Atom_Object_Query* q = query; q->key; ++q) {
331 ++n_queries;
332 }
333
334 LV2_ATOM_OBJECT_FOREACH(object, prop) {
335 for (LV2_Atom_Object_Query* q = query; q->key; ++q) {
336 if (q->key == prop->key && !*q->value) {
337 *q->value = &prop->value;
338 if (++matches == n_queries) {
339 return matches;
340 }
341 break;
342 }
343 }
344 }
345 return matches;
346 }
347
348 /**
349 Body only version of lv2_atom_object_get().
350 */
351 static inline int
lv2_atom_object_body_get(uint32_t size,const LV2_Atom_Object_Body * body,...)352 lv2_atom_object_body_get(uint32_t size, const LV2_Atom_Object_Body* body, ...)
353 {
354 int matches = 0;
355 int n_queries = 0;
356
357 /* Count number of keys so we can short-circuit when done */
358 va_list args;
359 va_start(args, body);
360 for (n_queries = 0; va_arg(args, uint32_t); ++n_queries) {
361 if (!va_arg(args, const LV2_Atom**)) {
362 return -1;
363 }
364 }
365 va_end(args);
366
367 LV2_ATOM_OBJECT_BODY_FOREACH(body, size, prop) {
368 va_start(args, body);
369 for (int i = 0; i < n_queries; ++i) {
370 uint32_t qkey = va_arg(args, uint32_t);
371 const LV2_Atom** qval = va_arg(args, const LV2_Atom**);
372 if (qkey == prop->key && !*qval) {
373 *qval = &prop->value;
374 if (++matches == n_queries) {
375 return matches;
376 }
377 break;
378 }
379 }
380 va_end(args);
381 }
382 return matches;
383 }
384
385 /**
386 Variable argument version of lv2_atom_object_query().
387
388 This is nicer-looking in code, but a bit more error-prone since it is not
389 type safe and the argument list must be terminated.
390
391 The arguments should be a series of uint32_t key and const LV2_Atom** value
392 pairs, terminated by a zero key. The value pointers MUST be initialized to
393 NULL. For example:
394
395 @code
396 const LV2_Atom* name = NULL;
397 const LV2_Atom* age = NULL;
398 lv2_atom_object_get(obj,
399 uris.name_key, &name,
400 uris.age_key, &age,
401 0);
402 @endcode
403 */
404 static inline int
lv2_atom_object_get(const LV2_Atom_Object * object,...)405 lv2_atom_object_get(const LV2_Atom_Object* object, ...)
406 {
407 int matches = 0;
408 int n_queries = 0;
409
410 /* Count number of keys so we can short-circuit when done */
411 va_list args;
412 va_start(args, object);
413 for (n_queries = 0; va_arg(args, uint32_t); ++n_queries) {
414 if (!va_arg(args, const LV2_Atom**)) {
415 return -1;
416 }
417 }
418 va_end(args);
419
420 LV2_ATOM_OBJECT_FOREACH(object, prop) {
421 va_start(args, object);
422 for (int i = 0; i < n_queries; ++i) {
423 uint32_t qkey = va_arg(args, uint32_t);
424 const LV2_Atom** qval = va_arg(args, const LV2_Atom**);
425 if (qkey == prop->key && !*qval) {
426 *qval = &prop->value;
427 if (++matches == n_queries) {
428 return matches;
429 }
430 break;
431 }
432 }
433 va_end(args);
434 }
435 return matches;
436 }
437
438 /**
439 @}
440 */
441
442 #ifdef __cplusplus
443 } /* extern "C" */
444 #endif
445
446 #endif /* LV2_ATOM_UTIL_H */
447