1 /*         ______   ___    ___
2  *        /\  _  \ /\_ \  /\_ \
3  *        \ \ \L\ \\//\ \ \//\ \      __     __   _ __   ___
4  *         \ \  __ \ \ \ \  \ \ \   /'__`\ /'_ `\/\`'__\/ __`\
5  *          \ \ \/\ \ \_\ \_ \_\ \_/\  __//\ \L\ \ \ \//\ \L\ \
6  *           \ \_\ \_\/\____\/\____\ \____\ \____ \ \_\\ \____/
7  *            \/_/\/_/\/____/\/____/\/____/\/___L\ \/_/ \/___/
8  *                                           /\____/
9  *                                           \_/__/
10  *
11  *      Core primitive addon functions.
12  *
13  *
14  *      By Pavel Sountsov.
15  *
16  *      See readme.txt for copyright information.
17  */
18 
19 #include "allegro5/allegro.h"
20 #include "allegro5/allegro_primitives.h"
21 #include "allegro5/platform/alplatf.h"
22 #include "allegro5/internal/aintern.h"
23 #include "allegro5/internal/aintern_bitmap.h"
24 #include "allegro5/internal/aintern_exitfunc.h"
25 #include "allegro5/internal/aintern_pixels.h"
26 #include "allegro5/internal/aintern_prim.h"
27 #include "allegro5/internal/aintern_prim_directx.h"
28 #include "allegro5/internal/aintern_prim_opengl.h"
29 #include "allegro5/internal/aintern_prim_soft.h"
30 #include <math.h>
31 
32 #ifdef ALLEGRO_CFG_OPENGL
33 #include "allegro5/allegro_opengl.h"
34 #endif
35 
36 #ifndef ALLEGRO_DIRECT3D
37 #define ALLEGRO_DIRECT3D ALLEGRO_DIRECT3D_INTERNAL
38 #endif
39 
40 ALLEGRO_DEBUG_CHANNEL("primitives")
41 
/* True while the addon is initialized; checked (via ASSERT) by the public
 * entry points and reported by al_is_primitives_addon_initialized(). */
static bool addon_initialized = false;
43 
44 /* Function: al_init_primitives_addon
45  */
al_init_primitives_addon(void)46 bool al_init_primitives_addon(void)
47 {
48    bool ret = true;
49    ret &= _al_init_d3d_driver();
50 
51    addon_initialized = ret;
52 
53    _al_add_exit_func(al_shutdown_primitives_addon, "primitives_shutdown");
54 
55    return ret;
56 }
57 
/* Function: al_is_primitives_addon_initialized
 */
/* Returns whether al_init_primitives_addon has succeeded and the addon has
 * not been shut down since. */
bool al_is_primitives_addon_initialized(void)
{
   return addon_initialized;
}
64 
65 /* Function: al_shutdown_primitives_addon
66  */
al_shutdown_primitives_addon(void)67 void al_shutdown_primitives_addon(void)
68 {
69    _al_shutdown_d3d_driver();
70    addon_initialized = false;
71 }
72 
73 /* Function: al_draw_prim
74  */
al_draw_prim(const void * vtxs,const ALLEGRO_VERTEX_DECL * decl,ALLEGRO_BITMAP * texture,int start,int end,int type)75 int al_draw_prim(const void* vtxs, const ALLEGRO_VERTEX_DECL* decl,
76    ALLEGRO_BITMAP* texture, int start, int end, int type)
77 {
78    ALLEGRO_BITMAP *target;
79    int ret = 0;
80 
81    ASSERT(addon_initialized);
82    ASSERT(vtxs);
83    ASSERT(end >= start);
84    ASSERT(start >= 0);
85    ASSERT(type >= 0 && type < ALLEGRO_PRIM_NUM_TYPES);
86 
87    target = al_get_target_bitmap();
88 
89    /* In theory, if we ever get a camera concept for this addon, the transformation into
90     * view space should occur here
91     */
92 
93    if (al_get_bitmap_flags(target) & ALLEGRO_MEMORY_BITMAP ||
94        (texture && al_get_bitmap_flags(texture) & ALLEGRO_MEMORY_BITMAP) ||
95        _al_pixel_format_is_compressed(al_get_bitmap_format(target))) {
96       ret =  _al_draw_prim_soft(texture, vtxs, decl, start, end, type);
97    } else {
98       int flags = al_get_display_flags(_al_get_bitmap_display(target));
99       if (flags & ALLEGRO_OPENGL) {
100          ret =  _al_draw_prim_opengl(target, texture, vtxs, decl, start, end, type);
101       } else if (flags & ALLEGRO_DIRECT3D) {
102          ret =  _al_draw_prim_directx(target, texture, vtxs, decl, start, end, type);
103       }
104    }
105 
106    return ret;
107 }
108 
109 /* Function: al_draw_indexed_prim
110  */
al_draw_indexed_prim(const void * vtxs,const ALLEGRO_VERTEX_DECL * decl,ALLEGRO_BITMAP * texture,const int * indices,int num_vtx,int type)111 int al_draw_indexed_prim(const void* vtxs, const ALLEGRO_VERTEX_DECL* decl,
112    ALLEGRO_BITMAP* texture, const int* indices, int num_vtx, int type)
113 {
114    ALLEGRO_BITMAP *target;
115    int ret = 0;
116 
117    ASSERT(addon_initialized);
118    ASSERT(vtxs);
119    ASSERT(indices);
120    ASSERT(num_vtx > 0);
121    ASSERT(type >= 0 && type < ALLEGRO_PRIM_NUM_TYPES);
122 
123    target = al_get_target_bitmap();
124 
125    /* In theory, if we ever get a camera concept for this addon, the transformation into
126     * view space should occur here
127     */
128 
129    if (al_get_bitmap_flags(target) & ALLEGRO_MEMORY_BITMAP ||
130        (texture && al_get_bitmap_flags(texture) & ALLEGRO_MEMORY_BITMAP) ||
131        _al_pixel_format_is_compressed(al_get_bitmap_format(target))) {
132       ret =  _al_draw_prim_indexed_soft(texture, vtxs, decl, indices, num_vtx, type);
133    } else {
134       int flags = al_get_display_flags(_al_get_bitmap_display(target));
135       if (flags & ALLEGRO_OPENGL) {
136          ret =  _al_draw_prim_indexed_opengl(target, texture, vtxs, decl, indices, num_vtx, type);
137       } else if (flags & ALLEGRO_DIRECT3D) {
138          ret =  _al_draw_prim_indexed_directx(target, texture, vtxs, decl, indices, num_vtx, type);
139       }
140    }
141 
142    return ret;
143 }
144 
/* Returns 1 when the rectangle [x1, x1+w) x [y1, y1+h) overlaps the
 * currently locked region of bmp, 0 otherwise (or if bmp is not locked). */
int _al_bitmap_region_is_locked(ALLEGRO_BITMAP* bmp, int x1, int y1, int w, int h)
{
   ASSERT(bmp);

   if (!al_is_bitmap_locked(bmp))
      return 0;
   /* Separating-axis test: no overlap if the query rect lies entirely to
    * one side of the locked rect. */
   if (x1 + w <= bmp->lock_x || y1 + h <= bmp->lock_y)
      return 0;
   if (x1 >= bmp->lock_x + bmp->lock_w || y1 >= bmp->lock_y + bmp->lock_h)
      return 0;
   return 1;
}
155 
/* Function: al_get_allegro_primitives_version
 */
/* Returns the compiled-in Allegro version this addon was built against. */
uint32_t al_get_allegro_primitives_version(void)
{
   return ALLEGRO_VERSION_INT;
}
162 
163 /* Function: al_create_vertex_decl
164  */
al_create_vertex_decl(const ALLEGRO_VERTEX_ELEMENT * elements,int stride)165 ALLEGRO_VERTEX_DECL* al_create_vertex_decl(const ALLEGRO_VERTEX_ELEMENT* elements, int stride)
166 {
167    ALLEGRO_VERTEX_DECL* ret;
168    ALLEGRO_DISPLAY* display;
169    ALLEGRO_VERTEX_ELEMENT* e;
170    int flags;
171 
172    ASSERT(addon_initialized);
173 
174    ret = al_malloc(sizeof(ALLEGRO_VERTEX_DECL));
175    ret->elements = al_calloc(1, sizeof(ALLEGRO_VERTEX_ELEMENT) * ALLEGRO_PRIM_ATTR_NUM);
176    while(elements->attribute) {
177 #ifdef ALLEGRO_CFG_OPENGLES
178       if (elements->storage == ALLEGRO_PRIM_HALF_FLOAT_2 ||
179           elements->storage == ALLEGRO_PRIM_HALF_FLOAT_4) {
180          ALLEGRO_WARN("This platform does not support ALLEGRO_PRIM_HALF_FLOAT_2 or ALLEGRO_PRIM_HALF_FLOAT_4.\n");
181          goto fail;
182       }
183 #endif
184       ret->elements[elements->attribute] = *elements;
185       elements++;
186    }
187 
188    e = &ret->elements[ALLEGRO_PRIM_POSITION];
189    if (e->attribute) {
190       if (e->storage != ALLEGRO_PRIM_FLOAT_2 &&
191           e->storage != ALLEGRO_PRIM_FLOAT_3 &&
192           e->storage != ALLEGRO_PRIM_SHORT_2) {
193          ALLEGRO_WARN("Invalid storage for ALLEGRO_PRIM_POSITION.\n");
194          goto fail;
195       }
196    }
197 
198    e = &ret->elements[ALLEGRO_PRIM_TEX_COORD];
199    if(!e->attribute)
200       e = &ret->elements[ALLEGRO_PRIM_TEX_COORD_PIXEL];
201    if (e->attribute) {
202       if (e->storage != ALLEGRO_PRIM_FLOAT_2 &&
203           e->storage != ALLEGRO_PRIM_SHORT_2) {
204          ALLEGRO_WARN("Invalid storage for %s.\n", ret->elements[ALLEGRO_PRIM_TEX_COORD].attribute ? "ALLEGRO_PRIM_TEX_COORD" : "ALLEGRO_PRIM_TEX_COORD_PIXEL");
205          goto fail;
206       }
207    }
208 
209    display = al_get_current_display();
210    flags = al_get_display_flags(display);
211    if (flags & ALLEGRO_DIRECT3D) {
212       _al_set_d3d_decl(display, ret);
213    }
214 
215    ret->stride = stride;
216    return ret;
217 fail:
218    al_free(ret->elements);
219    al_free(ret);
220    return NULL;
221 }
222 
223 /* Function: al_destroy_vertex_decl
224  */
al_destroy_vertex_decl(ALLEGRO_VERTEX_DECL * decl)225 void al_destroy_vertex_decl(ALLEGRO_VERTEX_DECL* decl)
226 {
227    if (!decl)
228       return;
229    al_free(decl->elements);
230    /*
231     * TODO: Somehow free the d3d_decl
232     */
233    al_free(decl);
234 }
235 
236 /* Function: al_create_vertex_buffer
237  */
al_create_vertex_buffer(ALLEGRO_VERTEX_DECL * decl,const void * initial_data,int num_vertices,int flags)238 ALLEGRO_VERTEX_BUFFER* al_create_vertex_buffer(ALLEGRO_VERTEX_DECL* decl,
239    const void* initial_data, int num_vertices, int flags)
240 {
241    ALLEGRO_VERTEX_BUFFER* ret;
242    int display_flags = al_get_display_flags(al_get_current_display());
243    ASSERT(addon_initialized);
244    ret = al_calloc(1, sizeof(ALLEGRO_VERTEX_BUFFER));
245    ret->common.size = num_vertices;
246    ret->common.write_only = !(flags & ALLEGRO_PRIM_BUFFER_READWRITE);
247    ret->decl = decl;
248 
249 #if defined ALLEGRO_IPHONE || defined ALLEGRO_ANDROID
250    if (flags & ALLEGRO_PRIM_BUFFER_READWRITE)
251       goto fail;
252 #endif
253 
254    if (display_flags & ALLEGRO_OPENGL) {
255       if (_al_create_vertex_buffer_opengl(ret, initial_data, num_vertices, flags))
256          return ret;
257    }
258    else if (display_flags & ALLEGRO_DIRECT3D) {
259       if (_al_create_vertex_buffer_directx(ret, initial_data, num_vertices, flags))
260          return ret;
261    }
262 
263    /* Silence the warning */
264    goto fail;
265 fail:
266    al_free(ret);
267    return 0;
268 }
269 
270 /* Function: al_create_index_buffer
271  */
al_create_index_buffer(int index_size,const void * initial_data,int num_indices,int flags)272 ALLEGRO_INDEX_BUFFER* al_create_index_buffer(int index_size,
273     const void* initial_data, int num_indices, int flags)
274 {
275    ALLEGRO_INDEX_BUFFER* ret;
276    int display_flags = al_get_display_flags(al_get_current_display());
277    ASSERT(addon_initialized);
278    ASSERT(index_size == 2 || index_size == 4);
279    ret = al_calloc(1, sizeof(ALLEGRO_INDEX_BUFFER));
280    ret->common.size = num_indices;
281    ret->common.write_only = !(flags & ALLEGRO_PRIM_BUFFER_READWRITE);
282    ret->index_size = index_size;
283 
284 #if defined ALLEGRO_IPHONE || defined ALLEGRO_ANDROID
285    if (flags & ALLEGRO_PRIM_BUFFER_READWRITE)
286       goto fail;
287 #endif
288 
289 #if defined ALLEGRO_IPHONE
290    if (index_size == 4)
291       goto fail;
292 #endif
293 
294    if (display_flags & ALLEGRO_OPENGL) {
295       if (_al_create_index_buffer_opengl(ret, initial_data, num_indices, flags))
296          return ret;
297    }
298    else if (display_flags & ALLEGRO_DIRECT3D) {
299       if (_al_create_index_buffer_directx(ret, initial_data, num_indices, flags))
300          return ret;
301    }
302 
303    /* Silence the warning */
304    goto fail;
305 fail:
306    al_free(ret);
307    return NULL;
308 }
309 
310 /* Function: al_destroy_vertex_buffer
311  */
al_destroy_vertex_buffer(ALLEGRO_VERTEX_BUFFER * buffer)312 void al_destroy_vertex_buffer(ALLEGRO_VERTEX_BUFFER* buffer)
313 {
314    int flags = al_get_display_flags(al_get_current_display());
315    ASSERT(addon_initialized);
316 
317    if (buffer == 0)
318       return;
319 
320    al_unlock_vertex_buffer(buffer);
321 
322    if (flags & ALLEGRO_OPENGL) {
323       _al_destroy_vertex_buffer_opengl(buffer);
324    }
325    else if (flags & ALLEGRO_DIRECT3D) {
326       _al_destroy_vertex_buffer_directx(buffer);
327    }
328 
329    al_free(buffer);
330 }
331 
332 /* Function: al_destroy_index_buffer
333  */
al_destroy_index_buffer(ALLEGRO_INDEX_BUFFER * buffer)334 void al_destroy_index_buffer(ALLEGRO_INDEX_BUFFER* buffer)
335 {
336    int flags = al_get_display_flags(al_get_current_display());
337    ASSERT(addon_initialized);
338 
339    if (buffer == 0)
340       return;
341 
342    al_unlock_index_buffer(buffer);
343 
344    if (flags & ALLEGRO_OPENGL) {
345       _al_destroy_index_buffer_opengl(buffer);
346    }
347    else if (flags & ALLEGRO_DIRECT3D) {
348       _al_destroy_index_buffer_directx(buffer);
349    }
350 
351    al_free(buffer);
352 }
353 
354 /* The sizes are in bytes here */
lock_buffer_common(ALLEGRO_BUFFER_COMMON * common,int offset,int length,int flags)355 static bool lock_buffer_common(ALLEGRO_BUFFER_COMMON* common, int offset, int length, int flags)
356 {
357    if (common->is_locked || (common->write_only && flags != ALLEGRO_LOCK_WRITEONLY))
358       return false;
359 
360    common->lock_offset = offset;
361    common->lock_length = length;
362    common->lock_flags = flags;
363    common->is_locked = true;
364    return true;
365 }
366 
367 /* Function: al_lock_vertex_buffer
368  */
al_lock_vertex_buffer(ALLEGRO_VERTEX_BUFFER * buffer,int offset,int length,int flags)369 void* al_lock_vertex_buffer(ALLEGRO_VERTEX_BUFFER* buffer, int offset,
370    int length, int flags)
371 {
372    int stride;
373    int disp_flags = al_get_display_flags(al_get_current_display());
374    ASSERT(buffer);
375    ASSERT(addon_initialized);
376 
377    if (offset + length > buffer->common.size)
378       return NULL;
379 
380    stride = buffer->decl ? buffer->decl->stride : (int)sizeof(ALLEGRO_VERTEX);
381 
382    if (!lock_buffer_common(&buffer->common, offset * stride, length * stride, flags))
383       return NULL;
384 
385    if (disp_flags & ALLEGRO_OPENGL) {
386       return _al_lock_vertex_buffer_opengl(buffer);
387    }
388    else if (disp_flags & ALLEGRO_DIRECT3D) {
389       return _al_lock_vertex_buffer_directx(buffer);
390    }
391    else {
392       return NULL;
393    }
394 }
395 
396 /* Function: al_lock_index_buffer
397  */
al_lock_index_buffer(ALLEGRO_INDEX_BUFFER * buffer,int offset,int length,int flags)398 void* al_lock_index_buffer(ALLEGRO_INDEX_BUFFER* buffer, int offset,
399     int length, int flags)
400 {
401    int disp_flags = al_get_display_flags(al_get_current_display());
402    ASSERT(buffer);
403    ASSERT(addon_initialized);
404 
405    if (offset + length > buffer->common.size)
406       return NULL;
407 
408    if (!lock_buffer_common(&buffer->common, offset * buffer->index_size, length * buffer->index_size, flags))
409       return NULL;
410 
411    if (disp_flags & ALLEGRO_OPENGL) {
412       return _al_lock_index_buffer_opengl(buffer);
413    }
414    else if (disp_flags & ALLEGRO_DIRECT3D) {
415       return _al_lock_index_buffer_directx(buffer);
416    }
417    else {
418       return NULL;
419    }
420 }
421 
422 /* Function: al_unlock_vertex_buffer
423  */
al_unlock_vertex_buffer(ALLEGRO_VERTEX_BUFFER * buffer)424 void al_unlock_vertex_buffer(ALLEGRO_VERTEX_BUFFER* buffer)
425 {
426    int flags = al_get_display_flags(al_get_current_display());
427    ASSERT(buffer);
428    ASSERT(addon_initialized);
429 
430    if (!buffer->common.is_locked)
431       return;
432 
433    buffer->common.is_locked = false;
434 
435    if (flags & ALLEGRO_OPENGL) {
436       _al_unlock_vertex_buffer_opengl(buffer);
437    }
438    else if (flags & ALLEGRO_DIRECT3D) {
439       _al_unlock_vertex_buffer_directx(buffer);
440    }
441 }
442 
443 /* Function: al_unlock_index_buffer
444  */
al_unlock_index_buffer(ALLEGRO_INDEX_BUFFER * buffer)445 void al_unlock_index_buffer(ALLEGRO_INDEX_BUFFER* buffer)
446 {
447 	int flags = al_get_display_flags(al_get_current_display());
448    ASSERT(buffer);
449    ASSERT(addon_initialized);
450 
451    if (!buffer->common.is_locked)
452       return;
453 
454    buffer->common.is_locked = false;
455 
456    if (flags & ALLEGRO_OPENGL) {
457       _al_unlock_index_buffer_opengl(buffer);
458    }
459    else if (flags & ALLEGRO_DIRECT3D) {
460       _al_unlock_index_buffer_directx(buffer);
461    }
462 }
463 
464 /* Software fallback for buffer drawing */
_al_draw_buffer_common_soft(ALLEGRO_VERTEX_BUFFER * vertex_buffer,ALLEGRO_BITMAP * texture,ALLEGRO_INDEX_BUFFER * index_buffer,int start,int end,int type)465 int _al_draw_buffer_common_soft(ALLEGRO_VERTEX_BUFFER* vertex_buffer, ALLEGRO_BITMAP* texture, ALLEGRO_INDEX_BUFFER* index_buffer, int start, int end, int type)
466 {
467    void* vtx;
468    int num_primitives = 0;
469    int num_vtx = end - start;
470    int vtx_lock_start = index_buffer ? 0 : start;
471    int vtx_lock_len = index_buffer ? al_get_vertex_buffer_size(vertex_buffer) : num_vtx;
472    if (vertex_buffer->common.write_only || (index_buffer && index_buffer->common.write_only)) {
473       return 0;
474    }
475 
476    vtx = al_lock_vertex_buffer(vertex_buffer, vtx_lock_start, vtx_lock_len, ALLEGRO_LOCK_READONLY);
477    ASSERT(vtx);
478 
479    if (index_buffer) {
480       void* idx;
481       int* int_idx = NULL;
482       int ii;
483 
484       idx = al_lock_index_buffer(index_buffer, start, num_vtx, ALLEGRO_LOCK_READONLY);
485       ASSERT(idx);
486 
487       if (index_buffer->index_size != 4) {
488          int_idx = al_malloc(num_vtx * sizeof(int));
489          for (ii = 0; ii < num_vtx; ii++) {
490             int_idx[ii] = ((unsigned short*)idx)[ii];
491          }
492          idx = int_idx;
493       }
494 
495       num_primitives = _al_draw_prim_indexed_soft(texture, vtx, vertex_buffer->decl, idx, num_vtx, type);
496 
497       al_unlock_index_buffer(index_buffer);
498       al_free(int_idx);
499    }
500    else {
501       num_primitives = _al_draw_prim_soft(texture, vtx, vertex_buffer->decl, 0, num_vtx, type);
502    }
503 
504    al_unlock_vertex_buffer(vertex_buffer);
505    return num_primitives;
506 }
507 
508 /* Function: al_draw_vertex_buffer
509  */
al_draw_vertex_buffer(ALLEGRO_VERTEX_BUFFER * vertex_buffer,ALLEGRO_BITMAP * texture,int start,int end,int type)510 int al_draw_vertex_buffer(ALLEGRO_VERTEX_BUFFER* vertex_buffer,
511    ALLEGRO_BITMAP* texture, int start, int end, int type)
512 {
513    ALLEGRO_BITMAP *target;
514    int ret = 0;
515 
516    ASSERT(addon_initialized);
517    ASSERT(end >= start);
518    ASSERT(start >= 0);
519    ASSERT(end <= al_get_vertex_buffer_size(vertex_buffer));
520    ASSERT(type >= 0 && type < ALLEGRO_PRIM_NUM_TYPES);
521    ASSERT(vertex_buffer);
522    ASSERT(!vertex_buffer->common.is_locked);
523 
524    target = al_get_target_bitmap();
525 
526    if (al_get_bitmap_flags(target) & ALLEGRO_MEMORY_BITMAP ||
527        (texture && al_get_bitmap_flags(texture) & ALLEGRO_MEMORY_BITMAP) ||
528        _al_pixel_format_is_compressed(al_get_bitmap_format(target))) {
529       ret = _al_draw_buffer_common_soft(vertex_buffer, texture, NULL, start, end, type);
530    } else {
531       int flags = al_get_display_flags(al_get_current_display());
532       if (flags & ALLEGRO_OPENGL) {
533          ret = _al_draw_vertex_buffer_opengl(target, texture, vertex_buffer, start, end, type);
534       }
535       else if (flags & ALLEGRO_DIRECT3D) {
536          ret = _al_draw_vertex_buffer_directx(target, texture, vertex_buffer, start, end, type);
537       }
538    }
539 
540    return ret;
541 }
542 
543 /* Function: al_draw_indexed_buffer
544  */
al_draw_indexed_buffer(ALLEGRO_VERTEX_BUFFER * vertex_buffer,ALLEGRO_BITMAP * texture,ALLEGRO_INDEX_BUFFER * index_buffer,int start,int end,int type)545 int al_draw_indexed_buffer(ALLEGRO_VERTEX_BUFFER* vertex_buffer,
546    ALLEGRO_BITMAP* texture, ALLEGRO_INDEX_BUFFER* index_buffer,
547    int start, int end, int type)
548 {
549    ALLEGRO_BITMAP *target;
550    int ret = 0;
551 
552    ASSERT(addon_initialized);
553    ASSERT(end >= start);
554    ASSERT(start >= 0);
555    ASSERT(end <= al_get_index_buffer_size(index_buffer));
556    ASSERT(type >= 0 && type < ALLEGRO_PRIM_NUM_TYPES);
557    ASSERT(vertex_buffer);
558    ASSERT(!vertex_buffer->common.is_locked);
559    ASSERT(index_buffer);
560    ASSERT(!index_buffer->common.is_locked);
561 
562    target = al_get_target_bitmap();
563 
564    if (al_get_bitmap_flags(target) & ALLEGRO_MEMORY_BITMAP ||
565        (texture && al_get_bitmap_flags(texture) & ALLEGRO_MEMORY_BITMAP) ||
566        _al_pixel_format_is_compressed(al_get_bitmap_format(target))) {
567       ret = _al_draw_buffer_common_soft(vertex_buffer, texture, index_buffer, start, end, type);
568    } else {
569       int flags = al_get_display_flags(al_get_current_display());
570       if (flags & ALLEGRO_OPENGL) {
571          ret = _al_draw_indexed_buffer_opengl(target, texture, vertex_buffer, index_buffer, start, end, type);
572       }
573       else if (flags & ALLEGRO_DIRECT3D) {
574          ret = _al_draw_indexed_buffer_directx(target, texture, vertex_buffer, index_buffer, start, end, type);
575       }
576    }
577 
578    return ret;
579 }
580 
/* Function: al_get_vertex_buffer_size
 */
/* Returns the vertex count the buffer was created with. */
int al_get_vertex_buffer_size(ALLEGRO_VERTEX_BUFFER* buffer)
{
   ASSERT(buffer);
   return buffer->common.size;
}
588 
/* Function: al_get_index_buffer_size
 */
/* Returns the index count the buffer was created with. */
int al_get_index_buffer_size(ALLEGRO_INDEX_BUFFER* buffer)
{
   ASSERT(buffer);
   return buffer->common.size;
}
596