1 /* cairo - a vector graphics library with display and print output
2 *
3 * Copyright © 2009 Eric Anholt
4 * Copyright © 2009 Chris Wilson
5 * Copyright © 2005,2010 Red Hat, Inc
6 * Copyright © 2011 Linaro Limited
7 * Copyright © 2011 Samsung Electronics
8 *
9 * This library is free software; you can redistribute it and/or
10 * modify it either under the terms of the GNU Lesser General Public
11 * License version 2.1 as published by the Free Software Foundation
12 * (the "LGPL") or, at your option, under the terms of the Mozilla
13 * Public License Version 1.1 (the "MPL"). If you do not alter this
14 * notice, a recipient may use your version of this file under either
15 * the MPL or the LGPL.
16 *
17 * You should have received a copy of the LGPL along with this library
18 * in the file COPYING-LGPL-2.1; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA
20 * You should have received a copy of the MPL along with this library
21 * in the file COPYING-MPL-1.1
22 *
23 * The contents of this file are subject to the Mozilla Public License
24 * Version 1.1 (the "License"); you may not use this file except in
25 * compliance with the License. You may obtain a copy of the License at
26 * http://www.mozilla.org/MPL/
27 *
28 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY
29 * OF ANY KIND, either express or implied. See the LGPL or the MPL for
30 * the specific language governing rights and limitations.
31 *
32 * The Original Code is the cairo graphics library.
33 *
34 * The Initial Developer of the Original Code is Red Hat, Inc.
35 *
36 * Contributor(s):
37 * Benjamin Otte <otte@gnome.org>
38 * Carl Worth <cworth@cworth.org>
39 * Chris Wilson <chris@chris-wilson.co.uk>
40 * Eric Anholt <eric@anholt.net>
41 * Alexandros Frantzis <alexandros.frantzis@linaro.org>
42 * Henry Song <hsong@sisa.samsung.com>
43 * Martin Robinson <mrobinson@igalia.com>
44 */
45
46 #include "cairoint.h"
47
48 #include "cairo-gl-private.h"
49
50 #include "cairo-composite-rectangles-private.h"
51 #include "cairo-clip-private.h"
52 #include "cairo-error-private.h"
53 #include "cairo-image-surface-private.h"
54
55 /* FIXME: Copy of same routine in cairo-gl-msaa-compositor.c */
56 static cairo_int_status_t
_draw_int_rect(cairo_gl_context_t * ctx,cairo_gl_composite_t * setup,cairo_rectangle_int_t * rect)57 _draw_int_rect (cairo_gl_context_t *ctx,
58 cairo_gl_composite_t *setup,
59 cairo_rectangle_int_t *rect)
60 {
61 cairo_box_t box;
62 cairo_point_t quad[4];
63
64 _cairo_box_from_rectangle (&box, rect);
65 quad[0].x = box.p1.x;
66 quad[0].y = box.p1.y;
67 quad[1].x = box.p1.x;
68 quad[1].y = box.p2.y;
69 quad[2].x = box.p2.x;
70 quad[2].y = box.p2.y;
71 quad[3].x = box.p2.x;
72 quad[3].y = box.p1.y;
73
74 return _cairo_gl_composite_emit_quad_as_tristrip (ctx, setup, quad);
75 }
76
/* Blit the surface's backing texture into its renderbuffer so that
 * subsequent (multisampled) rendering operates on up-to-date pixels.
 * No-op unless the device flavor is GLES3 and the surface's current
 * content lives in the texture; on success the surface is marked as no
 * longer texture-resident. */
static cairo_int_status_t
_blit_texture_to_renderbuffer (cairo_gl_surface_t *surface)
{
    cairo_gl_context_t *ctx = NULL;
    cairo_gl_composite_t setup;
    cairo_surface_pattern_t pattern;
    cairo_rectangle_int_t extents;
    cairo_int_status_t status;

    /* FIXME: This only permits blit when glesv3 is enabled. But note that
       glesv2 with the ANGLE extension should also be able to support this feature,
       so once the ANGLE support code is in place this check can be relaxed. */
    if (((cairo_gl_context_t *)surface->base.device)->gl_flavor != CAIRO_GL_FLAVOR_ES3)
        return CAIRO_INT_STATUS_SUCCESS;

    if (! surface->content_in_texture)
        return CAIRO_INT_STATUS_SUCCESS;

    memset (&setup, 0, sizeof (cairo_gl_composite_t));

    /* SOURCE replaces the renderbuffer contents outright with the
     * texture's pixels. */
    status = _cairo_gl_composite_set_operator (&setup,
                                               CAIRO_OPERATOR_SOURCE,
                                               FALSE);

    if (status)
        return status;

    setup.dst = surface;
    setup.clip_region = surface->clip_region;

    /* Self-blit: the surface is both source pattern and destination. */
    _cairo_pattern_init_for_surface (&pattern, &surface->base);
    status = _cairo_gl_composite_set_source (&setup, &pattern.base,
                                             NULL, NULL, FALSE);
    _cairo_pattern_fini (&pattern.base);

    if (unlikely (status))
        goto FAIL;

    /* Target the multisample (renderbuffer) side of the surface. */
    _cairo_gl_composite_set_multisample (&setup);

    status = _cairo_gl_composite_begin (&setup, &ctx);

    if (unlikely (status))
        goto FAIL;

    /* Cover the whole surface. */
    extents.x = extents.y = 0;
    extents.width = surface->width;
    extents.height = surface->height;

    status = _draw_int_rect (ctx, &setup, &extents);

    if (status == CAIRO_INT_STATUS_SUCCESS)
        surface->content_in_texture = FALSE;

FAIL:
    _cairo_gl_composite_fini (&setup);

    /* ctx is only non-NULL if _cairo_gl_composite_begin succeeded. */
    if (ctx) {
        _cairo_gl_composite_flush (ctx);
        status = _cairo_gl_context_release (ctx, status);
    }

    return status;
}
141
142 cairo_int_status_t
_cairo_gl_composite_set_source(cairo_gl_composite_t * setup,const cairo_pattern_t * pattern,const cairo_rectangle_int_t * sample,const cairo_rectangle_int_t * extents,cairo_bool_t use_texgen)143 _cairo_gl_composite_set_source (cairo_gl_composite_t *setup,
144 const cairo_pattern_t *pattern,
145 const cairo_rectangle_int_t *sample,
146 const cairo_rectangle_int_t *extents,
147 cairo_bool_t use_texgen)
148 {
149 _cairo_gl_operand_destroy (&setup->src);
150 return _cairo_gl_operand_init (&setup->src, pattern, setup->dst,
151 sample, extents, use_texgen);
152 }
153
154 void
_cairo_gl_composite_set_source_operand(cairo_gl_composite_t * setup,const cairo_gl_operand_t * source)155 _cairo_gl_composite_set_source_operand (cairo_gl_composite_t *setup,
156 const cairo_gl_operand_t *source)
157 {
158 cairo_int_status_t status;
159
160 _cairo_gl_operand_destroy (&setup->src);
161 _cairo_gl_operand_copy (&setup->src, source);
162
163 if (source->type == CAIRO_GL_OPERAND_TEXTURE)
164 status = _cairo_gl_surface_resolve_multisampling (source->texture.surface);
165 }
166
/* Replace the composite source with a constant-color operand. */
void
_cairo_gl_composite_set_solid_source (cairo_gl_composite_t *setup,
                                      const cairo_color_t *color)
{
    _cairo_gl_operand_destroy (&setup->src);
    _cairo_gl_solid_operand_init (&setup->src, color);
}
174
175 cairo_int_status_t
_cairo_gl_composite_set_mask(cairo_gl_composite_t * setup,const cairo_pattern_t * pattern,const cairo_rectangle_int_t * sample,const cairo_rectangle_int_t * extents,cairo_bool_t use_texgen)176 _cairo_gl_composite_set_mask (cairo_gl_composite_t *setup,
177 const cairo_pattern_t *pattern,
178 const cairo_rectangle_int_t *sample,
179 const cairo_rectangle_int_t *extents,
180 cairo_bool_t use_texgen)
181 {
182 _cairo_gl_operand_destroy (&setup->mask);
183 if (pattern == NULL)
184 return CAIRO_STATUS_SUCCESS;
185
186 return _cairo_gl_operand_init (&setup->mask, pattern, setup->dst,
187 sample, extents, use_texgen);
188 }
189
190 void
_cairo_gl_composite_set_mask_operand(cairo_gl_composite_t * setup,const cairo_gl_operand_t * mask)191 _cairo_gl_composite_set_mask_operand (cairo_gl_composite_t *setup,
192 const cairo_gl_operand_t *mask)
193 {
194 cairo_int_status_t status;
195 _cairo_gl_operand_destroy (&setup->mask);
196 if (mask) {
197 _cairo_gl_operand_copy (&setup->mask, mask);
198 if (mask->type == CAIRO_GL_OPERAND_TEXTURE)
199 status = _cairo_gl_surface_resolve_multisampling (mask->texture.surface);
200 }
201 }
202
/* Mark the setup as carrying per-vertex span coverage (an extra
 * 4-byte color attribute is appended to each vertex). */
void
_cairo_gl_composite_set_spans (cairo_gl_composite_t *setup)
{
    setup->spans = TRUE;
}
208
/* Request rendering into the destination's multisample target. */
void
_cairo_gl_composite_set_multisample (cairo_gl_composite_t *setup)
{
    setup->multisample = TRUE;
}
214
/* Attach a clip region (used by the non-msaa compositors).  Only the
 * pointer is stored; no reference is taken here. */
void
_cairo_gl_composite_set_clip_region (cairo_gl_composite_t *setup,
                                     cairo_region_t *clip_region)
{
    setup->clip_region = clip_region;
}
221
/* Attach a clip (used by the msaa compositor).  Only the pointer is
 * stored; no copy is made here. */
void
_cairo_gl_composite_set_clip (cairo_gl_composite_t *setup,
                              cairo_clip_t *clip)
{
    setup->clip = clip;
}
228
/* Upload the current modelview-projection matrix and the source/mask
 * operand uniforms to the shader currently bound on @ctx. */
static void
_cairo_gl_composite_bind_to_shader (cairo_gl_context_t *ctx,
                                    cairo_gl_composite_t *setup)
{
    _cairo_gl_shader_bind_matrix4f(ctx, ctx->current_shader->mvp_location,
                                   ctx->modelviewprojection_matrix);
    _cairo_gl_operand_bind_to_shader (ctx, &setup->src, CAIRO_GL_TEX_SOURCE);
    _cairo_gl_operand_bind_to_shader (ctx, &setup->mask, CAIRO_GL_TEX_MASK);
}
238
239 static void
_cairo_gl_texture_set_filter(cairo_gl_context_t * ctx,GLuint target,cairo_filter_t filter)240 _cairo_gl_texture_set_filter (cairo_gl_context_t *ctx,
241 GLuint target,
242 cairo_filter_t filter)
243 {
244 switch (filter) {
245 case CAIRO_FILTER_FAST:
246 case CAIRO_FILTER_NEAREST:
247 glTexParameteri (target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
248 glTexParameteri (target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
249 break;
250 case CAIRO_FILTER_GOOD:
251 case CAIRO_FILTER_BEST:
252 case CAIRO_FILTER_BILINEAR:
253 glTexParameteri (target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
254 glTexParameteri (target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
255 break;
256 default:
257 case CAIRO_FILTER_GAUSSIAN:
258 ASSERT_NOT_REACHED;
259 }
260 }
261
262 static void
_cairo_gl_texture_set_extend(cairo_gl_context_t * ctx,GLuint target,cairo_extend_t extend)263 _cairo_gl_texture_set_extend (cairo_gl_context_t *ctx,
264 GLuint target,
265 cairo_extend_t extend)
266 {
267 GLint wrap_mode;
268 assert (! _cairo_gl_device_requires_power_of_two_textures (&ctx->base) ||
269 (extend != CAIRO_EXTEND_REPEAT && extend != CAIRO_EXTEND_REFLECT));
270
271 switch (extend) {
272 case CAIRO_EXTEND_NONE:
273 if (ctx->gl_flavor == CAIRO_GL_FLAVOR_ES3 ||
274 ctx->gl_flavor == CAIRO_GL_FLAVOR_ES2)
275 wrap_mode = GL_CLAMP_TO_EDGE;
276 else
277 wrap_mode = GL_CLAMP_TO_BORDER;
278 break;
279 case CAIRO_EXTEND_PAD:
280 wrap_mode = GL_CLAMP_TO_EDGE;
281 break;
282 case CAIRO_EXTEND_REPEAT:
283 if (ctx->has_npot_repeat)
284 wrap_mode = GL_REPEAT;
285 else
286 wrap_mode = GL_CLAMP_TO_EDGE;
287 break;
288 case CAIRO_EXTEND_REFLECT:
289 if (ctx->has_npot_repeat)
290 wrap_mode = GL_MIRRORED_REPEAT;
291 else
292 wrap_mode = GL_CLAMP_TO_EDGE;
293 break;
294 default:
295 wrap_mode = 0;
296 }
297
298 if (likely (wrap_mode)) {
299 glTexParameteri (target, GL_TEXTURE_WRAP_S, wrap_mode);
300 glTexParameteri (target, GL_TEXTURE_WRAP_T, wrap_mode);
301 }
302 }
303
304
/* Install @operand as the context's active operand for @tex_unit,
 * binding its texture and vertex attributes as needed.  If neither the
 * operand nor the vertex layout changed, only the cached copy is
 * refreshed and no flush occurs. */
static void
_cairo_gl_context_setup_operand (cairo_gl_context_t *ctx,
                                 cairo_gl_tex_t tex_unit,
                                 cairo_gl_operand_t *operand,
                                 unsigned int vertex_offset,
                                 cairo_bool_t vertex_size_changed)
{
    cairo_gl_dispatch_t *dispatch = &ctx->dispatch;
    cairo_bool_t needs_setup;

    /* XXX: we need to do setup when switching from shaders
     * to no shaders (or back) */
    needs_setup = vertex_size_changed;
    needs_setup |= _cairo_gl_operand_needs_setup (&ctx->operands[tex_unit],
                                                  operand,
                                                  vertex_offset);

    if (needs_setup) {
        /* Draw pending geometry with the old state before tearing it down. */
        _cairo_gl_composite_flush (ctx);
        _cairo_gl_context_destroy_operand (ctx, tex_unit);
    }

    /* Cache the operand (with its vertex offset) for future comparisons. */
    memcpy (&ctx->operands[tex_unit], operand, sizeof (cairo_gl_operand_t));
    ctx->operands[tex_unit].vertex_offset = vertex_offset;

    if (! needs_setup)
        return;

    switch (operand->type) {
    default:
    case CAIRO_GL_OPERAND_COUNT:
        ASSERT_NOT_REACHED;
    case CAIRO_GL_OPERAND_NONE:
        break;
    case CAIRO_GL_OPERAND_CONSTANT:
        /* Constant colors need no texture or attribute state. */
        break;
    case CAIRO_GL_OPERAND_TEXTURE:
        glActiveTexture (GL_TEXTURE0 + tex_unit);
        glBindTexture (ctx->tex_target, operand->texture.tex);
        _cairo_gl_texture_set_extend (ctx, ctx->tex_target,
                                      operand->texture.attributes.extend);
        _cairo_gl_texture_set_filter (ctx, ctx->tex_target,
                                      operand->texture.attributes.filter);

        /* Unless texture coordinates are generated in the shader,
         * stream them from the vertex buffer. */
        if (! operand->texture.texgen) {
            dispatch->VertexAttribPointer (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit, 2,
                                           GL_FLOAT, GL_FALSE, ctx->vertex_size,
                                           ctx->vb + vertex_offset);
            dispatch->EnableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
        }
        break;
    case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
    case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
    case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
    case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
        /* Gradients sample a 1D ramp texture, always bilinearly. */
        glActiveTexture (GL_TEXTURE0 + tex_unit);
        glBindTexture (ctx->tex_target, operand->gradient.gradient->tex);
        _cairo_gl_texture_set_extend (ctx, ctx->tex_target, operand->gradient.extend);
        _cairo_gl_texture_set_filter (ctx, ctx->tex_target, CAIRO_FILTER_BILINEAR);

        if (! operand->gradient.texgen) {
            dispatch->VertexAttribPointer (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit, 2,
                                           GL_FLOAT, GL_FALSE, ctx->vertex_size,
                                           ctx->vb + vertex_offset);
            dispatch->EnableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
        }
        break;
    }
}
375
376 static void
_cairo_gl_context_setup_spans(cairo_gl_context_t * ctx,cairo_bool_t spans_enabled,unsigned int vertex_size,unsigned int vertex_offset)377 _cairo_gl_context_setup_spans (cairo_gl_context_t *ctx,
378 cairo_bool_t spans_enabled,
379 unsigned int vertex_size,
380 unsigned int vertex_offset)
381 {
382 cairo_gl_dispatch_t *dispatch = &ctx->dispatch;
383
384 if (! spans_enabled) {
385 dispatch->DisableVertexAttribArray (CAIRO_GL_COLOR_ATTRIB_INDEX);
386 ctx->spans = FALSE;
387 return;
388 }
389
390 dispatch->VertexAttribPointer (CAIRO_GL_COLOR_ATTRIB_INDEX, 4,
391 GL_UNSIGNED_BYTE, GL_TRUE, vertex_size,
392 ctx->vb + vertex_offset);
393 dispatch->EnableVertexAttribArray (CAIRO_GL_COLOR_ATTRIB_INDEX);
394 ctx->spans = TRUE;
395 }
396
/* Tear down the GL state installed for the operand cached on
 * @tex_unit and zero the cached copy.  Flushes first so any pending
 * geometry is drawn with the old state. */
void
_cairo_gl_context_destroy_operand (cairo_gl_context_t *ctx,
                                   cairo_gl_tex_t tex_unit)
{
    cairo_gl_dispatch_t *dispatch = &ctx->dispatch;

    if (!_cairo_gl_context_is_flushed (ctx))
        _cairo_gl_composite_flush (ctx);

    switch (ctx->operands[tex_unit].type) {
    default:
    case CAIRO_GL_OPERAND_COUNT:
        ASSERT_NOT_REACHED;
    case CAIRO_GL_OPERAND_NONE:
        break;
    case CAIRO_GL_OPERAND_CONSTANT:
        /* No per-unit state was installed for constant colors. */
        break;
    case CAIRO_GL_OPERAND_TEXTURE:
        dispatch->DisableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
        break;
    case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
    case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
    case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
    case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
        dispatch->DisableVertexAttribArray (CAIRO_GL_TEXCOORD0_ATTRIB_INDEX + tex_unit);
        break;
    }

    memset (&ctx->operands[tex_unit], 0, sizeof (cairo_gl_operand_t));
}
428
429 static void
_cairo_gl_set_operator(cairo_gl_context_t * ctx,cairo_operator_t op,cairo_bool_t component_alpha)430 _cairo_gl_set_operator (cairo_gl_context_t *ctx,
431 cairo_operator_t op,
432 cairo_bool_t component_alpha)
433 {
434 struct {
435 GLenum src;
436 GLenum dst;
437 } blend_factors[] = {
438 { GL_ZERO, GL_ZERO }, /* Clear */
439 { GL_ONE, GL_ZERO }, /* Source */
440 { GL_ONE, GL_ONE_MINUS_SRC_ALPHA }, /* Over */
441 { GL_DST_ALPHA, GL_ZERO }, /* In */
442 { GL_ONE_MINUS_DST_ALPHA, GL_ZERO }, /* Out */
443 { GL_DST_ALPHA, GL_ONE_MINUS_SRC_ALPHA }, /* Atop */
444
445 { GL_ZERO, GL_ONE }, /* Dest */
446 { GL_ONE_MINUS_DST_ALPHA, GL_ONE }, /* DestOver */
447 { GL_ZERO, GL_SRC_ALPHA }, /* DestIn */
448 { GL_ZERO, GL_ONE_MINUS_SRC_ALPHA }, /* DestOut */
449 { GL_ONE_MINUS_DST_ALPHA, GL_SRC_ALPHA }, /* DestAtop */
450
451 { GL_ONE_MINUS_DST_ALPHA, GL_ONE_MINUS_SRC_ALPHA }, /* Xor */
452 { GL_ONE, GL_ONE }, /* Add */
453 };
454 GLenum src_factor, dst_factor;
455
456 assert (op < ARRAY_LENGTH (blend_factors));
457 /* different dst and component_alpha changes cause flushes elsewhere */
458 if (ctx->current_operator != op)
459 _cairo_gl_composite_flush (ctx);
460 ctx->current_operator = op;
461
462 src_factor = blend_factors[op].src;
463 dst_factor = blend_factors[op].dst;
464
465 /* Even when the user requests CAIRO_CONTENT_COLOR, we use GL_RGBA
466 * due to texture filtering of GL_CLAMP_TO_BORDER. So fix those
467 * bits in that case.
468 */
469 if (ctx->current_target->base.content == CAIRO_CONTENT_COLOR) {
470 if (src_factor == GL_ONE_MINUS_DST_ALPHA)
471 src_factor = GL_ZERO;
472 if (src_factor == GL_DST_ALPHA)
473 src_factor = GL_ONE;
474 }
475
476 if (component_alpha) {
477 if (dst_factor == GL_ONE_MINUS_SRC_ALPHA)
478 dst_factor = GL_ONE_MINUS_SRC_COLOR;
479 if (dst_factor == GL_SRC_ALPHA)
480 dst_factor = GL_SRC_COLOR;
481 }
482
483 if (ctx->current_target->base.content == CAIRO_CONTENT_ALPHA) {
484 glBlendFuncSeparate (GL_ZERO, GL_ZERO, src_factor, dst_factor);
485 } else if (ctx->current_target->base.content == CAIRO_CONTENT_COLOR) {
486 glBlendFuncSeparate (src_factor, dst_factor, GL_ONE, GL_ONE);
487 } else {
488 glBlendFunc (src_factor, dst_factor);
489 }
490 }
491
/* Rewrite the setup's operator into ones GL blending can express for
 * component-alpha rendering (CLEAR -> DEST_OUT with a white source;
 * OVER -> a DEST_OUT pre-shader pass followed by ADD), selecting and
 * installing the pre-shader on the context as needed. */
static cairo_status_t
_cairo_gl_composite_begin_component_alpha (cairo_gl_context_t *ctx,
                                           cairo_gl_composite_t *setup)
{
    cairo_gl_shader_t *pre_shader = NULL;
    cairo_status_t status;

    /* For CLEAR, cairo's rendering equation (quoting Owen's description in:
     * https://lists.cairographics.org/archives/cairo/2005-August/004992.html)
     * is:
     *     mask IN clip ? src OP dest : dest
     * or more simply:
     *     mask IN CLIP ? 0 : dest
     *
     * where the ternary operator A ? B : C is (A * B) + ((1 - A) * C).
     *
     * The model we use in _cairo_gl_set_operator() is Render's:
     *     src IN mask IN clip OP dest
     * which would boil down to:
     *     0 (bounded by the extents of the drawing).
     *
     * However, we can do a Render operation using an opaque source
     * and DEST_OUT to produce:
     *    1 IN mask IN clip DEST_OUT dest
     * which is
     *    mask IN clip ? 0 : dest
     */
    if (setup->op == CAIRO_OPERATOR_CLEAR) {
        _cairo_gl_solid_operand_init (&setup->src, CAIRO_COLOR_WHITE);
        setup->op = CAIRO_OPERATOR_DEST_OUT;
    }

    /*
     * implements component-alpha %CAIRO_OPERATOR_OVER using two passes of
     * the simpler operations %CAIRO_OPERATOR_DEST_OUT and %CAIRO_OPERATOR_ADD.
     *
     * From http://anholt.livejournal.com/32058.html:
     *
     * The trouble is that component-alpha rendering requires two different sources
     * for blending: one for the source value to the blender, which is the
     * per-channel multiplication of source and mask, and one for the source alpha
     * for multiplying with the destination channels, which is the multiplication
     * of the source channels by the mask alpha. So the equation for Over is:
     *
     * dst.A = src.A * mask.A + (1 - (src.A * mask.A)) * dst.A
     * dst.R = src.R * mask.R + (1 - (src.A * mask.R)) * dst.R
     * dst.G = src.G * mask.G + (1 - (src.A * mask.G)) * dst.G
     * dst.B = src.B * mask.B + (1 - (src.A * mask.B)) * dst.B
     *
     * But we can do some simpler operations, right? How about PictOpOutReverse,
     * which has a source factor of 0 and dest factor of (1 - source alpha). We
     * can get the source alpha value (srca.X = src.A * mask.X) out of the texture
     * blenders pretty easily. So we can do a component-alpha OutReverse, which
     * gets us:
     *
     * dst.A = 0 + (1 - (src.A * mask.A)) * dst.A
     * dst.R = 0 + (1 - (src.A * mask.R)) * dst.R
     * dst.G = 0 + (1 - (src.A * mask.G)) * dst.G
     * dst.B = 0 + (1 - (src.A * mask.B)) * dst.B
     *
     * OK. And if an op doesn't use the source alpha value for the destination
     * factor, then we can do the channel multiplication in the texture blenders
     * to get the source value, and ignore the source alpha that we wouldn't use.
     * We've supported this in the Radeon driver for a long time. An example would
     * be PictOpAdd, which does:
     *
     * dst.A = src.A * mask.A + dst.A
     * dst.R = src.R * mask.R + dst.R
     * dst.G = src.G * mask.G + dst.G
     * dst.B = src.B * mask.B + dst.B
     *
     * Hey, this looks good! If we do a PictOpOutReverse and then a PictOpAdd right
     * after it, we get:
     *
     * dst.A = src.A * mask.A + ((1 - (src.A * mask.A)) * dst.A)
     * dst.R = src.R * mask.R + ((1 - (src.A * mask.R)) * dst.R)
     * dst.G = src.G * mask.G + ((1 - (src.A * mask.G)) * dst.G)
     * dst.B = src.B * mask.B + ((1 - (src.A * mask.B)) * dst.B)
     *
     * This two-pass trickery could be avoided using a new GL extension that
     * lets two values come out of the shader and into the blend unit.
     */
    if (setup->op == CAIRO_OPERATOR_OVER) {
        setup->op = CAIRO_OPERATOR_ADD;
        status = _cairo_gl_get_shader_by_type (ctx,
                                               &setup->src,
                                               &setup->mask,
                                               setup->spans,
                                               CAIRO_GL_SHADER_IN_CA_SOURCE_ALPHA,
                                               &pre_shader);
        if (unlikely (status))
            return status;
    }

    /* Switching pre-shaders invalidates pending geometry. */
    if (ctx->pre_shader != pre_shader)
        _cairo_gl_composite_flush (ctx);
    ctx->pre_shader = pre_shader;

    return CAIRO_STATUS_SUCCESS;
}
592
593 static void
_scissor_to_doubles(cairo_gl_surface_t * surface,double x1,double y1,double x2,double y2)594 _scissor_to_doubles (cairo_gl_surface_t *surface,
595 double x1, double y1,
596 double x2, double y2)
597 {
598 double height;
599
600 height = y2 - y1;
601 if (_cairo_gl_surface_is_texture (surface) == FALSE)
602 y1 = surface->height - (y1 + height);
603 glScissor (x1, y1, x2 - x1, height);
604 glEnable (GL_SCISSOR_TEST);
605 }
606
/* Enable a scissor covering the integer rectangle @r. */
void
_cairo_gl_scissor_to_rectangle (cairo_gl_surface_t *surface,
                                const cairo_rectangle_int_t *r)
{
    _scissor_to_doubles (surface, r->x, r->y, r->x+r->width, r->y+r->height);
}
613
/* Enable a scissor covering the fixed-point box @box. */
static void
_scissor_to_box (cairo_gl_surface_t *surface,
                 const cairo_box_t *box)
{
    double x1, y1, x2, y2;
    _cairo_box_to_doubles (box, &x1, &y1, &x2, &y2);
    _scissor_to_doubles (surface, x1, y1, x2, y2);
}
622
/* Ensure the vertex position attribute is bound for vertices of
 * @size_per_vertex bytes.  Returns TRUE if the per-vertex stride
 * changed (callers must then re-setup the operand attributes). */
static cairo_bool_t
_cairo_gl_composite_setup_vbo (cairo_gl_context_t *ctx,
                               unsigned int size_per_vertex)
{
    cairo_bool_t vertex_size_changed = ctx->vertex_size != size_per_vertex;
    if (vertex_size_changed) {
        /* NOTE(review): vertex_size is updated *before* the flush, and
         * flush derives its vertex count from vb_offset / vertex_size —
         * this relies on the buffer being empty (or callers flushing)
         * when the stride changes; confirm with callers. */
        ctx->vertex_size = size_per_vertex;
        _cairo_gl_composite_flush (ctx);
    }

    if (_cairo_gl_context_is_flushed (ctx)) {
        ctx->dispatch.VertexAttribPointer (CAIRO_GL_VERTEX_ATTRIB_INDEX, 2,
                                           GL_FLOAT, GL_FALSE, size_per_vertex,
                                           ctx->vb);
        ctx->dispatch.EnableVertexAttribArray (CAIRO_GL_VERTEX_ATTRIB_INDEX);
    }

    return vertex_size_changed;
}
642
/* Turn off stencil testing and depth/stencil writes (writes are
 * re-enabled when painting a clip into the stencil buffer). */
static void
_disable_stencil_buffer (void)
{
    glDisable (GL_STENCIL_TEST);
    glDepthMask (GL_FALSE);
}
649
/* Establish the setup's clip for msaa compositing: a single
 * rectangular clip box becomes a plain scissor; anything more complex
 * is painted into the stencil buffer (and cached there for texture
 * surfaces).  On success the appropriate scissor/stencil state is
 * active; on failure stencil state is disabled. */
static cairo_int_status_t
_cairo_gl_composite_setup_painted_clipping (cairo_gl_composite_t *setup,
                                            cairo_gl_context_t *ctx,
                                            int vertex_size)
{
    cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;

    cairo_gl_surface_t *dst = setup->dst;
    cairo_clip_t *clip = setup->clip;

    /* A single box with no path reduces to a scissor rectangle. */
    if (clip->num_boxes == 1 && clip->path == NULL) {
        _scissor_to_box (dst, &clip->boxes[0]);
        goto disable_stencil_buffer_and_return;
    }

    if (! _cairo_gl_ensure_stencil (ctx, setup->dst)) {
        status = CAIRO_INT_STATUS_UNSUPPORTED;
        goto disable_stencil_buffer_and_return;
    }

    /* We only want to clear the part of the stencil buffer
     * that we are about to use. It also does not hurt to
     * scissor around the painted clip. */
    _cairo_gl_scissor_to_rectangle (dst, _cairo_clip_get_extents (clip));

    /* The clip is not rectangular, so use the stencil buffer. */
    glDepthMask (GL_TRUE);
    glEnable (GL_STENCIL_TEST);

    /* Texture surfaces have private depth/stencil buffers, so we can
     * rely on any previous clip being cached there. */
    if (_cairo_gl_surface_is_texture (setup->dst)) {
        cairo_clip_t *old_clip = setup->dst->clip_on_stencil_buffer;
        if (_cairo_clip_equal (old_clip, setup->clip))
            goto activate_stencil_buffer_and_return;

        if (old_clip) {
            _cairo_clip_destroy (setup->dst->clip_on_stencil_buffer);
        }

        setup->dst->clip_on_stencil_buffer = _cairo_clip_copy (setup->clip);
    }

    glClearStencil (0);
    glClear (GL_STENCIL_BUFFER_BIT);

    /* Write 1s into the stencil wherever the clip is painted, without
     * touching the color buffer. */
    glStencilOp (GL_REPLACE, GL_REPLACE, GL_REPLACE);
    glStencilFunc (GL_EQUAL, 1, 0xffffffff);
    glColorMask (0, 0, 0, 0);

    status = _cairo_gl_msaa_compositor_draw_clip (ctx, setup, clip);

    if (unlikely (status)) {
        glColorMask (1, 1, 1, 1);
        goto disable_stencil_buffer_and_return;
    }

    /* We want to only render to the stencil buffer, so draw everything now.
       Flushing also unbinds the VBO, which we want to rebind for regular
       drawing. */
    _cairo_gl_composite_flush (ctx);
    _cairo_gl_composite_setup_vbo (ctx, vertex_size);

activate_stencil_buffer_and_return:
    /* Subsequent drawing only passes where the stencil holds 1. */
    glColorMask (1, 1, 1, 1);
    glStencilOp (GL_KEEP, GL_KEEP, GL_KEEP);
    glStencilFunc (GL_EQUAL, 1, 0xffffffff);
    return CAIRO_INT_STATUS_SUCCESS;

disable_stencil_buffer_and_return:
    _disable_stencil_buffer ();
    return status;
}
723
/* Reconcile the context's cached clip state with the setup's clip
 * (msaa compositor) or clip_region (other compositors), flushing
 * pending geometry when the clip changes mid-batch, then activate
 * scissor/stencil clipping as appropriate. */
static cairo_int_status_t
_cairo_gl_composite_setup_clipping (cairo_gl_composite_t *setup,
                                    cairo_gl_context_t *ctx,
                                    int vertex_size)
{
    cairo_bool_t clip_changing = TRUE;
    cairo_bool_t clip_region_changing = TRUE;

    if (! ctx->clip && ! setup->clip && ! setup->clip_region && ! ctx->clip_region)
        goto disable_all_clipping;

    clip_changing = ! _cairo_clip_equal (ctx->clip, setup->clip);
    clip_region_changing = ! cairo_region_equal (ctx->clip_region, setup->clip_region);
    if (! _cairo_gl_context_is_flushed (ctx) &&
        (clip_region_changing || clip_changing))
        _cairo_gl_composite_flush (ctx);

    assert (!setup->clip_region || !setup->clip);

    /* setup->clip is only used by the msaa compositor and setup->clip_region
     * only by the other compositors, so it's safe to wait to clean up obsolete
     * clips. */
    if (clip_region_changing) {
        cairo_region_destroy (ctx->clip_region);
        ctx->clip_region = cairo_region_reference (setup->clip_region);
    }
    if (clip_changing) {
        _cairo_clip_destroy (ctx->clip);
        ctx->clip = _cairo_clip_copy (setup->clip);
    }

    /* For clip regions, we scissor right before drawing. */
    if (setup->clip_region)
        goto disable_all_clipping;

    if (setup->clip)
        return _cairo_gl_composite_setup_painted_clipping (setup, ctx,
                                                           vertex_size);
disable_all_clipping:
    _disable_stencil_buffer ();
    glDisable (GL_SCISSOR_TEST);
    return CAIRO_INT_STATUS_SUCCESS;
}
767
/* Select shaders, bind source/mask operands and vertex attributes, and
 * configure blending on @ctx for the composite described by @setup.
 * Handles the component-alpha pre-shader dance when the mask carries
 * per-channel alpha. */
cairo_status_t
_cairo_gl_set_operands_and_operator (cairo_gl_composite_t *setup,
                                     cairo_gl_context_t *ctx)
{
    unsigned int dst_size, src_size, mask_size, vertex_size;
    cairo_status_t status;
    cairo_gl_shader_t *shader;
    cairo_bool_t component_alpha;
    cairo_bool_t vertex_size_changed;

    component_alpha =
        setup->mask.type == CAIRO_GL_OPERAND_TEXTURE &&
        setup->mask.texture.attributes.has_component_alpha;

    /* Do various magic for component alpha */
    if (component_alpha) {
        status = _cairo_gl_composite_begin_component_alpha (ctx, setup);
        if (unlikely (status))
            return status;
    } else {
        /* Dropping the pre-shader invalidates pending geometry. */
        if (ctx->pre_shader) {
            _cairo_gl_composite_flush (ctx);
            ctx->pre_shader = NULL;
        }
    }

    status = _cairo_gl_get_shader_by_type (ctx,
                                           &setup->src,
                                           &setup->mask,
                                           setup->spans,
                                           component_alpha ?
                                           CAIRO_GL_SHADER_IN_CA_SOURCE :
                                           CAIRO_GL_SHADER_IN_NORMAL,
                                           &shader);
    if (unlikely (status)) {
        ctx->pre_shader = NULL;
        return status;
    }
    if (ctx->current_shader != shader)
        _cairo_gl_composite_flush (ctx);

    status = CAIRO_STATUS_SUCCESS;

    /* Vertex layout: position (2 floats), then source and mask
     * texcoords if any, then optional span coverage. */
    dst_size = 2 * sizeof (GLfloat);
    src_size = _cairo_gl_operand_get_vertex_size (&setup->src);
    mask_size = _cairo_gl_operand_get_vertex_size (&setup->mask);
    vertex_size = dst_size + src_size + mask_size;

    if (setup->spans)
        vertex_size += sizeof (GLfloat);

    vertex_size_changed = _cairo_gl_composite_setup_vbo (ctx, vertex_size);

    _cairo_gl_context_setup_operand (ctx, CAIRO_GL_TEX_SOURCE, &setup->src, dst_size, vertex_size_changed);
    _cairo_gl_context_setup_operand (ctx, CAIRO_GL_TEX_MASK, &setup->mask, dst_size + src_size, vertex_size_changed);

    _cairo_gl_context_setup_spans (ctx, setup->spans, vertex_size,
                                   dst_size + src_size + mask_size);

    _cairo_gl_set_operator (ctx, setup->op, component_alpha);

    /* Only (re)bind shaders and uniforms when nothing is buffered;
     * otherwise the current bindings must stay valid for the pending
     * geometry. */
    if (_cairo_gl_context_is_flushed (ctx)) {
        if (ctx->pre_shader) {
            _cairo_gl_set_shader (ctx, ctx->pre_shader);
            _cairo_gl_composite_bind_to_shader (ctx, setup);
        }
        _cairo_gl_set_shader (ctx, shader);
        _cairo_gl_composite_bind_to_shader (ctx, setup);
    }

    return status;
}
840
/* Acquire the GL context for setup->dst and prepare destination,
 * blending, shaders, operands and clipping for compositing.  On
 * success *ctx_out holds the acquired context (the caller must release
 * it); on failure the context has already been released here. */
cairo_status_t
_cairo_gl_composite_begin (cairo_gl_composite_t *setup,
                           cairo_gl_context_t **ctx_out)
{
    cairo_gl_context_t *ctx;
    cairo_status_t status;

    assert (setup->dst);

    status = _cairo_gl_context_acquire (setup->dst->base.device, &ctx);
    if (unlikely (status))
        return status;

    _cairo_gl_context_set_destination (ctx, setup->dst, setup->multisample);
    glEnable (GL_BLEND);

    status = _cairo_gl_set_operands_and_operator (setup, ctx);
    if (unlikely (status))
        goto FAIL;

    status = _cairo_gl_composite_setup_clipping (setup, ctx, ctx->vertex_size);
    if (unlikely (status))
        goto FAIL;

    *ctx_out = ctx;

FAIL:
    /* The success path also falls through here; release only on error. */
    if (unlikely (status))
        status = _cairo_gl_context_release (ctx, status);

    return status;
}
873
/* Draw the accumulated triangle-strip indices.  When a component-alpha
 * pre-shader is active, run the DEST_OUT pass with it first, then the
 * ADD pass with the main shader.  The index array is emptied
 * afterwards. */
static inline void
_cairo_gl_composite_draw_tristrip (cairo_gl_context_t *ctx)
{
    cairo_array_t* indices = &ctx->tristrip_indices;
    const unsigned short *indices_array = _cairo_array_index_const (indices, 0);

    if (ctx->pre_shader) {
        cairo_gl_shader_t *prev_shader = ctx->current_shader;

        _cairo_gl_set_shader (ctx, ctx->pre_shader);
        _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_DEST_OUT, TRUE);
        glDrawElements (GL_TRIANGLE_STRIP, _cairo_array_num_elements (indices), GL_UNSIGNED_SHORT, indices_array);

        _cairo_gl_set_shader (ctx, prev_shader);
        _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_ADD, TRUE);
    }

    glDrawElements (GL_TRIANGLE_STRIP, _cairo_array_num_elements (indices), GL_UNSIGNED_SHORT, indices_array);
    _cairo_array_truncate (indices, 0);
}
894
895 static inline void
_cairo_gl_composite_draw_triangles(cairo_gl_context_t * ctx,unsigned int count)896 _cairo_gl_composite_draw_triangles (cairo_gl_context_t *ctx,
897 unsigned int count)
898 {
899 if (! ctx->pre_shader) {
900 glDrawArrays (GL_TRIANGLES, 0, count);
901 } else {
902 cairo_gl_shader_t *prev_shader = ctx->current_shader;
903
904 _cairo_gl_set_shader (ctx, ctx->pre_shader);
905 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_DEST_OUT, TRUE);
906 glDrawArrays (GL_TRIANGLES, 0, count);
907
908 _cairo_gl_set_shader (ctx, prev_shader);
909 _cairo_gl_set_operator (ctx, CAIRO_OPERATOR_ADD, TRUE);
910 glDrawArrays (GL_TRIANGLES, 0, count);
911 }
912 }
913
914 static void
_cairo_gl_composite_draw_triangles_with_clip_region(cairo_gl_context_t * ctx,unsigned int count)915 _cairo_gl_composite_draw_triangles_with_clip_region (cairo_gl_context_t *ctx,
916 unsigned int count)
917 {
918 int i, num_rectangles;
919
920 if (!ctx->clip_region) {
921 _cairo_gl_composite_draw_triangles (ctx, count);
922 return;
923 }
924
925 num_rectangles = cairo_region_num_rectangles (ctx->clip_region);
926 for (i = 0; i < num_rectangles; i++) {
927 cairo_rectangle_int_t rect;
928
929 cairo_region_get_rectangle (ctx->clip_region, i, &rect);
930
931 _cairo_gl_scissor_to_rectangle (ctx->current_target, &rect);
932 _cairo_gl_composite_draw_triangles (ctx, count);
933 }
934 }
935
static void
_cairo_gl_composite_unmap_vertex_buffer (cairo_gl_context_t *ctx)
{
    /* Reset the vertex-buffer write offset so subsequent emits start
     * from the beginning of the buffer.  (Despite the name, no GL
     * unmap call is needed here — the buffer is a plain client-side
     * array in this code path.) */
    ctx->vb_offset = 0;
}
941
942 void
_cairo_gl_composite_flush(cairo_gl_context_t * ctx)943 _cairo_gl_composite_flush (cairo_gl_context_t *ctx)
944 {
945 unsigned int count;
946 int i;
947
948 if (_cairo_gl_context_is_flushed (ctx))
949 return;
950
951 count = ctx->vb_offset / ctx->vertex_size;
952 _cairo_gl_composite_unmap_vertex_buffer (ctx);
953
954 if (ctx->primitive_type == CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS) {
955 _cairo_gl_composite_draw_tristrip (ctx);
956 } else {
957 assert (ctx->primitive_type == CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
958 _cairo_gl_composite_draw_triangles_with_clip_region (ctx, count);
959 }
960
961 for (i = 0; i < ARRAY_LENGTH (ctx->glyph_cache); i++)
962 _cairo_gl_glyph_cache_unlock (&ctx->glyph_cache[i]);
963 }
964
965 static void
_cairo_gl_composite_prepare_buffer(cairo_gl_context_t * ctx,unsigned int n_vertices,cairo_gl_primitive_type_t primitive_type)966 _cairo_gl_composite_prepare_buffer (cairo_gl_context_t *ctx,
967 unsigned int n_vertices,
968 cairo_gl_primitive_type_t primitive_type)
969 {
970 if (ctx->primitive_type != primitive_type) {
971 _cairo_gl_composite_flush (ctx);
972 ctx->primitive_type = primitive_type;
973 }
974
975 assert(ctx->vbo_size > 0);
976 if (ctx->vb_offset + n_vertices * ctx->vertex_size > ctx->vbo_size)
977 _cairo_gl_composite_flush (ctx);
978 }
979
980 static inline void
_cairo_gl_composite_emit_vertex(cairo_gl_context_t * ctx,GLfloat x,GLfloat y)981 _cairo_gl_composite_emit_vertex (cairo_gl_context_t *ctx,
982 GLfloat x, GLfloat y)
983 {
984 GLfloat *vb = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];
985
986 *vb++ = x;
987 *vb++ = y;
988
989 _cairo_gl_operand_emit (&ctx->operands[CAIRO_GL_TEX_SOURCE], &vb, x, y);
990 _cairo_gl_operand_emit (&ctx->operands[CAIRO_GL_TEX_MASK ], &vb, x, y);
991
992 ctx->vb_offset += ctx->vertex_size;
993 }
994
995 static inline void
_cairo_gl_composite_emit_alpha_vertex(cairo_gl_context_t * ctx,GLfloat x,GLfloat y,uint8_t alpha)996 _cairo_gl_composite_emit_alpha_vertex (cairo_gl_context_t *ctx,
997 GLfloat x, GLfloat y, uint8_t alpha)
998 {
999 GLfloat *vb = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];
1000 union fi {
1001 float f;
1002 GLbyte bytes[4];
1003 } fi;
1004
1005 *vb++ = x;
1006 *vb++ = y;
1007
1008 _cairo_gl_operand_emit (&ctx->operands[CAIRO_GL_TEX_SOURCE], &vb, x, y);
1009 _cairo_gl_operand_emit (&ctx->operands[CAIRO_GL_TEX_MASK ], &vb, x, y);
1010
1011 fi.bytes[0] = 0;
1012 fi.bytes[1] = 0;
1013 fi.bytes[2] = 0;
1014 fi.bytes[3] = alpha;
1015 *vb++ = fi.f;
1016
1017 ctx->vb_offset += ctx->vertex_size;
1018 }
1019
1020 static void
_cairo_gl_composite_emit_point(cairo_gl_context_t * ctx,const cairo_point_t * point)1021 _cairo_gl_composite_emit_point (cairo_gl_context_t *ctx,
1022 const cairo_point_t *point)
1023 {
1024 _cairo_gl_composite_emit_vertex (ctx,
1025 _cairo_fixed_to_double (point->x),
1026 _cairo_fixed_to_double (point->y));
1027 }
1028
1029 static void
_cairo_gl_composite_emit_rect(cairo_gl_context_t * ctx,GLfloat x1,GLfloat y1,GLfloat x2,GLfloat y2)1030 _cairo_gl_composite_emit_rect (cairo_gl_context_t *ctx,
1031 GLfloat x1, GLfloat y1,
1032 GLfloat x2, GLfloat y2)
1033 {
1034 _cairo_gl_composite_prepare_buffer (ctx, 6,
1035 CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
1036
1037 _cairo_gl_composite_emit_vertex (ctx, x1, y1);
1038 _cairo_gl_composite_emit_vertex (ctx, x2, y1);
1039 _cairo_gl_composite_emit_vertex (ctx, x1, y2);
1040
1041 _cairo_gl_composite_emit_vertex (ctx, x2, y1);
1042 _cairo_gl_composite_emit_vertex (ctx, x2, y2);
1043 _cairo_gl_composite_emit_vertex (ctx, x1, y2);
1044 }
1045
/* Return the rect emitter for @ctx.  Only one implementation exists,
 * so @ctx is currently unused; the chooser is kept for symmetry with
 * _cairo_gl_context_choose_emit_span/_glyph. */
cairo_gl_emit_rect_t
_cairo_gl_context_choose_emit_rect (cairo_gl_context_t *ctx)
{
    return _cairo_gl_composite_emit_rect;
}
1051
/* Public entry point for emitting a rectangle; simply forwards to the
 * static triangle-based emitter above. */
void
_cairo_gl_context_emit_rect (cairo_gl_context_t *ctx,
			     GLfloat x1, GLfloat y1,
			     GLfloat x2, GLfloat y2)
{
    _cairo_gl_composite_emit_rect (ctx, x1, y1, x2, y2);
}
1059
1060 static void
_cairo_gl_composite_emit_span(cairo_gl_context_t * ctx,GLfloat x1,GLfloat y1,GLfloat x2,GLfloat y2,uint8_t alpha)1061 _cairo_gl_composite_emit_span (cairo_gl_context_t *ctx,
1062 GLfloat x1, GLfloat y1,
1063 GLfloat x2, GLfloat y2,
1064 uint8_t alpha)
1065 {
1066 _cairo_gl_composite_prepare_buffer (ctx, 6,
1067 CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
1068
1069 _cairo_gl_composite_emit_alpha_vertex (ctx, x1, y1, alpha);
1070 _cairo_gl_composite_emit_alpha_vertex (ctx, x2, y1, alpha);
1071 _cairo_gl_composite_emit_alpha_vertex (ctx, x1, y2, alpha);
1072
1073 _cairo_gl_composite_emit_alpha_vertex (ctx, x2, y1, alpha);
1074 _cairo_gl_composite_emit_alpha_vertex (ctx, x2, y2, alpha);
1075 _cairo_gl_composite_emit_alpha_vertex (ctx, x1, y2, alpha);
1076 }
1077
/* Fast-path span emitter used when neither operand needs per-vertex
 * texture coordinates: each vertex is exactly 3 floats (x, y, packed
 * coverage), so the six vertices of the two triangles are written
 * straight into the vertex buffer without _cairo_gl_operand_emit(). */
static void
_cairo_gl_composite_emit_solid_span (cairo_gl_context_t *ctx,
				     GLfloat x1, GLfloat y1,
				     GLfloat x2, GLfloat y2,
				     uint8_t alpha)
{
    GLfloat *v;
    union fi {
	float f;
	GLbyte bytes[4];
    } fi;

    _cairo_gl_composite_prepare_buffer (ctx, 6,
					CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
    v = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];

    /* Vertex k occupies v[3k..3k+2] = (x, y, coverage).  The scatter
     * below fills the two triangles (x1,y1 x2,y1 x1,y2) and
     * (x2,y1 x2,y2 x1,y2) — same ordering as emit_span above. */
    v[15] = v[ 6] = v[0] = x1;
    v[10] = v[ 4] = v[1] = y1;
    v[12] = v[ 9] = v[3] = x2;
    v[16] = v[13] = v[7] = y2;

    /* Pack alpha into the top byte of the float coverage attribute
     * (presumably decoded as a normalized ubyte component by the
     * vertex-attrib setup — confirm against the attrib pointer code). */
    fi.bytes[0] = 0;
    fi.bytes[1] = 0;
    fi.bytes[2] = 0;
    fi.bytes[3] = alpha;
    v[17] =v[14] = v[11] = v[8] = v[5] = v[2] = fi.f;

    /* 6 vertices x 3 floats each. */
    ctx->vb_offset += 6*3 * sizeof(GLfloat);
}
1107
1108 cairo_gl_emit_span_t
_cairo_gl_context_choose_emit_span(cairo_gl_context_t * ctx)1109 _cairo_gl_context_choose_emit_span (cairo_gl_context_t *ctx)
1110 {
1111 if (ctx->operands[CAIRO_GL_TEX_MASK].type != CAIRO_GL_OPERAND_NONE) {
1112 switch (ctx->operands[CAIRO_GL_TEX_MASK].type) {
1113 default:
1114 case CAIRO_GL_OPERAND_COUNT:
1115 ASSERT_NOT_REACHED;
1116 case CAIRO_GL_OPERAND_NONE:
1117 case CAIRO_GL_OPERAND_CONSTANT:
1118 break;
1119
1120 case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
1121 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
1122 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
1123 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
1124 if (!ctx->operands[CAIRO_GL_TEX_MASK].gradient.texgen)
1125 return _cairo_gl_composite_emit_span;
1126 break;
1127
1128 case CAIRO_GL_OPERAND_TEXTURE:
1129 if (!ctx->operands[CAIRO_GL_TEX_MASK].texture.texgen)
1130 return _cairo_gl_composite_emit_span;
1131 break;
1132 }
1133 }
1134
1135 switch (ctx->operands[CAIRO_GL_TEX_SOURCE].type) {
1136 default:
1137 case CAIRO_GL_OPERAND_COUNT:
1138 ASSERT_NOT_REACHED;
1139 case CAIRO_GL_OPERAND_NONE:
1140 case CAIRO_GL_OPERAND_CONSTANT:
1141 break;
1142
1143 case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
1144 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
1145 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
1146 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
1147 if (!ctx->operands[CAIRO_GL_TEX_SOURCE].gradient.texgen)
1148 return _cairo_gl_composite_emit_span;
1149 break;
1150
1151 case CAIRO_GL_OPERAND_TEXTURE:
1152 if (!ctx->operands[CAIRO_GL_TEX_SOURCE].texture.texgen)
1153 return _cairo_gl_composite_emit_span;
1154 }
1155
1156 return _cairo_gl_composite_emit_solid_span;
1157 }
1158
1159 static inline void
_cairo_gl_composite_emit_glyph_vertex(cairo_gl_context_t * ctx,GLfloat x,GLfloat y,GLfloat glyph_x,GLfloat glyph_y)1160 _cairo_gl_composite_emit_glyph_vertex (cairo_gl_context_t *ctx,
1161 GLfloat x, GLfloat y,
1162 GLfloat glyph_x, GLfloat glyph_y)
1163 {
1164 GLfloat *vb = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];
1165
1166 *vb++ = x;
1167 *vb++ = y;
1168
1169 _cairo_gl_operand_emit (&ctx->operands[CAIRO_GL_TEX_SOURCE], &vb, x, y);
1170
1171 *vb++ = glyph_x;
1172 *vb++ = glyph_y;
1173
1174 ctx->vb_offset += ctx->vertex_size;
1175 }
1176
1177 static void
_cairo_gl_composite_emit_glyph(cairo_gl_context_t * ctx,GLfloat x1,GLfloat y1,GLfloat x2,GLfloat y2,GLfloat glyph_x1,GLfloat glyph_y1,GLfloat glyph_x2,GLfloat glyph_y2)1178 _cairo_gl_composite_emit_glyph (cairo_gl_context_t *ctx,
1179 GLfloat x1, GLfloat y1,
1180 GLfloat x2, GLfloat y2,
1181 GLfloat glyph_x1, GLfloat glyph_y1,
1182 GLfloat glyph_x2, GLfloat glyph_y2)
1183 {
1184 _cairo_gl_composite_prepare_buffer (ctx, 6,
1185 CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);
1186
1187 _cairo_gl_composite_emit_glyph_vertex (ctx, x1, y1, glyph_x1, glyph_y1);
1188 _cairo_gl_composite_emit_glyph_vertex (ctx, x2, y1, glyph_x2, glyph_y1);
1189 _cairo_gl_composite_emit_glyph_vertex (ctx, x1, y2, glyph_x1, glyph_y2);
1190
1191 _cairo_gl_composite_emit_glyph_vertex (ctx, x2, y1, glyph_x2, glyph_y1);
1192 _cairo_gl_composite_emit_glyph_vertex (ctx, x2, y2, glyph_x2, glyph_y2);
1193 _cairo_gl_composite_emit_glyph_vertex (ctx, x1, y2, glyph_x1, glyph_y2);
1194 }
1195
/* Fast-path glyph emitter used when the source operand contributes no
 * per-vertex data: each vertex is exactly 4 floats
 * (x, y, glyph_x, glyph_y), written directly into the vertex buffer. */
static void
_cairo_gl_composite_emit_solid_glyph (cairo_gl_context_t *ctx,
				      GLfloat x1, GLfloat y1,
				      GLfloat x2, GLfloat y2,
				      GLfloat glyph_x1, GLfloat glyph_y1,
				      GLfloat glyph_x2, GLfloat glyph_y2)
{
    GLfloat *v;

    _cairo_gl_composite_prepare_buffer (ctx, 6,
					CAIRO_GL_PRIMITIVE_TYPE_TRIANGLES);

    v = (GLfloat *) (void *) &ctx->vb[ctx->vb_offset];

    /* Vertex k occupies v[4k..4k+3].  The scatter fills the same two
     * triangles as _cairo_gl_composite_emit_glyph:
     * (x1,y1)(x2,y1)(x1,y2) and (x2,y1)(x2,y2)(x1,y2). */
    v[20] = v[ 8] = v[0] = x1;
    v[13] = v[ 5] = v[1] = y1;
    v[22] = v[10] = v[2] = glyph_x1;
    v[15] = v[ 7] = v[3] = glyph_y1;

    v[16] = v[12] = v[4] = x2;
    v[18] = v[14] = v[6] = glyph_x2;

    v[21] = v[17] = v[ 9] = y2;
    v[23] = v[19] = v[11] = glyph_y2;

    /* 6 vertices x 4 floats each. */
    ctx->vb_offset += 4 * 6 * sizeof (GLfloat);
}
1223
1224 cairo_gl_emit_glyph_t
_cairo_gl_context_choose_emit_glyph(cairo_gl_context_t * ctx)1225 _cairo_gl_context_choose_emit_glyph (cairo_gl_context_t *ctx)
1226 {
1227 switch (ctx->operands[CAIRO_GL_TEX_SOURCE].type) {
1228 default:
1229 case CAIRO_GL_OPERAND_COUNT:
1230 ASSERT_NOT_REACHED;
1231 case CAIRO_GL_OPERAND_NONE:
1232 case CAIRO_GL_OPERAND_CONSTANT:
1233 return _cairo_gl_composite_emit_solid_glyph;
1234
1235 case CAIRO_GL_OPERAND_LINEAR_GRADIENT:
1236 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_A0:
1237 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_NONE:
1238 case CAIRO_GL_OPERAND_RADIAL_GRADIENT_EXT:
1239 case CAIRO_GL_OPERAND_TEXTURE:
1240 return _cairo_gl_composite_emit_glyph;
1241 }
1242 }
1243
/* Release the resources held by @setup's source and mask operands.
 * The setup struct itself is caller-owned (typically stack-allocated). */
void
_cairo_gl_composite_fini (cairo_gl_composite_t *setup)
{
    _cairo_gl_operand_destroy (&setup->src);
    _cairo_gl_operand_destroy (&setup->mask);
}
1250
1251 cairo_status_t
_cairo_gl_composite_set_operator(cairo_gl_composite_t * setup,cairo_operator_t op,cairo_bool_t assume_component_alpha)1252 _cairo_gl_composite_set_operator (cairo_gl_composite_t *setup,
1253 cairo_operator_t op,
1254 cairo_bool_t assume_component_alpha)
1255 {
1256 if (assume_component_alpha) {
1257 if (op != CAIRO_OPERATOR_CLEAR &&
1258 op != CAIRO_OPERATOR_OVER &&
1259 op != CAIRO_OPERATOR_ADD)
1260 return UNSUPPORTED ("unsupported component alpha operator");
1261 } else {
1262 if (! _cairo_gl_operator_is_supported (op))
1263 return UNSUPPORTED ("unsupported operator");
1264 }
1265
1266 setup->op = op;
1267 return CAIRO_STATUS_SUCCESS;
1268 }
1269
1270 cairo_status_t
_cairo_gl_composite_init(cairo_gl_composite_t * setup,cairo_operator_t op,cairo_gl_surface_t * dst,cairo_bool_t assume_component_alpha)1271 _cairo_gl_composite_init (cairo_gl_composite_t *setup,
1272 cairo_operator_t op,
1273 cairo_gl_surface_t *dst,
1274 cairo_bool_t assume_component_alpha)
1275 {
1276 cairo_status_t status;
1277
1278 status = _blit_texture_to_renderbuffer (dst);
1279
1280 memset (setup, 0, sizeof (cairo_gl_composite_t));
1281
1282 status = _cairo_gl_composite_set_operator (setup, op,
1283 assume_component_alpha);
1284 if (status)
1285 return status;
1286
1287 setup->dst = dst;
1288 setup->clip_region = dst->clip_region;
1289
1290 return CAIRO_STATUS_SUCCESS;
1291 }
1292
1293 static cairo_int_status_t
_cairo_gl_composite_append_vertex_indices(cairo_gl_context_t * ctx,int number_of_new_indices)1294 _cairo_gl_composite_append_vertex_indices (cairo_gl_context_t *ctx,
1295 int number_of_new_indices)
1296 {
1297 cairo_int_status_t status = CAIRO_INT_STATUS_SUCCESS;
1298 cairo_array_t *indices = &ctx->tristrip_indices;
1299 int number_of_indices = _cairo_array_num_elements (indices);
1300 unsigned short current_vertex_index = 0;
1301 int i;
1302
1303 assert (number_of_new_indices > 0);
1304
1305 /* If any preexisting triangle triangle strip indices exist on this
1306 context, we insert a set of degenerate triangles from the last
1307 preexisting vertex to our first one. */
1308 if (number_of_indices > 0) {
1309 const unsigned short *indices_array = _cairo_array_index_const (indices, 0);
1310 current_vertex_index = indices_array[number_of_indices - 1];
1311
1312 status = _cairo_array_append (indices, ¤t_vertex_index);
1313 if (unlikely (status))
1314 return status;
1315
1316 current_vertex_index++;
1317 status =_cairo_array_append (indices, ¤t_vertex_index);
1318 if (unlikely (status))
1319 return status;
1320 }
1321
1322 for (i = 0; i < number_of_new_indices; i++) {
1323 status = _cairo_array_append (indices, ¤t_vertex_index);
1324 current_vertex_index++;
1325 if (unlikely (status))
1326 return status;
1327 }
1328
1329 return CAIRO_STATUS_SUCCESS;
1330 }
1331
/* Emit the four corners of @quad into the tristrip vertex buffer and
 * append their indices.  @setup is unused here but kept for interface
 * symmetry with the triangle variant below.
 *
 * Returns CAIRO_INT_STATUS_SUCCESS or an allocation failure from the
 * index array. */
cairo_int_status_t
_cairo_gl_composite_emit_quad_as_tristrip (cairo_gl_context_t *ctx,
					   cairo_gl_composite_t *setup,
					   const cairo_point_t quad[4])
{
    _cairo_gl_composite_prepare_buffer (ctx, 4,
					CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS);

    _cairo_gl_composite_emit_point (ctx, &quad[0]);
    _cairo_gl_composite_emit_point (ctx, &quad[1]);

    /* Cairo stores quad vertices in counter-clockwise order, but we need to
       emit them from top to bottom in the triangle strip, so we need to reverse
       the order of the last two vertices. */
    _cairo_gl_composite_emit_point (ctx, &quad[3]);
    _cairo_gl_composite_emit_point (ctx, &quad[2]);

    return _cairo_gl_composite_append_vertex_indices (ctx, 4);
}
1351
/* Emit the three corners of @triangle into the tristrip vertex buffer
 * and append their indices.  @setup is unused here but kept for
 * interface symmetry with the quad variant above.
 *
 * Returns CAIRO_INT_STATUS_SUCCESS or an allocation failure from the
 * index array. */
cairo_int_status_t
_cairo_gl_composite_emit_triangle_as_tristrip (cairo_gl_context_t *ctx,
					       cairo_gl_composite_t *setup,
					       const cairo_point_t triangle[3])
{
    _cairo_gl_composite_prepare_buffer (ctx, 3,
					CAIRO_GL_PRIMITIVE_TYPE_TRISTRIPS);

    _cairo_gl_composite_emit_point (ctx, &triangle[0]);
    _cairo_gl_composite_emit_point (ctx, &triangle[1]);
    _cairo_gl_composite_emit_point (ctx, &triangle[2]);
    return _cairo_gl_composite_append_vertex_indices (ctx, 3);
}
1365