1 /**************************************************************************
2  *
3  * Copyright 2007 VMware, Inc.
4  * All Rights Reserved.
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a
7  * copy of this software and associated documentation files (the
8  * "Software"), to deal in the Software without restriction, including
9  * without limitation the rights to use, copy, modify, merge, publish,
10  * distribute, sub license, and/or sell copies of the Software, and to
11  * permit persons to whom the Software is furnished to do so, subject to
12  * the following conditions:
13  *
14  * The above copyright notice and this permission notice (including the
15  * next paragraph) shall be included in all copies or substantial portions
16  * of the Software.
17  *
18  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21  * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25  *
26  **************************************************************************/
27 
28  /**
29   * @file
30   *
31   * Wrap the cso cache & hash mechanisms in a simplified
32   * pipe-driver-specific interface.
33   *
34   * @author Zack Rusin <zackr@vmware.com>
35   * @author Keith Whitwell <keithw@vmware.com>
36   */
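/*
 * A minimal usage sketch (hypothetical names; assumes a struct pipe_context
 * *pipe and filled-in state templates, with error handling omitted):
 *
 *    struct cso_context *cso = cso_create_context(pipe, 0);
 *    cso_set_rasterizer(cso, &rasterizer_templ);
 *    cso_set_blend(cso, &blend_templ);
 *    cso_set_viewport_dims(cso, fb_width, fb_height, FALSE);
 *    ... bind shaders, samplers and vertex state, then draw ...
 *    cso_destroy_context(cso);
 *
 * Repeated cso_set_* calls with identical templates reuse the driver CSOs
 * cached by this module instead of re-creating them.
 */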
37 
38 #include "pipe/p_state.h"
39 #include "util/u_draw.h"
40 #include "util/u_framebuffer.h"
41 #include "util/u_inlines.h"
42 #include "util/u_math.h"
43 #include "util/u_memory.h"
44 #include "util/u_vbuf.h"
45 #include "tgsi/tgsi_parse.h"
46 
47 #include "cso_cache/cso_context.h"
48 #include "cso_cache/cso_cache.h"
49 #include "cso_cache/cso_hash.h"
50 #include "cso_context.h"
51 
52 
53 /**
54  * Per-shader sampler information.
55  */
56 struct sampler_info
57 {
58    struct cso_sampler *cso_samplers[PIPE_MAX_SAMPLERS];
59    void *samplers[PIPE_MAX_SAMPLERS];
60 };
61 
62 
63 
64 struct cso_context {
65    struct pipe_context *pipe;
66    struct cso_cache *cache;
67 
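   /* u_vbuf fallback: 'vbuf' is the u_vbuf instance, if one was created.
    * 'vbuf_current' points at it while u_vbuf is routing vertex state and is
    * NULL when state goes straight to the driver (see
    * cso_set_vertex_buffers_and_elements).
    */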
68    struct u_vbuf *vbuf;
69    struct u_vbuf *vbuf_current;
70    bool always_use_vbuf;
71 
72    boolean has_geometry_shader;
73    boolean has_tessellation;
74    boolean has_compute_shader;
75    boolean has_streamout;
76 
77    unsigned saved_state;  /**< bitmask of CSO_BIT_x flags */
78 
79    struct pipe_sampler_view *fragment_views[PIPE_MAX_SHADER_SAMPLER_VIEWS];
80    unsigned nr_fragment_views;
81 
82    struct pipe_sampler_view *fragment_views_saved[PIPE_MAX_SHADER_SAMPLER_VIEWS];
83    unsigned nr_fragment_views_saved;
84 
85    struct sampler_info fragment_samplers_saved;
86    struct sampler_info samplers[PIPE_SHADER_TYPES];
87 
88    /* Temporary number until cso_single_sampler_done is called.
89     * It tracks the highest sampler seen in cso_single_sampler.
90     */
91    int max_sampler_seen;
92 
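   /* Vertex buffer slot 0 is tracked here so it can be saved and restored
    * around meta operations.
    */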
93    struct pipe_vertex_buffer vertex_buffer0_current;
94    struct pipe_vertex_buffer vertex_buffer0_saved;
95 
96    struct pipe_constant_buffer aux_constbuf_current[PIPE_SHADER_TYPES];
97    struct pipe_constant_buffer aux_constbuf_saved[PIPE_SHADER_TYPES];
98 
99    struct pipe_image_view fragment_image0_current;
100    struct pipe_image_view fragment_image0_saved;
101 
102    unsigned nr_so_targets;
103    struct pipe_stream_output_target *so_targets[PIPE_MAX_SO_BUFFERS];
104 
105    unsigned nr_so_targets_saved;
106    struct pipe_stream_output_target *so_targets_saved[PIPE_MAX_SO_BUFFERS];
107 
108    /** Current and saved state.
109     * The saved state is used as a 1-deep stack.
110     */
111    void *blend, *blend_saved;
112    void *depth_stencil, *depth_stencil_saved;
113    void *rasterizer, *rasterizer_saved;
114    void *fragment_shader, *fragment_shader_saved;
115    void *vertex_shader, *vertex_shader_saved;
116    void *geometry_shader, *geometry_shader_saved;
117    void *tessctrl_shader, *tessctrl_shader_saved;
118    void *tesseval_shader, *tesseval_shader_saved;
119    void *compute_shader;
120    void *velements, *velements_saved;
121    struct pipe_query *render_condition, *render_condition_saved;
122    uint render_condition_mode, render_condition_mode_saved;
123    boolean render_condition_cond, render_condition_cond_saved;
124 
125    struct pipe_framebuffer_state fb, fb_saved;
126    struct pipe_viewport_state vp, vp_saved;
127    struct pipe_blend_color blend_color;
128    unsigned sample_mask, sample_mask_saved;
129    unsigned min_samples, min_samples_saved;
130    struct pipe_stencil_ref stencil_ref, stencil_ref_saved;
131 };
132 
133 struct pipe_context *cso_get_pipe_context(struct cso_context *cso)
134 {
135    return cso->pipe;
136 }
137 
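/* The delete_*_state() helpers below refuse to delete a CSO that is still
 * bound (they return FALSE), so sanitize_hash() keeps such entries and moves
 * on to the next one.
 */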
138 static boolean delete_blend_state(struct cso_context *ctx, void *state)
139 {
140    struct cso_blend *cso = (struct cso_blend *)state;
141 
142    if (ctx->blend == cso->data)
143       return FALSE;
144 
145    if (cso->delete_state)
146       cso->delete_state(cso->context, cso->data);
147    FREE(state);
148    return TRUE;
149 }
150 
151 static boolean delete_depth_stencil_state(struct cso_context *ctx, void *state)
152 {
153    struct cso_depth_stencil_alpha *cso =
154       (struct cso_depth_stencil_alpha *)state;
155 
156    if (ctx->depth_stencil == cso->data)
157       return FALSE;
158 
159    if (cso->delete_state)
160       cso->delete_state(cso->context, cso->data);
161    FREE(state);
162 
163    return TRUE;
164 }
165 
166 static boolean delete_sampler_state(UNUSED struct cso_context *ctx, void *state)
167 {
168    struct cso_sampler *cso = (struct cso_sampler *)state;
169    if (cso->delete_state)
170       cso->delete_state(cso->context, cso->data);
171    FREE(state);
172    return TRUE;
173 }
174 
175 static boolean delete_rasterizer_state(struct cso_context *ctx, void *state)
176 {
177    struct cso_rasterizer *cso = (struct cso_rasterizer *)state;
178 
179    if (ctx->rasterizer == cso->data)
180       return FALSE;
181    if (cso->delete_state)
182       cso->delete_state(cso->context, cso->data);
183    FREE(state);
184    return TRUE;
185 }
186 
187 static boolean delete_vertex_elements(struct cso_context *ctx,
188                                       void *state)
189 {
190    struct cso_velements *cso = (struct cso_velements *)state;
191 
192    if (ctx->velements == cso->data)
193       return FALSE;
194 
195    if (cso->delete_state)
196       cso->delete_state(cso->context, cso->data);
197    FREE(state);
198    return TRUE;
199 }
200 
201 
202 static inline boolean delete_cso(struct cso_context *ctx,
203                                  void *state, enum cso_cache_type type)
204 {
205    switch (type) {
206    case CSO_BLEND:
207       return delete_blend_state(ctx, state);
208    case CSO_SAMPLER:
209       return delete_sampler_state(ctx, state);
210    case CSO_DEPTH_STENCIL_ALPHA:
211       return delete_depth_stencil_state(ctx, state);
212    case CSO_RASTERIZER:
213       return delete_rasterizer_state(ctx, state);
214    case CSO_VELEMENTS:
215       return delete_vertex_elements(ctx, state);
216    default:
217       assert(0);
218       FREE(state);
219    }
220    return FALSE;
221 }
222 
223 static inline void
224 sanitize_hash(struct cso_hash *hash, enum cso_cache_type type,
225               int max_size, void *user_data)
226 {
227    struct cso_context *ctx = (struct cso_context *)user_data;
228    /* if we're approaching the maximum size, remove a fourth of the entries,
229     * otherwise every subsequent call will go through the same thing */
230    int hash_size = cso_hash_size(hash);
231    int max_entries = (max_size > hash_size) ? max_size : hash_size;
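   /* (max_size < max_entries) evaluates to 0 or 1, so a quarter of the
    * entries is trimmed only once the table has actually grown past max_size.
    */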
232    int to_remove =  (max_size < max_entries) * max_entries/4;
233    struct cso_hash_iter iter;
234    struct cso_sampler **samplers_to_restore = NULL;
235    unsigned to_restore = 0;
236 
237    if (hash_size > max_size)
238       to_remove += hash_size - max_size;
239 
240    if (to_remove == 0)
241       return;
242 
243    if (type == CSO_SAMPLER) {
244       int i, j;
245 
246       samplers_to_restore = MALLOC(PIPE_SHADER_TYPES * PIPE_MAX_SAMPLERS *
247                                    sizeof(*samplers_to_restore));
248 
249       /* Temporarily remove currently bound sampler states from the hash
250        * table, to prevent them from being deleted
251        */
252       for (i = 0; i < PIPE_SHADER_TYPES; i++) {
253          for (j = 0; j < PIPE_MAX_SAMPLERS; j++) {
254             struct cso_sampler *sampler = ctx->samplers[i].cso_samplers[j];
255 
256             if (sampler && cso_hash_take(hash, sampler->hash_key))
257                samplers_to_restore[to_restore++] = sampler;
258          }
259       }
260    }
261 
262    iter = cso_hash_first_node(hash);
263    while (to_remove) {
264       /* remove elements until we're good */
265       /* FIXME: currently we pick the nodes to remove at random */
266       void *cso = cso_hash_iter_data(iter);
267 
268       if (!cso)
269          break;
270 
271       if (delete_cso(ctx, cso, type)) {
272          iter = cso_hash_erase(hash, iter);
273          --to_remove;
274       } else
275          iter = cso_hash_iter_next(iter);
276    }
277 
278    if (type == CSO_SAMPLER) {
279       /* Put currently bound sampler states back into the hash table */
280       while (to_restore--) {
281          struct cso_sampler *sampler = samplers_to_restore[to_restore];
282 
283          cso_hash_insert(hash, sampler->hash_key, sampler);
284       }
285 
286       FREE(samplers_to_restore);
287    }
288 }
289 
290 static void cso_init_vbuf(struct cso_context *cso, unsigned flags)
291 {
292    struct u_vbuf_caps caps;
293    bool uses_user_vertex_buffers = !(flags & CSO_NO_USER_VERTEX_BUFFERS);
294    bool needs64b = !(flags & CSO_NO_64B_VERTEX_BUFFERS);
295 
296    u_vbuf_get_caps(cso->pipe->screen, &caps, needs64b);
297 
298    /* Enable u_vbuf if needed. */
299    if (caps.fallback_always ||
300        (uses_user_vertex_buffers &&
301         caps.fallback_only_for_user_vbuffers)) {
302       cso->vbuf = u_vbuf_create(cso->pipe, &caps);
303       cso->vbuf_current = cso->vbuf;
304       cso->always_use_vbuf = caps.fallback_always;
305    }
306 }
307 
308 struct cso_context *
309 cso_create_context(struct pipe_context *pipe, unsigned flags)
310 {
311    struct cso_context *ctx = CALLOC_STRUCT(cso_context);
312    if (!ctx)
313       return NULL;
314 
315    ctx->cache = cso_cache_create();
316    if (ctx->cache == NULL)
317       goto out;
318    cso_cache_set_sanitize_callback(ctx->cache,
319                                    sanitize_hash,
320                                    ctx);
321 
322    ctx->pipe = pipe;
323    ctx->sample_mask = ~0;
324 
325    cso_init_vbuf(ctx, flags);
326 
327    /* Enable for testing: */
328    if (0) cso_set_maximum_cache_size( ctx->cache, 4 );
329 
330    if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_GEOMETRY,
331                                 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
332       ctx->has_geometry_shader = TRUE;
333    }
334    if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_TESS_CTRL,
335                                 PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
336       ctx->has_tessellation = TRUE;
337    }
338    if (pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
339                                       PIPE_SHADER_CAP_MAX_INSTRUCTIONS) > 0) {
340       int supported_irs =
341          pipe->screen->get_shader_param(pipe->screen, PIPE_SHADER_COMPUTE,
342                                         PIPE_SHADER_CAP_SUPPORTED_IRS);
343       if (supported_irs & ((1 << PIPE_SHADER_IR_TGSI) |
344                            (1 << PIPE_SHADER_IR_NIR))) {
345          ctx->has_compute_shader = TRUE;
346       }
347    }
348    if (pipe->screen->get_param(pipe->screen,
349                                PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS) != 0) {
350       ctx->has_streamout = TRUE;
351    }
352 
353    ctx->max_sampler_seen = -1;
354    return ctx;
355 
356 out:
357    cso_destroy_context( ctx );
358    return NULL;
359 }
360 
361 /**
362  * Free the CSO context.
363  */
364 void cso_destroy_context( struct cso_context *ctx )
365 {
366    unsigned i;
367 
368    if (ctx->pipe) {
369       ctx->pipe->bind_blend_state( ctx->pipe, NULL );
370       ctx->pipe->bind_rasterizer_state( ctx->pipe, NULL );
371 
372       {
373          static struct pipe_sampler_view *views[PIPE_MAX_SHADER_SAMPLER_VIEWS] = { NULL };
374          static void *zeros[PIPE_MAX_SAMPLERS] = { NULL };
375          struct pipe_screen *scr = ctx->pipe->screen;
376          enum pipe_shader_type sh;
377          for (sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
378             int maxsam = scr->get_shader_param(scr, sh,
379                                                PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
380             int maxview = scr->get_shader_param(scr, sh,
381                                                 PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS);
382             assert(maxsam <= PIPE_MAX_SAMPLERS);
383             assert(maxview <= PIPE_MAX_SHADER_SAMPLER_VIEWS);
384             if (maxsam > 0) {
385                ctx->pipe->bind_sampler_states(ctx->pipe, sh, 0, maxsam, zeros);
386             }
387             if (maxview > 0) {
388                ctx->pipe->set_sampler_views(ctx->pipe, sh, 0, maxview, views);
389             }
390          }
391       }
392 
393       ctx->pipe->bind_depth_stencil_alpha_state( ctx->pipe, NULL );
394       ctx->pipe->bind_fs_state( ctx->pipe, NULL );
395       ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, NULL);
396       ctx->pipe->bind_vs_state( ctx->pipe, NULL );
397       ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_VERTEX, 0, NULL);
398       if (ctx->has_geometry_shader) {
399          ctx->pipe->bind_gs_state(ctx->pipe, NULL);
400          ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_GEOMETRY, 0, NULL);
401       }
402       if (ctx->has_tessellation) {
403          ctx->pipe->bind_tcs_state(ctx->pipe, NULL);
404          ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_CTRL, 0, NULL);
405          ctx->pipe->bind_tes_state(ctx->pipe, NULL);
406          ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_TESS_EVAL, 0, NULL);
407       }
408       if (ctx->has_compute_shader) {
409          ctx->pipe->bind_compute_state(ctx->pipe, NULL);
410          ctx->pipe->set_constant_buffer(ctx->pipe, PIPE_SHADER_COMPUTE, 0, NULL);
411       }
412       ctx->pipe->bind_vertex_elements_state( ctx->pipe, NULL );
413 
414       if (ctx->has_streamout)
415          ctx->pipe->set_stream_output_targets(ctx->pipe, 0, NULL, NULL);
416    }
417 
418    for (i = 0; i < ctx->nr_fragment_views; i++) {
419       pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
420    }
421    for (i = 0; i < ctx->nr_fragment_views_saved; i++) {
422       pipe_sampler_view_reference(&ctx->fragment_views_saved[i], NULL);
423    }
424 
425    util_unreference_framebuffer_state(&ctx->fb);
426    util_unreference_framebuffer_state(&ctx->fb_saved);
427 
428    pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_current);
429    pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_saved);
430 
431    for (i = 0; i < PIPE_SHADER_TYPES; i++) {
432       pipe_resource_reference(&ctx->aux_constbuf_current[i].buffer, NULL);
433       pipe_resource_reference(&ctx->aux_constbuf_saved[i].buffer, NULL);
434    }
435 
436    pipe_resource_reference(&ctx->fragment_image0_current.resource, NULL);
437    pipe_resource_reference(&ctx->fragment_image0_saved.resource, NULL);
438 
439    for (i = 0; i < PIPE_MAX_SO_BUFFERS; i++) {
440       pipe_so_target_reference(&ctx->so_targets[i], NULL);
441       pipe_so_target_reference(&ctx->so_targets_saved[i], NULL);
442    }
443 
444    if (ctx->cache) {
445       cso_cache_delete( ctx->cache );
446       ctx->cache = NULL;
447    }
448 
449    if (ctx->vbuf)
450       u_vbuf_destroy(ctx->vbuf);
451    FREE( ctx );
452 }
453 
454 
455 /* These functions will either find the state of the given template
456  * in the cache, or they will create a new state from the given
457  * template, insert it into the cache and return it.
458  */
459 
460 /*
461  * If the driver returns 0 from the create method, the data member of the
462  * cso will be assigned to the template itself.
463  */
464 
465 enum pipe_error cso_set_blend(struct cso_context *ctx,
466                               const struct pipe_blend_state *templ)
467 {
468    unsigned key_size, hash_key;
469    struct cso_hash_iter iter;
470    void *handle;
471 
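   /* When independent blending is disabled only rt[0] is significant, so the
    * hash key covers just the state up to (but not including) rt[1].
    */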
472    key_size = templ->independent_blend_enable ?
473       sizeof(struct pipe_blend_state) :
474       (char *)&(templ->rt[1]) - (char *)templ;
475    hash_key = cso_construct_key((void*)templ, key_size);
476    iter = cso_find_state_template(ctx->cache, hash_key, CSO_BLEND,
477                                   (void*)templ, key_size);
478 
479    if (cso_hash_iter_is_null(iter)) {
480       struct cso_blend *cso = MALLOC(sizeof(struct cso_blend));
481       if (!cso)
482          return PIPE_ERROR_OUT_OF_MEMORY;
483 
484       memset(&cso->state, 0, sizeof cso->state);
485       memcpy(&cso->state, templ, key_size);
486       cso->data = ctx->pipe->create_blend_state(ctx->pipe, &cso->state);
487       cso->delete_state = (cso_state_callback)ctx->pipe->delete_blend_state;
488       cso->context = ctx->pipe;
489 
490       iter = cso_insert_state(ctx->cache, hash_key, CSO_BLEND, cso);
491       if (cso_hash_iter_is_null(iter)) {
492          FREE(cso);
493          return PIPE_ERROR_OUT_OF_MEMORY;
494       }
495 
496       handle = cso->data;
497    }
498    else {
499       handle = ((struct cso_blend *)cso_hash_iter_data(iter))->data;
500    }
501 
502    if (ctx->blend != handle) {
503       ctx->blend = handle;
504       ctx->pipe->bind_blend_state(ctx->pipe, handle);
505    }
506    return PIPE_OK;
507 }
508 
509 static void
510 cso_save_blend(struct cso_context *ctx)
511 {
512    assert(!ctx->blend_saved);
513    ctx->blend_saved = ctx->blend;
514 }
515 
516 static void
517 cso_restore_blend(struct cso_context *ctx)
518 {
519    if (ctx->blend != ctx->blend_saved) {
520       ctx->blend = ctx->blend_saved;
521       ctx->pipe->bind_blend_state(ctx->pipe, ctx->blend_saved);
522    }
523    ctx->blend_saved = NULL;
524 }
525 
526 
527 
528 enum pipe_error
529 cso_set_depth_stencil_alpha(struct cso_context *ctx,
530                             const struct pipe_depth_stencil_alpha_state *templ)
531 {
532    unsigned key_size = sizeof(struct pipe_depth_stencil_alpha_state);
533    unsigned hash_key = cso_construct_key((void*)templ, key_size);
534    struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
535                                                        hash_key,
536                                                        CSO_DEPTH_STENCIL_ALPHA,
537                                                        (void*)templ, key_size);
538    void *handle;
539 
540    if (cso_hash_iter_is_null(iter)) {
541       struct cso_depth_stencil_alpha *cso =
542          MALLOC(sizeof(struct cso_depth_stencil_alpha));
543       if (!cso)
544          return PIPE_ERROR_OUT_OF_MEMORY;
545 
546       memcpy(&cso->state, templ, sizeof(*templ));
547       cso->data = ctx->pipe->create_depth_stencil_alpha_state(ctx->pipe,
548                                                               &cso->state);
549       cso->delete_state =
550          (cso_state_callback)ctx->pipe->delete_depth_stencil_alpha_state;
551       cso->context = ctx->pipe;
552 
553       iter = cso_insert_state(ctx->cache, hash_key,
554                               CSO_DEPTH_STENCIL_ALPHA, cso);
555       if (cso_hash_iter_is_null(iter)) {
556          FREE(cso);
557          return PIPE_ERROR_OUT_OF_MEMORY;
558       }
559 
560       handle = cso->data;
561    }
562    else {
563       handle = ((struct cso_depth_stencil_alpha *)
564                 cso_hash_iter_data(iter))->data;
565    }
566 
567    if (ctx->depth_stencil != handle) {
568       ctx->depth_stencil = handle;
569       ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe, handle);
570    }
571    return PIPE_OK;
572 }
573 
574 static void
575 cso_save_depth_stencil_alpha(struct cso_context *ctx)
576 {
577    assert(!ctx->depth_stencil_saved);
578    ctx->depth_stencil_saved = ctx->depth_stencil;
579 }
580 
581 static void
582 cso_restore_depth_stencil_alpha(struct cso_context *ctx)
583 {
584    if (ctx->depth_stencil != ctx->depth_stencil_saved) {
585       ctx->depth_stencil = ctx->depth_stencil_saved;
586       ctx->pipe->bind_depth_stencil_alpha_state(ctx->pipe,
587                                                 ctx->depth_stencil_saved);
588    }
589    ctx->depth_stencil_saved = NULL;
590 }
591 
592 
593 
594 enum pipe_error cso_set_rasterizer(struct cso_context *ctx,
595                                    const struct pipe_rasterizer_state *templ)
596 {
597    unsigned key_size = sizeof(struct pipe_rasterizer_state);
598    unsigned hash_key = cso_construct_key((void*)templ, key_size);
599    struct cso_hash_iter iter = cso_find_state_template(ctx->cache,
600                                                        hash_key,
601                                                        CSO_RASTERIZER,
602                                                        (void*)templ, key_size);
603    void *handle = NULL;
604 
605    /* We can't have both point_quad_rasterization (sprites) and point_smooth
606     * (round AA points) enabled at the same time.
607     */
608    assert(!(templ->point_quad_rasterization && templ->point_smooth));
609 
610    if (cso_hash_iter_is_null(iter)) {
611       struct cso_rasterizer *cso = MALLOC(sizeof(struct cso_rasterizer));
612       if (!cso)
613          return PIPE_ERROR_OUT_OF_MEMORY;
614 
615       memcpy(&cso->state, templ, sizeof(*templ));
616       cso->data = ctx->pipe->create_rasterizer_state(ctx->pipe, &cso->state);
617       cso->delete_state =
618          (cso_state_callback)ctx->pipe->delete_rasterizer_state;
619       cso->context = ctx->pipe;
620 
621       iter = cso_insert_state(ctx->cache, hash_key, CSO_RASTERIZER, cso);
622       if (cso_hash_iter_is_null(iter)) {
623          FREE(cso);
624          return PIPE_ERROR_OUT_OF_MEMORY;
625       }
626 
627       handle = cso->data;
628    }
629    else {
630       handle = ((struct cso_rasterizer *)cso_hash_iter_data(iter))->data;
631    }
632 
633    if (ctx->rasterizer != handle) {
634       ctx->rasterizer = handle;
635       ctx->pipe->bind_rasterizer_state(ctx->pipe, handle);
636    }
637    return PIPE_OK;
638 }
639 
640 static void
641 cso_save_rasterizer(struct cso_context *ctx)
642 {
643    assert(!ctx->rasterizer_saved);
644    ctx->rasterizer_saved = ctx->rasterizer;
645 }
646 
647 static void
648 cso_restore_rasterizer(struct cso_context *ctx)
649 {
650    if (ctx->rasterizer != ctx->rasterizer_saved) {
651       ctx->rasterizer = ctx->rasterizer_saved;
652       ctx->pipe->bind_rasterizer_state(ctx->pipe, ctx->rasterizer_saved);
653    }
654    ctx->rasterizer_saved = NULL;
655 }
656 
657 
658 void cso_set_fragment_shader_handle(struct cso_context *ctx, void *handle )
659 {
660    if (ctx->fragment_shader != handle) {
661       ctx->fragment_shader = handle;
662       ctx->pipe->bind_fs_state(ctx->pipe, handle);
663    }
664 }
665 
666 static void
667 cso_save_fragment_shader(struct cso_context *ctx)
668 {
669    assert(!ctx->fragment_shader_saved);
670    ctx->fragment_shader_saved = ctx->fragment_shader;
671 }
672 
673 static void
674 cso_restore_fragment_shader(struct cso_context *ctx)
675 {
676    if (ctx->fragment_shader_saved != ctx->fragment_shader) {
677       ctx->pipe->bind_fs_state(ctx->pipe, ctx->fragment_shader_saved);
678       ctx->fragment_shader = ctx->fragment_shader_saved;
679    }
680    ctx->fragment_shader_saved = NULL;
681 }
682 
683 
684 void cso_set_vertex_shader_handle(struct cso_context *ctx, void *handle)
685 {
686    if (ctx->vertex_shader != handle) {
687       ctx->vertex_shader = handle;
688       ctx->pipe->bind_vs_state(ctx->pipe, handle);
689    }
690 }
691 
692 static void
693 cso_save_vertex_shader(struct cso_context *ctx)
694 {
695    assert(!ctx->vertex_shader_saved);
696    ctx->vertex_shader_saved = ctx->vertex_shader;
697 }
698 
699 static void
700 cso_restore_vertex_shader(struct cso_context *ctx)
701 {
702    if (ctx->vertex_shader_saved != ctx->vertex_shader) {
703       ctx->pipe->bind_vs_state(ctx->pipe, ctx->vertex_shader_saved);
704       ctx->vertex_shader = ctx->vertex_shader_saved;
705    }
706    ctx->vertex_shader_saved = NULL;
707 }
708 
709 
710 void cso_set_framebuffer(struct cso_context *ctx,
711                          const struct pipe_framebuffer_state *fb)
712 {
713    if (memcmp(&ctx->fb, fb, sizeof(*fb)) != 0) {
714       util_copy_framebuffer_state(&ctx->fb, fb);
715       ctx->pipe->set_framebuffer_state(ctx->pipe, fb);
716    }
717 }
718 
719 static void
720 cso_save_framebuffer(struct cso_context *ctx)
721 {
722    util_copy_framebuffer_state(&ctx->fb_saved, &ctx->fb);
723 }
724 
725 static void
726 cso_restore_framebuffer(struct cso_context *ctx)
727 {
728    if (memcmp(&ctx->fb, &ctx->fb_saved, sizeof(ctx->fb))) {
729       util_copy_framebuffer_state(&ctx->fb, &ctx->fb_saved);
730       ctx->pipe->set_framebuffer_state(ctx->pipe, &ctx->fb);
731       util_unreference_framebuffer_state(&ctx->fb_saved);
732    }
733 }
734 
735 
736 void cso_set_viewport(struct cso_context *ctx,
737                       const struct pipe_viewport_state *vp)
738 {
739    if (memcmp(&ctx->vp, vp, sizeof(*vp))) {
740       ctx->vp = *vp;
741       ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, vp);
742    }
743 }
744 
745 /**
746  * Set up viewport state for the given width and height (position is always (0,0)).
747  * Invert the Y axis if 'invert' is true.
748  */
749 void
750 cso_set_viewport_dims(struct cso_context *ctx,
751                       float width, float height, boolean invert)
752 {
753    struct pipe_viewport_state vp;
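   /* Maps NDC x/y in [-1,1] to window coordinates [0,width] x [0,height]
    * (optionally flipped in Y) and NDC z in [-1,1] to [0,1].
    */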
754    vp.scale[0] = width * 0.5f;
755    vp.scale[1] = height * (invert ? -0.5f : 0.5f);
756    vp.scale[2] = 0.5f;
757    vp.translate[0] = 0.5f * width;
758    vp.translate[1] = 0.5f * height;
759    vp.translate[2] = 0.5f;
760    vp.swizzle_x = PIPE_VIEWPORT_SWIZZLE_POSITIVE_X;
761    vp.swizzle_y = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Y;
762    vp.swizzle_z = PIPE_VIEWPORT_SWIZZLE_POSITIVE_Z;
763    vp.swizzle_w = PIPE_VIEWPORT_SWIZZLE_POSITIVE_W;
764    cso_set_viewport(ctx, &vp);
765 }
766 
767 static void
768 cso_save_viewport(struct cso_context *ctx)
769 {
770    ctx->vp_saved = ctx->vp;
771 }
772 
773 
774 static void
775 cso_restore_viewport(struct cso_context *ctx)
776 {
777    if (memcmp(&ctx->vp, &ctx->vp_saved, sizeof(ctx->vp))) {
778       ctx->vp = ctx->vp_saved;
779       ctx->pipe->set_viewport_states(ctx->pipe, 0, 1, &ctx->vp);
780    }
781 }
782 
783 
784 void cso_set_blend_color(struct cso_context *ctx,
785                          const struct pipe_blend_color *bc)
786 {
787    if (memcmp(&ctx->blend_color, bc, sizeof(ctx->blend_color))) {
788       ctx->blend_color = *bc;
789       ctx->pipe->set_blend_color(ctx->pipe, bc);
790    }
791 }
792 
793 void cso_set_sample_mask(struct cso_context *ctx, unsigned sample_mask)
794 {
795    if (ctx->sample_mask != sample_mask) {
796       ctx->sample_mask = sample_mask;
797       ctx->pipe->set_sample_mask(ctx->pipe, sample_mask);
798    }
799 }
800 
801 static void
802 cso_save_sample_mask(struct cso_context *ctx)
803 {
804    ctx->sample_mask_saved = ctx->sample_mask;
805 }
806 
807 static void
808 cso_restore_sample_mask(struct cso_context *ctx)
809 {
810    cso_set_sample_mask(ctx, ctx->sample_mask_saved);
811 }
812 
813 void cso_set_min_samples(struct cso_context *ctx, unsigned min_samples)
814 {
815    if (ctx->min_samples != min_samples && ctx->pipe->set_min_samples) {
816       ctx->min_samples = min_samples;
817       ctx->pipe->set_min_samples(ctx->pipe, min_samples);
818    }
819 }
820 
821 static void
822 cso_save_min_samples(struct cso_context *ctx)
823 {
824    ctx->min_samples_saved = ctx->min_samples;
825 }
826 
827 static void
828 cso_restore_min_samples(struct cso_context *ctx)
829 {
830    cso_set_min_samples(ctx, ctx->min_samples_saved);
831 }
832 
833 void cso_set_stencil_ref(struct cso_context *ctx,
834                          const struct pipe_stencil_ref *sr)
835 {
836    if (memcmp(&ctx->stencil_ref, sr, sizeof(ctx->stencil_ref))) {
837       ctx->stencil_ref = *sr;
838       ctx->pipe->set_stencil_ref(ctx->pipe, sr);
839    }
840 }
841 
842 static void
843 cso_save_stencil_ref(struct cso_context *ctx)
844 {
845    ctx->stencil_ref_saved = ctx->stencil_ref;
846 }
847 
848 
849 static void
850 cso_restore_stencil_ref(struct cso_context *ctx)
851 {
852    if (memcmp(&ctx->stencil_ref, &ctx->stencil_ref_saved,
853               sizeof(ctx->stencil_ref))) {
854       ctx->stencil_ref = ctx->stencil_ref_saved;
855       ctx->pipe->set_stencil_ref(ctx->pipe, &ctx->stencil_ref);
856    }
857 }
858 
859 void cso_set_render_condition(struct cso_context *ctx,
860                               struct pipe_query *query,
861                               boolean condition,
862                               enum pipe_render_cond_flag mode)
863 {
864    struct pipe_context *pipe = ctx->pipe;
865 
866    if (ctx->render_condition != query ||
867        ctx->render_condition_mode != mode ||
868        ctx->render_condition_cond != condition) {
869       pipe->render_condition(pipe, query, condition, mode);
870       ctx->render_condition = query;
871       ctx->render_condition_cond = condition;
872       ctx->render_condition_mode = mode;
873    }
874 }
875 
876 static void
877 cso_save_render_condition(struct cso_context *ctx)
878 {
879    ctx->render_condition_saved = ctx->render_condition;
880    ctx->render_condition_cond_saved = ctx->render_condition_cond;
881    ctx->render_condition_mode_saved = ctx->render_condition_mode;
882 }
883 
884 static void
885 cso_restore_render_condition(struct cso_context *ctx)
886 {
887    cso_set_render_condition(ctx, ctx->render_condition_saved,
888                             ctx->render_condition_cond_saved,
889                             ctx->render_condition_mode_saved);
890 }
891 
892 void cso_set_geometry_shader_handle(struct cso_context *ctx, void *handle)
893 {
894    assert(ctx->has_geometry_shader || !handle);
895 
896    if (ctx->has_geometry_shader && ctx->geometry_shader != handle) {
897       ctx->geometry_shader = handle;
898       ctx->pipe->bind_gs_state(ctx->pipe, handle);
899    }
900 }
901 
902 static void
903 cso_save_geometry_shader(struct cso_context *ctx)
904 {
905    if (!ctx->has_geometry_shader) {
906       return;
907    }
908 
909    assert(!ctx->geometry_shader_saved);
910    ctx->geometry_shader_saved = ctx->geometry_shader;
911 }
912 
913 static void
914 cso_restore_geometry_shader(struct cso_context *ctx)
915 {
916    if (!ctx->has_geometry_shader) {
917       return;
918    }
919 
920    if (ctx->geometry_shader_saved != ctx->geometry_shader) {
921       ctx->pipe->bind_gs_state(ctx->pipe, ctx->geometry_shader_saved);
922       ctx->geometry_shader = ctx->geometry_shader_saved;
923    }
924    ctx->geometry_shader_saved = NULL;
925 }
926 
927 void cso_set_tessctrl_shader_handle(struct cso_context *ctx, void *handle)
928 {
929    assert(ctx->has_tessellation || !handle);
930 
931    if (ctx->has_tessellation && ctx->tessctrl_shader != handle) {
932       ctx->tessctrl_shader = handle;
933       ctx->pipe->bind_tcs_state(ctx->pipe, handle);
934    }
935 }
936 
937 static void
938 cso_save_tessctrl_shader(struct cso_context *ctx)
939 {
940    if (!ctx->has_tessellation) {
941       return;
942    }
943 
944    assert(!ctx->tessctrl_shader_saved);
945    ctx->tessctrl_shader_saved = ctx->tessctrl_shader;
946 }
947 
948 static void
949 cso_restore_tessctrl_shader(struct cso_context *ctx)
950 {
951    if (!ctx->has_tessellation) {
952       return;
953    }
954 
955    if (ctx->tessctrl_shader_saved != ctx->tessctrl_shader) {
956       ctx->pipe->bind_tcs_state(ctx->pipe, ctx->tessctrl_shader_saved);
957       ctx->tessctrl_shader = ctx->tessctrl_shader_saved;
958    }
959    ctx->tessctrl_shader_saved = NULL;
960 }
961 
962 void cso_set_tesseval_shader_handle(struct cso_context *ctx, void *handle)
963 {
964    assert(ctx->has_tessellation || !handle);
965 
966    if (ctx->has_tessellation && ctx->tesseval_shader != handle) {
967       ctx->tesseval_shader = handle;
968       ctx->pipe->bind_tes_state(ctx->pipe, handle);
969    }
970 }
971 
972 static void
973 cso_save_tesseval_shader(struct cso_context *ctx)
974 {
975    if (!ctx->has_tessellation) {
976       return;
977    }
978 
979    assert(!ctx->tesseval_shader_saved);
980    ctx->tesseval_shader_saved = ctx->tesseval_shader;
981 }
982 
983 static void
984 cso_restore_tesseval_shader(struct cso_context *ctx)
985 {
986    if (!ctx->has_tessellation) {
987       return;
988    }
989 
990    if (ctx->tesseval_shader_saved != ctx->tesseval_shader) {
991       ctx->pipe->bind_tes_state(ctx->pipe, ctx->tesseval_shader_saved);
992       ctx->tesseval_shader = ctx->tesseval_shader_saved;
993    }
994    ctx->tesseval_shader_saved = NULL;
995 }
996 
997 void cso_set_compute_shader_handle(struct cso_context *ctx, void *handle)
998 {
999    assert(ctx->has_compute_shader || !handle);
1000 
1001    if (ctx->has_compute_shader && ctx->compute_shader != handle) {
1002       ctx->compute_shader = handle;
1003       ctx->pipe->bind_compute_state(ctx->pipe, handle);
1004    }
1005 }
1006 
1007 static void
1008 cso_set_vertex_elements_direct(struct cso_context *ctx,
1009                                const struct cso_velems_state *velems)
1010 {
1011    unsigned key_size, hash_key;
1012    struct cso_hash_iter iter;
1013    void *handle;
1014 
1015    /* Need to include the count in the stored state data too.
1016     * Otherwise the first 'count' pipe_vertex_elements could be identical
1017     * even if the counts differ, and there's no guarantee the hash would
1018     * be different in that case either.
1019     */
1020    key_size = sizeof(struct pipe_vertex_element) * velems->count +
1021               sizeof(unsigned);
1022    hash_key = cso_construct_key((void*)velems, key_size);
1023    iter = cso_find_state_template(ctx->cache, hash_key, CSO_VELEMENTS,
1024                                   (void*)velems, key_size);
1025 
1026    if (cso_hash_iter_is_null(iter)) {
1027       struct cso_velements *cso = MALLOC(sizeof(struct cso_velements));
1028       if (!cso)
1029          return;
1030 
1031       memcpy(&cso->state, velems, key_size);
1032       cso->data = ctx->pipe->create_vertex_elements_state(ctx->pipe,
1033                                                           velems->count,
1034                                                       &cso->state.velems[0]);
1035       cso->delete_state =
1036          (cso_state_callback) ctx->pipe->delete_vertex_elements_state;
1037       cso->context = ctx->pipe;
1038 
1039       iter = cso_insert_state(ctx->cache, hash_key, CSO_VELEMENTS, cso);
1040       if (cso_hash_iter_is_null(iter)) {
1041          FREE(cso);
1042          return;
1043       }
1044 
1045       handle = cso->data;
1046    }
1047    else {
1048       handle = ((struct cso_velements *)cso_hash_iter_data(iter))->data;
1049    }
1050 
1051    if (ctx->velements != handle) {
1052       ctx->velements = handle;
1053       ctx->pipe->bind_vertex_elements_state(ctx->pipe, handle);
1054    }
1055 }
1056 
1057 enum pipe_error
1058 cso_set_vertex_elements(struct cso_context *ctx,
1059                         const struct cso_velems_state *velems)
1060 {
1061    struct u_vbuf *vbuf = ctx->vbuf_current;
1062 
1063    if (vbuf) {
1064       u_vbuf_set_vertex_elements(vbuf, velems);
1065       return PIPE_OK;
1066    }
1067 
1068    cso_set_vertex_elements_direct(ctx, velems);
1069    return PIPE_OK;
1070 }
1071 
1072 static void
1073 cso_save_vertex_elements(struct cso_context *ctx)
1074 {
1075    struct u_vbuf *vbuf = ctx->vbuf_current;
1076 
1077    if (vbuf) {
1078       u_vbuf_save_vertex_elements(vbuf);
1079       return;
1080    }
1081 
1082    assert(!ctx->velements_saved);
1083    ctx->velements_saved = ctx->velements;
1084 }
1085 
1086 static void
1087 cso_restore_vertex_elements(struct cso_context *ctx)
1088 {
1089    struct u_vbuf *vbuf = ctx->vbuf_current;
1090 
1091    if (vbuf) {
1092       u_vbuf_restore_vertex_elements(vbuf);
1093       return;
1094    }
1095 
1096    if (ctx->velements != ctx->velements_saved) {
1097       ctx->velements = ctx->velements_saved;
1098       ctx->pipe->bind_vertex_elements_state(ctx->pipe, ctx->velements_saved);
1099    }
1100    ctx->velements_saved = NULL;
1101 }
1102 
1103 /* vertex buffers */
1104 
1105 static void
1106 cso_set_vertex_buffers_direct(struct cso_context *ctx,
1107                               unsigned start_slot, unsigned count,
1108                               const struct pipe_vertex_buffer *buffers)
1109 {
1110    /* Save what's in the auxiliary slot, so that we can save and restore it
1111     * for meta ops.
1112     */
1113    if (start_slot == 0) {
1114       if (buffers) {
1115          pipe_vertex_buffer_reference(&ctx->vertex_buffer0_current,
1116                                       buffers);
1117       } else {
1118          pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_current);
1119       }
1120    }
1121 
1122    ctx->pipe->set_vertex_buffers(ctx->pipe, start_slot, count, buffers);
1123 }
1124 
1125 
1126 void cso_set_vertex_buffers(struct cso_context *ctx,
1127                             unsigned start_slot, unsigned count,
1128                             const struct pipe_vertex_buffer *buffers)
1129 {
1130    struct u_vbuf *vbuf = ctx->vbuf_current;
1131 
1132    if (!count)
1133       return;
1134 
1135    if (vbuf) {
1136       u_vbuf_set_vertex_buffers(vbuf, start_slot, count, buffers);
1137       return;
1138    }
1139 
1140    cso_set_vertex_buffers_direct(ctx, start_slot, count, buffers);
1141 }
1142 
1143 static void
1144 cso_save_vertex_buffer0(struct cso_context *ctx)
1145 {
1146    struct u_vbuf *vbuf = ctx->vbuf_current;
1147 
1148    if (vbuf) {
1149       u_vbuf_save_vertex_buffer0(vbuf);
1150       return;
1151    }
1152 
1153    pipe_vertex_buffer_reference(&ctx->vertex_buffer0_saved,
1154                                 &ctx->vertex_buffer0_current);
1155 }
1156 
1157 static void
1158 cso_restore_vertex_buffer0(struct cso_context *ctx)
1159 {
1160    struct u_vbuf *vbuf = ctx->vbuf_current;
1161 
1162    if (vbuf) {
1163       u_vbuf_restore_vertex_buffer0(vbuf);
1164       return;
1165    }
1166 
1167    cso_set_vertex_buffers(ctx, 0, 1, &ctx->vertex_buffer0_saved);
1168    pipe_vertex_buffer_unreference(&ctx->vertex_buffer0_saved);
1169 }
1170 
1171 /**
1172  * Set vertex buffers and vertex elements. u_vbuf is skipped (and disabled)
1173  * if it is only needed for user vertex buffers and no user vertex buffers
1174  * are set by this call. To re-enable u_vbuf, call this function again.
1175  *
1176  * Skipping u_vbuf decreases CPU overhead for draw calls that don't need it,
1177  * such as VBOs, glBegin/End, and display lists.
1178  *
1179  * Internal operations that do "save states, draw, restore states" shouldn't
1180  * use this, because the states are only saved in either cso_context or
1181  * u_vbuf, not both.
1182  */
1183 void
1184 cso_set_vertex_buffers_and_elements(struct cso_context *ctx,
1185                                     const struct cso_velems_state *velems,
1186                                     unsigned vb_count,
1187                                     unsigned unbind_trailing_vb_count,
1188                                     const struct pipe_vertex_buffer *vbuffers,
1189                                     bool uses_user_vertex_buffers)
1190 {
1191    struct u_vbuf *vbuf = ctx->vbuf;
1192 
1193    if (vbuf && (ctx->always_use_vbuf || uses_user_vertex_buffers)) {
1194       if (!ctx->vbuf_current) {
1195          /* Unbind all buffers in cso_context, because we'll use u_vbuf. */
1196          unsigned unbind_vb_count = vb_count + unbind_trailing_vb_count;
1197          if (unbind_vb_count)
1198             cso_set_vertex_buffers_direct(ctx, 0, unbind_vb_count, NULL);
1199 
1200          /* Unset this to make sure the CSO is re-bound on the next use. */
1201          ctx->velements = NULL;
1202          ctx->vbuf_current = vbuf;
1203       } else if (unbind_trailing_vb_count) {
1204          u_vbuf_set_vertex_buffers(vbuf, vb_count, unbind_trailing_vb_count,
1205                                    NULL);
1206       }
1207 
1208       if (vb_count)
1209          u_vbuf_set_vertex_buffers(vbuf, 0, vb_count, vbuffers);
1210       u_vbuf_set_vertex_elements(vbuf, velems);
1211       return;
1212    }
1213 
1214    if (ctx->vbuf_current) {
1215       /* Unbind all buffers in u_vbuf, because we'll use cso_context. */
1216       unsigned unbind_vb_count = vb_count + unbind_trailing_vb_count;
1217       if (unbind_vb_count)
1218          u_vbuf_set_vertex_buffers(vbuf, 0, unbind_vb_count, NULL);
1219 
1220       /* Unset this to make sure the CSO is re-bound on the next use. */
1221       u_vbuf_unset_vertex_elements(vbuf);
1222       ctx->vbuf_current = NULL;
1223    } else if (unbind_trailing_vb_count) {
1224       cso_set_vertex_buffers_direct(ctx, vb_count, unbind_trailing_vb_count,
1225                                     NULL);
1226    }
1227 
1228    if (vb_count)
1229       cso_set_vertex_buffers_direct(ctx, 0, vb_count, vbuffers);
1230    cso_set_vertex_elements_direct(ctx, velems);
1231 }
1232 
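/**
 * Stage a single sampler state for the given shader stage and slot.
 * Nothing is bound in the driver until cso_single_sampler_done() is called,
 * which then binds all samplers up to the highest slot staged so far.
 */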
1233 void
1234 cso_single_sampler(struct cso_context *ctx, enum pipe_shader_type shader_stage,
1235                    unsigned idx, const struct pipe_sampler_state *templ)
1236 {
1237    if (templ) {
1238       unsigned key_size = sizeof(struct pipe_sampler_state);
1239       unsigned hash_key = cso_construct_key((void*)templ, key_size);
1240       struct cso_sampler *cso;
1241       struct cso_hash_iter iter =
1242          cso_find_state_template(ctx->cache,
1243                                  hash_key, CSO_SAMPLER,
1244                                  (void *) templ, key_size);
1245 
1246       if (cso_hash_iter_is_null(iter)) {
1247          cso = MALLOC(sizeof(struct cso_sampler));
1248          if (!cso)
1249             return;
1250 
1251          memcpy(&cso->state, templ, sizeof(*templ));
1252          cso->data = ctx->pipe->create_sampler_state(ctx->pipe, &cso->state);
1253          cso->delete_state =
1254             (cso_state_callback) ctx->pipe->delete_sampler_state;
1255          cso->context = ctx->pipe;
1256          cso->hash_key = hash_key;
1257 
1258          iter = cso_insert_state(ctx->cache, hash_key, CSO_SAMPLER, cso);
1259          if (cso_hash_iter_is_null(iter)) {
1260             FREE(cso);
1261             return;
1262          }
1263       }
1264       else {
1265          cso = cso_hash_iter_data(iter);
1266       }
1267 
1268       ctx->samplers[shader_stage].cso_samplers[idx] = cso;
1269       ctx->samplers[shader_stage].samplers[idx] = cso->data;
1270       ctx->max_sampler_seen = MAX2(ctx->max_sampler_seen, (int)idx);
1271    }
1272 }
1273 
1274 
1275 /**
1276  * Send staged sampler state to the driver.
1277  */
1278 void
1279 cso_single_sampler_done(struct cso_context *ctx,
1280                         enum pipe_shader_type shader_stage)
1281 {
1282    struct sampler_info *info = &ctx->samplers[shader_stage];
1283 
1284    if (ctx->max_sampler_seen == -1)
1285       return;
1286 
1287    ctx->pipe->bind_sampler_states(ctx->pipe, shader_stage, 0,
1288                                   ctx->max_sampler_seen + 1,
1289                                   info->samplers);
1290    ctx->max_sampler_seen = -1;
1291 }
1292 
1293 
1294 /*
1295  * If the function encounters any errors it will return the
1296  * last one. This is done to always try to set as many samplers
1297  * as possible.
1298  */
1299 void
1300 cso_set_samplers(struct cso_context *ctx,
1301                  enum pipe_shader_type shader_stage,
1302                  unsigned nr,
1303                  const struct pipe_sampler_state **templates)
1304 {
1305    for (unsigned i = 0; i < nr; i++)
1306       cso_single_sampler(ctx, shader_stage, i, templates[i]);
1307 
1308    cso_single_sampler_done(ctx, shader_stage);
1309 }
1310 
1311 static void
1312 cso_save_fragment_samplers(struct cso_context *ctx)
1313 {
1314    struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1315    struct sampler_info *saved = &ctx->fragment_samplers_saved;
1316 
1317    memcpy(saved->cso_samplers, info->cso_samplers,
1318           sizeof(info->cso_samplers));
1319    memcpy(saved->samplers, info->samplers, sizeof(info->samplers));
1320 }
1321 
1322 
1323 static void
1324 cso_restore_fragment_samplers(struct cso_context *ctx)
1325 {
1326    struct sampler_info *info = &ctx->samplers[PIPE_SHADER_FRAGMENT];
1327    struct sampler_info *saved = &ctx->fragment_samplers_saved;
1328 
1329    memcpy(info->cso_samplers, saved->cso_samplers,
1330           sizeof(info->cso_samplers));
1331    memcpy(info->samplers, saved->samplers, sizeof(info->samplers));
1332 
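   /* Recompute the highest bound slot so that cso_single_sampler_done()
    * re-binds every restored sampler.
    */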
1333    for (int i = PIPE_MAX_SAMPLERS - 1; i >= 0; i--) {
1334       if (info->samplers[i]) {
1335          ctx->max_sampler_seen = i;
1336          break;
1337       }
1338    }
1339 
1340    cso_single_sampler_done(ctx, PIPE_SHADER_FRAGMENT);
1341 }
1342 
1343 
1344 void
1345 cso_set_sampler_views(struct cso_context *ctx,
1346                       enum pipe_shader_type shader_stage,
1347                       unsigned count,
1348                       struct pipe_sampler_view **views)
1349 {
1350    if (shader_stage == PIPE_SHADER_FRAGMENT) {
1351       unsigned i;
1352       boolean any_change = FALSE;
1353 
1354       /* reference new views */
1355       for (i = 0; i < count; i++) {
1356          any_change |= ctx->fragment_views[i] != views[i];
1357          pipe_sampler_view_reference(&ctx->fragment_views[i], views[i]);
1358       }
1359       /* unref extra old views, if any */
1360       for (; i < ctx->nr_fragment_views; i++) {
1361          any_change |= ctx->fragment_views[i] != NULL;
1362          pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1363       }
1364 
1365       /* bind the new sampler views */
1366       if (any_change) {
1367          ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0,
1368                                       MAX2(ctx->nr_fragment_views, count),
1369                                       ctx->fragment_views);
1370       }
1371 
1372       ctx->nr_fragment_views = count;
1373    }
1374    else
1375       ctx->pipe->set_sampler_views(ctx->pipe, shader_stage, 0, count, views);
1376 }
1377 
1378 
1379 static void
1380 cso_save_fragment_sampler_views(struct cso_context *ctx)
1381 {
1382    unsigned i;
1383 
1384    ctx->nr_fragment_views_saved = ctx->nr_fragment_views;
1385 
1386    for (i = 0; i < ctx->nr_fragment_views; i++) {
1387       assert(!ctx->fragment_views_saved[i]);
1388       pipe_sampler_view_reference(&ctx->fragment_views_saved[i],
1389                                   ctx->fragment_views[i]);
1390    }
1391 }
1392 
1393 
1394 static void
1395 cso_restore_fragment_sampler_views(struct cso_context *ctx)
1396 {
1397    unsigned i, nr_saved = ctx->nr_fragment_views_saved;
1398    unsigned num;
1399 
1400    for (i = 0; i < nr_saved; i++) {
1401       pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1402       /* move the reference from one pointer to another */
1403       ctx->fragment_views[i] = ctx->fragment_views_saved[i];
1404       ctx->fragment_views_saved[i] = NULL;
1405    }
1406    for (; i < ctx->nr_fragment_views; i++) {
1407       pipe_sampler_view_reference(&ctx->fragment_views[i], NULL);
1408    }
1409 
1410    num = MAX2(ctx->nr_fragment_views, nr_saved);
1411 
1412    /* bind the old/saved sampler views */
1413    ctx->pipe->set_sampler_views(ctx->pipe, PIPE_SHADER_FRAGMENT, 0, num,
1414                                 ctx->fragment_views);
1415 
1416    ctx->nr_fragment_views = nr_saved;
1417    ctx->nr_fragment_views_saved = 0;
1418 }
1419 
1420 
1421 void
1422 cso_set_shader_images(struct cso_context *ctx,
1423                       enum pipe_shader_type shader_stage,
1424                       unsigned start, unsigned count,
1425                       struct pipe_image_view *images)
1426 {
1427    if (shader_stage == PIPE_SHADER_FRAGMENT && start == 0 && count >= 1) {
1428       util_copy_image_view(&ctx->fragment_image0_current, &images[0]);
1429    }
1430 
1431    ctx->pipe->set_shader_images(ctx->pipe, shader_stage, start, count, images);
1432 }
1433 
1434 
1435 static void
1436 cso_save_fragment_image0(struct cso_context *ctx)
1437 {
1438    util_copy_image_view(&ctx->fragment_image0_saved,
1439                         &ctx->fragment_image0_current);
1440 }
1441 
1442 
1443 static void
1444 cso_restore_fragment_image0(struct cso_context *ctx)
1445 {
1446    cso_set_shader_images(ctx, PIPE_SHADER_FRAGMENT, 0, 1,
1447                          &ctx->fragment_image0_saved);
1448 }
1449 
1450 
1451 void
1452 cso_set_stream_outputs(struct cso_context *ctx,
1453                        unsigned num_targets,
1454                        struct pipe_stream_output_target **targets,
1455                        const unsigned *offsets)
1456 {
1457    struct pipe_context *pipe = ctx->pipe;
1458    uint i;
1459 
1460    if (!ctx->has_streamout) {
1461       assert(num_targets == 0);
1462       return;
1463    }
1464 
1465    if (ctx->nr_so_targets == 0 && num_targets == 0) {
1466       /* Nothing to do. */
1467       return;
1468    }
1469 
1470    /* reference new targets */
1471    for (i = 0; i < num_targets; i++) {
1472       pipe_so_target_reference(&ctx->so_targets[i], targets[i]);
1473    }
1474    /* unref extra old targets, if any */
1475    for (; i < ctx->nr_so_targets; i++) {
1476       pipe_so_target_reference(&ctx->so_targets[i], NULL);
1477    }
1478 
1479    pipe->set_stream_output_targets(pipe, num_targets, targets,
1480                                    offsets);
1481    ctx->nr_so_targets = num_targets;
1482 }
1483 
1484 static void
1485 cso_save_stream_outputs(struct cso_context *ctx)
1486 {
1487    uint i;
1488 
1489    if (!ctx->has_streamout) {
1490       return;
1491    }
1492 
1493    ctx->nr_so_targets_saved = ctx->nr_so_targets;
1494 
1495    for (i = 0; i < ctx->nr_so_targets; i++) {
1496       assert(!ctx->so_targets_saved[i]);
1497       pipe_so_target_reference(&ctx->so_targets_saved[i], ctx->so_targets[i]);
1498    }
1499 }
1500 
1501 static void
1502 cso_restore_stream_outputs(struct cso_context *ctx)
1503 {
1504    struct pipe_context *pipe = ctx->pipe;
1505    uint i;
1506    unsigned offset[PIPE_MAX_SO_BUFFERS];
1507 
1508    if (!ctx->has_streamout) {
1509       return;
1510    }
1511 
1512    if (ctx->nr_so_targets == 0 && ctx->nr_so_targets_saved == 0) {
1513       /* Nothing to do. */
1514       return;
1515    }
1516 
1517    assert(ctx->nr_so_targets_saved <= PIPE_MAX_SO_BUFFERS);
1518    for (i = 0; i < ctx->nr_so_targets_saved; i++) {
1519       pipe_so_target_reference(&ctx->so_targets[i], NULL);
1520       /* move the reference from one pointer to another */
1521       ctx->so_targets[i] = ctx->so_targets_saved[i];
1522       ctx->so_targets_saved[i] = NULL;
1523       /* -1 means append */
1524       offset[i] = (unsigned)-1;
1525    }
1526    for (; i < ctx->nr_so_targets; i++) {
1527       pipe_so_target_reference(&ctx->so_targets[i], NULL);
1528    }
1529 
1530    pipe->set_stream_output_targets(pipe, ctx->nr_so_targets_saved,
1531                                    ctx->so_targets, offset);
1532 
1533    ctx->nr_so_targets = ctx->nr_so_targets_saved;
1534    ctx->nr_so_targets_saved = 0;
1535 }

/* constant buffers */

void
cso_set_constant_buffer(struct cso_context *cso,
                        enum pipe_shader_type shader_stage,
                        unsigned index, struct pipe_constant_buffer *cb)
{
   struct pipe_context *pipe = cso->pipe;

   pipe->set_constant_buffer(pipe, shader_stage, index, cb);

   if (index == 0) {
      util_copy_constant_buffer(&cso->aux_constbuf_current[shader_stage], cb);
   }
}

void
cso_set_constant_buffer_resource(struct cso_context *cso,
                                 enum pipe_shader_type shader_stage,
                                 unsigned index,
                                 struct pipe_resource *buffer)
{
   if (buffer) {
      struct pipe_constant_buffer cb;
      cb.buffer = buffer;
      cb.buffer_offset = 0;
      cb.buffer_size = buffer->width0;
      cb.user_buffer = NULL;
      cso_set_constant_buffer(cso, shader_stage, index, &cb);
   } else {
      cso_set_constant_buffer(cso, shader_stage, index, NULL);
   }
}

void
cso_set_constant_user_buffer(struct cso_context *cso,
                             enum pipe_shader_type shader_stage,
                             unsigned index, void *ptr, unsigned size)
{
   if (ptr) {
      struct pipe_constant_buffer cb;
      cb.buffer = NULL;
      cb.buffer_offset = 0;
      cb.buffer_size = size;
      cb.user_buffer = ptr;
      cso_set_constant_buffer(cso, shader_stage, index, &cb);
   } else {
      cso_set_constant_buffer(cso, shader_stage, index, NULL);
   }
}

void
cso_save_constant_buffer_slot0(struct cso_context *cso,
                               enum pipe_shader_type shader_stage)
{
   util_copy_constant_buffer(&cso->aux_constbuf_saved[shader_stage],
                             &cso->aux_constbuf_current[shader_stage]);
}

void
cso_restore_constant_buffer_slot0(struct cso_context *cso,
                                  enum pipe_shader_type shader_stage)
{
   cso_set_constant_buffer(cso, shader_stage, 0,
                           &cso->aux_constbuf_saved[shader_stage]);
   pipe_resource_reference(&cso->aux_constbuf_saved[shader_stage].buffer,
                           NULL);
}
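
/*
 * A minimal usage sketch (hypothetical caller code): a meta operation
 * that needs its own constants in slot 0 can save the current binding,
 * upload a user buffer, and restore the original binding afterwards.
 *
 *    float params[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
 *    cso_save_constant_buffer_slot0(cso, PIPE_SHADER_FRAGMENT);
 *    cso_set_constant_user_buffer(cso, PIPE_SHADER_FRAGMENT, 0,
 *                                 params, sizeof(params));
 *    ... draw ...
 *    cso_restore_constant_buffer_slot0(cso, PIPE_SHADER_FRAGMENT);
 */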


/**
 * Save all the CSO state items specified by the state_mask bitmask
 * of CSO_BIT_x flags.
 */
void
cso_save_state(struct cso_context *cso, unsigned state_mask)
{
   assert(cso->saved_state == 0);

   cso->saved_state = state_mask;

   if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
      cso_save_vertex_buffer0(cso);
   if (state_mask & CSO_BIT_BLEND)
      cso_save_blend(cso);
   if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
      cso_save_depth_stencil_alpha(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
      cso_save_fragment_samplers(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
      cso_save_fragment_sampler_views(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SHADER)
      cso_save_fragment_shader(cso);
   if (state_mask & CSO_BIT_FRAMEBUFFER)
      cso_save_framebuffer(cso);
   if (state_mask & CSO_BIT_GEOMETRY_SHADER)
      cso_save_geometry_shader(cso);
   if (state_mask & CSO_BIT_MIN_SAMPLES)
      cso_save_min_samples(cso);
   if (state_mask & CSO_BIT_RASTERIZER)
      cso_save_rasterizer(cso);
   if (state_mask & CSO_BIT_RENDER_CONDITION)
      cso_save_render_condition(cso);
   if (state_mask & CSO_BIT_SAMPLE_MASK)
      cso_save_sample_mask(cso);
   if (state_mask & CSO_BIT_STENCIL_REF)
      cso_save_stencil_ref(cso);
   if (state_mask & CSO_BIT_STREAM_OUTPUTS)
      cso_save_stream_outputs(cso);
   if (state_mask & CSO_BIT_TESSCTRL_SHADER)
      cso_save_tessctrl_shader(cso);
   if (state_mask & CSO_BIT_TESSEVAL_SHADER)
      cso_save_tesseval_shader(cso);
   if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
      cso_save_vertex_elements(cso);
   if (state_mask & CSO_BIT_VERTEX_SHADER)
      cso_save_vertex_shader(cso);
   if (state_mask & CSO_BIT_VIEWPORT)
      cso_save_viewport(cso);
   if (state_mask & CSO_BIT_PAUSE_QUERIES)
      cso->pipe->set_active_query_state(cso->pipe, false);
   if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
      cso_save_fragment_image0(cso);
}


/**
 * Restore the state which was saved by cso_save_state().
 */
void
cso_restore_state(struct cso_context *cso)
{
   unsigned state_mask = cso->saved_state;

   assert(state_mask);

   if (state_mask & CSO_BIT_AUX_VERTEX_BUFFER_SLOT)
      cso_restore_vertex_buffer0(cso);
   if (state_mask & CSO_BIT_BLEND)
      cso_restore_blend(cso);
   if (state_mask & CSO_BIT_DEPTH_STENCIL_ALPHA)
      cso_restore_depth_stencil_alpha(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLERS)
      cso_restore_fragment_samplers(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SAMPLER_VIEWS)
      cso_restore_fragment_sampler_views(cso);
   if (state_mask & CSO_BIT_FRAGMENT_SHADER)
      cso_restore_fragment_shader(cso);
   if (state_mask & CSO_BIT_FRAMEBUFFER)
      cso_restore_framebuffer(cso);
   if (state_mask & CSO_BIT_GEOMETRY_SHADER)
      cso_restore_geometry_shader(cso);
   if (state_mask & CSO_BIT_MIN_SAMPLES)
      cso_restore_min_samples(cso);
   if (state_mask & CSO_BIT_RASTERIZER)
      cso_restore_rasterizer(cso);
   if (state_mask & CSO_BIT_RENDER_CONDITION)
      cso_restore_render_condition(cso);
   if (state_mask & CSO_BIT_SAMPLE_MASK)
      cso_restore_sample_mask(cso);
   if (state_mask & CSO_BIT_STENCIL_REF)
      cso_restore_stencil_ref(cso);
   if (state_mask & CSO_BIT_STREAM_OUTPUTS)
      cso_restore_stream_outputs(cso);
   if (state_mask & CSO_BIT_TESSCTRL_SHADER)
      cso_restore_tessctrl_shader(cso);
   if (state_mask & CSO_BIT_TESSEVAL_SHADER)
      cso_restore_tesseval_shader(cso);
   if (state_mask & CSO_BIT_VERTEX_ELEMENTS)
      cso_restore_vertex_elements(cso);
   if (state_mask & CSO_BIT_VERTEX_SHADER)
      cso_restore_vertex_shader(cso);
   if (state_mask & CSO_BIT_VIEWPORT)
      cso_restore_viewport(cso);
   if (state_mask & CSO_BIT_PAUSE_QUERIES)
      cso->pipe->set_active_query_state(cso->pipe, true);
   if (state_mask & CSO_BIT_FRAGMENT_IMAGE0)
      cso_restore_fragment_image0(cso);

   cso->saved_state = 0;
}
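
/*
 * A minimal usage sketch (hypothetical caller code): a typical meta
 * operation saves exactly the state it is about to clobber, binds its
 * own objects, draws, and then restores everything in one call.
 *
 *    cso_save_state(cso, CSO_BIT_BLEND |
 *                        CSO_BIT_FRAGMENT_SHADER |
 *                        CSO_BIT_VERTEX_SHADER |
 *                        CSO_BIT_VERTEX_ELEMENTS |
 *                        CSO_BIT_VIEWPORT);
 *    ... bind temporary state and draw ...
 *    cso_restore_state(cso);
 */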



/* drawing */

void
cso_draw_vbo(struct cso_context *cso,
             const struct pipe_draw_info *info)
{
   struct u_vbuf *vbuf = cso->vbuf_current;

   /* We can't have both indirect drawing and SO-vertex-count drawing */
   assert(info->indirect == NULL || info->count_from_stream_output == NULL);

   /* We can't have SO-vertex-count drawing with an index buffer */
   assert(info->count_from_stream_output == NULL || info->index_size == 0);

   if (vbuf) {
      u_vbuf_draw_vbo(vbuf, info);
   } else {
      struct pipe_context *pipe = cso->pipe;
      pipe->draw_vbo(pipe, info);
   }
}
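
/*
 * A minimal usage sketch (hypothetical caller code): indexed draws go
 * through the same entry point; only the index-buffer fields of
 * pipe_draw_info differ from the array helpers below.  "ibuf" and
 * "num_indices" are assumptions, not names used in this file.
 *
 *    struct pipe_draw_info info;
 *    util_draw_init_info(&info);
 *    info.mode = PIPE_PRIM_TRIANGLES;
 *    info.index_size = 2;                  // 16-bit indices
 *    info.index.resource = ibuf;
 *    info.start = 0;
 *    info.count = num_indices;
 *    cso_draw_vbo(cso, &info);
 */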

void
cso_draw_arrays(struct cso_context *cso, uint mode, uint start, uint count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   info.min_index = start;
   info.max_index = start + count - 1;

   cso_draw_vbo(cso, &info);
}

void
cso_draw_arrays_instanced(struct cso_context *cso, uint mode,
                          uint start, uint count,
                          uint start_instance, uint instance_count)
{
   struct pipe_draw_info info;

   util_draw_init_info(&info);

   info.mode = mode;
   info.start = start;
   info.count = count;
   info.min_index = start;
   info.max_index = start + count - 1;
   info.start_instance = start_instance;
   info.instance_count = instance_count;

   cso_draw_vbo(cso, &info);
}