/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"
#include "spirv.h"
#include "vtn_generator_ids.h"

struct vtn_builder;
struct vtn_decoration;

void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)

void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)

void _vtn_err(struct vtn_builder *b, const char *file, unsigned line,
              const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_err(...) _vtn_err(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp.  This being safe relies on three things:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder.  This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
NORETURN void
_vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
          const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)
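
/* A sketch of the pattern spirv_to_nir() is expected to follow (simplified,
 * not the literal implementation):
 *
 *    struct vtn_builder *b = vtn_create_builder(words, word_count, stage,
 *                                               entry_point_name, options);
 *    if (setjmp(b->fail_jump)) {
 *       // Any vtn_fail() during parsing lands here.  Everything allocated
 *       // while parsing is ralloc-parented to b, so one free cleans up.
 *       ralloc_free(b);
 *       return NULL;
 *    }
 *    ... run the SPIR-V -> NIR conversion; vtn_fail() longjmps back ...
 */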

/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)

#define _vtn_fail_with(t, msg, v) \
   vtn_fail("%s: %s (%u)\n", msg, spirv_ ## t ## _to_string(v), v)

#define vtn_fail_with_decoration(msg, v) _vtn_fail_with(decoration, msg, v)
#define vtn_fail_with_opcode(msg, v)     _vtn_fail_with(op, msg, v)
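
/* For example, assuming an unhandled opcode `opcode` in a handler,
 *
 *    vtn_fail_with_opcode("Unhandled opcode", opcode);
 *
 * expands to vtn_fail("%s: %s (%u)\n", "Unhandled opcode",
 * spirv_op_to_string(opcode), opcode), logging both the symbolic name and
 * the raw opcode value before bailing out.
 */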

/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code.  Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)

enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
};

enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_if_merge,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_loop_back_edge,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};

enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_case,
   vtn_cf_node_type_switch,
   vtn_cf_node_type_function,
};

struct vtn_cf_node {
   struct list_head link;
   struct vtn_cf_node *parent;
   enum vtn_cf_node_type type;
};

struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop.  This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   struct vtn_block *header_block;
   struct vtn_block *cont_block;
   struct vtn_block *break_block;

   SpvLoopControlMask control;
};

struct vtn_if {
   struct vtn_cf_node node;

   uint32_t condition;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   struct vtn_block *merge_block;

   SpvSelectionControlMask control;
};

struct vtn_case {
   struct vtn_cf_node node;

   struct vtn_block *block;

   enum vtn_branch_type type;
   struct list_head body;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};

struct vtn_switch {
   struct vtn_cf_node node;

   uint32_t selector;

   struct list_head cases;

   struct vtn_block *break_block;
};

struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /* The CF node for which this is a merge target
    *
    * The SPIR-V spec requires that any given block can be the merge target
    * for at most one merge instruction.  If this block is a merge target,
    * this points back to the block containing that merge instruction.
    */
   struct vtn_cf_node *merge_cf_node;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};

struct vtn_function {
   struct vtn_cf_node node;

   struct vtn_type *type;

   bool referenced;
   bool emitted;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   const uint32_t *end;

   SpvFunctionControlMask control;
};

#define VTN_DECL_CF_NODE_CAST(_type)               \
static inline struct vtn_##_type *                 \
vtn_cf_node_as_##_type(struct vtn_cf_node *node)   \
{                                                  \
   assert(node->type == vtn_cf_node_type_##_type); \
   return (struct vtn_##_type *)node;              \
}

VTN_DECL_CF_NODE_CAST(block)
VTN_DECL_CF_NODE_CAST(loop)
VTN_DECL_CF_NODE_CAST(if)
VTN_DECL_CF_NODE_CAST(case)
VTN_DECL_CF_NODE_CAST(switch)
VTN_DECL_CF_NODE_CAST(function)

#define vtn_foreach_cf_node(node, cf_list) \
   list_for_each_entry(struct vtn_cf_node, node, cf_list, link)
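
/* A sketch of the typical traversal pattern: walk a CF list and downcast
 * each node based on its type.  The emit_block helper here is illustrative,
 * not part of this header.
 *
 *    vtn_foreach_cf_node(node, &func->body) {
 *       switch (node->type) {
 *       case vtn_cf_node_type_block:
 *          emit_block(b, vtn_cf_node_as_block(node));
 *          break;
 *       case vtn_cf_node_type_loop: {
 *          struct vtn_loop *loop = vtn_cf_node_as_loop(node);
 *          // ...recurse into loop->body and loop->cont_body...
 *          break;
 *       }
 *       default:
 *          break;
 *       }
 *    }
 */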

typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);

void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);
void vtn_handle_function_call(struct vtn_builder *b, SpvOp opcode,
                              const uint32_t *w, unsigned count);

const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);

struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value.  The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;
};
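
/* Representation sketch: a scalar or vector is a leaf with `def` set, while
 * composites use `elems`, one vtn_ssa_value per element.  For example, a
 * mat4 holds elems[0..3], each of which is a vec4 leaf whose `def` is a
 * 4-component nir_ssa_def.  Structs and arrays nest the same way.
 */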

enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};

struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;

         /* for structs and unions, whether the members are packed with no
          * implicit alignment padding.
          *
          * Set by the CPacked and Alignment decorations in kernels.
          */
         bool packed:1;
      };

      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type of the dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         uint32_t align;
      };

      /* Members for image types */
      struct {
         /* GLSL image type for this type.  This is not to be confused with
          * vtn_type::type which is actually going to be the GLSL type for a
          * pointer to an image, likely a uint32_t.
          */
         const struct glsl_type *glsl_image;

         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};

bool vtn_type_contains_block(struct vtn_builder *b, struct vtn_type *type);

bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);

struct vtn_type *vtn_type_without_array(struct vtn_type *type);

struct vtn_variable;

enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};

struct vtn_access_link {
   enum vtn_access_mode mode;
   int64_t id;
};

struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array.  This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   /** Struct elements and array offsets.
    *
    * This is an array of 1 so that it can conveniently be created on the
    * stack but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};
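
/* Allocation sketch for the trailing link[] array (illustrative; assumes a
 * desired chain length `len` and ralloc parenting to the builder):
 *
 *    struct vtn_access_chain *chain =
 *       rzalloc_size(b, sizeof(struct vtn_access_chain) +
 *                       (MAX2(len, 1) - 1) * sizeof(struct vtn_access_link));
 *    chain->length = len;
 */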

enum vtn_variable_mode {
   vtn_variable_mode_function,
   vtn_variable_mode_private,
   vtn_variable_mode_uniform,
   vtn_variable_mode_atomic_counter,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_phys_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_cross_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
   vtn_variable_mode_image,
};

struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy.  It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain or if the access chain starts at a deref.
    */
   struct vtn_variable *var;

   /** The NIR deref corresponding to this pointer */
   nir_deref_instr *deref;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;

   /* Access qualifiers */
   enum gl_access_qualifier access;
};

bool vtn_mode_uses_ssa_offset(struct vtn_builder *b,
                              enum vtn_variable_mode mode);

static inline bool vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
                                               struct vtn_pointer *ptr)
{
   return vtn_mode_uses_ssa_offset(b, ptr->mode);
}


struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   bool explicit_binding;
   unsigned offset;
   unsigned input_attachment_index;
   bool patch;

   nir_variable *var;

   /* If the variable is a struct with a location set on it, the location is
    * stored here and used to calculate locations for members that don't have
    * their own explicit location.
    */
   int base_location;

   int shared_location;

   /**
    * Some early released versions of GLSLang implemented all function calls
    * by making copies of all parameters into temporary variables and passing
    * those variables into the function.  It even did so for samplers and
    * images, which violates the SPIR-V spec.  Unfortunately, two games
    * (Talos Principle and Doom) shipped with this old version of GLSLang and
    * also happen to pass samplers into functions.  Talos Principle received
    * an update fairly shortly after release with an updated GLSLang.  Doom,
    * on the other hand, has never received an update so we need to work
    * around this GLSLang issue in SPIR-V -> NIR.  Hopefully, we can drop this
    * hack at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;

   /* Access qualifiers. */
   enum gl_access_qualifier access;
};

const struct glsl_type *
vtn_type_get_nir_type(struct vtn_builder *b, struct vtn_type *type,
                      enum vtn_variable_mode mode);

struct vtn_image_pointer {
   nir_deref_instr *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
   nir_ssa_def *lod;
};

struct vtn_value {
   enum vtn_value_type value_type;

   /* Workaround for https://gitlab.freedesktop.org/mesa/mesa/-/issues/3406
    * Only set for OpImage / OpSampledImage. Note that this is in addition
    * to the existence of a NonUniform decoration on this value.
    */
   uint32_t propagated_non_uniform : 1;

   const char *name;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      char *str;
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};
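
/* Which union member is valid is determined by value_type: e.g. a
 * vtn_value_type_constant value uses `constant` while a vtn_value_type_ssa
 * value uses `ssa`.
 */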

#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0

struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration.  Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *operands;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};
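
/* Per the scope field description above, a decoration on struct member 2 is
 * stored with scope == VTN_DEC_STRUCT_MEMBER0 + 2, while an ordinary
 * decoration on the value itself uses scope == VTN_DEC_DECORATION.
 */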

struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   nir_shader *shader;
   struct spirv_to_nir_options *options;
   struct vtn_block *block;

   /* Current offset, file, line, and column.  Useful for debugging.  Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   char *file;
   int line, col;

   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function. So while we parse each function, we
    * keep a hash table of the constants we've resolved to SSA values so
    * far, and we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;

   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   /* Information on the origin of the SPIR-V */
   enum vtn_generator generator_id;
   SpvSourceLanguage source_lang;

   /* True if we need to fix up CS OpControlBarrier */
   bool wa_glslang_cs_barrier;

   /* Workaround discard bugs in HLSL -> SPIR-V compilers */
   bool uses_demote_to_helper_invocation;
   bool convert_discard_to_demote;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   struct vtn_value *workgroup_size_builtin;
   bool variable_pointers;

   struct vtn_function *func;
   struct list_head functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;
   bool has_kill;

   /* false by default, set to true by the ContractionOff execution mode */
   bool exact;

   /* true when a physical memory model is chosen */
   bool physical_ptrs;

   /* memory model specified by OpMemoryModel */
   unsigned mem_model;
};

nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);

static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}

/* Consider not using this function directly and instead use
 * vtn_push_ssa_value/vtn_push_pointer so that decorations are applied
 * appropriately by common code.
 */
static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(value_type == vtn_value_type_ssa,
               "Do not call vtn_push_value for value_type_ssa.  Use "
               "vtn_push_ssa_value instead.");

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;

   return &b->values[value_id];
}

static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}
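
/* Typical use in an opcode handler (a sketch; w[] is the instruction's word
 * array and the word indices are illustrative):
 *
 *    // Create a new value for a result id:
 *    struct vtn_value *val = vtn_push_value(b, w[2], vtn_value_type_type);
 *
 *    // Look up an existing value of a known kind:
 *    struct vtn_block *blk = vtn_value(b, w[1], vtn_value_type_block)->block;
 *
 * Both paths vtn_fail on an out-of-bounds id; vtn_push_value and vtn_value
 * additionally fail on a redefined id or a kind mismatch, respectively.
 */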

bool
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);

static inline uint64_t
vtn_constant_uint(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0].u8;
   case 16: return val->constant->values[0].u16;
   case 32: return val->constant->values[0].u32;
   case 64: return val->constant->values[0].u64;
   default: unreachable("Invalid bit size");
   }
}

static inline int64_t
vtn_constant_int(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0].i8;
   case 16: return val->constant->values[0].i16;
   case 32: return val->constant->values[0].i32;
   case 64: return val->constant->values[0].i64;
   default: unreachable("Invalid bit size");
   }
}

static inline struct vtn_type *
vtn_get_value_type(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->type == NULL, "Value %u does not have a type", value_id);
   return val->type;
}

static inline struct vtn_type *
vtn_get_type(struct vtn_builder *b, uint32_t value_id)
{
   return vtn_value(b, value_id, vtn_value_type_type)->type;
}

struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);
struct vtn_value *vtn_push_ssa_value(struct vtn_builder *b, uint32_t value_id,
                                     struct vtn_ssa_value *ssa);

nir_ssa_def *vtn_get_nir_ssa(struct vtn_builder *b, uint32_t value_id);
struct vtn_value *vtn_push_nir_ssa(struct vtn_builder *b, uint32_t value_id,
                                   nir_ssa_def *def);

struct vtn_value *vtn_push_pointer(struct vtn_builder *b,
                                   uint32_t value_id,
                                   struct vtn_pointer *ptr);

struct vtn_sampled_image {
   nir_deref_instr *image;
   nir_deref_instr *sampler;
};

nir_ssa_def *vtn_sampled_image_to_nir_ssa(struct vtn_builder *b,
                                          struct vtn_sampled_image si);

void
vtn_copy_value(struct vtn_builder *b, uint32_t src_value_id,
               uint32_t dst_value_id);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);

nir_deref_instr *vtn_nir_deref(struct vtn_builder *b, uint32_t id);

nir_deref_instr *vtn_pointer_to_deref(struct vtn_builder *b,
                                      struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out);

struct vtn_ssa_value *
vtn_local_load(struct vtn_builder *b, nir_deref_instr *src,
               enum gl_access_qualifier access);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_instr *dest,
                     enum gl_access_qualifier access);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);


typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);

typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);

nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       unsigned src_bit_size, unsigned dst_bit_size);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

void vtn_handle_bitcast(struct vtn_builder *b, const uint32_t *w,
                        unsigned count);

void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
                         const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                    const uint32_t *words, unsigned count);

bool vtn_handle_opencl_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                   const uint32_t *words, unsigned count);

struct vtn_builder* vtn_create_builder(const uint32_t *words, size_t word_count,
                                       gl_shader_stage stage, const char *entry_point_name,
                                       const struct spirv_to_nir_options *options);

void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
                            unsigned count);

void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
                           const uint32_t *w, unsigned count);

enum vtn_variable_mode vtn_storage_class_to_mode(struct vtn_builder *b,
                                                 SpvStorageClass class,
                                                 struct vtn_type *interface_type,
                                                 nir_variable_mode *nir_mode_out);

nir_address_format vtn_mode_to_address_format(struct vtn_builder *b,
                                              enum vtn_variable_mode);

static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -((int32_t) a)));
   return (v + a - 1) & ~(a - 1);
}
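
/* For example, vtn_align_u32(13, 8) == 16.  The assert rejects zero and
 * non-power-of-two alignments.
 */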

static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}
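
/* SPIR-V encodes 64-bit literals low-order word first, so for example
 * w[0] == 0x00000001, w[1] == 0x00000002 yields 0x0000000200000001.
 */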

bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                           const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_ballot_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                              const uint32_t *w, unsigned count);

bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                                      const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_explicit_vertex_parameter_instruction(struct vtn_builder *b,
                                                                 SpvOp ext_opcode,
                                                                 const uint32_t *words,
                                                                 unsigned count);

SpvMemorySemanticsMask vtn_storage_class_to_memory_semantics(SpvStorageClass sc);

void vtn_emit_memory_barrier(struct vtn_builder *b, SpvScope scope,
                             SpvMemorySemanticsMask semantics);

#endif /* _VTN_PRIVATE_H_ */