/* Data structure definitions for a generic GCC target.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

 In other words, you are welcome to use, share and improve this program.
 You are forbidden to forbid anyone else to use, share and improve
 what you give them.   Help stamp out software-hoarding!  */

/* This file contains a data structure that describes a GCC target.
   At present it is incomplete, but in future it should grow to
   contain most or all target machine and target O/S specific
   information.

   This structure has its initializer declared in target-def.h in the
   form of a large macro, TARGET_INITIALIZER, that expands to many
   smaller macros.

   The smaller macros each initialize one component of the structure,
   and each has a default.  Each target should have a file that
   includes target.h and target-def.h, and overrides any inappropriate
   defaults by undefining the relevant macro and defining a suitable
   replacement.  That file should then contain the definition of
   "targetm" like so:

   struct gcc_target targetm = TARGET_INITIALIZER;

   Doing things this way allows us to bring together everything that
   defines a GCC target.  By supplying a default that is appropriate
   to most targets, we can easily add new items without needing to
   edit dozens of target configuration files.  It should also allow us
   to gradually reduce the amount of conditional compilation that is
   scattered throughout GCC.  */

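/* As a concrete illustration (not part of this header), a hypothetical
   target's back-end .c file might override one hook and accept the
   defaults for everything else.  The "foo" names below are placeholders,
   not real GCC identifiers; TARGET_ASM_FUNCTION_PROLOGUE is the
   target-def.h macro behind the asm_out.function_prologue hook declared
   below:

     #include "config.h"
     #include "system.h"
     #include "coretypes.h"
     #include "tm.h"
     #include "target.h"
     #include "target-def.h"

     static void
     foo_output_function_prologue (FILE *file, HOST_WIDE_INT size)
     {
       fprintf (file, "\t; prologue for a %ld-byte frame\n", (long) size);
     }

     #undef TARGET_ASM_FUNCTION_PROLOGUE
     #define TARGET_ASM_FUNCTION_PROLOGUE foo_output_function_prologue

     struct gcc_target targetm = TARGET_INITIALIZER;  */
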
#ifndef GCC_TARGET_H
#define GCC_TARGET_H

#include "tm.h"
#include "insn-modes.h"

struct stdarg_info;
struct spec_info_def;

/* The struct used by the secondary_reload target hook.  */
typedef struct secondary_reload_info
{
  /* icode is actually an enum insn_code, but we don't want to force every
     file that includes target.h to include optabs.h.  */
  int icode;
  int extra_cost; /* Cost for using (a) scratch register(s) to be taken
                     into account by copy_cost.  */
  /* The next two members are for the use of the backward
     compatibility hook.  */
  struct secondary_reload_info *prev_sri;
  int t_icode; /* Actually an enum insn_code - see above.  */
} secondary_reload_info;


struct gcc_target
{
  /* Functions that output assembler for the target.  */
  struct asm_out
  {
    /* Opening and closing parentheses for asm expression grouping.  */
    const char *open_paren, *close_paren;

    /* Assembler instructions for creating various kinds of integer object.  */
    const char *byte_op;
    struct asm_int_op
    {
      const char *hi;
      const char *si;
      const char *di;
      const char *ti;
    } aligned_op, unaligned_op;
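
    /* Purely as a hedged illustration: on a typical ELF target using GAS,
       byte_op might be "\t.byte\t" and the aligned_op strings something
       like "\t.short\t", "\t.long\t" and "\t.quad\t" for hi/si/di; the
       exact directives (and whether ti is supported at all) are entirely
       target- and assembler-specific.  */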

    /* Try to output the assembler code for an integer object whose
       value is given by X.  SIZE is the size of the object in bytes and
       ALIGNED_P indicates whether it is aligned.  Return true if
       successful.  Only handles cases for which BYTE_OP, ALIGNED_OP
       and UNALIGNED_OP are NULL.  */
    bool (* integer) (rtx x, unsigned int size, int aligned_p);

    /* Output code that will globalize a label.  */
    void (* globalize_label) (FILE *, const char *);

    /* Output code that will emit a label for unwind info, if this
       target requires such labels.  Second argument is the decl the
       unwind info is associated with, third is a boolean: true if
       this is for exception handling, fourth is a boolean: true if
       this is only a placeholder for an omitted FDE.  */
    void (* unwind_label) (FILE *, tree, int, int);

    /* Output code that will emit a label to divide up the exception
       table.  */
    void (* except_table_label) (FILE *);

    /* Emit any directives required to unwind this instruction.  */
    void (* unwind_emit) (FILE *, rtx);

    /* Output an internal label.  */
    void (* internal_label) (FILE *, const char *, unsigned long);

    /* Emit a ttype table reference to a typeinfo object.  */
    bool (* ttype) (rtx);

    /* Emit an assembler directive to set visibility for the symbol
       associated with the tree decl.  */
    void (* visibility) (tree, int);

    /* Output the assembler code for entry to a function.  */
    void (* function_prologue) (FILE *, HOST_WIDE_INT);

    /* Output the assembler code for end of prologue.  */
    void (* function_end_prologue) (FILE *);

    /* Output the assembler code for start of epilogue.  */
    void (* function_begin_epilogue) (FILE *);

    /* Output the assembler code for function exit.  */
    void (* function_epilogue) (FILE *, HOST_WIDE_INT);

    /* Initialize target-specific sections.  */
    void (* init_sections) (void);

    /* Tell assembler to change to section NAME with attributes FLAGS.
       If DECL is non-NULL, it is the VAR_DECL or FUNCTION_DECL with
       which this section is associated.  */
    void (* named_section) (const char *name, unsigned int flags, tree decl);

    /* Return a mask describing how relocations should be treated when
       selecting sections.  Bit 1 should be set if global relocations
       should be placed in a read-write section; bit 0 should be set if
       local relocations should be placed in a read-write section.  */
    int (*reloc_rw_mask) (void);

    /* Return a section for EXP.  It may be a DECL or a constant.  RELOC
       is nonzero if runtime relocations must be applied; bit 1 will be
       set if the runtime relocations require non-local name resolution.
       ALIGN is the required alignment of the data.  */
    section *(* select_section) (tree, int, unsigned HOST_WIDE_INT);

    /* Return a section for X.  MODE is X's mode and ALIGN is its
       alignment in bits.  */
    section *(* select_rtx_section) (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);

    /* Select a unique section name for DECL.  RELOC is the same as
       for SELECT_SECTION.  */
    void (* unique_section) (tree, int);

    /* Return the readonly data section associated with function DECL.  */
    section *(* function_rodata_section) (tree);

    /* Output a constructor for a symbol with a given priority.  */
    void (* constructor) (rtx, int);

    /* Output a destructor for a symbol with a given priority.  */
    void (* destructor) (rtx, int);

    /* Output the assembler code for a thunk function.  THUNK_DECL is the
       declaration for the thunk function itself, FUNCTION is the decl for
       the target function.  DELTA is an immediate constant offset to be
       added to THIS.  If VCALL_OFFSET is nonzero, the word at
       *(*this + vcall_offset) should be added to THIS.  */
    void (* output_mi_thunk) (FILE *file, tree thunk_decl,
                              HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                              tree function_decl);
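
    /* Expressed as a rough C sketch of the semantics described above
       (illustrative only, not an additional requirement), the emitted
       thunk should behave as if it did:

         this += delta;
         if (vcall_offset != 0)
           this += *(ptrdiff_t *) (*(char **) this + vcall_offset);
         tail-call FUNCTION with the adjusted this and the other args;
    */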

    /* Determine whether output_mi_thunk would succeed.  */
    /* ??? Ideally, this hook would not exist, and success or failure
       would be returned from output_mi_thunk directly.  But there's
       too much undo-able setup involved in invoking output_mi_thunk.
       Could be fixed by making output_mi_thunk emit rtl instead of
       text to the output file.  */
    bool (* can_output_mi_thunk) (tree thunk_decl, HOST_WIDE_INT delta,
                                  HOST_WIDE_INT vcall_offset,
                                  tree function_decl);

    /* Output any boilerplate text needed at the beginning of a
       translation unit.  */
    void (*file_start) (void);

    /* Output any boilerplate text needed at the end of a
       translation unit.  */
    void (*file_end) (void);

    /* Output an assembler pseudo-op to declare a library function name
       external.  */
    void (*external_libcall) (rtx);

    /* Output an assembler directive to mark the decl as live.  This
       instructs the linker not to dead-code strip this symbol.  */
    void (*mark_decl_preserved) (const char *);

    /* Output the definition of a section anchor.  */
    void (*output_anchor) (rtx);

    /* Output a DTP-relative reference to a TLS symbol.  */
    void (*output_dwarf_dtprel) (FILE *file, int size, rtx x);

  } asm_out;

  /* Functions relating to instruction scheduling.  */
  struct sched
  {
    /* Given the current cost, COST, of an insn, INSN, calculate and
       return a new cost based on its relationship to DEP_INSN through
       the dependence LINK.  The default is to make no adjustment.  */
    int (* adjust_cost) (rtx insn, rtx link, rtx def_insn, int cost);
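
    /* A minimal, purely illustrative sketch of such a hook for a made-up
       "foo" target (the function name and cost values are assumptions,
       not taken from any real port): it keeps the default cost for true
       dependences and makes anti and output dependences free.

         static int
         foo_adjust_cost (rtx insn ATTRIBUTE_UNUSED, rtx link,
                          rtx dep_insn ATTRIBUTE_UNUSED, int cost)
         {
           if (REG_NOTE_KIND (link) == REG_DEP_ANTI
               || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
             return 0;
           return cost;
         }
    */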

    /* Adjust the priority of an insn as you see fit.  Returns the new
       priority.  */
    int (* adjust_priority) (rtx, int);

    /* Function which returns the maximum number of insns that can be
       scheduled in the same machine cycle.  This must be constant
       over an entire compilation.  The default is 1.  */
    int (* issue_rate) (void);

    /* Calculate how much this insn affects how many more insns we
       can emit this cycle.  Default is they all cost the same.  */
    int (* variable_issue) (FILE *, int, rtx, int);

    /* Initialize machine-dependent scheduling code.  */
    void (* md_init) (FILE *, int, int);

    /* Finalize machine-dependent scheduling code.  */
    void (* md_finish) (FILE *, int);

    /* Initialize machine-dependent function-wide scheduling code.  */
    void (* md_init_global) (FILE *, int, int);

    /* Finalize machine-dependent function-wide scheduling code.  */
    void (* md_finish_global) (FILE *, int);

    /* Reorder insns in a machine-dependent fashion, in two different
       places.  Default does nothing.  */
    int (* reorder) (FILE *, int, rtx *, int *, int);
    int (* reorder2) (FILE *, int, rtx *, int *, int);

    /* The following member value is a pointer to a function called
       after evaluating the forward dependencies of the insns in the
       chain given by the two parameters (head and tail respectively).  */
    void (* dependencies_evaluation_hook) (rtx, rtx);

    /* The values of the following four members are pointers to
       functions used to simplify the automaton descriptions.
       dfa_pre_cycle_insn and dfa_post_cycle_insn give functions
       returning insns which are used to change the pipeline hazard
       recognizer state when a new simulated processor cycle
       respectively starts and finishes.  The functions defined by
       init_dfa_pre_cycle_insn and init_dfa_post_cycle_insn are used
       to initialize the corresponding insns.  The default values of
       the members result in not changing the automaton state when
       a new simulated processor cycle starts or finishes.  */
    void (* init_dfa_pre_cycle_insn) (void);
    rtx (* dfa_pre_cycle_insn) (void);
    void (* init_dfa_post_cycle_insn) (void);
    rtx (* dfa_post_cycle_insn) (void);

    /* The following member value is a pointer to a function returning a
       value that defines how many insns from the `ready' queue we will
       try for multi-pass scheduling.  If the member value is nonzero and
       the function returns a positive value, the DFA-based scheduler
       will perform multi-pass scheduling for the first cycle.  In other
       words, we will try to choose the ready insn that permits the
       maximum number of insns to be started on the same cycle.  */
    int (* first_cycle_multipass_dfa_lookahead) (void);

    /* The following member value is a pointer to a function controlling
       what insns from the ready insn queue will be considered for the
       multipass insn scheduling.  If the hook returns zero for the insn
       passed as the parameter, the insn will not be chosen to be
       issued.  */
    int (* first_cycle_multipass_dfa_lookahead_guard) (rtx);

    /* The following member value is a pointer to a function called by
       the insn scheduler before issuing the insn passed as the third
       parameter on the given cycle.  If the hook returns nonzero, the
       insn is not issued on the given processor cycle.  Instead, the
       processor cycle is advanced.  If the value passed through the
       last parameter is zero, the insn ready queue is not sorted on
       the new cycle start as it usually is.  The first parameter
       passes a file for debugging output.  The second one passes the
       scheduler verbosity level of the debugging output.  The fourth
       and fifth parameters are, respectively, the processor cycle on
       which the previous insn was issued and the current processor
       cycle.  */
    int (* dfa_new_cycle) (FILE *, int, rtx, int, int, int *);

    /* The following member value is a pointer to a function called
       by the insn scheduler.  It should return true if there exists a
       dependence which is considered costly by the target, between
       the insn passed as the first parameter, and the insn passed as
       the second parameter.  The third parameter is the INSN_DEPEND
       link that represents the dependence between the two insns.  The
       fourth argument is the cost of the dependence as estimated by
       the scheduler.  The last argument is the distance in cycles
       between the already scheduled insn (first parameter) and the
       second insn (second parameter).  */
    bool (* is_costly_dependence) (rtx, rtx, rtx, int, int);

    /* Given the current cost, COST, of an insn, INSN, calculate and
       return a new cost based on its relationship to DEP_INSN through the
       dependence of type DEP_TYPE.  The default is to make no adjustment.  */
    int (* adjust_cost_2) (rtx insn, int, rtx def_insn, int cost);

    /* The following member value is a pointer to a function called
       by the insn scheduler.  This hook is called to notify the backend
       that new instructions were emitted.  */
    void (* h_i_d_extended) (void);

    /* The following member value is a pointer to a function called
       by the insn scheduler.
       The first parameter is an instruction, the second parameter is the type
       of the requested speculation, and the third parameter is a pointer to the
       speculative pattern of the corresponding type (set if return value == 1).
       It should return
       -1 if there is no pattern that will satisfy the requested speculation
       type,
       0 if the current pattern satisfies the requested speculation type,
       1 if the pattern of the instruction should be changed to the newly
       generated one.  */
    int (* speculate_insn) (rtx, int, rtx *);

    /* The following member value is a pointer to a function called
       by the insn scheduler.  It should return true if the check instruction
       corresponding to the instruction passed as the parameter needs a
       recovery block.  */
    bool (* needs_block_p) (rtx);

    /* The following member value is a pointer to a function called
       by the insn scheduler.  It should return a pattern for the check
       instruction.
       The first parameter is a speculative instruction, the second parameter
       is the label of the corresponding recovery block (or null, if it is a
       simple check).  If the mutation of the check is requested (e.g. from
       ld.c to chk.a), the third parameter is true - in this case the first
       parameter is the previous check.  */
    rtx (* gen_check) (rtx, rtx, bool);

    /* The following member value is a pointer to a function controlling
       what insns from the ready insn queue will be considered for the
       multipass insn scheduling.  If the hook returns zero for the insn
       passed as the parameter, the insn will not be chosen to be
       issued.  This hook is used to discard speculative instructions
       that stand at the first position of the ready list.  */
    bool (* first_cycle_multipass_dfa_lookahead_guard_spec) (rtx);

    /* The following member value is a pointer to a function that provides
       information about the speculation capabilities of the target.
       The parameter is a pointer to a spec_info variable.  */
    void (* set_sched_flags) (struct spec_info_def *);
  } sched;

  /* Functions relating to vectorization.  */
  struct vectorize
  {
    /* The following member value is a pointer to a function called
       by the vectorizer; it returns the decl of the target builtin
       function.  */
    tree (* builtin_mask_for_load) (void);
  } vectorize;

  /* The initial value of target_flags.  */
  int default_target_flags;

  /* Handle target switch CODE (an OPT_* value).  ARG is the argument
     passed to the switch; it is NULL if no argument was given.  VALUE
     is the value of ARG if CODE specifies a UInteger option, otherwise
     it is 1 if the positive form of the switch was used and 0 if the
     negative form was.  Return true if the switch was valid.  */
  bool (* handle_option) (size_t code, const char *arg, int value);
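
  /* A hedged sketch of such a hook for an imaginary port.  The option
     code OPT_mfoo and the variable foo_option are assumptions used only
     for illustration; real OPT_* codes come from the generated options.h.

       static bool
       foo_handle_option (size_t code, const char *arg ATTRIBUTE_UNUSED,
                          int value)
       {
         switch (code)
           {
           case OPT_mfoo:
             foo_option = value;
             return true;
           default:
             return true;
           }
       }
  */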

  /* Return machine mode for filter value.  */
  enum machine_mode (* eh_return_filter_mode) (void);

  /* Given two decls, merge their attributes and return the result.  */
  tree (* merge_decl_attributes) (tree, tree);

  /* Given two types, merge their attributes and return the result.  */
  tree (* merge_type_attributes) (tree, tree);

  /* Table of machine attributes and functions to handle them.
     Ignored if NULL.  */
  const struct attribute_spec *attribute_table;

  /* Return zero if the attributes on TYPE1 and TYPE2 are incompatible,
     one if they are compatible and two if they are nearly compatible
     (which causes a warning to be generated).  */
  int (* comp_type_attributes) (tree type1, tree type2);

  /* Assign default attributes to the newly defined TYPE.  */
  void (* set_default_type_attributes) (tree type);

  /* Insert attributes on the newly created DECL.  */
  void (* insert_attributes) (tree decl, tree *attributes);

  /* Return true if FNDECL (which has at least one machine attribute)
     can be inlined despite its machine attributes, false otherwise.  */
  bool (* function_attribute_inlinable_p) (tree fndecl);

  /* Return true if bitfields in RECORD_TYPE should follow the
     Microsoft Visual C++ bitfield layout rules.  */
  bool (* ms_bitfield_layout_p) (tree record_type);

  /* True if the target supports decimal floating point.  */
  bool (* decimal_float_supported_p) (void);

  /* Return true if anonymous bitfields affect structure alignment.  */
  bool (* align_anon_bitfield) (void);

  /* Return true if volatile bitfields should use the narrowest type possible.
     Return false if they should use the container type.  */
  bool (* narrow_volatile_bitfield) (void);

  /* Set up target-specific built-in functions.  */
  void (* init_builtins) (void);

  /* Expand a target-specific builtin.  */
  rtx (* expand_builtin) (tree exp, rtx target, rtx subtarget,
                          enum machine_mode mode, int ignore);

  /* Select a replacement for a target-specific builtin.  This is done
     *before* regular type checking, and so allows the target to implement
     a crude form of function overloading.  The result is a complete
     expression that implements the operation.  */
  tree (*resolve_overloaded_builtin) (tree decl, tree params);

  /* Fold a target-specific builtin.  */
  tree (* fold_builtin) (tree fndecl, tree arglist, bool ignore);

  /* For a vendor-specific fundamental TYPE, return a pointer to
     a statically-allocated string containing the C++ mangling for
     TYPE.  In all other cases, return NULL.  */
  const char * (* mangle_fundamental_type) (tree type);

  /* Make any adjustments to libfunc names needed for this target.  */
  void (* init_libfuncs) (void);

  /* Given a decl, a section name, and whether the decl initializer
     has relocs, choose attributes for the section.  */
  /* ??? Should be merged with SELECT_SECTION and UNIQUE_SECTION.  */
  unsigned int (* section_type_flags) (tree, const char *, int);

  /* True if new jumps cannot be created (whether to replace existing
     ones or not) at the current point in the compilation.  */
  bool (* cannot_modify_jumps_p) (void);

  /* Return a register class for which branch target register
     optimizations should be applied.  */
  int (* branch_target_register_class) (void);

  /* Return true if branch target register optimizations should include
     callee-saved registers that are not already live during the current
     function.  AFTER_PE_GEN is true if prologues and epilogues have
     already been generated.  */
  bool (* branch_target_register_callee_saved) (bool after_pe_gen);

  /* True if the constant X cannot be placed in the constant pool.  */
  bool (* cannot_force_const_mem) (rtx);

  /* True if the insn X cannot be duplicated.  */
  bool (* cannot_copy_insn_p) (rtx);

  /* True if X is considered to be commutative.  */
  bool (* commutative_p) (rtx, int);

  /* Given an address RTX, undo the effects of LEGITIMIZE_ADDRESS.  */
  rtx (* delegitimize_address) (rtx);

  /* True if the given constant can be put into an object_block.  */
  bool (* use_blocks_for_constant_p) (enum machine_mode, rtx);

  /* The minimum and maximum byte offsets for anchored addresses.  */
  HOST_WIDE_INT min_anchor_offset;
  HOST_WIDE_INT max_anchor_offset;

  /* True if section anchors can be used to access the given symbol.  */
  bool (* use_anchors_for_symbol_p) (rtx);

  /* True if it is OK to do sibling call optimization for the specified
     call expression EXP.  DECL will be the called function, or NULL if
     this is an indirect call.  */
  bool (*function_ok_for_sibcall) (tree decl, tree exp);

  /* True if EXP should be placed in a "small data" section.  */
  bool (* in_small_data_p) (tree);

  /* True if EXP names an object for which name resolution must resolve
     to the current module.  */
  bool (* binds_local_p) (tree);

  /* Do something target-specific to record properties of the DECL into
     the associated SYMBOL_REF.  */
  void (* encode_section_info) (tree, rtx, int);

  /* Undo the effects of encode_section_info on the symbol string.  */
  const char * (* strip_name_encoding) (const char *);

  /* If shift optabs for MODE are known to always truncate the shift count,
     return the mask that they apply.  Return 0 otherwise.  */
  unsigned HOST_WIDE_INT (* shift_truncation_mask) (enum machine_mode mode);
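
  /* For instance (illustrative, not a requirement): a target whose shift
     instructions use only the low bits of the count, as many 32-bit ports
     do, might simply return GET_MODE_BITSIZE (mode) - 1 here, giving a
     mask of 31 for SImode.  */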

  /* Return the number of divisions in the given MODE that should be present,
     so that it is profitable to turn the division into a multiplication by
     the reciprocal.  */
  unsigned int (* min_divisions_for_recip_mul) (enum machine_mode mode);

  /* If the representation of integral MODE is such that values are
     always sign-extended to a wider mode MODE_REP then return
     SIGN_EXTEND.  Return UNKNOWN otherwise.  */
  /* Note that the return type ought to be RTX_CODE, but that's not
     necessarily defined at this point.  */
  int (* mode_rep_extended) (enum machine_mode mode,
                             enum machine_mode mode_rep);

  /* True if MODE is valid for a pointer in __attribute__((mode("MODE"))).  */
  bool (* valid_pointer_mode) (enum machine_mode mode);

  /* True if MODE is valid for the target.  By "valid", we mean able to
     be manipulated in non-trivial ways.  In particular, this means all
     the arithmetic is supported.  */
  bool (* scalar_mode_supported_p) (enum machine_mode mode);

  /* Similarly for vector modes.  "Supported" here is less strict.  At
     least some operations are supported; need to check optabs or builtins
     for further details.  */
  bool (* vector_mode_supported_p) (enum machine_mode mode);

  /* True if a vector is opaque.  */
  bool (* vector_opaque_p) (tree);

  /* Compute a (partial) cost for rtx X.  Return true if the complete
     cost has been computed, and false if subexpressions should be
     scanned.  In either case, *TOTAL contains the cost result.  */
  /* Note that CODE and OUTER_CODE ought to be RTX_CODE, but that's
     not necessarily defined at this point.  */
  bool (* rtx_costs) (rtx x, int code, int outer_code, int *total);
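
  /* A hedged sketch of such a hook for an imaginary port, charging three
     instructions for a multiply and falling back to the common costs for
     everything else.  The name foo_rtx_costs and the cost of 3 are
     assumptions for illustration only; COSTS_N_INSNS is the usual cost
     scale used elsewhere in GCC.

       static bool
       foo_rtx_costs (rtx x ATTRIBUTE_UNUSED, int code,
                      int outer_code ATTRIBUTE_UNUSED, int *total)
       {
         if (code == MULT)
           {
             *total = COSTS_N_INSNS (3);
             return true;
           }
         return false;
       }
  */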

  /* Compute the cost of X, used as an address.  Never called with
     invalid addresses.  */
  int (* address_cost) (rtx x);

  /* Return where to allocate pseudo for a given hard register initial
     value.  */
  rtx (* allocate_initial_value) (rtx x);

  /* Given a register, this hook should return a parallel of registers
     to represent where to find the register pieces.  Define this hook
     if the register and its mode are represented in Dwarf in
     non-contiguous locations, or if the register should be
     represented in more than one register in Dwarf.  Otherwise, this
     hook should return NULL_RTX.  */
  rtx (* dwarf_register_span) (rtx);

  /* Fetch the fixed register(s) which hold condition codes, for
     targets where it makes sense to look for duplicate assignments to
     the condition codes.  This should return true if there is such a
     register, false otherwise.  The arguments should be set to the
     fixed register numbers.  Up to two condition code registers are
     supported.  If there is only one for this target, the int pointed
     at by the second argument should be set to -1.  */
  bool (* fixed_condition_code_regs) (unsigned int *, unsigned int *);

  /* If two condition code modes are compatible, return a condition
     code mode which is compatible with both, such that a comparison
     done in the returned mode will work for both of the original
     modes.  If the condition code modes are not compatible, return
     VOIDmode.  */
  enum machine_mode (* cc_modes_compatible) (enum machine_mode,
                                             enum machine_mode);

  /* Do machine-dependent code transformations.  Called just before
     delayed-branch scheduling.  */
  void (* machine_dependent_reorg) (void);

  /* Create the __builtin_va_list type.  */
  tree (* build_builtin_va_list) (void);

  /* Gimplifies a VA_ARG_EXPR.  */
  tree (* gimplify_va_arg_expr) (tree valist, tree type, tree *pre_p,
                                 tree *post_p);

  /* Validity-checking routines for PCH files, target-specific.
     get_pch_validity returns a pointer to the data to be stored,
     and stores the size in its argument.  pch_valid_p gets the same
     information back and returns NULL if the PCH is valid,
     or an error message if not.  */
  void * (* get_pch_validity) (size_t *);
  const char * (* pch_valid_p) (const void *, size_t);

  /* If nonnull, this function checks whether a PCH file with the
     given set of target flags can be used.  It returns NULL if so,
     otherwise it returns an error message.  */
  const char *(*check_pch_target_flags) (int);

  /* True if the compiler should give an enum type only as many
     bytes as it takes to represent the range of possible values of
     that type.  */
  bool (* default_short_enums) (void);

  /* This target hook returns an rtx that is used to store the address
     of the current frame into the built-in setjmp buffer.  */
  rtx (* builtin_setjmp_frame_value) (void);

  /* This target hook should add STRING_CST trees for any hard regs
     the port wishes to automatically clobber for an asm.  */
  tree (* md_asm_clobbers) (tree, tree, tree);

  /* This target hook allows the backend to specify a calling convention
     in the debug information.  This function actually returns an
     enum dwarf_calling_convention, but because of forward declarations
     and not wanting to include dwarf2.h everywhere target.h is included
     the function is being declared as an int.  */
  int (* dwarf_calling_convention) (tree);

  /* This target hook allows the backend to emit frame-related insns that
     contain UNSPECs or UNSPEC_VOLATILEs.  The call frame debugging info
     engine will invoke it on insns of the form
       (set (reg) (unspec [...] UNSPEC_INDEX))
     and
       (set (reg) (unspec_volatile [...] UNSPECV_INDEX))
     to let the backend emit the call frame instructions.  */
  void (* dwarf_handle_frame_unspec) (const char *, rtx, int);

  /* Perform architecture-specific checking of statements gimplified
     from VA_ARG_EXPR.  LHS is the left-hand side of the MODIFY_EXPR, RHS
     is the right-hand side.  Returns true if the statement doesn't need
     to be checked for va_list references.  */
  bool (* stdarg_optimize_hook) (struct stdarg_info *ai, tree lhs, tree rhs);

  /* This target hook allows the operating system to override the DECL
     that represents the external variable that contains the stack
     protection guard variable.  The type of this DECL is ptr_type_node.  */
  tree (* stack_protect_guard) (void);

  /* This target hook allows the operating system to override the CALL_EXPR
     that is invoked when a check vs the guard variable fails.  */
  tree (* stack_protect_fail) (void);

  /* Returns NULL if target supports the insn within a doloop block,
     otherwise it returns an error message.  */
  const char * (*invalid_within_doloop) (rtx);

  /* DECL is a variable or function with __attribute__((dllimport))
     specified.  Use this hook if the target needs to add extra validation
     checks to handle_dll_attribute ().  */
  bool (* valid_dllimport_attribute_p) (tree decl);

  /* Functions relating to calls - argument passing, returns, etc.  */
  struct calls {
    bool (*promote_function_args) (tree fntype);
    bool (*promote_function_return) (tree fntype);
    bool (*promote_prototypes) (tree fntype);
    rtx (*struct_value_rtx) (tree fndecl, int incoming);
    bool (*return_in_memory) (tree type, tree fndecl);
    bool (*return_in_msb) (tree type);

    /* Return true if a parameter must be passed by reference.  TYPE may
       be null if this is a libcall.  CA may be null if this query is
       from __builtin_va_arg.  */
    bool (*pass_by_reference) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                               tree type, bool named_arg);
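
    /* A hedged illustration for an imaginary port that passes aggregates
       larger than 8 bytes by reference.  The function name and the 8-byte
       threshold are assumptions, not taken from any real target:

         static bool
         foo_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                                enum machine_mode mode, tree type,
                                bool named ATTRIBUTE_UNUSED)
         {
           HOST_WIDE_INT size;

           if (type == NULL_TREE)
             return GET_MODE_SIZE (mode) > 8;
           size = int_size_in_bytes (type);
           return size < 0 || size > 8;
         }
    */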

    rtx (*expand_builtin_saveregs) (void);
    /* Returns pretend_argument_size.  */
    void (*setup_incoming_varargs) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                                    tree type, int *pretend_arg_size,
                                    int second_time);
    bool (*strict_argument_naming) (CUMULATIVE_ARGS *ca);
    /* Returns true if we should use
       targetm.calls.setup_incoming_varargs() and/or
       targetm.calls.strict_argument_naming().  */
    bool (*pretend_outgoing_varargs_named) (CUMULATIVE_ARGS *ca);

    /* Given a complex type T, return true if a parameter of type T
       should be passed as two scalars.  */
    bool (* split_complex_arg) (tree type);

    /* Return true if type T, mode MODE, may not be passed in registers,
       but must be passed on the stack.  */
    /* ??? This predicate should be applied strictly after pass-by-reference.
       Need audit to verify that this is the case.  */
    bool (* must_pass_in_stack) (enum machine_mode mode, tree t);

    /* Return true if type TYPE, mode MODE, which is passed by reference,
       should have the object copy generated by the callee rather than
       the caller.  It is never called for TYPE requiring constructors.  */
    bool (* callee_copies) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                            tree type, bool named);

    /* Return zero for arguments passed entirely on the stack or entirely
       in registers.  If passed in both, return the number of bytes passed
       in registers; the balance is therefore passed on the stack.  */
    int (* arg_partial_bytes) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                               tree type, bool named);

    /* Return the diagnostic message string if a function without a
       prototype is not allowed for this 'val' argument; NULL otherwise.  */
    const char *(*invalid_arg_for_unprototyped_fn) (tree typelist,
                                                    tree funcdecl, tree val);

    /* Return an rtx for the return value location of the function
       specified by FN_DECL_OR_TYPE with a return type of RET_TYPE.  */
    rtx (*function_value) (tree ret_type, tree fn_decl_or_type,
                           bool outgoing);

    /* Return an rtx for the argument pointer incoming to the
       current function.  */
    rtx (*internal_arg_pointer) (void);
  } calls;

  /* Return the diagnostic message string if conversion from FROMTYPE
     to TOTYPE is not allowed, NULL otherwise.  */
  const char *(*invalid_conversion) (tree fromtype, tree totype);

  /* Return the diagnostic message string if the unary operation OP is
     not permitted on TYPE, NULL otherwise.  */
  const char *(*invalid_unary_op) (int op, tree type);

  /* Return the diagnostic message string if the binary operation OP
     is not permitted on TYPE1 and TYPE2, NULL otherwise.  */
  const char *(*invalid_binary_op) (int op, tree type1, tree type2);

  /* Return the class for a secondary reload, and fill in extra information.  */
  enum reg_class (*secondary_reload) (bool, rtx, enum reg_class,
                                      enum machine_mode,
                                      struct secondary_reload_info *);

  /* Functions specific to the C++ frontend.  */
  struct cxx {
    /* Return the integer type used for guard variables.  */
    tree (*guard_type) (void);
    /* Return true if only the low bit of the guard should be tested.  */
    bool (*guard_mask_bit) (void);
    /* Returns the size of the array cookie for an array of the given type.  */
    tree (*get_cookie_size) (tree);
    /* Returns true if the element size should be stored in the
       array cookie.  */
    bool (*cookie_has_size) (void);
    /* Allows backends to perform additional processing when
       deciding if a class should be exported or imported.  */
    int (*import_export_class) (tree, int);
    /* Returns true if constructors and destructors return "this".  */
    bool (*cdtor_returns_this) (void);
    /* Returns true if the key method for a class can be an inline
       function, so long as it is not declared inline in the class
       itself.  Returning true is the behavior required by the Itanium
       C++ ABI.  */
    bool (*key_method_may_be_inline) (void);
    /* DECL is a virtual table, virtual table table, typeinfo object,
       or other similar implicit class data object that will be
       emitted with external linkage in this translation unit.  No ELF
       visibility has been explicitly specified.  If the target needs
       to specify a visibility other than that of the containing class,
       use this hook to set DECL_VISIBILITY and
       DECL_VISIBILITY_SPECIFIED.  */
    void (*determine_class_data_visibility) (tree decl);
    /* Returns true (the default) if virtual tables and other
       similar implicit class data objects are always COMDAT if they
       have external linkage.  If this hook returns false, then
       class data for classes whose virtual table will be emitted in
       only one translation unit will not be COMDAT.  */
    bool (*class_data_always_comdat) (void);
    /* Returns true if __aeabi_atexit should be used to register static
       destructors.  */
    bool (*use_aeabi_atexit) (void);
    /* TYPE is a C++ class (i.e., RECORD_TYPE or UNION_TYPE) that
       has just been defined.  Use this hook to make adjustments to the
       class (e.g., tweak visibility or perform any other required
       target modifications).  */
    void (*adjust_class_at_definition) (tree type);
  } cxx;

  /* Targets that need to mark extra registers as live on entry to
     the function should define this target hook and set the relevant
     bits in the bitmap passed in.  */
  void (*live_on_entry) (bitmap);

  /* True if unwinding tables should be generated by default.  */
  bool unwind_tables_default;

  /* Leave the boolean fields at the end.  */

  /* True if arbitrary sections are supported.  */
  bool have_named_sections;

  /* True if we can create zeroed data by switching to a BSS section
     and then using ASM_OUTPUT_SKIP to allocate the space.  */
  bool have_switchable_bss_sections;

  /* True if "native" constructors and destructors are supported,
     false if we're using collect2 for the job.  */
  bool have_ctors_dtors;

  /* True if thread-local storage is supported.  */
  bool have_tls;

  /* True if a small readonly data section is supported.  */
  bool have_srodata_section;

  /* True if EH frame info sections should be zero-terminated.  */
  bool terminate_dw2_eh_frame_info;

  /* True if #NO_APP should be emitted at the beginning of
     assembly output.  */
  bool file_start_app_off;

  /* True if output_file_directive should be called for main_input_filename
     at the beginning of assembly output.  */
  bool file_start_file_directive;

  /* True if #pragma redefine_extname is to be supported.  */
  bool handle_pragma_redefine_extname;

  /* True if #pragma extern_prefix is to be supported.  */
  bool handle_pragma_extern_prefix;

  /* True if the target is allowed to reorder memory accesses unless
     synchronization is explicitly requested.  */
  bool relaxed_ordering;

  /* Returns true if we should generate exception tables for use with the
     ARM EABI.  This affects the encoding of function exception
     specifications.  */
  bool arm_eabi_unwinder;

  /* Leave the boolean fields at the end.  */
};

extern struct gcc_target targetm;
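
/* For reference, code elsewhere in the compiler reaches these hooks
   through the global defined above, along the lines of the following
   illustrative sketch (not a quotation of any particular caller; the
   section name, flags and decl are placeholders):

     if (targetm.have_named_sections)
       targetm.asm_out.named_section (".mysect", flags, decl);
     targetm.asm_out.globalize_label (asm_out_file, name);
*/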

#endif /* GCC_TARGET_H */