/* Definitions of target machine for GCC,
   for ARM targeting the VxWorks run time environment.
   Copyright (C) 1999-2020 Free Software Foundation, Inc.

   Contributed by: Mike Stump <mrs@wrs.com>
   Brought up to date by CodeSourcery, LLC.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

/* TARGET_OS_CPP_BUILTINS, down to BPABI if defined.  */

/* Provide a no-op fallback so TARGET_OS_CPP_BUILTINS below can invoke
   the BPABI hook unconditionally, whether or not this target variant
   defines it.  */
#if defined (TARGET_BPABI_CPP_BUILTINS)
#define MAYBE_TARGET_BPABI_CPP_BUILTINS TARGET_BPABI_CPP_BUILTINS
#else
#define MAYBE_TARGET_BPABI_CPP_BUILTINS()
#endif

/* Define the endianness macro (ARMEB/ARMEL) and a _VX_CPU value naming
   the most specific architecture variant, as expected by the VxWorks
   system headers.  The arch tests run from newest (v8) down to oldest
   (v4), with Thumb/M-profile refinements inside each level.  Note that
   the XScale test is independent of the arch8..arch4 chain, so on an
   XScale configuration _VX_CPU is defined twice (XSCALE first) —
   presumably the first definition wins for the VxWorks headers; this
   matches long-standing upstream behavior.  */
#undef TARGET_OS_CPP_BUILTINS
#define TARGET_OS_CPP_BUILTINS()			\
  do {							\
    if (TARGET_BIG_END)					\
      builtin_define ("ARMEB");				\
    else						\
      builtin_define ("ARMEL");				\
							\
    if (arm_arch_xscale)				\
      builtin_define ("_VX_CPU=XSCALE");		\
    if (arm_arch8)					\
      builtin_define ("_VX_CPU=ARMARCH8A");		\
    else if (arm_arch7)					\
      {							\
	if (!arm_arch_notm)				\
	  builtin_define ("_VX_CPU=ARMARCH7M");		\
	else if (TARGET_THUMB)				\
	  builtin_define ("_VX_CPU=ARMARCH7_T2");	\
	else						\
	  builtin_define ("_VX_CPU=ARMARCH7");		\
      }							\
    else if (arm_arch6)					\
      {							\
	if (TARGET_THUMB)				\
	  builtin_define ("_VX_CPU=ARMARCH6_T");	\
	else						\
	  builtin_define ("_VX_CPU=ARMARCH6");		\
      }							\
    else if (arm_arch5t)				\
	builtin_define ("_VX_CPU=ARMARCH5_T");		\
    else if (arm_arch4)					\
      {							\
	if (TARGET_THUMB)				\
	  builtin_define ("_VX_CPU=ARMARCH4_T");	\
	else						\
	  builtin_define ("_VX_CPU=ARMARCH4");		\
      }							\
    VXWORKS_OS_CPP_BUILTINS ();				\
    MAYBE_TARGET_BPABI_CPP_BUILTINS ();			\
  } while (0)

/* Apply the common VxWorks option overrides on top of the ARM ones.  */
#undef SUBTARGET_OVERRIDE_OPTIONS
#define SUBTARGET_OVERRIDE_OPTIONS VXWORKS_OVERRIDE_OPTIONS

/* Subsume the arm/elf.h definition, and add RTP hooks.  */
#undef SUBTARGET_CPP_SPEC
#define SUBTARGET_CPP_SPEC "-D__ELF__" VXWORKS_ADDITIONAL_CPP_SPEC

/* .text.hot and .text.unlikely sections are badly handled by the
   VxWorks kernel mode loader for ARM style exceptions, so disable
   function reordering except in RTP (-mrtp) mode.  */
#undef  CC1_SPEC
#define CC1_SPEC VXWORKS_CC1_SPEC " %{!mrtp:-fno-reorder-functions}"

/* Translate an explicit -mbig-endian as an explicit -EB to assembler
   and linker, and pass abi options matching the target expectations
   or command-line requests.  */
#define VXWORKS_ENDIAN_SPEC "%{mbig-endian:-EB}"

/* When the BPABI builtins are available, select the assembler EABI
   variant to match the requested ABI (gnu for apcs-gnu/atpcs, EABI
   version 5 otherwise) and add the v4bx fixup; otherwise pass
   nothing extra.  */
#if defined (TARGET_BPABI_CPP_BUILTINS)
#define MAYBE_ASM_ABI_SPEC \
  "%{mabi=apcs-gnu|mabi=atpcs:-meabi=gnu;:-meabi=5}" TARGET_FIX_V4BX_SPEC
#else
#define MAYBE_ASM_ABI_SPEC
#endif

#undef SUBTARGET_EXTRA_ASM_SPEC
#define SUBTARGET_EXTRA_ASM_SPEC MAYBE_ASM_ABI_SPEC " " VXWORKS_ENDIAN_SPEC

/* Link with the common VxWorks options plus the endianness selection.  */
#undef LINK_SPEC
#define LINK_SPEC VXWORKS_LINK_SPEC " " VXWORKS_ENDIAN_SPEC

/* Defer to the generic VxWorks definitions for libraries and
   startup/termination objects.  */
#undef LIB_SPEC
#define LIB_SPEC VXWORKS_LIB_SPEC

#undef STARTFILE_SPEC
#define STARTFILE_SPEC VXWORKS_STARTFILE_SPEC

#undef ENDFILE_SPEC
#define ENDFILE_SPEC VXWORKS_ENDFILE_SPEC

/* There is no default multilib.  */
#undef MULTILIB_DEFAULTS

/* Use the VxWorks profiling hook rather than the ARM one.  */
#undef FUNCTION_PROFILER
#define FUNCTION_PROFILER VXWORKS_FUNCTION_PROFILER

/* We want to be compatible with a version of "2.96" at one point in
   the past before this macro was changed.  */
#undef DEFAULT_STRUCTURE_SIZE_BOUNDARY
#define DEFAULT_STRUCTURE_SIZE_BOUNDARY 8

/* The kernel loader does not allow relocations to overflow, so we
   cannot allow arbitrary relocation addends in kernel modules or RTP
   executables.  Also, the dynamic loader uses the resolved relocation
   value to distinguish references to the text and data segments, so we
   cannot allow arbitrary offsets for shared libraries either.  */
#undef ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P
#define ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P 1

#undef TARGET_DEFAULT_WORD_RELOCATIONS
#define TARGET_DEFAULT_WORD_RELOCATIONS 1

/* Define this to be nonzero if static stack checking is supported.  */
#define STACK_CHECK_STATIC_BUILTIN 1

/* This platform supports the probing method of stack checking (RTP mode).
   8K is reserved in the stack to propagate exceptions in case of overflow.  */
#define STACK_CHECK_PROTECT 8192

/* Unless overridden by the target options, the default is little-endian.  */
#define TARGET_ENDIAN_DEFAULT 0

/* The VxWorks environment on ARM is llvm based and we need to link
   against libllvm.a to resolve __aeabi_memcpy4.  */

#undef VXWORKS_PERSONALITY
#define VXWORKS_PERSONALITY "llvm"

#undef VXWORKS_EXTRA_LIBS_RTP
#define VXWORKS_EXTRA_LIBS_RTP "-lllvm"
157