1 /* Common target dependent code for GDB on ARM systems.
2 
3    Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 
5    This file is part of GDB.
6 
7    This program is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11 
12    This program is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
19 
20 #include "defs.h"
21 
22 #include <ctype.h>		/* XXX for isupper ().  */
23 
24 #include "frame.h"
25 #include "inferior.h"
26 #include "gdbcmd.h"
27 #include "gdbcore.h"
28 #include "gdb_string.h"
29 #include "dis-asm.h"		/* For register styles.  */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47 
48 #include "arm-tdep.h"
49 #include "gdb/sim-arm.h"
50 
51 #include "elf-bfd.h"
52 #include "coff/internal.h"
53 #include "elf/arm.h"
54 
55 #include "gdb_assert.h"
56 #include "vec.h"
57 
58 #include "record.h"
59 #include "record-full.h"
60 
61 #include "features/arm-with-m.c"
62 #include "features/arm-with-m-fpa-layout.c"
63 #include "features/arm-with-m-vfp-d16.c"
64 #include "features/arm-with-iwmmxt.c"
65 #include "features/arm-with-vfpv2.c"
66 #include "features/arm-with-vfpv3.c"
67 #include "features/arm-with-neon.c"
68 
69 static int arm_debug;
70 
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72    it as a Thumb function.  The first target-specific flag of the minimal
73    symbol is used for this purpose.
74 
75    MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
76    MSYMBOL_IS_SPECIAL   Tests the "special" bit in a minimal symbol.  */
77 
78 #define MSYMBOL_SET_SPECIAL(msym)				\
79 	MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 
81 #define MSYMBOL_IS_SPECIAL(msym)				\
82 	MSYMBOL_TARGET_FLAG_1 (msym)
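
/* An illustrative, non-compiled sketch: the hypothetical helper below shows
   how a consumer would query the bit once a symbol reader has tagged a
   Thumb symbol with MSYMBOL_SET_SPECIAL.  */
#if 0
static int
example_msym_is_thumb_code (struct minimal_symbol *msym)
{
  /* Non-zero only if the symbol was marked with MSYMBOL_SET_SPECIAL.  */
  return MSYMBOL_IS_SPECIAL (msym);
}
#endif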
83 
84 /* Per-objfile data used for mapping symbols.  */
85 static const struct objfile_data *arm_objfile_data_key;
86 
87 struct arm_mapping_symbol
88 {
89   bfd_vma value;
90   char type;
91 };
92 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
93 DEF_VEC_O(arm_mapping_symbol_s);
94 
95 struct arm_per_objfile
96 {
97   VEC(arm_mapping_symbol_s) **section_maps;
98 };
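
/* An illustrative, non-compiled sketch: each section has its own vector of
   mapping symbols, kept sorted by VALUE.  The hypothetical helper below
   shows only the simple append case; the real recording code also inserts
   out-of-order symbols at their sorted position.  */
#if 0
static void
example_record_mapping_symbol (struct arm_per_objfile *data,
                               int section_index, bfd_vma value, char type)
{
  struct arm_mapping_symbol new_sym;

  new_sym.value = value;	/* Section-relative address.  */
  new_sym.type = type;		/* 'a', 't' or 'd'.  */
  VEC_safe_push (arm_mapping_symbol_s, data->section_maps[section_index],
                 &new_sym);
}
#endif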
99 
100 /* The list of available "set arm ..." and "show arm ..." commands.  */
101 static struct cmd_list_element *setarmcmdlist = NULL;
102 static struct cmd_list_element *showarmcmdlist = NULL;
103 
104 /* The type of floating-point to use.  Keep this in sync with enum
105    arm_float_model, and the help string in _initialize_arm_tdep.  */
106 static const char *const fp_model_strings[] =
107 {
108   "auto",
109   "softfpa",
110   "fpa",
111   "softvfp",
112   "vfp",
113   NULL
114 };
115 
116 /* A variable that can be configured by the user.  */
117 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
118 static const char *current_fp_model = "auto";
119 
120 /* The ABI to use.  Keep this in sync with arm_abi_kind.  */
121 static const char *const arm_abi_strings[] =
122 {
123   "auto",
124   "APCS",
125   "AAPCS",
126   NULL
127 };
128 
129 /* A variable that can be configured by the user.  */
130 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
131 static const char *arm_abi_string = "auto";
132 
133 /* The execution mode to assume.  */
134 static const char *const arm_mode_strings[] =
135   {
136     "auto",
137     "arm",
138     "thumb",
139     NULL
140   };
141 
142 static const char *arm_fallback_mode_string = "auto";
143 static const char *arm_force_mode_string = "auto";
144 
145 /* Internal override of the execution mode.  -1 means no override,
146    0 means override to ARM mode, 1 means override to Thumb mode.
147    The effect is the same as if arm_force_mode had been set by the
148    user (except that the internal override takes precedence over a
149    user's arm_force_mode setting).  */
150 static int arm_override_mode = -1;
151 
152 /* Number of different reg name sets (options).  */
153 static int num_disassembly_options;
154 
155 /* The standard register names, and all the valid aliases for them.  Note
156    that `fp', `sp' and `pc' are not added in this alias list, because they
157    have been added as builtin user registers in
158    std-regs.c:_initialize_frame_reg.  */
159 static const struct
160 {
161   const char *name;
162   int regnum;
163 } arm_register_aliases[] = {
164   /* Basic register numbers.  */
165   { "r0", 0 },
166   { "r1", 1 },
167   { "r2", 2 },
168   { "r3", 3 },
169   { "r4", 4 },
170   { "r5", 5 },
171   { "r6", 6 },
172   { "r7", 7 },
173   { "r8", 8 },
174   { "r9", 9 },
175   { "r10", 10 },
176   { "r11", 11 },
177   { "r12", 12 },
178   { "r13", 13 },
179   { "r14", 14 },
180   { "r15", 15 },
181   /* Synonyms (argument and variable registers).  */
182   { "a1", 0 },
183   { "a2", 1 },
184   { "a3", 2 },
185   { "a4", 3 },
186   { "v1", 4 },
187   { "v2", 5 },
188   { "v3", 6 },
189   { "v4", 7 },
190   { "v5", 8 },
191   { "v6", 9 },
192   { "v7", 10 },
193   { "v8", 11 },
194   /* Other platform-specific names for r9.  */
195   { "sb", 9 },
196   { "tr", 9 },
197   /* Special names.  */
198   { "ip", 12 },
199   { "lr", 14 },
200   /* Names used by GCC (not listed in the ARM EABI).  */
201   { "sl", 10 },
202   /* A special name from the older ATPCS.  */
203   { "wr", 7 },
204 };
205 
206 static const char *const arm_register_names[] =
207 {"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
208  "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
209  "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
210  "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
211  "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
212  "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
213  "fps", "cpsr" };		/* 24 25       */
214 
215 /* Valid register name styles.  */
216 static const char **valid_disassembly_styles;
217 
218 /* Disassembly style to use.  Defaults to "std" register names.  */
219 static const char *disassembly_style;
220 
221 /* This is used to keep the bfd arch_info in sync with the disassembly
222    style.  */
223 static void set_disassembly_style_sfunc(char *, int,
224 					 struct cmd_list_element *);
225 static void set_disassembly_style (void);
226 
227 static void convert_from_extended (const struct floatformat *, const void *,
228 				   void *, int);
229 static void convert_to_extended (const struct floatformat *, void *,
230 				 const void *, int);
231 
232 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
233 						struct regcache *regcache,
234 						int regnum, gdb_byte *buf);
235 static void arm_neon_quad_write (struct gdbarch *gdbarch,
236 				 struct regcache *regcache,
237 				 int regnum, const gdb_byte *buf);
238 
239 static int thumb_insn_size (unsigned short inst1);
240 
241 struct arm_prologue_cache
242 {
243   /* The stack pointer at the time this frame was created; i.e. the
244      caller's stack pointer when this function was called.  It is used
245      to identify this frame.  */
246   CORE_ADDR prev_sp;
247 
248   /* The frame base for this frame is just prev_sp - frame size.
249      FRAMESIZE is the distance from the frame pointer to the
250      initial stack pointer.  */
251 
252   int framesize;
253 
254   /* The register used to hold the frame pointer for this frame.  */
255   int framereg;
256 
257   /* Saved register offsets.  */
258   struct trad_frame_saved_reg *saved_regs;
259 };
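
/* An illustrative, non-compiled sketch: once FRAMEREG and FRAMESIZE have
   been filled in by prologue analysis, the caller's stack pointer can be
   recovered as the unwound value of FRAMEREG plus FRAMESIZE, roughly as in
   the hypothetical helper below.  */
#if 0
static void
example_fill_prev_sp (struct frame_info *this_frame,
                      struct arm_prologue_cache *cache)
{
  CORE_ADDR unwound_fp
    = get_frame_register_unsigned (this_frame, cache->framereg);

  /* PREV_SP identifies the frame: the stack pointer value at the moment
     the caller made the call.  */
  cache->prev_sp = unwound_fp + cache->framesize;
}
#endif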
260 
261 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
262 				       CORE_ADDR prologue_start,
263 				       CORE_ADDR prologue_end,
264 				       struct arm_prologue_cache *cache);
265 
266 /* Architecture version for displaced stepping.  This affects the behaviour
267    of certain instructions, and really should not be hard-wired.  */
268 
269 #define DISPLACED_STEPPING_ARCH_VERSION		5
270 
271 /* Addresses for calling Thumb functions have the bit 0 set.
272    Here are some macros to test, set, or clear bit 0 of addresses.  */
273 #define IS_THUMB_ADDR(addr)	((addr) & 1)
274 #define MAKE_THUMB_ADDR(addr)	((addr) | 1)
275 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
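
/* An illustrative, non-compiled sketch: a branch or call destination with
   bit 0 set designates Thumb code, and the bit must be stripped before the
   value is used as a plain memory address.  The helper name is hypothetical.  */
#if 0
static CORE_ADDR
example_normalize_dest (CORE_ADDR dest, int *is_thumb)
{
  *is_thumb = IS_THUMB_ADDR (dest);	/* Remember the mode...  */
  return UNMAKE_THUMB_ADDR (dest);	/* ...and clear bit 0.  */
}
#endif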
276 
277 /* Set to true if the 32-bit mode is in use.  */
278 
279 int arm_apcs_32 = 1;
280 
281 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */
282 
283 int
284 arm_psr_thumb_bit (struct gdbarch *gdbarch)
285 {
286   if (gdbarch_tdep (gdbarch)->is_m)
287     return XPSR_T;
288   else
289     return CPSR_T;
290 }
291 
292 /* Determine if FRAME is executing in Thumb mode.  */
293 
294 int
295 arm_frame_is_thumb (struct frame_info *frame)
296 {
297   CORE_ADDR cpsr;
298   ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
299 
300   /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
301      directly (from a signal frame or dummy frame) or by interpreting
302      the saved LR (from a prologue or DWARF frame).  So consult it and
303      trust the unwinders.  */
304   cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
305 
306   return (cpsr & t_bit) != 0;
307 }
308 
309 /* Callback for VEC_lower_bound.  */
310 
311 static inline int
312 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
313 			     const struct arm_mapping_symbol *rhs)
314 {
315   return lhs->value < rhs->value;
316 }
317 
318 /* Search for the mapping symbol covering MEMADDR.  If one is found,
319    return its type.  Otherwise, return 0.  If START is non-NULL,
320    set *START to the location of the mapping symbol.  */
321 
322 static char
323 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
324 {
325   struct obj_section *sec;
326 
327   /* If there are mapping symbols, consult them.  */
328   sec = find_pc_section (memaddr);
329   if (sec != NULL)
330     {
331       struct arm_per_objfile *data;
332       VEC(arm_mapping_symbol_s) *map;
333       struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
334 					    0 };
335       unsigned int idx;
336 
337       data = objfile_data (sec->objfile, arm_objfile_data_key);
338       if (data != NULL)
339 	{
340 	  map = data->section_maps[sec->the_bfd_section->index];
341 	  if (!VEC_empty (arm_mapping_symbol_s, map))
342 	    {
343 	      struct arm_mapping_symbol *map_sym;
344 
345 	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
346 				     arm_compare_mapping_symbols);
347 
348 	      /* VEC_lower_bound finds the earliest ordered insertion
349 		 point.  If the following symbol starts at this exact
350 		 address, we use that; otherwise, the preceding
351 		 mapping symbol covers this address.  */
352 	      if (idx < VEC_length (arm_mapping_symbol_s, map))
353 		{
354 		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
355 		  if (map_sym->value == map_key.value)
356 		    {
357 		      if (start)
358 			*start = map_sym->value + obj_section_addr (sec);
359 		      return map_sym->type;
360 		    }
361 		}
362 
363 	      if (idx > 0)
364 		{
365 		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
366 		  if (start)
367 		    *start = map_sym->value + obj_section_addr (sec);
368 		  return map_sym->type;
369 		}
370 	    }
371 	}
372     }
373 
374   return 0;
375 }
376 
377 /* Determine if the program counter specified in MEMADDR is in a Thumb
378    function.  This function should be called for addresses unrelated to
379    any executing frame; otherwise, prefer arm_frame_is_thumb.  */
380 
381 int
382 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
383 {
384   struct minimal_symbol *sym;
385   char type;
386   struct displaced_step_closure* dsc
387     = get_displaced_step_closure_by_addr(memaddr);
388 
389   /* If checking the mode of a displaced instruction in the copy area,
390      the mode should be determined by the instruction at the original address.  */
391   if (dsc)
392     {
393       if (debug_displaced)
394 	fprintf_unfiltered (gdb_stdlog,
395 			    "displaced: check mode of %.8lx instead of %.8lx\n",
396 			    (unsigned long) dsc->insn_addr,
397 			    (unsigned long) memaddr);
398       memaddr = dsc->insn_addr;
399     }
400 
401   /* If bit 0 of the address is set, assume this is a Thumb address.  */
402   if (IS_THUMB_ADDR (memaddr))
403     return 1;
404 
405   /* Respect internal mode override if active.  */
406   if (arm_override_mode != -1)
407     return arm_override_mode;
408 
409   /* If the user wants to override the symbol table, let them.  */
410   if (strcmp (arm_force_mode_string, "arm") == 0)
411     return 0;
412   if (strcmp (arm_force_mode_string, "thumb") == 0)
413     return 1;
414 
415   /* ARM v6-M and v7-M are always in Thumb mode.  */
416   if (gdbarch_tdep (gdbarch)->is_m)
417     return 1;
418 
419   /* If there are mapping symbols, consult them.  */
420   type = arm_find_mapping_symbol (memaddr, NULL);
421   if (type)
422     return type == 't';
423 
424   /* Thumb functions have a "special" bit set in minimal symbols.  */
425   sym = lookup_minimal_symbol_by_pc (memaddr);
426   if (sym)
427     return (MSYMBOL_IS_SPECIAL (sym));
428 
429   /* If the user wants to override the fallback mode, let them.  */
430   if (strcmp (arm_fallback_mode_string, "arm") == 0)
431     return 0;
432   if (strcmp (arm_fallback_mode_string, "thumb") == 0)
433     return 1;
434 
435   /* If we couldn't find any symbol, but we're talking to a running
436      target, then trust the current value of $cpsr.  This lets
437      "display/i $pc" always show the correct mode (though if there is
438      a symbol table we will not reach here, so it still may not be
439      displayed in the mode in which it will be executed).  */
440   if (target_has_registers)
441     return arm_frame_is_thumb (get_current_frame ());
442 
443   /* Otherwise we're out of luck; we assume ARM.  */
444   return 0;
445 }
446 
447 /* Remove useless bits from addresses in a running program.  */
448 static CORE_ADDR
449 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
450 {
451   /* On M-profile devices, do not strip the low bit from EXC_RETURN
452      (the magic exception return address).  */
453   if (gdbarch_tdep (gdbarch)->is_m
454       && (val & 0xfffffff0) == 0xfffffff0)
455     return val;
456 
457   if (arm_apcs_32)
458     return UNMAKE_THUMB_ADDR (val);
459   else
460     return (val & 0x03fffffc);
461 }
462 
463 /* Return 1 if PC is the start of a compiler helper function which
464    can be safely ignored during prologue skipping.  IS_THUMB is true
465    if the function is known to be a Thumb function due to the way it
466    is being called.  */
467 static int
468 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
469 {
470   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
471   struct minimal_symbol *msym;
472 
473   msym = lookup_minimal_symbol_by_pc (pc);
474   if (msym != NULL
475       && SYMBOL_VALUE_ADDRESS (msym) == pc
476       && SYMBOL_LINKAGE_NAME (msym) != NULL)
477     {
478       const char *name = SYMBOL_LINKAGE_NAME (msym);
479 
480       /* The GNU linker's Thumb call stub to foo is named
481 	 __foo_from_thumb.  */
482       if (strstr (name, "_from_thumb") != NULL)
483 	name += 2;
484 
485       /* On soft-float targets, __truncdfsf2 is called to convert promoted
486 	 arguments to their argument types in non-prototyped
487 	 functions.  */
488       if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
489 	return 1;
490       if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
491 	return 1;
492 
493       /* Internal functions related to thread-local storage.  */
494       if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
495 	return 1;
496       if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
497 	return 1;
498     }
499   else
500     {
501       /* If we run against a stripped glibc, we may be unable to identify
502 	 special functions by name.  Check for one important case,
503 	 __aeabi_read_tp, by comparing the *code* against the default
504 	 implementation (this is hand-written ARM assembler in glibc).  */
505 
506       if (!is_thumb
507 	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
508 	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
509 	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
510 	     == 0xe240f01f) /* sub pc, r0, #31 */
511 	return 1;
512     }
513 
514   return 0;
515 }
516 
517 /* Support routines for instruction parsing.  */
518 #define submask(x) ((1L << ((x) + 1)) - 1)
519 #define bit(obj,st) (((obj) >> (st)) & 1)
520 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
521 #define sbits(obj,st,fn) \
522   ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
523 #define BranchDest(addr,instr) \
524   ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
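
/* An illustrative, non-compiled sketch: worked examples of the macros
   above, using the ARM instruction 0xeb0000ff ("bl <pc + 8 + 0x3fc>") as a
   sample value.  The function name is hypothetical.  */
#if 0
static void
example_bit_macros (void)
{
  unsigned long insn = 0xeb0000ff;

  gdb_assert (bit (insn, 24) == 1);		/* The link bit of BL.  */
  gdb_assert (bits (insn, 24, 27) == 0xb);	/* The opcode field.  */
  gdb_assert (sbits (insn, 0, 23) == 0xff);	/* Sign-extended offset.  */
  /* BranchDest adds the 8-byte pipeline offset and the offset scaled by 4:
     0x1000 + 8 + (0xff << 2) == 0x1404.  */
  gdb_assert (BranchDest (0x1000, insn) == 0x1404);
}
#endif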
525 
526 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1
527    is the first 16 bits of the instruction, and INSN2 is the second 16 bits
528    of the instruction.  */
529 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
530   ((bits ((insn1), 0, 3) << 12)               \
531    | (bits ((insn1), 10, 10) << 11)           \
532    | (bits ((insn2), 12, 14) << 8)            \
533    | bits ((insn2), 0, 7))
534 
535 /* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
536    the 32-bit instruction.  */
537 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
538   ((bits ((insn), 16, 19) << 12) \
539    | bits ((insn), 0, 11))
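
/* An illustrative, non-compiled sketch: "movw r0, #0x1234" encodes as the
   Thumb halfword pair 0xf241/0x2034 and as the ARM word 0xe3001234; both
   extraction macros recover the 16-bit immediate 0x1234.  The function name
   is hypothetical.  */
#if 0
static void
example_movw_immediates (void)
{
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) == 0x1234);
  gdb_assert (EXTRACT_MOVW_MOVT_IMM_A (0xe3001234) == 0x1234);
}
#endif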
540 
541 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */
542 
543 static unsigned int
544 thumb_expand_immediate (unsigned int imm)
545 {
546   unsigned int count = imm >> 7;
547 
548   if (count < 8)
549     switch (count / 2)
550       {
551       case 0:
552 	return imm & 0xff;
553       case 1:
554 	return (imm & 0xff) | ((imm & 0xff) << 16);
555       case 2:
556 	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
557       case 3:
558 	return (imm & 0xff) | ((imm & 0xff) << 8)
559 		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
560       }
561 
562   return (0x80 | (imm & 0x7f)) << (32 - count);
563 }
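
/* An illustrative, non-compiled sketch: examples of the encodings handled
   above.  The top bits of the 12-bit immediate select either a
   byte-replication pattern or a rotated 8-bit constant.  The function name
   is hypothetical.  */
#if 0
static void
example_thumb_expand_immediate (void)
{
  gdb_assert (thumb_expand_immediate (0x0ab) == 0x000000ab); /* Plain 8-bit.  */
  gdb_assert (thumb_expand_immediate (0x1ab) == 0x00ab00ab); /* Bytes 0 and 2.  */
  gdb_assert (thumb_expand_immediate (0x2ab) == 0xab00ab00); /* Bytes 1 and 3.  */
  gdb_assert (thumb_expand_immediate (0x3ab) == 0xabababab); /* All four bytes.  */
  gdb_assert (thumb_expand_immediate (0x4ab) == 0x55800000); /* 0xab ror 9.  */
}
#endif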
564 
565 /* Return 1 if the 16-bit Thumb instruction INST might change
566    control flow, 0 otherwise.  */
567 
568 static int
569 thumb_instruction_changes_pc (unsigned short inst)
570 {
571   if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
572     return 1;
573 
574   if ((inst & 0xf000) == 0xd000)	/* conditional branch */
575     return 1;
576 
577   if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
578     return 1;
579 
580   if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
581     return 1;
582 
583   if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
584     return 1;
585 
586   if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
587     return 1;
588 
589   return 0;
590 }
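
/* An illustrative, non-compiled sketch: "bx lr" (0x4770) is classified as
   changing the PC, while "push {r7, lr}" (0xb580) is not.  The function
   name is hypothetical.  */
#if 0
static void
example_thumb_changes_pc (void)
{
  gdb_assert (thumb_instruction_changes_pc (0x4770) == 1);
  gdb_assert (thumb_instruction_changes_pc (0xb580) == 0);
}
#endif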
591 
592 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
593    might change control flow, 0 otherwise.  */
594 
595 static int
596 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
597 {
598   if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
599     {
600       /* Branches and miscellaneous control instructions.  */
601 
602       if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
603 	{
604 	  /* B, BL, BLX.  */
605 	  return 1;
606 	}
607       else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
608 	{
609 	  /* SUBS PC, LR, #imm8.  */
610 	  return 1;
611 	}
612       else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
613 	{
614 	  /* Conditional branch.  */
615 	  return 1;
616 	}
617 
618       return 0;
619     }
620 
621   if ((inst1 & 0xfe50) == 0xe810)
622     {
623       /* Load multiple or RFE.  */
624 
625       if (bit (inst1, 7) && !bit (inst1, 8))
626 	{
627 	  /* LDMIA or POP */
628 	  if (bit (inst2, 15))
629 	    return 1;
630 	}
631       else if (!bit (inst1, 7) && bit (inst1, 8))
632 	{
633 	  /* LDMDB */
634 	  if (bit (inst2, 15))
635 	    return 1;
636 	}
637       else if (bit (inst1, 7) && bit (inst1, 8))
638 	{
639 	  /* RFEIA */
640 	  return 1;
641 	}
642       else if (!bit (inst1, 7) && !bit (inst1, 8))
643 	{
644 	  /* RFEDB */
645 	  return 1;
646 	}
647 
648       return 0;
649     }
650 
651   if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
652     {
653       /* MOV PC or MOVS PC.  */
654       return 1;
655     }
656 
657   if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
658     {
659       /* LDR PC.  */
660       if (bits (inst1, 0, 3) == 15)
661 	return 1;
662       if (bit (inst1, 7))
663 	return 1;
664       if (bit (inst2, 11))
665 	return 1;
666       if ((inst2 & 0x0fc0) == 0x0000)
667 	return 1;
668 
669       return 0;
670     }
671 
672   if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
673     {
674       /* TBB.  */
675       return 1;
676     }
677 
678   if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
679     {
680       /* TBH.  */
681       return 1;
682     }
683 
684   return 0;
685 }
686 
687 /* Analyze a Thumb prologue, looking for a recognizable stack frame
688    and frame pointer.  Scan until we encounter a store that could
689    clobber the stack frame unexpectedly, or an unknown instruction.
690    Return the last address which is definitely safe to skip for an
691    initial breakpoint.  */
692 
693 static CORE_ADDR
694 thumb_analyze_prologue (struct gdbarch *gdbarch,
695 			CORE_ADDR start, CORE_ADDR limit,
696 			struct arm_prologue_cache *cache)
697 {
698   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
699   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
700   int i;
701   pv_t regs[16];
702   struct pv_area *stack;
703   struct cleanup *back_to;
704   CORE_ADDR offset;
705   CORE_ADDR unrecognized_pc = 0;
706 
707   for (i = 0; i < 16; i++)
708     regs[i] = pv_register (i, 0);
709   stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
710   back_to = make_cleanup_free_pv_area (stack);
711 
712   while (start < limit)
713     {
714       unsigned short insn;
715 
716       insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
717 
718       if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
719 	{
720 	  int regno;
721 	  int mask;
722 
723 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
724 	    break;
725 
726 	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
727 	     whether to save LR (R14).  */
728 	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
729 
730 	  /* Calculate offsets of saved R0-R7 and LR.  */
731 	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
732 	    if (mask & (1 << regno))
733 	      {
734 		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
735 						       -4);
736 		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
737 	      }
738 	}
739       else if ((insn & 0xff00) == 0xb000)	/* add sp, #simm  OR
740 						   sub sp, #simm */
741 	{
742 	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
743 	  if (insn & 0x80)			/* Check for SUB.  */
744 	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
745 						   -offset);
746 	  else
747 	    regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
748 						   offset);
749 	}
750       else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
751 	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
752 						    (insn & 0xff) << 2);
753       else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
754 	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
755 	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
756 						   bits (insn, 6, 8));
757       else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
758 	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
759 	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
760 						    bits (insn, 0, 7));
761       else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
762 	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
763 	       && pv_is_constant (regs[bits (insn, 3, 5)]))
764 	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
765 					  regs[bits (insn, 6, 8)]);
766       else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
767 	       && pv_is_constant (regs[bits (insn, 3, 6)]))
768 	{
769 	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
770 	  int rm = bits (insn, 3, 6);
771 	  regs[rd] = pv_add (regs[rd], regs[rm]);
772 	}
773       else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
774 	{
775 	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
776 	  int src_reg = (insn & 0x78) >> 3;
777 	  regs[dst_reg] = regs[src_reg];
778 	}
779       else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
780 	{
781 	  /* Handle stores to the stack.  Normally pushes are used,
782 	     but with GCC -mtpcs-frame, there may be other stores
783 	     in the prologue to create the frame.  */
784 	  int regno = (insn >> 8) & 0x7;
785 	  pv_t addr;
786 
787 	  offset = (insn & 0xff) << 2;
788 	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
789 
790 	  if (pv_area_store_would_trash (stack, addr))
791 	    break;
792 
793 	  pv_area_store (stack, addr, 4, regs[regno]);
794 	}
795       else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
796 	{
797 	  int rd = bits (insn, 0, 2);
798 	  int rn = bits (insn, 3, 5);
799 	  pv_t addr;
800 
801 	  offset = bits (insn, 6, 10) << 2;
802 	  addr = pv_add_constant (regs[rn], offset);
803 
804 	  if (pv_area_store_would_trash (stack, addr))
805 	    break;
806 
807 	  pv_area_store (stack, addr, 4, regs[rd]);
808 	}
809       else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
810 		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
811 	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
812 	/* Ignore stores of argument registers to the stack.  */
813 	;
814       else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
815 	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
816 	/* Ignore block loads from the stack, potentially copying
817 	   parameters from memory.  */
818 	;
819       else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
820 	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
821 		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
822 	/* Similarly ignore single loads from the stack.  */
823 	;
824       else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
825 	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
826 	/* Skip register copies, i.e. saves to another register
827 	   instead of the stack.  */
828 	;
829       else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
830 	/* Recognize constant loads; even with small stacks these are necessary
831 	   on Thumb.  */
832 	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
833       else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
834 	{
835 	  /* Constant pool loads, for the same reason.  */
836 	  unsigned int constant;
837 	  CORE_ADDR loc;
838 
839 	  loc = start + 4 + bits (insn, 0, 7) * 4;
840 	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
841 	  regs[bits (insn, 8, 10)] = pv_constant (constant);
842 	}
843       else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions.  */
844 	{
845 	  unsigned short inst2;
846 
847 	  inst2 = read_memory_unsigned_integer (start + 2, 2,
848 						byte_order_for_code);
849 
850 	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
851 	    {
852 	      /* BL, BLX.  Allow some special function calls when
853 		 skipping the prologue; GCC generates these before
854 		 storing arguments to the stack.  */
855 	      CORE_ADDR nextpc;
856 	      int j1, j2, imm1, imm2;
857 
858 	      imm1 = sbits (insn, 0, 10);
859 	      imm2 = bits (inst2, 0, 10);
860 	      j1 = bit (inst2, 13);
861 	      j2 = bit (inst2, 11);
862 
863 	      offset = ((imm1 << 12) + (imm2 << 1));
864 	      offset ^= ((!j2) << 22) | ((!j1) << 23);
865 
866 	      nextpc = start + 4 + offset;
867 	      /* For BLX make sure to clear the low bits.  */
868 	      if (bit (inst2, 12) == 0)
869 		nextpc = nextpc & 0xfffffffc;
870 
871 	      if (!skip_prologue_function (gdbarch, nextpc,
872 					   bit (inst2, 12) != 0))
873 		break;
874 	    }
875 
876 	  else if ((insn & 0xffd0) == 0xe900    /* stmdb Rn{!},
877 						   { registers } */
878 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
879 	    {
880 	      pv_t addr = regs[bits (insn, 0, 3)];
881 	      int regno;
882 
883 	      if (pv_area_store_would_trash (stack, addr))
884 		break;
885 
886 	      /* Calculate offsets of saved registers.  */
887 	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
888 		if (inst2 & (1 << regno))
889 		  {
890 		    addr = pv_add_constant (addr, -4);
891 		    pv_area_store (stack, addr, 4, regs[regno]);
892 		  }
893 
894 	      if (insn & 0x0020)
895 		regs[bits (insn, 0, 3)] = addr;
896 	    }
897 
898 	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
899 						   [Rn, #+/-imm]{!} */
900 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
901 	    {
902 	      int regno1 = bits (inst2, 12, 15);
903 	      int regno2 = bits (inst2, 8, 11);
904 	      pv_t addr = regs[bits (insn, 0, 3)];
905 
906 	      offset = inst2 & 0xff;
907 	      if (insn & 0x0080)
908 		addr = pv_add_constant (addr, offset);
909 	      else
910 		addr = pv_add_constant (addr, -offset);
911 
912 	      if (pv_area_store_would_trash (stack, addr))
913 		break;
914 
915 	      pv_area_store (stack, addr, 4, regs[regno1]);
916 	      pv_area_store (stack, pv_add_constant (addr, 4),
917 			     4, regs[regno2]);
918 
919 	      if (insn & 0x0020)
920 		regs[bits (insn, 0, 3)] = addr;
921 	    }
922 
923 	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
924 		   && (inst2 & 0x0c00) == 0x0c00
925 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
926 	    {
927 	      int regno = bits (inst2, 12, 15);
928 	      pv_t addr = regs[bits (insn, 0, 3)];
929 
930 	      offset = inst2 & 0xff;
931 	      if (inst2 & 0x0200)
932 		addr = pv_add_constant (addr, offset);
933 	      else
934 		addr = pv_add_constant (addr, -offset);
935 
936 	      if (pv_area_store_would_trash (stack, addr))
937 		break;
938 
939 	      pv_area_store (stack, addr, 4, regs[regno]);
940 
941 	      if (inst2 & 0x0100)
942 		regs[bits (insn, 0, 3)] = addr;
943 	    }
944 
945 	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
946 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
947 	    {
948 	      int regno = bits (inst2, 12, 15);
949 	      pv_t addr;
950 
951 	      offset = inst2 & 0xfff;
952 	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
953 
954 	      if (pv_area_store_would_trash (stack, addr))
955 		break;
956 
957 	      pv_area_store (stack, addr, 4, regs[regno]);
958 	    }
959 
960 	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
961 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
962 	    /* Ignore stores of argument registers to the stack.  */
963 	    ;
964 
965 	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
966 		   && (inst2 & 0x0d00) == 0x0c00
967 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
968 	    /* Ignore stores of argument registers to the stack.  */
969 	    ;
970 
971 	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
972 						   { registers } */
973 		   && (inst2 & 0x8000) == 0x0000
974 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
975 	    /* Ignore block loads from the stack, potentially copying
976 	       parameters from memory.  */
977 	    ;
978 
979 	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
980 						   [Rn, #+/-imm] */
981 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
982 	    /* Similarly ignore dual loads from the stack.  */
983 	    ;
984 
985 	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
986 		   && (inst2 & 0x0d00) == 0x0c00
987 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
988 	    /* Similarly ignore single loads from the stack.  */
989 	    ;
990 
991 	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
992 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
993 	    /* Similarly ignore single loads from the stack.  */
994 	    ;
995 
996 	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
997 		   && (inst2 & 0x8000) == 0x0000)
998 	    {
999 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1000 				  | (bits (inst2, 12, 14) << 8)
1001 				  | bits (inst2, 0, 7));
1002 
1003 	      regs[bits (inst2, 8, 11)]
1004 		= pv_add_constant (regs[bits (insn, 0, 3)],
1005 				   thumb_expand_immediate (imm));
1006 	    }
1007 
1008 	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
1009 		   && (inst2 & 0x8000) == 0x0000)
1010 	    {
1011 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1012 				  | (bits (inst2, 12, 14) << 8)
1013 				  | bits (inst2, 0, 7));
1014 
1015 	      regs[bits (inst2, 8, 11)]
1016 		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
1017 	    }
1018 
1019 	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
1020 		   && (inst2 & 0x8000) == 0x0000)
1021 	    {
1022 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1023 				  | (bits (inst2, 12, 14) << 8)
1024 				  | bits (inst2, 0, 7));
1025 
1026 	      regs[bits (inst2, 8, 11)]
1027 		= pv_add_constant (regs[bits (insn, 0, 3)],
1028 				   - (CORE_ADDR) thumb_expand_immediate (imm));
1029 	    }
1030 
1031 	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
1032 		   && (inst2 & 0x8000) == 0x0000)
1033 	    {
1034 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1035 				  | (bits (inst2, 12, 14) << 8)
1036 				  | bits (inst2, 0, 7));
1037 
1038 	      regs[bits (inst2, 8, 11)]
1039 		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1040 	    }
1041 
1042 	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
1043 	    {
1044 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 				  | (bits (inst2, 12, 14) << 8)
1046 				  | bits (inst2, 0, 7));
1047 
1048 	      regs[bits (inst2, 8, 11)]
1049 		= pv_constant (thumb_expand_immediate (imm));
1050 	    }
1051 
1052 	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
1053 	    {
1054 	      unsigned int imm
1055 		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1056 
1057 	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
1058 	    }
1059 
1060 	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
1061 		   && (inst2 & 0xf0f0) == 0)
1062 	    {
1063 	      int dst_reg = (inst2 & 0x0f00) >> 8;
1064 	      int src_reg = inst2 & 0xf;
1065 	      regs[dst_reg] = regs[src_reg];
1066 	    }
1067 
1068 	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
1069 	    {
1070 	      /* Constant pool loads.  */
1071 	      unsigned int constant;
1072 	      CORE_ADDR loc;
1073 
1074 	      offset = bits (insn, 0, 11);
1075 	      if (insn & 0x0080)
1076 		loc = start + 4 + offset;
1077 	      else
1078 		loc = start + 4 - offset;
1079 
1080 	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
1081 	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
1082 	    }
1083 
1084 	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
1085 	    {
1086 	      /* Constant pool loads.  */
1087 	      unsigned int constant;
1088 	      CORE_ADDR loc;
1089 
1090 	      offset = bits (insn, 0, 7) << 2;
1091 	      if (insn & 0x0080)
1092 		loc = start + 4 + offset;
1093 	      else
1094 		loc = start + 4 - offset;
1095 
1096 	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
1097 	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
1098 
1099 	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1100 	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
1101 	    }
1102 
1103 	  else if (thumb2_instruction_changes_pc (insn, inst2))
1104 	    {
1105 	      /* Don't scan past anything that might change control flow.  */
1106 	      break;
1107 	    }
1108 	  else
1109 	    {
1110 	      /* The optimizer might shove anything into the prologue,
1111 		 so we just skip what we don't recognize.  */
1112 	      unrecognized_pc = start;
1113 	    }
1114 
1115 	  start += 2;
1116 	}
1117       else if (thumb_instruction_changes_pc (insn))
1118 	{
1119 	  /* Don't scan past anything that might change control flow.  */
1120 	  break;
1121 	}
1122       else
1123 	{
1124 	  /* The optimizer might shove anything into the prologue,
1125 	     so we just skip what we don't recognize.  */
1126 	  unrecognized_pc = start;
1127 	}
1128 
1129       start += 2;
1130     }
1131 
1132   if (arm_debug)
1133     fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1134 			paddress (gdbarch, start));
1135 
1136   if (unrecognized_pc == 0)
1137     unrecognized_pc = start;
1138 
1139   if (cache == NULL)
1140     {
1141       do_cleanups (back_to);
1142       return unrecognized_pc;
1143     }
1144 
1145   if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1146     {
1147       /* Frame pointer is fp.  Frame size is constant.  */
1148       cache->framereg = ARM_FP_REGNUM;
1149       cache->framesize = -regs[ARM_FP_REGNUM].k;
1150     }
1151   else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1152     {
1153       /* Frame pointer is r7.  Frame size is constant.  */
1154       cache->framereg = THUMB_FP_REGNUM;
1155       cache->framesize = -regs[THUMB_FP_REGNUM].k;
1156     }
1157   else
1158     {
1159       /* Try the stack pointer... this is a bit desperate.  */
1160       cache->framereg = ARM_SP_REGNUM;
1161       cache->framesize = -regs[ARM_SP_REGNUM].k;
1162     }
1163 
1164   for (i = 0; i < 16; i++)
1165     if (pv_area_find_reg (stack, gdbarch, i, &offset))
1166       cache->saved_regs[i].addr = offset;
1167 
1168   do_cleanups (back_to);
1169   return unrecognized_pc;
1170 }
1171 
1172 
1173 /* Try to analyze the instructions starting from PC, which load the symbol
1174    __stack_chk_guard.  Return the address of __stack_chk_guard as computed
1175    from those instructions, set the destination register number in *DESTREG,
1176    and set the size in bytes of those instructions in *OFFSET.  Return 0 if
1177    the instructions are not recognized.  */
1178 
1179 static CORE_ADDR
1180 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1181 				 unsigned int *destreg, int *offset)
1182 {
1183   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1184   int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1185   unsigned int low, high, address;
1186 
1187   address = 0;
1188   if (is_thumb)
1189     {
1190       unsigned short insn1
1191 	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1192 
1193       if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1194 	{
1195 	  *destreg = bits (insn1, 8, 10);
1196 	  *offset = 2;
1197 	  address = bits (insn1, 0, 7);
1198 	}
1199       else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1200 	{
1201 	  unsigned short insn2
1202 	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1203 
1204 	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1205 
1206 	  insn1
1207 	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1208 	  insn2
1209 	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1210 
1211 	  /* movt Rd, #const */
1212 	  if ((insn1 & 0xfbc0) == 0xf2c0)
1213 	    {
1214 	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1215 	      *destreg = bits (insn2, 8, 11);
1216 	      *offset = 8;
1217 	      address = (high << 16 | low);
1218 	    }
1219 	}
1220     }
1221   else
1222     {
1223       unsigned int insn
1224 	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1225 
1226       if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1227 	{
1228 	  address = bits (insn, 0, 11);
1229 	  *destreg = bits (insn, 12, 15);
1230 	  *offset = 4;
1231 	}
1232       else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1233 	{
1234 	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1235 
1236 	  insn
1237 	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1238 
1239 	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1240 	    {
1241 	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1242 	      *destreg = bits (insn, 12, 15);
1243 	      *offset = 8;
1244 	      address = (high << 16 | low);
1245 	    }
1246 	}
1247     }
1248 
1249   return address;
1250 }
1251 
1252 /* Try to skip the sequence of instructions used for the stack protector.
1253    If PC points to the first instruction of this sequence, return the
1254    address of the first instruction after the sequence; otherwise, return
1255    the original PC.
1256 
1257    On ARM, this sequence is composed of three main steps:
1258      Step 1: load the symbol __stack_chk_guard,
1259      Step 2: load from the address of __stack_chk_guard,
1260      Step 3: store it somewhere else.
1261 
1262    The instructions in steps 2 and 3 are usually the same across ARM
1263    architectures: step 2 is a single 'ldr Rx, [Rn, #0]', and step 3 is
1264    also a single instruction, 'str Rx, [r7, #immd]'.  However, the
1265    instructions in step 1 vary between ARM architectures.  On ARMv7,
1266    they are,
1267 
1268 	movw	Rn, #:lower16:__stack_chk_guard
1269 	movt	Rn, #:upper16:__stack_chk_guard
1270 
1271    and on ARMv5T,
1272 
1273 	ldr	Rn, .Label
1274 	....
1275 	.Label:
1276 	.word	__stack_chk_guard
1277 
1278    Since ldr/str are very common instructions, we can't use them alone as the
1279    'fingerprint' of the stack protector sequence.  Instead we use the sequence
1280    {movw/movt, ldr}/ldr/str plus, if not stripped, __stack_chk_guard.  */
1281 
1282 static CORE_ADDR
1283 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1284 {
1285   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1286   unsigned int basereg;
1287   struct minimal_symbol *stack_chk_guard;
1288   int offset;
1289   int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1290   CORE_ADDR addr;
1291 
1292   /* Try to parse the instructions in Step 1.  */
1293   addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1294 					   &basereg, &offset);
1295   if (!addr)
1296     return pc;
1297 
1298   stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1299   /* If the name of the symbol doesn't start with '__stack_chk_guard', this
1300      instruction sequence is not for a stack protector.  If the symbol has been
1301      stripped, we conservatively assume the sequence is a stack protector.  */
1302   if (stack_chk_guard
1303       && strncmp (SYMBOL_LINKAGE_NAME (stack_chk_guard), "__stack_chk_guard",
1304 		  strlen ("__stack_chk_guard")) != 0)
1305    return pc;
1306 
1307   if (is_thumb)
1308     {
1309       unsigned int destreg;
1310       unsigned short insn
1311 	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1312 
1313       /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
1314       if ((insn & 0xf800) != 0x6800)
1315 	return pc;
1316       if (bits (insn, 3, 5) != basereg)
1317 	return pc;
1318       destreg = bits (insn, 0, 2);
1319 
1320       insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1321 					   byte_order_for_code);
1322       /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
1323       if ((insn & 0xf800) != 0x6000)
1324 	return pc;
1325       if (destreg != bits (insn, 0, 2))
1326 	return pc;
1327     }
1328   else
1329     {
1330       unsigned int destreg;
1331       unsigned int insn
1332 	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1333 
1334       /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
1335       if ((insn & 0x0e500000) != 0x04100000)
1336 	return pc;
1337       if (bits (insn, 16, 19) != basereg)
1338 	return pc;
1339       destreg = bits (insn, 12, 15);
1340       /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
1341       insn = read_memory_unsigned_integer (pc + offset + 4,
1342 					   4, byte_order_for_code);
1343       if ((insn & 0x0e500000) != 0x04000000)
1344 	return pc;
1345       if (bits (insn, 12, 15) != destreg)
1346 	return pc;
1347     }
1348   /* The two ldr/str instructions together are 4 bytes on Thumb-2 and
1349      8 bytes on ARM.  */
1350   if (is_thumb)
1351     return pc + offset + 4;
1352   else
1353     return pc + offset + 8;
1354 }
1355 
1356 /* Advance the PC across any function entry prologue instructions to
1357    reach some "real" code.
1358 
1359    The APCS (ARM Procedure Call Standard) defines the following
1360    prologue:
1361 
1362    mov          ip, sp
1363    [stmfd       sp!, {a1,a2,a3,a4}]
1364    stmfd        sp!, {...,fp,ip,lr,pc}
1365    [stfe        f7, [sp, #-12]!]
1366    [stfe        f6, [sp, #-12]!]
1367    [stfe        f5, [sp, #-12]!]
1368    [stfe        f4, [sp, #-12]!]
1369    sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */
1370 
1371 static CORE_ADDR
1372 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1373 {
1374   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1375   unsigned long inst;
1376   CORE_ADDR skip_pc;
1377   CORE_ADDR func_addr, limit_pc;
1378 
1379   /* See if we can determine the end of the prologue via the symbol table.
1380      If so, then return either PC, or the PC after the prologue, whichever
1381      is greater.  */
1382   if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1383     {
1384       CORE_ADDR post_prologue_pc
1385 	= skip_prologue_using_sal (gdbarch, func_addr);
1386       struct symtab *s = find_pc_symtab (func_addr);
1387 
1388       if (post_prologue_pc)
1389 	post_prologue_pc
1390 	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1391 
1392 
1393       /* GCC always emits a line note before the prologue and another
1394 	 one after, even if the two are at the same address or on the
1395 	 same line.  Take advantage of this so that we do not need to
1396 	 know every instruction that might appear in the prologue.  We
1397 	 will have producer information for most binaries; if it is
1398 	 missing (e.g. for -gstabs), assume the GNU tools.  */
1399       if (post_prologue_pc
1400 	  && (s == NULL
1401 	      || s->producer == NULL
1402 	      || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1403 	      || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1404 	return post_prologue_pc;
1405 
1406       if (post_prologue_pc != 0)
1407 	{
1408 	  CORE_ADDR analyzed_limit;
1409 
1410 	  /* For non-GCC compilers, make sure the entire line is an
1411 	     acceptable prologue; GDB will round this function's
1412 	     return value up to the end of the following line so we
1413 	     can not skip just part of a line (and we do not want to).
1414 
1415 	     RealView does not treat the prologue specially, but does
1416 	     associate prologue code with the opening brace; so this
1417 	     lets us skip the first line if we think it is the opening
1418 	     brace.  */
1419 	  if (arm_pc_is_thumb (gdbarch, func_addr))
1420 	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1421 						     post_prologue_pc, NULL);
1422 	  else
1423 	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1424 						   post_prologue_pc, NULL);
1425 
1426 	  if (analyzed_limit != post_prologue_pc)
1427 	    return func_addr;
1428 
1429 	  return post_prologue_pc;
1430 	}
1431     }
1432 
1433   /* Can't determine prologue from the symbol table, need to examine
1434      instructions.  */
1435 
1436   /* Find an upper limit on the function prologue using the debug
1437      information.  If the debug information could not be used to provide
1438      that bound, then use an arbitrary large number as the upper bound.  */
1439   /* Like arm_scan_prologue, stop no later than pc + 64.  */
1440   limit_pc = skip_prologue_using_sal (gdbarch, pc);
1441   if (limit_pc == 0)
1442     limit_pc = pc + 64;          /* Magic.  */
1443 
1444 
1445   /* Check if this is Thumb code.  */
1446   if (arm_pc_is_thumb (gdbarch, pc))
1447     return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1448 
1449   for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1450     {
1451       inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1452 
1453       /* "mov ip, sp" is no longer a required part of the prologue.  */
1454       if (inst == 0xe1a0c00d)			/* mov ip, sp */
1455 	continue;
1456 
1457       if ((inst & 0xfffff000) == 0xe28dc000)    /* add ip, sp #n */
1458 	continue;
1459 
1460       if ((inst & 0xfffff000) == 0xe24dc000)    /* sub ip, sp #n */
1461 	continue;
1462 
1463       /* Some prologues begin with "str lr, [sp, #-4]!".  */
1464       if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */
1465 	continue;
1466 
1467       if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */
1468 	continue;
1469 
1470       if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */
1471 	continue;
1472 
1473       /* Any insns after this point may float into the code, if it makes
1474 	 for better instruction scheduling, so we skip them only if we
1475 	 find them, but still consider the function to be frame-ful.  */
1476 
1477       /* We may have either one sfmfd instruction here, or several stfe
1478 	 insns, depending on the version of floating point code we
1479 	 support.  */
1480       if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */
1481 	continue;
1482 
1483       if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */
1484 	continue;
1485 
1486       if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */
1487 	continue;
1488 
1489       if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */
1490 	continue;
1491 
1492       if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
1493 	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
1494 	  || (inst & 0xffffc000) == 0xe50b0000)	/* str  r(0123),[r11,#-nn] */
1495 	continue;
1496 
1497       if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
1498 	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
1499 	  || (inst & 0xffffc000) == 0xe58d0000)	/* str  r(0123),[sp,#nn] */
1500 	continue;
1501 
1502       /* Unrecognized instruction; stop scanning.  */
1503       break;
1504     }
1505 
1506   return skip_pc;		/* End of prologue.  */
1507 }
1508 
1509 /* *INDENT-OFF* */
1510 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1511    This function decodes a Thumb function prologue to determine:
1512      1) the size of the stack frame
1513      2) which registers are saved on it
1514      3) the offsets of saved regs
1515      4) the offset from the stack pointer to the frame pointer
1516 
1517    A typical Thumb function prologue would create this stack frame
1518    (offsets relative to FP)
1519      old SP ->	24  stack parameters
1520 		20  LR
1521 		16  R7
1522      R7 ->       0  local variables (16 bytes)
1523      SP ->     -12  additional stack space (12 bytes)
1524    The frame size would thus be 36 bytes, and the frame offset would be
1525    12 bytes.  The frame register is R7.
1526 
1527    The comments for thumb_skip_prolog() describe the algorithm we use
1528    to detect the end of the prologue.  */
1529 /* *INDENT-ON* */
1530 
1531 static void
1532 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1533 		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1534 {
1535   CORE_ADDR prologue_start;
1536   CORE_ADDR prologue_end;
1537 
1538   if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1539 				&prologue_end))
1540     {
1541       /* See comment in arm_scan_prologue for an explanation of
1542 	 this heuristic.  */
1543       if (prologue_end > prologue_start + 64)
1544 	{
1545 	  prologue_end = prologue_start + 64;
1546 	}
1547     }
1548   else
1549     /* We're in the boondocks: we have no idea where the start of the
1550        function is.  */
1551     return;
1552 
1553   prologue_end = min (prologue_end, prev_pc);
1554 
1555   thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1556 }
1557 
1558 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */
1559 
1560 static int
1561 arm_instruction_changes_pc (uint32_t this_instr)
1562 {
1563   if (bits (this_instr, 28, 31) == INST_NV)
1564     /* Unconditional instructions.  */
1565     switch (bits (this_instr, 24, 27))
1566       {
1567       case 0xa:
1568       case 0xb:
1569 	/* Branch with Link and change to Thumb.  */
1570 	return 1;
1571       case 0xc:
1572       case 0xd:
1573       case 0xe:
1574 	/* Coprocessor register transfer.  */
1575         if (bits (this_instr, 12, 15) == 15)
1576 	  error (_("Invalid update to pc in instruction"));
1577 	return 0;
1578       default:
1579 	return 0;
1580       }
1581   else
1582     switch (bits (this_instr, 25, 27))
1583       {
1584       case 0x0:
1585 	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1586 	  {
1587 	    /* Multiplies and extra load/stores.  */
1588 	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1589 	      /* Neither multiplies nor extension load/stores are allowed
1590 		 to modify PC.  */
1591 	      return 0;
1592 
1593 	    /* Otherwise, miscellaneous instructions.  */
1594 
1595 	    /* BX <reg>, BXJ <reg>, BLX <reg> */
1596 	    if (bits (this_instr, 4, 27) == 0x12fff1
1597 		|| bits (this_instr, 4, 27) == 0x12fff2
1598 		|| bits (this_instr, 4, 27) == 0x12fff3)
1599 	      return 1;
1600 
1601 	    /* Other miscellaneous instructions are unpredictable if they
1602 	       modify PC.  */
1603 	    return 0;
1604 	  }
1605 	/* Data processing instruction.  Fall through.  */
1606 
1607       case 0x1:
1608 	if (bits (this_instr, 12, 15) == 15)
1609 	  return 1;
1610 	else
1611 	  return 0;
1612 
1613       case 0x2:
1614       case 0x3:
1615 	/* Media instructions and architecturally undefined instructions.  */
1616 	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1617 	  return 0;
1618 
1619 	/* Stores.  */
1620 	if (bit (this_instr, 20) == 0)
1621 	  return 0;
1622 
1623 	/* Loads.  */
1624 	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1625 	  return 1;
1626 	else
1627 	  return 0;
1628 
1629       case 0x4:
1630 	/* Load/store multiple.  */
1631 	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1632 	  return 1;
1633 	else
1634 	  return 0;
1635 
1636       case 0x5:
1637 	/* Branch and branch with link.  */
1638 	return 1;
1639 
1640       case 0x6:
1641       case 0x7:
1642 	/* Coprocessor transfers or SWIs can not affect PC.  */
1643 	return 0;
1644 
1645       default:
1646 	internal_error (__FILE__, __LINE__, _("bad value in switch"));
1647       }
1648 }
1649 
1650 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1651    continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
1652    fill it in.  Return the first address not recognized as a prologue
1653    instruction.
1654 
1655    We recognize all the instructions typically found in ARM prologues,
1656    plus harmless instructions which can be skipped (either for analysis
1657    purposes, or a more restrictive set that can be skipped when finding
1658    the end of the prologue).  */
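/* For reference, a typical APCS-style prologue emitted by GCC -- one
   possible form, not the only one -- and the kind of sequence the scan
   below is designed to recognize, looks roughly like this:

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #16

   Frameless or optimized functions may omit most of this, which is why
   unrecognized instructions are skipped rather than treated as the end
   of the prologue.  */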
1659 
1660 static CORE_ADDR
1661 arm_analyze_prologue (struct gdbarch *gdbarch,
1662 		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1663 		      struct arm_prologue_cache *cache)
1664 {
1665   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1666   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1667   int regno;
1668   CORE_ADDR offset, current_pc;
1669   pv_t regs[ARM_FPS_REGNUM];
1670   struct pv_area *stack;
1671   struct cleanup *back_to;
1672   int framereg, framesize;
1673   CORE_ADDR unrecognized_pc = 0;
1674 
1675   /* Search the prologue looking for instructions that set up the
1676      frame pointer, adjust the stack pointer, and save registers.
1677 
1678      Be careful, however, and if it doesn't look like a prologue,
1679      don't try to scan it.  If, for instance, a frameless function
1680      begins with stmfd sp!, then we will tell ourselves there is
1681      a frame, which will confuse stack traceback, as well as "finish"
1682      and other operations that rely on a knowledge of the stack
1683      traceback.  */
1684 
1685   for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1686     regs[regno] = pv_register (regno, 0);
1687   stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1688   back_to = make_cleanup_free_pv_area (stack);
1689 
1690   for (current_pc = prologue_start;
1691        current_pc < prologue_end;
1692        current_pc += 4)
1693     {
1694       unsigned int insn
1695 	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1696 
1697       if (insn == 0xe1a0c00d)		/* mov ip, sp */
1698 	{
1699 	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1700 	  continue;
1701 	}
1702       else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
1703 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1704 	{
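	  /* ARM data-processing immediates encode an 8-bit value rotated
	     right by twice the 4-bit rotate field.  For example, in
	     "sub sp, sp, #0x1000" (0xe24dda01) imm8 is 0x01 and the
	     rotate field is 0xa, giving 0x01 ror 20 == 0x1000.  The same
	     decoding is used by the "sub" cases below.  */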
1705 	  unsigned imm = insn & 0xff;                   /* immediate value */
1706 	  unsigned rot = (insn & 0xf00) >> 7;           /* rotate amount */
1707 	  int rd = bits (insn, 12, 15);
1708 	  imm = (imm >> rot) | (imm << (32 - rot));
1709 	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1710 	  continue;
1711 	}
1712       else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
1713 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1714 	{
1715 	  unsigned imm = insn & 0xff;                   /* immediate value */
1716 	  unsigned rot = (insn & 0xf00) >> 7;           /* rotate amount */
1717 	  int rd = bits (insn, 12, 15);
1718 	  imm = (imm >> rot) | (imm << (32 - rot));
1719 	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1720 	  continue;
1721 	}
1722       else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
1723 							   [sp, #-4]! */
1724 	{
1725 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1726 	    break;
1727 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1728 	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1729 			 regs[bits (insn, 12, 15)]);
1730 	  continue;
1731 	}
1732       else if ((insn & 0xffff0000) == 0xe92d0000)
1733 	/* stmfd sp!, {..., fp, ip, lr, pc}
1734 	   or
1735 	   stmfd sp!, {a1, a2, a3, a4}  */
1736 	{
1737 	  int mask = insn & 0xffff;
1738 
1739 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1740 	    break;
1741 
1742 	  /* Calculate offsets of saved registers.  */
1743 	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1744 	    if (mask & (1 << regno))
1745 	      {
1746 		regs[ARM_SP_REGNUM]
1747 		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1748 		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1749 	      }
1750 	}
1751       else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
1752 	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
1753 	       || (insn & 0xffffc000) == 0xe50b0000)	/* str  rx,[r11,#-n] */
1754 	{
1755 	  /* No need to add this to saved_regs -- it's just an arg reg.  */
1756 	  continue;
1757 	}
1758       else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
1759 	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
1760 	       || (insn & 0xffffc000) == 0xe58d0000)	/* str  rx,[sp,#n] */
1761 	{
1762 	  /* No need to add this to saved_regs -- it's just an arg reg.  */
1763 	  continue;
1764 	}
1765       else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
1766 							   { registers } */
1767 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1768 	{
1769 	  /* No need to add this to saved_regs -- it's just arg regs.  */
1770 	  continue;
1771 	}
1772       else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #n */
1773 	{
1774 	  unsigned imm = insn & 0xff;			/* immediate value */
1775 	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
1776 	  imm = (imm >> rot) | (imm << (32 - rot));
1777 	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1778 	}
1779       else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #n */
1780 	{
1781 	  unsigned imm = insn & 0xff;			/* immediate value */
1782 	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
1783 	  imm = (imm >> rot) | (imm << (32 - rot));
1784 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1785 	}
1786       else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
1787 							   [sp, -#c]! */
1788 	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
1789 	{
1790 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1791 	    break;
1792 
1793 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1794 	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1795 	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1796 	}
1797       else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
1798 							   [sp!] */
1799 	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
1800 	{
1801 	  int n_saved_fp_regs;
1802 	  unsigned int fp_start_reg, fp_bound_reg;
1803 
1804 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1805 	    break;
1806 
1807 	  if ((insn & 0x800) == 0x800)		/* N0 is set */
1808 	    {
1809 	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
1810 		n_saved_fp_regs = 3;
1811 	      else
1812 		n_saved_fp_regs = 1;
1813 	    }
1814 	  else
1815 	    {
1816 	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
1817 		n_saved_fp_regs = 2;
1818 	      else
1819 		n_saved_fp_regs = 4;
1820 	    }
1821 
1822 	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1823 	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1824 	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1825 	    {
1826 	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1827 	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1828 			     regs[fp_start_reg]);
1829 	    }
1830 	}
1831       else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1832 	{
1833 	  /* Allow some special function calls when skipping the
1834 	     prologue; GCC generates these before storing arguments to
1835 	     the stack.  */
1836 	  CORE_ADDR dest = BranchDest (current_pc, insn);
1837 
1838 	  if (skip_prologue_function (gdbarch, dest, 0))
1839 	    continue;
1840 	  else
1841 	    break;
1842 	}
1843       else if ((insn & 0xf0000000) != 0xe0000000)
1844 	break;			/* Condition not true, exit early.  */
1845       else if (arm_instruction_changes_pc (insn))
1846 	/* Don't scan past anything that might change control flow.  */
1847 	break;
1848       else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
1849 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1850 	/* Ignore block loads from the stack, potentially copying
1851 	   parameters from memory.  */
1852 	continue;
1853       else if ((insn & 0xfc500000) == 0xe4100000
1854 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1855 	/* Similarly ignore single loads from the stack.  */
1856 	continue;
1857       else if ((insn & 0xffff0ff0) == 0xe1a00000)
1858 	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
1859 	   register instead of the stack.  */
1860 	continue;
1861       else
1862 	{
1863 	  /* The optimizer might shove anything into the prologue,
1864 	     so we just skip what we don't recognize.  */
1865 	  unrecognized_pc = current_pc;
1866 	  continue;
1867 	}
1868     }
1869 
1870   if (unrecognized_pc == 0)
1871     unrecognized_pc = current_pc;
1872 
1873   /* The frame size is just the distance from the frame register
1874      to the original stack pointer.  */
1875   if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1876     {
1877       /* Frame pointer is fp.  */
1878       framereg = ARM_FP_REGNUM;
1879       framesize = -regs[ARM_FP_REGNUM].k;
1880     }
1881   else
1882     {
1883       /* Try the stack pointer... this is a bit desperate.  */
1884       framereg = ARM_SP_REGNUM;
1885       framesize = -regs[ARM_SP_REGNUM].k;
1886     }
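  /* As a worked example, for the APCS prologue sketched above the
     analysis leaves FP equal to the original SP minus 4, so FRAMEREG is
     ARM_FP_REGNUM and FRAMESIZE is 4; the caller's SP is then recovered
     later as fp + 4.  */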
1887 
1888   if (cache)
1889     {
1890       cache->framereg = framereg;
1891       cache->framesize = framesize;
1892 
1893       for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1894 	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1895 	  cache->saved_regs[regno].addr = offset;
1896     }
1897 
1898   if (arm_debug)
1899     fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1900 			paddress (gdbarch, unrecognized_pc));
1901 
1902   do_cleanups (back_to);
1903   return unrecognized_pc;
1904 }
1905 
1906 static void
1907 arm_scan_prologue (struct frame_info *this_frame,
1908 		   struct arm_prologue_cache *cache)
1909 {
1910   struct gdbarch *gdbarch = get_frame_arch (this_frame);
1911   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1912   int regno;
1913   CORE_ADDR prologue_start, prologue_end, current_pc;
1914   CORE_ADDR prev_pc = get_frame_pc (this_frame);
1915   CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1916   pv_t regs[ARM_FPS_REGNUM];
1917   struct pv_area *stack;
1918   struct cleanup *back_to;
1919   CORE_ADDR offset;
1920 
1921   /* Assume there is no frame until proven otherwise.  */
1922   cache->framereg = ARM_SP_REGNUM;
1923   cache->framesize = 0;
1924 
1925   /* Check for Thumb prologue.  */
1926   if (arm_frame_is_thumb (this_frame))
1927     {
1928       thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1929       return;
1930     }
1931 
1932   /* Find the function prologue.  If we can't find the function in
1933      the symbol table, peek in the stack frame to find the PC.  */
1934   if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1935 				&prologue_end))
1936     {
1937       /* One way to find the end of the prologue (which works well
1938          for unoptimized code) is to do the following:
1939 
1940 	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1941 
1942 	    if (sal.line == 0)
1943 	      prologue_end = prev_pc;
1944 	    else if (sal.end < prologue_end)
1945 	      prologue_end = sal.end;
1946 
1947 	 This mechanism is very accurate so long as the optimizer
1948 	 doesn't move any instructions from the function body into the
1949 	 prologue.  If this happens, sal.end will be the last
1950 	 instruction in the first hunk of prologue code just before
1951 	 the first instruction that the scheduler has moved from
1952 	 the body to the prologue.
1953 
1954 	 In order to make sure that we scan all of the prologue
1955 	 instructions, we use a slightly less accurate mechanism which
1956 	 may scan more than necessary.  To help compensate for this
1957 	 lack of accuracy, the prologue scanning loop below contains
1958 	 several clauses which'll cause the loop to terminate early if
1959 	 an implausible prologue instruction is encountered.
1960 
1961 	 The expression
1962 
1963 	      prologue_start + 64
1964 
1965 	 is a suitable endpoint since it accounts for the largest
1966 	 possible prologue plus up to five instructions inserted by
1967 	 the scheduler.  */
1968 
1969       if (prologue_end > prologue_start + 64)
1970 	{
1971 	  prologue_end = prologue_start + 64;	/* See above.  */
1972 	}
1973     }
1974   else
1975     {
1976       /* We have no symbol information.  Our only option is to assume this
1977 	 function has a standard stack frame and the normal frame register.
1978 	 Then, we can find the value of our frame pointer on entrance to
1979 	 the callee (or at the present moment if this is the innermost frame).
1980 	 The value stored there should be the address of the stmfd + 8.  */
1981       CORE_ADDR frame_loc;
1982       LONGEST return_value;
1983 
1984       frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1985       if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1986         return;
1987       else
1988         {
1989           prologue_start = gdbarch_addr_bits_remove
1990 			     (gdbarch, return_value) - 8;
1991           prologue_end = prologue_start + 64;	/* See above.  */
1992         }
1993     }
1994 
1995   if (prev_pc < prologue_end)
1996     prologue_end = prev_pc;
1997 
1998   arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1999 }
2000 
2001 static struct arm_prologue_cache *
2002 arm_make_prologue_cache (struct frame_info *this_frame)
2003 {
2004   int reg;
2005   struct arm_prologue_cache *cache;
2006   CORE_ADDR unwound_fp;
2007 
2008   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2009   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2010 
2011   arm_scan_prologue (this_frame, cache);
2012 
2013   unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2014   if (unwound_fp == 0)
2015     return cache;
2016 
2017   cache->prev_sp = unwound_fp + cache->framesize;
2018 
2019   /* Calculate actual addresses of saved registers using offsets
2020      determined by arm_scan_prologue.  */
2021   for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2022     if (trad_frame_addr_p (cache->saved_regs, reg))
2023       cache->saved_regs[reg].addr += cache->prev_sp;
2024 
2025   return cache;
2026 }
2027 
2028 /* Our frame ID for a normal frame is the current function's starting PC
2029    and the caller's SP when we were called.  */
2030 
2031 static void
2032 arm_prologue_this_id (struct frame_info *this_frame,
2033 		      void **this_cache,
2034 		      struct frame_id *this_id)
2035 {
2036   struct arm_prologue_cache *cache;
2037   struct frame_id id;
2038   CORE_ADDR pc, func;
2039 
2040   if (*this_cache == NULL)
2041     *this_cache = arm_make_prologue_cache (this_frame);
2042   cache = *this_cache;
2043 
2044   /* This is meant to halt the backtrace at "_start".  */
2045   pc = get_frame_pc (this_frame);
2046   if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2047     return;
2048 
2049   /* If we've hit a wall, stop.  */
2050   if (cache->prev_sp == 0)
2051     return;
2052 
2053   /* Use function start address as part of the frame ID.  If we cannot
2054      identify the start address (due to missing symbol information),
2055      fall back to just using the current PC.  */
2056   func = get_frame_func (this_frame);
2057   if (!func)
2058     func = pc;
2059 
2060   id = frame_id_build (cache->prev_sp, func);
2061   *this_id = id;
2062 }
2063 
2064 static struct value *
2065 arm_prologue_prev_register (struct frame_info *this_frame,
2066 			    void **this_cache,
2067 			    int prev_regnum)
2068 {
2069   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2070   struct arm_prologue_cache *cache;
2071 
2072   if (*this_cache == NULL)
2073     *this_cache = arm_make_prologue_cache (this_frame);
2074   cache = *this_cache;
2075 
2076   /* If we are asked to unwind the PC, then we need to return the LR
2077      instead.  The prologue may save PC, but it will point into this
2078      frame's prologue, not the next frame's resume location.  Also
2079      strip the saved T bit.  A valid LR may have the low bit set, but
2080      a valid PC never does.  */
2081   if (prev_regnum == ARM_PC_REGNUM)
2082     {
2083       CORE_ADDR lr;
2084 
2085       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2086       return frame_unwind_got_constant (this_frame, prev_regnum,
2087 					arm_addr_bits_remove (gdbarch, lr));
2088     }
2089 
2090   /* SP is generally not saved to the stack, but this frame is
2091      identified by the next frame's stack pointer at the time of the call.
2092      The value was already reconstructed into PREV_SP.  */
2093   if (prev_regnum == ARM_SP_REGNUM)
2094     return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2095 
2096   /* The CPSR may have been changed by the call instruction and by the
2097      called function.  The only bit we can reconstruct is the T bit,
2098      by checking the low bit of LR as of the call.  This is a reliable
2099      indicator of Thumb-ness except for some ARM v4T pre-interworking
2100      Thumb code, which could get away with a clear low bit as long as
2101      the called function did not use bx.  Guess that all other
2102      bits are unchanged; the condition flags are presumably lost,
2103      but the processor status is likely valid.  */
2104   if (prev_regnum == ARM_PS_REGNUM)
2105     {
2106       CORE_ADDR lr, cpsr;
2107       ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2108 
2109       cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2110       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2111       if (IS_THUMB_ADDR (lr))
2112 	cpsr |= t_bit;
2113       else
2114 	cpsr &= ~t_bit;
2115       return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2116     }
2117 
2118   return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2119 				       prev_regnum);
2120 }
2121 
2122 struct frame_unwind arm_prologue_unwind = {
2123   NORMAL_FRAME,
2124   default_frame_unwind_stop_reason,
2125   arm_prologue_this_id,
2126   arm_prologue_prev_register,
2127   NULL,
2128   default_frame_sniffer
2129 };
2130 
2131 /* Maintain a list of ARM exception table entries per objfile, similar to the
2132    list of mapping symbols.  We only cache entries for standard ARM-defined
2133    personality routines; the cache will contain only the frame unwinding
2134    instructions associated with the entry (not the descriptors).  */
2135 
2136 static const struct objfile_data *arm_exidx_data_key;
2137 
2138 struct arm_exidx_entry
2139 {
2140   bfd_vma addr;
2141   gdb_byte *entry;
2142 };
2143 typedef struct arm_exidx_entry arm_exidx_entry_s;
2144 DEF_VEC_O(arm_exidx_entry_s);
2145 
2146 struct arm_exidx_data
2147 {
2148   VEC(arm_exidx_entry_s) **section_maps;
2149 };
2150 
2151 static void
2152 arm_exidx_data_free (struct objfile *objfile, void *arg)
2153 {
2154   struct arm_exidx_data *data = arg;
2155   unsigned int i;
2156 
2157   for (i = 0; i < objfile->obfd->section_count; i++)
2158     VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2159 }
2160 
2161 static inline int
2162 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2163 			   const struct arm_exidx_entry *rhs)
2164 {
2165   return lhs->addr < rhs->addr;
2166 }
2167 
2168 static struct obj_section *
2169 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2170 {
2171   struct obj_section *osect;
2172 
2173   ALL_OBJFILE_OSECTIONS (objfile, osect)
2174     if (bfd_get_section_flags (objfile->obfd,
2175 			       osect->the_bfd_section) & SEC_ALLOC)
2176       {
2177 	bfd_vma start, size;
2178 	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2179 	size = bfd_get_section_size (osect->the_bfd_section);
2180 
2181 	if (start <= vma && vma < start + size)
2182 	  return osect;
2183       }
2184 
2185   return NULL;
2186 }
2187 
2188 /* Parse contents of exception table and exception index sections
2189    of OBJFILE, and fill in the exception table entry cache.
2190 
2191    For each entry that refers to a standard ARM-defined personality
2192    routine, extract the frame unwinding instructions (from either
2193    the index or the table section).  The unwinding instructions
2194    are normalized by:
2195     - extracting them from the rest of the table data
2196     - converting to host endianness
2197     - appending the implicit 0xb0 ("Finish") code
2198 
2199    The extracted and normalized instructions are stored for later
2200    retrieval by the arm_find_exidx_entry routine.  */
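/* For reference (per the ARM EHABI, "Exception Handling ABI for the
   ARM Architecture"): each .ARM.exidx entry is a pair of 32-bit words.
   The first is a prel31 offset to the start of the function covered;
   the second is either the value 1 (EXIDX_CANTUNWIND), an inline unwind
   entry with bit 31 set (the short form), or a prel31 offset to an
   entry in .ARM.extab.  The prel31 decoding below sign-extends bit 30
   into bit 31 and adds the address of the word itself.  */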
2201 
2202 static void
2203 arm_exidx_new_objfile (struct objfile *objfile)
2204 {
2205   struct cleanup *cleanups;
2206   struct arm_exidx_data *data;
2207   asection *exidx, *extab;
2208   bfd_vma exidx_vma = 0, extab_vma = 0;
2209   bfd_size_type exidx_size = 0, extab_size = 0;
2210   gdb_byte *exidx_data = NULL, *extab_data = NULL;
2211   LONGEST i;
2212 
2213   /* If we've already touched this file, do nothing.  */
2214   if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2215     return;
2216   cleanups = make_cleanup (null_cleanup, NULL);
2217 
2218   /* Read contents of exception table and index.  */
2219   exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2220   if (exidx)
2221     {
2222       exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2223       exidx_size = bfd_get_section_size (exidx);
2224       exidx_data = xmalloc (exidx_size);
2225       make_cleanup (xfree, exidx_data);
2226 
2227       if (!bfd_get_section_contents (objfile->obfd, exidx,
2228 				     exidx_data, 0, exidx_size))
2229 	{
2230 	  do_cleanups (cleanups);
2231 	  return;
2232 	}
2233     }
2234 
2235   extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2236   if (extab)
2237     {
2238       extab_vma = bfd_section_vma (objfile->obfd, extab);
2239       extab_size = bfd_get_section_size (extab);
2240       extab_data = xmalloc (extab_size);
2241       make_cleanup (xfree, extab_data);
2242 
2243       if (!bfd_get_section_contents (objfile->obfd, extab,
2244 				     extab_data, 0, extab_size))
2245 	{
2246 	  do_cleanups (cleanups);
2247 	  return;
2248 	}
2249     }
2250 
2251   /* Allocate exception table data structure.  */
2252   data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2253   set_objfile_data (objfile, arm_exidx_data_key, data);
2254   data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2255 				       objfile->obfd->section_count,
2256 				       VEC(arm_exidx_entry_s) *);
2257 
2258   /* Fill in exception table.  */
2259   for (i = 0; i < exidx_size / 8; i++)
2260     {
2261       struct arm_exidx_entry new_exidx_entry;
2262       bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2263       bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2264       bfd_vma addr = 0, word = 0;
2265       int n_bytes = 0, n_words = 0;
2266       struct obj_section *sec;
2267       gdb_byte *entry = NULL;
2268 
2269       /* Extract address of start of function.  */
2270       idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2271       idx += exidx_vma + i * 8;
2272 
2273       /* Find section containing function and compute section offset.  */
2274       sec = arm_obj_section_from_vma (objfile, idx);
2275       if (sec == NULL)
2276 	continue;
2277       idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2278 
2279       /* Determine address of exception table entry.  */
2280       if (val == 1)
2281 	{
2282 	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
2283 	}
2284       else if ((val & 0xff000000) == 0x80000000)
2285 	{
2286 	  /* Exception table entry embedded in .ARM.exidx
2287 	     -- must be short form.  */
2288 	  word = val;
2289 	  n_bytes = 3;
2290 	}
2291       else if (!(val & 0x80000000))
2292 	{
2293 	  /* Exception table entry in .ARM.extab.  */
2294 	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2295 	  addr += exidx_vma + i * 8 + 4;
2296 
2297 	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2298 	    {
2299 	      word = bfd_h_get_32 (objfile->obfd,
2300 				   extab_data + addr - extab_vma);
2301 	      addr += 4;
2302 
2303 	      if ((word & 0xff000000) == 0x80000000)
2304 		{
2305 		  /* Short form.  */
2306 		  n_bytes = 3;
2307 		}
2308 	      else if ((word & 0xff000000) == 0x81000000
2309 		       || (word & 0xff000000) == 0x82000000)
2310 		{
2311 		  /* Long form.  */
2312 		  n_bytes = 2;
2313 		  n_words = ((word >> 16) & 0xff);
2314 		}
2315 	      else if (!(word & 0x80000000))
2316 		{
2317 		  bfd_vma pers;
2318 		  struct obj_section *pers_sec;
2319 		  int gnu_personality = 0;
2320 
2321 		  /* Custom personality routine.  */
2322 		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2323 		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2324 
2325 		  /* Check whether we've got one of the variants of the
2326 		     GNU personality routines.  */
2327 		  pers_sec = arm_obj_section_from_vma (objfile, pers);
2328 		  if (pers_sec)
2329 		    {
2330 		      static const char *personality[] =
2331 			{
2332 			  "__gcc_personality_v0",
2333 			  "__gxx_personality_v0",
2334 			  "__gcj_personality_v0",
2335 			  "__gnu_objc_personality_v0",
2336 			  NULL
2337 			};
2338 
2339 		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2340 		      int k;
2341 
2342 		      for (k = 0; personality[k]; k++)
2343 			if (lookup_minimal_symbol_by_pc_name
2344 			      (pc, personality[k], objfile))
2345 			  {
2346 			    gnu_personality = 1;
2347 			    break;
2348 			  }
2349 		    }
2350 
2351 		  /* If so, the next word contains a word count in the high
2352 		     byte, followed by the same unwind instructions as the
2353 		     pre-defined forms.  */
2354 		  if (gnu_personality
2355 		      && addr + 4 <= extab_vma + extab_size)
2356 		    {
2357 		      word = bfd_h_get_32 (objfile->obfd,
2358 					   extab_data + addr - extab_vma);
2359 		      addr += 4;
2360 		      n_bytes = 3;
2361 		      n_words = ((word >> 24) & 0xff);
2362 		    }
2363 		}
2364 	    }
2365 	}
2366 
2367       /* Sanity check address.  */
2368       if (n_words)
2369 	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2370 	  n_words = n_bytes = 0;
2371 
2372       /* The unwind instructions reside in WORD (only the N_BYTES least
2373 	 significant bytes are valid), followed by N_WORDS words in the
2374 	 extab section starting at ADDR.  */
2375       if (n_bytes || n_words)
2376 	{
2377 	  gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2378 					       n_bytes + n_words * 4 + 1);
2379 
2380 	  while (n_bytes--)
2381 	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2382 
2383 	  while (n_words--)
2384 	    {
2385 	      word = bfd_h_get_32 (objfile->obfd,
2386 				   extab_data + addr - extab_vma);
2387 	      addr += 4;
2388 
2389 	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
2390 	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
2391 	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
2392 	      *p++ = (gdb_byte) (word & 0xff);
2393 	    }
2394 
2395 	  /* Implied "Finish" to terminate the list.  */
2396 	  *p++ = 0xb0;
2397 	}
2398 
2399       /* Push entry onto vector.  They are guaranteed to always
2400 	 appear in order of increasing addresses.  */
2401       new_exidx_entry.addr = idx;
2402       new_exidx_entry.entry = entry;
2403       VEC_safe_push (arm_exidx_entry_s,
2404 		     data->section_maps[sec->the_bfd_section->index],
2405 		     &new_exidx_entry);
2406     }
2407 
2408   do_cleanups (cleanups);
2409 }
2410 
2411 /* Search for the exception table entry covering MEMADDR.  If one is found,
2412    return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
2413    set *START to the start of the region covered by this entry.  */
2414 
2415 static gdb_byte *
2416 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2417 {
2418   struct obj_section *sec;
2419 
2420   sec = find_pc_section (memaddr);
2421   if (sec != NULL)
2422     {
2423       struct arm_exidx_data *data;
2424       VEC(arm_exidx_entry_s) *map;
2425       struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2426       unsigned int idx;
2427 
2428       data = objfile_data (sec->objfile, arm_exidx_data_key);
2429       if (data != NULL)
2430 	{
2431 	  map = data->section_maps[sec->the_bfd_section->index];
2432 	  if (!VEC_empty (arm_exidx_entry_s, map))
2433 	    {
2434 	      struct arm_exidx_entry *map_sym;
2435 
2436 	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2437 				     arm_compare_exidx_entries);
2438 
2439 	      /* VEC_lower_bound finds the earliest ordered insertion
2440 		 point.  If the following symbol starts at this exact
2441 		 address, we use that; otherwise, the preceding
2442 		 exception table entry covers this address.  */
2443 	      if (idx < VEC_length (arm_exidx_entry_s, map))
2444 		{
2445 		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2446 		  if (map_sym->addr == map_key.addr)
2447 		    {
2448 		      if (start)
2449 			*start = map_sym->addr + obj_section_addr (sec);
2450 		      return map_sym->entry;
2451 		    }
2452 		}
2453 
2454 	      if (idx > 0)
2455 		{
2456 		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2457 		  if (start)
2458 		    *start = map_sym->addr + obj_section_addr (sec);
2459 		  return map_sym->entry;
2460 		}
2461 	    }
2462 	}
2463     }
2464 
2465   return NULL;
2466 }
2467 
2468 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2469    instruction list from the ARM exception table entry ENTRY, allocate and
2470    return a prologue cache structure describing how to unwind this frame.
2471 
2472    Return NULL if the unwinding instruction list contains a "spare",
2473    "reserved" or "refuse to unwind" instruction as defined in section
2474    "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2475    for the ARM Architecture" document.  */
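/* For reference, a rough summary of the unwind opcodes decoded below
   (see section 9.3 of the EHABI document; the x/i/n characters are
   operand bits):

     00xxxxxx            vsp += (xxxxxx << 2) + 4
     01xxxxxx            vsp -= (xxxxxx << 2) + 4
     1000iiii iiiiiiii   pop r4..r15 under mask (all-zero mask: refuse)
     1001nnnn            vsp = r[nnnn]  (nnnn == 13 or 15 is reserved)
     10100nnn            pop r4..r[4+nnn]
     10101nnn            pop r4..r[4+nnn], lr
     10110000            finish
     10110001 0000iiii   pop r0..r3 under mask
     10110010 uleb128    vsp += 0x204 + (uleb128 << 2)

   plus the VFP and iWMMXt pop encodings (0xb3, 0xb8-0xbf, 0xc0-0xc9,
   0xd0-0xd7) handled further down in the loop.  */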
2476 
2477 static struct arm_prologue_cache *
2478 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2479 {
2480   CORE_ADDR vsp = 0;
2481   int vsp_valid = 0;
2482 
2483   struct arm_prologue_cache *cache;
2484   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2485   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2486 
2487   for (;;)
2488     {
2489       gdb_byte insn;
2490 
2491       /* Whenever we reload SP, we actually have to retrieve its
2492 	 actual value in the current frame.  */
2493       if (!vsp_valid)
2494 	{
2495 	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2496 	    {
2497 	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2498 	      vsp = get_frame_register_unsigned (this_frame, reg);
2499 	    }
2500 	  else
2501 	    {
2502 	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2503 	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2504 	    }
2505 
2506 	  vsp_valid = 1;
2507 	}
2508 
2509       /* Decode next unwind instruction.  */
2510       insn = *entry++;
2511 
2512       if ((insn & 0xc0) == 0)
2513 	{
2514 	  int offset = insn & 0x3f;
2515 	  vsp += (offset << 2) + 4;
2516 	}
2517       else if ((insn & 0xc0) == 0x40)
2518 	{
2519 	  int offset = insn & 0x3f;
2520 	  vsp -= (offset << 2) + 4;
2521 	}
2522       else if ((insn & 0xf0) == 0x80)
2523 	{
2524 	  int mask = ((insn & 0xf) << 8) | *entry++;
2525 	  int i;
2526 
2527 	  /* The special case of an all-zero mask identifies
2528 	     "Refuse to unwind".  We return NULL to fall back
2529 	     to the prologue analyzer.  */
2530 	  if (mask == 0)
2531 	    return NULL;
2532 
2533 	  /* Pop registers r4..r15 under mask.  */
2534 	  for (i = 0; i < 12; i++)
2535 	    if (mask & (1 << i))
2536 	      {
2537 	        cache->saved_regs[4 + i].addr = vsp;
2538 		vsp += 4;
2539 	      }
2540 
2541 	  /* Special-case popping SP -- we need to reload vsp.  */
2542 	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
2543 	    vsp_valid = 0;
2544 	}
2545       else if ((insn & 0xf0) == 0x90)
2546 	{
2547 	  int reg = insn & 0xf;
2548 
2549 	  /* Reserved cases.  */
2550 	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2551 	    return NULL;
2552 
2553 	  /* Set SP from another register and mark VSP for reload.  */
2554 	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2555 	  vsp_valid = 0;
2556 	}
2557       else if ((insn & 0xf0) == 0xa0)
2558 	{
2559 	  int count = insn & 0x7;
2560 	  int pop_lr = (insn & 0x8) != 0;
2561 	  int i;
2562 
2563 	  /* Pop r4..r[4+count].  */
2564 	  for (i = 0; i <= count; i++)
2565 	    {
2566 	      cache->saved_regs[4 + i].addr = vsp;
2567 	      vsp += 4;
2568 	    }
2569 
2570 	  /* If indicated by flag, pop LR as well.  */
2571 	  if (pop_lr)
2572 	    {
2573 	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2574 	      vsp += 4;
2575 	    }
2576 	}
2577       else if (insn == 0xb0)
2578 	{
2579 	  /* We could only have updated PC by popping into it; if so, it
2580 	     will show up as an address.  Otherwise, copy LR into PC.  */
2581 	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2582 	    cache->saved_regs[ARM_PC_REGNUM]
2583 	      = cache->saved_regs[ARM_LR_REGNUM];
2584 
2585 	  /* We're done.  */
2586 	  break;
2587 	}
2588       else if (insn == 0xb1)
2589 	{
2590 	  int mask = *entry++;
2591 	  int i;
2592 
2593 	  /* An all-zero mask, or a mask >= 16, is "spare".  */
2594 	  if (mask == 0 || mask >= 16)
2595 	    return NULL;
2596 
2597 	  /* Pop r0..r3 under mask.  */
2598 	  for (i = 0; i < 4; i++)
2599 	    if (mask & (1 << i))
2600 	      {
2601 		cache->saved_regs[i].addr = vsp;
2602 		vsp += 4;
2603 	      }
2604 	}
2605       else if (insn == 0xb2)
2606 	{
2607 	  ULONGEST offset = 0;
2608 	  unsigned shift = 0;
2609 
2610 	  do
2611 	    {
2612 	      offset |= (*entry & 0x7f) << shift;
2613 	      shift += 7;
2614 	    }
2615 	  while (*entry++ & 0x80);
2616 
2617 	  vsp += 0x204 + (offset << 2);
2618 	}
2619       else if (insn == 0xb3)
2620 	{
2621 	  int start = *entry >> 4;
2622 	  int count = (*entry++) & 0xf;
2623 	  int i;
2624 
2625 	  /* Only registers D0..D15 are valid here.  */
2626 	  if (start + count >= 16)
2627 	    return NULL;
2628 
2629 	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
2630 	  for (i = 0; i <= count; i++)
2631 	    {
2632 	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2633 	      vsp += 8;
2634 	    }
2635 
2636 	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
2637 	  vsp += 4;
2638 	}
2639       else if ((insn & 0xf8) == 0xb8)
2640 	{
2641 	  int count = insn & 0x7;
2642 	  int i;
2643 
2644 	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
2645 	  for (i = 0; i <= count; i++)
2646 	    {
2647 	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2648 	      vsp += 8;
2649 	    }
2650 
2651 	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
2652 	  vsp += 4;
2653 	}
2654       else if (insn == 0xc6)
2655 	{
2656 	  int start = *entry >> 4;
2657 	  int count = (*entry++) & 0xf;
2658 	  int i;
2659 
2660 	  /* Only registers WR0..WR15 are valid.  */
2661 	  if (start + count >= 16)
2662 	    return NULL;
2663 
2664 	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
2665 	  for (i = 0; i <= count; i++)
2666 	    {
2667 	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2668 	      vsp += 8;
2669 	    }
2670 	}
2671       else if (insn == 0xc7)
2672 	{
2673 	  int mask = *entry++;
2674 	  int i;
2675 
2676 	  /* An all-zero mask, or a mask >= 16, is "spare".  */
2677 	  if (mask == 0 || mask >= 16)
2678 	    return NULL;
2679 
2680 	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
2681 	  for (i = 0; i < 4; i++)
2682 	    if (mask & (1 << i))
2683 	      {
2684 		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2685 		vsp += 4;
2686 	      }
2687 	}
2688       else if ((insn & 0xf8) == 0xc0)
2689 	{
2690 	  int count = insn & 0x7;
2691 	  int i;
2692 
2693 	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
2694 	  for (i = 0; i <= count; i++)
2695 	    {
2696 	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2697 	      vsp += 8;
2698 	    }
2699 	}
2700       else if (insn == 0xc8)
2701 	{
2702 	  int start = *entry >> 4;
2703 	  int count = (*entry++) & 0xf;
2704 	  int i;
2705 
2706 	  /* Only registers D0..D31 are valid.  */
2707 	  if (start + count >= 16)
2708 	    return NULL;
2709 
2710 	  /* Pop VFP double-precision registers
2711 	     D[16+start]..D[16+start+count].  */
2712 	  for (i = 0; i <= count; i++)
2713 	    {
2714 	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2715 	      vsp += 8;
2716 	    }
2717 	}
2718       else if (insn == 0xc9)
2719 	{
2720 	  int start = *entry >> 4;
2721 	  int count = (*entry++) & 0xf;
2722 	  int i;
2723 
2724 	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
2725 	  for (i = 0; i <= count; i++)
2726 	    {
2727 	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2728 	      vsp += 8;
2729 	    }
2730 	}
2731       else if ((insn & 0xf8) == 0xd0)
2732 	{
2733 	  int count = insn & 0x7;
2734 	  int i;
2735 
2736 	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
2737 	  for (i = 0; i <= count; i++)
2738 	    {
2739 	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2740 	      vsp += 8;
2741 	    }
2742 	}
2743       else
2744 	{
2745 	  /* Everything else is "spare".  */
2746 	  return NULL;
2747 	}
2748     }
2749 
2750   /* If we restore SP from a register, assume this was the frame register.
2751      Otherwise just fall back to SP as frame register.  */
2752   if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2753     cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2754   else
2755     cache->framereg = ARM_SP_REGNUM;
2756 
2757   /* Determine offset to previous frame.  */
2758   cache->framesize
2759     = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2760 
2761   /* We already got the previous SP.  */
2762   cache->prev_sp = vsp;
2763 
2764   return cache;
2765 }
2766 
2767 /* Unwinding via ARM exception table entries.  Note that the sniffer
2768    already computes a filled-in prologue cache, which is then used
2769    with the same arm_prologue_this_id and arm_prologue_prev_register
2770    routines also used for prologue-parsing based unwinding.  */
2771 
2772 static int
2773 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2774 			  struct frame_info *this_frame,
2775 			  void **this_prologue_cache)
2776 {
2777   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2778   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2779   CORE_ADDR addr_in_block, exidx_region, func_start;
2780   struct arm_prologue_cache *cache;
2781   gdb_byte *entry;
2782 
2783   /* See if we have an ARM exception table entry covering this address.  */
2784   addr_in_block = get_frame_address_in_block (this_frame);
2785   entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2786   if (!entry)
2787     return 0;
2788 
2789   /* The ARM exception table does not describe unwind information
2790      for arbitrary PC values, but is guaranteed to be correct only
2791      at call sites.  We have to decide here whether we want to use
2792      ARM exception table information for this frame, or fall back
2793      to using prologue parsing.  (Note that if we have DWARF CFI,
2794      this sniffer isn't even called -- CFI is always preferred.)
2795 
2796      Before we make this decision, however, we check whether we
2797      actually have *symbol* information for the current frame.
2798      If not, prologue parsing would not work anyway, so we might
2799      as well use the exception table and hope for the best.  */
2800   if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2801     {
2802       int exc_valid = 0;
2803 
2804       /* If the next frame is "normal", we are at a call site in this
2805 	 frame, so exception information is guaranteed to be valid.  */
2806       if (get_next_frame (this_frame)
2807 	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2808 	exc_valid = 1;
2809 
2810       /* We also assume exception information is valid if we're currently
2811 	 blocked in a system call.  The system library is supposed to
2812 	 ensure this, so that e.g. pthread cancellation works.  */
2813       if (arm_frame_is_thumb (this_frame))
2814 	{
2815 	  LONGEST insn;
2816 
2817 	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2818 					byte_order_for_code, &insn)
2819 	      && (insn & 0xff00) == 0xdf00 /* svc */)
2820 	    exc_valid = 1;
2821 	}
2822       else
2823 	{
2824 	  LONGEST insn;
2825 
2826 	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2827 					byte_order_for_code, &insn)
2828 	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2829 	    exc_valid = 1;
2830 	}
2831 
2832       /* Bail out if we don't know that exception information is valid.  */
2833       if (!exc_valid)
2834 	return 0;
2835 
2836      /* The ARM exception index does not mark the *end* of the region
2837 	covered by the entry, and some functions will not have any entry.
2838 	To correctly recognize the end of the covered region, the linker
2839 	should have inserted dummy records with a CANTUNWIND marker.
2840 
2841 	Unfortunately, current versions of GNU ld do not reliably do
2842 	this, and thus we may have found an incorrect entry above.
2843 	As a (temporary) sanity check, we only use the entry if it
2844 	lies *within* the bounds of the function.  Note that this check
2845 	might reject perfectly valid entries that just happen to cover
2846 	multiple functions; therefore this check ought to be removed
2847 	once the linker is fixed.  */
2848       if (func_start > exidx_region)
2849 	return 0;
2850     }
2851 
2852   /* Decode the list of unwinding instructions into a prologue cache.
2853      Note that this may fail due to e.g. a "refuse to unwind" code.  */
2854   cache = arm_exidx_fill_cache (this_frame, entry);
2855   if (!cache)
2856     return 0;
2857 
2858   *this_prologue_cache = cache;
2859   return 1;
2860 }
2861 
2862 struct frame_unwind arm_exidx_unwind = {
2863   NORMAL_FRAME,
2864   default_frame_unwind_stop_reason,
2865   arm_prologue_this_id,
2866   arm_prologue_prev_register,
2867   NULL,
2868   arm_exidx_unwind_sniffer
2869 };
2870 
2871 static struct arm_prologue_cache *
2872 arm_make_stub_cache (struct frame_info *this_frame)
2873 {
2874   struct arm_prologue_cache *cache;
2875 
2876   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2877   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2878 
2879   cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2880 
2881   return cache;
2882 }
2883 
2884 /* Our frame ID for a stub frame is the current SP and LR.  */
2885 
2886 static void
2887 arm_stub_this_id (struct frame_info *this_frame,
2888 		  void **this_cache,
2889 		  struct frame_id *this_id)
2890 {
2891   struct arm_prologue_cache *cache;
2892 
2893   if (*this_cache == NULL)
2894     *this_cache = arm_make_stub_cache (this_frame);
2895   cache = *this_cache;
2896 
2897   *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2898 }
2899 
2900 static int
2901 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2902 			 struct frame_info *this_frame,
2903 			 void **this_prologue_cache)
2904 {
2905   CORE_ADDR addr_in_block;
2906   gdb_byte dummy[4];
2907 
2908   addr_in_block = get_frame_address_in_block (this_frame);
2909   if (in_plt_section (addr_in_block, NULL)
2910       /* We also use the stub unwinder if the target memory is unreadable
2911 	 to avoid having the prologue unwinder trying to read it.  */
2912       || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
2913     return 1;
2914 
2915   return 0;
2916 }
2917 
2918 struct frame_unwind arm_stub_unwind = {
2919   NORMAL_FRAME,
2920   default_frame_unwind_stop_reason,
2921   arm_stub_this_id,
2922   arm_prologue_prev_register,
2923   NULL,
2924   arm_stub_unwind_sniffer
2925 };
2926 
2927 /* Store, into CACHE->saved_regs, the addresses of the registers saved
2928    by the M-profile exception frame described by THIS_FRAME, and return
2929    CACHE.  */
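/* For reference, the basic hardware-saved frame on an M-profile
   exception entry (assuming no floating-point extension) looks like
   this, with the unwound SP pointing at R0:

	[sp + 0]   r0        [sp + 16]  r12
	[sp + 4]   r1        [sp + 20]  lr
	[sp + 8]   r2        [sp + 24]  return address
	[sp + 12]  r3        [sp + 28]  xPSR

   The optional four-byte aligner indicated by bit 9 of the saved xPSR
   is handled below.  */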
2930 
2931 static struct arm_prologue_cache *
2932 arm_m_exception_cache (struct frame_info *this_frame)
2933 {
2934   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2935   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2936   struct arm_prologue_cache *cache;
2937   CORE_ADDR unwound_sp;
2938   LONGEST xpsr;
2939 
2940   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2941   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2942 
2943   unwound_sp = get_frame_register_unsigned (this_frame,
2944 					    ARM_SP_REGNUM);
2945 
2946   /* The hardware saves eight 32-bit words, comprising xPSR,
2947      ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
2948      "B1.5.6 Exception entry behavior" in
2949      "ARMv7-M Architecture Reference Manual".  */
2950   cache->saved_regs[0].addr = unwound_sp;
2951   cache->saved_regs[1].addr = unwound_sp + 4;
2952   cache->saved_regs[2].addr = unwound_sp + 8;
2953   cache->saved_regs[3].addr = unwound_sp + 12;
2954   cache->saved_regs[12].addr = unwound_sp + 16;
2955   cache->saved_regs[14].addr = unwound_sp + 20;
2956   cache->saved_regs[15].addr = unwound_sp + 24;
2957   cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2958 
2959   /* If bit 9 of the saved xPSR is set, then there is a four-byte
2960      aligner between the top of the 32-byte stack frame and the
2961      previous context's stack pointer.  */
2962   cache->prev_sp = unwound_sp + 32;
2963   if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2964       && (xpsr & (1 << 9)) != 0)
2965     cache->prev_sp += 4;
2966 
2967   return cache;
2968 }
2969 
2970 /* Implementation of function hook 'this_id' in
2971    'struct frame_unwind'.  */
2972 
2973 static void
2974 arm_m_exception_this_id (struct frame_info *this_frame,
2975 			 void **this_cache,
2976 			 struct frame_id *this_id)
2977 {
2978   struct arm_prologue_cache *cache;
2979 
2980   if (*this_cache == NULL)
2981     *this_cache = arm_m_exception_cache (this_frame);
2982   cache = *this_cache;
2983 
2984   /* Our frame ID for a stub frame is the current SP and LR.  */
2985   *this_id = frame_id_build (cache->prev_sp,
2986 			     get_frame_pc (this_frame));
2987 }
2988 
2989 /* Implementation of function hook 'prev_register' in
2990    'struct frame_unwind'.  */
2991 
2992 static struct value *
2993 arm_m_exception_prev_register (struct frame_info *this_frame,
2994 			       void **this_cache,
2995 			       int prev_regnum)
2996 {
2997   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2998   struct arm_prologue_cache *cache;
2999 
3000   if (*this_cache == NULL)
3001     *this_cache = arm_m_exception_cache (this_frame);
3002   cache = *this_cache;
3003 
3004   /* The value was already reconstructed into PREV_SP.  */
3005   if (prev_regnum == ARM_SP_REGNUM)
3006     return frame_unwind_got_constant (this_frame, prev_regnum,
3007 				      cache->prev_sp);
3008 
3009   return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3010 				       prev_regnum);
3011 }
3012 
3013 /* Implementation of function hook 'sniffer' in
3014    'struct frame_unwind'.  */
3015 
3016 static int
3017 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3018 				struct frame_info *this_frame,
3019 				void **this_prologue_cache)
3020 {
3021   CORE_ADDR this_pc = get_frame_pc (this_frame);
3022 
3023   /* No need to check is_m; this sniffer is only registered for
3024      M-profile architectures.  */
3025 
3026   /* Exception frames return to one of these magic PCs.  Other values
3027      are not defined as of v7-M.  See details in "B1.5.8 Exception
3028      return behavior" in "ARMv7-M Architecture Reference Manual".  */
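  /* These EXC_RETURN values correspond to a return to Handler mode
     using the main stack (0xfffffff1), Thread mode using the main
     stack (0xfffffff9), and Thread mode using the process stack
     (0xfffffffd), respectively.  */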
3029   if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3030       || this_pc == 0xfffffffd)
3031     return 1;
3032 
3033   return 0;
3034 }
3035 
3036 /* Frame unwinder for M-profile exceptions.  */
3037 
3038 struct frame_unwind arm_m_exception_unwind =
3039 {
3040   SIGTRAMP_FRAME,
3041   default_frame_unwind_stop_reason,
3042   arm_m_exception_this_id,
3043   arm_m_exception_prev_register,
3044   NULL,
3045   arm_m_exception_unwind_sniffer
3046 };
3047 
3048 static CORE_ADDR
3049 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3050 {
3051   struct arm_prologue_cache *cache;
3052 
3053   if (*this_cache == NULL)
3054     *this_cache = arm_make_prologue_cache (this_frame);
3055   cache = *this_cache;
3056 
3057   return cache->prev_sp - cache->framesize;
3058 }
3059 
3060 struct frame_base arm_normal_base = {
3061   &arm_prologue_unwind,
3062   arm_normal_frame_base,
3063   arm_normal_frame_base,
3064   arm_normal_frame_base
3065 };
3066 
3067 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3068    dummy frame.  The frame ID's base needs to match the TOS value
3069    saved by save_dummy_frame_tos() and returned from
3070    arm_push_dummy_call, and the PC needs to match the dummy frame's
3071    breakpoint.  */
3072 
3073 static struct frame_id
3074 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3075 {
3076   return frame_id_build (get_frame_register_unsigned (this_frame,
3077 						      ARM_SP_REGNUM),
3078 			 get_frame_pc (this_frame));
3079 }
3080 
3081 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3082    be used to construct the previous frame's ID, after looking up the
3083    containing function).  */
3084 
3085 static CORE_ADDR
3086 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3087 {
3088   CORE_ADDR pc;
3089   pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3090   return arm_addr_bits_remove (gdbarch, pc);
3091 }
3092 
3093 static CORE_ADDR
3094 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3095 {
3096   return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3097 }
3098 
3099 static struct value *
3100 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3101 			  int regnum)
3102 {
3103   struct gdbarch * gdbarch = get_frame_arch (this_frame);
3104   CORE_ADDR lr, cpsr;
3105   ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3106 
3107   switch (regnum)
3108     {
3109     case ARM_PC_REGNUM:
3110       /* The PC is normally copied from the return column, which
3111 	 describes saves of LR.  However, that version may have an
3112 	 extra bit set to indicate Thumb state.  The bit is not
3113 	 part of the PC.  */
3114       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3115       return frame_unwind_got_constant (this_frame, regnum,
3116 					arm_addr_bits_remove (gdbarch, lr));
3117 
3118     case ARM_PS_REGNUM:
3119       /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
3120       cpsr = get_frame_register_unsigned (this_frame, regnum);
3121       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3122       if (IS_THUMB_ADDR (lr))
3123 	cpsr |= t_bit;
3124       else
3125 	cpsr &= ~t_bit;
3126       return frame_unwind_got_constant (this_frame, regnum, cpsr);
3127 
3128     default:
3129       internal_error (__FILE__, __LINE__,
3130 		      _("Unexpected register %d"), regnum);
3131     }
3132 }
3133 
3134 static void
3135 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3136 			   struct dwarf2_frame_state_reg *reg,
3137 			   struct frame_info *this_frame)
3138 {
3139   switch (regnum)
3140     {
3141     case ARM_PC_REGNUM:
3142     case ARM_PS_REGNUM:
3143       reg->how = DWARF2_FRAME_REG_FN;
3144       reg->loc.fn = arm_dwarf2_prev_register;
3145       break;
3146     case ARM_SP_REGNUM:
3147       reg->how = DWARF2_FRAME_REG_CFA;
3148       break;
3149     }
3150 }
3151 
3152 /* Return true if we are in the function's epilogue, i.e. after the
3153    instruction that destroyed the function's stack frame.  */
3154 
3155 static int
3156 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3157 {
3158   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3159   unsigned int insn, insn2;
3160   int found_return = 0, found_stack_adjust = 0;
3161   CORE_ADDR func_start, func_end;
3162   CORE_ADDR scan_pc;
3163   gdb_byte buf[4];
3164 
3165   if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3166     return 0;
3167 
3168   /* The epilogue is a sequence of instructions along the following lines:
3169 
3170     - add stack frame size to SP or FP
3171     - [if frame pointer used] restore SP from FP
3172     - restore registers from SP [may include PC]
3173     - a return-type instruction [if PC wasn't already restored]
3174 
3175     In a first pass, we scan forward from the current PC and verify the
3176     instructions we find as compatible with this sequence, ending in a
3177     return instruction.
3178 
3179     However, this is not sufficient to distinguish indirect function calls
3180     within a function from indirect tail calls in the epilogue in some cases.
3181     Therefore, if we didn't already find any SP-changing instruction during
3182     forward scan, we add a backward scanning heuristic to ensure we actually
3183     are in the epilogue.  */
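  /* For instance, a typical frame-pointer-based Thumb epilogue might be

	mov	sp, r7
	add	sp, #12
	pop	{r4, r5, r7, pc}

     all of which the forward scan below accepts (the pop also counting
     as the return).  */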
3184 
3185   scan_pc = pc;
3186   while (scan_pc < func_end && !found_return)
3187     {
3188       if (target_read_memory (scan_pc, buf, 2))
3189 	break;
3190 
3191       scan_pc += 2;
3192       insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3193 
3194       if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
3195 	found_return = 1;
3196       else if (insn == 0x46f7)  /* mov pc, lr */
3197 	found_return = 1;
3198       else if (insn == 0x46bd)  /* mov sp, r7 */
3199 	found_stack_adjust = 1;
3200       else if ((insn & 0xff00) == 0xb000)  /* add sp, imm or sub sp, imm  */
3201 	found_stack_adjust = 1;
3202       else if ((insn & 0xfe00) == 0xbc00)  /* pop <registers> */
3203 	{
3204 	  found_stack_adjust = 1;
3205 	  if (insn & 0x0100)  /* <registers> include PC.  */
3206 	    found_return = 1;
3207 	}
3208       else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
3209 	{
3210 	  if (target_read_memory (scan_pc, buf, 2))
3211 	    break;
3212 
3213 	  scan_pc += 2;
3214 	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3215 
3216 	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
3217 	    {
3218 	      found_stack_adjust = 1;
3219 	      if (insn2 & 0x8000)  /* <registers> include PC.  */
3220 		found_return = 1;
3221 	    }
3222 	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
3223 		   && (insn2 & 0x0fff) == 0x0b04)
3224 	    {
3225 	      found_stack_adjust = 1;
3226 	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
3227 		found_return = 1;
3228 	    }
3229 	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
3230 		   && (insn2 & 0x0e00) == 0x0a00)
3231 	    found_stack_adjust = 1;
3232 	  else
3233 	    break;
3234 	}
3235       else
3236 	break;
3237     }
3238 
3239   if (!found_return)
3240     return 0;
3241 
3242   /* Since any instruction in the epilogue sequence, with the possible
3243      exception of return itself, updates the stack pointer, we need to
3244      scan backwards for at most one instruction.  Try either a 16-bit or
3245      a 32-bit instruction.  This is just a heuristic, so we do not worry
3246      too much about false positives.  */
3247 
3248   if (!found_stack_adjust)
3249     {
3250       if (pc - 4 < func_start)
3251 	return 0;
3252       if (target_read_memory (pc - 4, buf, 4))
3253 	return 0;
3254 
3255       insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3256       insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3257 
3258       if (insn2 == 0x46bd)  /* mov sp, r7 */
3259 	found_stack_adjust = 1;
3260       else if ((insn2 & 0xff00) == 0xb000)  /* add sp, imm or sub sp, imm  */
3261 	found_stack_adjust = 1;
3262       else if ((insn2 & 0xff00) == 0xbc00)  /* pop <registers> without PC */
3263 	found_stack_adjust = 1;
3264       else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
3265 	found_stack_adjust = 1;
3266       else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
3267 	       && (insn2 & 0x0fff) == 0x0b04)
3268 	found_stack_adjust = 1;
3269       else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
3270 	       && (insn2 & 0x0e00) == 0x0a00)
3271 	found_stack_adjust = 1;
3272     }
3273 
3274   return found_stack_adjust;
3275 }
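
/* Illustrative sketch (not part of GDB): a standalone decoder for the
   16-bit Thumb opcodes that the forward scan above treats as returns.
   The masks mirror the ones used in thumb_in_function_epilogue_p; the
   helper name and the driver are hypothetical.  */
#if 0
#include <stdio.h>

static int
thumb16_is_return (unsigned short insn)
{
  if ((insn & 0xff80) == 0x4700)	/* bx <Rm> */
    return 1;
  if (insn == 0x46f7)			/* mov pc, lr */
    return 1;
  if ((insn & 0xfe00) == 0xbc00		/* pop <registers> ...  */
      && (insn & 0x0100) != 0)		/* ... including PC */
    return 1;
  return 0;
}

int
main (void)
{
  /* pop {r4, r5, pc} encodes as 0xbd30; bx lr encodes as 0x4770.  */
  printf ("%d %d\n", thumb16_is_return (0xbd30), thumb16_is_return (0x4770));
  return 0;
}
#endif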
3276 
3277 /* Return true if we are in the function's epilogue, i.e. after the
3278    instruction that destroyed the function's stack frame.  */
3279 
3280 static int
3281 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3282 {
3283   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3284   unsigned int insn;
3285   int found_return, found_stack_adjust;
3286   CORE_ADDR func_start, func_end;
3287 
3288   if (arm_pc_is_thumb (gdbarch, pc))
3289     return thumb_in_function_epilogue_p (gdbarch, pc);
3290 
3291   if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3292     return 0;
3293 
3294   /* We are in the epilogue if the previous instruction was a stack
3295      adjustment and the next instruction is a possible return (bx, mov
3296      pc, or pop).  We could have to scan backwards to find the stack
3297      adjustment, or forwards to find the return, but this is a decent
3298      approximation.  First scan forwards.  */
3299 
3300   found_return = 0;
3301   insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3302   if (bits (insn, 28, 31) != INST_NV)
3303     {
3304       if ((insn & 0x0ffffff0) == 0x012fff10)
3305 	/* BX.  */
3306 	found_return = 1;
3307       else if ((insn & 0x0ffffff0) == 0x01a0f000)
3308 	/* MOV PC.  */
3309 	found_return = 1;
3310       else if ((insn & 0x0fff0000) == 0x08bd0000
3311 	  && (insn & 0x0000c000) != 0)
3312 	/* POP (LDMIA), including PC or LR.  */
3313 	found_return = 1;
3314     }
3315 
3316   if (!found_return)
3317     return 0;
3318 
3319   /* Scan backwards.  This is just a heuristic, so do not worry about
3320      false positives from mode changes.  */
3321 
3322   if (pc < func_start + 4)
3323     return 0;
3324 
3325   found_stack_adjust = 0;
3326   insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3327   if (bits (insn, 28, 31) != INST_NV)
3328     {
3329       if ((insn & 0x0df0f000) == 0x0080d000)
3330 	/* ADD SP (register or immediate).  */
3331 	found_stack_adjust = 1;
3332       else if ((insn & 0x0df0f000) == 0x0040d000)
3333 	/* SUB SP (register or immediate).  */
3334 	found_stack_adjust = 1;
3335       else if ((insn & 0x0ffffff0) == 0x01a0d000)
3336 	/* MOV SP.  */
3337 	found_stack_adjust = 1;
3338       else if ((insn & 0x0fff0000) == 0x08bd0000)
3339 	/* POP (LDMIA).  */
3340 	found_stack_adjust = 1;
3341       else if ((insn & 0x0fff0000) == 0x049d0000)
3342 	/* POP of a single register.  */
3343 	found_stack_adjust = 1;
3344     }
3345 
3346   if (found_stack_adjust)
3347     return 1;
3348 
3349   return 0;
3350 }
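
/* Illustrative sketch (not part of GDB): the ARM-mode (non-Thumb) return
   patterns matched above, checked against one concrete encoding.
   "pop {r4, pc}" assembles to 0xe8bd8010 (ldmia sp!, {r4, pc}), which
   satisfies the LDMIA test including the PC/LR bit.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int insn = 0xe8bd8010;	/* pop {r4, pc} */

  printf ("is pop incl. pc/lr: %d\n",
	  (insn & 0x0fff0000) == 0x08bd0000
	  && (insn & 0x0000c000) != 0);		/* 1 */
  printf ("is bx: %d\n",
	  (insn & 0x0ffffff0) == 0x012fff10);	/* 0 */
  return 0;
}
#endif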
3351 
3352 
3353 /* When arguments must be pushed onto the stack, they go on in reverse
3354    order.  The code below implements a FILO (stack) to do this.  */
3355 
3356 struct stack_item
3357 {
3358   int len;
3359   struct stack_item *prev;
3360   void *data;
3361 };
3362 
3363 static struct stack_item *
3364 push_stack_item (struct stack_item *prev, const void *contents, int len)
3365 {
3366   struct stack_item *si;
3367   si = xmalloc (sizeof (struct stack_item));
3368   si->data = xmalloc (len);
3369   si->len = len;
3370   si->prev = prev;
3371   memcpy (si->data, contents, len);
3372   return si;
3373 }
3374 
3375 static struct stack_item *
3376 pop_stack_item (struct stack_item *si)
3377 {
3378   struct stack_item *dead = si;
3379   si = si->prev;
3380   xfree (dead->data);
3381   xfree (dead);
3382   return si;
3383 }
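
/* Illustrative sketch (not part of GDB): a standalone re-creation of the
   stack_item FILO above, using malloc/free in place of xmalloc/xfree, to
   show that items drained from the list come off in reverse push order,
   which is how arm_push_dummy_call lays out stack arguments below.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct item { int len; struct item *prev; void *data; };

static struct item *
push (struct item *prev, const void *contents, int len)
{
  struct item *si = malloc (sizeof *si);
  si->data = malloc (len);
  si->len = len;
  si->prev = prev;
  memcpy (si->data, contents, len);
  return si;
}

static struct item *
pop (struct item *si)
{
  struct item *dead = si;
  si = si->prev;
  free (dead->data);
  free (dead);
  return si;
}

int
main (void)
{
  struct item *si = NULL;
  const char *args[] = { "arg0", "arg1", "arg2" };
  int i;

  for (i = 0; i < 3; i++)
    si = push (si, args[i], strlen (args[i]) + 1);

  /* Prints arg2, arg1, arg0: draining reverses the push order, so the
     caller can write items downwards in memory and still leave the
     first-pushed item at the final (lowest) stack address.  */
  while (si != NULL)
    {
      printf ("%s\n", (char *) si->data);
      si = pop (si);
    }
  return 0;
}
#endif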
3384 
3385 
3386 /* Return the alignment (in bytes) of the given type.  */
3387 
3388 static int
3389 arm_type_align (struct type *t)
3390 {
3391   int n;
3392   int align;
3393   int falign;
3394 
3395   t = check_typedef (t);
3396   switch (TYPE_CODE (t))
3397     {
3398     default:
3399       /* Should never happen.  */
3400       internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3401       return 4;
3402 
3403     case TYPE_CODE_PTR:
3404     case TYPE_CODE_ENUM:
3405     case TYPE_CODE_INT:
3406     case TYPE_CODE_FLT:
3407     case TYPE_CODE_SET:
3408     case TYPE_CODE_RANGE:
3409     case TYPE_CODE_REF:
3410     case TYPE_CODE_CHAR:
3411     case TYPE_CODE_BOOL:
3412       return TYPE_LENGTH (t);
3413 
3414     case TYPE_CODE_ARRAY:
3415     case TYPE_CODE_COMPLEX:
3416       /* TODO: What about vector types?  */
3417       return arm_type_align (TYPE_TARGET_TYPE (t));
3418 
3419     case TYPE_CODE_STRUCT:
3420     case TYPE_CODE_UNION:
3421       align = 1;
3422       for (n = 0; n < TYPE_NFIELDS (t); n++)
3423 	{
3424 	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3425 	  if (falign > align)
3426 	    align = falign;
3427 	}
3428       return align;
3429     }
3430 }
3431 
3432 /* Possible base types for a candidate for passing and returning in
3433    VFP registers.  */
3434 
3435 enum arm_vfp_cprc_base_type
3436 {
3437   VFP_CPRC_UNKNOWN,
3438   VFP_CPRC_SINGLE,
3439   VFP_CPRC_DOUBLE,
3440   VFP_CPRC_VEC64,
3441   VFP_CPRC_VEC128
3442 };
3443 
3444 /* The length of one element of base type B.  */
3445 
3446 static unsigned
3447 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3448 {
3449   switch (b)
3450     {
3451     case VFP_CPRC_SINGLE:
3452       return 4;
3453     case VFP_CPRC_DOUBLE:
3454       return 8;
3455     case VFP_CPRC_VEC64:
3456       return 8;
3457     case VFP_CPRC_VEC128:
3458       return 16;
3459     default:
3460       internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3461 		      (int) b);
3462     }
3463 }
3464 
3465 /* The character ('s', 'd' or 'q') for the type of VFP register used
3466    for passing base type B.  */
3467 
3468 static int
3469 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3470 {
3471   switch (b)
3472     {
3473     case VFP_CPRC_SINGLE:
3474       return 's';
3475     case VFP_CPRC_DOUBLE:
3476       return 'd';
3477     case VFP_CPRC_VEC64:
3478       return 'd';
3479     case VFP_CPRC_VEC128:
3480       return 'q';
3481     default:
3482       internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3483 		      (int) b);
3484     }
3485 }
3486 
3487 /* Determine whether T may be part of a candidate for passing and
3488    returning in VFP registers, ignoring the limit on the total number
3489    of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3490    classification of the first valid component found; if it is not
3491    VFP_CPRC_UNKNOWN, all components must have the same classification
3492    as *BASE_TYPE.  If it is found that T contains a type not permitted
3493    for passing and returning in VFP registers, a type differently
3494    classified from *BASE_TYPE, or two types differently classified
3495    from each other, return -1, otherwise return the total number of
3496    base-type elements found (possibly 0 in an empty structure or
3497    array).  Vectors and complex types are not currently supported,
3498    matching the generic AAPCS support.  */
3499 
3500 static int
3501 arm_vfp_cprc_sub_candidate (struct type *t,
3502 			    enum arm_vfp_cprc_base_type *base_type)
3503 {
3504   t = check_typedef (t);
3505   switch (TYPE_CODE (t))
3506     {
3507     case TYPE_CODE_FLT:
3508       switch (TYPE_LENGTH (t))
3509 	{
3510 	case 4:
3511 	  if (*base_type == VFP_CPRC_UNKNOWN)
3512 	    *base_type = VFP_CPRC_SINGLE;
3513 	  else if (*base_type != VFP_CPRC_SINGLE)
3514 	    return -1;
3515 	  return 1;
3516 
3517 	case 8:
3518 	  if (*base_type == VFP_CPRC_UNKNOWN)
3519 	    *base_type = VFP_CPRC_DOUBLE;
3520 	  else if (*base_type != VFP_CPRC_DOUBLE)
3521 	    return -1;
3522 	  return 1;
3523 
3524 	default:
3525 	  return -1;
3526 	}
3527       break;
3528 
3529     case TYPE_CODE_ARRAY:
3530       {
3531 	int count;
3532 	unsigned unitlen;
3533 	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3534 	if (count == -1)
3535 	  return -1;
3536 	if (TYPE_LENGTH (t) == 0)
3537 	  {
3538 	    gdb_assert (count == 0);
3539 	    return 0;
3540 	  }
3541 	else if (count == 0)
3542 	  return -1;
3543 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3544 	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3545 	return TYPE_LENGTH (t) / unitlen;
3546       }
3547       break;
3548 
3549     case TYPE_CODE_STRUCT:
3550       {
3551 	int count = 0;
3552 	unsigned unitlen;
3553 	int i;
3554 	for (i = 0; i < TYPE_NFIELDS (t); i++)
3555 	  {
3556 	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3557 							base_type);
3558 	    if (sub_count == -1)
3559 	      return -1;
3560 	    count += sub_count;
3561 	  }
3562 	if (TYPE_LENGTH (t) == 0)
3563 	  {
3564 	    gdb_assert (count == 0);
3565 	    return 0;
3566 	  }
3567 	else if (count == 0)
3568 	  return -1;
3569 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3570 	if (TYPE_LENGTH (t) != unitlen * count)
3571 	  return -1;
3572 	return count;
3573       }
3574 
3575     case TYPE_CODE_UNION:
3576       {
3577 	int count = 0;
3578 	unsigned unitlen;
3579 	int i;
3580 	for (i = 0; i < TYPE_NFIELDS (t); i++)
3581 	  {
3582 	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3583 							base_type);
3584 	    if (sub_count == -1)
3585 	      return -1;
3586 	    count = (count > sub_count ? count : sub_count);
3587 	  }
3588 	if (TYPE_LENGTH (t) == 0)
3589 	  {
3590 	    gdb_assert (count == 0);
3591 	    return 0;
3592 	  }
3593 	else if (count == 0)
3594 	  return -1;
3595 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3596 	if (TYPE_LENGTH (t) != unitlen * count)
3597 	  return -1;
3598 	return count;
3599       }
3600 
3601     default:
3602       break;
3603     }
3604 
3605   return -1;
3606 }
3607 
3608 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3609    if passed to or returned from a non-variadic function with the VFP
3610    ABI in effect.  Return 1 if it is, 0 otherwise.  If it is, set
3611    *BASE_TYPE to the base type for T and *COUNT to the number of
3612    elements of that base type before returning.  */
3613 
3614 static int
3615 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3616 			int *count)
3617 {
3618   enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3619   int c = arm_vfp_cprc_sub_candidate (t, &b);
3620   if (c <= 0 || c > 4)
3621     return 0;
3622   *base_type = b;
3623   *count = c;
3624   return 1;
3625 }
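
/* Illustrative sketch (not part of GDB): the classification above is in
   effect the AAPCS "homogeneous aggregate" test.  This standalone mirror
   works on a toy description (element size plus element count) rather
   than on GDB's struct type, but applies the same limits used by
   arm_vfp_call_candidate: one floating-point base type throughout and at
   most four elements.  The helper name is hypothetical.  */
#if 0
#include <stdio.h>

/* Return the number of VFP registers a homogeneous aggregate of COUNT
   fields, each ELEM_LEN bytes of floating point, would occupy, or 0 if
   it is not a VFP call candidate.  */
static int
vfp_candidate_regs (int elem_len, int count)
{
  if (elem_len != 4 && elem_len != 8)	/* only float and double here */
    return 0;
  if (count < 1 || count > 4)		/* at most four elements */
    return 0;
  return count;
}

int
main (void)
{
  /* struct { float x, y; }   -> 2 single-precision registers (s0, s1).
     struct { double m[4]; }  -> 4 double-precision registers (d0-d3).
     struct { double m[5]; }  -> not a candidate; core registers/stack.  */
  printf ("%d %d %d\n",
	  vfp_candidate_regs (4, 2),
	  vfp_candidate_regs (8, 4),
	  vfp_candidate_regs (8, 5));
  return 0;
}
#endif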
3626 
3627 /* Return 1 if the VFP ABI should be used for passing arguments to and
3628    returning values from a function of type FUNC_TYPE, 0
3629    otherwise.  */
3630 
3631 static int
3632 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3633 {
3634   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3635   /* Variadic functions always use the base ABI.  Assume that functions
3636      without debug info are not variadic.  */
3637   if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3638     return 0;
3639   /* The VFP ABI is only supported as a variant of AAPCS.  */
3640   if (tdep->arm_abi != ARM_ABI_AAPCS)
3641     return 0;
3642   return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3643 }
3644 
3645 /* We currently only support passing parameters in integer registers, which
3646    conforms with GCC's default model, and VFP argument passing following
3647    the VFP variant of AAPCS.  Several other variants exist and
3648    we should probably support some of them based on the selected ABI.  */
3649 
3650 static CORE_ADDR
3651 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3652 		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3653 		     struct value **args, CORE_ADDR sp, int struct_return,
3654 		     CORE_ADDR struct_addr)
3655 {
3656   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3657   int argnum;
3658   int argreg;
3659   int nstack;
3660   struct stack_item *si = NULL;
3661   int use_vfp_abi;
3662   struct type *ftype;
3663   unsigned vfp_regs_free = (1 << 16) - 1;
3664 
3665   /* Determine the type of this function and whether the VFP ABI
3666      applies.  */
3667   ftype = check_typedef (value_type (function));
3668   if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3669     ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3670   use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3671 
3672   /* Set the return address.  For the ARM, the return breakpoint is
3673      always at BP_ADDR.  */
3674   if (arm_pc_is_thumb (gdbarch, bp_addr))
3675     bp_addr |= 1;
3676   regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3677 
3678   /* Walk through the list of args and determine how large a temporary
3679      stack is required.  Need to take care here as structs may be
3680      passed on the stack, and we have to push them.  */
3681   nstack = 0;
3682 
3683   argreg = ARM_A1_REGNUM;
3684   nstack = 0;
3685 
3686   /* The struct_return pointer occupies the first parameter
3687      passing register.  */
3688   if (struct_return)
3689     {
3690       if (arm_debug)
3691 	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3692 			    gdbarch_register_name (gdbarch, argreg),
3693 			    paddress (gdbarch, struct_addr));
3694       regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3695       argreg++;
3696     }
3697 
3698   for (argnum = 0; argnum < nargs; argnum++)
3699     {
3700       int len;
3701       struct type *arg_type;
3702       struct type *target_type;
3703       enum type_code typecode;
3704       const bfd_byte *val;
3705       int align;
3706       enum arm_vfp_cprc_base_type vfp_base_type;
3707       int vfp_base_count;
3708       int may_use_core_reg = 1;
3709 
3710       arg_type = check_typedef (value_type (args[argnum]));
3711       len = TYPE_LENGTH (arg_type);
3712       target_type = TYPE_TARGET_TYPE (arg_type);
3713       typecode = TYPE_CODE (arg_type);
3714       val = value_contents (args[argnum]);
3715 
3716       align = arm_type_align (arg_type);
3717       /* Round alignment up to a whole number of words.  */
3718       align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3719       /* Different ABIs have different maximum alignments.  */
3720       if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3721 	{
3722 	  /* The APCS ABI only requires word alignment.  */
3723 	  align = INT_REGISTER_SIZE;
3724 	}
3725       else
3726 	{
3727 	  /* The AAPCS requires at most doubleword alignment.  */
3728 	  if (align > INT_REGISTER_SIZE * 2)
3729 	    align = INT_REGISTER_SIZE * 2;
3730 	}
3731 
3732       if (use_vfp_abi
3733 	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3734 				     &vfp_base_count))
3735 	{
3736 	  int regno;
3737 	  int unit_length;
3738 	  int shift;
3739 	  unsigned mask;
3740 
3741 	  /* Because this is a CPRC it cannot go in a core register or
3742 	     cause a core register to be skipped for alignment.
3743 	     Either it goes in VFP registers and the rest of this loop
3744 	     iteration is skipped for this argument, or it goes on the
3745 	     stack (and the stack alignment code is correct for this
3746 	     case).  */
3747 	  may_use_core_reg = 0;
3748 
3749 	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3750 	  shift = unit_length / 4;
3751 	  mask = (1 << (shift * vfp_base_count)) - 1;
3752 	  for (regno = 0; regno < 16; regno += shift)
3753 	    if (((vfp_regs_free >> regno) & mask) == mask)
3754 	      break;
3755 
3756 	  if (regno < 16)
3757 	    {
3758 	      int reg_char;
3759 	      int reg_scaled;
3760 	      int i;
3761 
3762 	      vfp_regs_free &= ~(mask << regno);
3763 	      reg_scaled = regno / shift;
3764 	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3765 	      for (i = 0; i < vfp_base_count; i++)
3766 		{
3767 		  char name_buf[4];
3768 		  int regnum;
3769 		  if (reg_char == 'q')
3770 		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3771 					 val + i * unit_length);
3772 		  else
3773 		    {
3774 		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3775 				 reg_char, reg_scaled + i);
3776 		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3777 							    strlen (name_buf));
3778 		      regcache_cooked_write (regcache, regnum,
3779 					     val + i * unit_length);
3780 		    }
3781 		}
3782 	      continue;
3783 	    }
3784 	  else
3785 	    {
3786 	      /* This CPRC could not go in VFP registers, so all VFP
3787 		 registers are now marked as used.  */
3788 	      vfp_regs_free = 0;
3789 	    }
3790 	}
3791 
3792       /* Push stack padding for doubleword alignment.  */
3793       if (nstack & (align - 1))
3794 	{
3795 	  si = push_stack_item (si, val, INT_REGISTER_SIZE);
3796 	  nstack += INT_REGISTER_SIZE;
3797 	}
3798 
3799       /* Doubleword aligned quantities must go in even register pairs.  */
3800       if (may_use_core_reg
3801 	  && argreg <= ARM_LAST_ARG_REGNUM
3802 	  && align > INT_REGISTER_SIZE
3803 	  && argreg & 1)
3804 	argreg++;
3805 
3806       /* If the argument is a pointer to a function, and it is a
3807 	 Thumb function, create a LOCAL copy of the value and set
3808 	 the THUMB bit in it.  */
3809       if (TYPE_CODE_PTR == typecode
3810 	  && target_type != NULL
3811 	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3812 	{
3813 	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3814 	  if (arm_pc_is_thumb (gdbarch, regval))
3815 	    {
3816 	      bfd_byte *copy = alloca (len);
3817 	      store_unsigned_integer (copy, len, byte_order,
3818 				      MAKE_THUMB_ADDR (regval));
3819 	      val = copy;
3820 	    }
3821 	}
3822 
3823       /* Copy the argument to general registers or the stack in
3824 	 register-sized pieces.  Large arguments are split between
3825 	 registers and stack.  */
3826       while (len > 0)
3827 	{
3828 	  int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3829 
3830 	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3831 	    {
3832 	      /* The argument is being passed in a general purpose
3833 		 register.  */
3834 	      CORE_ADDR regval
3835 		= extract_unsigned_integer (val, partial_len, byte_order);
3836 	      if (byte_order == BFD_ENDIAN_BIG)
3837 		regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3838 	      if (arm_debug)
3839 		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3840 				    argnum,
3841 				    gdbarch_register_name
3842 				      (gdbarch, argreg),
3843 				    phex (regval, INT_REGISTER_SIZE));
3844 	      regcache_cooked_write_unsigned (regcache, argreg, regval);
3845 	      argreg++;
3846 	    }
3847 	  else
3848 	    {
3849 	      /* Push the arguments onto the stack.  */
3850 	      if (arm_debug)
3851 		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3852 				    argnum, nstack);
3853 	      si = push_stack_item (si, val, INT_REGISTER_SIZE);
3854 	      nstack += INT_REGISTER_SIZE;
3855 	    }
3856 
3857 	  len -= partial_len;
3858 	  val += partial_len;
3859 	}
3860     }
3861   /* If we have an odd number of words to push, then decrement the stack
3862      by one word now, so the first stack argument will be dword aligned.  */
3863   if (nstack & 4)
3864     sp -= 4;
3865 
3866   while (si)
3867     {
3868       sp -= si->len;
3869       write_memory (sp, si->data, si->len);
3870       si = pop_stack_item (si);
3871     }
3872 
3873   /* Finally, update the SP register.  */
3874   regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3875 
3876   return sp;
3877 }
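
/* Illustrative sketch (not part of GDB): how the vfp_regs_free bitmask
   search in arm_push_dummy_call allocates argument registers.  Each bit
   stands for one 32-bit unit (s0-s15, i.e. d0-d7); a double-based
   candidate scans in steps of two bits.  The helper name and the driver
   are hypothetical.  */
#if 0
#include <stdio.h>

/* Return the first unit number whose SHIFT*COUNT-unit block is entirely
   free in *FREE_MASK, marking it used, or -1 if no block fits.  */
static int
alloc_vfp_block (unsigned *free_mask, int shift, int count)
{
  unsigned mask = (1u << (shift * count)) - 1;
  int regno;

  for (regno = 0; regno < 16; regno += shift)
    if (((*free_mask >> regno) & mask) == mask)
      {
	*free_mask &= ~(mask << regno);
	return regno;
      }
  return -1;
}

int
main (void)
{
  unsigned free_mask = (1u << 16) - 1;

  /* A struct of two doubles takes d0/d1 (units 0-3); a following float
     argument then lands in s4 (unit 4).  */
  printf ("%d\n", alloc_vfp_block (&free_mask, 2, 2));	/* 0 */
  printf ("%d\n", alloc_vfp_block (&free_mask, 1, 1));	/* 4 */
  return 0;
}
#endif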
3878 
3879 
3880 /* Always align the frame to an 8-byte boundary.  This is required on
3881    some platforms and harmless on the rest.  */
3882 
3883 static CORE_ADDR
3884 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3885 {
3886   /* Align the stack to eight bytes.  */
3887   return sp & ~ (CORE_ADDR) 7;
3888 }
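
/* Illustrative sketch (not part of GDB): the masking above simply rounds
   the stack pointer down to the next 8-byte boundary; the value used
   below is an arbitrary example address.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long sp = 0xbefff3b7ul;
  printf ("%#lx\n", sp & ~7ul);	/* 0xbefff3b0 */
  return 0;
}
#endif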
3889 
3890 static void
3891 print_fpu_flags (int flags)
3892 {
3893   if (flags & (1 << 0))
3894     fputs ("IVO ", stdout);
3895   if (flags & (1 << 1))
3896     fputs ("DVZ ", stdout);
3897   if (flags & (1 << 2))
3898     fputs ("OFL ", stdout);
3899   if (flags & (1 << 3))
3900     fputs ("UFL ", stdout);
3901   if (flags & (1 << 4))
3902     fputs ("INX ", stdout);
3903   putchar ('\n');
3904 }
3905 
3906 /* Print interesting information about the floating point processor
3907    (if present) or emulator.  */
3908 static void
3909 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3910 		      struct frame_info *frame, const char *args)
3911 {
3912   unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3913   int type;
3914 
3915   type = (status >> 24) & 127;
3916   if (status & (1 << 31))
3917     printf (_("Hardware FPU type %d\n"), type);
3918   else
3919     printf (_("Software FPU type %d\n"), type);
3920   /* i18n: [floating point unit] mask */
3921   fputs (_("mask: "), stdout);
3922   print_fpu_flags (status >> 16);
3923   /* i18n: [floating point unit] flags */
3924   fputs (_("flags: "), stdout);
3925   print_fpu_flags (status);
3926 }
3927 
3928 /* Construct the ARM extended floating point type.  */
3929 static struct type *
3930 arm_ext_type (struct gdbarch *gdbarch)
3931 {
3932   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3933 
3934   if (!tdep->arm_ext_type)
3935     tdep->arm_ext_type
3936       = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3937 			 floatformats_arm_ext);
3938 
3939   return tdep->arm_ext_type;
3940 }
3941 
3942 static struct type *
3943 arm_neon_double_type (struct gdbarch *gdbarch)
3944 {
3945   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3946 
3947   if (tdep->neon_double_type == NULL)
3948     {
3949       struct type *t, *elem;
3950 
3951       t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3952 			       TYPE_CODE_UNION);
3953       elem = builtin_type (gdbarch)->builtin_uint8;
3954       append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3955       elem = builtin_type (gdbarch)->builtin_uint16;
3956       append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3957       elem = builtin_type (gdbarch)->builtin_uint32;
3958       append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3959       elem = builtin_type (gdbarch)->builtin_uint64;
3960       append_composite_type_field (t, "u64", elem);
3961       elem = builtin_type (gdbarch)->builtin_float;
3962       append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3963       elem = builtin_type (gdbarch)->builtin_double;
3964       append_composite_type_field (t, "f64", elem);
3965 
3966       TYPE_VECTOR (t) = 1;
3967       TYPE_NAME (t) = "neon_d";
3968       tdep->neon_double_type = t;
3969     }
3970 
3971   return tdep->neon_double_type;
3972 }
3973 
3974 /* FIXME: The vector types are not correctly ordered on big-endian
3975    targets.  Just as s0 is the low bits of d0, d0[0] is also the low
3976    bits of d0 - regardless of what unit size is being held in d0.  So
3977    the offset of the first uint8 in d0 is 7, but the offset of the
3978    first float is 4.  This code works as-is for little-endian
3979    targets.  */
3980 
3981 static struct type *
3982 arm_neon_quad_type (struct gdbarch *gdbarch)
3983 {
3984   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3985 
3986   if (tdep->neon_quad_type == NULL)
3987     {
3988       struct type *t, *elem;
3989 
3990       t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3991 			       TYPE_CODE_UNION);
3992       elem = builtin_type (gdbarch)->builtin_uint8;
3993       append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3994       elem = builtin_type (gdbarch)->builtin_uint16;
3995       append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3996       elem = builtin_type (gdbarch)->builtin_uint32;
3997       append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3998       elem = builtin_type (gdbarch)->builtin_uint64;
3999       append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4000       elem = builtin_type (gdbarch)->builtin_float;
4001       append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4002       elem = builtin_type (gdbarch)->builtin_double;
4003       append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4004 
4005       TYPE_VECTOR (t) = 1;
4006       TYPE_NAME (t) = "neon_q";
4007       tdep->neon_quad_type = t;
4008     }
4009 
4010   return tdep->neon_quad_type;
4011 }
4012 
4013 /* Return the GDB type object for the "standard" data type of data in
4014    register N.  */
4015 
4016 static struct type *
4017 arm_register_type (struct gdbarch *gdbarch, int regnum)
4018 {
4019   int num_regs = gdbarch_num_regs (gdbarch);
4020 
4021   if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4022       && regnum >= num_regs && regnum < num_regs + 32)
4023     return builtin_type (gdbarch)->builtin_float;
4024 
4025   if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4026       && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4027     return arm_neon_quad_type (gdbarch);
4028 
4029   /* If the target description has register information, we are only
4030      in this function so that we can override the types of
4031      double-precision registers for NEON.  */
4032   if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4033     {
4034       struct type *t = tdesc_register_type (gdbarch, regnum);
4035 
4036       if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4037 	  && TYPE_CODE (t) == TYPE_CODE_FLT
4038 	  && gdbarch_tdep (gdbarch)->have_neon)
4039 	return arm_neon_double_type (gdbarch);
4040       else
4041 	return t;
4042     }
4043 
4044   if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4045     {
4046       if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4047 	return builtin_type (gdbarch)->builtin_void;
4048 
4049       return arm_ext_type (gdbarch);
4050     }
4051   else if (regnum == ARM_SP_REGNUM)
4052     return builtin_type (gdbarch)->builtin_data_ptr;
4053   else if (regnum == ARM_PC_REGNUM)
4054     return builtin_type (gdbarch)->builtin_func_ptr;
4055   else if (regnum >= ARRAY_SIZE (arm_register_names))
4056     /* These registers are only supported on targets which supply
4057        an XML description.  */
4058     return builtin_type (gdbarch)->builtin_int0;
4059   else
4060     return builtin_type (gdbarch)->builtin_uint32;
4061 }
4062 
4063 /* Map a DWARF register REGNUM onto the appropriate GDB register
4064    number.  */
4065 
4066 static int
4067 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4068 {
4069   /* Core integer regs.  */
4070   if (reg >= 0 && reg <= 15)
4071     return reg;
4072 
4073   /* Legacy FPA encoding.  These were once used in a way which
4074      overlapped with VFP register numbering, so their use is
4075      discouraged, but GDB doesn't support the ARM toolchain
4076      which used them for VFP.  */
4077   if (reg >= 16 && reg <= 23)
4078     return ARM_F0_REGNUM + reg - 16;
4079 
4080   /* New assignments for the FPA registers.  */
4081   if (reg >= 96 && reg <= 103)
4082     return ARM_F0_REGNUM + reg - 96;
4083 
4084   /* WMMX register assignments.  */
4085   if (reg >= 104 && reg <= 111)
4086     return ARM_WCGR0_REGNUM + reg - 104;
4087 
4088   if (reg >= 112 && reg <= 127)
4089     return ARM_WR0_REGNUM + reg - 112;
4090 
4091   if (reg >= 192 && reg <= 199)
4092     return ARM_WC0_REGNUM + reg - 192;
4093 
4094   /* VFP v2 registers.  A double precision value is actually
4095      in d1 rather than s2, but the ABI only defines numbering
4096      for the single precision registers.  This will "just work"
4097      in GDB for little endian targets (we'll read eight bytes,
4098      starting in s0 and then progressing to s1), but will be
4099      reversed on big endian targets with VFP.  This won't
4100      be a problem for the new Neon quad registers; you're supposed
4101      to use DW_OP_piece for those.  */
4102   if (reg >= 64 && reg <= 95)
4103     {
4104       char name_buf[4];
4105 
4106       xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4107       return user_reg_map_name_to_regnum (gdbarch, name_buf,
4108 					  strlen (name_buf));
4109     }
4110 
4111   /* VFP v3 / Neon registers.  This range is also used for VFP v2
4112      registers, except that it now describes d0 instead of s0.  */
4113   if (reg >= 256 && reg <= 287)
4114     {
4115       char name_buf[4];
4116 
4117       xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4118       return user_reg_map_name_to_regnum (gdbarch, name_buf,
4119 					  strlen (name_buf));
4120     }
4121 
4122   return -1;
4123 }
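
/* Illustrative sketch (not part of GDB): a plain-text rendering of a few
   of the DWARF register ranges handled above.  Only representative
   ranges are reproduced; the real function resolves the VFP names
   through user_reg_map_name_to_regnum.  */
#if 0
#include <stdio.h>

static void
describe_dwarf_reg (int reg)
{
  if (reg >= 0 && reg <= 15)
    printf ("DWARF %d -> core register r%d\n", reg, reg);
  else if (reg >= 64 && reg <= 95)
    printf ("DWARF %d -> VFPv2 single s%d\n", reg, reg - 64);
  else if (reg >= 256 && reg <= 287)
    printf ("DWARF %d -> VFPv3/Neon double d%d\n", reg, reg - 256);
  else
    printf ("DWARF %d -> (other or unmapped)\n", reg);
}

int
main (void)
{
  describe_dwarf_reg (13);	/* r13, the stack pointer */
  describe_dwarf_reg (64);	/* s0 */
  describe_dwarf_reg (257);	/* d1 */
  return 0;
}
#endif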
4124 
4125 /* Map GDB internal REGNUM onto the Arm simulator register numbers.  */
4126 static int
4127 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4128 {
4129   int reg = regnum;
4130   gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4131 
4132   if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4133     return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4134 
4135   if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4136     return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4137 
4138   if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4139     return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4140 
4141   if (reg < NUM_GREGS)
4142     return SIM_ARM_R0_REGNUM + reg;
4143   reg -= NUM_GREGS;
4144 
4145   if (reg < NUM_FREGS)
4146     return SIM_ARM_FP0_REGNUM + reg;
4147   reg -= NUM_FREGS;
4148 
4149   if (reg < NUM_SREGS)
4150     return SIM_ARM_FPS_REGNUM + reg;
4151   reg -= NUM_SREGS;
4152 
4153   internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4154 }
4155 
4156 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4157    convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4158    It is thought that this is the floating-point register format on
4159    little-endian systems.  */
4160 
4161 static void
4162 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4163 		       void *dbl, int endianess)
4164 {
4165   DOUBLEST d;
4166 
4167   if (endianess == BFD_ENDIAN_BIG)
4168     floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4169   else
4170     floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4171 			     ptr, &d);
4172   floatformat_from_doublest (fmt, &d, dbl);
4173 }
4174 
4175 static void
4176 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4177 		     int endianess)
4178 {
4179   DOUBLEST d;
4180 
4181   floatformat_to_doublest (fmt, ptr, &d);
4182   if (endianess == BFD_ENDIAN_BIG)
4183     floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4184   else
4185     floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4186 			       &d, dbl);
4187 }
4188 
4189 static int
4190 condition_true (unsigned long cond, unsigned long status_reg)
4191 {
4192   if (cond == INST_AL || cond == INST_NV)
4193     return 1;
4194 
4195   switch (cond)
4196     {
4197     case INST_EQ:
4198       return ((status_reg & FLAG_Z) != 0);
4199     case INST_NE:
4200       return ((status_reg & FLAG_Z) == 0);
4201     case INST_CS:
4202       return ((status_reg & FLAG_C) != 0);
4203     case INST_CC:
4204       return ((status_reg & FLAG_C) == 0);
4205     case INST_MI:
4206       return ((status_reg & FLAG_N) != 0);
4207     case INST_PL:
4208       return ((status_reg & FLAG_N) == 0);
4209     case INST_VS:
4210       return ((status_reg & FLAG_V) != 0);
4211     case INST_VC:
4212       return ((status_reg & FLAG_V) == 0);
4213     case INST_HI:
4214       return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4215     case INST_LS:
4216       return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4217     case INST_GE:
4218       return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4219     case INST_LT:
4220       return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4221     case INST_GT:
4222       return (((status_reg & FLAG_Z) == 0)
4223 	      && (((status_reg & FLAG_N) == 0)
4224 		  == ((status_reg & FLAG_V) == 0)));
4225     case INST_LE:
4226       return (((status_reg & FLAG_Z) != 0)
4227 	      || (((status_reg & FLAG_N) == 0)
4228 		  != ((status_reg & FLAG_V) == 0)));
4229     }
4230   return 1;
4231 }
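
/* Illustrative sketch (not part of GDB): the GE/LT tests above applied
   to a concrete CPSR value.  The masks below are assumed to mirror the
   FLAG_* definitions in arm-tdep.h, with N and V occupying CPSR bits 31
   and 28 respectively.  */
#if 0
#include <stdio.h>

#define N 0x80000000u
#define V 0x10000000u

int
main (void)
{
  unsigned long cpsr = N;	/* negative result, no overflow */

  /* GE is true when N == V; here N=1, V=0, so GE is false and LT true.  */
  printf ("GE: %d\n", ((cpsr & N) == 0) == ((cpsr & V) == 0));	/* 0 */
  printf ("LT: %d\n", ((cpsr & N) == 0) != ((cpsr & V) == 0));	/* 1 */
  return 0;
}
#endif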
4232 
4233 static unsigned long
4234 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4235 		 unsigned long pc_val, unsigned long status_reg)
4236 {
4237   unsigned long res, shift;
4238   int rm = bits (inst, 0, 3);
4239   unsigned long shifttype = bits (inst, 5, 6);
4240 
4241   if (bit (inst, 4))
4242     {
4243       int rs = bits (inst, 8, 11);
4244       shift = (rs == 15 ? pc_val + 8
4245 			: get_frame_register_unsigned (frame, rs)) & 0xFF;
4246     }
4247   else
4248     shift = bits (inst, 7, 11);
4249 
4250   res = (rm == ARM_PC_REGNUM
4251 	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4252 	 : get_frame_register_unsigned (frame, rm));
4253 
4254   switch (shifttype)
4255     {
4256     case 0:			/* LSL */
4257       res = shift >= 32 ? 0 : res << shift;
4258       break;
4259 
4260     case 1:			/* LSR */
4261       res = shift >= 32 ? 0 : res >> shift;
4262       break;
4263 
4264     case 2:			/* ASR */
4265       if (shift >= 32)
4266 	shift = 31;
4267       res = ((res & 0x80000000L)
4268 	     ? ~((~res) >> shift) : res >> shift);
4269       break;
4270 
4271     case 3:			/* ROR/RRX */
4272       shift &= 31;
4273       if (shift == 0)
4274 	res = (res >> 1) | (carry ? 0x80000000L : 0);
4275       else
4276 	res = (res >> shift) | (res << (32 - shift));
4277       break;
4278     }
4279 
4280   return res & 0xffffffff;
4281 }
4282 
4283 /* Return number of 1-bits in VAL.  */
4284 
4285 static int
4286 bitcount (unsigned long val)
4287 {
4288   int nbits;
4289   for (nbits = 0; val != 0; nbits++)
4290     val &= val - 1;		/* Delete rightmost 1-bit in val.  */
4291   return nbits;
4292 }
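
/* Illustrative sketch (not part of GDB): the loop above is the classic
   "clear the lowest set bit" trick, so it iterates once per set bit
   rather than once per bit position.  The driver shows the way
   thumb_get_next_pc_raw uses it: counting a pop's register list to find
   how far above SP the saved PC lives.  */
#if 0
#include <stdio.h>

static int
popcount32 (unsigned long val)
{
  int nbits;
  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* clear the rightmost set bit */
  return nbits;
}

int
main (void)
{
  /* Register list r4-r7 from a Thumb "pop {r4-r7, pc}"; four registers
     are popped below the saved PC, so the PC sits 16 bytes up the stack.  */
  printf ("%d\n", popcount32 (0xf0) * 4);	/* 16 */
  return 0;
}
#endif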
4293 
4294 /* Return the size in bytes of the complete Thumb instruction whose
4295    first halfword is INST1.  */
4296 
4297 static int
4298 thumb_insn_size (unsigned short inst1)
4299 {
4300   if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4301     return 4;
4302   else
4303     return 2;
4304 }
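
/* Illustrative sketch (not part of GDB): first halfwords 0xe800-0xffff
   start a 32-bit Thumb-2 encoding; everything else, including the 16-bit
   unconditional branch range 0xe000-0xe7ff, is a single halfword.  The
   example opcodes are representative encodings.  */
#if 0
#include <stdio.h>

static int
thumb_size (unsigned short inst1)
{
  return ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0) ? 4 : 2;
}

int
main (void)
{
  printf ("%d %d %d\n",
	  thumb_size (0xe92d),	/* push.w {...}   -> 4 */
	  thumb_size (0xb580),	/* push {r7, lr}  -> 2 */
	  thumb_size (0xe005));	/* b <label>      -> 2 */
  return 0;
}
#endif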
4305 
4306 static int
4307 thumb_advance_itstate (unsigned int itstate)
4308 {
4309   /* Preserve IT[7:5], the first three bits of the condition.  Shift
4310      the upcoming condition flags left by one bit.  */
4311   itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4312 
4313   /* If we have finished the IT block, clear the state.  */
4314   if ((itstate & 0x0f) == 0)
4315     itstate = 0;
4316 
4317   return itstate;
4318 }
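
/* Illustrative sketch (not part of GDB): how an ITSTATE value advances
   through an IT block.  Assuming the standard IT encoding, "ITTT EQ"
   (three EQ-conditional instructions) yields ITSTATE 0x02: the EQ
   condition in the high nibble and a mask whose low set bit terminates
   the block.  */
#if 0
#include <stdio.h>

static unsigned int
advance_itstate (unsigned int itstate)
{
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
  if ((itstate & 0x0f) == 0)	/* IT block finished */
    itstate = 0;
  return itstate;
}

int
main (void)
{
  unsigned int itstate = 0x02;	/* ITTT EQ */
  int n = 0;

  /* Prints three lines, one per conditional instruction in the block;
     the condition nibble stays 0x0 (EQ) throughout.  */
  while (itstate != 0)
    {
      printf ("instruction %d: condition %#x, itstate %#04x\n",
	      ++n, itstate >> 4, itstate);
      itstate = advance_itstate (itstate);
    }
  return 0;
}
#endif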
4319 
4320 /* Find the next PC after the current instruction executes.  In some
4321    cases we can not statically determine the answer (see the IT state
4322    handling in this function); in that case, a breakpoint may be
4323    inserted in addition to the returned PC, which will be used to set
4324    another breakpoint by our caller.  */
4325 
4326 static CORE_ADDR
4327 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4328 {
4329   struct gdbarch *gdbarch = get_frame_arch (frame);
4330   struct address_space *aspace = get_frame_address_space (frame);
4331   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4332   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4333   unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
4334   unsigned short inst1;
4335   CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
4336   unsigned long offset;
4337   ULONGEST status, itstate;
4338 
4339   nextpc = MAKE_THUMB_ADDR (nextpc);
4340   pc_val = MAKE_THUMB_ADDR (pc_val);
4341 
4342   inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4343 
4344   /* Thumb-2 conditional execution support.  There are eight bits in
4345      the CPSR which describe conditional execution state.  Once
4346      reconstructed (they're in a funny order), the low five bits
4347      describe the low bit of the condition for each instruction and
4348      how many instructions remain.  The high three bits describe the
4349      base condition.  One of the low four bits will be set if an IT
4350      block is active.  These bits read as zero on earlier
4351      processors.  */
4352   status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4353   itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4354 
4355   /* If-Then handling.  On GNU/Linux, where this routine is used, we
4356      use an undefined instruction as a breakpoint.  Unlike BKPT, IT
4357      can disable execution of the undefined instruction.  So we might
4358      miss the breakpoint if we set it on a skipped conditional
4359      instruction.  Because conditional instructions can change the
4360      flags, affecting the execution of further instructions, we may
4361      need to set two breakpoints.  */
4362 
4363   if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4364     {
4365       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4366 	{
4367 	  /* An IT instruction.  Because this instruction does not
4368 	     modify the flags, we can accurately predict the next
4369 	     executed instruction.  */
4370 	  itstate = inst1 & 0x00ff;
4371 	  pc += thumb_insn_size (inst1);
4372 
4373 	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
4374 	    {
4375 	      inst1 = read_memory_unsigned_integer (pc, 2,
4376 						    byte_order_for_code);
4377 	      pc += thumb_insn_size (inst1);
4378 	      itstate = thumb_advance_itstate (itstate);
4379 	    }
4380 
4381 	  return MAKE_THUMB_ADDR (pc);
4382 	}
4383       else if (itstate != 0)
4384 	{
4385 	  /* We are in a conditional block.  Check the condition.  */
4386 	  if (! condition_true (itstate >> 4, status))
4387 	    {
4388 	      /* Advance to the next executed instruction.  */
4389 	      pc += thumb_insn_size (inst1);
4390 	      itstate = thumb_advance_itstate (itstate);
4391 
4392 	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
4393 		{
4394 		  inst1 = read_memory_unsigned_integer (pc, 2,
4395 							byte_order_for_code);
4396 		  pc += thumb_insn_size (inst1);
4397 		  itstate = thumb_advance_itstate (itstate);
4398 		}
4399 
4400 	      return MAKE_THUMB_ADDR (pc);
4401 	    }
4402 	  else if ((itstate & 0x0f) == 0x08)
4403 	    {
4404 	      /* This is the last instruction of the conditional
4405 		 block, and it is executed.  We can handle it normally
4406 		 because the following instruction is not conditional,
4407 		 and we must handle it normally because it is
4408 		 permitted to branch.  Fall through.  */
4409 	    }
4410 	  else
4411 	    {
4412 	      int cond_negated;
4413 
4414 	      /* There are conditional instructions after this one.
4415 		 If this instruction modifies the flags, then we can
4416 		 not predict what the next executed instruction will
4417 		 be.  Fortunately, this instruction is architecturally
4418 		 forbidden to branch; we know it will fall through.
4419 		 Start by skipping past it.  */
4420 	      pc += thumb_insn_size (inst1);
4421 	      itstate = thumb_advance_itstate (itstate);
4422 
4423 	      /* Set a breakpoint on the following instruction.  */
4424 	      gdb_assert ((itstate & 0x0f) != 0);
4425 	      arm_insert_single_step_breakpoint (gdbarch, aspace,
4426 						 MAKE_THUMB_ADDR (pc));
4427 	      cond_negated = (itstate >> 4) & 1;
4428 
4429 	      /* Skip all following instructions with the same
4430 		 condition.  If there is a later instruction in the IT
4431 		 block with the opposite condition, set the other
4432 		 breakpoint there.  If not, then set a breakpoint on
4433 		 the instruction after the IT block.  */
4434 	      do
4435 		{
4436 		  inst1 = read_memory_unsigned_integer (pc, 2,
4437 							byte_order_for_code);
4438 		  pc += thumb_insn_size (inst1);
4439 		  itstate = thumb_advance_itstate (itstate);
4440 		}
4441 	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4442 
4443 	      return MAKE_THUMB_ADDR (pc);
4444 	    }
4445 	}
4446     }
4447   else if (itstate & 0x0f)
4448     {
4449       /* We are in a conditional block.  Check the condition.  */
4450       int cond = itstate >> 4;
4451 
4452       if (! condition_true (cond, status))
4453 	/* Advance to the next instruction.  All the 32-bit
4454 	   instructions share a common prefix.  */
4455 	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4456 
4457       /* Otherwise, handle the instruction normally.  */
4458     }
4459 
4460   if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
4461     {
4462       CORE_ADDR sp;
4463 
4464       /* Fetch the saved PC from the stack.  It's stored above
4465          all of the other registers.  */
4466       offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4467       sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4468       nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4469     }
4470   else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
4471     {
4472       unsigned long cond = bits (inst1, 8, 11);
4473       if (cond == 0x0f)  /* 0x0f = SWI */
4474 	{
4475 	  struct gdbarch_tdep *tdep;
4476 	  tdep = gdbarch_tdep (gdbarch);
4477 
4478 	  if (tdep->syscall_next_pc != NULL)
4479 	    nextpc = tdep->syscall_next_pc (frame);
4480 
4481 	}
4482       else if (cond != 0x0f && condition_true (cond, status))
4483 	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4484     }
4485   else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
4486     {
4487       nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4488     }
4489   else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4490     {
4491       unsigned short inst2;
4492       inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4493 
4494       /* Default to the next instruction.  */
4495       nextpc = pc + 4;
4496       nextpc = MAKE_THUMB_ADDR (nextpc);
4497 
4498       if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4499 	{
4500 	  /* Branches and miscellaneous control instructions.  */
4501 
4502 	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4503 	    {
4504 	      /* B, BL, BLX.  */
4505 	      int j1, j2, imm1, imm2;
4506 
4507 	      imm1 = sbits (inst1, 0, 10);
4508 	      imm2 = bits (inst2, 0, 10);
4509 	      j1 = bit (inst2, 13);
4510 	      j2 = bit (inst2, 11);
4511 
4512 	      offset = ((imm1 << 12) + (imm2 << 1));
4513 	      offset ^= ((!j2) << 22) | ((!j1) << 23);
4514 
4515 	      nextpc = pc_val + offset;
4516 	      /* For BLX make sure to clear the low bits.  */
4517 	      if (bit (inst2, 12) == 0)
4518 		nextpc = nextpc & 0xfffffffc;
4519 	    }
4520 	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4521 	    {
4522 	      /* SUBS PC, LR, #imm8.  */
4523 	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4524 	      nextpc -= inst2 & 0x00ff;
4525 	    }
4526 	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4527 	    {
4528 	      /* Conditional branch.  */
4529 	      if (condition_true (bits (inst1, 6, 9), status))
4530 		{
4531 		  int sign, j1, j2, imm1, imm2;
4532 
4533 		  sign = sbits (inst1, 10, 10);
4534 		  imm1 = bits (inst1, 0, 5);
4535 		  imm2 = bits (inst2, 0, 10);
4536 		  j1 = bit (inst2, 13);
4537 		  j2 = bit (inst2, 11);
4538 
4539 		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4540 		  offset += (imm1 << 12) + (imm2 << 1);
4541 
4542 		  nextpc = pc_val + offset;
4543 		}
4544 	    }
4545 	}
4546       else if ((inst1 & 0xfe50) == 0xe810)
4547 	{
4548 	  /* Load multiple or RFE.  */
4549 	  int rn, offset, load_pc = 1;
4550 
4551 	  rn = bits (inst1, 0, 3);
4552 	  if (bit (inst1, 7) && !bit (inst1, 8))
4553 	    {
4554 	      /* LDMIA or POP */
4555 	      if (!bit (inst2, 15))
4556 		load_pc = 0;
4557 	      offset = bitcount (inst2) * 4 - 4;
4558 	    }
4559 	  else if (!bit (inst1, 7) && bit (inst1, 8))
4560 	    {
4561 	      /* LDMDB */
4562 	      if (!bit (inst2, 15))
4563 		load_pc = 0;
4564 	      offset = -4;
4565 	    }
4566 	  else if (bit (inst1, 7) && bit (inst1, 8))
4567 	    {
4568 	      /* RFEIA */
4569 	      offset = 0;
4570 	    }
4571 	  else if (!bit (inst1, 7) && !bit (inst1, 8))
4572 	    {
4573 	      /* RFEDB */
4574 	      offset = -8;
4575 	    }
4576 	  else
4577 	    load_pc = 0;
4578 
4579 	  if (load_pc)
4580 	    {
4581 	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4582 	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4583 	    }
4584 	}
4585       else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4586 	{
4587 	  /* MOV PC or MOVS PC.  */
4588 	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4589 	  nextpc = MAKE_THUMB_ADDR (nextpc);
4590 	}
4591       else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4592 	{
4593 	  /* LDR PC.  */
4594 	  CORE_ADDR base;
4595 	  int rn, load_pc = 1;
4596 
4597 	  rn = bits (inst1, 0, 3);
4598 	  base = get_frame_register_unsigned (frame, rn);
4599 	  if (rn == ARM_PC_REGNUM)
4600 	    {
4601 	      base = (base + 4) & ~(CORE_ADDR) 0x3;
4602 	      if (bit (inst1, 7))
4603 		base += bits (inst2, 0, 11);
4604 	      else
4605 		base -= bits (inst2, 0, 11);
4606 	    }
4607 	  else if (bit (inst1, 7))
4608 	    base += bits (inst2, 0, 11);
4609 	  else if (bit (inst2, 11))
4610 	    {
4611 	      if (bit (inst2, 10))
4612 		{
4613 		  if (bit (inst2, 9))
4614 		    base += bits (inst2, 0, 7);
4615 		  else
4616 		    base -= bits (inst2, 0, 7);
4617 		}
4618 	    }
4619 	  else if ((inst2 & 0x0fc0) == 0x0000)
4620 	    {
4621 	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4622 	      base += get_frame_register_unsigned (frame, rm) << shift;
4623 	    }
4624 	  else
4625 	    /* Reserved.  */
4626 	    load_pc = 0;
4627 
4628 	  if (load_pc)
4629 	    nextpc = get_frame_memory_unsigned (frame, base, 4);
4630 	}
4631       else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4632 	{
4633 	  /* TBB.  */
4634 	  CORE_ADDR tbl_reg, table, offset, length;
4635 
4636 	  tbl_reg = bits (inst1, 0, 3);
4637 	  if (tbl_reg == 0x0f)
4638 	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
4639 	  else
4640 	    table = get_frame_register_unsigned (frame, tbl_reg);
4641 
4642 	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4643 	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4644 	  nextpc = pc_val + length;
4645 	}
4646       else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4647 	{
4648 	  /* TBH.  */
4649 	  CORE_ADDR tbl_reg, table, offset, length;
4650 
4651 	  tbl_reg = bits (inst1, 0, 3);
4652 	  if (tbl_reg == 0x0f)
4653 	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
4654 	  else
4655 	    table = get_frame_register_unsigned (frame, tbl_reg);
4656 
4657 	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4658 	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4659 	  nextpc = pc_val + length;
4660 	}
4661     }
4662   else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
4663     {
4664       if (bits (inst1, 3, 6) == 0x0f)
4665 	nextpc = UNMAKE_THUMB_ADDR (pc_val);
4666       else
4667 	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4668     }
4669   else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
4670     {
4671       if (bits (inst1, 3, 6) == 0x0f)
4672 	nextpc = pc_val;
4673       else
4674 	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4675 
4676       nextpc = MAKE_THUMB_ADDR (nextpc);
4677     }
4678   else if ((inst1 & 0xf500) == 0xb100)
4679     {
4680       /* CBNZ or CBZ.  */
4681       int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4682       ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4683 
4684       if (bit (inst1, 11) && reg != 0)
4685 	nextpc = pc_val + imm;
4686       else if (!bit (inst1, 11) && reg == 0)
4687 	nextpc = pc_val + imm;
4688     }
4689   return nextpc;
4690 }
4691 
4692 /* Get the raw next address.  PC is the current program counter, in
4693    FRAME, which is assumed to be executing in ARM mode.
4694 
4695    The value returned has the execution state of the next instruction
4696    encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
4697    in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4698    address.  */
4699 
4700 static CORE_ADDR
4701 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4702 {
4703   struct gdbarch *gdbarch = get_frame_arch (frame);
4704   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4705   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4706   unsigned long pc_val;
4707   unsigned long this_instr;
4708   unsigned long status;
4709   CORE_ADDR nextpc;
4710 
4711   pc_val = (unsigned long) pc;
4712   this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4713 
4714   status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4715   nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */
4716 
4717   if (bits (this_instr, 28, 31) == INST_NV)
4718     switch (bits (this_instr, 24, 27))
4719       {
4720       case 0xa:
4721       case 0xb:
4722 	{
4723 	  /* Branch with Link and change to Thumb.  */
4724 	  nextpc = BranchDest (pc, this_instr);
4725 	  nextpc |= bit (this_instr, 24) << 1;
4726 	  nextpc = MAKE_THUMB_ADDR (nextpc);
4727 	  break;
4728 	}
4729       case 0xc:
4730       case 0xd:
4731       case 0xe:
4732 	/* Coprocessor register transfer.  */
4733         if (bits (this_instr, 12, 15) == 15)
4734 	  error (_("Invalid update to pc in instruction"));
4735 	break;
4736       }
4737   else if (condition_true (bits (this_instr, 28, 31), status))
4738     {
4739       switch (bits (this_instr, 24, 27))
4740 	{
4741 	case 0x0:
4742 	case 0x1:			/* data processing */
4743 	case 0x2:
4744 	case 0x3:
4745 	  {
4746 	    unsigned long operand1, operand2, result = 0;
4747 	    unsigned long rn;
4748 	    int c;
4749 
4750 	    if (bits (this_instr, 12, 15) != 15)
4751 	      break;
4752 
4753 	    if (bits (this_instr, 22, 25) == 0
4754 		&& bits (this_instr, 4, 7) == 9)	/* multiply */
4755 	      error (_("Invalid update to pc in instruction"));
4756 
4757 	    /* BX <reg>, BLX <reg> */
4758 	    if (bits (this_instr, 4, 27) == 0x12fff1
4759 		|| bits (this_instr, 4, 27) == 0x12fff3)
4760 	      {
4761 		rn = bits (this_instr, 0, 3);
4762 		nextpc = ((rn == ARM_PC_REGNUM)
4763 			  ? (pc_val + 8)
4764 			  : get_frame_register_unsigned (frame, rn));
4765 
4766 		return nextpc;
4767 	      }
4768 
4769 	    /* Multiply into PC.  */
4770 	    c = (status & FLAG_C) ? 1 : 0;
4771 	    rn = bits (this_instr, 16, 19);
4772 	    operand1 = ((rn == ARM_PC_REGNUM)
4773 			? (pc_val + 8)
4774 			: get_frame_register_unsigned (frame, rn));
4775 
4776 	    if (bit (this_instr, 25))
4777 	      {
4778 		unsigned long immval = bits (this_instr, 0, 7);
4779 		unsigned long rotate = 2 * bits (this_instr, 8, 11);
4780 		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4781 		  & 0xffffffff;
4782 	      }
4783 	    else		/* operand 2 is a shifted register.  */
4784 	      operand2 = shifted_reg_val (frame, this_instr, c,
4785 					  pc_val, status);
4786 
4787 	    switch (bits (this_instr, 21, 24))
4788 	      {
4789 	      case 0x0:	/*and */
4790 		result = operand1 & operand2;
4791 		break;
4792 
4793 	      case 0x1:	/*eor */
4794 		result = operand1 ^ operand2;
4795 		break;
4796 
4797 	      case 0x2:	/*sub */
4798 		result = operand1 - operand2;
4799 		break;
4800 
4801 	      case 0x3:	/*rsb */
4802 		result = operand2 - operand1;
4803 		break;
4804 
4805 	      case 0x4:	/*add */
4806 		result = operand1 + operand2;
4807 		break;
4808 
4809 	      case 0x5:	/*adc */
4810 		result = operand1 + operand2 + c;
4811 		break;
4812 
4813 	      case 0x6:	/*sbc */
4814 		result = operand1 - operand2 + c;
4815 		break;
4816 
4817 	      case 0x7:	/*rsc */
4818 		result = operand2 - operand1 + c;
4819 		break;
4820 
4821 	      case 0x8:
4822 	      case 0x9:
4823 	      case 0xa:
4824 	      case 0xb:	/* tst, teq, cmp, cmn */
4825 		result = (unsigned long) nextpc;
4826 		break;
4827 
4828 	      case 0xc:	/*orr */
4829 		result = operand1 | operand2;
4830 		break;
4831 
4832 	      case 0xd:	/*mov */
4833 		/* Always step into a function.  */
4834 		result = operand2;
4835 		break;
4836 
4837 	      case 0xe:	/*bic */
4838 		result = operand1 & ~operand2;
4839 		break;
4840 
4841 	      case 0xf:	/*mvn */
4842 		result = ~operand2;
4843 		break;
4844 	      }
4845 
4846             /* In 26-bit APCS the bottom two bits of the result are
4847 	       ignored, and we always end up in ARM state.  */
4848 	    if (!arm_apcs_32)
4849 	      nextpc = arm_addr_bits_remove (gdbarch, result);
4850 	    else
4851 	      nextpc = result;
4852 
4853 	    break;
4854 	  }
4855 
4856 	case 0x4:
4857 	case 0x5:		/* data transfer */
4858 	case 0x6:
4859 	case 0x7:
4860 	  if (bit (this_instr, 20))
4861 	    {
4862 	      /* load */
4863 	      if (bits (this_instr, 12, 15) == 15)
4864 		{
4865 		  /* rd == pc */
4866 		  unsigned long rn;
4867 		  unsigned long base;
4868 
4869 		  if (bit (this_instr, 22))
4870 		    error (_("Invalid update to pc in instruction"));
4871 
4872 		  /* Word load into the PC: work out the base address.  */
4873 		  rn = bits (this_instr, 16, 19);
4874 		  base = ((rn == ARM_PC_REGNUM)
4875 			  ? (pc_val + 8)
4876 			  : get_frame_register_unsigned (frame, rn));
4877 
4878 		  if (bit (this_instr, 24))
4879 		    {
4880 		      /* pre-indexed */
4881 		      int c = (status & FLAG_C) ? 1 : 0;
4882 		      unsigned long offset =
4883 		      (bit (this_instr, 25)
4884 		       ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4885 		       : bits (this_instr, 0, 11));
4886 
4887 		      if (bit (this_instr, 23))
4888 			base += offset;
4889 		      else
4890 			base -= offset;
4891 		    }
4892 		  nextpc =
4893 		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4894 							      4, byte_order);
4895 		}
4896 	    }
4897 	  break;
4898 
4899 	case 0x8:
4900 	case 0x9:		/* block transfer */
4901 	  if (bit (this_instr, 20))
4902 	    {
4903 	      /* LDM */
4904 	      if (bit (this_instr, 15))
4905 		{
4906 		  /* loading pc */
4907 		  int offset = 0;
4908 		  unsigned long rn_val
4909 		    = get_frame_register_unsigned (frame,
4910 						   bits (this_instr, 16, 19));
4911 
4912 		  if (bit (this_instr, 23))
4913 		    {
4914 		      /* up */
4915 		      unsigned long reglist = bits (this_instr, 0, 14);
4916 		      offset = bitcount (reglist) * 4;
4917 		      if (bit (this_instr, 24))		/* pre */
4918 			offset += 4;
4919 		    }
4920 		  else if (bit (this_instr, 24))
4921 		    offset = -4;
4922 
4923 		  nextpc =
4924 		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4925 							      (rn_val + offset),
4926 							      4, byte_order);
4927 		}
4928 	    }
4929 	  break;
4930 
4931 	case 0xb:		/* branch & link */
4932 	case 0xa:		/* branch */
4933 	  {
4934 	    nextpc = BranchDest (pc, this_instr);
4935 	    break;
4936 	  }
4937 
4938 	case 0xc:
4939 	case 0xd:
4940 	case 0xe:		/* coproc ops */
4941 	  break;
4942 	case 0xf:		/* SWI */
4943 	  {
4944 	    struct gdbarch_tdep *tdep;
4945 	    tdep = gdbarch_tdep (gdbarch);
4946 
4947 	    if (tdep->syscall_next_pc != NULL)
4948 	      nextpc = tdep->syscall_next_pc (frame);
4949 
4950 	  }
4951 	  break;
4952 
4953 	default:
4954 	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
4955 	  return (pc);
4956 	}
4957     }
4958 
4959   return nextpc;
4960 }
4961 
4962 /* Determine next PC after current instruction executes.  Will call either
4963    arm_get_next_pc_raw or thumb_get_next_pc_raw.  Error out if an infinite
4964    loop is detected.  */
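/* As a minimal example of the check below: the ARM instruction "b ."
   branches to itself, so arm_get_next_pc_raw returns the same PC and
   single-stepping could never make progress; we report the error instead.  */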
4965 
4966 CORE_ADDR
4967 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
4968 {
4969   CORE_ADDR nextpc;
4970 
4971   if (arm_frame_is_thumb (frame))
4972     {
4973       nextpc = thumb_get_next_pc_raw (frame, pc);
4974       if (nextpc == MAKE_THUMB_ADDR (pc))
4975 	error (_("Infinite loop detected"));
4976     }
4977   else
4978     {
4979       nextpc = arm_get_next_pc_raw (frame, pc);
4980       if (nextpc == pc)
4981 	error (_("Infinite loop detected"));
4982     }
4983 
4984   return nextpc;
4985 }
4986 
4987 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4988    of the appropriate mode (as encoded in the PC value), even if this
4989    differs from what would be expected according to the symbol tables.  */
4990 
4991 void
4992 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4993 				   struct address_space *aspace,
4994 				   CORE_ADDR pc)
4995 {
4996   struct cleanup *old_chain
4997     = make_cleanup_restore_integer (&arm_override_mode);
4998 
4999   arm_override_mode = IS_THUMB_ADDR (pc);
5000   pc = gdbarch_addr_bits_remove (gdbarch, pc);
5001 
5002   insert_single_step_breakpoint (gdbarch, aspace, pc);
5003 
5004   do_cleanups (old_chain);
5005 }
5006 
5007 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5008    instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
5009    is found, attempt to step through it.  A breakpoint is placed at the end of
5010    the sequence.  */
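/* An illustrative (compiler-generated style) sequence:

     1:  ldrex   r1, [r0]
         add     r1, r1, #1
         strex   r2, r1, [r0]
         cmp     r2, #0
         bne     1b

   Stopping and resuming inside such a loop would typically clear the
   exclusive monitor and make the strex fail every time, so instead a
   breakpoint is placed just past the strex, plus one at the destination
   of a single conditional branch found inside the sequence, if any.  */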
5011 
5012 static int
5013 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5014 {
5015   struct gdbarch *gdbarch = get_frame_arch (frame);
5016   struct address_space *aspace = get_frame_address_space (frame);
5017   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5018   CORE_ADDR pc = get_frame_pc (frame);
5019   CORE_ADDR breaks[2] = {-1, -1};
5020   CORE_ADDR loc = pc;
5021   unsigned short insn1, insn2;
5022   int insn_count;
5023   int index;
5024   int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
5025   const int atomic_sequence_length = 16; /* Instruction sequence length.  */
5026   ULONGEST status, itstate;
5027 
5028   /* We currently do not support atomic sequences within an IT block.  */
5029   status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
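  /* ITSTATE lives in two pieces of the CPSR: bits [26:25] hold
     ITSTATE[1:0] and bits [15:10] hold ITSTATE[7:2].  The expression below
     reassembles the 8-bit value so the low bits can be tested.  */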
5030   itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5031   if (itstate & 0x0f)
5032     return 0;
5033 
5034   /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
5035   insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5036   loc += 2;
5037   if (thumb_insn_size (insn1) != 4)
5038     return 0;
5039 
5040   insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5041   loc += 2;
5042   if (!((insn1 & 0xfff0) == 0xe850
5043         || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5044     return 0;
5045 
5046   /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5047      instructions.  */
5048   for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5049     {
5050       insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5051       loc += 2;
5052 
5053       if (thumb_insn_size (insn1) != 4)
5054 	{
5055 	  /* Assume that there is at most one conditional branch in the
5056 	     atomic sequence.  If a conditional branch is found, put a
5057 	     breakpoint in its destination address.  */
5058 	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5059 	    {
5060 	      if (last_breakpoint > 0)
5061 		return 0; /* More than one conditional branch found,
5062 			     fall back to the standard code.  */
5063 
5064 	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5065 	      last_breakpoint++;
5066 	    }
5067 
5068 	  /* We do not support atomic sequences that use any *other*
5069 	     instructions but conditional branches to change the PC.
5070 	     Fall back to standard code to avoid losing control of
5071 	     execution.  */
5072 	  else if (thumb_instruction_changes_pc (insn1))
5073 	    return 0;
5074 	}
5075       else
5076 	{
5077 	  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5078 	  loc += 2;
5079 
5080 	  /* Assume that there is at most one conditional branch in the
5081 	     atomic sequence.  If a conditional branch is found, put a
5082 	     breakpoint in its destination address.  */
5083 	  if ((insn1 & 0xf800) == 0xf000
5084 	      && (insn2 & 0xd000) == 0x8000
5085 	      && (insn1 & 0x0380) != 0x0380)
5086 	    {
5087 	      int sign, j1, j2, imm1, imm2;
5088 	      unsigned int offset;
5089 
5090 	      sign = sbits (insn1, 10, 10);
5091 	      imm1 = bits (insn1, 0, 5);
5092 	      imm2 = bits (insn2, 0, 10);
5093 	      j1 = bit (insn2, 13);
5094 	      j2 = bit (insn2, 11);
5095 
5096 	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5097 	      offset += (imm1 << 12) + (imm2 << 1);
5098 
5099 	      if (last_breakpoint > 0)
5100 		return 0; /* More than one conditional branch found,
5101 			     fall back to the standard code.  */
5102 
5103 	      breaks[1] = loc + offset;
5104 	      last_breakpoint++;
5105 	    }
5106 
5107 	  /* We do not support atomic sequences that use any *other*
5108 	     instructions but conditional branches to change the PC.
5109 	     Fall back to standard code to avoid losing control of
5110 	     execution.  */
5111 	  else if (thumb2_instruction_changes_pc (insn1, insn2))
5112 	    return 0;
5113 
5114 	  /* If we find a strex{,b,h,d}, we're done.  */
5115 	  if ((insn1 & 0xfff0) == 0xe840
5116 	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5117 	    break;
5118 	}
5119     }
5120 
5121   /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
5122   if (insn_count == atomic_sequence_length)
5123     return 0;
5124 
5125   /* Insert a breakpoint right after the end of the atomic sequence.  */
5126   breaks[0] = loc;
5127 
5128   /* Check for duplicated breakpoints, and for a breakpoint (the branch
5129      instruction's destination) placed anywhere within the sequence.  */
5130   if (last_breakpoint
5131       && (breaks[1] == breaks[0]
5132 	  || (breaks[1] >= pc && breaks[1] < loc)))
5133     last_breakpoint = 0;
5134 
5135   /* Insert the breakpoints.  */
5136   for (index = 0; index <= last_breakpoint; index++)
5137     arm_insert_single_step_breakpoint (gdbarch, aspace,
5138 				       MAKE_THUMB_ADDR (breaks[index]));
5139 
5140   return 1;
5141 }
5142 
5143 static int
5144 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5145 {
5146   struct gdbarch *gdbarch = get_frame_arch (frame);
5147   struct address_space *aspace = get_frame_address_space (frame);
5148   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5149   CORE_ADDR pc = get_frame_pc (frame);
5150   CORE_ADDR breaks[2] = {-1, -1};
5151   CORE_ADDR loc = pc;
5152   unsigned int insn;
5153   int insn_count;
5154   int index;
5155   int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
5156   const int atomic_sequence_length = 16; /* Instruction sequence length.  */
5157 
5158   /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5159      Note that we do not currently support conditionally executed atomic
5160      instructions.  */
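  /* The mask/value pair below accepts LDREX, LDREXB, LDREXH and LDREXD
     with the AL condition; bits 22-21, which are excluded from the mask,
     select between those size variants.  */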
5161   insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5162   loc += 4;
5163   if ((insn & 0xff9000f0) != 0xe1900090)
5164     return 0;
5165 
5166   /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5167      instructions.  */
5168   for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5169     {
5170       insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5171       loc += 4;
5172 
5173       /* Assume that there is at most one conditional branch in the atomic
5174          sequence.  If a conditional branch is found, put a breakpoint in
5175          its destination address.  */
5176       if (bits (insn, 24, 27) == 0xa)
5177 	{
5178           if (last_breakpoint > 0)
5179             return 0; /* More than one conditional branch found, fall back
5180                          to the standard single-step code.  */
5181 
5182 	  breaks[1] = BranchDest (loc - 4, insn);
5183 	  last_breakpoint++;
5184         }
5185 
5186       /* We do not support atomic sequences that use any *other* instructions
5187          but conditional branches to change the PC.  Fall back to standard
5188 	 code to avoid losing control of execution.  */
5189       else if (arm_instruction_changes_pc (insn))
5190 	return 0;
5191 
5192       /* If we find a strex{,b,h,d}, we're done.  */
5193       if ((insn & 0xff9000f0) == 0xe1800090)
5194 	break;
5195     }
5196 
5197   /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
5198   if (insn_count == atomic_sequence_length)
5199     return 0;
5200 
5201   /* Insert a breakpoint right after the end of the atomic sequence.  */
5202   breaks[0] = loc;
5203 
5204   /* Check for duplicated breakpoints, and for a breakpoint (the branch
5205      instruction's destination) placed anywhere within the sequence.  */
5206   if (last_breakpoint
5207       && (breaks[1] == breaks[0]
5208 	  || (breaks[1] >= pc && breaks[1] < loc)))
5209     last_breakpoint = 0;
5210 
5211   /* Insert the breakpoints.  */
5212   for (index = 0; index <= last_breakpoint; index++)
5213     arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5214 
5215   return 1;
5216 }
5217 
5218 int
5219 arm_deal_with_atomic_sequence (struct frame_info *frame)
5220 {
5221   if (arm_frame_is_thumb (frame))
5222     return thumb_deal_with_atomic_sequence_raw (frame);
5223   else
5224     return arm_deal_with_atomic_sequence_raw (frame);
5225 }
5226 
5227 /* single_step() is called just before we want to resume the inferior,
5228    if we want to single-step it but there is no hardware or kernel
5229    single-step support.  We find the target of the coming instruction
5230    and breakpoint it.  */
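/* Note that a single breakpoint normally suffices: for a conditional
   branch, arm_get_next_pc evaluates the condition against the current
   flags, so the one address that will actually be reached is known.  */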
5231 
5232 int
5233 arm_software_single_step (struct frame_info *frame)
5234 {
5235   struct gdbarch *gdbarch = get_frame_arch (frame);
5236   struct address_space *aspace = get_frame_address_space (frame);
5237   CORE_ADDR next_pc;
5238 
5239   if (arm_deal_with_atomic_sequence (frame))
5240     return 1;
5241 
5242   next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5243   arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5244 
5245   return 1;
5246 }
5247 
5248 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5249    the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
5250    NULL if an error occurs.  BUF is freed.  */
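/* Worked example (values invented for this note): with OLD_LEN == 4,
   NEW_LEN == 8 and ENDADDR == 0x8010, the existing four bytes
   (0x800c-0x800f) are copied to the end of the new buffer and the four
   bytes at 0x8008-0x800b are read from the target to fill the front.  */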
5251 
5252 static gdb_byte *
5253 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5254 		       int old_len, int new_len)
5255 {
5256   gdb_byte *new_buf;
5257   int bytes_to_read = new_len - old_len;
5258 
5259   new_buf = xmalloc (new_len);
5260   memcpy (new_buf + bytes_to_read, buf, old_len);
5261   xfree (buf);
5262   if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5263     {
5264       xfree (new_buf);
5265       return NULL;
5266     }
5267   return new_buf;
5268 }
5269 
5270 /* An IT block is at most the 2-byte IT instruction followed by
5271    four 4-byte instructions.  The furthest back we must search to
5272    find an IT block that affects the current instruction is thus
5273    2 + 3 * 4 == 14 bytes.  */
5274 #define MAX_IT_BLOCK_PREFIX 14
5275 
5276 /* Use a quick scan if there are more than this many bytes of
5277    code.  */
5278 #define IT_SCAN_THRESHOLD 32
5279 
5280 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5281    A breakpoint in an IT block may not be hit, depending on the
5282    condition flags.  */
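/* Illustrative example:

     it      eq
     addeq   r0, r0, #1     <-- requested breakpoint address
     bx      lr

   The ADDEQ is skipped whenever the Z flag is clear, so a breakpoint on it
   might never be hit; moving the breakpoint back onto the IT instruction
   guarantees that it always triggers.  */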
5283 static CORE_ADDR
5284 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5285 {
5286   gdb_byte *buf;
5287   char map_type;
5288   CORE_ADDR boundary, func_start;
5289   int buf_len;
5290   enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5291   int i, any, last_it, last_it_count;
5292 
5293   /* If we are using BKPT breakpoints, none of this is necessary.  */
5294   if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5295     return bpaddr;
5296 
5297   /* ARM mode does not have this problem.  */
5298   if (!arm_pc_is_thumb (gdbarch, bpaddr))
5299     return bpaddr;
5300 
5301   /* We are setting a breakpoint in Thumb code that could potentially
5302      contain an IT block.  The first step is to find how much Thumb
5303      code there is; we do not need to read outside of known Thumb
5304      sequences.  */
5305   map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5306   if (map_type == 0)
5307     /* Thumb-2 code must have mapping symbols to have a chance.  */
5308     return bpaddr;
5309 
5310   bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5311 
5312   if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5313       && func_start > boundary)
5314     boundary = func_start;
5315 
5316   /* Search for a candidate IT instruction.  We have to do some fancy
5317      footwork to distinguish a real IT instruction from the second
5318      half of a 32-bit instruction, but there is no need for that if
5319      there's no candidate.  */
5320   buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5321   if (buf_len == 0)
5322     /* No room for an IT instruction.  */
5323     return bpaddr;
5324 
5325   buf = xmalloc (buf_len);
5326   if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5327     return bpaddr;
5328   any = 0;
5329   for (i = 0; i < buf_len; i += 2)
5330     {
5331       unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5332       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5333 	{
5334 	  any = 1;
5335 	  break;
5336 	}
5337     }
5338   if (any == 0)
5339     {
5340       xfree (buf);
5341       return bpaddr;
5342     }
5343 
5344   /* OK, the code bytes before this instruction contain at least one
5345      halfword which resembles an IT instruction.  We know that it's
5346      Thumb code, but there are still two possibilities.  Either the
5347      halfword really is an IT instruction, or it is the second half of
5348      a 32-bit Thumb instruction.  The only way we can tell is to
5349      scan forwards from a known instruction boundary.  */
5350   if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5351     {
5352       int definite;
5353 
5354       /* There's a lot of code before this instruction.  Start with an
5355 	 optimistic search; it's easy to recognize halfwords that can
5356 	 not be the start of a 32-bit instruction, and use that to
5357 	 lock on to the instruction boundaries.  */
5358       buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5359       if (buf == NULL)
5360 	return bpaddr;
5361       buf_len = IT_SCAN_THRESHOLD;
5362 
5363       definite = 0;
5364       for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5365 	{
5366 	  unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5367 	  if (thumb_insn_size (inst1) == 2)
5368 	    {
5369 	      definite = 1;
5370 	      break;
5371 	    }
5372 	}
5373 
5374       /* At this point, if DEFINITE, BUF[I] is the first place we
5375 	 are sure that we know the instruction boundaries, and it is far
5376 	 enough from BPADDR that we could not miss an IT instruction
5377 	 affecting BPADDR.  If ! DEFINITE, give up - start from a
5378 	 known boundary.  */
5379       if (! definite)
5380 	{
5381 	  buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5382 				       bpaddr - boundary);
5383 	  if (buf == NULL)
5384 	    return bpaddr;
5385 	  buf_len = bpaddr - boundary;
5386 	  i = 0;
5387 	}
5388     }
5389   else
5390     {
5391       buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5392       if (buf == NULL)
5393 	return bpaddr;
5394       buf_len = bpaddr - boundary;
5395       i = 0;
5396     }
5397 
5398   /* Scan forwards.  Find the last IT instruction before BPADDR.  */
5399   last_it = -1;
5400   last_it_count = 0;
5401   while (i < buf_len)
5402     {
5403       unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5404       last_it_count--;
5405       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5406 	{
5407 	  last_it = i;
5408 	  if (inst1 & 0x0001)
5409 	    last_it_count = 4;
5410 	  else if (inst1 & 0x0002)
5411 	    last_it_count = 3;
5412 	  else if (inst1 & 0x0004)
5413 	    last_it_count = 2;
5414 	  else
5415 	    last_it_count = 1;
5416 	}
5417       i += thumb_insn_size (inst1);
5418     }
5419 
5420   xfree (buf);
5421 
5422   if (last_it == -1)
5423     /* There wasn't really an IT instruction after all.  */
5424     return bpaddr;
5425 
5426   if (last_it_count < 1)
5427     /* It was too far away.  */
5428     return bpaddr;
5429 
5430   /* This really is a trouble spot.  Move the breakpoint to the IT
5431      instruction.  */
5432   return bpaddr - buf_len + last_it;
5433 }
5434 
5435 /* ARM displaced stepping support.
5436 
5437    Generally ARM displaced stepping works as follows:
5438 
5439    1. When an instruction is to be single-stepped, it is first decoded by
5440       arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5441       Depending on the type of instruction, it is then copied to a scratch
5442       location, possibly in a modified form.  The copy_* set of functions
5443       performs such modification, as necessary.  A breakpoint is placed after
5444       the modified instruction in the scratch space to return control to GDB.
5445       Note in particular that instructions which modify the PC will no longer
5446       do so after modification.
5447 
5448    2. The instruction is single-stepped, by setting the PC to the scratch
5449       location address, and resuming.  Control returns to GDB when the
5450       breakpoint is hit.
5451 
5452    3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5453       function used for the current instruction.  This function's job is to
5454       put the CPU/memory state back to what it would have been if the
5455       instruction had been executed unmodified in its original location.  */
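/* A small worked example (addresses invented for this note): to step over
   a "bl foo" at 0x8000, arm_copy_b_bl_blx places an ARM NOP in the scratch
   area and records the condition, link flag and branch offset in the
   closure (step 1).  The inferior single-steps the NOP at the scratch
   address (step 2).  cleanup_branch then writes 0x8004 into LR and the
   branch destination into the PC, just as the original instruction would
   have done at 0x8000 (step 3).  */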
5456 
5457 /* NOP instruction (mov r0, r0).  */
5458 #define ARM_NOP				0xe1a00000
5459 #define THUMB_NOP 0x4600
5460 
5461 /* Helper for register reads for displaced stepping.  In particular, this
5462    returns the PC as it would be seen by the instruction at its original
5463    location.  */
5464 
5465 ULONGEST
5466 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5467 		    int regno)
5468 {
5469   ULONGEST ret;
5470   CORE_ADDR from = dsc->insn_addr;
5471 
5472   if (regno == ARM_PC_REGNUM)
5473     {
5474       /* Compute pipeline offset:
5475 	 - When executing an ARM instruction, PC reads as the address of the
5476 	 current instruction plus 8.
5477 	 - When executing a Thumb instruction, PC reads as the address of the
5478 	 current instruction plus 4.  */
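      /* For example, an ARM instruction at 0x1000 reads the PC as 0x1008,
	 while a Thumb instruction at the same address reads it as 0x1004.  */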
5479 
5480       if (!dsc->is_thumb)
5481 	from += 8;
5482       else
5483 	from += 4;
5484 
5485       if (debug_displaced)
5486 	fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5487 			    (unsigned long) from);
5488       return (ULONGEST) from;
5489     }
5490   else
5491     {
5492       regcache_cooked_read_unsigned (regs, regno, &ret);
5493       if (debug_displaced)
5494 	fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5495 			    regno, (unsigned long) ret);
5496       return ret;
5497     }
5498 }
5499 
5500 static int
5501 displaced_in_arm_mode (struct regcache *regs)
5502 {
5503   ULONGEST ps;
5504   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5505 
5506   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5507 
5508   return (ps & t_bit) == 0;
5509 }
5510 
5511 /* Write to the PC as from a branch instruction.  */
5512 
5513 static void
5514 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5515 		 ULONGEST val)
5516 {
5517   if (!dsc->is_thumb)
5518     /* Note: If bits 0/1 are set, this branch would be unpredictable for
5519        architecture versions < 6.  */
5520     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5521 				    val & ~(ULONGEST) 0x3);
5522   else
5523     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5524 				    val & ~(ULONGEST) 0x1);
5525 }
5526 
5527 /* Write to the PC as from a branch-exchange instruction.  */
5528 
5529 static void
5530 bx_write_pc (struct regcache *regs, ULONGEST val)
5531 {
5532   ULONGEST ps;
5533   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5534 
5535   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5536 
5537   if ((val & 1) == 1)
5538     {
5539       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5540       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5541     }
5542   else if ((val & 2) == 0)
5543     {
5544       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5545       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5546     }
5547   else
5548     {
5549       /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
5550 	  mode, align dest to 4 bytes).  */
5551       warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5552       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5553       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5554     }
5555 }
5556 
5557 /* Write to the PC as if from a load instruction.  */
5558 
5559 static void
5560 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5561 	       ULONGEST val)
5562 {
5563   if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5564     bx_write_pc (regs, val);
5565   else
5566     branch_write_pc (regs, dsc, val);
5567 }
5568 
5569 /* Write to the PC as if from an ALU instruction.  */
5570 
5571 static void
5572 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5573 	      ULONGEST val)
5574 {
5575   if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5576     bx_write_pc (regs, val);
5577   else
5578     branch_write_pc (regs, dsc, val);
5579 }
5580 
5581 /* Helper for writing to registers for displaced stepping.  Writing to the PC
5582    has varying effects depending on the instruction which does the write:
5583    this is controlled by the WRITE_PC argument.  */
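/* As a rough guide to the styles handled below: BRANCH_WRITE_PC keeps the
   current instruction set, BX_WRITE_PC switches state according to bit 0
   of the value, LOAD_WRITE_PC behaves like BX_WRITE_PC when
   DISPLACED_STEPPING_ARCH_VERSION is at least 5, ALU_WRITE_PC does so only
   for ARM-state writes when it is at least 7, and CANNOT_WRITE_PC marks
   instructions that are never expected to write the PC here.  */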
5584 
5585 void
5586 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5587 		     int regno, ULONGEST val, enum pc_write_style write_pc)
5588 {
5589   if (regno == ARM_PC_REGNUM)
5590     {
5591       if (debug_displaced)
5592 	fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5593 			    (unsigned long) val);
5594       switch (write_pc)
5595 	{
5596 	case BRANCH_WRITE_PC:
5597 	  branch_write_pc (regs, dsc, val);
5598 	  break;
5599 
5600 	case BX_WRITE_PC:
5601 	  bx_write_pc (regs, val);
5602   	  break;
5603 
5604 	case LOAD_WRITE_PC:
5605 	  load_write_pc (regs, dsc, val);
5606   	  break;
5607 
5608 	case ALU_WRITE_PC:
5609 	  alu_write_pc (regs, dsc, val);
5610   	  break;
5611 
5612 	case CANNOT_WRITE_PC:
5613 	  warning (_("Instruction wrote to PC in an unexpected way when "
5614 		     "single-stepping"));
5615 	  break;
5616 
5617 	default:
5618 	  internal_error (__FILE__, __LINE__,
5619 			  _("Invalid argument to displaced_write_reg"));
5620 	}
5621 
5622       dsc->wrote_to_pc = 1;
5623     }
5624   else
5625     {
5626       if (debug_displaced)
5627 	fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5628 			    regno, (unsigned long) val);
5629       regcache_cooked_write_unsigned (regs, regno, val);
5630     }
5631 }
5632 
5633 /* This function is used to concisely determine if an instruction INSN
5634    references PC.  Register fields of interest in INSN should have the
5635    corresponding fields of BITMASK set to 0b1111.  The function
5636    returns 1 if any of these fields in INSN reference the PC
5637    (also 0b1111, r15), else it returns 0.  */
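/* For example, for the ARM instruction 0xe59ff004 ("ldr pc, [pc, #4]")
   both register fields covered by the bitmask 0x000ff000 (bits 12-15 and
   16-19) are 0b1111, so insn_references_pc (0xe59ff004, 0x000ff000)
   returns 1.  */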
5638 
5639 static int
5640 insn_references_pc (uint32_t insn, uint32_t bitmask)
5641 {
5642   uint32_t lowbit = 1;
5643 
5644   while (bitmask != 0)
5645     {
5646       uint32_t mask;
5647 
5648       for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5649 	;
5650 
5651       if (!lowbit)
5652 	break;
5653 
5654       mask = lowbit * 0xf;
5655 
5656       if ((insn & mask) == mask)
5657 	return 1;
5658 
5659       bitmask &= ~mask;
5660     }
5661 
5662   return 0;
5663 }
5664 
5665 /* The simplest copy function.  Many instructions have the same effect no
5666    matter what address they are executed at: in those cases, use this.  */
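/* For example, "add r1, r2, r3" neither reads nor writes the PC, so it can
   be executed from the scratch area exactly as written at its original
   address.  */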
5667 
5668 static int
5669 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5670 		     const char *iname, struct displaced_step_closure *dsc)
5671 {
5672   if (debug_displaced)
5673     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5674 			"opcode/class '%s' unmodified\n", (unsigned long) insn,
5675 			iname);
5676 
5677   dsc->modinsn[0] = insn;
5678 
5679   return 0;
5680 }
5681 
5682 static int
5683 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5684 			     uint16_t insn2, const char *iname,
5685 			     struct displaced_step_closure *dsc)
5686 {
5687   if (debug_displaced)
5688     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5689 			"opcode/class '%s' unmodified\n", insn1, insn2,
5690 			iname);
5691 
5692   dsc->modinsn[0] = insn1;
5693   dsc->modinsn[1] = insn2;
5694   dsc->numinsns = 2;
5695 
5696   return 0;
5697 }
5698 
5699 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5700    modification.  */
5701 static int
5702 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5703 			     const char *iname,
5704 			     struct displaced_step_closure *dsc)
5705 {
5706   if (debug_displaced)
5707     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5708 			"opcode/class '%s' unmodified\n", insn,
5709 			iname);
5710 
5711   dsc->modinsn[0] = insn;
5712 
5713   return 0;
5714 }
5715 
5716 /* Preload instructions with immediate offset.  */
5717 
5718 static void
5719 cleanup_preload (struct gdbarch *gdbarch,
5720 		 struct regcache *regs, struct displaced_step_closure *dsc)
5721 {
5722   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5723   if (!dsc->u.preload.immed)
5724     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5725 }
5726 
5727 static void
5728 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5729 		 struct displaced_step_closure *dsc, unsigned int rn)
5730 {
5731   ULONGEST rn_val;
5732   /* Preload instructions:
5733 
5734      {pli/pld} [rn, #+/-imm]
5735      ->
5736      {pli/pld} [r0, #+/-imm].  */
5737 
5738   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5739   rn_val = displaced_read_reg (regs, dsc, rn);
5740   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5741   dsc->u.preload.immed = 1;
5742 
5743   dsc->cleanup = &cleanup_preload;
5744 }
5745 
5746 static int
5747 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5748 		  struct displaced_step_closure *dsc)
5749 {
5750   unsigned int rn = bits (insn, 16, 19);
5751 
5752   if (!insn_references_pc (insn, 0x000f0000ul))
5753     return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5754 
5755   if (debug_displaced)
5756     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5757 			(unsigned long) insn);
5758 
5759   dsc->modinsn[0] = insn & 0xfff0ffff;
5760 
5761   install_preload (gdbarch, regs, dsc, rn);
5762 
5763   return 0;
5764 }
5765 
5766 static int
5767 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5768 		     struct regcache *regs, struct displaced_step_closure *dsc)
5769 {
5770   unsigned int rn = bits (insn1, 0, 3);
5771   unsigned int u_bit = bit (insn1, 7);
5772   int imm12 = bits (insn2, 0, 11);
5773   ULONGEST pc_val;
5774 
5775   if (rn != ARM_PC_REGNUM)
5776     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5777 
5778   /* The PC is only allowed to be used in PLI (immediate, literal)
5779      Encoding T3 and PLD (literal) Encoding T1.  */
5780   if (debug_displaced)
5781     fprintf_unfiltered (gdb_stdlog,
5782 			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5783 			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5784 			imm12);
5785 
5786   if (!u_bit)
5787     imm12 = -1 * imm12;
5788 
5789   /* Rewrite instruction {pli/pld} PC imm12 into:
5790      Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5791 
5792      {pli/pld} [r0, r1]
5793 
5794      Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */
5795 
5796   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5797   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5798 
5799   pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5800 
5801   displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5802   displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5803   dsc->u.preload.immed = 0;
5804 
5805   /* {pli/pld} [r0, r1] */
5806   dsc->modinsn[0] = insn1 & 0xfff0;
5807   dsc->modinsn[1] = 0xf001;
5808   dsc->numinsns = 2;
5809 
5810   dsc->cleanup = &cleanup_preload;
5811   return 0;
5812 }
5813 
5814 /* Preload instructions with register offset.  */
5815 
5816 static void
5817 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5818 		    struct displaced_step_closure *dsc, unsigned int rn,
5819 		    unsigned int rm)
5820 {
5821   ULONGEST rn_val, rm_val;
5822 
5823   /* Preload register-offset instructions:
5824 
5825      {pli/pld} [rn, rm {, shift}]
5826      ->
5827      {pli/pld} [r0, r1 {, shift}].  */
5828 
5829   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5830   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5831   rn_val = displaced_read_reg (regs, dsc, rn);
5832   rm_val = displaced_read_reg (regs, dsc, rm);
5833   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5834   displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5835   dsc->u.preload.immed = 0;
5836 
5837   dsc->cleanup = &cleanup_preload;
5838 }
5839 
5840 static int
5841 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5842 		      struct regcache *regs,
5843 		      struct displaced_step_closure *dsc)
5844 {
5845   unsigned int rn = bits (insn, 16, 19);
5846   unsigned int rm = bits (insn, 0, 3);
5847 
5848 
5849   if (!insn_references_pc (insn, 0x000f000ful))
5850     return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5851 
5852   if (debug_displaced)
5853     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5854 			(unsigned long) insn);
5855 
5856   dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5857 
5858   install_preload_reg (gdbarch, regs, dsc, rn, rm);
5859   return 0;
5860 }
5861 
5862 /* Copy/cleanup coprocessor load and store instructions.  */
5863 
5864 static void
5865 cleanup_copro_load_store (struct gdbarch *gdbarch,
5866 			  struct regcache *regs,
5867 			  struct displaced_step_closure *dsc)
5868 {
5869   ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5870 
5871   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5872 
5873   if (dsc->u.ldst.writeback)
5874     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5875 }
5876 
5877 static void
5878 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5879 			  struct displaced_step_closure *dsc,
5880 			  int writeback, unsigned int rn)
5881 {
5882   ULONGEST rn_val;
5883 
5884   /* Coprocessor load/store instructions:
5885 
5886      {stc/stc2} [<Rn>, #+/-imm]  (and other immediate addressing modes)
5887      ->
5888      {stc/stc2} [r0, #+/-imm].
5889 
5890      ldc/ldc2 are handled identically.  */
5891 
5892   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5893   rn_val = displaced_read_reg (regs, dsc, rn);
5894   /* PC should be 4-byte aligned.  */
5895   rn_val = rn_val & 0xfffffffc;
5896   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5897 
5898   dsc->u.ldst.writeback = writeback;
5899   dsc->u.ldst.rn = rn;
5900 
5901   dsc->cleanup = &cleanup_copro_load_store;
5902 }
5903 
5904 static int
5905 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5906 			   struct regcache *regs,
5907 			   struct displaced_step_closure *dsc)
5908 {
5909   unsigned int rn = bits (insn, 16, 19);
5910 
5911   if (!insn_references_pc (insn, 0x000f0000ul))
5912     return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5913 
5914   if (debug_displaced)
5915     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5916 			"load/store insn %.8lx\n", (unsigned long) insn);
5917 
5918   dsc->modinsn[0] = insn & 0xfff0ffff;
5919 
5920   install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5921 
5922   return 0;
5923 }
5924 
5925 static int
5926 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5927 			      uint16_t insn2, struct regcache *regs,
5928 			      struct displaced_step_closure *dsc)
5929 {
5930   unsigned int rn = bits (insn1, 0, 3);
5931 
5932   if (rn != ARM_PC_REGNUM)
5933     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5934 					"copro load/store", dsc);
5935 
5936   if (debug_displaced)
5937     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5938 			"load/store insn %.4x%.4x\n", insn1, insn2);
5939 
5940   dsc->modinsn[0] = insn1 & 0xfff0;
5941   dsc->modinsn[1] = insn2;
5942   dsc->numinsns = 2;
5943 
5944   /* This function is called for copying the LDC/LDC2/VLDR instructions,
5945      which don't support writeback, so pass 0.  */
5946   install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5947 
5948   return 0;
5949 }
5950 
5951 /* Clean up branch instructions (actually perform the branch, by setting
5952    PC).  */
5953 
5954 static void
5955 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5956 		struct displaced_step_closure *dsc)
5957 {
5958   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5959   int branch_taken = condition_true (dsc->u.branch.cond, status);
5960   enum pc_write_style write_pc = dsc->u.branch.exchange
5961 				 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5962 
5963   if (!branch_taken)
5964     return;
5965 
5966   if (dsc->u.branch.link)
5967     {
5968       /* The value of LR should be the address of the next insn.  In order
5969        not to confuse logic handling a later `bx lr' insn, if the current
5970        insn mode is Thumb, bit 0 of the LR value should be set to 1.  */
5971       ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5972 
5973       if (dsc->is_thumb)
5974 	next_insn_addr |= 0x1;
5975 
5976       displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5977 			   CANNOT_WRITE_PC);
5978     }
5979 
5980   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5981 }
5982 
5983 /* Copy B/BL/BLX instructions with immediate destinations.  */
5984 
5985 static void
5986 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5987 		  struct displaced_step_closure *dsc,
5988 		  unsigned int cond, int exchange, int link, long offset)
5989 {
5990   /* Implement "BL<cond> <label>" as:
5991 
5992      Preparation: cond <- instruction condition
5993      Insn: mov r0, r0  (nop)
5994      Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5995 
5996      B<cond> similar, but don't set r14 in cleanup.  */
5997 
5998   dsc->u.branch.cond = cond;
5999   dsc->u.branch.link = link;
6000   dsc->u.branch.exchange = exchange;
6001 
6002   dsc->u.branch.dest = dsc->insn_addr;
6003   if (link && exchange)
6004     /* For BLX, offset is computed from the Align (PC, 4).  */
6005     dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6006 
6007   if (dsc->is_thumb)
6008     dsc->u.branch.dest += 4 + offset;
6009   else
6010     dsc->u.branch.dest += 8 + offset;
6011 
6012   dsc->cleanup = &cleanup_branch;
6013 }
6014 static int
6015 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6016 		   struct regcache *regs, struct displaced_step_closure *dsc)
6017 {
6018   unsigned int cond = bits (insn, 28, 31);
6019   int exchange = (cond == 0xf);
6020   int link = exchange || bit (insn, 24);
6021   long offset;
6022 
6023   if (debug_displaced)
6024     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6025 			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6026 			(unsigned long) insn);
6027   if (exchange)
6028     /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
6029        then arrange the switch into Thumb mode.  */
6030     offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6031   else
6032     offset = bits (insn, 0, 23) << 2;
6033 
6034   if (bit (offset, 25))
6035     offset = offset | ~0x3ffffff;
6036 
6037   dsc->modinsn[0] = ARM_NOP;
6038 
6039   install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6040   return 0;
6041 }
6042 
6043 static int
6044 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6045 		      uint16_t insn2, struct regcache *regs,
6046 		      struct displaced_step_closure *dsc)
6047 {
6048   int link = bit (insn2, 14);
6049   int exchange = link && !bit (insn2, 12);
6050   int cond = INST_AL;
6051   long offset = 0;
6052   int j1 = bit (insn2, 13);
6053   int j2 = bit (insn2, 11);
6054   int s = sbits (insn1, 10, 10);
6055   int i1 = !(j1 ^ bit (insn1, 10));
6056   int i2 = !(j2 ^ bit (insn1, 10));
6057 
6058   if (!link && !exchange) /* B */
6059     {
6060       offset = (bits (insn2, 0, 10) << 1);
6061       if (bit (insn2, 12)) /* Encoding T4 */
6062 	{
6063 	  offset |= (bits (insn1, 0, 9) << 12)
6064 	    | (i2 << 22)
6065 	    | (i1 << 23)
6066 	    | (s << 24);
6067 	  cond = INST_AL;
6068 	}
6069       else /* Encoding T3 */
6070 	{
6071 	  offset |= (bits (insn1, 0, 5) << 12)
6072 	    | (j1 << 18)
6073 	    | (j2 << 19)
6074 	    | (s << 20);
6075 	  cond = bits (insn1, 6, 9);
6076 	}
6077     }
6078   else
6079     {
6080       offset = (bits (insn1, 0, 9) << 12);
6081       offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6082       offset |= exchange ?
6083 	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6084     }
6085 
6086   if (debug_displaced)
6087     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6088 			"%.4x %.4x with offset %.8lx\n",
6089 			link ? (exchange) ? "blx" : "bl" : "b",
6090 			insn1, insn2, offset);
6091 
6092   dsc->modinsn[0] = THUMB_NOP;
6093 
6094   install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6095   return 0;
6096 }
6097 
6098 /* Copy B Thumb instructions.  */
6099 static int
6100 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6101 	      struct displaced_step_closure *dsc)
6102 {
6103   unsigned int cond = 0;
6104   int offset = 0;
6105   unsigned short bit_12_15 = bits (insn, 12, 15);
6106   CORE_ADDR from = dsc->insn_addr;
6107 
6108   if (bit_12_15 == 0xd)
6109     {
6110       /* offset = SignExtend (imm8:0, 32) */
6111       offset = sbits ((insn << 1), 0, 8);
6112       cond = bits (insn, 8, 11);
6113     }
6114   else if (bit_12_15 == 0xe) /* Encoding T2 */
6115     {
6116       offset = sbits ((insn << 1), 0, 11);
6117       cond = INST_AL;
6118     }
6119 
6120   if (debug_displaced)
6121     fprintf_unfiltered (gdb_stdlog,
6122 			"displaced: copying b immediate insn %.4x "
6123 			"with offset %d\n", insn, offset);
6124 
6125   dsc->u.branch.cond = cond;
6126   dsc->u.branch.link = 0;
6127   dsc->u.branch.exchange = 0;
6128   dsc->u.branch.dest = from + 4 + offset;
6129 
6130   dsc->modinsn[0] = THUMB_NOP;
6131 
6132   dsc->cleanup = &cleanup_branch;
6133 
6134   return 0;
6135 }
6136 
6137 /* Copy BX/BLX with register-specified destinations.  */
6138 
6139 static void
6140 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6141 		    struct displaced_step_closure *dsc, int link,
6142 		    unsigned int cond, unsigned int rm)
6143 {
6144   /* Implement "{BX,BLX}<cond> <reg>" as:
6145 
6146      Preparation: cond <- instruction condition
6147      Insn: mov r0, r0 (nop)
6148      Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6149 
6150      Don't set r14 in cleanup for BX.  */
6151 
6152   dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6153 
6154   dsc->u.branch.cond = cond;
6155   dsc->u.branch.link = link;
6156 
6157   dsc->u.branch.exchange = 1;
6158 
6159   dsc->cleanup = &cleanup_branch;
6160 }
6161 
6162 static int
6163 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6164 		     struct regcache *regs, struct displaced_step_closure *dsc)
6165 {
6166   unsigned int cond = bits (insn, 28, 31);
6167   /* BX:  x12xxx1x
6168      BLX: x12xxx3x.  */
6169   int link = bit (insn, 5);
6170   unsigned int rm = bits (insn, 0, 3);
6171 
6172   if (debug_displaced)
6173     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6174 			(unsigned long) insn);
6175 
6176   dsc->modinsn[0] = ARM_NOP;
6177 
6178   install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6179   return 0;
6180 }
6181 
6182 static int
6183 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6184 		       struct regcache *regs,
6185 		       struct displaced_step_closure *dsc)
6186 {
6187   int link = bit (insn, 7);
6188   unsigned int rm = bits (insn, 3, 6);
6189 
6190   if (debug_displaced)
6191     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6192 			(unsigned short) insn);
6193 
6194   dsc->modinsn[0] = THUMB_NOP;
6195 
6196   install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6197 
6198   return 0;
6199 }
6200 
6201 
6202 /* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */
6203 
6204 static void
6205 cleanup_alu_imm (struct gdbarch *gdbarch,
6206 		 struct regcache *regs, struct displaced_step_closure *dsc)
6207 {
6208   ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6209   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6210   displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6211   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6212 }
6213 
6214 static int
6215 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6216 		  struct displaced_step_closure *dsc)
6217 {
6218   unsigned int rn = bits (insn, 16, 19);
6219   unsigned int rd = bits (insn, 12, 15);
6220   unsigned int op = bits (insn, 21, 24);
6221   int is_mov = (op == 0xd);
6222   ULONGEST rd_val, rn_val;
6223 
6224   if (!insn_references_pc (insn, 0x000ff000ul))
6225     return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6226 
6227   if (debug_displaced)
6228     fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6229 			"%.8lx\n", is_mov ? "move" : "ALU",
6230 			(unsigned long) insn);
6231 
6232   /* Instruction is of form:
6233 
6234      <op><cond> rd, [rn,] #imm
6235 
6236      Rewrite as:
6237 
6238      Preparation: tmp1, tmp2 <- r0, r1;
6239 		  r0, r1 <- rd, rn
6240      Insn: <op><cond> r0, r1, #imm
6241      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6242   */
6243 
6244   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6245   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6246   rn_val = displaced_read_reg (regs, dsc, rn);
6247   rd_val = displaced_read_reg (regs, dsc, rd);
6248   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6249   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6250   dsc->rd = rd;
6251 
6252   if (is_mov)
6253     dsc->modinsn[0] = insn & 0xfff00fff;
6254   else
6255     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6256 
6257   dsc->cleanup = &cleanup_alu_imm;
6258 
6259   return 0;
6260 }
6261 
6262 static int
6263 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6264 		     uint16_t insn2, struct regcache *regs,
6265 		     struct displaced_step_closure *dsc)
6266 {
6267   unsigned int op = bits (insn1, 5, 8);
6268   unsigned int rn, rm, rd;
6269   ULONGEST rd_val, rn_val;
6270 
6271   rn = bits (insn1, 0, 3); /* Rn */
6272   rm = bits (insn2, 0, 3); /* Rm */
6273   rd = bits (insn2, 8, 11); /* Rd */
6274 
6275   /* This routine is only called for instruction MOV.  */
6276   gdb_assert (op == 0x2 && rn == 0xf);
6277 
6278   if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6279     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6280 
6281   if (debug_displaced)
6282     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6283 			"ALU", insn1, insn2);
6284 
6285   /* Instruction is of form:
6286 
6287      <op><cond> rd, [rn,] #imm
6288 
6289      Rewrite as:
6290 
6291      Preparation: tmp1, tmp2 <- r0, r1;
6292 		  r0, r1 <- rd, rn
6293      Insn: <op><cond> r0, r1, #imm
6294      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6295   */
6296 
6297   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6298   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6299   rn_val = displaced_read_reg (regs, dsc, rn);
6300   rd_val = displaced_read_reg (regs, dsc, rd);
6301   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6302   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6303   dsc->rd = rd;
6304 
6305   dsc->modinsn[0] = insn1;
6306   dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6307   dsc->numinsns = 2;
6308 
6309   dsc->cleanup = &cleanup_alu_imm;
6310 
6311   return 0;
6312 }
6313 
6314 /* Copy/cleanup arithmetic/logic insns with register RHS.  */
6315 
6316 static void
6317 cleanup_alu_reg (struct gdbarch *gdbarch,
6318 		 struct regcache *regs, struct displaced_step_closure *dsc)
6319 {
6320   ULONGEST rd_val;
6321   int i;
6322 
6323   rd_val = displaced_read_reg (regs, dsc, 0);
6324 
6325   for (i = 0; i < 3; i++)
6326     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6327 
6328   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6329 }
6330 
6331 static void
6332 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6333 		 struct displaced_step_closure *dsc,
6334 		 unsigned int rd, unsigned int rn, unsigned int rm)
6335 {
6336   ULONGEST rd_val, rn_val, rm_val;
6337 
6338   /* Instruction is of form:
6339 
6340      <op><cond> rd, [rn,] rm [, <shift>]
6341 
6342      Rewrite as:
6343 
6344      Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6345 		  r0, r1, r2 <- rd, rn, rm
6346      Insn: <op><cond> r0, r1, r2 [, <shift>]
6347      Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6348   */
6349 
6350   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6351   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6352   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6353   rd_val = displaced_read_reg (regs, dsc, rd);
6354   rn_val = displaced_read_reg (regs, dsc, rn);
6355   rm_val = displaced_read_reg (regs, dsc, rm);
6356   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6357   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6358   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6359   dsc->rd = rd;
6360 
6361   dsc->cleanup = &cleanup_alu_reg;
6362 }
6363 
6364 static int
6365 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6366 		  struct displaced_step_closure *dsc)
6367 {
6368   unsigned int op = bits (insn, 21, 24);
6369   int is_mov = (op == 0xd);
6370 
6371   if (!insn_references_pc (insn, 0x000ff00ful))
6372     return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6373 
6374   if (debug_displaced)
6375     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6376 			is_mov ? "move" : "ALU", (unsigned long) insn);
6377 
6378   if (is_mov)
6379     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6380   else
6381     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6382 
6383   install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6384 		   bits (insn, 0, 3));
6385   return 0;
6386 }
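
/* Worked example (added for illustration, not from the upstream comments;
   encoding follows the usual ARM data-processing layout):

     add r1, pc, r2   is 0xe08f1002.

   Masking with 0xfff00ff0 and or-ing in 0x10002 yields 0xe0810002, i.e.
   add r0, r1, r2.  install_alu_reg first copies the old r1 into r0, the
   PC value (from + 8) into r1 and rewrites r2 with its own value, so the
   out-of-line add computes the same result the original would have;
   cleanup_alu_reg then moves r0 into r1 and restores r0-r2 from
   dsc->tmp[].  */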
6387 
6388 static int
6389 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6390 		    struct regcache *regs,
6391 		    struct displaced_step_closure *dsc)
6392 {
6393   unsigned rn, rm, rd;
6394 
6395   rd = bits (insn, 3, 6);
6396   rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6397   rm = 2;
6398 
6399   if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6400     return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6401 
6402   if (debug_displaced)
6403     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6404 			"ALU", (unsigned short) insn);
6405 
6406   dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6407 
6408   install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6409 
6410   return 0;
6411 }
6412 
6413 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
6414 
6415 static void
6416 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6417 			 struct regcache *regs,
6418 			 struct displaced_step_closure *dsc)
6419 {
6420   ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6421   int i;
6422 
6423   for (i = 0; i < 4; i++)
6424     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6425 
6426   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6427 }
6428 
6429 static void
6430 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6431 			 struct displaced_step_closure *dsc,
6432 			 unsigned int rd, unsigned int rn, unsigned int rm,
6433 			 unsigned rs)
6434 {
6435   int i;
6436   ULONGEST rd_val, rn_val, rm_val, rs_val;
6437 
6438   /* Instruction is of form:
6439 
6440      <op><cond> rd, [rn,] rm, <shift> rs
6441 
6442      Rewrite as:
6443 
6444      Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6445 		  r0, r1, r2, r3 <- rd, rn, rm, rs
6446      Insn: <op><cond> r0, r1, r2, <shift> r3
6447      Cleanup: tmp5 <- r0
6448 	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6449 	      rd <- tmp5
6450   */
6451 
6452   for (i = 0; i < 4; i++)
6453     dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6454 
6455   rd_val = displaced_read_reg (regs, dsc, rd);
6456   rn_val = displaced_read_reg (regs, dsc, rn);
6457   rm_val = displaced_read_reg (regs, dsc, rm);
6458   rs_val = displaced_read_reg (regs, dsc, rs);
6459   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6460   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6461   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6462   displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6463   dsc->rd = rd;
6464   dsc->cleanup = &cleanup_alu_shifted_reg;
6465 }
6466 
6467 static int
6468 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6469 			  struct regcache *regs,
6470 			  struct displaced_step_closure *dsc)
6471 {
6472   unsigned int op = bits (insn, 21, 24);
6473   int is_mov = (op == 0xd);
6474   unsigned int rd, rn, rm, rs;
6475 
6476   if (!insn_references_pc (insn, 0x000fff0ful))
6477     return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6478 
6479   if (debug_displaced)
6480     fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6481 			"%.8lx\n", is_mov ? "move" : "ALU",
6482 			(unsigned long) insn);
6483 
6484   rn = bits (insn, 16, 19);
6485   rm = bits (insn, 0, 3);
6486   rs = bits (insn, 8, 11);
6487   rd = bits (insn, 12, 15);
6488 
6489   if (is_mov)
6490     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6491   else
6492     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6493 
6494   install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6495 
6496   return 0;
6497 }
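
/* Worked example (illustrative only, not part of the original comments):

     add r4, r5, pc, lsl r6   is 0xe085461f.

   (insn & 0xfff000f0) | 0x10302 gives 0xe0810312, i.e.
   add r0, r1, r2, lsl r3.  The preparation above maps r4, r5, pc, r6
   onto r0-r3 (PC reading as from + 8), and cleanup_alu_shifted_reg
   copies the r0 result back into r4 before restoring r0-r3.  */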
6498 
6499 /* Clean up load instructions.  */
6500 
6501 static void
6502 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6503 	      struct displaced_step_closure *dsc)
6504 {
6505   ULONGEST rt_val, rt_val2 = 0, rn_val;
6506 
6507   rt_val = displaced_read_reg (regs, dsc, 0);
6508   if (dsc->u.ldst.xfersize == 8)
6509     rt_val2 = displaced_read_reg (regs, dsc, 1);
6510   rn_val = displaced_read_reg (regs, dsc, 2);
6511 
6512   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6513   if (dsc->u.ldst.xfersize > 4)
6514     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6515   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6516   if (!dsc->u.ldst.immed)
6517     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6518 
6519   /* Handle register writeback.  */
6520   if (dsc->u.ldst.writeback)
6521     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6522   /* Put result in right place.  */
6523   displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6524   if (dsc->u.ldst.xfersize == 8)
6525     displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6526 }
6527 
6528 /* Clean up store instructions.  */
6529 
6530 static void
6531 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6532 	       struct displaced_step_closure *dsc)
6533 {
6534   ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6535 
6536   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6537   if (dsc->u.ldst.xfersize > 4)
6538     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6539   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6540   if (!dsc->u.ldst.immed)
6541     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6542   if (!dsc->u.ldst.restore_r4)
6543     displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6544 
6545   /* Writeback.  */
6546   if (dsc->u.ldst.writeback)
6547     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6548 }
6549 
6550 /* Copy "extra" load/store instructions.  These are halfword/doubleword
6551    transfers, which have a different encoding to byte/word transfers.  */
6552 
6553 static int
6554 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6555 		      struct regcache *regs, struct displaced_step_closure *dsc)
6556 {
6557   unsigned int op1 = bits (insn, 20, 24);
6558   unsigned int op2 = bits (insn, 5, 6);
6559   unsigned int rt = bits (insn, 12, 15);
6560   unsigned int rn = bits (insn, 16, 19);
6561   unsigned int rm = bits (insn, 0, 3);
6562   char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6563   char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6564   int immed = (op1 & 0x4) != 0;
6565   int opcode;
6566   ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6567 
6568   if (!insn_references_pc (insn, 0x000ff00ful))
6569     return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6570 
6571   if (debug_displaced)
6572     fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6573 			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
6574 			(unsigned long) insn);
6575 
6576   opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6577 
6578   if (opcode < 0)
6579     internal_error (__FILE__, __LINE__,
6580 		    _("copy_extra_ld_st: instruction decode error"));
6581 
6582   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6583   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6584   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6585   if (!immed)
6586     dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6587 
6588   rt_val = displaced_read_reg (regs, dsc, rt);
6589   if (bytesize[opcode] == 8)
6590     rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6591   rn_val = displaced_read_reg (regs, dsc, rn);
6592   if (!immed)
6593     rm_val = displaced_read_reg (regs, dsc, rm);
6594 
6595   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6596   if (bytesize[opcode] == 8)
6597     displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6598   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6599   if (!immed)
6600     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6601 
6602   dsc->rd = rt;
6603   dsc->u.ldst.xfersize = bytesize[opcode];
6604   dsc->u.ldst.rn = rn;
6605   dsc->u.ldst.immed = immed;
6606   dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6607   dsc->u.ldst.restore_r4 = 0;
6608 
6609   if (immed)
6610     /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6611 	->
6612        {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
6613     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6614   else
6615     /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6616 	->
6617        {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
6618     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6619 
6620   dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6621 
6622   return 0;
6623 }
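
/* Worked example (added for clarity; encoding per the ARM ARM):

     ldrh r1, [pc, #4]   is 0xe1df10b4.

   Here op1 = 0x1d and op2 = 0x1, so opcode = 3, which the tables above
   classify as a 2-byte load.  The immediate-form rewrite
   (insn & 0xfff00fff) | 0x20000 produces 0xe1d200b4, i.e.
   ldrh r0, [r2, #4], with r2 preloaded with the PC value (from + 8);
   cleanup_load then copies r0 back into r1.  */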
6624 
6625 /* Copy byte/half word/word loads and stores.  */
6626 
6627 static void
6628 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6629 		    struct displaced_step_closure *dsc, int load,
6630 		    int immed, int writeback, int size, int usermode,
6631 		    int rt, int rm, int rn)
6632 {
6633   ULONGEST rt_val, rn_val, rm_val = 0;
6634 
6635   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6636   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6637   if (!immed)
6638     dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6639   if (!load)
6640     dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6641 
6642   rt_val = displaced_read_reg (regs, dsc, rt);
6643   rn_val = displaced_read_reg (regs, dsc, rn);
6644   if (!immed)
6645     rm_val = displaced_read_reg (regs, dsc, rm);
6646 
6647   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6648   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6649   if (!immed)
6650     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6651   dsc->rd = rt;
6652   dsc->u.ldst.xfersize = size;
6653   dsc->u.ldst.rn = rn;
6654   dsc->u.ldst.immed = immed;
6655   dsc->u.ldst.writeback = writeback;
6656 
6657   /* To write PC we can do:
6658 
6659      Before this sequence of instructions:
6660      r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6661      r2 is the Rn value got from displaced_read_reg.
6662 
6663      Insn1: push {pc} Write address of STR instruction + offset on stack
6664      Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
6665      Insn3: sub r4, r4, pc   r4 = addr(Insn1) + offset - pc
6666                                 = addr(Insn1) + offset - addr(Insn3) - 8
6667                                 = offset - 16
6668      Insn4: add r4, r4, #8   r4 = offset - 8
6669      Insn5: add r0, r0, r4   r0 = from + 8 + offset - 8
6670                                 = from + offset
6671      Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6672 
6673      Otherwise we don't know what value to write for PC, since the offset is
6674      architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
6675      of this can be found in Section "Saving from r15" in
6676      http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6677 
6678   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6679 }
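
/* Numeric illustration of the sequence above (not from the upstream
   comment): with the original instruction at from = 0x8000 on a core
   that stores PC + 8, r0 starts as 0x8008, Insn1-Insn4 leave
   r4 = offset - 8 = 0, and Insn5 gives r0 = 0x8008 = from + offset,
   exactly the value the original str pc would have stored.  On a core
   that stores PC + 12, r4 ends up as 4 and r0 as 0x800c instead.  */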
6680 
6681 
6682 static int
6683 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6684 			  uint16_t insn2, struct regcache *regs,
6685 			  struct displaced_step_closure *dsc, int size)
6686 {
6687   unsigned int u_bit = bit (insn1, 7);
6688   unsigned int rt = bits (insn2, 12, 15);
6689   int imm12 = bits (insn2, 0, 11);
6690   ULONGEST pc_val;
6691 
6692   if (debug_displaced)
6693     fprintf_unfiltered (gdb_stdlog,
6694 			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6695 			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6696 			imm12);
6697 
6698   if (!u_bit)
6699     imm12 = -1 * imm12;
6700 
6701   /* Rewrite instruction LDR Rt imm12 into:
6702 
6703      Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6704 
6705      LDR R0, R2, R3,
6706 
6707      Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2].  */
6708 
6709 
6710   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6711   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6712   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6713 
6714   pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6715 
6716   pc_val = pc_val & 0xfffffffc;
6717 
6718   displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6719   displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6720 
6721   dsc->rd = rt;
6722 
6723   dsc->u.ldst.xfersize = size;
6724   dsc->u.ldst.immed = 0;
6725   dsc->u.ldst.writeback = 0;
6726   dsc->u.ldst.restore_r4 = 0;
6727 
6728   /* LDR R0, R2, R3 */
6729   dsc->modinsn[0] = 0xf852;
6730   dsc->modinsn[1] = 0x3;
6731   dsc->numinsns = 2;
6732 
6733   dsc->cleanup = &cleanup_load;
6734 
6735   return 0;
6736 }
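
/* Worked example (illustrative; assumes the Thumb PC read offset of 4
   applied by displaced_read_reg): for ldr.w r5, [pc, #-20]
   (0xf85f 0x5014) at from = 0x8000, pc_val = (0x8000 + 4) & ~3 = 0x8004,
   so r2 = 0x8004 and r3 = -20.  The out-of-line ldr r0, [r2, r3] reads
   from 0x7ff0, the same literal address the original would have used,
   and cleanup_load moves r0 into r5 before restoring r0, r2 and r3.  */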
6737 
6738 static int
6739 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6740 			  uint16_t insn2, struct regcache *regs,
6741 			  struct displaced_step_closure *dsc,
6742 			  int writeback, int immed)
6743 {
6744   unsigned int rt = bits (insn2, 12, 15);
6745   unsigned int rn = bits (insn1, 0, 3);
6746   unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
6747   /* In LDR (register), there is also a register Rm, which is not allowed to
6748      be PC, so we don't have to check it.  */
6749 
6750   if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6751     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6752 					dsc);
6753 
6754   if (debug_displaced)
6755     fprintf_unfiltered (gdb_stdlog,
6756 			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6757 			 rt, rn, insn1, insn2);
6758 
6759   install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6760 		      0, rt, rm, rn);
6761 
6762   dsc->u.ldst.restore_r4 = 0;
6763 
6764   if (immed)
6765     /* ldr[b]<cond> rt, [rn, #imm], etc.
6766        ->
6767        ldr[b]<cond> r0, [r2, #imm].  */
6768     {
6769       dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6770       dsc->modinsn[1] = insn2 & 0x0fff;
6771     }
6772   else
6773     /* ldr[b]<cond> rt, [rn, rm], etc.
6774        ->
6775        ldr[b]<cond> r0, [r2, r3].  */
6776     {
6777       dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6778       dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6779     }
6780 
6781   dsc->numinsns = 2;
6782 
6783   return 0;
6784 }
6785 
6786 
6787 static int
6788 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6789 			    struct regcache *regs,
6790 			    struct displaced_step_closure *dsc,
6791 			    int load, int size, int usermode)
6792 {
6793   int immed = !bit (insn, 25);
6794   int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6795   unsigned int rt = bits (insn, 12, 15);
6796   unsigned int rn = bits (insn, 16, 19);
6797   unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */
6798 
6799   if (!insn_references_pc (insn, 0x000ff00ful))
6800     return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6801 
6802   if (debug_displaced)
6803     fprintf_unfiltered (gdb_stdlog,
6804 			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6805 			load ? (size == 1 ? "ldrb" : "ldr")
6806 			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6807 			rt, rn,
6808 			(unsigned long) insn);
6809 
6810   install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6811 		      usermode, rt, rm, rn);
6812 
6813   if (load || rt != ARM_PC_REGNUM)
6814     {
6815       dsc->u.ldst.restore_r4 = 0;
6816 
6817       if (immed)
6818 	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6819 	   ->
6820 	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
6821 	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6822       else
6823 	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6824 	   ->
6825 	   {ldr,str}[b]<cond> r0, [r2, r3].  */
6826 	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6827     }
6828   else
6829     {
6830       /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
6831       dsc->u.ldst.restore_r4 = 1;
6832       dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
6833       dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
6834       dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
6835       dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
6836       dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */
6837 
6838       /* As above.  */
6839       if (immed)
6840 	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6841       else
6842 	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6843 
6844       dsc->numinsns = 6;
6845     }
6846 
6847   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6848 
6849   return 0;
6850 }
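
/* Worked example for the common (non-PC-store) path (added for
   illustration): ldr r2, [pc, #8] is 0xe59f2008; the immediate-form
   rewrite yields 0xe5920008, i.e. ldr r0, [r2, #8], with r2 preloaded
   with from + 8.  cleanup_load then writes the loaded value back into
   r2 and restores the scratch registers.  Only a store with rt == pc
   needs the six-instruction scratch sequence built above.  */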
6851 
6852 /* Cleanup LDM instructions with fully-populated register list.  This is an
6853    unfortunate corner case: it's impossible to implement correctly by modifying
6854    the instruction.  The issue is as follows: we have an instruction,
6855 
6856    ldm rN, {r0-r15}
6857 
6858    which we must rewrite to avoid loading PC.  A possible solution would be to
6859    do the load in two halves, something like (with suitable cleanup
6860    afterwards):
6861 
6862    mov r8, rN
6863    ldm[id][ab] r8!, {r0-r7}
6864    str r7, <temp>
6865    ldm[id][ab] r8, {r7-r14}
6866    <bkpt>
6867 
6868    but at present there's no suitable place for <temp>, since the scratch space
6869    is overwritten before the cleanup routine is called.  For now, we simply
6870    emulate the instruction.  */
6871 
6872 static void
6873 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6874 			struct displaced_step_closure *dsc)
6875 {
6876   int inc = dsc->u.block.increment;
6877   int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6878   int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6879   uint32_t regmask = dsc->u.block.regmask;
6880   int regno = inc ? 0 : 15;
6881   CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6882   int exception_return = dsc->u.block.load && dsc->u.block.user
6883 			 && (regmask & 0x8000) != 0;
6884   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6885   int do_transfer = condition_true (dsc->u.block.cond, status);
6886   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6887 
6888   if (!do_transfer)
6889     return;
6890 
6891   /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6892      sensible we can do here.  Complain loudly.  */
6893   if (exception_return)
6894     error (_("Cannot single-step exception return"));
6895 
6896   /* We don't handle any stores here for now.  */
6897   gdb_assert (dsc->u.block.load != 0);
6898 
6899   if (debug_displaced)
6900     fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6901 			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6902 			dsc->u.block.increment ? "inc" : "dec",
6903 			dsc->u.block.before ? "before" : "after");
6904 
6905   while (regmask)
6906     {
6907       uint32_t memword;
6908 
6909       if (inc)
6910 	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6911 	  regno++;
6912       else
6913 	while (regno >= 0 && (regmask & (1 << regno)) == 0)
6914 	  regno--;
6915 
6916       xfer_addr += bump_before;
6917 
6918       memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6919       displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6920 
6921       xfer_addr += bump_after;
6922 
6923       regmask &= ~(1 << regno);
6924     }
6925 
6926   if (dsc->u.block.writeback)
6927     displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6928 			 CANNOT_WRITE_PC);
6929 }
6930 
6931 /* Clean up an STM which included the PC in the register list.  */
6932 
6933 static void
6934 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6935 			struct displaced_step_closure *dsc)
6936 {
6937   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6938   int store_executed = condition_true (dsc->u.block.cond, status);
6939   CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
6940   CORE_ADDR stm_insn_addr;
6941   uint32_t pc_val;
6942   long offset;
6943   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6944 
6945   /* If condition code fails, there's nothing else to do.  */
6946   if (!store_executed)
6947     return;
6948 
6949   if (dsc->u.block.increment)
6950     {
6951       pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6952 
6953       if (dsc->u.block.before)
6954 	 pc_stored_at += 4;
6955     }
6956   else
6957     {
6958       pc_stored_at = dsc->u.block.xfer_addr;
6959 
6960       if (dsc->u.block.before)
6961 	 pc_stored_at -= 4;
6962     }
6963 
6964   pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6965   stm_insn_addr = dsc->scratch_base;
6966   offset = pc_val - stm_insn_addr;
6967 
6968   if (debug_displaced)
6969     fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
6970 			"STM instruction\n", offset);
6971 
6972   /* Rewrite the stored PC to the proper value for the non-displaced original
6973      instruction.  */
6974   write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6975 				 dsc->insn_addr + offset);
6976 }
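
/* Numeric illustration (not from the upstream comment): if the original
   STM sat at from = 0x8000 and was copied to scratch_base = 0x1000, a
   core that stores PC + 8 writes 0x1008 into the PC slot.  The code
   above reads that back, computes offset = 0x1008 - 0x1000 = 8, and
   patches the slot to 0x8008, which is what the non-displaced
   instruction would have stored.  */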
6977 
6978 /* Clean up an LDM which includes the PC in the register list.  We clumped all
6979    the registers in the transferred list into a contiguous range r0...rX (to
6980    avoid loading PC directly and losing control of the debugged program), so we
6981    must undo that here.  */
6982 
6983 static void
6984 cleanup_block_load_pc (struct gdbarch *gdbarch,
6985 		       struct regcache *regs,
6986 		       struct displaced_step_closure *dsc)
6987 {
6988   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6989   int load_executed = condition_true (dsc->u.block.cond, status);
6990   unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6991   unsigned int regs_loaded = bitcount (mask);
6992   unsigned int num_to_shuffle = regs_loaded, clobbered;
6993 
6994   /* The method employed here will fail if the register list is fully populated
6995      (we need to avoid loading PC directly).  */
6996   gdb_assert (num_to_shuffle < 16);
6997 
6998   if (!load_executed)
6999     return;
7000 
7001   clobbered = (1 << num_to_shuffle) - 1;
7002 
7003   while (num_to_shuffle > 0)
7004     {
7005       if ((mask & (1 << write_reg)) != 0)
7006 	{
7007 	  unsigned int read_reg = num_to_shuffle - 1;
7008 
7009 	  if (read_reg != write_reg)
7010 	    {
7011 	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7012 	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7013 	      if (debug_displaced)
7014 		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7015 				    "loaded register r%d to r%d\n"), read_reg,
7016 				    write_reg);
7017 	    }
7018 	  else if (debug_displaced)
7019 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7020 				"r%d already in the right place\n"),
7021 				write_reg);
7022 
7023 	  clobbered &= ~(1 << write_reg);
7024 
7025 	  num_to_shuffle--;
7026 	}
7027 
7028       write_reg--;
7029     }
7030 
7031   /* Restore any registers we scribbled over.  */
7032   for (write_reg = 0; clobbered != 0; write_reg++)
7033     {
7034       if ((clobbered & (1 << write_reg)) != 0)
7035 	{
7036 	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7037 			       CANNOT_WRITE_PC);
7038 	  if (debug_displaced)
7039 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7040 				"clobbered register r%d\n"), write_reg);
7041 	  clobbered &= ~(1 << write_reg);
7042 	}
7043     }
7044 
7045   /* Perform register writeback manually.  */
7046   if (dsc->u.block.writeback)
7047     {
7048       ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7049 
7050       if (dsc->u.block.increment)
7051 	new_rn_val += regs_loaded * 4;
7052       else
7053 	new_rn_val -= regs_loaded * 4;
7054 
7055       displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7056 			   CANNOT_WRITE_PC);
7057     }
7058 }
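
/* Worked example (illustrative): for an original ldm r6, {r1, r4, pc}
   the copy routine loads {r0, r1, r2} instead.  The loop above then
   walks downwards: pc <- r2, r4 <- r1, r1 <- r0, leaving
   clobbered = {r0, r2}, so r0 and r2 are restored from dsc->tmp[]
   while r1 keeps the value that was really destined for it.  */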
7059 
7060 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7061    in user-level code (in particular exception return, ldm rn, {...pc}^).  */
7062 
7063 static int
7064 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7065 		     struct regcache *regs,
7066 		     struct displaced_step_closure *dsc)
7067 {
7068   int load = bit (insn, 20);
7069   int user = bit (insn, 22);
7070   int increment = bit (insn, 23);
7071   int before = bit (insn, 24);
7072   int writeback = bit (insn, 21);
7073   int rn = bits (insn, 16, 19);
7074 
7075   /* Block transfers which don't mention PC can be run directly
7076      out-of-line.  */
7077   if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7078     return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7079 
7080   if (rn == ARM_PC_REGNUM)
7081     {
7082       warning (_("displaced: Unpredictable LDM or STM with "
7083 		 "base register r15"));
7084       return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7085     }
7086 
7087   if (debug_displaced)
7088     fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7089 			"%.8lx\n", (unsigned long) insn);
7090 
7091   dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7092   dsc->u.block.rn = rn;
7093 
7094   dsc->u.block.load = load;
7095   dsc->u.block.user = user;
7096   dsc->u.block.increment = increment;
7097   dsc->u.block.before = before;
7098   dsc->u.block.writeback = writeback;
7099   dsc->u.block.cond = bits (insn, 28, 31);
7100 
7101   dsc->u.block.regmask = insn & 0xffff;
7102 
7103   if (load)
7104     {
7105       if ((insn & 0xffff) == 0xffff)
7106 	{
7107 	  /* LDM with a fully-populated register list.  This case is
7108 	     particularly tricky.  Implement for now by fully emulating the
7109 	     instruction (which might not behave perfectly in all cases, but
7110 	     these instructions should be rare enough for that not to matter
7111 	     too much).  */
7112 	  dsc->modinsn[0] = ARM_NOP;
7113 
7114 	  dsc->cleanup = &cleanup_block_load_all;
7115 	}
7116       else
7117 	{
7118 	  /* LDM of a list of registers which includes PC.  Implement by
7119 	     rewriting the list of registers to be transferred into a
7120 	     contiguous chunk r0...rX before doing the transfer, then shuffling
7121 	     registers into the correct places in the cleanup routine.  */
7122 	  unsigned int regmask = insn & 0xffff;
7123 	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7124 	  unsigned int to = 0, from = 0, i, new_rn;
7125 
7126 	  for (i = 0; i < num_in_list; i++)
7127 	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7128 
7129 	  /* Writeback makes things complicated.  We need to avoid clobbering
7130 	     the base register with one of the registers in our modified
7131 	     register list, but just using a different register can't work in
7132 	     all cases, e.g.:
7133 
7134 	       ldm r14!, {r0-r13,pc}
7135 
7136 	     which would need to be rewritten as:
7137 
7138 	       ldm rN!, {r0-r14}
7139 
7140 	     but that can't work, because there's no free register for N.
7141 
7142 	     Solve this by turning off the writeback bit, and emulating
7143 	     writeback manually in the cleanup routine.  */
7144 
7145 	  if (writeback)
7146 	    insn &= ~(1 << 21);
7147 
7148 	  new_regmask = (1 << num_in_list) - 1;
7149 
7150 	  if (debug_displaced)
7151 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7152 				"{..., pc}: original reg list %.4x, modified "
7153 				"list %.4x\n"), rn, writeback ? "!" : "",
7154 				(int) insn & 0xffff, new_regmask);
7155 
7156 	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7157 
7158 	  dsc->cleanup = &cleanup_block_load_pc;
7159 	}
7160     }
7161   else
7162     {
7163       /* STM of a list of registers which includes PC.  Run the instruction
7164 	 as-is, but out of line: this will store the wrong value for the PC,
7165 	 so we must manually fix up the memory in the cleanup routine.
7166 	 Doing things this way has the advantage that we can auto-detect
7167 	 the offset of the PC write (which is architecture-dependent) in
7168 	 the cleanup routine.  */
7169       dsc->modinsn[0] = insn;
7170 
7171       dsc->cleanup = &cleanup_block_store_pc;
7172     }
7173 
7174   return 0;
7175 }
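
/* Worked example (added for clarity; encoding per the ARM ARM):

     ldmia r6!, {r2, r5, pc}   is 0xe8b68024.

   PC is in the register list, so the code above saves r0-r2, clears the
   writeback bit and emits ldmia r6, {r0, r1, r2} (0xe8960007).
   cleanup_block_load_pc then shuffles r2 -> pc, r1 -> r5, r0 -> r2 and
   performs the writeback (r6 += 12) manually.  */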
7176 
7177 static int
7178 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7179 			struct regcache *regs,
7180 			struct displaced_step_closure *dsc)
7181 {
7182   int rn = bits (insn1, 0, 3);
7183   int load = bit (insn1, 4);
7184   int writeback = bit (insn1, 5);
7185 
7186   /* Block transfers which don't mention PC can be run directly
7187      out-of-line.  */
7188   if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7189     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7190 
7191   if (rn == ARM_PC_REGNUM)
7192     {
7193       warning (_("displaced: Unpredictable LDM or STM with "
7194 		 "base register r15"));
7195       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7196 					  "unpredictable ldm/stm", dsc);
7197     }
7198 
7199   if (debug_displaced)
7200     fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7201 			"%.4x%.4x\n", insn1, insn2);
7202 
7203   /* Clear bit 13, since it should always be zero.  */
7204   dsc->u.block.regmask = (insn2 & 0xdfff);
7205   dsc->u.block.rn = rn;
7206 
7207   dsc->u.block.load = load;
7208   dsc->u.block.user = 0;
7209   dsc->u.block.increment = bit (insn1, 7);
7210   dsc->u.block.before = bit (insn1, 8);
7211   dsc->u.block.writeback = writeback;
7212   dsc->u.block.cond = INST_AL;
7213   dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7214 
7215   if (load)
7216     {
7217       if (dsc->u.block.regmask == 0xffff)
7218 	{
7219 	  /* This branch can never be taken, since bit 13 was cleared above.  */
7220 	  gdb_assert (0);
7221 	}
7222       else
7223 	{
7224 	  unsigned int regmask = dsc->u.block.regmask;
7225 	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7226 	  unsigned int to = 0, from = 0, i, new_rn;
7227 
7228 	  for (i = 0; i < num_in_list; i++)
7229 	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7230 
7231 	  if (writeback)
7232 	    insn1 &= ~(1 << 5);
7233 
7234 	  new_regmask = (1 << num_in_list) - 1;
7235 
7236 	  if (debug_displaced)
7237 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7238 				"{..., pc}: original reg list %.4x, modified "
7239 				"list %.4x\n"), rn, writeback ? "!" : "",
7240 				(int) dsc->u.block.regmask, new_regmask);
7241 
7242 	  dsc->modinsn[0] = insn1;
7243 	  dsc->modinsn[1] = (new_regmask & 0xffff);
7244 	  dsc->numinsns = 2;
7245 
7246 	  dsc->cleanup = &cleanup_block_load_pc;
7247 	}
7248     }
7249   else
7250     {
7251       dsc->modinsn[0] = insn1;
7252       dsc->modinsn[1] = insn2;
7253       dsc->numinsns = 2;
7254       dsc->cleanup = &cleanup_block_store_pc;
7255     }
7256   return 0;
7257 }
7258 
7259 /* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
7260    for Linux, where some SVC instructions must be treated specially.  */
7261 
7262 static void
7263 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7264 	     struct displaced_step_closure *dsc)
7265 {
7266   CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7267 
7268   if (debug_displaced)
7269     fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7270 			"%.8lx\n", (unsigned long) resume_addr);
7271 
7272   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7273 }
7274 
7275 
7276 /* Common copy routine for svc instruction.  */
7277 
7278 static int
7279 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7280 	     struct displaced_step_closure *dsc)
7281 {
7282   /* Preparation: none.
7283      Insn: unmodified svc.
7284      Cleanup: pc <- insn_addr + insn_size.  */
7285 
7286   /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7287      instruction.  */
7288   dsc->wrote_to_pc = 1;
7289 
7290   /* Allow OS-specific code to override SVC handling.  */
7291   if (dsc->u.svc.copy_svc_os)
7292     return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7293   else
7294     {
7295       dsc->cleanup = &cleanup_svc;
7296       return 0;
7297     }
7298 }
7299 
7300 static int
7301 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7302 	      struct regcache *regs, struct displaced_step_closure *dsc)
7303 {
7304 
7305   if (debug_displaced)
7306     fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7307 			(unsigned long) insn);
7308 
7309   dsc->modinsn[0] = insn;
7310 
7311   return install_svc (gdbarch, regs, dsc);
7312 }
7313 
7314 static int
7315 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7316 		struct regcache *regs, struct displaced_step_closure *dsc)
7317 {
7318 
7319   if (debug_displaced)
7320     fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7321 			insn);
7322 
7323   dsc->modinsn[0] = insn;
7324 
7325   return install_svc (gdbarch, regs, dsc);
7326 }
7327 
7328 /* Copy undefined instructions.  */
7329 
7330 static int
7331 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7332 		struct displaced_step_closure *dsc)
7333 {
7334   if (debug_displaced)
7335     fprintf_unfiltered (gdb_stdlog,
7336 			"displaced: copying undefined insn %.8lx\n",
7337 			(unsigned long) insn);
7338 
7339   dsc->modinsn[0] = insn;
7340 
7341   return 0;
7342 }
7343 
7344 static int
7345 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7346                        struct displaced_step_closure *dsc)
7347 {
7348 
7349   if (debug_displaced)
7350     fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7351                        "%.4x %.4x\n", (unsigned short) insn1,
7352                        (unsigned short) insn2);
7353 
7354   dsc->modinsn[0] = insn1;
7355   dsc->modinsn[1] = insn2;
7356   dsc->numinsns = 2;
7357 
7358   return 0;
7359 }
7360 
7361 /* Copy unpredictable instructions.  */
7362 
7363 static int
7364 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7365 		 struct displaced_step_closure *dsc)
7366 {
7367   if (debug_displaced)
7368     fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7369 			"%.8lx\n", (unsigned long) insn);
7370 
7371   dsc->modinsn[0] = insn;
7372 
7373   return 0;
7374 }
7375 
7376 /* The decode_* functions are instruction decoding helpers.  They mostly follow
7377    the presentation in the ARM ARM.  */
7378 
7379 static int
7380 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7381 			      struct regcache *regs,
7382 			      struct displaced_step_closure *dsc)
7383 {
7384   unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7385   unsigned int rn = bits (insn, 16, 19);
7386 
7387   if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7388     return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7389   else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7390     return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7391   else if ((op1 & 0x60) == 0x20)
7392     return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7393   else if ((op1 & 0x71) == 0x40)
7394     return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7395 				dsc);
7396   else if ((op1 & 0x77) == 0x41)
7397     return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7398   else if ((op1 & 0x77) == 0x45)
7399     return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
7400   else if ((op1 & 0x77) == 0x51)
7401     {
7402       if (rn != 0xf)
7403 	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
7404       else
7405 	return arm_copy_unpred (gdbarch, insn, dsc);
7406     }
7407   else if ((op1 & 0x77) == 0x55)
7408     return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
7409   else if (op1 == 0x57)
7410     switch (op2)
7411       {
7412       case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7413       case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7414       case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7415       case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7416       default: return arm_copy_unpred (gdbarch, insn, dsc);
7417       }
7418   else if ((op1 & 0x63) == 0x43)
7419     return arm_copy_unpred (gdbarch, insn, dsc);
7420   else if ((op2 & 0x1) == 0x0)
7421     switch (op1 & ~0x80)
7422       {
7423       case 0x61:
7424 	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7425       case 0x65:
7426 	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
7427       case 0x71: case 0x75:
7428         /* pld/pldw reg.  */
7429 	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7430       case 0x63: case 0x67: case 0x73: case 0x77:
7431 	return arm_copy_unpred (gdbarch, insn, dsc);
7432       default:
7433 	return arm_copy_undef (gdbarch, insn, dsc);
7434       }
7435   else
7436     return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
7437 }
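
/* Routing example (illustrative): pld [r1, #32] is 0xf5d1f020, giving
   op1 = 0x5d and op2 = 0x2.  None of the earlier arms match, and
   (op1 & 0x77) == 0x55, so the instruction is handled by
   arm_copy_preload.  */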
7438 
7439 static int
7440 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7441 			  struct regcache *regs,
7442 			  struct displaced_step_closure *dsc)
7443 {
7444   if (bit (insn, 27) == 0)
7445     return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7446   /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
7447   else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7448     {
7449     case 0x0: case 0x2:
7450       return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7451 
7452     case 0x1: case 0x3:
7453       return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7454 
7455     case 0x4: case 0x5: case 0x6: case 0x7:
7456       return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7457 
7458     case 0x8:
7459       switch ((insn & 0xe00000) >> 21)
7460 	{
7461 	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7462 	  /* stc/stc2.  */
7463 	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7464 
7465 	case 0x2:
7466 	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7467 
7468 	default:
7469 	  return arm_copy_undef (gdbarch, insn, dsc);
7470 	}
7471 
7472     case 0x9:
7473       {
7474 	 int rn_f = (bits (insn, 16, 19) == 0xf);
7475 	switch ((insn & 0xe00000) >> 21)
7476 	  {
7477 	  case 0x1: case 0x3:
7478 	    /* ldc/ldc2 imm (undefined for rn == pc).  */
7479 	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7480 			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7481 
7482 	  case 0x2:
7483 	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7484 
7485 	  case 0x4: case 0x5: case 0x6: case 0x7:
7486 	    /* ldc/ldc2 lit (undefined for rn != pc).  */
7487 	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7488 			: arm_copy_undef (gdbarch, insn, dsc);
7489 
7490 	  default:
7491 	    return arm_copy_undef (gdbarch, insn, dsc);
7492 	  }
7493       }
7494 
7495     case 0xa:
7496       return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7497 
7498     case 0xb:
7499       if (bits (insn, 16, 19) == 0xf)
7500         /* ldc/ldc2 lit.  */
7501 	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7502       else
7503 	return arm_copy_undef (gdbarch, insn, dsc);
7504 
7505     case 0xc:
7506       if (bit (insn, 4))
7507 	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7508       else
7509 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7510 
7511     case 0xd:
7512       if (bit (insn, 4))
7513 	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7514       else
7515 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7516 
7517     default:
7518       return arm_copy_undef (gdbarch, insn, dsc);
7519     }
7520 }
7521 
7522 /* Decode miscellaneous instructions in dp/misc encoding space.  */
7523 
7524 static int
7525 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7526 			  struct regcache *regs,
7527 			  struct displaced_step_closure *dsc)
7528 {
7529   unsigned int op2 = bits (insn, 4, 6);
7530   unsigned int op = bits (insn, 21, 22);
7531   unsigned int op1 = bits (insn, 16, 19);
7532 
7533   switch (op2)
7534     {
7535     case 0x0:
7536       return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7537 
7538     case 0x1:
7539       if (op == 0x1)  /* bx.  */
7540 	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7541       else if (op == 0x3)
7542 	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7543       else
7544 	return arm_copy_undef (gdbarch, insn, dsc);
7545 
7546     case 0x2:
7547       if (op == 0x1)
7548         /* Not really supported.  */
7549 	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7550       else
7551 	return arm_copy_undef (gdbarch, insn, dsc);
7552 
7553     case 0x3:
7554       if (op == 0x1)
7555 	return arm_copy_bx_blx_reg (gdbarch, insn,
7556 				regs, dsc);  /* blx register.  */
7557       else
7558 	return arm_copy_undef (gdbarch, insn, dsc);
7559 
7560     case 0x5:
7561       return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7562 
7563     case 0x7:
7564       if (op == 0x1)
7565 	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7566       else if (op == 0x3)
7567         /* Not really supported.  */
7568 	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7569 
7570     default:
7571       return arm_copy_undef (gdbarch, insn, dsc);
7572     }
7573 }
7574 
7575 static int
7576 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7577 		    struct regcache *regs,
7578 		    struct displaced_step_closure *dsc)
7579 {
7580   if (bit (insn, 25))
7581     switch (bits (insn, 20, 24))
7582       {
7583       case 0x10:
7584 	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7585 
7586       case 0x14:
7587 	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7588 
7589       case 0x12: case 0x16:
7590 	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7591 
7592       default:
7593 	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7594       }
7595   else
7596     {
7597       uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7598 
7599       if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7600 	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7601       else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7602 	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7603       else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7604 	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7605       else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7606 	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7607       else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7608 	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7609       else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7610 	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7611       else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7612 	/* 2nd arg means "unprivileged".  */
7613 	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7614 				     dsc);
7615     }
7616 
7617   /* Should be unreachable.  */
7618   return 1;
7619 }
7620 
7621 static int
7622 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7623 			     struct regcache *regs,
7624 			     struct displaced_step_closure *dsc)
7625 {
7626   int a = bit (insn, 25), b = bit (insn, 4);
7627   uint32_t op1 = bits (insn, 20, 24);
7628   int rn_f = bits (insn, 16, 19) == 0xf;
7629 
7630   if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7631       || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7632     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7633   else if ((!a && (op1 & 0x17) == 0x02)
7634 	    || (a && (op1 & 0x17) == 0x02 && !b))
7635     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7636   else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7637 	    || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7638     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7639   else if ((!a && (op1 & 0x17) == 0x03)
7640 	   || (a && (op1 & 0x17) == 0x03 && !b))
7641     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7642   else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7643 	    || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7644     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7645   else if ((!a && (op1 & 0x17) == 0x06)
7646 	   || (a && (op1 & 0x17) == 0x06 && !b))
7647     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7648   else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7649 	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7650     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7651   else if ((!a && (op1 & 0x17) == 0x07)
7652 	   || (a && (op1 & 0x17) == 0x07 && !b))
7653     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7654 
7655   /* Should be unreachable.  */
7656   return 1;
7657 }
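
/* Decode examples (illustrative): ldrb r3, [r1], #1 (0xe4d13001) has
   a = 0 and op1 = 0x0d, so it reaches the (op1 & 0x05) == 0x05 arm and
   is copied as a byte load with usermode = 0.  The unprivileged form
   ldrbt r3, [r1], #1 (0xe4f13001) has op1 = 0x0f, matching
   (op1 & 0x17) == 0x07, so it is copied with usermode = 1.  */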
7658 
7659 static int
7660 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7661 		  struct displaced_step_closure *dsc)
7662 {
7663   switch (bits (insn, 20, 24))
7664     {
7665     case 0x00: case 0x01: case 0x02: case 0x03:
7666       return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7667 
7668     case 0x04: case 0x05: case 0x06: case 0x07:
7669       return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7670 
7671     case 0x08: case 0x09: case 0x0a: case 0x0b:
7672     case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7673       return arm_copy_unmodified (gdbarch, insn,
7674 			      "decode/pack/unpack/saturate/reverse", dsc);
7675 
7676     case 0x18:
7677       if (bits (insn, 5, 7) == 0)  /* op2.  */
7678 	 {
7679 	  if (bits (insn, 12, 15) == 0xf)
7680 	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7681 	  else
7682 	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7683 	}
7684       else
7685 	 return arm_copy_undef (gdbarch, insn, dsc);
7686 
7687     case 0x1a: case 0x1b:
7688       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
7689 	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7690       else
7691 	return arm_copy_undef (gdbarch, insn, dsc);
7692 
7693     case 0x1c: case 0x1d:
7694       if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
7695 	 {
7696 	  if (bits (insn, 0, 3) == 0xf)
7697 	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7698 	  else
7699 	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7700 	}
7701       else
7702 	return arm_copy_undef (gdbarch, insn, dsc);
7703 
7704     case 0x1e: case 0x1f:
7705       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
7706 	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7707       else
7708 	return arm_copy_undef (gdbarch, insn, dsc);
7709     }
7710 
7711   /* Should be unreachable.  */
7712   return 1;
7713 }
7714 
7715 static int
7716 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7717 			struct regcache *regs,
7718 			struct displaced_step_closure *dsc)
7719 {
7720   if (bit (insn, 25))
7721     return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7722   else
7723     return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7724 }
7725 
7726 static int
7727 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7728 			  struct regcache *regs,
7729 			  struct displaced_step_closure *dsc)
7730 {
7731   unsigned int opcode = bits (insn, 20, 24);
7732 
7733   switch (opcode)
7734     {
7735     case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
7736       return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7737 
7738     case 0x08: case 0x0a: case 0x0c: case 0x0e:
7739     case 0x12: case 0x16:
7740       return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7741 
7742     case 0x09: case 0x0b: case 0x0d: case 0x0f:
7743     case 0x13: case 0x17:
7744       return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7745 
7746     case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
7747     case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
7748       /* Note: no writeback for these instructions.  Bit 25 will always be
7749 	 zero though (via caller), so the following works OK.  */
7750       return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7751     }
7752 
7753   /* Should be unreachable.  */
7754   return 1;
7755 }
7756 
7757 /* Decode shifted register instructions.  */
7758 
7759 static int
7760 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7761 			    uint16_t insn2,  struct regcache *regs,
7762 			    struct displaced_step_closure *dsc)
7763 {
7764   /* PC is only allowed to be used in the MOV instruction.  */
7765 
7766   unsigned int op = bits (insn1, 5, 8);
7767   unsigned int rn = bits (insn1, 0, 3);
7768 
7769   if (op == 0x2 && rn == 0xf) /* MOV */
7770     return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7771   else
7772     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7773 					"dp (shift reg)", dsc);
7774 }
7775 
7776 
7777 /* Decode extension register load/store.  Exactly the same as
7778    arm_decode_ext_reg_ld_st.  */
7779 
7780 static int
7781 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7782 			     uint16_t insn2,  struct regcache *regs,
7783 			     struct displaced_step_closure *dsc)
7784 {
7785   unsigned int opcode = bits (insn1, 4, 8);
7786 
7787   switch (opcode)
7788     {
7789     case 0x04: case 0x05:
7790       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7791 					  "vfp/neon vmov", dsc);
7792 
7793     case 0x08: case 0x0c: /* 01x00 */
7794     case 0x0a: case 0x0e: /* 01x10 */
7795     case 0x12: case 0x16: /* 10x10 */
7796       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7797 					  "vfp/neon vstm/vpush", dsc);
7798 
7799     case 0x09: case 0x0d: /* 01x01 */
7800     case 0x0b: case 0x0f: /* 01x11 */
7801     case 0x13: case 0x17: /* 10x11 */
7802       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7803 					  "vfp/neon vldm/vpop", dsc);
7804 
7805     case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
7806       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7807 					  "vstr", dsc);
7808     case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
7809       return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7810     }
7811 
7812   /* Should be unreachable.  */
7813   return 1;
7814 }
7815 
7816 static int
7817 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7818 		      struct regcache *regs, struct displaced_step_closure *dsc)
7819 {
7820   unsigned int op1 = bits (insn, 20, 25);
7821   int op = bit (insn, 4);
7822   unsigned int coproc = bits (insn, 8, 11);
7823   unsigned int rn = bits (insn, 16, 19);
7824 
7825   if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7826     return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7827   else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7828 	   && (coproc & 0xe) != 0xa)
7829     /* stc/stc2.  */
7830     return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7831   else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7832 	   && (coproc & 0xe) != 0xa)
7833     /* ldc/ldc2 imm/lit.  */
7834     return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7835   else if ((op1 & 0x3e) == 0x00)
7836     return arm_copy_undef (gdbarch, insn, dsc);
7837   else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7838     return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7839   else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7840     return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7841   else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7842     return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7843   else if ((op1 & 0x30) == 0x20 && !op)
7844     {
7845       if ((coproc & 0xe) == 0xa)
7846 	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7847       else
7848 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7849     }
7850   else if ((op1 & 0x30) == 0x20 && op)
7851     return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7852   else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7853     return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7854   else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7855     return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7856   else if ((op1 & 0x30) == 0x30)
7857     return arm_copy_svc (gdbarch, insn, regs, dsc);
7858   else
7859     return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
7860 }
7861 
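/* Decode 32-bit Thumb coprocessor, Advanced SIMD and extension register
   load/store instructions, dispatching to the matching copy routine.  */
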
7862 static int
7863 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7864 			 uint16_t insn2, struct regcache *regs,
7865 			 struct displaced_step_closure *dsc)
7866 {
7867   unsigned int coproc = bits (insn2, 8, 11);
7868   unsigned int op1 = bits (insn1, 4, 9);
7869   unsigned int bit_5_8 = bits (insn1, 5, 8);
7870   unsigned int bit_9 = bit (insn1, 9);
7871   unsigned int bit_4 = bit (insn1, 4);
7872   unsigned int rn = bits (insn1, 0, 3);
7873 
7874   if (bit_9 == 0)
7875     {
7876       if (bit_5_8 == 2)
7877 	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7878 					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7879 					    dsc);
7880       else if (bit_5_8 == 0) /* UNDEFINED.  */
7881 	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7882       else
7883 	{
7884 	   /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
7885 	  if ((coproc & 0xe) == 0xa)
7886 	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7887 						dsc);
7888 	  else /* coproc is not 101x.  */
7889 	    {
7890 	      if (bit_4 == 0) /* STC/STC2.  */
7891 		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7892 						    "stc/stc2", dsc);
7893 	      else /* LDC/LDC2 {literal, immediate}.  */
7894 		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7895 						     regs, dsc);
7896 	    }
7897 	}
7898     }
7899   else
7900     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7901 
7902   return 0;
7903 }
7904 
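/* Helper for copying a PC-relative address calculation (ADR): load the
   current PC value into Rd so that the rewritten ADD/SUB instruction
   operates on it.  */
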
7905 static void
7906 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7907 		     struct displaced_step_closure *dsc, int rd)
7908 {
7909   /* ADR Rd, #imm
7910 
7911      Rewrite as:
7912 
7913      Preparation: Rd <- PC
7914      Insn: ADD Rd, #imm
7915      Cleanup: Null.
7916   */
7917 
7918   /* Rd <- PC */
7919   int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7920   displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7921 }
7922 
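/* Copy a 16-bit Thumb ADR instruction by rewriting it as an ADD immediate
   on Rd, with Rd preloaded with the PC value.  */
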
7923 static int
7924 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7925 			      struct displaced_step_closure *dsc,
7926 			      int rd, unsigned int imm)
7927 {
7928 
7929   /* Encoding T2: ADDS Rd, #imm */
7930   dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7931 
7932   install_pc_relative (gdbarch, regs, dsc, rd);
7933 
7934   return 0;
7935 }
7936 
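/* Decode a 16-bit Thumb PC-relative address calculation (ADR) and copy it
   for displaced stepping.  */
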
7937 static int
7938 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7939 				struct regcache *regs,
7940 				struct displaced_step_closure *dsc)
7941 {
7942   unsigned int rd = bits (insn, 8, 10);
7943   unsigned int imm8 = bits (insn, 0, 7);
7944 
7945   if (debug_displaced)
7946     fprintf_unfiltered (gdb_stdlog,
7947 			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
7948 			rd, imm8, insn);
7949 
7950   return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7951 }
7952 
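/* Copy a 32-bit Thumb ADR instruction by rewriting it as an ADD or SUB
   immediate on Rd, with Rd preloaded with the PC value.  */
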
7953 static int
7954 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7955 			      uint16_t insn2, struct regcache *regs,
7956 			      struct displaced_step_closure *dsc)
7957 {
7958   unsigned int rd = bits (insn2, 8, 11);
7959   /* Since the immediate has the same encoding in ADR, ADD and SUB, we
7960      simply extract the raw immediate encoding rather than computing the
7961      immediate value.  When generating the ADD or SUB instruction, the
7962      immediate can then be ORed directly into the encoding.  */
7963   unsigned int imm_3_8 = insn2 & 0x70ff;
7964   unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */
7965 
7966   if (debug_displaced)
7967     fprintf_unfiltered (gdb_stdlog,
7968 			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7969 			rd, imm_i, imm_3_8, insn1, insn2);
7970 
7971   if (bit (insn1, 7)) /* Encoding T2 */
7972     {
7973       /* Encoding T3: SUB Rd, Rd, #imm */
7974       dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7975       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7976     }
7977   else /* Encoding T3 */
7978     {
7979       /* Encoding T3: ADD Rd, Rd, #imm */
7980       dsc->modinsn[0] = (0xf100 | rd | imm_i);
7981       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7982     }
7983   dsc->numinsns = 2;
7984 
7985   install_pc_relative (gdbarch, regs, dsc, rd);
7986 
7987   return 0;
7988 }
7989 
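/* Copy a 16-bit Thumb LDR (literal) instruction for displaced stepping.  */
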
7990 static int
7991 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
7992 			      struct regcache *regs,
7993 			      struct displaced_step_closure *dsc)
7994 {
7995   unsigned int rt = bits (insn1, 8, 10);
7996   unsigned int pc;
7997   int imm8 = (bits (insn1, 0, 7) << 2);
7998   CORE_ADDR from = dsc->insn_addr;
7999 
8000   /* LDR Rd, #imm8
8001 
8002      Rewrite as:
8003 
8004      Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8005 
8006      Insn: LDR R0, [R2, R3];
8007      Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8008 
8009   if (debug_displaced)
8010     fprintf_unfiltered (gdb_stdlog,
8011 			"displaced: copying thumb ldr r%d [pc #%d]\n"
8012 			, rt, imm8);
8013 
8014   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8015   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8016   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8017   pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8018   /* The assembler calculates the required value of the offset from the
8019      Align(PC,4) value of this instruction to the label.  */
8020   pc = pc & 0xfffffffc;
8021 
8022   displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8023   displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8024 
8025   dsc->rd = rt;
8026   dsc->u.ldst.xfersize = 4;
8027   dsc->u.ldst.rn = 0;
8028   dsc->u.ldst.immed = 0;
8029   dsc->u.ldst.writeback = 0;
8030   dsc->u.ldst.restore_r4 = 0;
8031 
8032   dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8033 
8034   dsc->cleanup = &cleanup_load;
8035 
8036   return 0;
8037 }
8038 
8039 /* Copy Thumb cbnz/cbz instruction.  */
8040 
8041 static int
8042 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8043 		     struct regcache *regs,
8044 		     struct displaced_step_closure *dsc)
8045 {
8046   int non_zero = bit (insn1, 11);
8047   unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8048   CORE_ADDR from = dsc->insn_addr;
8049   int rn = bits (insn1, 0, 2);
8050   int rn_val = displaced_read_reg (regs, dsc, rn);
8051 
8052   dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8053   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
8054      true, set it to INST_AL so that cleanup_branch knows the branch is
8055      taken; otherwise leave it false and cleanup_branch will do nothing.  */
8056   if (dsc->u.branch.cond)
8057     {
8058       dsc->u.branch.cond = INST_AL;
8059       dsc->u.branch.dest = from + 4 + imm5;
8060     }
8061   else
8062       dsc->u.branch.dest = from + 2;
8063 
8064   dsc->u.branch.link = 0;
8065   dsc->u.branch.exchange = 0;
8066 
8067   if (debug_displaced)
8068     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8069 			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8070 			rn, rn_val, insn1, dsc->u.branch.dest);
8071 
8072   dsc->modinsn[0] = THUMB_NOP;
8073 
8074   dsc->cleanup = &cleanup_branch;
8075   return 0;
8076 }
8077 
8078 /* Copy Table Branch Byte/Halfword */
8079 static int
8080 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8081 			  uint16_t insn2, struct regcache *regs,
8082 			  struct displaced_step_closure *dsc)
8083 {
8084   ULONGEST rn_val, rm_val;
8085   int is_tbh = bit (insn2, 4);
8086   CORE_ADDR halfwords = 0;
8087   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8088 
8089   rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8090   rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8091 
8092   if (is_tbh)
8093     {
8094       gdb_byte buf[2];
8095 
8096       target_read_memory (rn_val + 2 * rm_val, buf, 2);
8097       halfwords = extract_unsigned_integer (buf, 2, byte_order);
8098     }
8099   else
8100     {
8101       gdb_byte buf[1];
8102 
8103       target_read_memory (rn_val + rm_val, buf, 1);
8104       halfwords = extract_unsigned_integer (buf, 1, byte_order);
8105     }
8106 
8107   if (debug_displaced)
8108     fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8109 			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8110 			(unsigned int) rn_val, (unsigned int) rm_val,
8111 			(unsigned int) halfwords);
8112 
8113   dsc->u.branch.cond = INST_AL;
8114   dsc->u.branch.link = 0;
8115   dsc->u.branch.exchange = 0;
8116   dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8117 
8118   dsc->cleanup = &cleanup_branch;
8119 
8120   return 0;
8121 }
8122 
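/* Cleanup for a Thumb POP with a full register list: write the loaded PC
   value from r7, then restore r7 from r8 and r8 from the saved
   temporary.  */
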
8123 static void
8124 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8125 			  struct displaced_step_closure *dsc)
8126 {
8127   /* PC <- r7 */
8128   int val = displaced_read_reg (regs, dsc, 7);
8129   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8130 
8131   /* r7 <- r8 */
8132   val = displaced_read_reg (regs, dsc, 8);
8133   displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8134 
8135   /* r8 <- tmp[0] */
8136   displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8137 
8138 }
8139 
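/* Copy a 16-bit Thumb POP instruction whose register list includes PC.  */
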
8140 static int
8141 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8142 			 struct regcache *regs,
8143 			 struct displaced_step_closure *dsc)
8144 {
8145   dsc->u.block.regmask = insn1 & 0x00ff;
8146 
8147   /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8148      to:
8149 
8150      (1) register list is full, that is, r0-r7 are used.
8151      Prepare: tmp[0] <- r8
8152 
8153      POP {r0, r1, ...., r6, r7}; remove PC from reglist
8154      MOV r8, r7; Move value of r7 to r8;
8155      POP {r7}; Store PC value into r7.
8156 
8157      Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8158 
8159      (2) register list is not full, supposing there are N registers in
8160      the register list (not counting PC, 0 <= N <= 7).
8161      Prepare: for each i in 0 .. N, tmp[i] <- ri.
8162 
8163      POP {r0, r1, ...., rN};
8164 
8165      Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
8166      from tmp[] properly.
8167   */
8168   if (debug_displaced)
8169     fprintf_unfiltered (gdb_stdlog,
8170 			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8171 			dsc->u.block.regmask, insn1);
8172 
8173   if (dsc->u.block.regmask == 0xff)
8174     {
8175       dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8176 
8177       dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8178       dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8179       dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8180 
8181       dsc->numinsns = 3;
8182       dsc->cleanup = &cleanup_pop_pc_16bit_all;
8183     }
8184   else
8185     {
8186       unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8187       unsigned int new_regmask, bit = 1;
8188       unsigned int to = 0, from = 0, i, new_rn;
8189 
8190       for (i = 0; i < num_in_list + 1; i++)
8191 	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8192 
8193       new_regmask = (1 << (num_in_list + 1)) - 1;
8194 
8195       if (debug_displaced)
8196 	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8197 					  "{..., pc}: original reg list %.4x,"
8198 					  " modified list %.4x\n"),
8199 			    (int) dsc->u.block.regmask, new_regmask);
8200 
8201       dsc->u.block.regmask |= 0x8000;
8202       dsc->u.block.writeback = 0;
8203       dsc->u.block.cond = INST_AL;
8204 
8205       dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8206 
8207       dsc->cleanup = &cleanup_block_load_pc;
8208     }
8209 
8210   return 0;
8211 }
8212 
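/* Decode a 16-bit Thumb instruction for displaced stepping and dispatch
   to the matching copy routine.  */
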
8213 static void
8214 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8215 				    struct regcache *regs,
8216 				    struct displaced_step_closure *dsc)
8217 {
8218   unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8219   unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8220   int err = 0;
8221 
8222   /* 16-bit thumb instructions.  */
8223   switch (op_bit_12_15)
8224     {
8225       /* Shift (immediate), add, subtract, move and compare.  */
8226     case 0: case 1: case 2: case 3:
8227       err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8228 					 "shift/add/sub/mov/cmp",
8229 					 dsc);
8230       break;
8231     case 4:
8232       switch (op_bit_10_11)
8233 	{
8234 	case 0: /* Data-processing */
8235 	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8236 					     "data-processing",
8237 					     dsc);
8238 	  break;
8239 	case 1: /* Special data instructions and branch and exchange.  */
8240 	  {
8241 	    unsigned short op = bits (insn1, 7, 9);
8242 	    if (op == 6 || op == 7) /* BX or BLX */
8243 	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8244 	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
8245 	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8246 	    else
8247 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8248 						 dsc);
8249 	  }
8250 	  break;
8251 	default: /* LDR (literal) */
8252 	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8253 	}
8254       break;
8255     case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8256       err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8257       break;
8258     case 10:
8259       if (op_bit_10_11 < 2) /* Generate PC-relative address */
8260 	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8261       else /* Generate SP-relative address */
8262 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8263       break;
8264     case 11: /* Misc 16-bit instructions */
8265       {
8266 	switch (bits (insn1, 8, 11))
8267 	  {
8268 	  case 1: case 3:  case 9: case 11: /* CBNZ, CBZ */
8269 	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8270 	    break;
8271 	  case 12: case 13: /* POP */
8272 	    if (bit (insn1, 8)) /* PC is in register list.  */
8273 	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8274 	    else
8275 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8276 	    break;
8277 	  case 15: /* If-Then, and hints */
8278 	    if (bits (insn1, 0, 3))
8279 	      /* If-Then makes up to four following instructions conditional.
8280 		 The IT instruction itself is not conditional, so handle it as
8281 		 an ordinary unmodified instruction.  */
8282 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8283 						 dsc);
8284 	    else
8285 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8286 	    break;
8287 	  default:
8288 	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8289 	  }
8290       }
8291       break;
8292     case 12:
8293       if (op_bit_10_11 < 2) /* Store multiple registers */
8294 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8295       else /* Load multiple registers */
8296 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8297       break;
8298     case 13: /* Conditional branch and supervisor call */
8299       if (bits (insn1, 9, 11) != 7) /* conditional branch */
8300 	err = thumb_copy_b (gdbarch, insn1, dsc);
8301       else
8302 	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8303       break;
8304     case 14: /* Unconditional branch */
8305       err = thumb_copy_b (gdbarch, insn1, dsc);
8306       break;
8307     default:
8308       err = 1;
8309     }
8310 
8311   if (err)
8312     internal_error (__FILE__, __LINE__,
8313 		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8314 }
8315 
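/* Decode 32-bit Thumb load instructions and memory hints (PLD/PLI),
   dispatching to the matching copy routine.  */
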
8316 static int
8317 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8318 				 uint16_t insn1, uint16_t insn2,
8319 				 struct regcache *regs,
8320 				 struct displaced_step_closure *dsc)
8321 {
8322   int rt = bits (insn2, 12, 15);
8323   int rn = bits (insn1, 0, 3);
8324   int op1 = bits (insn1, 7, 8);
8325   int err = 0;
8326 
8327   switch (bits (insn1, 5, 6))
8328     {
8329     case 0: /* Load byte and memory hints */
8330       if (rt == 0xf) /* PLD/PLI */
8331 	{
8332 	  if (rn == 0xf)
8333 	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
8334 	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8335 	  else
8336 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8337 						"pli/pld", dsc);
8338 	}
8339       else
8340 	{
8341 	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
8342 	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8343 					     1);
8344 	  else
8345 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8346 						"ldrb{reg, immediate}/ldrbt",
8347 						dsc);
8348 	}
8349 
8350       break;
8351     case 1: /* Load halfword and memory hints.  */
8352       if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
8353 	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8354 					    "pld/unalloc memhint", dsc);
8355       else
8356 	{
8357 	  if (rn == 0xf)
8358 	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8359 					     2);
8360 	  else
8361 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8362 						"ldrh/ldrht", dsc);
8363 	}
8364       break;
8365     case 2: /* Load word */
8366       {
8367 	int insn2_bit_8_11 = bits (insn2, 8, 11);
8368 
8369 	if (rn == 0xf)
8370 	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8371 	else if (op1 == 0x1) /* Encoding T3 */
8372 	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8373 					   0, 1);
8374 	else /* op1 == 0x0 */
8375 	  {
8376 	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8377 	      /* LDR (immediate) */
8378 	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8379 					       dsc, bit (insn2, 8), 1);
8380 	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
8381 	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8382 						  "ldrt", dsc);
8383 	    else
8384 	      /* LDR (register) */
8385 	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8386 					       dsc, 0, 0);
8387 	  }
8388 	break;
8389       }
8390     default:
8391       return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8392       break;
8393     }
8394   return 0;
8395 }
8396 
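/* Decode a 32-bit Thumb instruction for displaced stepping and dispatch
   to the matching copy routine.  */
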
8397 static void
8398 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8399 				    uint16_t insn2, struct regcache *regs,
8400 				    struct displaced_step_closure *dsc)
8401 {
8402   int err = 0;
8403   unsigned short op = bit (insn2, 15);
8404   unsigned int op1 = bits (insn1, 11, 12);
8405 
8406   switch (op1)
8407     {
8408     case 1:
8409       {
8410 	switch (bits (insn1, 9, 10))
8411 	  {
8412 	  case 0:
8413 	    if (bit (insn1, 6))
8414 	      {
8415 		/* Load/store {dual, exclusive}, table branch.  */
8416 		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8417 		    && bits (insn2, 5, 7) == 0)
8418 		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8419 						  dsc);
8420 		else
8421 		  /* PC is not allowed to be used in load/store {dual,
8422 		     exclusive} instructions.  */
8423 		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8424 						     "load/store dual/ex", dsc);
8425 	      }
8426 	    else /* load/store multiple */
8427 	      {
8428 		switch (bits (insn1, 7, 8))
8429 		  {
8430 		  case 0: case 3: /* SRS, RFE */
8431 		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8432 						       "srs/rfe", dsc);
8433 		    break;
8434 		  case 1: case 2: /* LDM/STM/PUSH/POP */
8435 		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8436 		    break;
8437 		  }
8438 	      }
8439 	    break;
8440 
8441 	  case 1:
8442 	    /* Data-processing (shift register).  */
8443 	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8444 					      dsc);
8445 	    break;
8446 	  default: /* Coprocessor instructions.  */
8447 	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8448 	    break;
8449 	  }
8450       break;
8451       }
8452     case 2: /* op1 = 2 */
8453       if (op) /* Branch and misc control.  */
8454 	{
8455 	  if (bit (insn2, 14)  /* BLX/BL */
8456 	      || bit (insn2, 12) /* Unconditional branch */
8457 	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8458 	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8459 	  else
8460 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8461 					       "misc ctrl", dsc);
8462 	}
8463       else
8464 	{
8465 	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
8466 	    {
8467 	      int op = bits (insn1, 4, 8);
8468 	      int rn = bits (insn1, 0, 3);
8469 	      if ((op == 0 || op == 0xa) && rn == 0xf)
8470 		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8471 						    regs, dsc);
8472 	      else
8473 		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8474 						   "dp/pb", dsc);
8475 	    }
8476 	  else /* Data processing (modified immediate) */
8477 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8478 					       "dp/mi", dsc);
8479 	}
8480       break;
8481     case 3: /* op1 = 3 */
8482       switch (bits (insn1, 9, 10))
8483 	{
8484 	case 0:
8485 	  if (bit (insn1, 4))
8486 	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8487 						   regs, dsc);
8488 	  else /* NEON Load/Store and Store single data item */
8489 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8490 					       "neon elt/struct load/store",
8491 					       dsc);
8492 	  break;
8493 	case 1: /* op1 = 3, bits (9, 10) == 1 */
8494 	  switch (bits (insn1, 7, 8))
8495 	    {
8496 	    case 0: case 1: /* Data processing (register) */
8497 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8498 						 "dp(reg)", dsc);
8499 	      break;
8500 	    case 2: /* Multiply and absolute difference */
8501 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8502 						 "mul/mua/diff", dsc);
8503 	      break;
8504 	    case 3: /* Long multiply and divide */
8505 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8506 						 "lmul/lmua", dsc);
8507 	      break;
8508 	    }
8509 	  break;
8510 	default: /* Coprocessor instructions */
8511 	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8512 	  break;
8513 	}
8514       break;
8515     default:
8516       err = 1;
8517     }
8518 
8519   if (err)
8520     internal_error (__FILE__, __LINE__,
8521 		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8522 
8523 }
8524 
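/* Copy the Thumb instruction at FROM for displaced stepping, dispatching
   on whether it is a 16-bit or 32-bit instruction.  */
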
8525 static void
8526 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8527 			      CORE_ADDR to, struct regcache *regs,
8528 			      struct displaced_step_closure *dsc)
8529 {
8530   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8531   uint16_t insn1
8532     = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8533 
8534   if (debug_displaced)
8535     fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8536 			"at %.8lx\n", insn1, (unsigned long) from);
8537 
8538   dsc->is_thumb = 1;
8539   dsc->insn_size = thumb_insn_size (insn1);
8540   if (thumb_insn_size (insn1) == 4)
8541     {
8542       uint16_t insn2
8543 	= read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8544       thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8545     }
8546   else
8547     thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8548 }
8549 
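/* Copy the instruction at FROM for displaced stepping: initialize DSC and
   dispatch to the ARM or Thumb decoder as appropriate.  */
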
8550 void
8551 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8552 			    CORE_ADDR to, struct regcache *regs,
8553 			    struct displaced_step_closure *dsc)
8554 {
8555   int err = 0;
8556   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8557   uint32_t insn;
8558 
8559   /* Most displaced instructions use a 1-instruction scratch space, so set this
8560      here and override below if/when necessary.  */
8561   dsc->numinsns = 1;
8562   dsc->insn_addr = from;
8563   dsc->scratch_base = to;
8564   dsc->cleanup = NULL;
8565   dsc->wrote_to_pc = 0;
8566 
8567   if (!displaced_in_arm_mode (regs))
8568     return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8569 
8570   dsc->is_thumb = 0;
8571   dsc->insn_size = 4;
8572   insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8573   if (debug_displaced)
8574     fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8575 			"at %.8lx\n", (unsigned long) insn,
8576 			(unsigned long) from);
8577 
8578   if ((insn & 0xf0000000) == 0xf0000000)
8579     err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8580   else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8581     {
8582     case 0x0: case 0x1: case 0x2: case 0x3:
8583       err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8584       break;
8585 
8586     case 0x4: case 0x5: case 0x6:
8587       err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8588       break;
8589 
8590     case 0x7:
8591       err = arm_decode_media (gdbarch, insn, dsc);
8592       break;
8593 
8594     case 0x8: case 0x9: case 0xa: case 0xb:
8595       err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8596       break;
8597 
8598     case 0xc: case 0xd: case 0xe: case 0xf:
8599       err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8600       break;
8601     }
8602 
8603   if (err)
8604     internal_error (__FILE__, __LINE__,
8605 		    _("arm_process_displaced_insn: Instruction decode error"));
8606 }
8607 
8608 /* Actually set up the scratch space for a displaced instruction.  */
8609 
8610 void
8611 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8612 			    CORE_ADDR to, struct displaced_step_closure *dsc)
8613 {
8614   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8615   unsigned int i, len, offset;
8616   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8617   int size = dsc->is_thumb ? 2 : 4;
8618   const unsigned char *bkp_insn;
8619 
8620   offset = 0;
8621   /* Poke modified instruction(s).  */
8622   for (i = 0; i < dsc->numinsns; i++)
8623     {
8624       if (debug_displaced)
8625 	{
8626 	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8627 	  if (size == 4)
8628 	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
8629 				dsc->modinsn[i]);
8630 	  else if (size == 2)
8631 	    fprintf_unfiltered (gdb_stdlog, "%.4x",
8632 				(unsigned short)dsc->modinsn[i]);
8633 
8634 	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8635 			      (unsigned long) to + offset);
8636 
8637 	}
8638       write_memory_unsigned_integer (to + offset, size,
8639 				     byte_order_for_code,
8640 				     dsc->modinsn[i]);
8641       offset += size;
8642     }
8643 
8644   /* Choose the correct breakpoint instruction.  */
8645   if (dsc->is_thumb)
8646     {
8647       bkp_insn = tdep->thumb_breakpoint;
8648       len = tdep->thumb_breakpoint_size;
8649     }
8650   else
8651     {
8652       bkp_insn = tdep->arm_breakpoint;
8653       len = tdep->arm_breakpoint_size;
8654     }
8655 
8656   /* Put breakpoint afterwards.  */
8657   write_memory (to + offset, bkp_insn, len);
8658 
8659   if (debug_displaced)
8660     fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8661 			paddress (gdbarch, from), paddress (gdbarch, to));
8662 }
8663 
8664 /* Entry point for copying an instruction into scratch space for displaced
8665    stepping.  */
8666 
8667 struct displaced_step_closure *
8668 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8669 			      CORE_ADDR from, CORE_ADDR to,
8670 			      struct regcache *regs)
8671 {
8672   struct displaced_step_closure *dsc
8673     = xmalloc (sizeof (struct displaced_step_closure));
8674   arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8675   arm_displaced_init_closure (gdbarch, from, to, dsc);
8676 
8677   return dsc;
8678 }
8679 
8680 /* Entry point for cleaning things up after a displaced instruction has been
8681    single-stepped.  */
8682 
8683 void
8684 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8685 			  struct displaced_step_closure *dsc,
8686 			  CORE_ADDR from, CORE_ADDR to,
8687 			  struct regcache *regs)
8688 {
8689   if (dsc->cleanup)
8690     dsc->cleanup (gdbarch, regs, dsc);
8691 
8692   if (!dsc->wrote_to_pc)
8693     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8694 				    dsc->insn_addr + dsc->insn_size);
8695 
8696 }
8697 
8698 #include "bfd-in2.h"
8699 #include "libcoff.h"
8700 
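/* GDB's disassembler hook: print the instruction at MEMADDR, providing a
   fake Thumb symbol so that opcodes switches to Thumb disassembly when
   MEMADDR is a Thumb address.  */
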
8701 static int
8702 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8703 {
8704   struct gdbarch *gdbarch = info->application_data;
8705 
8706   if (arm_pc_is_thumb (gdbarch, memaddr))
8707     {
8708       static asymbol *asym;
8709       static combined_entry_type ce;
8710       static struct coff_symbol_struct csym;
8711       static struct bfd fake_bfd;
8712       static bfd_target fake_target;
8713 
8714       if (csym.native == NULL)
8715 	{
8716 	  /* Create a fake symbol vector containing a Thumb symbol.
8717 	     This is solely so that the code in print_insn_little_arm()
8718 	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
8719 	     the presence of a Thumb symbol and switch to decoding
8720 	     Thumb instructions.  */
8721 
8722 	  fake_target.flavour = bfd_target_coff_flavour;
8723 	  fake_bfd.xvec = &fake_target;
8724 	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8725 	  csym.native = &ce;
8726 	  csym.symbol.the_bfd = &fake_bfd;
8727 	  csym.symbol.name = "fake";
8728 	  asym = (asymbol *) & csym;
8729 	}
8730 
8731       memaddr = UNMAKE_THUMB_ADDR (memaddr);
8732       info->symbols = &asym;
8733     }
8734   else
8735     info->symbols = NULL;
8736 
8737   if (info->endian == BFD_ENDIAN_BIG)
8738     return print_insn_big_arm (memaddr, info);
8739   else
8740     return print_insn_little_arm (memaddr, info);
8741 }
8742 
8743 /* The following define instruction sequences that will cause ARM
8744    cpu's to take an undefined instruction trap.  These are used to
8745    signal a breakpoint to GDB.
8746 
8747    The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8748    modes.  A different instruction is required for each mode.  The ARM
8749    cpu's can also be big or little endian.  Thus four different
8750    instructions are needed to support all cases.
8751 
8752    Note: ARMv4 defines several new instructions that will take the
8753    undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
8754    not in fact add the new instructions.  The new undefined
8755    instructions in ARMv4 are all instructions that had no defined
8756    behaviour in earlier chips.  There is no guarantee that they will
8757    raise an exception; they may be treated as NOPs.  In practice, it
8758    may only be safe to rely on instructions matching:
8759 
8760    3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8761    1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8762    C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8763 
8764    Even this may only be true if the condition predicate is true.  The
8765    following use a condition predicate of ALWAYS so it is always TRUE.
8766 
8767    There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
8768    and NetBSD all use a software interrupt rather than an undefined
8769    instruction to force a trap.  This can be handled by the
8770    ABI-specific code during establishment of the gdbarch vector.  */
8771 
8772 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8773 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8774 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8775 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8776 
8777 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8778 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8779 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8780 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8781 
8782 /* Determine the type and size of breakpoint to insert at PCPTR.  Uses
8783    the program counter value to determine whether a 16-bit or 32-bit
8784    breakpoint should be used.  It returns a pointer to a string of
8785    bytes that encode a breakpoint instruction, stores the length of
8786    the string to *lenptr, and adjusts the program counter (if
8787    necessary) to point to the actual memory location where the
8788    breakpoint should be inserted.  */
8789 
8790 static const unsigned char *
8791 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8792 {
8793   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8794   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8795 
8796   if (arm_pc_is_thumb (gdbarch, *pcptr))
8797     {
8798       *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8799 
8800       /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8801 	 check whether we are replacing a 32-bit instruction.  */
8802       if (tdep->thumb2_breakpoint != NULL)
8803 	{
8804 	  gdb_byte buf[2];
8805 	  if (target_read_memory (*pcptr, buf, 2) == 0)
8806 	    {
8807 	      unsigned short inst1;
8808 	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8809 	      if (thumb_insn_size (inst1) == 4)
8810 		{
8811 		  *lenptr = tdep->thumb2_breakpoint_size;
8812 		  return tdep->thumb2_breakpoint;
8813 		}
8814 	    }
8815 	}
8816 
8817       *lenptr = tdep->thumb_breakpoint_size;
8818       return tdep->thumb_breakpoint;
8819     }
8820   else
8821     {
8822       *lenptr = tdep->arm_breakpoint_size;
8823       return tdep->arm_breakpoint;
8824     }
8825 }
8826 
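/* Determine the breakpoint kind to report to the remote target for PCPTR,
   using kind 3 for a 32-bit Thumb-2 breakpoint so that it is not confused
   with a 32-bit ARM breakpoint.  */
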
8827 static void
8828 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8829 			       int *kindptr)
8830 {
8831   arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8832 
8833   if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8834     /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8835        that this is not confused with a 32-bit ARM breakpoint.  */
8836     *kindptr = 3;
8837 }
8838 
8839 /* Extract from an array REGBUF containing the (raw) register state a
8840    function return value of type TYPE, and copy that, in virtual
8841    format, into VALBUF.  */
8842 
8843 static void
8844 arm_extract_return_value (struct type *type, struct regcache *regs,
8845 			  gdb_byte *valbuf)
8846 {
8847   struct gdbarch *gdbarch = get_regcache_arch (regs);
8848   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8849 
8850   if (TYPE_CODE_FLT == TYPE_CODE (type))
8851     {
8852       switch (gdbarch_tdep (gdbarch)->fp_model)
8853 	{
8854 	case ARM_FLOAT_FPA:
8855 	  {
8856 	    /* The value is in register F0 in internal format.  We need to
8857 	       extract the raw value and then convert it to the desired
8858 	       internal type.  */
8859 	    bfd_byte tmpbuf[FP_REGISTER_SIZE];
8860 
8861 	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8862 	    convert_from_extended (floatformat_from_type (type), tmpbuf,
8863 				   valbuf, gdbarch_byte_order (gdbarch));
8864 	  }
8865 	  break;
8866 
8867 	case ARM_FLOAT_SOFT_FPA:
8868 	case ARM_FLOAT_SOFT_VFP:
8869 	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
8870 	     not using the VFP ABI code.  */
8871 	case ARM_FLOAT_VFP:
8872 	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8873 	  if (TYPE_LENGTH (type) > 4)
8874 	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8875 				  valbuf + INT_REGISTER_SIZE);
8876 	  break;
8877 
8878 	default:
8879 	  internal_error (__FILE__, __LINE__,
8880 			  _("arm_extract_return_value: "
8881 			    "Floating point model not supported"));
8882 	  break;
8883 	}
8884     }
8885   else if (TYPE_CODE (type) == TYPE_CODE_INT
8886 	   || TYPE_CODE (type) == TYPE_CODE_CHAR
8887 	   || TYPE_CODE (type) == TYPE_CODE_BOOL
8888 	   || TYPE_CODE (type) == TYPE_CODE_PTR
8889 	   || TYPE_CODE (type) == TYPE_CODE_REF
8890 	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
8891     {
8892       /* If the type is a plain integer, then the access is
8893 	 straightforward.  Otherwise we have to play around a bit
8894 	 more.  */
8895       int len = TYPE_LENGTH (type);
8896       int regno = ARM_A1_REGNUM;
8897       ULONGEST tmp;
8898 
8899       while (len > 0)
8900 	{
8901 	  /* By using store_unsigned_integer we avoid having to do
8902 	     anything special for small big-endian values.  */
8903 	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
8904 	  store_unsigned_integer (valbuf,
8905 				  (len > INT_REGISTER_SIZE
8906 				   ? INT_REGISTER_SIZE : len),
8907 				  byte_order, tmp);
8908 	  len -= INT_REGISTER_SIZE;
8909 	  valbuf += INT_REGISTER_SIZE;
8910 	}
8911     }
8912   else
8913     {
8914       /* For a structure or union the behaviour is as if the value had
8915          been stored to word-aligned memory and then loaded into
8916          registers with 32-bit load instruction(s).  */
8917       int len = TYPE_LENGTH (type);
8918       int regno = ARM_A1_REGNUM;
8919       bfd_byte tmpbuf[INT_REGISTER_SIZE];
8920 
8921       while (len > 0)
8922 	{
8923 	  regcache_cooked_read (regs, regno++, tmpbuf);
8924 	  memcpy (valbuf, tmpbuf,
8925 		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8926 	  len -= INT_REGISTER_SIZE;
8927 	  valbuf += INT_REGISTER_SIZE;
8928 	}
8929     }
8930 }
8931 
8932 
8933 /* Will a function return an aggregate type in memory or in a
8934    register?  Return 0 if an aggregate type can be returned in a
8935    register, 1 if it must be returned in memory.  */
8936 
8937 static int
8938 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8939 {
8940   int nRc;
8941   enum type_code code;
8942 
8943   CHECK_TYPEDEF (type);
8944 
8945   /* In the ARM ABI, "integer" like aggregate types are returned in
8946      registers.  For an aggregate type to be integer like, its size
8947      must be less than or equal to INT_REGISTER_SIZE and the
8948      offset of each addressable subfield must be zero.  Note that bit
8949      fields are not addressable, and all addressable subfields of
8950      unions always start at offset zero.
8951 
8952      This function is based on the behaviour of GCC 2.95.1.
8953      See: gcc/arm.c: arm_return_in_memory() for details.
8954 
8955      Note: All versions of GCC before GCC 2.95.2 do not set up the
8956      parameters correctly for a function returning the following
8957      structure: struct { float f;}; This should be returned in memory,
8958      not a register.  Richard Earnshaw sent me a patch, but I do not
8959      know of any way to detect if a function like the above has been
8960      compiled with the correct calling convention.  */
8961 
8962   /* All aggregate types that won't fit in a register must be returned
8963      in memory.  */
8964   if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8965     {
8966       return 1;
8967     }
8968 
8969   /* The AAPCS says all aggregates not larger than a word are returned
8970      in a register.  */
8971   if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8972     return 0;
8973 
8974   /* The only aggregate types that can be returned in a register are
8975      structs and unions.  Arrays must be returned in memory.  */
8976   code = TYPE_CODE (type);
8977   if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
8978     {
8979       return 1;
8980     }
8981 
8982   /* Assume all other aggregate types can be returned in a register.
8983      Run a check for structures, unions and arrays.  */
8984   nRc = 0;
8985 
8986   if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8987     {
8988       int i;
8989       /* Need to check if this struct/union is "integer" like.  For
8990          this to be true, its size must be less than or equal to
8991          INT_REGISTER_SIZE and the offset of each addressable
8992          subfield must be zero.  Note that bit fields are not
8993          addressable, and unions always start at offset zero.  If any
8994          of the subfields is a floating point type, the struct/union
8995          cannot be an integer type.  */
8996 
8997       /* For each field in the object, check:
8998          1) Is it FP? --> yes, nRc = 1;
8999          2) Is it addressable (bitpos != 0) and
9000          not packed (bitsize == 0)?
9001          --> yes, nRc = 1
9002        */
9003 
9004       for (i = 0; i < TYPE_NFIELDS (type); i++)
9005 	{
9006 	  enum type_code field_type_code;
9007 	  field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9008 								       i)));
9009 
9010 	  /* Is it a floating point type field?  */
9011 	  if (field_type_code == TYPE_CODE_FLT)
9012 	    {
9013 	      nRc = 1;
9014 	      break;
9015 	    }
9016 
9017 	  /* If bitpos != 0, then we have to care about it.  */
9018 	  if (TYPE_FIELD_BITPOS (type, i) != 0)
9019 	    {
9020 	      /* Bitfields are not addressable.  If the field bitsize is
9021 	         zero, then the field is not packed.  Hence it cannot be
9022 	         a bitfield or any other packed type.  */
9023 	      if (TYPE_FIELD_BITSIZE (type, i) == 0)
9024 		{
9025 		  nRc = 1;
9026 		  break;
9027 		}
9028 	    }
9029 	}
9030     }
9031 
9032   return nRc;
9033 }
9034 
9035 /* Write into appropriate registers a function return value of type
9036    TYPE, given in virtual format.  */
9037 
9038 static void
9039 arm_store_return_value (struct type *type, struct regcache *regs,
9040 			const gdb_byte *valbuf)
9041 {
9042   struct gdbarch *gdbarch = get_regcache_arch (regs);
9043   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9044 
9045   if (TYPE_CODE (type) == TYPE_CODE_FLT)
9046     {
9047       gdb_byte buf[MAX_REGISTER_SIZE];
9048 
9049       switch (gdbarch_tdep (gdbarch)->fp_model)
9050 	{
9051 	case ARM_FLOAT_FPA:
9052 
9053 	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
9054 			       gdbarch_byte_order (gdbarch));
9055 	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9056 	  break;
9057 
9058 	case ARM_FLOAT_SOFT_FPA:
9059 	case ARM_FLOAT_SOFT_VFP:
9060 	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
9061 	     not using the VFP ABI code.  */
9062 	case ARM_FLOAT_VFP:
9063 	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9064 	  if (TYPE_LENGTH (type) > 4)
9065 	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9066 				   valbuf + INT_REGISTER_SIZE);
9067 	  break;
9068 
9069 	default:
9070 	  internal_error (__FILE__, __LINE__,
9071 			  _("arm_store_return_value: Floating "
9072 			    "point model not supported"));
9073 	  break;
9074 	}
9075     }
9076   else if (TYPE_CODE (type) == TYPE_CODE_INT
9077 	   || TYPE_CODE (type) == TYPE_CODE_CHAR
9078 	   || TYPE_CODE (type) == TYPE_CODE_BOOL
9079 	   || TYPE_CODE (type) == TYPE_CODE_PTR
9080 	   || TYPE_CODE (type) == TYPE_CODE_REF
9081 	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
9082     {
9083       if (TYPE_LENGTH (type) <= 4)
9084 	{
9085 	  /* Values of one word or less are zero/sign-extended and
9086 	     returned in r0.  */
9087 	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
9088 	  LONGEST val = unpack_long (type, valbuf);
9089 
9090 	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9091 	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9092 	}
9093       else
9094 	{
9095 	  /* Integral values greater than one word are stored in consecutive
9096 	     registers starting with r0.  This will always be a multiple of
9097 	     the register size.  */
9098 	  int len = TYPE_LENGTH (type);
9099 	  int regno = ARM_A1_REGNUM;
9100 
9101 	  while (len > 0)
9102 	    {
9103 	      regcache_cooked_write (regs, regno++, valbuf);
9104 	      len -= INT_REGISTER_SIZE;
9105 	      valbuf += INT_REGISTER_SIZE;
9106 	    }
9107 	}
9108     }
9109   else
9110     {
9111       /* For a structure or union the behaviour is as if the value had
9112          been stored to word-aligned memory and then loaded into
9113          registers with 32-bit load instruction(s).  */
9114       int len = TYPE_LENGTH (type);
9115       int regno = ARM_A1_REGNUM;
9116       bfd_byte tmpbuf[INT_REGISTER_SIZE];
9117 
9118       while (len > 0)
9119 	{
9120 	  memcpy (tmpbuf, valbuf,
9121 		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9122 	  regcache_cooked_write (regs, regno++, tmpbuf);
9123 	  len -= INT_REGISTER_SIZE;
9124 	  valbuf += INT_REGISTER_SIZE;
9125 	}
9126     }
9127 }
9128 
9129 
9130 /* Handle function return values.  */
9131 
9132 static enum return_value_convention
9133 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9134 		  struct type *valtype, struct regcache *regcache,
9135 		  gdb_byte *readbuf, const gdb_byte *writebuf)
9136 {
9137   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9138   struct type *func_type = function ? value_type (function) : NULL;
9139   enum arm_vfp_cprc_base_type vfp_base_type;
9140   int vfp_base_count;
9141 
9142   if (arm_vfp_abi_for_function (gdbarch, func_type)
9143       && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9144     {
9145       int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9146       int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9147       int i;
9148       for (i = 0; i < vfp_base_count; i++)
9149 	{
9150 	  if (reg_char == 'q')
9151 	    {
9152 	      if (writebuf)
9153 		arm_neon_quad_write (gdbarch, regcache, i,
9154 				     writebuf + i * unit_length);
9155 
9156 	      if (readbuf)
9157 		arm_neon_quad_read (gdbarch, regcache, i,
9158 				    readbuf + i * unit_length);
9159 	    }
9160 	  else
9161 	    {
9162 	      char name_buf[4];
9163 	      int regnum;
9164 
9165 	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9166 	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9167 						    strlen (name_buf));
9168 	      if (writebuf)
9169 		regcache_cooked_write (regcache, regnum,
9170 				       writebuf + i * unit_length);
9171 	      if (readbuf)
9172 		regcache_cooked_read (regcache, regnum,
9173 				      readbuf + i * unit_length);
9174 	    }
9175 	}
9176       return RETURN_VALUE_REGISTER_CONVENTION;
9177     }
9178 
9179   if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9180       || TYPE_CODE (valtype) == TYPE_CODE_UNION
9181       || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9182     {
9183       if (tdep->struct_return == pcc_struct_return
9184 	  || arm_return_in_memory (gdbarch, valtype))
9185 	return RETURN_VALUE_STRUCT_CONVENTION;
9186     }
9187 
9188   /* AAPCS returns complex types longer than a register in memory.  */
9189   if (tdep->arm_abi != ARM_ABI_APCS
9190       && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9191       && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9192     return RETURN_VALUE_STRUCT_CONVENTION;
9193 
9194   if (writebuf)
9195     arm_store_return_value (valtype, regcache, writebuf);
9196 
9197   if (readbuf)
9198     arm_extract_return_value (valtype, regcache, readbuf);
9199 
9200   return RETURN_VALUE_REGISTER_CONVENTION;
9201 }
9202 
9203 
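/* Extract the longjmp target PC from the jmp_buf whose address is in r0;
   store it in *PC and return non-zero on success, zero on failure.  */
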
9204 static int
9205 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9206 {
9207   struct gdbarch *gdbarch = get_frame_arch (frame);
9208   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9209   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9210   CORE_ADDR jb_addr;
9211   gdb_byte buf[INT_REGISTER_SIZE];
9212 
9213   jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9214 
9215   if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9216 			  INT_REGISTER_SIZE))
9217     return 0;
9218 
9219   *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9220   return 1;
9221 }
9222 
9223 /* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
9224    return the target PC.  Otherwise return 0.  */
9225 
9226 CORE_ADDR
9227 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9228 {
9229   const char *name;
9230   int namelen;
9231   CORE_ADDR start_addr;
9232 
9233   /* Find the starting address and name of the function containing the PC.  */
9234   if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9235     return 0;
9236 
9237   /* If PC is in a Thumb call or return stub, return the address of the
9238      target PC, which is in a register.  The thunk functions are called
9239      _call_via_xx, where x is the register name.  The possible names
9240      are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
9241      functions, named __ARM_call_via_r[0-7].  */
9242   if (strncmp (name, "_call_via_", 10) == 0
9243       || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9244     {
9245       /* Use the name suffix to determine which register contains the
9246          target PC.  */
9247       static char *table[15] =
9248       {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9249        "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9250       };
9251       int regno;
9252       int offset = strlen (name) - 2;
9253 
9254       for (regno = 0; regno <= 14; regno++)
9255 	if (strcmp (&name[offset], table[regno]) == 0)
9256 	  return get_frame_register_unsigned (frame, regno);
9257     }
9258 
9259   /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9260      non-interworking calls to foo.  We could decode the stubs
9261      to find the target but it's easier to use the symbol table.  */
9262   namelen = strlen (name);
9263   if (name[0] == '_' && name[1] == '_'
9264       && ((namelen > 2 + strlen ("_from_thumb")
9265 	   && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9266 		       strlen ("_from_thumb")) == 0)
9267 	  || (namelen > 2 + strlen ("_from_arm")
9268 	      && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9269 			  strlen ("_from_arm")) == 0)))
9270     {
9271       char *target_name;
9272       int target_len = namelen - 2;
9273       struct minimal_symbol *minsym;
9274       struct objfile *objfile;
9275       struct obj_section *sec;
9276 
9277       if (name[namelen - 1] == 'b')
9278 	target_len -= strlen ("_from_thumb");
9279       else
9280 	target_len -= strlen ("_from_arm");
9281 
9282       target_name = alloca (target_len + 1);
9283       memcpy (target_name, name + 2, target_len);
9284       target_name[target_len] = '\0';
9285 
9286       sec = find_pc_section (pc);
9287       objfile = (sec == NULL) ? NULL : sec->objfile;
9288       minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9289       if (minsym != NULL)
9290 	return SYMBOL_VALUE_ADDRESS (minsym);
9291       else
9292 	return 0;
9293     }
9294 
9295   return 0;			/* not a stub */
9296 }
9297 
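/* Handler for the top-level "set arm" prefix command; just lists the
   available subcommands.  */
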
9298 static void
9299 set_arm_command (char *args, int from_tty)
9300 {
9301   printf_unfiltered (_("\
9302 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9303   help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9304 }
9305 
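/* Handler for the top-level "show arm" prefix command.  */
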
9306 static void
9307 show_arm_command (char *args, int from_tty)
9308 {
9309   cmd_show_list (showarmcmdlist, from_tty, "");
9310 }
9311 
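/* Update the current architecture to reflect a change in one of the
   ARM-specific settings.  */
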
9312 static void
9313 arm_update_current_architecture (void)
9314 {
9315   struct gdbarch_info info;
9316 
9317   /* If the current architecture is not ARM, we have nothing to do.  */
9318   if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9319     return;
9320 
9321   /* Update the architecture.  */
9322   gdbarch_info_init (&info);
9323 
9324   if (!gdbarch_update_p (info))
9325     internal_error (__FILE__, __LINE__, _("could not update architecture"));
9326 }
9327 
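/* Set-handler for the ARM floating-point model: record the model chosen
   by the user and update the current architecture.  */
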
9328 static void
9329 set_fp_model_sfunc (char *args, int from_tty,
9330 		    struct cmd_list_element *c)
9331 {
9332   enum arm_float_model fp_model;
9333 
9334   for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9335     if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9336       {
9337 	arm_fp_model = fp_model;
9338 	break;
9339       }
9340 
9341   if (fp_model == ARM_FLOAT_LAST)
9342     internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9343 		    current_fp_model);
9344 
9345   arm_update_current_architecture ();
9346 }
9347 
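/* Show-handler for the ARM floating-point model setting.  */
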
9348 static void
9349 show_fp_model (struct ui_file *file, int from_tty,
9350 	       struct cmd_list_element *c, const char *value)
9351 {
9352   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9353 
9354   if (arm_fp_model == ARM_FLOAT_AUTO
9355       && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9356     fprintf_filtered (file, _("\
9357 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9358 		      fp_model_strings[tdep->fp_model]);
9359   else
9360     fprintf_filtered (file, _("\
9361 The current ARM floating point model is \"%s\".\n"),
9362 		      fp_model_strings[arm_fp_model]);
9363 }
9364 
9365 static void
9366 arm_set_abi (char *args, int from_tty,
9367 	     struct cmd_list_element *c)
9368 {
9369   enum arm_abi_kind arm_abi;
9370 
9371   for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9372     if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9373       {
9374 	arm_abi_global = arm_abi;
9375 	break;
9376       }
9377 
9378   if (arm_abi == ARM_ABI_LAST)
9379     internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9380 		    arm_abi_string);
9381 
9382   arm_update_current_architecture ();
9383 }
9384 
9385 static void
9386 arm_show_abi (struct ui_file *file, int from_tty,
9387 	     struct cmd_list_element *c, const char *value)
9388 {
9389   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9390 
9391   if (arm_abi_global == ARM_ABI_AUTO
9392       && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9393     fprintf_filtered (file, _("\
9394 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9395 		      arm_abi_strings[tdep->arm_abi]);
9396   else
9397     fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9398 		      arm_abi_string);
9399 }
9400 
9401 static void
9402 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9403 			struct cmd_list_element *c, const char *value)
9404 {
9405   fprintf_filtered (file,
9406 		    _("The current execution mode assumed "
9407 		      "(when symbols are unavailable) is \"%s\".\n"),
9408 		    arm_fallback_mode_string);
9409 }
9410 
9411 static void
9412 arm_show_force_mode (struct ui_file *file, int from_tty,
9413 		     struct cmd_list_element *c, const char *value)
9414 {
9415   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9416 
9417   fprintf_filtered (file,
9418 		    _("The current execution mode assumed "
9419 		      "(even when symbols are available) is \"%s\".\n"),
9420 		    arm_force_mode_string);
9421 }
9422 
9423 /* If the user changes the register disassembly style used for info
9424    register and other commands, we have to also switch the style used
9425    in opcodes for disassembly output.  This function is run in the "set
9426    arm disassembler" command, and does that.  */
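/* For example (illustrative, assuming the "raw" register-name set is
   provided by opcodes): "set arm disassembler raw" makes both GDB's
   register output and the opcodes disassembler use the raw names
   r0-r15.  */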
9427 
9428 static void
9429 set_disassembly_style_sfunc (char *args, int from_tty,
9430 			      struct cmd_list_element *c)
9431 {
9432   set_disassembly_style ();
9433 }
9434 
9435 /* Return the ARM register name corresponding to register I.  */
9436 static const char *
9437 arm_register_name (struct gdbarch *gdbarch, int i)
9438 {
9439   const int num_regs = gdbarch_num_regs (gdbarch);
9440 
9441   if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9442       && i >= num_regs && i < num_regs + 32)
9443     {
9444       static const char *const vfp_pseudo_names[] = {
9445 	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9446 	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9447 	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9448 	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9449       };
9450 
9451       return vfp_pseudo_names[i - num_regs];
9452     }
9453 
9454   if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9455       && i >= num_regs + 32 && i < num_regs + 32 + 16)
9456     {
9457       static const char *const neon_pseudo_names[] = {
9458 	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9459 	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9460       };
9461 
9462       return neon_pseudo_names[i - num_regs - 32];
9463     }
9464 
9465   if (i >= ARRAY_SIZE (arm_register_names))
9466     /* These registers are only supported on targets which supply
9467        an XML description.  */
9468     return "";
9469 
9470   return arm_register_names[i];
9471 }
9472 
9473 static void
9474 set_disassembly_style (void)
9475 {
9476   int current;
9477 
9478   /* Find the style that the user wants.  */
9479   for (current = 0; current < num_disassembly_options; current++)
9480     if (disassembly_style == valid_disassembly_styles[current])
9481       break;
9482   gdb_assert (current < num_disassembly_options);
9483 
9484   /* Synchronize the disassembler.  */
9485   set_arm_regname_option (current);
9486 }
9487 
9488 /* Test whether the coff symbol specific value corresponds to a Thumb
9489    function.  */
9490 
9491 static int
9492 coff_sym_is_thumb (int val)
9493 {
9494   return (val == C_THUMBEXT
9495 	  || val == C_THUMBSTAT
9496 	  || val == C_THUMBEXTFUNC
9497 	  || val == C_THUMBSTATFUNC
9498 	  || val == C_THUMBLABEL);
9499 }
9500 
9501 /* arm_coff_make_msymbol_special()
9502    arm_elf_make_msymbol_special()
9503 
9504    These functions test whether the COFF or ELF symbol corresponds to
9505    an address in thumb code, and set a "special" bit in a minimal
9506    symbol to indicate that it does.  */
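/* For example, the minimal symbol for a Thumb function is marked "special"
   here so that arm_pc_is_thumb () can later classify addresses within that
   function as Thumb code.  */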
9507 
9508 static void
9509 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9510 {
9511   if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9512       == ST_BRANCH_TO_THUMB)
9513     MSYMBOL_SET_SPECIAL (msym);
9514 }
9515 
9516 static void
9517 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9518 {
9519   if (coff_sym_is_thumb (val))
9520     MSYMBOL_SET_SPECIAL (msym);
9521 }
9522 
9523 static void
9524 arm_objfile_data_free (struct objfile *objfile, void *arg)
9525 {
9526   struct arm_per_objfile *data = arg;
9527   unsigned int i;
9528 
9529   for (i = 0; i < objfile->obfd->section_count; i++)
9530     VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9531 }
9532 
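/* Record an ELF mapping symbol ($a marks the start of ARM code, $t the
   start of Thumb code, and $d the start of data) in a per-section vector,
   kept sorted by address so that later mode lookups can binary-search it.  */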
9533 static void
9534 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9535 			   asymbol *sym)
9536 {
9537   const char *name = bfd_asymbol_name (sym);
9538   struct arm_per_objfile *data;
9539   VEC(arm_mapping_symbol_s) **map_p;
9540   struct arm_mapping_symbol new_map_sym;
9541 
9542   gdb_assert (name[0] == '$');
9543   if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9544     return;
9545 
9546   data = objfile_data (objfile, arm_objfile_data_key);
9547   if (data == NULL)
9548     {
9549       data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9550 			     struct arm_per_objfile);
9551       set_objfile_data (objfile, arm_objfile_data_key, data);
9552       data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9553 					   objfile->obfd->section_count,
9554 					   VEC(arm_mapping_symbol_s) *);
9555     }
9556   map_p = &data->section_maps[bfd_get_section (sym)->index];
9557 
9558   new_map_sym.value = sym->value;
9559   new_map_sym.type = name[1];
9560 
9561   /* Assume that most mapping symbols appear in order of increasing
9562      value.  If they were randomly distributed, it would be faster to
9563      always push here and then sort at first use.  */
9564   if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9565     {
9566       struct arm_mapping_symbol *prev_map_sym;
9567 
9568       prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9569       if (prev_map_sym->value >= sym->value)
9570 	{
9571 	  unsigned int idx;
9572 	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9573 				 arm_compare_mapping_symbols);
9574 	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9575 	  return;
9576 	}
9577     }
9578 
9579   VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9580 }
9581 
9582 static void
9583 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9584 {
9585   struct gdbarch *gdbarch = get_regcache_arch (regcache);
9586   regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9587 
9588   /* If necessary, set the T bit.  */
9589   if (arm_apcs_32)
9590     {
9591       ULONGEST val, t_bit;
9592       regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9593       t_bit = arm_psr_thumb_bit (gdbarch);
9594       if (arm_pc_is_thumb (gdbarch, pc))
9595 	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9596 					val | t_bit);
9597       else
9598 	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9599 					val & ~t_bit);
9600     }
9601 }
9602 
9603 /* Read the contents of a NEON quad register, by reading from two
9604    double registers.  This is used to implement the quad pseudo
9605    registers, and for argument passing in case the quad registers are
9606    missing; vectors are passed in quad registers when using the VFP
9607    ABI, even if a NEON unit is not present.  REGNUM is the index of
9608    the quad register, in [0, 15].  */
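/* For instance, q1 is composed of d2 and d3: on a little-endian target the
   16-byte buffer receives d2 in bytes 0-7 and d3 in bytes 8-15, while on a
   big-endian target the two halves are swapped, which is what the offset
   computation below implements.  */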
9609 
9610 static enum register_status
9611 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9612 		    int regnum, gdb_byte *buf)
9613 {
9614   char name_buf[4];
9615   gdb_byte reg_buf[8];
9616   int offset, double_regnum;
9617   enum register_status status;
9618 
9619   xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9620   double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9621 					       strlen (name_buf));
9622 
9623   /* d0 is always the least significant half of q0.  */
9624   if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9625     offset = 8;
9626   else
9627     offset = 0;
9628 
9629   status = regcache_raw_read (regcache, double_regnum, reg_buf);
9630   if (status != REG_VALID)
9631     return status;
9632   memcpy (buf + offset, reg_buf, 8);
9633 
9634   offset = 8 - offset;
9635   status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9636   if (status != REG_VALID)
9637     return status;
9638   memcpy (buf + offset, reg_buf, 8);
9639 
9640   return REG_VALID;
9641 }
9642 
9643 static enum register_status
9644 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9645 		 int regnum, gdb_byte *buf)
9646 {
9647   const int num_regs = gdbarch_num_regs (gdbarch);
9648   char name_buf[4];
9649   gdb_byte reg_buf[8];
9650   int offset, double_regnum;
9651 
9652   gdb_assert (regnum >= num_regs);
9653   regnum -= num_regs;
9654 
9655   if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9656     /* Quad-precision register.  */
9657     return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9658   else
9659     {
9660       enum register_status status;
9661 
9662       /* Single-precision register.  */
9663       gdb_assert (regnum < 32);
9664 
9665       /* s0 is always the least significant half of d0.  */
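      /* For example, s5 maps to d2; on a little-endian target it occupies
	 bytes 4-7 of d2 (odd-numbered singles are the high half), while on
	 a big-endian target it occupies bytes 0-3.  */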
9666       if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9667 	offset = (regnum & 1) ? 0 : 4;
9668       else
9669 	offset = (regnum & 1) ? 4 : 0;
9670 
9671       xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9672       double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9673 						   strlen (name_buf));
9674 
9675       status = regcache_raw_read (regcache, double_regnum, reg_buf);
9676       if (status == REG_VALID)
9677 	memcpy (buf, reg_buf + offset, 4);
9678       return status;
9679     }
9680 }
9681 
9682 /* Store the contents of BUF to a NEON quad register, by writing to
9683    two double registers.  This is used to implement the quad pseudo
9684    registers, and for argument passing in case the quad registers are
9685    missing; vectors are passed in quad registers when using the VFP
9686    ABI, even if a NEON unit is not present.  REGNUM is the index
9687    of the quad register, in [0, 15].  */
9688 
9689 static void
9690 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9691 		     int regnum, const gdb_byte *buf)
9692 {
9693   char name_buf[4];
9694   int offset, double_regnum;
9695 
9696   xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9697   double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9698 					       strlen (name_buf));
9699 
9700   /* d0 is always the least significant half of q0.  */
9701   if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9702     offset = 8;
9703   else
9704     offset = 0;
9705 
9706   regcache_raw_write (regcache, double_regnum, buf + offset);
9707   offset = 8 - offset;
9708   regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9709 }
9710 
9711 static void
9712 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9713 		  int regnum, const gdb_byte *buf)
9714 {
9715   const int num_regs = gdbarch_num_regs (gdbarch);
9716   char name_buf[4];
9717   gdb_byte reg_buf[8];
9718   int offset, double_regnum;
9719 
9720   gdb_assert (regnum >= num_regs);
9721   regnum -= num_regs;
9722 
9723   if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9724     /* Quad-precision register.  */
9725     arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9726   else
9727     {
9728       /* Single-precision register.  */
9729       gdb_assert (regnum < 32);
9730 
9731       /* s0 is always the least significant half of d0.  */
9732       if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9733 	offset = (regnum & 1) ? 0 : 4;
9734       else
9735 	offset = (regnum & 1) ? 4 : 0;
9736 
9737       xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9738       double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9739 						   strlen (name_buf));
9740 
9741       regcache_raw_read (regcache, double_regnum, reg_buf);
9742       memcpy (reg_buf + offset, buf, 4);
9743       regcache_raw_write (regcache, double_regnum, reg_buf);
9744     }
9745 }
9746 
9747 static struct value *
9748 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9749 {
9750   const int *reg_p = baton;
9751   return value_of_register (*reg_p, frame);
9752 }
9753 
9754 static enum gdb_osabi
9755 arm_elf_osabi_sniffer (bfd *abfd)
9756 {
9757   unsigned int elfosabi;
9758   enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9759 
9760   elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9761 
9762   if (elfosabi == ELFOSABI_ARM)
9763     /* GNU tools use this value.  Check note sections in this case,
9764        as well.  */
9765     bfd_map_over_sections (abfd,
9766 			   generic_elf_osabi_sniff_abi_tag_sections,
9767 			   &osabi);
9768 
9769   /* Anything else will be handled by the generic ELF sniffer.  */
9770   return osabi;
9771 }
9772 
9773 static int
9774 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9775 			  struct reggroup *group)
9776 {
9777   /* The FPS register's type is INT, but it belongs to float_reggroup.
9778      Besides this, the FPS register also belongs to save_reggroup,
9779      restore_reggroup, and all_reggroup, of course.  */
9780   if (regnum == ARM_FPS_REGNUM)
9781     return (group == float_reggroup
9782 	    || group == save_reggroup
9783 	    || group == restore_reggroup
9784 	    || group == all_reggroup);
9785   else
9786     return default_register_reggroup_p (gdbarch, regnum, group);
9787 }
9788 
9789 
9790 /* For backward-compatibility we allow two 'g' packet lengths with
9791    the remote protocol depending on whether FPA registers are
9792    supplied.  M-profile targets do not have FPA registers, but some
9793    stubs already exist in the wild which use a 'g' packet which
9794    supplies them albeit with dummy values.  The packet format which
9795    includes FPA registers should be considered deprecated for
9796    M-profile targets.  */
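/* As a rough worked example (assuming INT_REGISTER_SIZE == 4,
   FP_REGISTER_SIZE == 12 and VFP_REGISTER_SIZE == 8), the three guesses
   registered below correspond to 'g' packets carrying 168, 68 and 200
   bytes of register data respectively.  */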
9797 
9798 static void
9799 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9800 {
9801   if (gdbarch_tdep (gdbarch)->is_m)
9802     {
9803       /* If we know from the executable this is an M-profile target,
9804 	 cater for remote targets whose register set layout is the
9805 	 same as the FPA layout.  */
9806       register_remote_g_packet_guess (gdbarch,
9807 				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9808 				      (16 * INT_REGISTER_SIZE)
9809 				      + (8 * FP_REGISTER_SIZE)
9810 				      + (2 * INT_REGISTER_SIZE),
9811 				      tdesc_arm_with_m_fpa_layout);
9812 
9813       /* The regular M-profile layout.  */
9814       register_remote_g_packet_guess (gdbarch,
9815 				      /* r0-r12,sp,lr,pc; xpsr */
9816 				      (16 * INT_REGISTER_SIZE)
9817 				      + INT_REGISTER_SIZE,
9818 				      tdesc_arm_with_m);
9819 
9820       /* M-profile plus M4F VFP.  */
9821       register_remote_g_packet_guess (gdbarch,
9822 				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9823 				      (16 * INT_REGISTER_SIZE)
9824 				      + (16 * VFP_REGISTER_SIZE)
9825 				      + (2 * INT_REGISTER_SIZE),
9826 				      tdesc_arm_with_m_vfp_d16);
9827     }
9828 
9829   /* Otherwise we don't have a useful guess.  */
9830 }
9831 
9832 
9833 /* Initialize the current architecture based on INFO.  If possible,
9834    re-use an architecture from ARCHES, which is a list of
9835    architectures already created during this debugging session.
9836 
9837    Called e.g. at program startup, when reading a core file, and when
9838    reading a binary file.  */
9839 
9840 static struct gdbarch *
9841 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9842 {
9843   struct gdbarch_tdep *tdep;
9844   struct gdbarch *gdbarch;
9845   struct gdbarch_list *best_arch;
9846   enum arm_abi_kind arm_abi = arm_abi_global;
9847   enum arm_float_model fp_model = arm_fp_model;
9848   struct tdesc_arch_data *tdesc_data = NULL;
9849   int i, is_m = 0;
9850   int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9851   int have_neon = 0;
9852   int have_fpa_registers = 1;
9853   const struct target_desc *tdesc = info.target_desc;
9854 
9855   /* If we have an object to base this architecture on, try to determine
9856      its ABI.  */
9857 
9858   if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9859     {
9860       int ei_osabi, e_flags;
9861 
9862       switch (bfd_get_flavour (info.abfd))
9863 	{
9864 	case bfd_target_aout_flavour:
9865 	  /* Assume it's an old APCS-style ABI.  */
9866 	  arm_abi = ARM_ABI_APCS;
9867 	  break;
9868 
9869 	case bfd_target_coff_flavour:
9870 	  /* Assume it's an old APCS-style ABI.  */
9871 	  /* XXX WinCE?  */
9872 	  arm_abi = ARM_ABI_APCS;
9873 	  break;
9874 
9875 	case bfd_target_elf_flavour:
9876 	  ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9877 	  e_flags = elf_elfheader (info.abfd)->e_flags;
9878 
9879 	  if (ei_osabi == ELFOSABI_ARM)
9880 	    {
9881 	      /* GNU tools used to use this value, but do not for EABI
9882 		 objects.  There's nowhere to tag an EABI version
9883 		 anyway, so assume APCS.  */
9884 	      arm_abi = ARM_ABI_APCS;
9885 	    }
9886 	  else if (ei_osabi == ELFOSABI_NONE)
9887 	    {
9888 	      int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9889 	      int attr_arch, attr_profile;
9890 
9891 	      switch (eabi_ver)
9892 		{
9893 		case EF_ARM_EABI_UNKNOWN:
9894 		  /* Assume GNU tools.  */
9895 		  arm_abi = ARM_ABI_APCS;
9896 		  break;
9897 
9898 		case EF_ARM_EABI_VER4:
9899 		case EF_ARM_EABI_VER5:
9900 		  arm_abi = ARM_ABI_AAPCS;
9901 		  /* EABI binaries default to VFP float ordering.
9902 		     They may also contain build attributes that can
9903 		     be used to identify if the VFP argument-passing
9904 		     ABI is in use.  */
9905 		  if (fp_model == ARM_FLOAT_AUTO)
9906 		    {
9907 #ifdef HAVE_ELF
9908 		      switch (bfd_elf_get_obj_attr_int (info.abfd,
9909 							OBJ_ATTR_PROC,
9910 							Tag_ABI_VFP_args))
9911 			{
9912 			case 0:
9913 			  /* "The user intended FP parameter/result
9914 			     passing to conform to AAPCS, base
9915 			     variant".  */
9916 			  fp_model = ARM_FLOAT_SOFT_VFP;
9917 			  break;
9918 			case 1:
9919 			  /* "The user intended FP parameter/result
9920 			     passing to conform to AAPCS, VFP
9921 			     variant".  */
9922 			  fp_model = ARM_FLOAT_VFP;
9923 			  break;
9924 			case 2:
9925 			  /* "The user intended FP parameter/result
9926 			     passing to conform to tool chain-specific
9927 			     conventions" - we don't know any such
9928 			     conventions, so leave it as "auto".  */
9929 			  break;
9930 			default:
9931 			  /* Attribute value not mentioned in the
9932 			     October 2008 ABI, so leave it as
9933 			     "auto".  */
9934 			  break;
9935 			}
9936 #else
9937 		      fp_model = ARM_FLOAT_SOFT_VFP;
9938 #endif
9939 		    }
9940 		  break;
9941 
9942 		default:
9943 		  /* Leave it as "auto".  */
9944 		  warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9945 		  break;
9946 		}
9947 
9948 #ifdef HAVE_ELF
9949 	      /* Detect M-profile programs.  This only works if the
9950 		 executable file includes build attributes; GCC does
9951 		 copy them to the executable, but e.g. RealView does
9952 		 not.  */
9953 	      attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9954 						    Tag_CPU_arch);
9955 	      attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9956 						       OBJ_ATTR_PROC,
9957 						       Tag_CPU_arch_profile);
9958 	      /* GCC specifies the profile for v6-M; RealView only
9959 		 specifies the profile for architectures starting with
9960 		 V7 (as opposed to architectures with a tag
9961 		 numerically greater than TAG_CPU_ARCH_V7).  */
9962 	      if (!tdesc_has_registers (tdesc)
9963 		  && (attr_arch == TAG_CPU_ARCH_V6_M
9964 		      || attr_arch == TAG_CPU_ARCH_V6S_M
9965 		      || attr_profile == 'M'))
9966 		is_m = 1;
9967 #endif
9968 	    }
9969 
9970 	  if (fp_model == ARM_FLOAT_AUTO)
9971 	    {
9972 	      int e_flags = elf_elfheader (info.abfd)->e_flags;
9973 
9974 	      switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9975 		{
9976 		case 0:
9977 		  /* Leave it as "auto".  Strictly speaking this case
9978 		     means FPA, but almost nobody uses that now, and
9979 		     many toolchains fail to set the appropriate bits
9980 		     for the floating-point model they use.  */
9981 		  break;
9982 		case EF_ARM_SOFT_FLOAT:
9983 		  fp_model = ARM_FLOAT_SOFT_FPA;
9984 		  break;
9985 		case EF_ARM_VFP_FLOAT:
9986 		  fp_model = ARM_FLOAT_VFP;
9987 		  break;
9988 		case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9989 		  fp_model = ARM_FLOAT_SOFT_VFP;
9990 		  break;
9991 		}
9992 	    }
9993 
9994 	  if (e_flags & EF_ARM_BE8)
9995 	    info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9996 
9997 	  break;
9998 
9999 	default:
10000 	  /* Leave it as "auto".  */
10001 	  break;
10002 	}
10003     }
10004 
10005   /* Check any target description for validity.  */
10006   if (tdesc_has_registers (tdesc))
10007     {
10008       /* For most registers we require GDB's default names; but also allow
10009 	 the numeric names for sp / lr / pc, as a convenience.  */
10010       static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10011       static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10012       static const char *const arm_pc_names[] = { "r15", "pc", NULL };
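      /* A minimal core feature in an XML target description might look
	 like this (sketch only; names and order follow arm_register_names):
	   <feature name="org.gnu.gdb.arm.core">
	     <reg name="r0" bitsize="32"/> ... <reg name="r12" bitsize="32"/>
	     <reg name="sp" bitsize="32"/> <reg name="lr" bitsize="32"/>
	     <reg name="pc" bitsize="32"/> <reg name="cpsr" bitsize="32"/>
	   </feature>  */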
10013 
10014       const struct tdesc_feature *feature;
10015       int valid_p;
10016 
10017       feature = tdesc_find_feature (tdesc,
10018 				    "org.gnu.gdb.arm.core");
10019       if (feature == NULL)
10020 	{
10021 	  feature = tdesc_find_feature (tdesc,
10022 					"org.gnu.gdb.arm.m-profile");
10023 	  if (feature == NULL)
10024 	    return NULL;
10025 	  else
10026 	    is_m = 1;
10027 	}
10028 
10029       tdesc_data = tdesc_data_alloc ();
10030 
10031       valid_p = 1;
10032       for (i = 0; i < ARM_SP_REGNUM; i++)
10033 	valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10034 					    arm_register_names[i]);
10035       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10036 						  ARM_SP_REGNUM,
10037 						  arm_sp_names);
10038       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10039 						  ARM_LR_REGNUM,
10040 						  arm_lr_names);
10041       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10042 						  ARM_PC_REGNUM,
10043 						  arm_pc_names);
10044       if (is_m)
10045 	valid_p &= tdesc_numbered_register (feature, tdesc_data,
10046 					    ARM_PS_REGNUM, "xpsr");
10047       else
10048 	valid_p &= tdesc_numbered_register (feature, tdesc_data,
10049 					    ARM_PS_REGNUM, "cpsr");
10050 
10051       if (!valid_p)
10052 	{
10053 	  tdesc_data_cleanup (tdesc_data);
10054 	  return NULL;
10055 	}
10056 
10057       feature = tdesc_find_feature (tdesc,
10058 				    "org.gnu.gdb.arm.fpa");
10059       if (feature != NULL)
10060 	{
10061 	  valid_p = 1;
10062 	  for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10063 	    valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10064 						arm_register_names[i]);
10065 	  if (!valid_p)
10066 	    {
10067 	      tdesc_data_cleanup (tdesc_data);
10068 	      return NULL;
10069 	    }
10070 	}
10071       else
10072 	have_fpa_registers = 0;
10073 
10074       feature = tdesc_find_feature (tdesc,
10075 				    "org.gnu.gdb.xscale.iwmmxt");
10076       if (feature != NULL)
10077 	{
10078 	  static const char *const iwmmxt_names[] = {
10079 	    "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10080 	    "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10081 	    "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10082 	    "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10083 	  };
10084 
10085 	  valid_p = 1;
10086 	  for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10087 	    valid_p
10088 	      &= tdesc_numbered_register (feature, tdesc_data, i,
10089 					  iwmmxt_names[i - ARM_WR0_REGNUM]);
10090 
10091 	  /* Check for the control registers, but do not fail if they
10092 	     are missing.  */
10093 	  for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10094 	    tdesc_numbered_register (feature, tdesc_data, i,
10095 				     iwmmxt_names[i - ARM_WR0_REGNUM]);
10096 
10097 	  for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10098 	    valid_p
10099 	      &= tdesc_numbered_register (feature, tdesc_data, i,
10100 					  iwmmxt_names[i - ARM_WR0_REGNUM]);
10101 
10102 	  if (!valid_p)
10103 	    {
10104 	      tdesc_data_cleanup (tdesc_data);
10105 	      return NULL;
10106 	    }
10107 	}
10108 
10109       /* If we have a VFP unit, check whether the single precision registers
10110 	 are present.  If not, then we will synthesize them as pseudo
10111 	 registers.  */
10112       feature = tdesc_find_feature (tdesc,
10113 				    "org.gnu.gdb.arm.vfp");
10114       if (feature != NULL)
10115 	{
10116 	  static const char *const vfp_double_names[] = {
10117 	    "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10118 	    "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10119 	    "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10120 	    "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10121 	  };
10122 
10123 	  /* Require the double precision registers.  There must be either
10124 	     16 or 32.  */
10125 	  valid_p = 1;
10126 	  for (i = 0; i < 32; i++)
10127 	    {
10128 	      valid_p &= tdesc_numbered_register (feature, tdesc_data,
10129 						  ARM_D0_REGNUM + i,
10130 						  vfp_double_names[i]);
10131 	      if (!valid_p)
10132 		break;
10133 	    }
10134 	  if (!valid_p && i == 16)
10135 	    valid_p = 1;
10136 
10137 	  /* Also require FPSCR.  */
10138 	  valid_p &= tdesc_numbered_register (feature, tdesc_data,
10139 					      ARM_FPSCR_REGNUM, "fpscr");
10140 	  if (!valid_p)
10141 	    {
10142 	      tdesc_data_cleanup (tdesc_data);
10143 	      return NULL;
10144 	    }
10145 
10146 	  if (tdesc_unnumbered_register (feature, "s0") == 0)
10147 	    have_vfp_pseudos = 1;
10148 
10149 	  have_vfp_registers = 1;
10150 
10151 	  /* If we have VFP, also check for NEON.  The architecture allows
10152 	     NEON without VFP (integer vector operations only), but GDB
10153 	     does not support that.  */
10154 	  feature = tdesc_find_feature (tdesc,
10155 					"org.gnu.gdb.arm.neon");
10156 	  if (feature != NULL)
10157 	    {
10158 	      /* NEON requires 32 double-precision registers.  */
10159 	      if (i != 32)
10160 		{
10161 		  tdesc_data_cleanup (tdesc_data);
10162 		  return NULL;
10163 		}
10164 
10165 	      /* If there are quad registers defined by the stub, use
10166 		 their type; otherwise (normally) provide them with
10167 		 the default type.  */
10168 	      if (tdesc_unnumbered_register (feature, "q0") == 0)
10169 		have_neon_pseudos = 1;
10170 
10171 	      have_neon = 1;
10172 	    }
10173 	}
10174     }
10175 
10176   /* If there is already a candidate, use it.  */
10177   for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10178        best_arch != NULL;
10179        best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10180     {
10181       if (arm_abi != ARM_ABI_AUTO
10182 	  && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10183 	continue;
10184 
10185       if (fp_model != ARM_FLOAT_AUTO
10186 	  && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10187 	continue;
10188 
10189       /* There are various other properties in tdep that we do not
10190 	 need to check here: those derived from a target description,
10191 	 since gdbarches with a different target description are
10192 	 automatically disqualified.  */
10193 
10194       /* Do check is_m, though, since it might come from the binary.  */
10195       if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10196 	continue;
10197 
10198       /* Found a match.  */
10199       break;
10200     }
10201 
10202   if (best_arch != NULL)
10203     {
10204       if (tdesc_data != NULL)
10205 	tdesc_data_cleanup (tdesc_data);
10206       return best_arch->gdbarch;
10207     }
10208 
10209   tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10210   gdbarch = gdbarch_alloc (&info, tdep);
10211 
10212   /* Record additional information about the architecture we are defining.
10213      These are gdbarch discriminators, like the OSABI.  */
10214   tdep->arm_abi = arm_abi;
10215   tdep->fp_model = fp_model;
10216   tdep->is_m = is_m;
10217   tdep->have_fpa_registers = have_fpa_registers;
10218   tdep->have_vfp_registers = have_vfp_registers;
10219   tdep->have_vfp_pseudos = have_vfp_pseudos;
10220   tdep->have_neon_pseudos = have_neon_pseudos;
10221   tdep->have_neon = have_neon;
10222 
10223   arm_register_g_packet_guesses (gdbarch);
10224 
10225   /* Breakpoints.  */
10226   switch (info.byte_order_for_code)
10227     {
10228     case BFD_ENDIAN_BIG:
10229       tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10230       tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10231       tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10232       tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10233 
10234       break;
10235 
10236     case BFD_ENDIAN_LITTLE:
10237       tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10238       tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10239       tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10240       tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10241 
10242       break;
10243 
10244     default:
10245       internal_error (__FILE__, __LINE__,
10246 		      _("arm_gdbarch_init: bad byte order for float format"));
10247     }
10248 
10249   /* On ARM targets char defaults to unsigned.  */
10250   set_gdbarch_char_signed (gdbarch, 0);
10251 
10252   /* Note: for displaced stepping, this includes the breakpoint, and one word
10253      of additional scratch space.  This setting isn't used for anything besides
10254      displaced stepping at present.  */
10255   set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10256 
10257   /* This should be low enough for everything.  */
10258   tdep->lowest_pc = 0x20;
10259   tdep->jb_pc = -1;	/* Longjump support not enabled by default.  */
10260 
10261   /* The default, for both APCS and AAPCS, is to return small
10262      structures in registers.  */
10263   tdep->struct_return = reg_struct_return;
10264 
10265   set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10266   set_gdbarch_frame_align (gdbarch, arm_frame_align);
10267 
10268   set_gdbarch_write_pc (gdbarch, arm_write_pc);
10269 
10270   /* Frame handling.  */
10271   set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10272   set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10273   set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10274 
10275   frame_base_set_default (gdbarch, &arm_normal_base);
10276 
10277   /* Address manipulation.  */
10278   set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10279 
10280   /* Advance PC across function entry code.  */
10281   set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10282 
10283   /* Detect whether PC is in function epilogue.  */
10284   set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10285 
10286   /* Skip trampolines.  */
10287   set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10288 
10289   /* The stack grows downward.  */
10290   set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10291 
10292   /* Breakpoint manipulation.  */
10293   set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10294   set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10295 					 arm_remote_breakpoint_from_pc);
10296 
10297   /* Information about registers, etc.  */
10298   set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10299   set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10300   set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10301   set_gdbarch_register_type (gdbarch, arm_register_type);
10302   set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10303 
10304   /* This "info float" is FPA-specific.  Use the generic version if we
10305      do not have FPA.  */
10306   if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10307     set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10308 
10309   /* Internal <-> external register number maps.  */
10310   set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10311   set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10312 
10313   set_gdbarch_register_name (gdbarch, arm_register_name);
10314 
10315   /* Returning results.  */
10316   set_gdbarch_return_value (gdbarch, arm_return_value);
10317 
10318   /* Disassembly.  */
10319   set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10320 
10321   /* Minsymbol frobbing.  */
10322   set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10323   set_gdbarch_coff_make_msymbol_special (gdbarch,
10324 					 arm_coff_make_msymbol_special);
10325   set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10326 
10327   /* Thumb-2 IT block support.  */
10328   set_gdbarch_adjust_breakpoint_address (gdbarch,
10329 					 arm_adjust_breakpoint_address);
10330 
10331   /* Virtual tables.  */
10332   set_gdbarch_vbit_in_delta (gdbarch, 1);
10333 
10334   /* Hook in the ABI-specific overrides, if they have been registered.  */
10335   gdbarch_init_osabi (info, gdbarch);
10336 
10337   dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10338 
10339   /* Add some default predicates.  */
10340   if (is_m)
10341     frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10342   frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10343   dwarf2_append_unwinders (gdbarch);
10344   frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10345   frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10346 
10347   /* Now we have tuned the configuration, set a few final things,
10348      based on what the OS ABI has told us.  */
10349 
10350   /* If the ABI is not otherwise marked, assume the old GNU APCS.  EABI
10351      binaries are always marked.  */
10352   if (tdep->arm_abi == ARM_ABI_AUTO)
10353     tdep->arm_abi = ARM_ABI_APCS;
10354 
10355   /* Watchpoints are not steppable.  */
10356   set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10357 
10358   /* We used to default to FPA for generic ARM, but almost nobody
10359      uses that now, and we now provide a way for the user to force
10360      the model.  So default to the most useful variant.  */
10361   if (tdep->fp_model == ARM_FLOAT_AUTO)
10362     tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10363 
10364   if (tdep->jb_pc >= 0)
10365     set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10366 
10367   /* Floating point sizes and format.  */
10368   set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10369   if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10370     {
10371       set_gdbarch_double_format
10372 	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
10373       set_gdbarch_long_double_format
10374 	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
10375     }
10376   else
10377     {
10378       set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10379       set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10380     }
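  /* Illustrative only: with the mixed-endian layout selected above, on a
     little-endian target the double 1.0 (IEEE image 0x3ff0000000000000) is
     stored as the bytes 00 00 f0 3f 00 00 00 00 (big-endian word order,
     little-endian bytes within each word), whereas the plain little-endian
     IEEE encoding would be 00 00 00 00 00 00 f0 3f.  */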
10381 
10382   if (have_vfp_pseudos)
10383     {
10384       /* NOTE: These are the only pseudo registers used by
10385 	 the ARM target at the moment.  If more are added, a
10386 	 little more care in numbering will be needed.  */
10387 
10388       int num_pseudos = 32;
10389       if (have_neon_pseudos)
10390 	num_pseudos += 16;
10391       set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10392       set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10393       set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10394     }
10395 
10396   if (tdesc_data)
10397     {
10398       set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10399 
10400       tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10401 
10402       /* Override tdesc_register_type to adjust the types of VFP
10403 	 registers for NEON.  */
10404       set_gdbarch_register_type (gdbarch, arm_register_type);
10405     }
10406 
10407   /* Add standard register aliases.  We add aliases even for those
10408      names which are used by the current architecture - it's simpler,
10409      and does no harm, since nothing ever lists user registers.  */
10410   for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10411     user_reg_add (gdbarch, arm_register_aliases[i].name,
10412 		  value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10413 
10414   return gdbarch;
10415 }
10416 
10417 static void
10418 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10419 {
10420   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10421 
10422   if (tdep == NULL)
10423     return;
10424 
10425   fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10426 		      (unsigned long) tdep->lowest_pc);
10427 }
10428 
10429 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10430 
10431 void
10432 _initialize_arm_tdep (void)
10433 {
10434   struct ui_file *stb;
10435   long length;
10436   struct cmd_list_element *new_set, *new_show;
10437   const char *setname;
10438   const char *setdesc;
10439   const char *const *regnames;
10440   int numregs, i, j;
10441   static char *helptext;
10442   char regdesc[1024], *rdptr = regdesc;
10443   size_t rest = sizeof (regdesc);
10444 
10445   gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10446 
10447   arm_objfile_data_key
10448     = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10449 
10450   /* Add ourselves to objfile event chain.  */
10451   observer_attach_new_objfile (arm_exidx_new_objfile);
10452   arm_exidx_data_key
10453     = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10454 
10455   /* Register an ELF OS ABI sniffer for ARM binaries.  */
10456   gdbarch_register_osabi_sniffer (bfd_arch_arm,
10457 				  bfd_target_elf_flavour,
10458 				  arm_elf_osabi_sniffer);
10459 
10460   /* Initialize the standard target descriptions.  */
10461   initialize_tdesc_arm_with_m ();
10462   initialize_tdesc_arm_with_m_fpa_layout ();
10463   initialize_tdesc_arm_with_m_vfp_d16 ();
10464   initialize_tdesc_arm_with_iwmmxt ();
10465   initialize_tdesc_arm_with_vfpv2 ();
10466   initialize_tdesc_arm_with_vfpv3 ();
10467   initialize_tdesc_arm_with_neon ();
10468 
10469   /* Get the number of possible sets of register names defined in opcodes.  */
10470   num_disassembly_options = get_arm_regname_num_options ();
10471 
10472   /* Add root prefix command for all "set arm"/"show arm" commands.  */
10473   add_prefix_cmd ("arm", no_class, set_arm_command,
10474 		  _("Various ARM-specific commands."),
10475 		  &setarmcmdlist, "set arm ", 0, &setlist);
10476 
10477   add_prefix_cmd ("arm", no_class, show_arm_command,
10478 		  _("Various ARM-specific commands."),
10479 		  &showarmcmdlist, "show arm ", 0, &showlist);
10480 
10481   /* Sync the opcode insn printer with our register viewer.  */
10482   parse_arm_disassembler_option ("reg-names-std");
10483 
10484   /* Initialize the array that will be passed to
10485      add_setshow_enum_cmd().  */
10486   valid_disassembly_styles
10487     = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10488   for (i = 0; i < num_disassembly_options; i++)
10489     {
10490       numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10491       valid_disassembly_styles[i] = setname;
10492       length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10493       rdptr += length;
10494       rest -= length;
10495       /* When we find the default names, tell the disassembler to use
10496 	 them.  */
10497       if (!strcmp (setname, "std"))
10498 	{
10499           disassembly_style = setname;
10500           set_arm_regname_option (i);
10501 	}
10502     }
10503   /* Mark the end of valid options.  */
10504   valid_disassembly_styles[num_disassembly_options] = NULL;
10505 
10506   /* Create the help text.  */
10507   stb = mem_fileopen ();
10508   fprintf_unfiltered (stb, "%s%s%s",
10509 		      _("The valid values are:\n"),
10510 		      regdesc,
10511 		      _("The default is \"std\"."));
10512   helptext = ui_file_xstrdup (stb, NULL);
10513   ui_file_delete (stb);
10514 
10515   add_setshow_enum_cmd("disassembler", no_class,
10516 		       valid_disassembly_styles, &disassembly_style,
10517 		       _("Set the disassembly style."),
10518 		       _("Show the disassembly style."),
10519 		       helptext,
10520 		       set_disassembly_style_sfunc,
10521 		       NULL, /* FIXME: i18n: The disassembly style is
10522 				\"%s\".  */
10523 		       &setarmcmdlist, &showarmcmdlist);
10524 
10525   add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10526 			   _("Set usage of ARM 32-bit mode."),
10527 			   _("Show usage of ARM 32-bit mode."),
10528 			   _("When off, a 26-bit PC will be used."),
10529 			   NULL,
10530 			   NULL, /* FIXME: i18n: Usage of ARM 32-bit
10531 				    mode is %s.  */
10532 			   &setarmcmdlist, &showarmcmdlist);
10533 
10534   /* Add a command to allow the user to force the FPU model.  */
10535   add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10536 			_("Set the floating point type."),
10537 			_("Show the floating point type."),
10538 			_("auto - Determine the FP type from the OS-ABI.\n\
10539 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10540 fpa - FPA co-processor (GCC compiled).\n\
10541 softvfp - Software FP with pure-endian doubles.\n\
10542 vfp - VFP co-processor."),
10543 			set_fp_model_sfunc, show_fp_model,
10544 			&setarmcmdlist, &showarmcmdlist);
10545 
10546   /* Add a command to allow the user to force the ABI.  */
10547   add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10548 			_("Set the ABI."),
10549 			_("Show the ABI."),
10550 			NULL, arm_set_abi, arm_show_abi,
10551 			&setarmcmdlist, &showarmcmdlist);
10552 
10553   /* Add two commands to allow the user to force the assumed
10554      execution mode.  */
10555   add_setshow_enum_cmd ("fallback-mode", class_support,
10556 			arm_mode_strings, &arm_fallback_mode_string,
10557 			_("Set the mode assumed when symbols are unavailable."),
10558 			_("Show the mode assumed when symbols are unavailable."),
10559 			NULL, NULL, arm_show_fallback_mode,
10560 			&setarmcmdlist, &showarmcmdlist);
10561   add_setshow_enum_cmd ("force-mode", class_support,
10562 			arm_mode_strings, &arm_force_mode_string,
10563 			_("Set the mode assumed even when symbols are available."),
10564 			_("Show the mode assumed even when symbols are available."),
10565 			NULL, NULL, arm_show_force_mode,
10566 			&setarmcmdlist, &showarmcmdlist);
10567 
10568   /* Debugging flag.  */
10569   add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10570 			   _("Set ARM debugging."),
10571 			   _("Show ARM debugging."),
10572 			   _("When on, arm-specific debugging is enabled."),
10573 			   NULL,
10574 			   NULL, /* FIXME: i18n: "ARM debugging is %s.  */
10575 			   &setdebuglist, &showdebuglist);
10576 }
10577 
10578 /* ARM-reversible process record data structures.  */
10579 
10580 #define ARM_INSN_SIZE_BYTES 4
10581 #define THUMB_INSN_SIZE_BYTES 2
10582 #define THUMB2_INSN_SIZE_BYTES 4
10583 
10584 
10585 #define INSN_S_L_BIT_NUM 20
10586 
10587 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10588         do  \
10589           { \
10590             unsigned int reg_len = LENGTH; \
10591             if (reg_len) \
10592               { \
10593                 REGS = XNEWVEC (uint32_t, reg_len); \
10594                 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10595               } \
10596           } \
10597         while (0)
10598 
10599 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10600         do  \
10601           { \
10602             unsigned int mem_len = LENGTH; \
10603             if (mem_len) \
10604             { \
10605               MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
10606               memcpy(&MEMS->len, &RECORD_BUF[0], \
10607                      sizeof(struct arm_mem_r) * LENGTH); \
10608             } \
10609           } \
10610           while (0)
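/* Typical use in the record handlers below (sketch):

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);

   where record_buf and record_buf_mem are local uint32_t arrays filled in
   by the per-instruction decode routines.  */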
10611 
10612 /* Checks whether the insn has already been recorded (boolean expression).  */
10613 #define INSN_RECORDED(ARM_RECORD) \
10614         (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10615 
10616 /* ARM memory record structure.  */
10617 struct arm_mem_r
10618 {
10619   uint32_t len;    /* Record length.  */
10620   CORE_ADDR addr;  /* Memory address.  */
10621 };
10622 
10623 /* An ARM instruction record contains the opcode and execution state of
10624    the current insn (filled in before entry to decode_insn()), and the
10625    lists of to-be-modified registers and memory blocks (filled in on
10626    return from decode_insn()).  */
10627 
10628 typedef struct insn_decode_record_t
10629 {
10630   struct gdbarch *gdbarch;
10631   struct regcache *regcache;
10632   CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
10633   uint32_t arm_insn;            /* Should accommodate thumb.  */
10634   uint32_t cond;                /* Condition code.  */
10635   uint32_t opcode;              /* Insn opcode.  */
10636   uint32_t decode;              /* Insn decode bits.  */
10637   uint32_t mem_rec_count;       /* No of mem records.  */
10638   uint32_t reg_rec_count;       /* No of reg records.  */
10639   uint32_t *arm_regs;           /* Registers to be saved for this record.  */
10640   struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
10641 } insn_decode_record;
10642 
10643 
10644 /* Checks ARM SBZ (should-be-zero) and SBO (should-be-one) mandatory fields.  */
10645 
10646 static int
10647 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10648 {
10649   uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10650 
10651   if (!len)
10652     return 1;
10653 
10654   if (!sbo)
10655     ones = ~ones;
10656 
10657   while (ones)
10658     {
10659       if (!(ones & sbo))
10660         {
10661           return 0;
10662         }
10663       ones = ones >> 1;
10664     }
10665   return 1;
10666 }
10667 
10668 typedef enum
10669 {
10670   ARM_RECORD_STRH=1,
10671   ARM_RECORD_STRD
10672 } arm_record_strx_t;
10673 
10674 typedef enum
10675 {
10676   ARM_RECORD=1,
10677   THUMB_RECORD,
10678   THUMB2_RECORD
10679 } record_type_t;
10680 
10681 
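/* Record the memory locations (and, for the pre/post-indexed forms, the
   updated base register Rn) written by the misc store instructions
   (STRH/STRD-style addressing modes).  RECORD_BUF_MEM receives
   (length, address) pairs and RECORD_BUF the changed register numbers;
   the corresponding counts are stored in ARM_INSN_R.  */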
10682 static int
10683 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10684                  uint32_t *record_buf_mem, arm_record_strx_t str_type)
10685 {
10686 
10687   struct regcache *reg_cache = arm_insn_r->regcache;
10688   ULONGEST u_regval[2]= {0};
10689 
10690   uint32_t reg_src1 = 0, reg_src2 = 0;
10691   uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10692   uint32_t opcode1 = 0;
10693 
10694   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10695   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10696   opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10697 
10698 
10699   if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10700     {
10701       /* 1) Handle misc store, immediate offset.  */
10702       immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10703       immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10704       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10705       regcache_raw_read_unsigned (reg_cache, reg_src1,
10706                                   &u_regval[0]);
10707       if (ARM_PC_REGNUM == reg_src1)
10708         {
10709           /* If R15 was used as Rn, the value used is the current PC plus 8.  */
10710           u_regval[0] = u_regval[0] + 8;
10711         }
10712       offset_8 = (immed_high << 4) | immed_low;
10713       /* Calculate target store address.  */
10714       if (14 == arm_insn_r->opcode)
10715         {
10716           tgt_mem_addr = u_regval[0] + offset_8;
10717         }
10718       else
10719         {
10720           tgt_mem_addr = u_regval[0] - offset_8;
10721         }
10722       if (ARM_RECORD_STRH == str_type)
10723         {
10724           record_buf_mem[0] = 2;
10725           record_buf_mem[1] = tgt_mem_addr;
10726           arm_insn_r->mem_rec_count = 1;
10727         }
10728       else if (ARM_RECORD_STRD == str_type)
10729         {
10730           record_buf_mem[0] = 4;
10731           record_buf_mem[1] = tgt_mem_addr;
10732           record_buf_mem[2] = 4;
10733           record_buf_mem[3] = tgt_mem_addr + 4;
10734           arm_insn_r->mem_rec_count = 2;
10735         }
10736     }
10737   else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10738     {
10739       /* 2) Store, register offset.  */
10740       /* Get Rm.  */
10741       reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10742       /* Get Rn.  */
10743       reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10744       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10745       regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10746       if (15 == reg_src2)
10747         {
10748           /* If R15 was used as Rn, the value used is the current PC plus 8.  */
10749           u_regval[0] = u_regval[0] + 8;
10750         }
10751       /* Calculate target store address, Rn +/- Rm, register offset.  */
10752       if (12 == arm_insn_r->opcode)
10753         {
10754           tgt_mem_addr = u_regval[0] + u_regval[1];
10755         }
10756       else
10757         {
10758           tgt_mem_addr = u_regval[1] - u_regval[0];
10759         }
10760       if (ARM_RECORD_STRH == str_type)
10761         {
10762           record_buf_mem[0] = 2;
10763           record_buf_mem[1] = tgt_mem_addr;
10764           arm_insn_r->mem_rec_count = 1;
10765         }
10766       else if (ARM_RECORD_STRD == str_type)
10767         {
10768           record_buf_mem[0] = 4;
10769           record_buf_mem[1] = tgt_mem_addr;
10770           record_buf_mem[2] = 4;
10771           record_buf_mem[3] = tgt_mem_addr + 4;
10772           arm_insn_r->mem_rec_count = 2;
10773         }
10774     }
10775   else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10776            || 2 == arm_insn_r->opcode  || 6 == arm_insn_r->opcode)
10777     {
10778       /* 3) Store, immediate pre-indexed.  */
10779       /* 5) Store, immediate post-indexed.  */
10780       immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10781       immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10782       offset_8 = (immed_high << 4) | immed_low;
10783       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10784       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10785       /* Calculate target store address, Rn +/- immediate offset.  */
10786       if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10787         {
10788           tgt_mem_addr = u_regval[0] + offset_8;
10789         }
10790       else
10791         {
10792           tgt_mem_addr = u_regval[0] - offset_8;
10793         }
10794       if (ARM_RECORD_STRH == str_type)
10795         {
10796           record_buf_mem[0] = 2;
10797           record_buf_mem[1] = tgt_mem_addr;
10798           arm_insn_r->mem_rec_count = 1;
10799         }
10800       else if (ARM_RECORD_STRD == str_type)
10801         {
10802           record_buf_mem[0] = 4;
10803           record_buf_mem[1] = tgt_mem_addr;
10804           record_buf_mem[2] = 4;
10805           record_buf_mem[3] = tgt_mem_addr + 4;
10806           arm_insn_r->mem_rec_count = 2;
10807         }
10808       /* Record Rn also as it changes.  */
10809       *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10810       arm_insn_r->reg_rec_count = 1;
10811     }
10812   else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10813            || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10814     {
10815       /* 4) Store, register pre-indexed.  */
10816       /* 6) Store, register post-indexed.  */
10817       reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10818       reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10819       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10820       regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10821       /* Calculate target store address, Rn +/- Rm, register offset.  */
10822       if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10823         {
10824           tgt_mem_addr = u_regval[0] + u_regval[1];
10825         }
10826       else
10827         {
10828           tgt_mem_addr = u_regval[1] - u_regval[0];
10829         }
10830       if (ARM_RECORD_STRH == str_type)
10831         {
10832           record_buf_mem[0] = 2;
10833           record_buf_mem[1] = tgt_mem_addr;
10834           arm_insn_r->mem_rec_count = 1;
10835         }
10836       else if (ARM_RECORD_STRD == str_type)
10837         {
10838           record_buf_mem[0] = 4;
10839           record_buf_mem[1] = tgt_mem_addr;
10840           record_buf_mem[2] = 4;
10841           record_buf_mem[3] = tgt_mem_addr + 4;
10842           arm_insn_r->mem_rec_count = 2;
10843         }
10844       /* Record Rn also as it changes.  */
10845       *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10846       arm_insn_r->reg_rec_count = 1;
10847     }
10848   return 0;
10849 }
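
/* Illustrative sketch, not part of the decoder: record_buf_mem as
   filled above is a flat list of (length, address) pairs and
   mem_rec_count is the number of pairs, so an STRD to address A
   yields {4, A, 4, A + 4}.  MEM_ALLOC hands the pairs to the record
   core, which saves the old memory contents roughly as below, where
   "save_old_memory (addr, len)" is a hypothetical helper shown only
   to make the layout explicit:

     uint32_t i;

     for (i = 0; i < arm_insn_r->mem_rec_count; i++)
       save_old_memory (record_buf_mem[2 * i + 1], record_buf_mem[2 * i]);
*/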
10850 
10851 /* Handling ARM extension space insns.  */
10852 
10853 static int
10854 arm_record_extension_space (insn_decode_record *arm_insn_r)
10855 {
10856   uint32_t ret = 0;  /* Return value: -1: record failure; 0: success.  */
10857   uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10858   uint32_t record_buf[8], record_buf_mem[8];
10859   uint32_t reg_src1 = 0;
10860   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10861   struct regcache *reg_cache = arm_insn_r->regcache;
10862   ULONGEST u_regval = 0;
10863 
10864   gdb_assert (!INSN_RECORDED(arm_insn_r));
10865   /* Handle unconditional insn extension space.  */
10866 
10867   opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10868   opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10869   if (arm_insn_r->cond)
10870     {
10871       /* PLD has no effect on architectural state, it just affects
10872          the caches.  */
10873       if (5 == ((opcode1 & 0xE0) >> 5))
10874         {
10875           /* BLX(1) */
10876           record_buf[0] = ARM_PS_REGNUM;
10877           record_buf[1] = ARM_LR_REGNUM;
10878           arm_insn_r->reg_rec_count = 2;
10879         }
10880       /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
10881     }
10882 
10883 
10884   opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10885   if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10886     {
10887       ret = -1;
10888       /* Undefined instruction on ARM V5; need to handle if later
10889          versions define it.  */
10890     }
10891 
10892   opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10893   opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10894   insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10895 
10896   /* Handle arithmetic insn extension space.  */
10897   if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10898       && !INSN_RECORDED(arm_insn_r))
10899     {
10900       /* Handle MLA(S) and MUL(S).  */
10901       if (0 <= insn_op1 && 3 >= insn_op1)
10902       {
10903         record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10904         record_buf[1] = ARM_PS_REGNUM;
10905         arm_insn_r->reg_rec_count = 2;
10906       }
10907       else if (4 <= insn_op1 && 15 >= insn_op1)
10908       {
10909         /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
10910         record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10911         record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10912         record_buf[2] = ARM_PS_REGNUM;
10913         arm_insn_r->reg_rec_count = 3;
10914       }
10915     }
10916 
10917   opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10918   opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10919   insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10920 
10921   /* Handle control insn extension space.  */
10922 
10923   if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10924       && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10925     {
10926       if (!bit (arm_insn_r->arm_insn,25))
10927         {
10928           if (!bits (arm_insn_r->arm_insn, 4, 7))
10929             {
10930               if ((0 == insn_op1) || (2 == insn_op1))
10931                 {
10932                   /* MRS.  */
10933                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10934                   arm_insn_r->reg_rec_count = 1;
10935                 }
10936               else if (1 == insn_op1)
10937                 {
10938                   /* CPSR is going to be changed.  */
10939                   record_buf[0] = ARM_PS_REGNUM;
10940                   arm_insn_r->reg_rec_count = 1;
10941                 }
10942               else if (3 == insn_op1)
10943                 {
10944                   /* SPSR is going to be changed.  */
10945                   /* We need to get SPSR value, which is yet to be done.  */
10946                   printf_unfiltered (_("Process record does not support "
10947                                      "instruction  0x%0x at address %s.\n"),
10948                                      arm_insn_r->arm_insn,
10949                                      paddress (arm_insn_r->gdbarch,
10950                                      arm_insn_r->this_addr));
10951                   return -1;
10952                 }
10953             }
10954           else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10955             {
10956               if (1 == insn_op1)
10957                 {
10958                   /* BX.  */
10959                   record_buf[0] = ARM_PS_REGNUM;
10960                   arm_insn_r->reg_rec_count = 1;
10961                 }
10962               else if (3 == insn_op1)
10963                 {
10964                   /* CLZ.  */
10965                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10966                   arm_insn_r->reg_rec_count = 1;
10967                 }
10968             }
10969           else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10970             {
10971               /* BLX.  */
10972               record_buf[0] = ARM_PS_REGNUM;
10973               record_buf[1] = ARM_LR_REGNUM;
10974               arm_insn_r->reg_rec_count = 2;
10975             }
10976           else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10977             {
10978               /* QADD, QSUB, QDADD, QDSUB */
10979               record_buf[0] = ARM_PS_REGNUM;
10980               record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10981               arm_insn_r->reg_rec_count = 2;
10982             }
10983           else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10984             {
10985               /* BKPT.  */
10986               record_buf[0] = ARM_PS_REGNUM;
10987               record_buf[1] = ARM_LR_REGNUM;
10988               arm_insn_r->reg_rec_count = 2;
10989 
10990               /* Save SPSR also; how?  */
10991               printf_unfiltered (_("Process record does not support "
10992                                   "instruction 0x%0x at address %s.\n"),
10993                                   arm_insn_r->arm_insn,
10994                   paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10995               return -1;
10996             }
10997           else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10998                   || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10999                   || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11000                   || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11001                  )
11002             {
11003               if (0 == insn_op1 || 1 == insn_op1)
11004                 {
11005                   /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
11006                   /* We don't optimize for SMULW<y>, where only Rd
11007                      would need to be recorded.  */
11008                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11009                   record_buf[1] = ARM_PS_REGNUM;
11010                   arm_insn_r->reg_rec_count = 2;
11011                 }
11012               else if (2 == insn_op1)
11013                 {
11014                   /* SMLAL<x><y>.  */
11015                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11016                   record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11017                   arm_insn_r->reg_rec_count = 2;
11018                 }
11019               else if (3 == insn_op1)
11020                 {
11021                   /* SMUL<x><y>.  */
11022                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11023                   arm_insn_r->reg_rec_count = 1;
11024                 }
11025             }
11026         }
11027       else
11028         {
11029           /* MSR: immediate form.  */
11030           if (1 == insn_op1)
11031             {
11032               /* CPSR is going to be changed.  */
11033               record_buf[0] = ARM_PS_REGNUM;
11034               arm_insn_r->reg_rec_count = 1;
11035             }
11036           else if (3 == insn_op1)
11037             {
11038               /* SPSR is going to be changed.  */
11039               /* We need to get the SPSR value, which is yet to be done.  */
11040               printf_unfiltered (_("Process record does not support "
11041                                    "instruction 0x%0x at address %s.\n"),
11042                                     arm_insn_r->arm_insn,
11043                                     paddress (arm_insn_r->gdbarch,
11044                                     arm_insn_r->this_addr));
11045               return -1;
11046             }
11047         }
11048     }
11049 
11050   opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11051   opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11052   insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11053 
11054   /* Handle load/store insn extension space.  */
11055 
11056   if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11057       && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11058       && !INSN_RECORDED(arm_insn_r))
11059     {
11060       /* SWP/SWPB.  */
11061       if (0 == insn_op1)
11062         {
11063           /* This insn changes a register and memory as well.  */
11064           /* SWP or SWPB insn.  */
11065           /* Get memory address given by Rn.  */
11066           reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11067           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11068           /* SWP insn swaps a word.  */
11069           if (8 == arm_insn_r->opcode)
11070             {
11071               record_buf_mem[0] = 4;
11072             }
11073           else
11074             {
11075               /* SWPB insn swaps only a byte.  */
11076               record_buf_mem[0] = 1;
11077             }
11078           record_buf_mem[1] = u_regval;
11079           arm_insn_r->mem_rec_count = 1;
11080           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11081           arm_insn_r->reg_rec_count = 1;
11082         }
11083       else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11084         {
11085           /* STRH.  */
11086           arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11087                           ARM_RECORD_STRH);
11088         }
11089       else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11090         {
11091           /* LDRD.  */
11092           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11093           record_buf[1] = record_buf[0] + 1;
11094           arm_insn_r->reg_rec_count = 2;
11095         }
11096       else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11097         {
11098           /* STRD.  */
11099           arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11100                         ARM_RECORD_STRD);
11101         }
11102       else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11103         {
11104           /* LDRH, LDRSB, LDRSH.  */
11105           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11106           arm_insn_r->reg_rec_count = 1;
11107         }
11108 
11109     }
11110 
11111   opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11112   if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11113       && !INSN_RECORDED(arm_insn_r))
11114     {
11115       ret = -1;
11116       /* Handle coprocessor insn extension space.  */
11117     }
11118 
11119   /* To be done for ARMv5 and later; as of now we return -1.  */
11120   if (-1 == ret)
11121     printf_unfiltered (_("Process record does not support instruction x%0x "
11122                          "at address %s.\n"),arm_insn_r->arm_insn,
11123                          paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11124 
11125 
11126   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11127   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11128 
11129   return ret;
11130 }
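
/* Illustrative note, not part of the decoder: the field extraction
   used throughout these handlers relies on the bit ()/bits ()
   helpers taking an inclusive bit range with the low bit first, so
   for an assumed example insn 0xe5921004 (LDR r1, [r2, #4]):

     uint32_t rn = bits (insn, 16, 19);    yields 2, i.e. (insn >> 16) & 0xf
     uint32_t l  = bit (insn, 20);         yields 1, the L bit
*/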
11131 
11132 /* Handling opcode 000 insns.  */
11133 
11134 static int
11135 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11136 {
11137   struct regcache *reg_cache = arm_insn_r->regcache;
11138   uint32_t record_buf[8], record_buf_mem[8];
11139   ULONGEST u_regval[2] = {0};
11140 
11141   uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11142   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11143   uint32_t opcode1 = 0;
11144 
11145   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11146   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11147   opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11148 
11149   /* Data processing insn / multiply insn.  */
11150   if (9 == arm_insn_r->decode
11151       && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11152       ||  (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11153     {
11154       /* Handle multiply instructions.  */
11155       /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
11156         if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11157           {
11158             /* Handle MLA and MUL.  */
11159             record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11160             record_buf[1] = ARM_PS_REGNUM;
11161             arm_insn_r->reg_rec_count = 2;
11162           }
11163         else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11164           {
11165             /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
11166             record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11167             record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11168             record_buf[2] = ARM_PS_REGNUM;
11169             arm_insn_r->reg_rec_count = 3;
11170           }
11171     }
11172   else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11173            && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11174     {
11175       /* Handle misc load insns, as the 20th bit (L = 1) is set.  */
11176       /* An LDR insn can effectively branch: if an LDR insn with R15 as
11177          the destination is preceded by MOV LR, PC, the pair emulates a
11178          branch and link insn, and hence we need to save CPSR and PC as
11179          well.  It is not clear this is the right place, since it is the
11180          opcode = 010 LDR insns that make this happen when R15 is
11181          used.  */
11182       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11183       if (15 != reg_dest)
11184         {
11185           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11186           arm_insn_r->reg_rec_count = 1;
11187         }
11188       else
11189         {
11190           record_buf[0] = reg_dest;
11191           record_buf[1] = ARM_PS_REGNUM;
11192           arm_insn_r->reg_rec_count = 2;
11193         }
11194     }
11195   else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11196            && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11197            && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11198            && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11199     {
11200       /* Handle MSR insn.  */
11201       if (9 == arm_insn_r->opcode)
11202         {
11203           /* CPSR is going to be changed.  */
11204           record_buf[0] = ARM_PS_REGNUM;
11205           arm_insn_r->reg_rec_count = 1;
11206         }
11207       else
11208         {
11209           /* SPSR is going to be changed.  */
11210           /* How to read SPSR value?  */
11211           printf_unfiltered (_("Process record does not support instruction "
11212                             "0x%0x at address %s.\n"),
11213                             arm_insn_r->arm_insn,
11214                         paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11215           return -1;
11216         }
11217     }
11218   else if (9 == arm_insn_r->decode
11219            && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11220            && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11221     {
11222       /* Handle SWP, SWPB.  */
11223       /* These insns change a register and memory as well.  */
11224       /* SWP or SWPB insn.  */
11225 
11226       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11227       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11228       /* SWP insn swaps a word.  */
11229       if (8 == arm_insn_r->opcode)
11230         {
11231           record_buf_mem[0] = 4;
11232         }
11233       else
11234         {
11235           /* SWPB insn, swaps only byte.  */
11236           record_buf_mem[0] = 1;
11237         }
11238       record_buf_mem[1] = u_regval[0];
11239       arm_insn_r->mem_rec_count = 1;
11240       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11241       arm_insn_r->reg_rec_count = 1;
11242     }
11243   else if (3 == arm_insn_r->decode && 0x12 == opcode1
11244            && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11245     {
11246       /* Handle BLX, branch and link/exchange.  */
11247       if (9 == arm_insn_r->opcode)
11248       {
11249         /* The branch state is chosen by setting the T bit of CPSR from
11250            bit[0] of Rm, and R14 stores the return address.  */
11251         record_buf[0] = ARM_PS_REGNUM;
11252         record_buf[1] = ARM_LR_REGNUM;
11253         arm_insn_r->reg_rec_count = 2;
11254       }
11255     }
11256   else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11257     {
11258       /* Handle enhanced software breakpoint insn, BKPT.  */
11259       /* CPSR is changed to execute in ARM state, disabling normal
11260          interrupts, entering abort mode.  */
11261       /* PC is set according to the high vector configuration.  */
11262       /* If the user hits the breakpoint and then reverses, we need to
11263          go back with the previous CPSR and program counter.  */
11265       record_buf[0] = ARM_PS_REGNUM;
11266       record_buf[1] = ARM_LR_REGNUM;
11267       arm_insn_r->reg_rec_count = 2;
11268 
11269       /* Save SPSR also; how?  */
11270       printf_unfiltered (_("Process record does not support instruction "
11271                            "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11272                            paddress (arm_insn_r->gdbarch,
11273                            arm_insn_r->this_addr));
11274       return -1;
11275     }
11276   else if (11 == arm_insn_r->decode
11277            && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11278   {
11279     /* Handle enhanced store insns and DSP insns (e.g. LDRD).  */
11280 
11281     /* Handle str(x) insn.  */
11282     arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11283                     ARM_RECORD_STRH);
11284   }
11285   else if (1 == arm_insn_r->decode && 0x12 == opcode1
11286            && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11287     {
11288       /* Handle BX, branch and link/exchange.  */
11289       /* The T bit of CPSR is set from bit[0] of Rm.  */
11290       record_buf[0] = ARM_PS_REGNUM;
11291       arm_insn_r->reg_rec_count = 1;
11292     }
11293   else if (1 == arm_insn_r->decode && 0x16 == opcode1
11294            && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11295            && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11296     {
11297       /* Count leading zeros: CLZ.  */
11298       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11299       arm_insn_r->reg_rec_count = 1;
11300     }
11301   else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11302            && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11303            && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11304            && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11305           )
11306     {
11307       /* Handle MRS insn.  */
11308       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11309       arm_insn_r->reg_rec_count = 1;
11310     }
11311   else if (arm_insn_r->opcode <= 15)
11312     {
11313       /* Normal data processing insns.  */
11314       /* In all of the 11 shifter operand modes, the insn modifies the
11315          destination register, which is specified by bits 12-15.  */
11316       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11317       record_buf[1] = ARM_PS_REGNUM;
11318       arm_insn_r->reg_rec_count = 2;
11319     }
11320   else
11321     {
11322       return -1;
11323     }
11324 
11325   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11326   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11327   return 0;
11328 }
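
/* Illustrative sketch, not part of the decoder: record_buf holds the
   numbers of the registers whose old values must be saved before the
   insn executes, and REG_ALLOC passes reg_rec_count of them to the
   record core.  For a flag-setting multiply such as MULS r3, r1, r2
   the handler above produces, in effect:

     record_buf[0] = 3;              destination register Rd (bits 16-19)
     record_buf[1] = ARM_PS_REGNUM;  CPSR, because of the S bit
     arm_insn_r->reg_rec_count = 2;
*/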
11329 
11330 /* Handling opcode 001 insns.  */
11331 
11332 static int
11333 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11334 {
11335   uint32_t record_buf[8], record_buf_mem[8];
11336 
11337   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11338   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11339 
11340   if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11341       && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11342       && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11343      )
11344     {
11345       /* Handle MSR insn.  */
11346       if (9 == arm_insn_r->opcode)
11347         {
11348           /* CPSR is going to be changed.  */
11349           record_buf[0] = ARM_PS_REGNUM;
11350           arm_insn_r->reg_rec_count = 1;
11351         }
11352       else
11353         {
11354           /* SPSR is going to be changed.  */
11355         }
11356     }
11357   else if (arm_insn_r->opcode <= 15)
11358     {
11359       /* Normal data processing insns.  */
11360       /* In all of the 11 shifter operand modes, the insn modifies the
11361          destination register, which is specified by bits 12-15.  */
11362       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11363       record_buf[1] = ARM_PS_REGNUM;
11364       arm_insn_r->reg_rec_count = 2;
11365     }
11366   else
11367     {
11368       return -1;
11369     }
11370 
11371   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11372   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11373   return 0;
11374 }
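
/* Worked example (illustrative only): for a data-processing immediate
   insn such as ADDS r0, r0, #1, the handler above records the
   destination register from bits 12-15 plus CPSR, i.e.

     record_buf[0] = 0;              Rd
     record_buf[1] = ARM_PS_REGNUM;  flags may change
     arm_insn_r->reg_rec_count = 2;

   which slightly over-records when the S bit is clear, but is always
   safe for replay.  */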
11375 
11376 /* Handling opcode 010 insns.  */
11377 
11378 static int
11379 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11380 {
11381   struct regcache *reg_cache = arm_insn_r->regcache;
11382 
11383   uint32_t reg_src1 = 0, reg_dest = 0;
11384   uint32_t offset_12 = 0, tgt_mem_addr = 0;
11385   uint32_t record_buf[8], record_buf_mem[8];
11386 
11387   ULONGEST u_regval = 0;
11388 
11389   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11390   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11391 
11392   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11393     {
11394       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11395       /* An LDR insn can effectively branch: if an LDR insn with R15 as
11396          the destination is preceded by MOV LR, PC, the pair emulates a
11397          branch and link insn, and hence we need to save CPSR and PC as
11398          well.  */
11399       if (ARM_PC_REGNUM != reg_dest)
11400         {
11401           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11402           arm_insn_r->reg_rec_count = 1;
11403         }
11404       else
11405         {
11406           record_buf[0] = reg_dest;
11407           record_buf[1] = ARM_PS_REGNUM;
11408           arm_insn_r->reg_rec_count = 2;
11409         }
11410     }
11411   else
11412     {
11413       /* Store, immediate offset, immediate pre-indexed,
11414          immediate post-indexed.  */
11415       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11416       offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11417       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11418       /* U == 1.  */
11419       if (bit (arm_insn_r->arm_insn, 23))
11420         {
11421           tgt_mem_addr = u_regval + offset_12;
11422         }
11423       else
11424         {
11425           tgt_mem_addr = u_regval - offset_12;
11426         }
11427 
11428       switch (arm_insn_r->opcode)
11429         {
11430           /* STR.  */
11431           case 8:
11432           case 12:
11433           /* STR.  */
11434           case 9:
11435           case 13:
11436           /* STRT.  */
11437           case 1:
11438           case 5:
11439           /* STR.  */
11440           case 4:
11441           case 0:
11442             record_buf_mem[0] = 4;
11443           break;
11444 
11445           /* STRB.  */
11446           case 10:
11447           case 14:
11448           /* STRB.  */
11449           case 11:
11450           case 15:
11451           /* STRBT.  */
11452           case 3:
11453           case 7:
11454           /* STRB.  */
11455           case 2:
11456           case 6:
11457             record_buf_mem[0] = 1;
11458           break;
11459 
11460           default:
11461             gdb_assert_not_reached ("no decoding pattern found");
11462           break;
11463         }
11464       record_buf_mem[1] = tgt_mem_addr;
11465       arm_insn_r->mem_rec_count = 1;
11466 
11467       if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11468           || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11469           || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11470           || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11471           || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11472           || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11473          )
11474         {
11475           /* We are handling pre-indexed and post-indexed modes, where
11476              Rn is going to be changed.  */
11477           record_buf[0] = reg_src1;
11478           arm_insn_r->reg_rec_count = 1;
11479         }
11480     }
11481 
11482   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11483   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11484   return 0;
11485 }
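
/* Worked example (illustrative only): for STR r1, [r2, #8] with r2
   assumed to hold 0x1000, the U bit is set, so the handler above
   computes tgt_mem_addr = 0x1000 + 8 and records one 4-byte memory
   entry:

     record_buf_mem[0] = 4;        length
     record_buf_mem[1] = 0x1008;   address
     arm_insn_r->mem_rec_count = 1;

   Rn is not recorded here because plain offset addressing (opcode 12)
   does not write back to the base register.  */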
11486 
11487 /* Handling opcode 011 insns.  */
11488 
11489 static int
11490 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11491 {
11492   struct regcache *reg_cache = arm_insn_r->regcache;
11493 
11494   uint32_t shift_imm = 0;
11495   uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11496   uint32_t offset_12 = 0, tgt_mem_addr = 0;
11497   uint32_t record_buf[8], record_buf_mem[8];
11498 
11499   LONGEST s_word;
11500   ULONGEST u_regval[2];
11501 
11502   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11503   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11504 
11505   /* Handle enhanced store insns and the LDRD DSP insn; the ordering
11506      follows the addressing modes for store insns, beginning with the
11507      STRH insn.  */
11508 
11509   /* LDR or STR?  */
11510   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11511     {
11512       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11513       /* An LDR insn can effectively branch: if an LDR insn with R15 as
11514          the destination is preceded by MOV LR, PC, the pair emulates a
11515          branch and link insn, and hence we need to save CPSR and PC as
11516          well.  */
11517       if (15 != reg_dest)
11518         {
11519           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11520           arm_insn_r->reg_rec_count = 1;
11521         }
11522       else
11523         {
11524           record_buf[0] = reg_dest;
11525           record_buf[1] = ARM_PS_REGNUM;
11526           arm_insn_r->reg_rec_count = 2;
11527         }
11528     }
11529   else
11530     {
11531       if (! bits (arm_insn_r->arm_insn, 4, 11))
11532         {
11533           /* Store insn, register offset and register pre-indexed,
11534              register post-indexed.  */
11535           /* Get Rm.  */
11536           reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11537           /* Get Rn.  */
11538           reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11539           regcache_raw_read_unsigned (reg_cache, reg_src1,
11540                                       &u_regval[0]);
11541           regcache_raw_read_unsigned (reg_cache, reg_src2,
11542                                       &u_regval[1]);
11543           if (15 == reg_src2)
11544             {
11545               /* If R15 was used as Rn, the value is the current PC + 8.  */
11546               /* Pre-indexed mode doesn't reach here; it is an illegal insn.  */
11547               u_regval[0] = u_regval[0] + 8;
11548             }
11549           /* Calculate target store address, Rn +/- Rm, register offset.  */
11550           /* U == 1.  */
11551           if (bit (arm_insn_r->arm_insn, 23))
11552             {
11553               tgt_mem_addr = u_regval[0] + u_regval[1];
11554             }
11555           else
11556             {
11557               tgt_mem_addr = u_regval[1] - u_regval[0];
11558             }
11559 
11560           switch (arm_insn_r->opcode)
11561             {
11562               /* STR.  */
11563               case 8:
11564               case 12:
11565               /* STR.  */
11566               case 9:
11567               case 13:
11568               /* STRT.  */
11569               case 1:
11570               case 5:
11571               /* STR.  */
11572               case 0:
11573               case 4:
11574                 record_buf_mem[0] = 4;
11575               break;
11576 
11577               /* STRB.  */
11578               case 10:
11579               case 14:
11580               /* STRB.  */
11581               case 11:
11582               case 15:
11583               /* STRBT.  */
11584               case 3:
11585               case 7:
11586               /* STRB.  */
11587               case 2:
11588               case 6:
11589                 record_buf_mem[0] = 1;
11590               break;
11591 
11592               default:
11593                 gdb_assert_not_reached ("no decoding pattern found");
11594               break;
11595             }
11596           record_buf_mem[1] = tgt_mem_addr;
11597           arm_insn_r->mem_rec_count = 1;
11598 
11599           if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11600               || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11601               || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11602               || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11603               || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11604               || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11605              )
11606             {
11607               /* Rn is going to be changed in pre-indexed mode and
11608                  post-indexed mode as well.  */
11609               record_buf[0] = reg_src2;
11610               arm_insn_r->reg_rec_count = 1;
11611             }
11612         }
11613       else
11614         {
11615           /* Store insn, scaled register offset; scaled pre-indexed.  */
11616           offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11617           /* Get Rm.  */
11618           reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11619           /* Get Rn.  */
11620           reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11621           /* Get shift_imm.  */
11622           shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11623           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11624           regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11625           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11626           /* Offset_12 used as shift.  */
11627           switch (offset_12)
11628             {
11629               case 0:
11630                 /* Offset_12 used as index.  */
11631                 offset_12 = u_regval[0] << shift_imm;
11632               break;
11633 
11634               case 1:
11635                 offset_12 = (!shift_imm) ? 0 : (u_regval[0] >> shift_imm);
11636               break;
11637 
11638               case 2:
11639                 if (!shift_imm)
11640                   {
11641                     if (bit (u_regval[0], 31))
11642                       {
11643                         offset_12 = 0xFFFFFFFF;
11644                       }
11645                     else
11646                       {
11647                         offset_12 = 0;
11648                       }
11649                   }
11650                 else
11651                   {
11652                     /* This is arithmetic shift.  */
11653                     offset_12 = s_word >> shift_imm;
11654                   }
11655                 break;
11656 
11657               case 3:
11658                 if (!shift_imm)
11659                   {
11660                     regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11661                                                 &u_regval[1]);
11662                     /* Get C flag value and shift it by 31.  */
11663                     offset_12 = (((bit (u_regval[1], 29)) << 31) \
11664                                   | (u_regval[0]) >> 1);
11665                   }
11666                 else
11667                   {
11668                     /* Rotate right by shift_imm bits.  */
11669                     offset_12 = (u_regval[0] >> shift_imm)
11670                                 | (u_regval[0] << (32 - shift_imm));
11671                   }
11672               break;
11673 
11674               default:
11675                 gdb_assert_not_reached ("no decoding pattern found");
11676               break;
11677             }
11678 
11679           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11680           /* bit U set.  */
11681           if (bit (arm_insn_r->arm_insn, 23))
11682             {
11683               tgt_mem_addr = u_regval[1] + offset_12;
11684             }
11685           else
11686             {
11687               tgt_mem_addr = u_regval[1] - offset_12;
11688             }
11689 
11690           switch (arm_insn_r->opcode)
11691             {
11692               /* STR.  */
11693               case 8:
11694               case 12:
11695               /* STR.  */
11696               case 9:
11697               case 13:
11698               /* STRT.  */
11699               case 1:
11700               case 5:
11701               /* STR.  */
11702               case 0:
11703               case 4:
11704                 record_buf_mem[0] = 4;
11705               break;
11706 
11707               /* STRB.  */
11708               case 10:
11709               case 14:
11710               /* STRB.  */
11711               case 11:
11712               case 15:
11713               /* STRBT.  */
11714               case 3:
11715               case 7:
11716               /* STRB.  */
11717               case 2:
11718               case 6:
11719                 record_buf_mem[0] = 1;
11720               break;
11721 
11722               default:
11723                 gdb_assert_not_reached ("no decoding pattern found");
11724               break;
11725             }
11726           record_buf_mem[1] = tgt_mem_addr;
11727           arm_insn_r->mem_rec_count = 1;
11728 
11729           if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11730               || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11731               || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11732               || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11733               || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11734               || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11735              )
11736             {
11737               /* Rn is going to be changed in scaled register pre-indexed
11738                  mode and scaled post-indexed mode.  */
11739               record_buf[0] = reg_src2;
11740               arm_insn_r->reg_rec_count = 1;
11741             }
11742         }
11743     }
11744 
11745   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11746   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11747   return 0;
11748 }
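
/* Worked example (illustrative only): for the scaled register form
   STR r0, [r1, r2, LSL #2], assuming r1 = 0x2000 and r2 = 3, the
   shift type (bits 5-6) is 0, so the handler above computes

     offset_12    = 3 << 2;             12
     tgt_mem_addr = 0x2000 + 12;        U bit set

   and records a single 4-byte entry {4, 0x200c}; the base register is
   additionally recorded only for the pre/post-indexed opcodes.  */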
11749 
11750 /* Handling opcode 100 insns.  */
11751 
11752 static int
11753 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11754 {
11755   struct regcache *reg_cache = arm_insn_r->regcache;
11756 
11757   uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11758   uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11759   uint32_t start_address = 0, index = 0;
11760   uint32_t record_buf[24], record_buf_mem[48];
11761 
11762   ULONGEST u_regval[2] = {0};
11763 
11764   /* This mode is exclusively for load and store multiple.  */
11765   /* Handle increment after/before and decrement after/before modes;
11766      Rn changes depending on the W bit, but as of now we store Rn too,
11767      without optimization.  */
11768 
11769   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11770     {
11771       /* LDM  (1,2,3) where LDM  (3) changes CPSR too.  */
11772 
11773       if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11774         {
11775           register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11776           no_of_regs = 15;
11777         }
11778       else
11779         {
11780           register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11781           no_of_regs = 14;
11782         }
11783       /* Get Rn.  */
11784       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11785       while (register_bits)
11786       {
11787         if (register_bits & 0x00000001)
11788           register_list[register_count++] = 1;
11789         register_bits = register_bits >> 1;
11790       }
11791 
11792         /* Extra space for the base register and CPSR; without optimization.  */
11793         record_buf[register_count] = reg_src1;
11794         record_buf[register_count + 1] = ARM_PS_REGNUM;
11795         arm_insn_r->reg_rec_count = register_count + 2;
11796 
11797         for (register_count = 0; register_count < no_of_regs; register_count++)
11798           {
11799             if  (register_list[register_count])
11800               {
11801                 /* Register_count gives the total number of registers
11802                    and doubles as the register number.  */
11803                 record_buf[index] = register_count;
11804                 index++;
11805               }
11806           }
11807 
11808     }
11809   else
11810     {
11811       /* It handles both STM(1) and STM(2).  */
11812       addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11813 
11814       register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11815       /* Get Rn.  */
11816       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11817       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11818       while (register_bits)
11819         {
11820           if (register_bits & 0x00000001)
11821             register_count++;
11822           register_bits = register_bits >> 1;
11823         }
11824 
11825       switch (addr_mode)
11826         {
11827           /* Decrement after.  */
11828           case 0:
11829             start_address = (u_regval[0]) - (register_count * 4) + 4;
11830             arm_insn_r->mem_rec_count = register_count;
11831             while (register_count)
11832               {
11833                 record_buf_mem[(register_count * 2) - 1] = start_address;
11834                 record_buf_mem[(register_count * 2) - 2] = 4;
11835                 start_address = start_address + 4;
11836                 register_count--;
11837               }
11838           break;
11839 
11840           /* Increment after.  */
11841           case 1:
11842             start_address = u_regval[0];
11843             arm_insn_r->mem_rec_count = register_count;
11844             while (register_count)
11845               {
11846                 record_buf_mem[(register_count * 2) - 1] = start_address;
11847                 record_buf_mem[(register_count * 2) - 2] = 4;
11848                 start_address = start_address + 4;
11849                 register_count--;
11850               }
11851           break;
11852 
11853           /* Decrement before.  */
11854           case 2:
11855 
11856             start_address = (u_regval[0]) - (register_count * 4);
11857             arm_insn_r->mem_rec_count = register_count;
11858             while (register_count)
11859               {
11860                 record_buf_mem[(register_count * 2) - 1] = start_address;
11861                 record_buf_mem[(register_count * 2) - 2] = 4;
11862                 start_address = start_address + 4;
11863                 register_count--;
11864               }
11865           break;
11866 
11867           /* Increment before.  */
11868           case 3:
11869             start_address = u_regval[0] + 4;
11870             arm_insn_r->mem_rec_count = register_count;
11871             while (register_count)
11872               {
11873                 record_buf_mem[(register_count * 2) - 1] = start_address;
11874                 record_buf_mem[(register_count * 2) - 2] = 4;
11875                 start_address = start_address + 4;
11876                 register_count--;
11877               }
11878           break;
11879 
11880           default:
11881             gdb_assert_not_reached ("no decoding pattern found");
11882           break;
11883         }
11884 
11885       /* The base register also changes, based on the condition and the
11886          W bit.  We save it anyway, without optimization.  */
11887       record_buf[0] = reg_src1;
11888       arm_insn_r->reg_rec_count = 1;
11889     }
11890 
11891   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11892   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11893   return 0;
11894 }
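
/* Worked example (illustrative only): for STMDB sp!, {r0, r1, lr}
   with SP assumed to be 0x2000, addr_mode (bits 23-24) is 2
   ("decrement before"), register_count is 3, and the loop above
   produces

     start_address = 0x2000 - 3 * 4;    0x1ff4

   giving three 4-byte entries at 0x1ff4, 0x1ff8 and 0x1ffc, plus the
   base register (SP) in record_buf because of the writeback.  */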
11895 
11896 /* Handling opcode 101 insns.  */
11897 
11898 static int
11899 arm_record_b_bl (insn_decode_record *arm_insn_r)
11900 {
11901   uint32_t record_buf[8];
11902 
11903   /* Handle B, BL, BLX(1) insns.  */
11904   /* B simply branches so we do nothing here.  */
11905   /* Note: BLX(1) doesn't fall here; instead it falls into the
11906      extension space.  */
11907   if (bit (arm_insn_r->arm_insn, 24))
11908   {
11909     record_buf[0] = ARM_LR_REGNUM;
11910     arm_insn_r->reg_rec_count = 1;
11911   }
11912 
11913   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11914 
11915   return 0;
11916 }
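
/* Illustrative note: bit 24 is the link bit of the B/BL encoding, so
   for "BL target" the handler above records only LR,

     record_buf[0] = ARM_LR_REGNUM;
     arm_insn_r->reg_rec_count = 1;

   while a plain B records nothing here; the PC itself is saved in the
   common record path (compare the B(1) note in the Thumb handlers
   below).  */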
11917 
11918 /* Handling opcode 110 insns.  */
11919 
11920 static int
11921 arm_record_coproc (insn_decode_record *arm_insn_r)
11922 {
11923   printf_unfiltered (_("Process record does not support instruction "
11924                     "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11925                     paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11926 
11927   return -1;
11928 }
11929 
11930 /* Handling opcode 111 insns.  */
11931 
11932 static int
11933 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11934 {
11935   struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11936   struct regcache *reg_cache = arm_insn_r->regcache;
11937   uint32_t ret = 0;  /* Return value: -1: record failure; 0: success.  */
11938 
11939   /* Handle the SWI insn; system calls are handled here.  */
11940 
11941   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11942   if (15 == arm_insn_r->opcode)
11943   {
11944     /* Handle arm syscall insn.  */
11945     if (tdep->arm_swi_record != NULL)
11946       {
11947         ret = tdep->arm_swi_record (reg_cache);
11948       }
11949     else
11950       {
11951         printf_unfiltered (_("no syscall record support\n"));
11952         ret = -1;
11953       }
11954   }
11955 
11956   printf_unfiltered (_("Process record does not support instruction "
11957                         "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11958                         paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11959   return ret;
11960 }
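
/* Illustrative sketch, not part of this file: syscall recording is
   delegated through the tdep hook checked above, so an OS-ABI layer
   that knows its kernel's side effects could install a handler along
   these lines ("my_abi_swi_record" is a hypothetical name):

     static int
     my_abi_swi_record (struct regcache *regcache)
     {
       ... record the registers and memory the syscall may clobber ...
       return 0;
     }

     tdep->arm_swi_record = my_abi_swi_record;
*/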
11961 
11962 /* Handling opcode 000 insns.  */
11963 
11964 static int
11965 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11966 {
11967   uint32_t record_buf[8];
11968   uint32_t reg_src1 = 0;
11969 
11970   reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11971 
11972   record_buf[0] = ARM_PS_REGNUM;
11973   record_buf[1] = reg_src1;
11974   thumb_insn_r->reg_rec_count = 2;
11975 
11976   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11977 
11978   return 0;
11979 }
11980 
11981 
11982 /* Handling opcode 001 insns.  */
11983 
11984 static int
11985 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11986 {
11987   uint32_t record_buf[8];
11988   uint32_t reg_src1 = 0;
11989 
11990   reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11991 
11992   record_buf[0] = ARM_PS_REGNUM;
11993   record_buf[1] = reg_src1;
11994   thumb_insn_r->reg_rec_count = 2;
11995 
11996   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11997 
11998   return 0;
11999 }
12000 
12001 /* Handling opcode 010 insns.  */
12002 
12003 static int
12004 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12005 {
12006   struct regcache *reg_cache =  thumb_insn_r->regcache;
12007   uint32_t record_buf[8], record_buf_mem[8];
12008 
12009   uint32_t reg_src1 = 0, reg_src2 = 0;
12010   uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12011 
12012   ULONGEST u_regval[2] = {0};
12013 
12014   opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12015 
12016   if (bit (thumb_insn_r->arm_insn, 12))
12017     {
12018       /* Handle load/store register offset.  */
12019       opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12020       if (opcode2 >= 11 && opcode2 <= 15)
12021         {
12022           /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH.  */
12023           reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12024           record_buf[0] = reg_src1;
12025           thumb_insn_r->reg_rec_count = 1;
12026         }
12027       else if (opcode2 >= 8 && opcode2 <= 10)
12028         {
12029           /* STR(2), STRB(2), STRH(2).  */
12030           reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12031           reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12032           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12033           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12034           if (8 == opcode2)
12035             record_buf_mem[0] = 4;    /* STR (2).  */
12036           else if (10 == opcode2)
12037             record_buf_mem[0] = 1;    /*  STRB (2).  */
12038           else if (9 == opcode2)
12039             record_buf_mem[0] = 2;    /* STRH (2).  */
12040           record_buf_mem[1] = u_regval[0] + u_regval[1];
12041           thumb_insn_r->mem_rec_count = 1;
12042         }
12043     }
12044   else if (bit (thumb_insn_r->arm_insn, 11))
12045     {
12046       /* Handle load from literal pool.  */
12047       /* LDR(3).  */
12048       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12049       record_buf[0] = reg_src1;
12050       thumb_insn_r->reg_rec_count = 1;
12051     }
12052   else if (opcode1)
12053     {
12054       opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12055       opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12056       if ((3 == opcode2) && (!opcode3))
12057         {
12058           /* Branch with exchange.  */
12059           record_buf[0] = ARM_PS_REGNUM;
12060           thumb_insn_r->reg_rec_count = 1;
12061         }
12062       else
12063         {
12064           /* Format 8; special data processing insns.  */
12065           reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12066           record_buf[0] = ARM_PS_REGNUM;
12067           record_buf[1] = reg_src1;
12068           thumb_insn_r->reg_rec_count = 2;
12069         }
12070     }
12071   else
12072     {
12073       /* Format 5; data processing insns.  */
12074       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12075       if (bit (thumb_insn_r->arm_insn, 7))
12076         {
12077           reg_src1 = reg_src1 + 8;
12078         }
12079       record_buf[0] = ARM_PS_REGNUM;
12080       record_buf[1] = reg_src1;
12081       thumb_insn_r->reg_rec_count = 2;
12082     }
12083 
12084   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12085   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12086              record_buf_mem);
12087 
12088   return 0;
12089 }
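
/* Worked example (illustrative only): for the Thumb register-offset
   store STR r0, [r1, r2], assuming r1 = 0x100 and r2 = 8, the handler
   above reads Rn and Rm and records one 4-byte entry at their sum:

     record_buf_mem[0] = 4;
     record_buf_mem[1] = 0x108;
     thumb_insn_r->mem_rec_count = 1;
*/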
12090 
12091 /* Handling opcode 001 insns.  */
12092 
12093 static int
12094 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12095 {
12096   struct regcache *reg_cache = thumb_insn_r->regcache;
12097   uint32_t record_buf[8], record_buf_mem[8];
12098 
12099   uint32_t reg_src1 = 0;
12100   uint32_t opcode = 0, immed_5 = 0;
12101 
12102   ULONGEST u_regval = 0;
12103 
12104   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12105 
12106   if (opcode)
12107     {
12108       /* LDR(1).  */
12109       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12110       record_buf[0] = reg_src1;
12111       thumb_insn_r->reg_rec_count = 1;
12112     }
12113   else
12114     {
12115       /* STR(1).  */
12116       reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12117       immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12118       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12119       record_buf_mem[0] = 4;
12120       record_buf_mem[1] = u_regval + (immed_5 * 4);
12121       thumb_insn_r->mem_rec_count = 1;
12122     }
12123 
12124   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12125   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12126              record_buf_mem);
12127 
12128   return 0;
12129 }
12130 
12131 /* Handling opcode 100 insns.  */
12132 
12133 static int
12134 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12135 {
12136   struct regcache *reg_cache = thumb_insn_r->regcache;
12137   uint32_t record_buf[8], record_buf_mem[8];
12138 
12139   uint32_t reg_src1 = 0;
12140   uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12141 
12142   ULONGEST u_regval = 0;
12143 
12144   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12145 
12146   if (3 == opcode)
12147     {
12148       /* LDR(4).  */
12149       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12150       record_buf[0] = reg_src1;
12151       thumb_insn_r->reg_rec_count = 1;
12152     }
12153   else if (1 == opcode)
12154     {
12155       /* LDRH(1).  */
12156       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12157       record_buf[0] = reg_src1;
12158       thumb_insn_r->reg_rec_count = 1;
12159     }
12160   else if (2 == opcode)
12161     {
12162       /* STR(3).  */
12163       immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12164       regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12165       record_buf_mem[0] = 4;
12166       record_buf_mem[1] = u_regval + (immed_8 * 4);
12167       thumb_insn_r->mem_rec_count = 1;
12168     }
12169   else if (0 == opcode)
12170     {
12171       /* STRH(1).  */
12172       immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12173       reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12174       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12175       record_buf_mem[0] = 2;
12176       record_buf_mem[1] = u_regval + (immed_5 * 2);
12177       thumb_insn_r->mem_rec_count = 1;
12178     }
12179 
12180   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12181   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12182              record_buf_mem);
12183 
12184   return 0;
12185 }
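
/* Worked example (illustrative only): for the SP-relative store
   STR r2, [sp, #16], immed_8 is 4 (the offset is scaled by 4), so
   with SP assumed to be 0x7ff0 the handler above records

     record_buf_mem[0] = 4;
     record_buf_mem[1] = 0x7ff0 + 16;
     thumb_insn_r->mem_rec_count = 1;
*/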
12186 
12187 /* Handling opcode 101 insns.  */
12188 
12189 static int
12190 thumb_record_misc (insn_decode_record *thumb_insn_r)
12191 {
12192   struct regcache *reg_cache = thumb_insn_r->regcache;
12193 
12194   uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12195   uint32_t register_bits = 0, register_count = 0;
12196   uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12197   uint32_t record_buf[24], record_buf_mem[48];
12198   uint32_t reg_src1;
12199 
12200   ULONGEST u_regval = 0;
12201 
12202   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12203   opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12204   opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12205 
12206   if (14 == opcode2)
12207     {
12208       /* POP.  */
12209       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12210       while (register_bits)
12211         {
12212           if (register_bits & 0x00000001)
12213             register_list[register_count++] = 1;
12214           register_bits = register_bits >> 1;
12215         }
12216       record_buf[register_count] = ARM_PS_REGNUM;
12217       record_buf[register_count + 1] = ARM_SP_REGNUM;
12218       thumb_insn_r->reg_rec_count = register_count + 2;
12219       for (register_count = 0; register_count < 8; register_count++)
12220         {
12221           if  (register_list[register_count])
12222             {
12223               record_buf[index] = register_count;
12224               index++;
12225             }
12226         }
12227     }
12228   else if (10 == opcode2)
12229     {
12230       /* PUSH.  */
12231       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12232       regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12233       while (register_bits)
12234         {
12235           if (register_bits & 0x00000001)
12236             register_count++;
12237           register_bits = register_bits >> 1;
12238         }
12239       start_address = u_regval -  \
12240                   (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12241       thumb_insn_r->mem_rec_count = register_count;
12242       while (register_count)
12243         {
12244           record_buf_mem[(register_count * 2) - 1] = start_address;
12245           record_buf_mem[(register_count * 2) - 2] = 4;
12246           start_address = start_address + 4;
12247           register_count--;
12248         }
12249       record_buf[0] = ARM_SP_REGNUM;
12250       thumb_insn_r->reg_rec_count = 1;
12251     }
12252   else if (0x1E == opcode1)
12253     {
12254       /* BKPT insn.  */
12255       /* Handle enhanced software breakpoint insn, BKPT.  */
12256       /* CPSR is changed to execute in ARM state, disabling normal
12257          interrupts, entering abort mode.  */
12258       /* PC is set according to the high vector configuration.  */
12259       /* If the user hits the breakpoint and then reverses, we need to
12260          go back with the previous CPSR and program counter.  */
12261       record_buf[0] = ARM_PS_REGNUM;
12262       record_buf[1] = ARM_LR_REGNUM;
12263       thumb_insn_r->reg_rec_count = 2;
12264       /* We need to save SPSR value, which is not yet done.  */
12265       printf_unfiltered (_("Process record does not support instruction "
12266                            "0x%0x at address %s.\n"),
12267                            thumb_insn_r->arm_insn,
12268                            paddress (thumb_insn_r->gdbarch,
12269                            thumb_insn_r->this_addr));
12270       return -1;
12271     }
12272   else if ((0 == opcode) || (1 == opcode))
12273     {
12274       /* ADD(5), ADD(6).  */
12275       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12276       record_buf[0] = reg_src1;
12277       thumb_insn_r->reg_rec_count = 1;
12278     }
12279   else if (2 == opcode)
12280     {
12281       /* ADD(7), SUB(4).  */
12282       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12283       record_buf[0] = ARM_SP_REGNUM;
12284       thumb_insn_r->reg_rec_count = 1;
12285     }
12286 
12287   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12288   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12289              record_buf_mem);
12290 
12291   return 0;
12292 }
12293 
12294 /* Handling opcode 110 insns.  */
12295 
12296 static int
12297 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12298 {
12299   struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12300   struct regcache *reg_cache = thumb_insn_r->regcache;
12301 
12302   int ret = 0; /* Function return value: -1:record failure, 0:success.  */
12303   uint32_t reg_src1 = 0;
12304   uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12305   uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12306   uint32_t record_buf[24], record_buf_mem[48];
12307 
12308   ULONGEST u_regval = 0;
12309 
12310   opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12311   opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
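  /* A sketch of the decoding, assuming the standard 16-bit Thumb encodings:
     STMIA R2!, {R0, R1} = 0xC203 gives opcode2 (bits 11-12) = 0, LDMIA sets
     bit 11 so opcode2 = 1, and SWI #5 = 0xDF05 gives opcode1 (bits 8-12)
     = 0x1F.  */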
12312 
12313   if (1 == opcode2)
12314     {
12315 
12316       /* LDMIA.  */
12317       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12318       /* Get Rn.  */
12319       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12320       while (register_bits)
12321         {
12322           if (register_bits & 0x00000001)
12323             register_list[register_count++] = 1;
12324           register_bits = register_bits >> 1;
12325         }
12326       record_buf[register_count] = reg_src1;
12327       thumb_insn_r->reg_rec_count = register_count + 1;
12328       for (register_count = 0; register_count < 8; register_count++)
12329         {
12330           if (register_list[register_count])
12331             {
12332               record_buf[index] = register_count;
12333               index++;
12334             }
12335         }
12336     }
12337   else if (0 == opcode2)
12338     {
12339       /* STMIA.  */
12340       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12341       /* Get Rn.  */
12342       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12343       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12344       while (register_bits)
12345         {
12346           if (register_bits & 0x00000001)
12347             register_count++;
12348           register_bits = register_bits >> 1;
12349         }
12350       start_address = u_regval;
12351       thumb_insn_r->mem_rec_count = register_count;
12352       while (register_count)
12353         {
12354           record_buf_mem[(register_count * 2) - 1] = start_address;
12355           record_buf_mem[(register_count * 2) - 2] = 4;
12356           start_address = start_address + 4;
12357           register_count--;
12358         }
12359     }
12360   else if (0x1F == opcode1)
12361     {
12362       /* Handle arm syscall insn.  */
12363       if (tdep->arm_swi_record != NULL)
12364         {
12365           ret = tdep->arm_swi_record (reg_cache);
12366         }
12367       else
12368         {
12369           printf_unfiltered (_("no syscall record support\n"));
12370           return -1;
12371         }
12372     }
12373 
12374   /* B (1), the conditional branch, is automatically taken care of in
12375      process_record, as the PC is saved there.  */
12376 
12377   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12378   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12379              record_buf_mem);
12380 
12381   return ret;
12382 }
12383 
12384 /* Handling opcode 111 insns.  */
12385 
12386 static int
12387 thumb_record_branch (insn_decode_record *thumb_insn_r)
12388 {
12389   uint32_t record_buf[8];
12390   uint32_t bits_h = 0;
12391 
12392   bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
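  /* A sketch, assuming the pre-UAL two-halfword BL/BLX encoding: the prefix
     halfword (e.g. 0xF7FF) gives bits_h (bits 11-12) = 2, a BL suffix in
     0xF800-0xFFFF gives 3, and a BLX(1) suffix in 0xE800-0xEFFF gives 1.  */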
12393 
12394   if (2 == bits_h || 3 == bits_h)
12395     {
12396       /* BL */
12397       record_buf[0] = ARM_LR_REGNUM;
12398       thumb_insn_r->reg_rec_count = 1;
12399     }
12400   else if (1 == bits_h)
12401     {
12402       /* BLX(1). */
12403       record_buf[0] = ARM_PS_REGNUM;
12404       record_buf[1] = ARM_LR_REGNUM;
12405       thumb_insn_r->reg_rec_count = 2;
12406     }
12407 
12408   /* B (2) is automatically taken care of in process_record, as the PC
12409      is saved there.  */
12410 
12411   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12412 
12413   return 0;
12414 }
12415 
12416 
12417 /* Extract an arm/thumb/thumb2 insn depending on INSN_SIZE.  Return 0 on
12418    success and a positive value on failure.  */
12419 
12420 static int
12421 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12422 {
12423   gdb_byte buf[insn_size];
12424 
12425   memset (&buf[0], 0, insn_size);
12426 
12427   if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12428     return 1;
12429   insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12430                            insn_size,
12431                            gdbarch_byte_order (insn_record->gdbarch));
12432   return 0;
12433 }
12434 
12435 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12436 
12437 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and
12438    dispatch it to the matching record handler.  */
12439 
12440 static int
12441 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12442              uint32_t insn_size)
12443 {
12444 
12445   /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm insn.  */
12446   static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12447   {
12448     arm_record_data_proc_misc_ld_str,   /* 000.  */
12449     arm_record_data_proc_imm,           /* 001.  */
12450     arm_record_ld_st_imm_offset,        /* 010.  */
12451     arm_record_ld_st_reg_offset,        /* 011.  */
12452     arm_record_ld_st_multiple,          /* 100.  */
12453     arm_record_b_bl,                    /* 101.  */
12454     arm_record_coproc,                  /* 110.  */
12455     arm_record_coproc_data_proc         /* 111.  */
12456   };
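  /* For example, the ARM insn 0xE92D4800 (STMDB SP!, {R11, LR}) has
     bits 25-27 = 0b100, so it is dispatched to arm_record_ld_st_multiple.  */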
12457 
12458   /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb insn.  */
12459   static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12460   {
12461     thumb_record_shift_add_sub,        /* 000.  */
12462     thumb_record_add_sub_cmp_mov,      /* 001.  */
12463     thumb_record_ld_st_reg_offset,     /* 010.  */
12464     thumb_record_ld_st_imm_offset,     /* 011.  */
12465     thumb_record_ld_st_stack,          /* 100.  */
12466     thumb_record_misc,                 /* 101.  */
12467     thumb_record_ldm_stm_swi,          /* 110.  */
12468     thumb_record_branch                /* 111.  */
12469   };
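  /* Likewise, the Thumb insn 0xB580 (PUSH {R7, LR}) has bits 13-15 = 0b101,
     so it is dispatched to thumb_record_misc.  */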
12470 
12471   int ret = 0;    /* Return value: negative:failure, 0:success.  */
12472   uint32_t insn_id = 0;
12473 
12474   if (extract_arm_insn (arm_record, insn_size))
12475     {
12476       if (record_debug)
12477         {
12478           printf_unfiltered (_("Process record: error reading memory at "
12479                               "addr %s len = %d.\n"),
12480           paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
12481         }
12482       return -1;
12483     }
12484   else if (ARM_RECORD == record_type)
12485     {
12486       arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12487       insn_id = bits (arm_record->arm_insn, 25, 27);
12488       ret = arm_record_extension_space (arm_record);
12489       /* If this insn has fallen into extension space
12490          then we need not decode it anymore.  */
12491       if (ret != -1 && !INSN_RECORDED(arm_record))
12492         {
12493           ret = arm_handle_insn[insn_id] (arm_record);
12494         }
12495     }
12496   else if (THUMB_RECORD == record_type)
12497     {
12498       /* As thumb does not have condition codes, we set cond to -1.  */
12499       arm_record->cond = -1;
12500       insn_id = bits (arm_record->arm_insn, 13, 15);
12501       ret = thumb_handle_insn[insn_id] (arm_record);
12502     }
12503   else if (THUMB2_RECORD == record_type)
12504     {
12505       printf_unfiltered (_("Process record does not support thumb32 instruction "
12506                            "0x%0x at address %s.\n"), arm_record->arm_insn,
12507                            paddress (arm_record->gdbarch,
12508                            arm_record->this_addr));
12509       ret = -1;
12510     }
12511   else
12512     {
12513       /* Throw assertion.  */
12514       gdb_assert_not_reached ("not a valid instruction, could not decode");
12515     }
12516 
12517   return ret;
12518 }
12519 
12520 
12521 /* Cleans up local record registers and memory allocations.  */
12522 
12523 static void
12524 deallocate_reg_mem (insn_decode_record *record)
12525 {
12526   xfree (record->arm_regs);
12527   xfree (record->arm_mems);
12528 }
12529 
12530 
12531 /* Parse the current instruction and record the values of the registers and
12532    memory that will be changed by the current instruction to
12533    "record_arch_list".  Return -1 if something is wrong.  */
12534 
12535 int
12536 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12537                     CORE_ADDR insn_addr)
12538 {
12539 
12540   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
12541   uint32_t no_of_rec = 0;
12542   int ret = 0;  /* Return value: -1:record failure, 0:success.  */
12543   ULONGEST t_bit = 0, insn_id = 0;
12544 
12545   ULONGEST u_regval = 0;
12546 
12547   insn_decode_record arm_record;
12548 
12549   memset (&arm_record, 0, sizeof (insn_decode_record));
12550   arm_record.regcache = regcache;
12551   arm_record.this_addr = insn_addr;
12552   arm_record.gdbarch = gdbarch;
12553 
12554 
12555   if (record_debug > 1)
12556     {
12557       fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12558                                       "addr = %s\n",
12559       paddress (gdbarch, arm_record.this_addr));
12560     }
12561 
12562   if (extract_arm_insn (&arm_record, 2))
12563     {
12564       if (record_debug)
12565         {
12566           printf_unfiltered (_("Process record: error reading memory at "
12567                              "addr %s len = %d.\n"),
12568                              paddress (arm_record.gdbarch,
12569                              arm_record.this_addr), 2);
12570         }
12571       return -1;
12572     }
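  /* Only the first halfword is fetched here, to determine the ISA state;
     decode_insn re-reads the insn with the proper size once that is known.  */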
12573 
12574   /* Check whether the insn is a thumb or an arm one.  */
12575 
12576   t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12577   regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
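  /* The CPSR T bit selects the ISA: clear means ARM state, set means
     Thumb (or Thumb-2) state.  */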
12578 
12579 
12580   if (!(u_regval & t_bit))
12581     {
12582       /* We are decoding arm insn.  */
12583       ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12584     }
12585   else
12586     {
12587       insn_id = bits (arm_record.arm_insn, 11, 15);
12588       /* Is it a thumb2 (32-bit) insn?  */
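      /* E.g. a first halfword of 0xF7FF (the leading half of a 32-bit BL)
         gives insn_id 0x1E, so the insn is treated as 32-bit Thumb-2.  */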
12589       if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12590         {
12591           ret = decode_insn (&arm_record, THUMB2_RECORD,
12592                              THUMB2_INSN_SIZE_BYTES);
12593         }
12594       else
12595         {
12596           /* We are decoding thumb insn.  */
12597           ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12598         }
12599     }
12600 
12601   if (0 == ret)
12602     {
12603       /* Record registers.  */
12604       record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12605       if (arm_record.arm_regs)
12606         {
12607           for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12608             {
12609               if (record_full_arch_list_add_reg
12610                   (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
12611                 ret = -1;
12612             }
12613         }
12614       /* Record memories.  */
12615       if (arm_record.arm_mems)
12616         {
12617           for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12618             {
12619               if (record_full_arch_list_add_mem
12620                   ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12621 		   arm_record.arm_mems[no_of_rec].len))
12622                 ret = -1;
12623             }
12624         }
12625 
12626       if (record_full_arch_list_add_end ())
12627         ret = -1;
12628     }
12629 
12630 
12631   deallocate_reg_mem (&arm_record);
12632 
12633   return ret;
12634 }
12635 
12636