xref: /dragonfly/contrib/gdb-7/gdb/dwarf2expr.c (revision bcb3e04d)
1 /* DWARF 2 Expression Evaluator.
2 
3    Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009
4    Free Software Foundation, Inc.
5 
6    Contributed by Daniel Berlin (dan@dberlin.org)
7 
8    This file is part of GDB.
9 
10    This program is free software; you can redistribute it and/or modify
11    it under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3 of the License, or
13    (at your option) any later version.
14 
15    This program is distributed in the hope that it will be useful,
16    but WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18    GNU General Public License for more details.
19 
20    You should have received a copy of the GNU General Public License
21    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
22 
23 #include "defs.h"
24 #include "symtab.h"
25 #include "gdbtypes.h"
26 #include "value.h"
27 #include "gdbcore.h"
28 #include "dwarf2.h"
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
31 
32 /* Local prototypes.  */
33 
34 static void execute_stack_op (struct dwarf_expr_context *,
35 			      gdb_byte *, gdb_byte *);
36 static struct type *unsigned_address_type (struct gdbarch *, int);
37 
38 /* Create a new context for the expression evaluator.  */
39 
40 struct dwarf_expr_context *
41 new_dwarf_expr_context (void)
42 {
43   struct dwarf_expr_context *retval;
44   retval = xcalloc (1, sizeof (struct dwarf_expr_context));
45   retval->stack_len = 0;
46   retval->stack_allocated = 10;
47   retval->stack = xmalloc (retval->stack_allocated * sizeof (struct dwarf_stack_value));
48   retval->num_pieces = 0;
49   retval->pieces = 0;
50   retval->max_recursion_depth = 0x100;
51   return retval;
52 }
53 
54 /* Release the memory allocated to CTX.  */
55 
56 void
57 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
58 {
59   xfree (ctx->stack);
60   xfree (ctx->pieces);
61   xfree (ctx);
62 }
63 
64 /* Helper for make_cleanup_free_dwarf_expr_context.  */
65 
66 static void
67 free_dwarf_expr_context_cleanup (void *arg)
68 {
69   free_dwarf_expr_context (arg);
70 }
71 
72 /* Return a cleanup that calls free_dwarf_expr_context.  */
73 
74 struct cleanup *
75 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
76 {
77   return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
78 }
79 
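/* Usage sketch (an assumption about how callers such as dwarf2loc.c drive
   the evaluator; not code taken from this file):

     struct dwarf_expr_context *ctx = new_dwarf_expr_context ();
     struct cleanup *old_chain = make_cleanup_free_dwarf_expr_context (ctx);

     ctx->gdbarch = ...;     (target architecture)
     ctx->addr_size = ...;   (DWARF address size for this CU)
     ctx->baton = ...;       (handed back to the callbacks)
     ctx->read_reg = ...;    (likewise read_mem, get_frame_base, ...)

     dwarf_expr_eval (ctx, data, size);
     result = dwarf_expr_fetch (ctx, 0);

     do_cleanups (old_chain);  */
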
80 /* Expand the memory allocated to CTX's stack to contain at least
81    NEED more elements than are currently used.  */
82 
83 static void
84 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
85 {
86   if (ctx->stack_len + need > ctx->stack_allocated)
87     {
88       size_t newlen = ctx->stack_len + need + 10;
89       ctx->stack = xrealloc (ctx->stack,
90 			     newlen * sizeof (struct dwarf_stack_value));
91       ctx->stack_allocated = newlen;
92     }
93 }
94 
95 /* Push VALUE onto CTX's stack.  */
96 
97 void
98 dwarf_expr_push (struct dwarf_expr_context *ctx, CORE_ADDR value,
99 		 int in_stack_memory)
100 {
101   struct dwarf_stack_value *v;
102 
103   dwarf_expr_grow_stack (ctx, 1);
104   v = &ctx->stack[ctx->stack_len++];
105   v->value = value;
106   v->in_stack_memory = in_stack_memory;
107 }
108 
109 /* Pop the top item off of CTX's stack.  */
110 
111 void
112 dwarf_expr_pop (struct dwarf_expr_context *ctx)
113 {
114   if (ctx->stack_len <= 0)
115     error (_("dwarf expression stack underflow"));
116   ctx->stack_len--;
117 }
118 
119 /* Retrieve the N'th item on CTX's stack.  */
120 
121 CORE_ADDR
122 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
123 {
124   if (ctx->stack_len <= n)
125      error (_("Asked for position %d of stack, stack only has %d elements on it."),
126 	    n, ctx->stack_len);
127   return ctx->stack[ctx->stack_len - (1 + n)].value;
128 
129 }
130 
131 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack.  */
132 
133 int
134 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
135 {
136   if (ctx->stack_len <= n)
137      error (_("Asked for position %d of stack, stack only has %d elements on it."),
138 	    n, ctx->stack_len);
139   return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
140 
141 }
142 
143 /* Add a new piece to CTX's piece list.  */
144 static void
145 add_piece (struct dwarf_expr_context *ctx, ULONGEST size)
146 {
147   struct dwarf_expr_piece *p;
148 
149   ctx->num_pieces++;
150 
151   if (ctx->pieces)
152     ctx->pieces = xrealloc (ctx->pieces,
153                             (ctx->num_pieces
154                              * sizeof (struct dwarf_expr_piece)));
155   else
156     ctx->pieces = xmalloc (ctx->num_pieces
157                            * sizeof (struct dwarf_expr_piece));
158 
159   p = &ctx->pieces[ctx->num_pieces - 1];
160   p->location = ctx->location;
161   p->size = size;
162   if (p->location == DWARF_VALUE_LITERAL)
163     {
164       p->v.literal.data = ctx->data;
165       p->v.literal.length = ctx->len;
166     }
167   else
168     {
169       p->v.expr.value = dwarf_expr_fetch (ctx, 0);
170       p->v.expr.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
171     }
172 }
173 
174 /* Evaluate the expression at ADDR (LEN bytes long) using the context
175    CTX.  */
176 
177 void
178 dwarf_expr_eval (struct dwarf_expr_context *ctx, gdb_byte *addr, size_t len)
179 {
180   int old_recursion_depth = ctx->recursion_depth;
181 
182   execute_stack_op (ctx, addr, addr + len);
183 
184   /* CTX->RECURSION_DEPTH becomes invalid if an exception was thrown here; in that case this assertion is never reached.  */
185 
186   gdb_assert (ctx->recursion_depth == old_recursion_depth);
187 }
188 
189 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
190    by R, and return the new value of BUF.  Verify that it doesn't extend
191    past BUF_END.  */
192 
193 gdb_byte *
194 read_uleb128 (gdb_byte *buf, gdb_byte *buf_end, ULONGEST * r)
195 {
196   unsigned shift = 0;
197   ULONGEST result = 0;
198   gdb_byte byte;
199 
200   while (1)
201     {
202       if (buf >= buf_end)
203 	error (_("read_uleb128: Corrupted DWARF expression."));
204 
205       byte = *buf++;
206       result |= ((ULONGEST) (byte & 0x7f)) << shift;
207       if ((byte & 0x80) == 0)
208 	break;
209       shift += 7;
210     }
211   *r = result;
212   return buf;
213 }
214 
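/* Worked example (from the LEB128 definition, not specific to this file):
   the bytes 0xe5 0x8e 0x26 decode as
     0x65 | (0x0e << 7) | (0x26 << 14) = 101 + 1792 + 622592 = 624485;
   each byte contributes its low seven bits, and the high bit says whether
   another byte follows.  */
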
215 /* Decode the signed LEB128 constant at BUF into the variable pointed to
216    by R, and return the new value of BUF.  Verify that it doesn't extend
217    past BUF_END.  */
218 
219 gdb_byte *
220 read_sleb128 (gdb_byte *buf, gdb_byte *buf_end, LONGEST * r)
221 {
222   unsigned shift = 0;
223   LONGEST result = 0;
224   gdb_byte byte;
225 
226   while (1)
227     {
228       if (buf >= buf_end)
229 	error (_("read_sleb128: Corrupted DWARF expression."));
230 
231       byte = *buf++;
232       result |= ((LONGEST) (byte & 0x7f)) << shift;
233       shift += 7;
234       if ((byte & 0x80) == 0)
235 	break;
236     }
237   if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
238     result |= -(((LONGEST) 1) << shift);
239 
240   *r = result;
241   return buf;
242 }
243 
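/* Worked example (again from the LEB128 definition): the single byte 0x7e
   decodes to 0x7e with SHIFT ending at 7; bit 0x40 is set, so the value is
   sign-extended to -2.  Likewise 0x9b 0xf1 0x59 decodes to 0x16789b, and
   the final byte's 0x40 bit sign-extends it to -624485.  */
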
244 /* Read an address of size ADDR_SIZE from BUF, and verify that it
245    doesn't extend past BUF_END.  */
246 
247 CORE_ADDR
248 dwarf2_read_address (struct gdbarch *gdbarch, gdb_byte *buf,
249 		     gdb_byte *buf_end, int addr_size)
250 {
251   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
252   CORE_ADDR result;
253 
254   if (buf_end - buf < addr_size)
255     error (_("dwarf2_read_address: Corrupted DWARF expression."));
256 
257   /* For most architectures, calling extract_unsigned_integer() alone
258      is sufficient for extracting an address.  However, some
259      architectures (e.g. MIPS) use signed addresses and using
260      extract_unsigned_integer() will not produce a correct
261      result.  Make sure we invoke gdbarch_integer_to_address()
262      for those architectures which require it.
263 
264      The use of `unsigned_address_type' in the code below refers to
265      the type of buf and has no bearing on the signedness of the
266      address being returned.  */
267 
268   if (gdbarch_integer_to_address_p (gdbarch))
269     return gdbarch_integer_to_address
270 	     (gdbarch, unsigned_address_type (gdbarch, addr_size), buf);
271 
272   return extract_unsigned_integer (buf, addr_size, byte_order);
273 }
274 
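/* Illustration (assumed MIPS behaviour, not code from this file): on 64-bit
   MIPS the 4-byte address 0x80001234 must become the sign-extended value
   0xffffffff80001234; gdbarch_integer_to_address performs that conversion,
   whereas extract_unsigned_integer alone would produce
   0x0000000080001234.  */
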
275 /* Return the type of an address of size ADDR_SIZE,
276    for unsigned arithmetic.  */
277 
278 static struct type *
279 unsigned_address_type (struct gdbarch *gdbarch, int addr_size)
280 {
281   switch (addr_size)
282     {
283     case 2:
284       return builtin_type (gdbarch)->builtin_uint16;
285     case 4:
286       return builtin_type (gdbarch)->builtin_uint32;
287     case 8:
288       return builtin_type (gdbarch)->builtin_uint64;
289     default:
290       internal_error (__FILE__, __LINE__,
291 		      _("Unsupported address size.\n"));
292     }
293 }
294 
295 /* Return the type of an address of size ADDR_SIZE,
296    for signed arithmetic.  */
297 
298 static struct type *
299 signed_address_type (struct gdbarch *gdbarch, int addr_size)
300 {
301   switch (addr_size)
302     {
303     case 2:
304       return builtin_type (gdbarch)->builtin_int16;
305     case 4:
306       return builtin_type (gdbarch)->builtin_int32;
307     case 8:
308       return builtin_type (gdbarch)->builtin_int64;
309     default:
310       internal_error (__FILE__, __LINE__,
311 		      _("Unsupported address size.\n"));
312     }
313 }
314 
315 
316 /* Check that the current operator is either at the end of an
317    expression, or that it is followed by a composition operator.  */
318 
319 static void
320 require_composition (gdb_byte *op_ptr, gdb_byte *op_end, const char *op_name)
321 {
322   /* It seems like DW_OP_GNU_uninit should be handled here.  However,
323      it doesn't seem to make sense for DW_OP_*_value, and it was not
324      checked at the other place that this function is called.  */
325   if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
326     error (_("DWARF-2 expression error: `%s' operations must be "
327 	     "used either alone or in conjunction with DW_OP_piece "
328 	     "or DW_OP_bit_piece."),
329 	   op_name);
330 }
331 
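/* Illustrative composed location: an 8-byte value held partly in a register
   and partly on the stack can be described as
     DW_OP_reg3  DW_OP_piece 4  DW_OP_fbreg -8  DW_OP_piece 4
   so operators such as DW_OP_regx, DW_OP_implicit_value and
   DW_OP_stack_value may only be followed by DW_OP_piece or DW_OP_bit_piece
   (or end the expression), which is what require_composition enforces.  */
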
332 /* The engine for the expression evaluator.  Using the context in CTX,
333    evaluate the expression between OP_PTR and OP_END.  */
334 
335 static void
336 execute_stack_op (struct dwarf_expr_context *ctx,
337 		  gdb_byte *op_ptr, gdb_byte *op_end)
338 {
339   enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
340   ctx->location = DWARF_VALUE_MEMORY;
341   ctx->initialized = 1;  /* Default is initialized.  */
342 
343   if (ctx->recursion_depth > ctx->max_recursion_depth)
344     error (_("DWARF-2 expression error: Loop detected (%d)."),
345 	   ctx->recursion_depth);
346   ctx->recursion_depth++;
347 
348   while (op_ptr < op_end)
349     {
350       enum dwarf_location_atom op = *op_ptr++;
351       CORE_ADDR result;
352       /* Assume the value is not in stack memory.
353 	 Code that knows otherwise sets this to 1.
354 	 Some arithmetic on stack addresses can probably be assumed to still
355 	 be a stack address, but we skip this complication for now.
356 	 This is just an optimization, so it's always ok to punt
357 	 and leave this as 0.  */
358       int in_stack_memory = 0;
359       ULONGEST uoffset, reg;
360       LONGEST offset;
361 
362       switch (op)
363 	{
364 	case DW_OP_lit0:
365 	case DW_OP_lit1:
366 	case DW_OP_lit2:
367 	case DW_OP_lit3:
368 	case DW_OP_lit4:
369 	case DW_OP_lit5:
370 	case DW_OP_lit6:
371 	case DW_OP_lit7:
372 	case DW_OP_lit8:
373 	case DW_OP_lit9:
374 	case DW_OP_lit10:
375 	case DW_OP_lit11:
376 	case DW_OP_lit12:
377 	case DW_OP_lit13:
378 	case DW_OP_lit14:
379 	case DW_OP_lit15:
380 	case DW_OP_lit16:
381 	case DW_OP_lit17:
382 	case DW_OP_lit18:
383 	case DW_OP_lit19:
384 	case DW_OP_lit20:
385 	case DW_OP_lit21:
386 	case DW_OP_lit22:
387 	case DW_OP_lit23:
388 	case DW_OP_lit24:
389 	case DW_OP_lit25:
390 	case DW_OP_lit26:
391 	case DW_OP_lit27:
392 	case DW_OP_lit28:
393 	case DW_OP_lit29:
394 	case DW_OP_lit30:
395 	case DW_OP_lit31:
396 	  result = op - DW_OP_lit0;
397 	  break;
398 
399 	case DW_OP_addr:
400 	  result = dwarf2_read_address (ctx->gdbarch,
401 					op_ptr, op_end, ctx->addr_size);
402 	  op_ptr += ctx->addr_size;
403 	  break;
404 
405 	case DW_OP_const1u:
406 	  result = extract_unsigned_integer (op_ptr, 1, byte_order);
407 	  op_ptr += 1;
408 	  break;
409 	case DW_OP_const1s:
410 	  result = extract_signed_integer (op_ptr, 1, byte_order);
411 	  op_ptr += 1;
412 	  break;
413 	case DW_OP_const2u:
414 	  result = extract_unsigned_integer (op_ptr, 2, byte_order);
415 	  op_ptr += 2;
416 	  break;
417 	case DW_OP_const2s:
418 	  result = extract_signed_integer (op_ptr, 2, byte_order);
419 	  op_ptr += 2;
420 	  break;
421 	case DW_OP_const4u:
422 	  result = extract_unsigned_integer (op_ptr, 4, byte_order);
423 	  op_ptr += 4;
424 	  break;
425 	case DW_OP_const4s:
426 	  result = extract_signed_integer (op_ptr, 4, byte_order);
427 	  op_ptr += 4;
428 	  break;
429 	case DW_OP_const8u:
430 	  result = extract_unsigned_integer (op_ptr, 8, byte_order);
431 	  op_ptr += 8;
432 	  break;
433 	case DW_OP_const8s:
434 	  result = extract_signed_integer (op_ptr, 8, byte_order);
435 	  op_ptr += 8;
436 	  break;
437 	case DW_OP_constu:
438 	  op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
439 	  result = uoffset;
440 	  break;
441 	case DW_OP_consts:
442 	  op_ptr = read_sleb128 (op_ptr, op_end, &offset);
443 	  result = offset;
444 	  break;
445 
446 	/* The DW_OP_reg operations are required to occur alone in
447 	   location expressions.  */
448 	case DW_OP_reg0:
449 	case DW_OP_reg1:
450 	case DW_OP_reg2:
451 	case DW_OP_reg3:
452 	case DW_OP_reg4:
453 	case DW_OP_reg5:
454 	case DW_OP_reg6:
455 	case DW_OP_reg7:
456 	case DW_OP_reg8:
457 	case DW_OP_reg9:
458 	case DW_OP_reg10:
459 	case DW_OP_reg11:
460 	case DW_OP_reg12:
461 	case DW_OP_reg13:
462 	case DW_OP_reg14:
463 	case DW_OP_reg15:
464 	case DW_OP_reg16:
465 	case DW_OP_reg17:
466 	case DW_OP_reg18:
467 	case DW_OP_reg19:
468 	case DW_OP_reg20:
469 	case DW_OP_reg21:
470 	case DW_OP_reg22:
471 	case DW_OP_reg23:
472 	case DW_OP_reg24:
473 	case DW_OP_reg25:
474 	case DW_OP_reg26:
475 	case DW_OP_reg27:
476 	case DW_OP_reg28:
477 	case DW_OP_reg29:
478 	case DW_OP_reg30:
479 	case DW_OP_reg31:
480 	  if (op_ptr != op_end
481 	      && *op_ptr != DW_OP_piece
482 	      && *op_ptr != DW_OP_GNU_uninit)
483 	    error (_("DWARF-2 expression error: DW_OP_reg operations must be "
484 		   "used either alone or in conjunction with DW_OP_piece."));
485 
486 	  result = op - DW_OP_reg0;
487 	  ctx->location = DWARF_VALUE_REGISTER;
488 	  break;
489 
490 	case DW_OP_regx:
491 	  op_ptr = read_uleb128 (op_ptr, op_end, &reg);
492 	  require_composition (op_ptr, op_end, "DW_OP_regx");
493 
494 	  result = reg;
495 	  ctx->location = DWARF_VALUE_REGISTER;
496 	  break;
497 
498 	case DW_OP_implicit_value:
499 	  {
500 	    ULONGEST len;
501 	    op_ptr = read_uleb128 (op_ptr, op_end, &len);
502 	    if (op_ptr + len > op_end)
503 	      error (_("DW_OP_implicit_value: too few bytes available."));
504 	    ctx->len = len;
505 	    ctx->data = op_ptr;
506 	    ctx->location = DWARF_VALUE_LITERAL;
507 	    op_ptr += len;
508 	    require_composition (op_ptr, op_end, "DW_OP_implicit_value");
509 	  }
510 	  goto no_push;
511 
512 	case DW_OP_stack_value:
513 	  ctx->location = DWARF_VALUE_STACK;
514 	  require_composition (op_ptr, op_end, "DW_OP_stack_value");
515 	  goto no_push;
516 
517 	case DW_OP_breg0:
518 	case DW_OP_breg1:
519 	case DW_OP_breg2:
520 	case DW_OP_breg3:
521 	case DW_OP_breg4:
522 	case DW_OP_breg5:
523 	case DW_OP_breg6:
524 	case DW_OP_breg7:
525 	case DW_OP_breg8:
526 	case DW_OP_breg9:
527 	case DW_OP_breg10:
528 	case DW_OP_breg11:
529 	case DW_OP_breg12:
530 	case DW_OP_breg13:
531 	case DW_OP_breg14:
532 	case DW_OP_breg15:
533 	case DW_OP_breg16:
534 	case DW_OP_breg17:
535 	case DW_OP_breg18:
536 	case DW_OP_breg19:
537 	case DW_OP_breg20:
538 	case DW_OP_breg21:
539 	case DW_OP_breg22:
540 	case DW_OP_breg23:
541 	case DW_OP_breg24:
542 	case DW_OP_breg25:
543 	case DW_OP_breg26:
544 	case DW_OP_breg27:
545 	case DW_OP_breg28:
546 	case DW_OP_breg29:
547 	case DW_OP_breg30:
548 	case DW_OP_breg31:
549 	  {
550 	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
551 	    result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
552 	    result += offset;
553 	  }
554 	  break;
555 	case DW_OP_bregx:
556 	  {
557 	    op_ptr = read_uleb128 (op_ptr, op_end, &reg);
558 	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
559 	    result = (ctx->read_reg) (ctx->baton, reg);
560 	    result += offset;
561 	  }
562 	  break;
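	/* For example, DW_OP_fbreg with SLEB128 operand -16 evaluates the
	   frame base location (the DW_AT_frame_base expression, often just
	   DW_OP_call_frame_cfa or a DW_OP_bregN) and pushes that address
	   plus -16, flagging the result as stack memory.  */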
563 	case DW_OP_fbreg:
564 	  {
565 	    gdb_byte *datastart;
566 	    size_t datalen;
567 	    unsigned int before_stack_len;
568 
569 	    op_ptr = read_sleb128 (op_ptr, op_end, &offset);
570 	    /* Rather than create a whole new context, we simply
571 	       record the stack length before execution, then reset it
572 	       afterwards, effectively erasing whatever the recursive
573 	       call put there.  */
574 	    before_stack_len = ctx->stack_len;
575 	    /* FIXME: cagney/2003-03-26: This code should be using
576                get_frame_base_address(), and then implement a dwarf2
577                specific this_base method.  */
578 	    (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
579 	    dwarf_expr_eval (ctx, datastart, datalen);
580 	    if (ctx->location == DWARF_VALUE_LITERAL
581 		|| ctx->location == DWARF_VALUE_STACK)
582 	      error (_("Not implemented: computing frame base using explicit value operator"));
583 	    result = dwarf_expr_fetch (ctx, 0);
584 	    if (ctx->location == DWARF_VALUE_REGISTER)
585 	      result = (ctx->read_reg) (ctx->baton, result);
586 	    result = result + offset;
587 	    in_stack_memory = 1;
588 	    ctx->stack_len = before_stack_len;
589 	    ctx->location = DWARF_VALUE_MEMORY;
590 	  }
591 	  break;
592 
593 	case DW_OP_dup:
594 	  result = dwarf_expr_fetch (ctx, 0);
595 	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
596 	  break;
597 
598 	case DW_OP_drop:
599 	  dwarf_expr_pop (ctx);
600 	  goto no_push;
601 
602 	case DW_OP_pick:
603 	  offset = *op_ptr++;
604 	  result = dwarf_expr_fetch (ctx, offset);
605 	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
606 	  break;
607 
608 	case DW_OP_swap:
609 	  {
610 	    struct dwarf_stack_value t1, t2;
611 
612 	    if (ctx->stack_len < 2)
613 	       error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
614 		      ctx->stack_len);
615 	    t1 = ctx->stack[ctx->stack_len - 1];
616 	    t2 = ctx->stack[ctx->stack_len - 2];
617 	    ctx->stack[ctx->stack_len - 1] = t2;
618 	    ctx->stack[ctx->stack_len - 2] = t1;
619 	    goto no_push;
620 	  }
621 
622 	case DW_OP_over:
623 	  result = dwarf_expr_fetch (ctx, 1);
624 	  in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
625 	  break;
626 
627 	case DW_OP_rot:
628 	  {
629 	    struct dwarf_stack_value t1, t2, t3;
630 
631 	    if (ctx->stack_len < 3)
632 	       error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
633 		      ctx->stack_len);
634 	    t1 = ctx->stack[ctx->stack_len - 1];
635 	    t2 = ctx->stack[ctx->stack_len - 2];
636 	    t3 = ctx->stack[ctx->stack_len - 3];
637 	    ctx->stack[ctx->stack_len - 1] = t2;
638 	    ctx->stack[ctx->stack_len - 2] = t3;
639 	    ctx->stack[ctx->stack_len - 3] = t1;
640 	    goto no_push;
641 	  }
642 
643 	case DW_OP_deref:
644 	case DW_OP_deref_size:
645 	case DW_OP_abs:
646 	case DW_OP_neg:
647 	case DW_OP_not:
648 	case DW_OP_plus_uconst:
649 	  /* Unary operations.  */
650 	  result = dwarf_expr_fetch (ctx, 0);
651 	  dwarf_expr_pop (ctx);
652 
653 	  switch (op)
654 	    {
655 	    case DW_OP_deref:
656 	      {
657 		gdb_byte *buf = alloca (ctx->addr_size);
658 		(ctx->read_mem) (ctx->baton, buf, result, ctx->addr_size);
659 		result = dwarf2_read_address (ctx->gdbarch,
660 					      buf, buf + ctx->addr_size,
661 					      ctx->addr_size);
662 	      }
663 	      break;
664 
665 	    case DW_OP_deref_size:
666 	      {
667 		int addr_size = *op_ptr++;
668 		gdb_byte *buf = alloca (addr_size);
669 		(ctx->read_mem) (ctx->baton, buf, result, addr_size);
670 		result = dwarf2_read_address (ctx->gdbarch,
671 					      buf, buf + addr_size,
672 					      addr_size);
673 	      }
674 	      break;
675 
676 	    case DW_OP_abs:
677 	      if ((signed int) result < 0)
678 		result = -result;
679 	      break;
680 	    case DW_OP_neg:
681 	      result = -result;
682 	      break;
683 	    case DW_OP_not:
684 	      result = ~result;
685 	      break;
686 	    case DW_OP_plus_uconst:
687 	      op_ptr = read_uleb128 (op_ptr, op_end, &reg);
688 	      result += reg;
689 	      break;
690 	    }
691 	  break;
692 
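	/* For example, DW_OP_lit4 DW_OP_lit3 DW_OP_plus leaves 7 on the
	   stack, and a comparison such as DW_OP_lt pushes 1 or 0.  The
	   arithmetic below goes through GDB's value machinery so that it
	   is done in the width of the target address size.  */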
693 	case DW_OP_and:
694 	case DW_OP_div:
695 	case DW_OP_minus:
696 	case DW_OP_mod:
697 	case DW_OP_mul:
698 	case DW_OP_or:
699 	case DW_OP_plus:
700 	case DW_OP_shl:
701 	case DW_OP_shr:
702 	case DW_OP_shra:
703 	case DW_OP_xor:
704 	case DW_OP_le:
705 	case DW_OP_ge:
706 	case DW_OP_eq:
707 	case DW_OP_lt:
708 	case DW_OP_gt:
709 	case DW_OP_ne:
710 	  {
711 	    /* Binary operations.  Use the value engine to do computations in
712 	       the right width.  */
713 	    CORE_ADDR first, second;
714 	    enum exp_opcode binop;
715 	    struct value *val1, *val2;
716 	    struct type *stype, *utype;
717 
718 	    second = dwarf_expr_fetch (ctx, 0);
719 	    dwarf_expr_pop (ctx);
720 
721 	    first = dwarf_expr_fetch (ctx, 0);
722 	    dwarf_expr_pop (ctx);
723 
724 	    utype = unsigned_address_type (ctx->gdbarch, ctx->addr_size);
725 	    stype = signed_address_type (ctx->gdbarch, ctx->addr_size);
726 	    val1 = value_from_longest (utype, first);
727 	    val2 = value_from_longest (utype, second);
728 
729 	    switch (op)
730 	      {
731 	      case DW_OP_and:
732 		binop = BINOP_BITWISE_AND;
733 		break;
734 	      case DW_OP_div:
735 		binop = BINOP_DIV;
736                 break;
737 	      case DW_OP_minus:
738 		binop = BINOP_SUB;
739 		break;
740 	      case DW_OP_mod:
741 		binop = BINOP_MOD;
742 		break;
743 	      case DW_OP_mul:
744 		binop = BINOP_MUL;
745 		break;
746 	      case DW_OP_or:
747 		binop = BINOP_BITWISE_IOR;
748 		break;
749 	      case DW_OP_plus:
750 		binop = BINOP_ADD;
751 		break;
752 	      case DW_OP_shl:
753 		binop = BINOP_LSH;
754 		break;
755 	      case DW_OP_shr:
756 		binop = BINOP_RSH;
757                 break;
758 	      case DW_OP_shra:
759 		binop = BINOP_RSH;
760 		val1 = value_from_longest (stype, first);
761 		break;
762 	      case DW_OP_xor:
763 		binop = BINOP_BITWISE_XOR;
764 		break;
765 	      case DW_OP_le:
766 		binop = BINOP_LEQ;
767 		break;
768 	      case DW_OP_ge:
769 		binop = BINOP_GEQ;
770 		break;
771 	      case DW_OP_eq:
772 		binop = BINOP_EQUAL;
773 		break;
774 	      case DW_OP_lt:
775 		binop = BINOP_LESS;
776 		break;
777 	      case DW_OP_gt:
778 		binop = BINOP_GTR;
779 		break;
780 	      case DW_OP_ne:
781 		binop = BINOP_NOTEQUAL;
782 		break;
783 	      default:
784 		internal_error (__FILE__, __LINE__,
785 				_("Can't be reached."));
786 	      }
787 	    result = value_as_long (value_binop (val1, val2, binop));
788 	  }
789 	  break;
790 
791 	case DW_OP_call_frame_cfa:
792 	  result = (ctx->get_frame_cfa) (ctx->baton);
793 	  in_stack_memory = 1;
794 	  break;
795 
796 	case DW_OP_GNU_push_tls_address:
797 	  /* The variable is at a constant offset in the thread-local
798 	     storage block of the objfile containing this expression,
799 	     for the current thread and dynamic linker module.  The
800 	     top of the stack holds the offset from the beginning of
801 	     the thread control block at which the variable is
802 	     located; here we convert that offset into the variable's
803 	     address.  Nothing should follow this operator, so the
804 	     top of the stack is what gets returned.  */
805 	  result = dwarf_expr_fetch (ctx, 0);
806 	  dwarf_expr_pop (ctx);
807 	  result = (ctx->get_tls_address) (ctx->baton, result);
808 	  break;
809 
810 	case DW_OP_skip:
811 	  offset = extract_signed_integer (op_ptr, 2, byte_order);
812 	  op_ptr += 2;
813 	  op_ptr += offset;
814 	  goto no_push;
815 
816 	case DW_OP_bra:
817 	  offset = extract_signed_integer (op_ptr, 2, byte_order);
818 	  op_ptr += 2;
819 	  if (dwarf_expr_fetch (ctx, 0) != 0)
820 	    op_ptr += offset;
821 	  dwarf_expr_pop (ctx);
822 	  goto no_push;
823 
824 	case DW_OP_nop:
825 	  goto no_push;
826 
827         case DW_OP_piece:
828           {
829             ULONGEST size;
830 
831             /* Record the piece.  */
832             op_ptr = read_uleb128 (op_ptr, op_end, &size);
833 	    add_piece (ctx, size);
834 
835             /* Pop off the address/regnum, and reset the location
836 	       type.  */
837 	    if (ctx->location != DWARF_VALUE_LITERAL)
838 	      dwarf_expr_pop (ctx);
839             ctx->location = DWARF_VALUE_MEMORY;
840           }
841           goto no_push;
842 
843 	case DW_OP_GNU_uninit:
844 	  if (op_ptr != op_end)
845 	    error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
846 		   "be the very last op."));
847 
848 	  ctx->initialized = 0;
849 	  goto no_push;
850 
851 	default:
852 	  error (_("Unhandled dwarf expression opcode 0x%x"), op);
853 	}
854 
855       /* Most things push a result value.  */
856       dwarf_expr_push (ctx, result, in_stack_memory);
857     no_push:;
858     }
859 
860   ctx->recursion_depth--;
861   gdb_assert (ctx->recursion_depth >= 0);
862 }
863