/* Copyright (C) 2004, 2005  Free Software Foundation.

   Ensure builtin __mempcpy_chk performs correctly.  */

extern void abort (void);
typedef __SIZE_TYPE__ size_t;
extern size_t strlen (const char *);
extern void *memcpy (void *, const void *, size_t);
extern void *mempcpy (void *, const void *, size_t);
extern int memcmp (const void *, const void *, size_t);
extern void *memset (void *, int, size_t);

#include "chk.h"
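
/* The chk.h harness is expected to provide the helpers used below:
   chk_calls (a count of calls made to the checking __*_chk routines),
   chk_fail_allowed and chk_fail_buf (used to escape from a detected
   overflow instead of terminating), mempcpy_disallowed (makes a library
   mempcpy call abort while set), and an os (ptr) macro, presumably
   wrapping __builtin_object_size (ptr, 0).  */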

const char s1[] = "123";
char p[32] = "";
volatile char *s2 = "defg"; /* prevent constant propagation from happening when whole program assumptions are made.  */
volatile char *s3 = "FGH"; /* prevent constant propagation from happening when whole program assumptions are made.  */
volatile size_t l1 = 1; /* prevent constant propagation from happening when whole program assumptions are made.  */

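/* Unlike memcpy, mempcpy returns a pointer just past the last byte
   written, i.e. (char *) dst + n; the tests below check both that
   return value and the resulting buffer contents.  */
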
void
__attribute__((noinline))
test1 (void)
{
  int i;

#if defined __i386__ || defined __x86_64__
  /* The functions below might not be optimized into direct stores on all
     arches.  It depends on how many instructions would be generated and
     what limits the architecture chooses in STORE_BY_PIECES_P.  */
  mempcpy_disallowed = 1;
#endif

  /* All the mempcpy calls in this routine except the last have a fixed
     length, so object size checking should be done at compile time when
     optimizing.  */
  chk_calls = 0;

  if (mempcpy (p, "ABCDE", 6) != p + 6 || memcmp (p, "ABCDE", 6))
    abort ();
  if (mempcpy (p + 16, "VWX" + 1, 2) != p + 16 + 2
      || memcmp (p + 16, "WX\0\0", 5))
    abort ();
  if (mempcpy (p + 1, "", 1) != p + 1 + 1 || memcmp (p, "A\0CDE", 6))
    abort ();
  if (mempcpy (p + 3, "FGHI", 4) != p + 3 + 4 || memcmp (p, "A\0CFGHI", 8))
    abort ();

  i = 8;
  memcpy (p + 20, "qrstu", 6);
  memcpy (p + 25, "QRSTU", 6);
  if (mempcpy (p + 25 + 1, s1, 3) != (p + 25 + 1 + 3)
      || memcmp (p + 25, "Q123U", 6))
    abort ();

  if (mempcpy (mempcpy (p, "abcdEFG", 4), "efg", 4) != p + 8
      || memcmp (p, "abcdefg", 8))
    abort ();

  /* Test at least one instance of the __builtin_ style.  We do this
     to ensure that it works and that the prototype is correct.  */
  if (__builtin_mempcpy (p, "ABCDE", 6) != p + 6 || memcmp (p, "ABCDE", 6))
    abort ();

  /* If the result of mempcpy is ignored, gcc should use memcpy.
     This should always be optimized, so disallow mempcpy calls.  */
  mempcpy_disallowed = 1;
  mempcpy (p + 5, s3, 1);
  if (memcmp (p, "ABCDEFg", 8))
    abort ();

  if (chk_calls)
    abort ();
  chk_calls = 0;

  mempcpy (p + 6, s1 + 1, l1);
  if (memcmp (p, "ABCDEF2", 8))
    abort ();

  /* The above mempcpy copies into an object with known size, but
     unknown length and with result ignored, so it should be a
     __memcpy_chk call.  */
  if (chk_calls != 1)
    abort ();

  mempcpy_disallowed = 0;
}

long buf1[64];
char *buf2 = (char *) (buf1 + 32);
long buf5[20];
char buf7[20];

void
__attribute__((noinline))
test2_sub (long *buf3, char *buf4, char *buf6, int n)
{
  int i = 0;

  /* All the mempcpy/__builtin_mempcpy/__builtin___mempcpy_chk
     calls in this routine are either fixed length, or have
     side-effects in __builtin_object_size arguments, or
     dst doesn't point into a known object.  */
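  /* When dst doesn't point into a known object, __builtin_object_size
     presumably evaluates to (size_t) -1, so no useful bound is available
     and no checking call should be emitted.  */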
  chk_calls = 0;

  /* These should probably be handled by store_by_pieces on most arches.  */
  if (mempcpy (buf1, "ABCDEFGHI", 9) != (char *) buf1 + 9
      || memcmp (buf1, "ABCDEFGHI\0", 11))
    abort ();

  if (mempcpy (buf1, "abcdefghijklmnopq", 17) != (char *) buf1 + 17
      || memcmp (buf1, "abcdefghijklmnopq\0", 19))
    abort ();

  if (__builtin_mempcpy (buf3, "ABCDEF", 6) != (char *) buf1 + 6
      || memcmp (buf1, "ABCDEFghijklmnopq\0", 19))
    abort ();

  if (__builtin_mempcpy (buf3, "a", 1) != (char *) buf1 + 1
      || memcmp (buf1, "aBCDEFghijklmnopq\0", 19))
    abort ();

  if (mempcpy ((char *) buf3 + 2, "bcd" + ++i, 2) != (char *) buf1 + 4
      || memcmp (buf1, "aBcdEFghijklmnopq\0", 19)
      || i != 1)
    abort ();

  /* These should probably be handled by move_by_pieces on most arches.  */
  if (mempcpy ((char *) buf3 + 4, buf5, 6) != (char *) buf1 + 10
      || memcmp (buf1, "aBcdRSTUVWklmnopq\0", 19))
    abort ();

  if (__builtin_mempcpy ((char *) buf1 + ++i + 8, (char *) buf5 + 1, 1)
      != (char *) buf1 + 11
      || memcmp (buf1, "aBcdRSTUVWSlmnopq\0", 19)
      || i != 2)
    abort ();

  if (mempcpy ((char *) buf3 + 14, buf6, 2) != (char *) buf1 + 16
      || memcmp (buf1, "aBcdRSTUVWSlmnrsq\0", 19))
    abort ();

  if (mempcpy (buf3, buf5, 8) != (char *) buf1 + 8
      || memcmp (buf1, "RSTUVWXYVWSlmnrsq\0", 19))
    abort ();

  if (mempcpy (buf3, buf5, 17) != (char *) buf1 + 17
      || memcmp (buf1, "RSTUVWXYZ01234567\0", 19))
    abort ();

  __builtin_memcpy (buf3, "aBcdEFghijklmnopq\0", 19);

  /* These should be handled either by movmemendM or a mempcpy
     call.  */

  /* buf3 points to an unknown object, so __mempcpy_chk should not be done.  */
  if (mempcpy ((char *) buf3 + 4, buf5, n + 6) != (char *) buf1 + 10
      || memcmp (buf1, "aBcdRSTUVWklmnopq\0", 19))
    abort ();

  /* This call has side-effects in dst, therefore no checking.  */
  if (__builtin___mempcpy_chk ((char *) buf1 + ++i + 8, (char *) buf5 + 1,
                               n + 1, os ((char *) buf1 + ++i + 8))
      != (char *) buf1 + 12
      || memcmp (buf1, "aBcdRSTUVWkSmnopq\0", 19)
      || i != 3)
    abort ();

  if (mempcpy ((char *) buf3 + 14, buf6, n + 2) != (char *) buf1 + 16
      || memcmp (buf1, "aBcdRSTUVWkSmnrsq\0", 19))
    abort ();

  i = 1;

  /* These might be handled by store_by_pieces.  */
  if (mempcpy (buf2, "ABCDEFGHI", 9) != buf2 + 9
      || memcmp (buf2, "ABCDEFGHI\0", 11))
    abort ();

  if (mempcpy (buf2, "abcdefghijklmnopq", 17) != buf2 + 17
      || memcmp (buf2, "abcdefghijklmnopq\0", 19))
    abort ();

  if (__builtin_mempcpy (buf4, "ABCDEF", 6) != buf2 + 6
      || memcmp (buf2, "ABCDEFghijklmnopq\0", 19))
    abort ();

  if (__builtin_mempcpy (buf4, "a", 1) != buf2 + 1
      || memcmp (buf2, "aBCDEFghijklmnopq\0", 19))
    abort ();

  if (mempcpy (buf4 + 2, "bcd" + i++, 2) != buf2 + 4
      || memcmp (buf2, "aBcdEFghijklmnopq\0", 19)
      || i != 2)
    abort ();

  /* These might be handled by move_by_pieces.  */
  if (mempcpy (buf4 + 4, buf7, 6) != buf2 + 10
      || memcmp (buf2, "aBcdRSTUVWklmnopq\0", 19))
    abort ();

  /* Side effect.  */
  if (__builtin___mempcpy_chk (buf2 + i++ + 8, buf7 + 1, 1,
                               os (buf2 + i++ + 8))
      != buf2 + 11
      || memcmp (buf2, "aBcdRSTUVWSlmnopq\0", 19)
      || i != 3)
    abort ();

  if (mempcpy (buf4 + 14, buf6, 2) != buf2 + 16
      || memcmp (buf2, "aBcdRSTUVWSlmnrsq\0", 19))
    abort ();

  __builtin_memcpy (buf4, "aBcdEFghijklmnopq\0", 19);

  /* These should be handled either by movmemendM or a mempcpy
     call.  */
  if (mempcpy (buf4 + 4, buf7, n + 6) != buf2 + 10
      || memcmp (buf2, "aBcdRSTUVWklmnopq\0", 19))
    abort ();

  /* Side effect.  */
  if (__builtin___mempcpy_chk (buf2 + i++ + 8, buf7 + 1,
                               n + 1, os (buf2 + i++ + 8))
      != buf2 + 12
      || memcmp (buf2, "aBcdRSTUVWkSmnopq\0", 19)
      || i != 4)
    abort ();

  if (mempcpy (buf4 + 14, buf6, n + 2) != buf2 + 16
      || memcmp (buf2, "aBcdRSTUVWkSmnrsq\0", 19))
    abort ();

  if (chk_calls)
    abort ();
}

void
__attribute__((noinline))
test2 (void)
{
  long *x;
  char *y;
  int z;
  __builtin_memcpy (buf5, "RSTUVWXYZ0123456789", 20);
  __builtin_memcpy (buf7, "RSTUVWXYZ0123456789", 20);
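  /* Launder the pointers and the zero through empty asm statements so
     the optimizer cannot see what test2_sub's arguments really are.  */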
  __asm ("" : "=r" (x) : "0" (buf1));
  __asm ("" : "=r" (y) : "0" (buf2));
  __asm ("" : "=r" (z) : "0" (0));
  test2_sub (x, y, "rstuvwxyz", z);
}

volatile void *vx;

/* Test whether compile-time checking is done where it should be,
   and likewise runtime object size checking.  */
void
__attribute__((noinline))
test3 (void)
{
  struct A { char buf1[10]; char buf2[10]; } a;
  char *r = l1 == 1 ? &a.buf1[5] : &a.buf2[4];
  char buf3[20];
  int i;
  size_t l;

  /* The following calls should do runtime checking
     - the length is not known, but the destination is.  */
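  /* With object size checking enabled, each such call is expected to be
     expanded along the lines of
       vx = __builtin___mempcpy_chk (a.buf1 + 2, s3, l1,
                                     __builtin_object_size (a.buf1 + 2, 0));
     so the length can be verified against the known destination size at
     run time.  */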
  chk_calls = 0;
  vx = mempcpy (a.buf1 + 2, s3, l1);
  vx = mempcpy (r, s3, l1 + 1);
  r = l1 == 1 ? __builtin_alloca (4) : &a.buf2[7];
  vx = mempcpy (r, s2, l1 + 2);
  vx = mempcpy (r + 2, s3, l1);
  r = buf3;
  for (i = 0; i < 4; ++i)
    {
      if (i == l1 - 1)
        r = &a.buf1[1];
      else if (i == l1)
        r = &a.buf2[7];
      else if (i == l1 + 1)
        r = &buf3[5];
      else if (i == l1 + 2)
        r = &a.buf1[9];
    }
  vx = mempcpy (r, s2, l1);
  if (chk_calls != 5)
    abort ();

  /* The following calls have a known destination and a known length,
     so when optimizing they certainly shouldn't result in the checking
     variants.  */
  chk_calls = 0;
  vx = mempcpy (a.buf1 + 2, s3, 1);
  vx = mempcpy (r, s3, 2);
  r = l1 == 1 ? __builtin_alloca (4) : &a.buf2[7];
  vx = mempcpy (r, s2, 3);
  r = buf3;
  l = 4;
  for (i = 0; i < 4; ++i)
    {
      if (i == l1 - 1)
        r = &a.buf1[1], l = 2;
      else if (i == l1)
        r = &a.buf2[7], l = 3;
      else if (i == l1 + 1)
        r = &buf3[5], l = 4;
      else if (i == l1 + 2)
        r = &a.buf1[9], l = 1;
    }
  vx = mempcpy (r, s2, 1);
  /* Here, l is known to be at most 4 and __builtin_object_size (&buf3[16], 0)
     is 4, so this doesn't need runtime checking.  */
  vx = mempcpy (&buf3[16], s2, l);
  if (chk_calls)
    abort ();
  chk_calls = 0;
}

/* Test whether runtime and/or compile time checking catches
   buffer overflows.  */
void
__attribute__((noinline))
test4 (void)
{
  struct A { char buf1[10]; char buf2[10]; } a;
  char buf3[20];

  chk_fail_allowed = 1;
  /* Runtime checks.  */
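  /* On a detected overflow the harness's checking routines are expected
     to longjmp to chk_fail_buf rather than terminate, so reaching the
     abort () after a checked call means the overflow went unnoticed.  */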
  if (__builtin_setjmp (chk_fail_buf) == 0)
    {
      vx = mempcpy (&a.buf2[9], s2, l1 + 1);
      abort ();
    }
  if (__builtin_setjmp (chk_fail_buf) == 0)
    {
      vx = mempcpy (&a.buf2[7], s3, strlen (s3) + 1);
      abort ();
    }
  /* This should be detectable at compile time already.  */
  if (__builtin_setjmp (chk_fail_buf) == 0)
    {
      vx = mempcpy (&buf3[19], "ab", 2);
      abort ();
    }
  chk_fail_allowed = 0;
}

#ifndef MAX_OFFSET
#define MAX_OFFSET (sizeof (long long))
#endif

#ifndef MAX_COPY
#define MAX_COPY (10 * sizeof (long long))
#endif

#ifndef MAX_EXTRA
#define MAX_EXTRA (sizeof (long long))
#endif

#define MAX_LENGTH (MAX_OFFSET + MAX_COPY + MAX_EXTRA)

/* Use a sequence length that is not divisible by two, to make it more
   likely to detect when words are mixed up.  */
#define SEQUENCE_LENGTH 31

static union {
  char buf[MAX_LENGTH];
  long long align_int;
  long double align_fp;
} u1, u2;
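
/* test5 exercises every combination of source offset, destination
   offset and copy length up to the limits above; after each copy it
   checks the returned end pointer, the copied bytes, and that the
   bytes before and after the destination region are untouched.  */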

void
__attribute__((noinline))
test5 (void)
{
  int off1, off2, len, i;
  char *p, *q, c;

  for (off1 = 0; off1 < MAX_OFFSET; off1++)
    for (off2 = 0; off2 < MAX_OFFSET; off2++)
      for (len = 1; len < MAX_COPY; len++)
        {
          for (i = 0, c = 'A'; i < MAX_LENGTH; i++, c++)
            {
              u1.buf[i] = 'a';
              if (c >= 'A' + SEQUENCE_LENGTH)
                c = 'A';
              u2.buf[i] = c;
            }

          p = mempcpy (u1.buf + off1, u2.buf + off2, len);
          if (p != u1.buf + off1 + len)
            abort ();

          q = u1.buf;
          for (i = 0; i < off1; i++, q++)
            if (*q != 'a')
              abort ();

          for (i = 0, c = 'A' + off2; i < len; i++, q++, c++)
            {
              if (c >= 'A' + SEQUENCE_LENGTH)
                c = 'A';
              if (*q != c)
                abort ();
            }

          for (i = 0; i < MAX_EXTRA; i++, q++)
            if (*q != 'a')
              abort ();
        }
}

#define TESTSIZE 80

char srcb[TESTSIZE] __attribute__ ((aligned));
char dstb[TESTSIZE] __attribute__ ((aligned));

void
__attribute__((noinline))
check (char *test, char *match, int n)
{
  if (memcmp (test, match, n))
    abort ();
}

#define TN(n) \
{ memset (dstb, 0, n); vx = mempcpy (dstb, srcb, n); check (dstb, srcb, n); }
#define T(n) \
TN (n) \
TN ((n) + 1) \
TN ((n) + 2) \
TN ((n) + 3)
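
/* TN copies n bytes from srcb into a cleared dstb and verifies them;
   T covers the lengths n through n + 3, so test6 below exercises every
   length from 0 to TESTSIZE - 1.  */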

void
__attribute__((noinline))
test6 (void)
{
  int i;

  chk_calls = 0;

  for (i = 0; i < sizeof (srcb); ++i)
    srcb[i] = 'a' + i % 26;

  T (0);
  T (4);
  T (8);
  T (12);
  T (16);
  T (20);
  T (24);
  T (28);
  T (32);
  T (36);
  T (40);
  T (44);
  T (48);
  T (52);
  T (56);
  T (60);
  T (64);
  T (68);
  T (72);
  T (76);

  /* All mempcpy calls in this routine have constant arguments.  */
  if (chk_calls)
    abort ();
}

void
main_test (void)
{
#ifndef __OPTIMIZE__
  /* Object size checking is only intended for -O[s123].  */
  return;
#endif
  __asm ("" : "=r" (l1) : "0" (l1));
  test1 ();
  test2 ();
  test3 ();
  test4 ();
  test5 ();
  test6 ();
}