/* Copyright (c) 2006, 2019, Oracle and/or its affiliates. All rights reserved.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License, version 2.0,
   as published by the Free Software Foundation.

   This program is also distributed with certain software (including
   but not limited to OpenSSL) that is licensed under separate terms,
   as designated in a particular file or component or in included license
   documentation. The authors of MySQL hereby grant you an additional
   permission to link the program and your derivative works with the
   separately licensed software that they have included with MySQL.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License, version 2.0, for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/

#include "unittest/mytap/tap.h"

#include "my_config.h"

#include <signal.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "my_stacktrace.h"
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif

/*
  Visual Studio 2003 does not know vsnprintf but knows _vsnprintf.
  We don't put this #define elsewhere because we prefer my_vsnprintf
  everywhere instead, except when linking with libmysys is not
  desirable - the case here.
*/
#if defined(_MSC_VER) && (_MSC_VER == 1310)
#define vsnprintf _vsnprintf
#endif

static void handle_core_signal(int signo) MY_ATTRIBUTE((noreturn));
static void vemit_tap(int pass, char const *fmt, va_list ap)
    MY_ATTRIBUTE((format(printf, 2, 0)));

/**
  @defgroup MyTAP_Internal MyTAP Internals

  Internal functions and data structures for the MyTAP implementation.
*/

/**
  Test data structure.

  Data structure containing all information about the test suite.

  @ingroup MyTAP_Internal
*/
static TEST_DATA g_test = {NO_PLAN, 0, 0, ""};

/**
  Output stream for test report messages.

  The macro is just a temporary solution.

  @ingroup MyTAP_Internal
*/
#define tapout stdout

/**
  Emit the beginning of a test line, that is: "(not) ok", test number,
  and description.

  To emit the directive, use the emit_dir() function.

  @ingroup MyTAP_Internal

  @see emit_dir

  @param pass 'true' if the test passed, 'false' otherwise
  @param fmt  Description of the test in printf() format.
  @param ap   Vararg list for the description string above.
*/
static void vemit_tap(int pass, char const *fmt, va_list ap) {
  fprintf(tapout, "%sok %d%s", pass ? "" : "not ", ++g_test.last,
          (fmt && *fmt) ? " - " : "");
  if (fmt && *fmt) vfprintf(tapout, fmt, ap);
  fflush(tapout);
}

static void vemit_tap1(int pass) {
  fprintf(tapout, "%sok %d%s", pass ? "" : "not ", ++g_test.last, "");
  fflush(tapout);
}

/**
  Emit a TAP directive.

  TAP directives are comments appended to the test line that have the form:

  @code
  ok 1 # skip reason for skipping
  not ok 2 # todo some text explaining what remains
  @endcode

  @ingroup MyTAP_Internal

  @param dir Directive as a string
  @param why Explanation string
*/
static void emit_dir(const char *dir, const char *why) {
  fprintf(tapout, " # %s %s", dir, why);
  fflush(tapout);
}

/**
  Emit a newline to the TAP output stream.

  @ingroup MyTAP_Internal
*/
static void emit_endl() {
  fprintf(tapout, "\n");
  fflush(tapout);
}

static void handle_core_signal(int signo) {
  /* BAIL_OUT("Signal %d thrown", signo); */
#ifdef HAVE_STACKTRACE
  fprintf(stderr, "Signal %d thrown, attempting backtrace.\n", signo);
  my_print_stacktrace(nullptr, 0);
#endif
  signal(signo, SIG_DFL);
  raise(signo);
  _exit(EXIT_FAILURE);
}

void BAIL_OUT(char const *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);
  fprintf(tapout, "Bail out! ");
  vfprintf(tapout, fmt, ap);
  emit_endl();
  va_end(ap);
  exit(255);
}

void diag(char const *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);
  fprintf(tapout, "# ");
  vfprintf(tapout, fmt, ap);
  emit_endl();
  va_end(ap);
}

typedef struct signal_entry {
  int signo;
  void (*handler)(int);
} signal_entry;

static signal_entry install_signal[] = {
#ifdef _WIN32
    {SIGTERM, handle_core_signal},
#else
    {SIGQUIT, handle_core_signal},
#endif
    {SIGILL, handle_core_signal},
    {SIGABRT, handle_core_signal},
    {SIGFPE, handle_core_signal},
    {SIGSEGV, handle_core_signal}
#ifdef SIGBUS
    ,
    {SIGBUS, handle_core_signal}
#endif
#ifdef SIGXCPU
    ,
    {SIGXCPU, handle_core_signal}
#endif
#ifdef SIGXFSZ
    ,
    {SIGXFSZ, handle_core_signal}
#endif
#ifdef SIGSYS
    ,
    {SIGSYS, handle_core_signal}
#endif
#ifdef SIGTRAP
    ,
    {SIGTRAP, handle_core_signal}
#endif
};

int skip_big_tests = 1;

void plan(int const count) {
  char *config = getenv("MYTAP_CONFIG");
  size_t i;

  if (config) skip_big_tests = strcmp(config, "big");

  /*
    Install signal handler
  */

  for (i = 0; i < sizeof(install_signal) / sizeof(*install_signal); ++i)
    signal(install_signal[i].signo, install_signal[i].handler);

  g_test.plan = count;
  switch (count) {
    case NO_PLAN:
      break;
    default:
      if (count > 0) {
        fprintf(tapout, "1..%d\n", count);
        fflush(tapout);
      }
      break;
  }
}

void skip_all(char const *reason, ...) {
  va_list ap;
  va_start(ap, reason);
  fprintf(tapout, "1..0 # skip ");
  vfprintf(tapout, reason, ap);
  fflush(tapout);
  va_end(ap);
  exit(0);
}

void ok(int const pass, char const *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);

  if (!pass && *g_test.todo == '\0') ++g_test.failed;

  vemit_tap(pass, fmt, ap);
  va_end(ap);
  if (*g_test.todo != '\0') emit_dir("todo", g_test.todo);
  emit_endl();
}

void ok1(int const pass) {
  va_list ap;

  memset(&ap, 0, sizeof(ap));

  if (!pass && *g_test.todo == '\0') ++g_test.failed;

  vemit_tap1(pass);

  if (*g_test.todo != '\0') emit_dir("todo", g_test.todo);

  emit_endl();
}

void skip(int how_many, char const *fmt, ...) {
  char reason[80];
  if (fmt && *fmt) {
    va_list ap;
    va_start(ap, fmt);
    vsnprintf(reason, sizeof(reason), fmt, ap);
    va_end(ap);
  } else
    reason[0] = '\0';

  while (how_many-- > 0) {
    va_list ap;
    memset((char *)&ap, 0, sizeof(ap)); /* Keep compiler happy */
    vemit_tap1(1);
    emit_dir("skip", reason);
    emit_endl();
  }
}

void todo_start(char const *message, ...) {
  va_list ap;
  va_start(ap, message);
  vsnprintf(g_test.todo, sizeof(g_test.todo), message, ap);
  va_end(ap);
}

void todo_end() { *g_test.todo = '\0'; }

int exit_status() {
  /*
    If there was no plan, we write one at the end instead.
  */
  if (g_test.plan == NO_PLAN) plan(g_test.last);

  if (g_test.plan != g_test.last) {
    diag("%d tests planned but%s %d executed", g_test.plan,
         (g_test.plan > g_test.last ? " only" : ""), g_test.last);
    return EXIT_FAILURE;
  }

  if (g_test.failed > 0) {
    diag("Failed %d tests!", g_test.failed);
    return EXIT_FAILURE;
  }

  return EXIT_SUCCESS;
}

/**
  @mainpage Testing C and C++ using MyTAP

  @section IntroSec Introduction

  Unit tests are used to test individual components of a system. In
  contrast, functional tests usually test the entire system. The
  rationale is that each component should be correct if the system is
  to be correct. Unit tests are usually small pieces of code that
  test an individual function, class, module, or other unit of the
  code.

  Observe that a correctly functioning system can be built from
  "faulty" components. The problem with this approach is that as the
  system evolves, the bugs surface in unexpected ways, making
  maintenance harder.

  The advantages of using unit tests to test components of the system
  are several:

  - The unit tests can do more thorough testing than the functional
    tests by testing correctness even for pathological use (which
    shouldn't be present in the system). This increases the overall
    robustness of the system and makes maintenance easier.

  - It is easier and faster to find problems with a malfunctioning
    component than to find problems in a malfunctioning system. This
    shortens the compile-run-edit cycle and therefore improves the
    overall performance of development.

  - The component has to support at least two uses: in the system and
    in a unit test. This leads to more generic and stable interfaces
    and in addition promotes the development of reusable components.

  For example, the following are typical functional tests:
  - Do transactions work according to specifications?
  - Can we connect a client to the server and execute statements?

  In contrast, the following are typical unit tests:

  - Can the 'String' class handle a specified list of character sets?
  - Do all operations for 'my_bitmap' produce the correct result?
  - Do all the NIST test vectors for the AES implementation encrypt
    correctly?


  @section UnitTest Writing unit tests

  The purpose of writing unit tests is to use them to drive component
  development towards a solution that passes the tests. This means that the
  unit tests have to be as complete as possible, testing at least:

  - Normal input
  - Borderline cases
  - Faulty input
  - Error handling
  - Bad environment

  @subsection NormalSubSec Normal input

  This is to test that the component has the expected behaviour.
  This is just plain simple: test that it works. For example, test
  that you can unpack what you packed, adding gives the sum, pinching
  the duck makes it quack.

  This is what everybody does when they write tests.


  @subsection BorderlineTests Borderline cases

  If you have a size anywhere for your component, does it work for
  size 1? Size 0? Sizes close to <code>UINT_MAX</code>?

  It might not be sensible to have a size 0, so in this case it is
  not a borderline case, but rather a faulty input (see @ref
  FaultyInputTests).


  @subsection FaultyInputTests Faulty input

  Does your bitmap handle a size of 0 bits? Well, it might not be
  designed for it, but it should <em>not</em> crash the application;
  it should rather produce an error. This is called defensive
  programming.

  Unfortunately, adding checks for values that should just not be
  entered at all is not always practical: the checks cost cycles and
  might cost more than it's worth. For example, some functions are
  designed so that you may not give them a null pointer. In those
  cases it's not sensible to pass them <code>NULL</code> just to see
  them crash.

  Since every experienced programmer adds an <code>assert()</code> to
  ensure that you get a proper failure in debug builds when a null
  pointer is passed (you add asserts too, right?), you will in this
  case instead get a controlled (early) crash in the debug build.

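  As an illustration (a sketch, not code from this library), a
  hypothetical component function can combine both ideas: an assert()
  for the contract violation and an error return for survivable
  faulty input, which a unit test can then check with ok():

  @code
  #include <assert.h>
  #include <errno.h>
  #include <stdlib.h>

  struct buffer { char *data; size_t size; };

  int buffer_init(struct buffer *buf, size_t size) {
    assert(buf != NULL);           // contract violation: crash early in debug
    if (size == 0) return EINVAL;  // faulty but survivable input: report it
    buf->data = malloc(size);
    buf->size = size;
    return buf->data == NULL ? ENOMEM : 0;
  }

  // In the unit test:
  struct buffer buf;
  ok(buffer_init(&buf, 0) == EINVAL, "zero size is rejected");
  @endcode
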

  @subsection ErrorHandlingTests Error handling

  This is testing that the errors your component is designed to
  report actually are produced. For example, test that trying to
  open a non-existing file produces a sensible error code.

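  A minimal check of this kind (a sketch using the standard C library
  rather than any particular component) could look like this:

  @code
  #include <stdio.h>

  FILE *f = fopen("this-file-does-not-exist", "r");
  ok(f == NULL, "opening a non-existing file fails");
  if (f != NULL) fclose(f);
  @endcode
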

  @subsection BadEnvironmentTests Environment

  Sometimes, modules have to behave well even when the environment
  fails to work correctly. Typical examples are when the computer is
  out of dynamic memory or when the disk is full. You can emulate
  this by replacing, e.g., <code>malloc()</code> with your own
  version that will work for a while, but then fail (a sketch of such
  a replacement follows the list below). Some things are worth
  keeping in mind here:

  - Make sure to make the function fail deterministically, so that
    you really can repeat the test.

  - Make sure that it doesn't just fail immediately. The unit might
    have checks for the first case, but might actually fail some time
    in the near future.

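  A minimal sketch of such a deterministic failing allocator (the
  names are hypothetical, not part of this library):

  @code
  #include <stdlib.h>

  // Fail every allocation after the first N succeed.
  static int alloc_budget = 5;

  static void *test_malloc(size_t size) {
    if (alloc_budget-- <= 0) return NULL;  // simulate out-of-memory
    return malloc(size);
  }
  @endcode

  The component under test is then built or configured to call
  test_malloc() instead of malloc(), and the test checks that it
  reports an error instead of crashing once the budget is exhausted.
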

  @section UnitTestStructure How to structure a unit test

  In this section we will give some advice on how to structure the
  unit tests to make the development run smoothly. The basic
  structure of a test is:

  - Plan
  - Test
  - Report


  @subsection TestPlanning Plan the test

  Planning the test means telling how many tests there are. In the
  event that one of the tests causes a crash, it is then possible to
  see that there are fewer tests than expected, and print a proper
  error message.

  To plan a test, use the @c plan() function in the following manner:

  @code
  int main(int argc, char *argv[])
  {
    plan(5);
    .
    .
    .
  }
  @endcode

  If you don't call the @c plan() function, the number of tests
  executed will be printed at the end. This is intended to be used
  while developing the unit, when you are constantly adding tests. It
  is not intended to be used after the unit has been released.


  @subsection TestRunning Execute the test

  To report the status of a test, the @c ok() function is used in the
  following manner:

  @code
  int main(int argc, char *argv[])
  {
    plan(5);
    ok(ducks == paddling_ducks,
       "%d ducks did not paddle", ducks - paddling_ducks);
    .
    .
    .
  }
  @endcode

  This will print a test result line on the standard output in TAP
  format, which allows TAP handling frameworks (like Test::Harness)
  to parse the status of the test.

  @subsection TestReport Report the result of the test

  At the end, a complete test report should be written, with some
  statistics. If the test returns EXIT_SUCCESS, all tests were
  successful, otherwise at least one test failed.

  To get a TAP compliant output and exit status, report the exit
  status in the following manner:

  @code
  int main(int argc, char *argv[])
  {
    plan(5);
    ok(ducks == paddling_ducks,
       "%d ducks did not paddle", ducks - paddling_ducks);
    .
    .
    .
    return exit_status();
  }
  @endcode

  @section DontDoThis Ways to not do unit testing

  In this section, we'll go through some quite common ways to write
  tests that are <em>not</em> a good idea.

  @subsection BreadthFirstTests Doing breadth-first testing

  If you're writing a library with several functions, don't test all
  functions using size 1, then all functions using size 2, etc. If a
  test for size 42 fails, you have no easy way of tracking down why
  it failed.

  It is better to concentrate on getting one function to work at a
  time, which means that you test each function for all sizes that
  you think are reasonable. Then you continue with the next function,
  doing the same. This is usually also the way that a library is
  developed (one function at a time), so stick to testing that is
  appropriate for how the unit is developed.

  @subsection JustToBeSafeTest Writing unnecessarily large tests

  Don't write tests that use parameters in the range 1-1024 unless
  you have a very good reason to believe that the component will
  succeed for 562 but fail for 564 (the numbers picked are just
  examples).

  It is very common to write extensive tests "just to be safe."
  Having a test suite with a lot of values might give you a warm
  fuzzy feeling, but it doesn't really help you find the bugs. Good
  tests fail; seriously, if you write a test that you expect to
  succeed, you don't need to write it. If you think that it
  <em>might</em> fail, <em>then</em> you should write it.

  Don't take this as an excuse to avoid writing any tests at all
  "since I make no mistakes" (when it comes to this, there are two
  kinds of people: those who admit they make mistakes, and those who
  don't); rather, this means that there is no reason to test that
  using a buffer with size 100 works when you have a test for buffer
  size 96.

  The drawback is that the test suite takes longer to run, for little
  or no benefit. It is acceptable to do an exhaustive test if it
  doesn't take too long to run, and it is quite common to do an
  exhaustive test of a function for a small set of values.
  Use your judgment to decide what is excessive: your mileage may
  vary.
*/

/**
  @example simple.t.c

  This is a simple example of how to write a test using the
  library. The output of this program is:

  @code
  1..1
  # Testing basic functions
  ok 1 - Testing gcs()
  @endcode

  The basic structure is: plan the number of test points using the
  plan() function, perform the test and write out the result of each
  test point using the ok() function, print out a diagnostics message
  using diag(), and report the result of the test by calling the
  exit_status() function. Observe that this test does excessive
  testing (see @ref JustToBeSafeTest), but the test point doesn't
  take very long.
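
  Following this structure, a minimal test program producing output
  like the above might look roughly like this (gcs() stands in for
  whatever function the example file actually tests):

  @code
  int main(int argc, char *argv[])
  {
    plan(1);
    diag("Testing basic functions");
    ok(gcs(10, 4) == 2, "Testing gcs()");
    return exit_status();
  }
  @endcode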
*/

/**
  @example todo.t.c

  This example demonstrates how to use the <code>todo_start()</code>
  and <code>todo_end()</code> functions to mark a sequence of tests
  still to be done. Observe that the tests are assumed to fail: if
  any test succeeds, it is considered a "bonus".
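
  The usual pattern is along these lines (feature_x_works() is a
  hypothetical function, not part of the example file):

  @code
  todo_start("Feature X is not implemented yet");
  ok(feature_x_works(), "feature X");   // reported with a "todo" directive
  todo_end();
  @endcode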
*/

/**
  @example skip.t.c

  This is an example of how the <code>SKIP_BLOCK_IF</code> macro can
  be used to skip a predetermined number of tests. Observe that the
  macro actually skips the following statement, but it's not sensible
  to use anything other than a block.
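
  Assuming the SKIP_BLOCK_IF(cond, count, reason) form declared in
  tap.h, a typical use looks roughly like this (the names are
  hypothetical):

  @code
  SKIP_BLOCK_IF(!have_feature, 2, "feature not available") {
    ok(test_one(), "first test");
    ok(test_two(), "second test");
  }
  @endcode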
*/

/**
  @example skip_all.t.c

  Sometimes, you skip an entire test because it's testing a feature
  that doesn't exist on the system that you're testing. To skip an
  entire test, use the <code>skip_all()</code> function according to
  this example.
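
  The general shape is as follows (feature_available() is a
  hypothetical check, not part of the example file); note that
  skip_all() exits the program, so nothing after it runs:

  @code
  int main(int argc, char *argv[])
  {
    if (!feature_available())
      skip_all("feature not available on this system");
    plan(3);
    .
    .
    .
  }
  @endcode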
*/