// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_results_tracker.h"

#include <stddef.h>

#include <memory>
#include <utility>

#include "base/base64.h"
#include "base/command_line.h"
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/format_macros.h"
#include "base/json/json_writer.h"
#include "base/json/string_escape.h"
#include "base/logging.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/test/gtest_util.h"
#include "base/test/launcher/test_launcher.h"
#include "base/test/test_switches.h"
#include "base/time/time.h"
#include "base/values.h"

namespace base {

namespace {

// The default output file for XML output.
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Converts the given time to a date string in ISO 8601 format, without
// timezone information.
// TODO(xyzzyz): Find a good place in Chromium to put it and refactor all uses
// to point to it.
std::string FormatTimeAsIso8601(Time time) {
  Time::Exploded exploded;
  time.UTCExplode(&exploded);
  return StringPrintf("%04d-%02d-%02dT%02d:%02d:%02d",
                      exploded.year,
                      exploded.month,
                      exploded.day_of_month,
                      exploded.hour,
                      exploded.minute,
                      exploded.second);
}

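// Aggregates test results into the counts needed for the XML report
// attributes (tests, failures, disabled, errors) and the total elapsed time.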
struct TestSuiteResultsAggregator {
  TestSuiteResultsAggregator()
      : tests(0), failures(0), disabled(0), errors(0) {}

  void Add(const TestResult& result) {
    tests++;
    elapsed_time += result.elapsed_time;

    switch (result.status) {
      case TestResult::TEST_SUCCESS:
        break;
      case TestResult::TEST_FAILURE:
        failures++;
        break;
      case TestResult::TEST_EXCESSIVE_OUTPUT:
      case TestResult::TEST_FAILURE_ON_EXIT:
      case TestResult::TEST_TIMEOUT:
      case TestResult::TEST_CRASH:
      case TestResult::TEST_UNKNOWN:
      case TestResult::TEST_NOT_RUN:
        errors++;
        break;
      case TestResult::TEST_SKIPPED:
        disabled++;
        break;
    }
  }

  int tests;
  int failures;
  int disabled;
  int errors;

  TimeDelta elapsed_time;
};

}  // namespace

TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(nullptr) {}

TestResultsTracker::~TestResultsTracker() {
  CHECK(thread_checker_.CalledOnValidThread());

  if (!out_)
    return;

  CHECK_GE(iteration_, 0);

  // Maps test case names to test results.
  typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
  TestCaseMap test_case_map;

  TestSuiteResultsAggregator all_tests_aggregator;
  for (const PerIterationData::ResultsMap::value_type& i
           : per_iteration_data_[iteration_].results) {
    // Use the last test result as the final one.
    TestResult result = i.second.test_results.back();
    test_case_map[result.GetTestCaseName()].push_back(result);
    all_tests_aggregator.Add(result);
  }

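  // Emit a gtest-compatible XML report: a <testsuites> root element with one
  // <testsuite> child per test case and one <testcase> entry per test.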
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_,
          "<testsuites name=\"AllTests\" tests=\"%d\" failures=\"%d\""
          " disabled=\"%d\" errors=\"%d\" time=\"%.3f\" timestamp=\"%s\">\n",
          all_tests_aggregator.tests, all_tests_aggregator.failures,
          all_tests_aggregator.disabled, all_tests_aggregator.errors,
          all_tests_aggregator.elapsed_time.InSecondsF(),
          FormatTimeAsIso8601(Time::Now()).c_str());

  for (const TestCaseMap::value_type& i : test_case_map) {
    const std::string testsuite_name = i.first;
    const std::vector<TestResult>& results = i.second;

    TestSuiteResultsAggregator aggregator;
    for (const TestResult& result : results) {
      aggregator.Add(result);
    }
    fprintf(out_,
            "  <testsuite name=\"%s\" tests=\"%d\" "
            "failures=\"%d\" disabled=\"%d\" errors=\"%d\" time=\"%.3f\" "
            "timestamp=\"%s\">\n",
            testsuite_name.c_str(), aggregator.tests, aggregator.failures,
            aggregator.disabled, aggregator.errors,
            aggregator.elapsed_time.InSecondsF(),
            FormatTimeAsIso8601(Time::Now()).c_str());

    for (const TestResult& result : results) {
      fprintf(out_, "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
              " classname=\"%s\">\n",
              result.GetTestName().c_str(),
              result.elapsed_time.InSecondsF(),
              result.GetTestCaseName().c_str());
      if (result.status != TestResult::TEST_SUCCESS) {
        // The actual failure message is not propagated up to here, as it's too
        // much work to escape it properly, and in case of failure one almost
        // always needs to look at the full log anyway.
        fprintf(out_, "      <failure message=\"\" type=\"\"></failure>\n");
      }
      fprintf(out_, "    </testcase>\n");
    }
    fprintf(out_, "  </testsuite>\n");
  }

  fprintf(out_, "</testsuites>\n");
  fclose(out_);
}

bool TestResultsTracker::Init(const CommandLine& command_line) {
  CHECK(thread_checker_.CalledOnValidThread());

  // Prevent initializing twice.
  if (out_) {
    NOTREACHED();
    return false;
  }

  print_temp_leaks_ =
      command_line.HasSwitch(switches::kTestLauncherPrintTempLeaks);

  if (!command_line.HasSwitch(kGTestOutputFlag))
    return true;

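  // The flag value is expected to look like "xml[:PATH]"; everything after
  // the first colon (if any) is used as the output file or directory path.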
  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  FilePath path;
  if (colon_pos != std::string::npos) {
    FilePath flag_path =
        command_line.GetSwitchValuePath(kGTestOutputFlag);
    FilePath::StringType path_string = flag_path.value();
    path = FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', consider it a directory.
    // Note: this does NOT check that the directory (or file) actually exists
    // (the behavior is the same as gtest's).
    if (path.EndsWithSeparator()) {
      FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
                             FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
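  // Fall back to the default output file name when no path was given.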
  if (path.value().empty())
    path = FilePath(kDefaultOutputFile);
  FilePath dir_name = path.DirName();
  if (!DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (because gtest does the same).
    if (!CreateDirectory(dir_name)) {
      LOG(ERROR) << "Failed to create directory " << dir_name.value();
      return false;
    }
  }
  out_ = OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return false;
  }

  return true;
}


void TestResultsTracker::OnTestIterationStarting() {
  CHECK(thread_checker_.CalledOnValidThread());

  // Start with a fresh state for the new iteration.
  iteration_++;
  per_iteration_data_.push_back(PerIterationData());
}

void TestResultsTracker::AddTest(const std::string& test_name) {
  // Record test names without the DISABLED_ prefix so that they are easy
  // to compare with regular test names, e.g. before or after disabling.
  all_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}


void TestResultsTracker::AddDisabledTest(const std::string& test_name) {
  // Record disabled test names without DISABLED_ prefix so that they are easy
  // to compare with regular test names, e.g. before or after disabling.
  disabled_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}

void TestResultsTracker::AddTestLocation(const std::string& test_name,
                                         const std::string& file,
                                         int line) {
  test_locations_.insert(std::make_pair(test_name, CodeLocation(file, line)));
}

void TestResultsTracker::AddTestPlaceholder(const std::string& test_name) {
  test_placeholders_.insert(test_name);
}

void TestResultsTracker::AddTestResult(const TestResult& result) {
  CHECK(thread_checker_.CalledOnValidThread());
  CHECK_GE(iteration_, 0);

  PerIterationData::ResultsMap& results_map =
      per_iteration_data_[iteration_].results;
  std::string test_name_without_disabled_prefix =
      TestNameWithoutDisabledPrefix(result.full_name);
  auto it = results_map.find(test_name_without_disabled_prefix);
  // If the test name is not present in the results map, then we did not
  // generate a placeholder for the test. We shouldn't record its result either.
  // It's a test that the delegate ran, e.g. a PRE_XYZ test.
  if (it == results_map.end())
    return;

  AggregateTestResult& aggregate_test_result = it->second;

  // If the last test result is a placeholder, then get rid of it now that we
  // have real results. It's possible for no placeholder to exist if the test is
  // set up for another test, e.g. PRE_ComponentAppBackgroundPage is a test
  // whose sole purpose is to prime the test ComponentAppBackgroundPage.
  if (!aggregate_test_result.test_results.empty() &&
      aggregate_test_result.test_results.back().status ==
          TestResult::TEST_NOT_RUN) {
    aggregate_test_result.test_results.pop_back();
  }

  aggregate_test_result.test_results.push_back(result);
}

void TestResultsTracker::AddLeakedItems(
    int count,
    const std::vector<std::string>& test_names) {
  DCHECK(count);
  per_iteration_data_.back().leaked_temp_items.emplace_back(count, test_names);
}

void TestResultsTracker::GeneratePlaceholderIteration() {
  CHECK(thread_checker_.CalledOnValidThread());

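  // Seed the current iteration with a TEST_NOT_RUN placeholder result for
  // every expected test, so tests that never report back still show up in
  // the summary and JSON output.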
  for (auto& full_test_name : test_placeholders_) {
    std::string test_name = TestNameWithoutDisabledPrefix(full_test_name);

    TestResult test_result;
    test_result.full_name = full_test_name;
    test_result.status = TestResult::TEST_NOT_RUN;

    // There shouldn't be any existing results when we generate placeholder
    // results.
    CHECK(
        per_iteration_data_[iteration_].results[test_name].test_results.empty())
        << test_name;
    per_iteration_data_[iteration_].results[test_name].test_results.push_back(
        test_result);
  }
}

void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
  TestStatusMap tests_by_status(GetTestStatusMapForCurrentIteration());

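  // Print one list of test names per non-success status.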
  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
             tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
             "failed on exit");
  PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
             tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
             "produced excessive output");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");
  PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
             tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");

  if (print_temp_leaks_) {
    for (const auto& leaking_tests :
         per_iteration_data_.back().leaked_temp_items) {
      PrintLeaks(leaking_tests.first, leaking_tests.second);
    }
  }
}

void TestResultsTracker::PrintSummaryOfAllIterations() const {
  CHECK(thread_checker_.CalledOnValidThread());

  TestStatusMap tests_by_status(GetTestStatusMapForAllIterations());

  fprintf(stdout, "Summary of all test iterations:\n");
  fflush(stdout);

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
             tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
             "failed on exit");
  PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
             tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
             "produced excessive output");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");
  PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
             tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");

  fprintf(stdout, "End of the summary.\n");
  fflush(stdout);
}

void TestResultsTracker::AddGlobalTag(const std::string& tag) {
  global_tags_.insert(tag);
}

bool TestResultsTracker::SaveSummaryAsJSON(
    const FilePath& path,
    const std::vector<std::string>& additional_tags) const {
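  // The resulting JSON has five top-level keys: "global_tags", "all_tests",
  // "disabled_tests", "per_iteration_data" and "test_locations".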
  std::unique_ptr<DictionaryValue> summary_root(new DictionaryValue);

  std::unique_ptr<ListValue> global_tags(new ListValue);
  for (const auto& global_tag : global_tags_) {
    global_tags->AppendString(global_tag);
  }
  for (const auto& tag : additional_tags) {
    global_tags->AppendString(tag);
  }
  summary_root->Set("global_tags", std::move(global_tags));

  std::unique_ptr<ListValue> all_tests(new ListValue);
  for (const auto& test : all_tests_) {
    all_tests->AppendString(test);
  }
  summary_root->Set("all_tests", std::move(all_tests));

  std::unique_ptr<ListValue> disabled_tests(new ListValue);
  for (const auto& disabled_test : disabled_tests_) {
    disabled_tests->AppendString(disabled_test);
  }
  summary_root->Set("disabled_tests", std::move(disabled_tests));

  std::unique_ptr<ListValue> per_iteration_data(new ListValue);

  // Even if we haven't run any tests, we still have the dummy iteration.
  int max_iteration = iteration_ < 0 ? 0 : iteration_;

  for (int i = 0; i <= max_iteration; i++) {
    std::unique_ptr<DictionaryValue> current_iteration_data(
        new DictionaryValue);

    for (const auto& j : per_iteration_data_[i].results) {
      std::unique_ptr<ListValue> test_results(new ListValue);

      for (size_t k = 0; k < j.second.test_results.size(); k++) {
        const TestResult& test_result = j.second.test_results[k];

        std::unique_ptr<DictionaryValue> test_result_value(new DictionaryValue);

        test_result_value->SetStringKey("status", test_result.StatusAsString());
        test_result_value->SetInteger(
            "elapsed_time_ms",
            static_cast<int>(test_result.elapsed_time.InMilliseconds()));

        bool lossless_snippet = false;
        if (IsStringUTF8(test_result.output_snippet)) {
          test_result_value->SetString(
              "output_snippet", test_result.output_snippet);
          lossless_snippet = true;
        } else {
          test_result_value->SetString(
              "output_snippet",
              "<non-UTF-8 snippet, see output_snippet_base64>");
        }

        // TODO(phajdan.jr): Fix typo in JSON key (losless -> lossless)
        // making sure not to break any consumers of this data.
        test_result_value->SetBoolKey("losless_snippet", lossless_snippet);

        // Also include the raw version (base64-encoded so that it can be safely
        // JSON-serialized - there are no guarantees about the character encoding
        // of the snippet). This can be a very useful piece of information when
        // debugging a test failure related to character encoding.
        std::string base64_output_snippet;
        Base64Encode(test_result.output_snippet, &base64_output_snippet);
        test_result_value->SetStringKey("output_snippet_base64",
                                        base64_output_snippet);
        if (!test_result.links.empty()) {
          auto links = std::make_unique<DictionaryValue>();
          for (const auto& link : test_result.links) {
            auto link_info = std::make_unique<DictionaryValue>();
            link_info->SetStringKey("content", link.second);
            links->Set(link.first, std::move(link_info));
          }
          test_result_value->Set("links", std::move(links));
        }
        auto test_result_parts = std::make_unique<ListValue>();
        for (const TestResultPart& result_part :
             test_result.test_result_parts) {
          std::unique_ptr<DictionaryValue> result_part_value(
              new DictionaryValue);
          result_part_value->SetStringKey("type", result_part.TypeAsString());
          result_part_value->SetStringKey("file", result_part.file_name);
          result_part_value->SetIntKey("line", result_part.line_number);

          bool lossless_summary = IsStringUTF8(result_part.summary);
          if (lossless_summary) {
            result_part_value->SetStringKey("summary", result_part.summary);
          } else {
            result_part_value->SetString(
                "summary", "<non-UTF-8 snippet, see summary_base64>");
          }
          result_part_value->SetBoolKey("lossless_summary", lossless_summary);

          std::string encoded_summary;
          Base64Encode(result_part.summary, &encoded_summary);
          result_part_value->SetStringKey("summary_base64", encoded_summary);

          bool lossless_message = IsStringUTF8(result_part.message);
          if (lossless_message) {
            result_part_value->SetStringKey("message", result_part.message);
          } else {
            result_part_value->SetString(
                "message", "<non-UTF-8 snippet, see message_base64>");
          }
          result_part_value->SetBoolKey("lossless_message", lossless_message);

          std::string encoded_message;
          Base64Encode(result_part.message, &encoded_message);
          result_part_value->SetStringKey("message_base64", encoded_message);

          test_result_parts->Append(std::move(result_part_value));
        }
        test_result_value->Set("result_parts", std::move(test_result_parts));

        test_results->Append(std::move(test_result_value));
      }

      current_iteration_data->SetWithoutPathExpansion(j.first,
                                                      std::move(test_results));
    }
    per_iteration_data->Append(std::move(current_iteration_data));
  }
  summary_root->Set("per_iteration_data", std::move(per_iteration_data));

  std::unique_ptr<DictionaryValue> test_locations(new DictionaryValue);
  for (const auto& item : test_locations_) {
    std::string test_name = item.first;
    CodeLocation location = item.second;
    std::unique_ptr<DictionaryValue> location_value(new DictionaryValue);
    location_value->SetStringKey("file", location.file);
    location_value->SetIntKey("line", location.line);
    test_locations->SetWithoutPathExpansion(test_name,
                                            std::move(location_value));
  }
  summary_root->Set("test_locations", std::move(test_locations));

  std::string json;
  if (!JSONWriter::Write(*summary_root, &json))
    return false;

  File output(path, File::FLAG_CREATE_ALWAYS | File::FLAG_WRITE);
  if (!output.IsValid())
    return false;

  int json_size = static_cast<int>(json.size());
  if (output.WriteAtCurrentPos(json.data(), json_size) != json_size) {
    return false;
  }

  // File::Flush() will call fsync(). This is important on Fuchsia to ensure
  // that the file is written to the disk - the system running under qemu will
  // shut down shortly after the test completes. On Fuchsia fsync() times out
  // after 15 seconds. Apparently this may not be enough in some cases,
  // particularly when running net_unittests on buildbots, see
  // https://crbug.com/796318. Try calling fsync() more than once to work
  // around this issue.
  //
  // TODO(sergeyu): Figure out a better solution.
  int flush_attempts_left = 4;
  while (flush_attempts_left-- > 0) {
    if (output.Flush())
      return true;
    LOG(ERROR) << "fsync() failed when saving test output summary. "
               << ((flush_attempts_left > 0) ? "Retrying." : "Giving up.");
  }

  return false;
}

TestResultsTracker::TestStatusMap
    TestResultsTracker::GetTestStatusMapForCurrentIteration() const {
  TestStatusMap tests_by_status;
  GetTestStatusForIteration(iteration_, &tests_by_status);
  return tests_by_status;
}

TestResultsTracker::TestStatusMap
    TestResultsTracker::GetTestStatusMapForAllIterations() const {
  TestStatusMap tests_by_status;
  for (int i = 0; i <= iteration_; i++)
    GetTestStatusForIteration(i, &tests_by_status);
  return tests_by_status;
}

void TestResultsTracker::GetTestStatusForIteration(
    int iteration, TestStatusMap* map) const {
  for (const auto& j : per_iteration_data_[iteration].results) {
    // Use the last test result as the final one.
    const TestResult& result = j.second.test_results.back();
    (*map)[result.status].insert(result.full_name);
  }
}

// Utility function to print a list of test names. Uses iterators so that it
// is compatible with different containers, like vector and set.
template<typename InputIterator>
void TestResultsTracker::PrintTests(InputIterator first,
                                    InputIterator last,
                                    const std::string& description) const {
  size_t count = std::distance(first, last);
  if (count == 0)
    return;

  fprintf(stdout,
          "%" PRIuS " test%s %s:\n",
          count,
          count != 1 ? "s" : "",
          description.c_str());
  for (InputIterator it = first; it != last; ++it) {
    const std::string& test_name = *it;
    const auto location_it = test_locations_.find(test_name);
    CHECK(location_it != test_locations_.end()) << test_name;
    const CodeLocation& location = location_it->second;
    fprintf(stdout, "    %s (%s:%d)\n", test_name.c_str(),
            location.file.c_str(), location.line);
  }
  fflush(stdout);
}

void TestResultsTracker::PrintLeaks(
    int count,
    const std::vector<std::string>& test_names) const {
  fprintf(stdout,
          "ERROR: %d files and/or directories were left behind in the temporary"
          " directory by one or more of these tests: %s\n",
          count, JoinString(test_names, ":").c_str());
  fflush(stdout);
}

TestResultsTracker::AggregateTestResult::AggregateTestResult() = default;

TestResultsTracker::AggregateTestResult::AggregateTestResult(
    const AggregateTestResult& other) = default;

TestResultsTracker::AggregateTestResult::~AggregateTestResult() = default;

TestResultsTracker::PerIterationData::PerIterationData() = default;

TestResultsTracker::PerIterationData::PerIterationData(
    const PerIterationData& other) = default;

TestResultsTracker::PerIterationData::~PerIterationData() = default;

}  // namespace base