// Copyright 2013 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_results_tracker.h"

#include <stddef.h>

#include <memory>
#include <utility>

#include "base/base64.h"
#include "base/command_line.h"
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/format_macros.h"
#include "base/i18n/time_formatting.h"
#include "base/json/json_writer.h"
#include "base/json/string_escape.h"
#include "base/logging.h"
#include "base/strings/strcat.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/test/gtest_util.h"
#include "base/test/launcher/test_launcher.h"
#include "base/test/test_switches.h"
#include "base/time/time.h"
#include "base/values.h"
#include "third_party/icu/source/i18n/unicode/timezone.h"

namespace base {

namespace {

// The default output file for XML output.
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Converts the given time to a date string in ISO 8601 format, without
// timezone information.
// TODO(pkasting): Consider using `TimeFormatAsIso8601()`, possibly modified.
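// Example output (the value shown is illustrative): "2013-05-28T14:33:27",
// in UTC, with no fractional seconds and no timezone suffix.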
std::string FormatTimeAsIso8601(Time time) {
  return base::UnlocalizedTimeFormatWithPattern(time, "yyyy-MM-dd'T'HH:mm:ss",
                                                icu::TimeZone::getGMT());
}

struct TestSuiteResultsAggregator {
  TestSuiteResultsAggregator()
      : tests(0), failures(0), disabled(0), errors(0) {}

  void Add(const TestResult& result) {
    tests++;
    elapsed_time += result.elapsed_time;

    switch (result.status) {
      case TestResult::TEST_SUCCESS:
        break;
      case TestResult::TEST_FAILURE:
        failures++;
        break;
      case TestResult::TEST_EXCESSIVE_OUTPUT:
      case TestResult::TEST_FAILURE_ON_EXIT:
      case TestResult::TEST_TIMEOUT:
      case TestResult::TEST_CRASH:
      case TestResult::TEST_UNKNOWN:
      case TestResult::TEST_NOT_RUN:
        errors++;
        break;
      case TestResult::TEST_SKIPPED:
        disabled++;
        break;
    }
  }

  int tests;
  int failures;
  int disabled;
  int errors;

  TimeDelta elapsed_time;
};

}  // namespace

TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(nullptr) {}

TestResultsTracker::~TestResultsTracker() {
  CHECK(thread_checker_.CalledOnValidThread());

  if (!out_)
    return;

  CHECK_GE(iteration_, 0);

  // Maps test case names to test results.
  typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
  TestCaseMap test_case_map;

  TestSuiteResultsAggregator all_tests_aggregator;
  for (const PerIterationData::ResultsMap::value_type& i
           : per_iteration_data_[iteration_].results) {
    // Use the last test result as the final one.
    TestResult result = i.second.test_results.back();
    test_case_map[result.GetTestCaseName()].push_back(result);
    all_tests_aggregator.Add(result);
  }

  fprintf(out_.get(), "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_.get(),
          "<testsuites name=\"AllTests\" tests=\"%d\" failures=\"%d\""
          " disabled=\"%d\" errors=\"%d\" time=\"%.3f\" timestamp=\"%s\">\n",
          all_tests_aggregator.tests, all_tests_aggregator.failures,
          all_tests_aggregator.disabled, all_tests_aggregator.errors,
          all_tests_aggregator.elapsed_time.InSecondsF(),
          FormatTimeAsIso8601(Time::Now()).c_str());

  for (const TestCaseMap::value_type& i : test_case_map) {
    const std::string testsuite_name = i.first;
    const std::vector<TestResult>& results = i.second;

    TestSuiteResultsAggregator aggregator;
    for (const TestResult& result : results) {
      aggregator.Add(result);
    }
    fprintf(out_.get(),
            "  <testsuite name=\"%s\" tests=\"%d\" "
            "failures=\"%d\" disabled=\"%d\" errors=\"%d\" time=\"%.3f\" "
            "timestamp=\"%s\">\n",
            testsuite_name.c_str(), aggregator.tests, aggregator.failures,
            aggregator.disabled, aggregator.errors,
            aggregator.elapsed_time.InSecondsF(),
            FormatTimeAsIso8601(Time::Now()).c_str());

    for (const TestResult& result : results) {
      fprintf(out_.get(),
              "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
              "%s classname=\"%s\">\n",
              result.GetTestName().c_str(), result.elapsed_time.InSecondsF(),
              (result.timestamp
                   ? StrCat({" timestamp=\"",
                             FormatTimeAsIso8601(*result.timestamp), "\""})
                         .c_str()
                   : ""),
              result.GetTestCaseName().c_str());
      if (result.status != TestResult::TEST_SUCCESS) {
        // The actual failure message is not propagated up to here, as it's too
        // much work to escape it properly, and in case of failure one almost
        // always needs to look at the full log anyway.
        fprintf(out_.get(),
                "      <failure message=\"\" type=\"\"></failure>\n");
      }
      fprintf(out_.get(), "    </testcase>\n");
    }
    fprintf(out_.get(), "  </testsuite>\n");
  }

  fprintf(out_.get(), "</testsuites>\n");
  fclose(out_);
}
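
// For reference, the XML emitted by the destructor above has roughly this
// shape (the suite/test names and values below are illustrative only, not
// taken from a real run):
//
//   <?xml version="1.0" encoding="UTF-8"?>
//   <testsuites name="AllTests" tests="2" failures="1" disabled="0"
//               errors="0" time="0.010" timestamp="2013-05-28T14:33:27">
//     <testsuite name="FooTest" tests="2" failures="1" disabled="0"
//                errors="0" time="0.010" timestamp="2013-05-28T14:33:27">
//       <testcase name="Bar" status="run" time="0.005" classname="FooTest">
//         <failure message="" type=""></failure>
//       </testcase>
//     </testsuite>
//   </testsuites>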

bool TestResultsTracker::Init(const CommandLine& command_line) {
  CHECK(thread_checker_.CalledOnValidThread());

  // Prevent initializing twice.
  if (out_) {
    NOTREACHED();
    return false;
  }

  print_temp_leaks_ =
      command_line.HasSwitch(switches::kTestLauncherPrintTempLeaks);

  if (!command_line.HasSwitch(kGTestOutputFlag))
    return true;

  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  FilePath path;
  if (colon_pos != std::string::npos) {
    FilePath flag_path =
        command_line.GetSwitchValuePath(kGTestOutputFlag);
    FilePath::StringType path_string = flag_path.value();
    path = FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', treat it as a directory.
    // Note: This does NOT check that the directory (or file) actually exists
    // (the behavior is the same as gtest's).
    if (path.EndsWithSeparator()) {
      FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
                             FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
  if (path.value().empty())
    path = FilePath(kDefaultOutputFile);
  FilePath dir_name = path.DirName();
  if (!DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (because gtest does the same).
    if (!CreateDirectory(dir_name)) {
      LOG(ERROR) << "Failed to create directory " << dir_name.value();
      return false;
    }
  }
  out_ = OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return false;
  }

  return true;
}
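
// For reference, kGTestOutputFlag values are handled above roughly as follows
// (the flag is assumed to take gtest's "xml:<path>" form; paths shown are
// illustrative):
//   "xml"               -> test_detail.xml (kDefaultOutputFile)
//   "xml:results.xml"   -> results.xml
//   "xml:some/dir/"     -> some/dir/<test executable>.xml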

void TestResultsTracker::OnTestIterationStarting() {
  CHECK(thread_checker_.CalledOnValidThread());

  // Start with a fresh state for the new iteration.
  iteration_++;
  per_iteration_data_.push_back(PerIterationData());
}

void TestResultsTracker::AddTest(const std::string& test_name) {
  // Record test names without DISABLED_ prefix so that they are easy to
  // compare with regular test names, e.g. before or after disabling.
  all_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}

void TestResultsTracker::AddDisabledTest(const std::string& test_name) {
  // Record disabled test names without DISABLED_ prefix so that they are easy
  // to compare with regular test names, e.g. before or after disabling.
  disabled_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}

void TestResultsTracker::AddTestLocation(const std::string& test_name,
                                         const std::string& file,
                                         int line) {
  test_locations_.insert(std::make_pair(
      TestNameWithoutDisabledPrefix(test_name), CodeLocation(file, line)));
}

void TestResultsTracker::AddTestPlaceholder(const std::string& test_name) {
  test_placeholders_.insert(test_name);
}

void TestResultsTracker::AddTestResult(const TestResult& result) {
  CHECK(thread_checker_.CalledOnValidThread());
  CHECK_GE(iteration_, 0);

  PerIterationData::ResultsMap& results_map =
      per_iteration_data_[iteration_].results;
  std::string test_name_without_disabled_prefix =
      TestNameWithoutDisabledPrefix(result.full_name);
  auto it = results_map.find(test_name_without_disabled_prefix);

  // Results are keyed by the test name without the DISABLED_ prefix, so look
  // up the aggregate result for this test under that name.
  AggregateTestResult& aggregate_test_result = it->second;

  // If the current test_result is a PRE test and it failed, insert its result
  // in the corresponding non-PRE test's place.
  std::string test_name_without_pre_prefix(test_name_without_disabled_prefix);
  ReplaceSubstringsAfterOffset(&test_name_without_pre_prefix, 0, "PRE_", "");
  if (test_name_without_pre_prefix != test_name_without_disabled_prefix) {
    if (result.status != TestResult::TEST_SUCCESS) {
      it = results_map.find(test_name_without_pre_prefix);
      if (!it->second.test_results.empty() &&
          it->second.test_results.back().status == TestResult::TEST_NOT_RUN) {
        // Also need to remove the non-PRE test's placeholder.
        it->second.test_results.pop_back();
      }
      it->second.test_results.push_back(result);
    }
    // We quit early here and let the non-PRE test detect this result and
    // modify its result appropriately.
    return;
  }

  // If the last test result is a placeholder, then get rid of it now that we
  // have real results.
  if (!aggregate_test_result.test_results.empty() &&
      aggregate_test_result.test_results.back().status ==
          TestResult::TEST_NOT_RUN) {
    aggregate_test_result.test_results.pop_back();
  }

  TestResult result_to_add = result;
  result_to_add.full_name = test_name_without_disabled_prefix;
  if (!aggregate_test_result.test_results.empty()) {
    TestResult prev_result = aggregate_test_result.test_results.back();
    if (prev_result.full_name != test_name_without_disabled_prefix) {
      // Some other test's result is in our place! It must be our failed PRE
      // test. Modify our own result if it failed and we succeeded so we don't
      // end up silently swallowing PRE-only failures.
      std::string prev_result_name(prev_result.full_name);
      ReplaceSubstringsAfterOffset(&prev_result_name, 0, "PRE_", "");
      CHECK_EQ(prev_result_name, test_name_without_disabled_prefix);

      if (result.status == TestResult::TEST_SUCCESS) {
        TestResult modified_result(prev_result);
        modified_result.full_name = test_name_without_disabled_prefix;
        result_to_add = modified_result;
      }
      aggregate_test_result.test_results.pop_back();
    }
  }
  aggregate_test_result.test_results.push_back(result_to_add);
}
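
// For reference, the PRE_ handling above works roughly like this (test names
// are illustrative): if PRE_Foo fails, its result is stored under Foo,
// replacing Foo's NOT_RUN placeholder; when Foo itself later reports success,
// the stored PRE_Foo failure is kept as Foo's final result so that PRE-only
// failures are not silently swallowed.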

void TestResultsTracker::AddLeakedItems(
    int count,
    const std::vector<std::string>& test_names) {
  DCHECK(count);
  per_iteration_data_.back().leaked_temp_items.emplace_back(count, test_names);
}

void TestResultsTracker::GeneratePlaceholderIteration() {
  CHECK(thread_checker_.CalledOnValidThread());

  for (auto& full_test_name : test_placeholders_) {
    std::string test_name = TestNameWithoutDisabledPrefix(full_test_name);

    TestResult test_result;
    test_result.full_name = test_name;
    test_result.status = TestResult::TEST_NOT_RUN;

    // There shouldn't be any existing results when we generate placeholder
    // results.
    CHECK(
        per_iteration_data_[iteration_].results[test_name].test_results.empty())
        << test_name;
    per_iteration_data_[iteration_].results[test_name].test_results.push_back(
        test_result);
  }
}

void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
  TestStatusMap tests_by_status(GetTestStatusMapForCurrentIteration());

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
             tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
             "failed on exit");
  PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
             tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
             "produced excessive output");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");
  PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
             tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");

  if (print_temp_leaks_) {
    for (const auto& leaking_tests :
         per_iteration_data_.back().leaked_temp_items) {
      PrintLeaks(leaking_tests.first, leaking_tests.second);
    }
  }
}

void TestResultsTracker::PrintSummaryOfAllIterations() const {
  CHECK(thread_checker_.CalledOnValidThread());

  TestStatusMap tests_by_status(GetTestStatusMapForAllIterations());

  fprintf(stdout, "Summary of all test iterations:\n");
  fflush(stdout);

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
             tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
             "failed on exit");
  PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
             tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
             "produced excessive output");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");
  PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
             tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");

  fprintf(stdout, "End of the summary.\n");
  fflush(stdout);
}

void TestResultsTracker::AddGlobalTag(const std::string& tag) {
  global_tags_.insert(tag);
}

bool TestResultsTracker::SaveSummaryAsJSON(
    const FilePath& path,
    const std::vector<std::string>& additional_tags) const {
  Value::Dict summary_root;

  Value::List global_tags;
  for (const auto& global_tag : global_tags_) {
    global_tags.Append(global_tag);
  }
  for (const auto& tag : additional_tags) {
    global_tags.Append(tag);
  }
  summary_root.Set("global_tags", std::move(global_tags));

  Value::List all_tests;
  for (const auto& test : all_tests_) {
    all_tests.Append(test);
  }
  summary_root.Set("all_tests", std::move(all_tests));

  Value::List disabled_tests;
  for (const auto& disabled_test : disabled_tests_) {
    disabled_tests.Append(disabled_test);
  }
  summary_root.Set("disabled_tests", std::move(disabled_tests));

  Value::List per_iteration_data;

  // Even if we haven't run any tests, we still have the dummy iteration.
  int max_iteration = iteration_ < 0 ? 0 : iteration_;

  for (int i = 0; i <= max_iteration; i++) {
    Value::Dict current_iteration_data;

    for (const auto& j : per_iteration_data_[i].results) {
      Value::List test_results;

      for (size_t k = 0; k < j.second.test_results.size(); k++) {
        const TestResult& test_result = j.second.test_results[k];

        Value::Dict test_result_value;

        test_result_value.Set("status", test_result.StatusAsString());
        test_result_value.Set(
            "elapsed_time_ms",
            static_cast<int>(test_result.elapsed_time.InMilliseconds()));

        if (test_result.thread_id) {
          test_result_value.Set("thread_id",
                                static_cast<int>(*test_result.thread_id));
        }
        if (test_result.process_num)
          test_result_value.Set("process_num", *test_result.process_num);
        if (test_result.timestamp) {
          // The timestamp is formatted using TimeFormatAsIso8601 instead of
          // FormatTimeAsIso8601 here for better accuracy, since the former
          // includes fractions of a second.
          test_result_value.Set(
              "timestamp", TimeFormatAsIso8601(*test_result.timestamp).c_str());
        }

        bool lossless_snippet = false;
        if (IsStringUTF8(test_result.output_snippet)) {
          test_result_value.Set("output_snippet", test_result.output_snippet);
          lossless_snippet = true;
        } else {
          test_result_value.Set(
              "output_snippet",
              "<non-UTF-8 snippet, see output_snippet_base64>");
        }

        // TODO(phajdan.jr): Fix typo in JSON key (losless -> lossless)
        // making sure not to break any consumers of this data.
        test_result_value.Set("losless_snippet", lossless_snippet);

        // Also include the raw version (base64-encoded so that it can be safely
        // JSON-serialized - there are no guarantees about the character encoding
        // of the snippet). This can be a very useful piece of information when
        // debugging a test failure related to character encoding.
        std::string base64_output_snippet =
            base::Base64Encode(test_result.output_snippet);
        test_result_value.Set("output_snippet_base64", base64_output_snippet);
        if (!test_result.links.empty()) {
          Value::Dict links;
          for (const auto& link : test_result.links) {
            Value::Dict link_info;
            link_info.Set("content", link.second);
            links.SetByDottedPath(link.first, std::move(link_info));
          }
          test_result_value.Set("links", std::move(links));
        }
        if (!test_result.tags.empty()) {
          Value::Dict tags;
          for (const auto& tag : test_result.tags) {
            Value::List tag_values;
            for (const auto& tag_value : tag.second) {
              tag_values.Append(tag_value);
            }
            Value::Dict tag_info;
            tag_info.Set("values", std::move(tag_values));
            tags.SetByDottedPath(tag.first, std::move(tag_info));
          }
          test_result_value.Set("tags", std::move(tags));
        }
        if (!test_result.properties.empty()) {
          Value::Dict properties;
          for (const auto& property : test_result.properties) {
            Value::Dict property_info;
            property_info.Set("value", property.second);
            properties.SetByDottedPath(property.first,
                                       std::move(property_info));
          }
          test_result_value.Set("properties", std::move(properties));
        }

        Value::List test_result_parts;
        for (const TestResultPart& result_part :
             test_result.test_result_parts) {
          Value::Dict result_part_value;

          result_part_value.Set("type", result_part.TypeAsString());
          result_part_value.Set("file", result_part.file_name);
          result_part_value.Set("line", result_part.line_number);

          bool lossless_summary = IsStringUTF8(result_part.summary);
          if (lossless_summary) {
            result_part_value.Set("summary", result_part.summary);
          } else {
            result_part_value.Set("summary",
                                  "<non-UTF-8 snippet, see summary_base64>");
          }
          result_part_value.Set("lossless_summary", lossless_summary);

          std::string encoded_summary = base::Base64Encode(result_part.summary);
          result_part_value.Set("summary_base64", encoded_summary);

          bool lossless_message = IsStringUTF8(result_part.message);
          if (lossless_message) {
            result_part_value.Set("message", result_part.message);
          } else {
            result_part_value.Set("message",
                                  "<non-UTF-8 snippet, see message_base64>");
          }
          result_part_value.Set("lossless_message", lossless_message);

          std::string encoded_message = base::Base64Encode(result_part.message);
          result_part_value.Set("message_base64", encoded_message);

          test_result_parts.Append(std::move(result_part_value));
        }
        test_result_value.Set("result_parts", std::move(test_result_parts));

        test_results.Append(std::move(test_result_value));
      }

      current_iteration_data.Set(j.first, std::move(test_results));
    }
    per_iteration_data.Append(std::move(current_iteration_data));
  }
  summary_root.Set("per_iteration_data", std::move(per_iteration_data));

  Value::Dict test_locations;
  for (const auto& item : test_locations_) {
    std::string test_name = item.first;
    CodeLocation location = item.second;
    Value::Dict location_value;
    location_value.Set("file", location.file);
    location_value.Set("line", location.line);
    test_locations.Set(test_name, std::move(location_value));
  }
  summary_root.Set("test_locations", std::move(test_locations));

  std::string json;
  if (!JSONWriter::Write(summary_root, &json))
    return false;

  File output(path, File::FLAG_CREATE_ALWAYS | File::FLAG_WRITE);
  if (!output.IsValid())
    return false;

  int json_size = static_cast<int>(json.size());
  if (output.WriteAtCurrentPos(json.data(), json_size) != json_size) {
    return false;
  }

  // File::Flush() will call fsync(). This is important on Fuchsia to ensure
  // that the file is written to disk - the system running under qemu will
  // shut down shortly after the test completes. On Fuchsia fsync() times out
  // after 15 seconds. Apparently this may not be enough in some cases,
  // particularly when running net_unittests on buildbots; see
  // https://crbug.com/796318. Try calling fsync() more than once to work
  // around this issue.
  //
  // TODO(sergeyu): Figure out a better solution.
  int flush_attempts_left = 4;
  while (flush_attempts_left-- > 0) {
    if (output.Flush())
      return true;
    LOG(ERROR) << "fsync() failed when saving test output summary. "
               << ((flush_attempts_left > 0) ? "Retrying." : " Giving up.");
  }

  return false;
}
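
// For reference, the JSON summary written above has roughly this shape (the
// keys follow the code above; the values shown are illustrative only):
//
//   {
//     "global_tags": ["..."],
//     "all_tests": ["FooTest.Bar", "..."],
//     "disabled_tests": ["..."],
//     "per_iteration_data": [
//       {
//         "FooTest.Bar": [
//           {
//             "status": "SUCCESS",
//             "elapsed_time_ms": 5,
//             "losless_snippet": true,
//             "output_snippet": "...",
//             "output_snippet_base64": "...",
//             "result_parts": []
//           }
//         ]
//       }
//     ],
//     "test_locations": {
//       "FooTest.Bar": {"file": "../../foo_unittest.cc", "line": 12}
//     }
//   }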

TestResultsTracker::TestStatusMap
    TestResultsTracker::GetTestStatusMapForCurrentIteration() const {
  TestStatusMap tests_by_status;
  GetTestStatusForIteration(iteration_, &tests_by_status);
  return tests_by_status;
}

TestResultsTracker::TestStatusMap
    TestResultsTracker::GetTestStatusMapForAllIterations() const {
  TestStatusMap tests_by_status;
  for (int i = 0; i <= iteration_; i++)
    GetTestStatusForIteration(i, &tests_by_status);
  return tests_by_status;
}

void TestResultsTracker::GetTestStatusForIteration(
    int iteration, TestStatusMap* map) const {
  for (const auto& j : per_iteration_data_[iteration].results) {
    // Use the last test result as the final one.
    const TestResult& result = j.second.test_results.back();
    (*map)[result.status].insert(result.full_name);
  }
}

// Utility function to print a list of test names. Uses iterators so it works
// with different containers, such as vector and set.
template<typename InputIterator>
void TestResultsTracker::PrintTests(InputIterator first,
                                    InputIterator last,
                                    const std::string& description) const {
  size_t count = std::distance(first, last);
  if (count == 0)
    return;

  fprintf(stdout,
          "%" PRIuS " test%s %s:\n",
          count,
          count != 1 ? "s" : "",
          description.c_str());
  for (InputIterator it = first; it != last; ++it) {
    const std::string& test_name = *it;
    const auto location_it = test_locations_.find(test_name);
    CHECK(location_it != test_locations_.end()) << test_name;
    const CodeLocation& location = location_it->second;
    fprintf(stdout, "    %s (%s:%d)\n", test_name.c_str(),
            location.file.c_str(), location.line);
  }
  fflush(stdout);
}
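
// For reference, PrintTests produces console output of roughly this form (the
// test names and locations are illustrative):
//   2 tests failed:
//       FooTest.Bar (../../foo_unittest.cc:12)
//       FooTest.Baz (../../foo_unittest.cc:34)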

void TestResultsTracker::PrintLeaks(
    int count,
    const std::vector<std::string>& test_names) const {
  fprintf(stdout,
          "ERROR: %d files and/or directories were left behind in the temporary"
          " directory by one or more of these tests: %s\n",
          count, JoinString(test_names, ":").c_str());
  fflush(stdout);
}

TestResultsTracker::AggregateTestResult::AggregateTestResult() = default;

TestResultsTracker::AggregateTestResult::AggregateTestResult(
    const AggregateTestResult& other) = default;

TestResultsTracker::AggregateTestResult::~AggregateTestResult() = default;

TestResultsTracker::PerIterationData::PerIterationData() = default;

TestResultsTracker::PerIterationData::PerIterationData(
    const PerIterationData& other) = default;

TestResultsTracker::PerIterationData::~PerIterationData() = default;

}  // namespace base