1 # Copyright (C) 2010 Google Inc. All rights reserved.
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 from jsonresults import JsonResults
33 print "ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH"
# Templates used to synthesize the aggregated-results JSON payloads the tests
# feed to JsonResults. "[TESTDATA_*]" placeholders are filled in by
# _make_test_json / _parse_tests_dict via str.replace.
# NOTE(review): this listing is missing source lines (e.g. the opening
# '{"[VERSION]"...' piece and closing paren of JSON_RESULTS_TEMPLATE, and the
# body of JSON_RESULTS_COUNTS_TEMPLATE) — consult the full file before editing.
39 JSON_RESULTS_TEMPLATE = (
41 '"allFixableCount":[[TESTDATA_COUNT]],'
42 '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
43 '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
44 '"deferredCounts":[[TESTDATA_COUNTS]],'
45 '"fixableCount":[[TESTDATA_COUNT]],'
46 '"fixableCounts":[[TESTDATA_COUNTS]],'
47 '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
48 '"tests":{[TESTDATA_TESTS]},'
49 '"webkitRevision":[[TESTDATA_WEBKITREVISION]],'
50 '"wontfixCounts":[[TESTDATA_COUNTS]]'
# Per-build counts entry; "[TESTDATA]" is replaced with the build id.
55 JSON_RESULTS_COUNTS_TEMPLATE = (
# A directory node in the hierarchical "tests" tree: name -> nested children.
66 JSON_RESULTS_DIRECTORY_TEMPLATE = '"[TESTDATA_DIRECTORY]":{[TESTDATA_DATA]}'
# A leaf test entry: run-length-encoded "results" and "times" arrays.
68 JSON_RESULTS_TESTS_TEMPLATE = (
69 '"[TESTDATA_TEST_NAME]":{'
70 '"results":[[TESTDATA_TEST_RESULTS]],'
71 '"times":[[TESTDATA_TEST_TIMES]]}')
# JSONP wrapper the results server emits around the JSON body.
73 JSON_RESULTS_PREFIX = "ADD_RESULTS("
74 JSON_RESULTS_SUFFIX = ");"
# Expected shape of get_test_list() output: builder -> bare test-name map.
76 JSON_RESULTS_TEST_LIST_TEMPLATE = (
77 '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}')
# Unit tests for JsonResults.merge / JsonResults.get_test_list.
80 class JsonResultsTest(unittest.TestCase):
# NOTE(review): the "def setUp(self):" line is missing from this listing;
# this assignment is the setUp body — all tests use the "Webkit" builder.
82 self._builder = "Webkit"
# Build an "ADD_RESULTS(<json>);" payload from a test-data dict holding
# "builds" (newest-first list of build-id strings) and "tests" (nested dict).
# Returns just prefix+suffix (an empty payload) when the data is empty.
# NOTE(review): intermediate source lines are missing from this listing
# (e.g. the empty-test_data guard, list initializations, the per-build loop
# header) — tokens below are kept verbatim.
84 def _make_test_json(self, test_data):
86 return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
88 builds = test_data["builds"]
89 tests = test_data["tests"]
90 if not builds or not tests:
91 return JSON_RESULTS_PREFIX + JSON_RESULTS_SUFFIX
93 json = JSON_RESULTS_TEMPLATE
# For each build, fabricate deterministic numbers derived from the build id:
# build numbers 1000<b>, webkit revisions 2000<b>, chrome revisions 3000<b>,
# and epoch times 100000<b>000.
101 counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
102 build_numbers.append("1000%s" % build)
103 webkit_revision.append("2000%s" % build)
104 chrome_revision.append("3000%s" % build)
105 times.append("100000%s000" % build)
# Splice the per-build lists into the template as comma-joined JSON arrays.
107 json = json.replace("[TESTDATA_COUNTS]", ",".join(counts))
108 json = json.replace("[TESTDATA_COUNT]", ",".join(builds))
109 json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
110 json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
111 json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
112 json = json.replace("[TESTDATA_TIMES]", ",".join(times))
# An explicit "version" in test_data overrides the default format version 3.
114 if "version" in test_data:
115 json = json.replace("[VERSION]", str(test_data["version"]))
117 json = json.replace("[VERSION]", "3")
# Sort test names so output is deterministic and comparable with ==.
120 for (name, test) in sorted(tests.iteritems()):
121 json_tests.append(self._parse_tests_dict(name, test))
123 json = json.replace("[TESTDATA_TESTS]", ",".join(json_tests))
125 return JSON_RESULTS_PREFIX + json + JSON_RESULTS_SUFFIX
# Recursively render one node of the "tests" dict as JSON text.
# A node with a "results" key is a leaf test; anything else is treated as a
# directory whose children are rendered in sorted order.
# NOTE(review): the "else:" line, testdata initialization, and the final
# return are missing from this listing — tokens kept verbatim.
127 def _parse_tests_dict(self, name, test):
128 if "results" in test:
129 test_results = JSON_RESULTS_TESTS_TEMPLATE.replace("[TESTDATA_TEST_NAME]", name)
130 test_results = test_results.replace("[TESTDATA_TEST_RESULTS]", test["results"])
131 test_results = test_results.replace("[TESTDATA_TEST_TIMES]", test["times"])
# Directory node: recurse into children, sorted for deterministic output.
134 test_results = JSON_RESULTS_DIRECTORY_TEMPLATE.replace("[TESTDATA_DIRECTORY]", name)
136 for (child_name, child_test) in sorted(test.iteritems()):
137 testdata.append(self._parse_tests_dict(child_name, child_test))
138 test_results = test_results.replace("[TESTDATA_DATA]", ",".join(testdata))
# Shared driver: serialize the three data dicts, run JsonResults.merge, and
# compare against the expected payload. A falsy expected_data means the merge
# is expected to be rejected (merge returns a falsy result).
# NOTE(review): the "if expected_data:"/"else:" branch lines are missing from
# this listing — the two assertions below belong to those branches.
141 def _test_merge(self, aggregated_data, incremental_data, expected_data, max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
142 aggregated_results = self._make_test_json(aggregated_data)
143 incremental_results = self._make_test_json(incremental_data)
# sort_keys=True makes merge output deterministic for string comparison.
144 merged_results = JsonResults.merge(self._builder, aggregated_results, incremental_results, max_builds, sort_keys=True)
147 expected_results = self._make_test_json(expected_data)
148 self.assertEquals(merged_results, expected_results)
150 self.assertFalse(merged_results)
# Shared driver: get_test_list should strip all per-test detail, leaving only
# bare test names ("<name>":{}) under the builder.
# NOTE(review): json_tests initialization and the closing JSON_RESULTS_SUFFIX
# of the expected_results expression are missing from this listing.
152 def _test_get_test_list(self, input_data, expected_data):
153 input_results = self._make_test_json(input_data)
156 for test in expected_data:
157 json_tests.append("\"" + test + "\":{}")
159 expected_results = (JSON_RESULTS_PREFIX +
160 JSON_RESULTS_TEST_LIST_TEMPLATE.replace("[TESTDATA_TESTS]", ",".join(json_tests)) +
163 actual_results = JsonResults.get_test_list(self._builder, input_results)
164 self.assertEquals(actual_results, expected_results)
# Merging a null/empty incremental payload must leave the aggregate untouched.
166 def test_merge_null_incremental_results(self):
167 # Empty incremental results json.
# Aggregated results (the _test_merge call line is missing from this listing).
171 {"builds": ["2", "1"],
172 "tests": {"001.html": {
173 "results": "[200,\"F\"]",
174 "times": "[200,0]"}}},
175 # Incremental results
177 # Expect no merge happens.
# An incremental payload that is only prefix+suffix (no data) must not merge.
180 def test_merge_empty_incremental_results(self):
181 # No actual incremental test results (only prefix and suffix) to merge.
185 {"builds": ["2", "1"],
186 "tests": {"001.html": {
187 "results": "[200,\"F\"]",
188 "times": "[200,0]"}}},
189 # Incremental results
192 # Expected no merge happens.
# With no existing aggregate, the merge result equals the incremental input.
195 def test_merge_empty_aggregated_results(self):
196 # No existing aggregated results.
197 # Merged results == new incremental results.
201 # Incremental results
203 {"builds": ["2", "1"],
204 "tests": {"001.html": {
205 "results": "[200,\"F\"]",
206 "times": "[200,0]"}}},
# Expected results: identical to the incremental input above.
208 {"builds": ["2", "1"],
209 "tests": {"001.html": {
210 "results": "[200,\"F\"]",
211 "times": "[200,0]"}}})
# Same result type as the newest aggregated run: run counts are summed
# (run-length encoding extends the existing [count, "F"] entry).
213 def test_merge_incremental_single_test_single_run_same_result(self):
214 # Incremental results has the latest build and same test results for
216 # Insert the incremental results at the first place and sum number
217 # of runs for "F" (200 + 1) to get merged results.
220 {"builds": ["2", "1"],
221 "tests": {"001.html": {
222 "results": "[200,\"F\"]",
223 "times": "[200,0]"}}},
224 # Incremental results
226 "tests": {"001.html": {
227 "results": "[1,\"F\"]",
# Expected: build "3" prepended, counts merged to 201.
230 {"builds": ["3", "2", "1"],
231 "tests": {"001.html": {
232 "results": "[201,\"F\"]",
233 "times": "[201,0]"}}})
# Different result type than the newest aggregated run: a new RLE entry is
# prepended instead of summing counts.
235 def test_merge_single_test_single_run_different_result(self):
236 # Incremental results has the latest build but different test results
238 # Insert the incremental results at the first place.
241 {"builds": ["2", "1"],
242 "tests": {"001.html": {
243 "results": "[200,\"F\"]",
244 "times": "[200,0]"}}},
245 # Incremental results
247 "tests": {"001.html": {
248 "results": "[1, \"I\"]",
# Expected: new [1,"I"] entry precedes the old [200,"F"] entry.
251 {"builds": ["3", "2", "1"],
252 "tests": {"001.html": {
253 "results": "[1,\"I\"],[200,\"F\"]",
254 "times": "[1,1],[200,0]"}}})
# New result matches an OLDER entry but not the newest one: still prepended
# as a fresh entry, never merged with the non-adjacent matching entry.
256 def test_merge_single_test_single_run_result_changed(self):
257 # Incremental results has the latest build but results which differ from
258 # the latest result (but are the same as an older result).
261 {"builds": ["2", "1"],
262 "tests": {"001.html": {
263 "results": "[200,\"F\"],[10,\"I\"]",
264 "times": "[200,0],[10,1]"}}},
265 # Incremental results
267 "tests": {"001.html": {
268 "results": "[1,\"I\"]",
# Expected: three distinct entries, newest first.
271 {"builds": ["3", "2", "1"],
272 "tests": {"001.html": {
273 "results": "[1,\"I\"],[200,\"F\"],[10,\"I\"]",
274 "times": "[1,1],[200,0],[10,1]"}}})
# Two tests both receive incremental updates in a single new build; each
# test's counts are merged independently.
# NOTE(review): the "002.html" key lines are missing from this listing.
276 def test_merge_multiple_tests_single_run(self):
277 # All tests have incremental updates.
280 {"builds": ["2", "1"],
281 "tests": {"001.html": {
282 "results": "[200,\"F\"]",
285 "results": "[100,\"I\"]",
286 "times": "[100,1]"}}},
287 # Incremental results
289 "tests": {"001.html": {
290 "results": "[1,\"F\"]",
293 "results": "[1,\"I\"]",
# Expected: counts summed per test (200+1 and 100+1).
296 {"builds": ["3", "2", "1"],
297 "tests": {"001.html": {
298 "results": "[201,\"F\"]",
301 "results": "[101,\"I\"]",
302 "times": "[101,1]"}}})
# Only one test reports in the new build; the silent test gets a [1,"N"]
# (no-data) entry prepended so build alignment is preserved.
304 def test_merge_multiple_tests_single_run_one_no_result(self):
307 {"builds": ["2", "1"],
308 "tests": {"001.html": {
309 "results": "[200,\"F\"]",
312 "results": "[100,\"I\"]",
313 "times": "[100,1]"}}},
314 # Incremental results
316 "tests": {"002.html": {
317 "results": "[1,\"I\"]",
# Expected: 001.html gains [1,"N"]; 002.html counts are summed.
320 {"builds": ["3", "2", "1"],
321 "tests": {"001.html": {
322 "results": "[1,\"N\"],[200,\"F\"]",
325 "results": "[101,\"I\"]",
326 "times": "[101,1]"}}})
# Incremental payload covers two new builds ("4","3") at once; both are
# prepended to the aggregate's build list.
328 def test_merge_single_test_multiple_runs(self):
331 {"builds": ["2", "1"],
332 "tests": {"001.html": {
333 "results": "[200,\"F\"]",
334 "times": "[200,0]"}}},
335 # Incremental results
336 {"builds": ["4", "3"],
337 "tests": {"001.html": {
338 "results": "[2, \"I\"]",
# Expected: all four builds present, newest first.
341 {"builds": ["4", "3", "2", "1"],
342 "tests": {"001.html": {
343 "results": "[2,\"I\"],[200,\"F\"]",
344 "times": "[2,2],[200,0]"}}})
# Multiple tests, multiple new builds: each test's RLE lists are extended
# independently across both new builds.
346 def test_merge_multiple_tests_multiple_runs(self):
349 {"builds": ["2", "1"],
350 "tests": {"001.html": {
351 "results": "[200,\"F\"]",
354 "results": "[10,\"Z\"]",
355 "times": "[10,0]"}}},
356 # Incremental results
357 {"builds": ["4", "3"],
358 "tests": {"001.html": {
359 "results": "[2, \"I\"]",
362 "results": "[1,\"C\"]",
# Expected: per-test new entries prepended over the old ones.
365 {"builds": ["4", "3", "2", "1"],
366 "tests": {"001.html": {
367 "results": "[2,\"I\"],[200,\"F\"]",
368 "times": "[2,2],[200,0]"},
370 "results": "[1,\"C\"],[10,\"Z\"]",
371 "times": "[1,1],[10,0]"}}})
# Stale incremental data (build older than the aggregate's newest) must be
# rejected outright — _test_merge is called with a falsy expectation.
373 def test_merge_incremental_result_older_build(self):
374 # Test the build in incremental results is older than the most recent
375 # build in aggregated results.
376 # The incremental results should be dropped and no merge happens.
379 {"builds": ["3", "1"],
380 "tests": {"001.html": {
381 "results": "[200,\"F\"]",
382 "times": "[200,0]"}}},
383 # Incremental results
385 "tests": {"001.html": {
386 "results": "[1, \"F\"]",
388 # Expected no merge happens.
# Duplicate build id ("2" already aggregated) must also be rejected, even
# though the incremental payload contains a newer build ("3") as well.
391 def test_merge_incremental_result_same_build(self):
392 # Test the build in incremental results is same as the build in
393 # aggregated results.
394 # The incremental results should be dropped and no merge happens.
397 {"builds": ["2", "1"],
398 "tests": {"001.html": {
399 "results": "[200,\"F\"]",
400 "times": "[200,0]"}}},
401 # Incremental results
402 {"builds": ["3", "2"],
403 "tests": {"001.html": {
404 "results": "[2, \"F\"]",
406 # Expected no merge happens.
# A test whose entire history is "N" (no data) is pruned from the merged
# output; tests with real data are kept.
409 def test_merge_remove_test_with_no_data(self):
410 # Remove test where there is no data in all runs.
413 {"builds": ["2", "1"],
414 "tests": {"001.html": {
415 "results": "[200,\"N\"]",
418 "results": "[10,\"F\"]",
419 "times": "[10,0]"}}},
420 # Incremental results
422 "tests": {"001.html": {
423 "results": "[1,\"N\"]",
426 "results": "[1,\"P\"]",
# Expected: 001.html dropped entirely; 002.html merged normally.
429 {"builds": ["3", "2", "1"],
430 "tests": {"002.html": {
431 "results": "[1,\"P\"],[10,\"F\"]",
432 "times": "[11,0]"}}})
# A test that always passes with fast (<1s) times carries no signal and is
# pruned from the merged output.
434 def test_merge_remove_test_with_all_pass(self):
435 # Remove test where all run pass and max running time < 1 seconds
438 {"builds": ["2", "1"],
439 "tests": {"001.html": {
440 "results": "[200,\"P\"]",
443 "results": "[10,\"F\"]",
444 "times": "[10,0]"}}},
445 # Incremental results
447 "tests": {"001.html": {
448 "results": "[1,\"P\"]",
451 "results": "[1,\"P\"]",
# Expected: always-passing fast 001.html dropped; 002.html kept.
454 {"builds": ["3", "2", "1"],
455 "tests": {"002.html": {
456 "results": "[1,\"P\"],[10,\"F\"]",
457 "times": "[11,0]"}}})
# Converse of the previous test: an always-passing test is retained when any
# recorded run time reaches the 1-second threshold (slow tests are tracked).
459 def test_merge_keep_test_with_all_pass_but_slow_time(self):
460 # Do not remove test where all run pass but max running time >= 1 seconds
463 {"builds": ["2", "1"],
464 "tests": {"001.html": {
465 "results": "[200,\"P\"]",
468 "results": "[10,\"F\"]",
469 "times": "[10,0]"}}},
470 # Incremental results
472 "tests": {"001.html": {
473 "results": "[1,\"P\"]",
476 "results": "[1,\"P\"]",
# Expected: 001.html kept despite all-pass, because of the slow time entry.
479 {"builds": ["3", "2", "1"],
480 "tests": {"001.html": {
481 "results": "[201,\"P\"]",
482 "times": "[1,1],[200,0]"},
484 "results": "[1,\"P\"],[10,\"F\"]",
485 "times": "[11,0]"}}})
# History beyond JSON_RESULTS_MAX_BUILDS falls off the tail when a new build
# is merged: the old [1,"I"] entry is pruned from results and times.
487 def test_merge_prune_extra_results(self):
488 # Remove items from test results and times that exceed the max number
489 # of builds to track.
490 max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS)
493 {"builds": ["2", "1"],
494 "tests": {"001.html": {
495 "results": "[" + max_builds + ",\"F\"],[1,\"I\"]",
496 "times": "[" + max_builds + ",0],[1,1]"}}},
497 # Incremental results
499 "tests": {"001.html": {
500 "results": "[1,\"T\"]",
# Expected: trailing [1,"I"]/[1,1] entries pruned past the cap.
503 {"builds": ["3", "2", "1"],
504 "tests": {"001.html": {
505 "results": "[1,\"T\"],[" + max_builds + ",\"F\"]",
506 "times": "[1,1],[" + max_builds + ",0]"}}})
# Same pruning behavior, exercised with the smaller cap passed explicitly as
# _test_merge's max_builds argument (JSON_RESULTS_MAX_BUILDS_SMALL).
508 def test_merge_prune_extra_results_small(self):
509 # Remove items from test results and times that exceed the max number
510 # of builds to track, using smaller threshold.
511 max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL)
514 {"builds": ["2", "1"],
515 "tests": {"001.html": {
516 "results": "[" + max_builds + ",\"F\"],[1,\"I\"]",
517 "times": "[" + max_builds + ",0],[1,1]"}}},
518 # Incremental results
520 "tests": {"001.html": {
521 "results": "[1,\"T\"]",
# Expected: pruned at the small threshold (cap argument follows, not shown
# in this listing).
524 {"builds": ["3", "2", "1"],
525 "tests": {"001.html": {
526 "results": "[1,\"T\"],[" + max_builds + ",\"F\"]",
527 "times": "[1,1],[" + max_builds + ",0]"}}},
# When the new result matches the newest existing type ("F"), the summed
# count is clamped at the cap and the oldest entry ([1,"N"]) falls off.
530 def test_merge_prune_extra_results_with_new_result_of_same_type(self):
531 # Test that merging in a new result of the same type as the last result
532 # causes old results to fall off.
533 max_builds = str(jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL)
536 {"builds": ["2", "1"],
537 "tests": {"001.html": {
538 "results": "[" + max_builds + ",\"F\"],[1,\"N\"]",
539 "times": "[" + max_builds + ",0],[1,1]"}}},
540 # Incremental results
542 "tests": {"001.html": {
543 "results": "[1,\"F\"]",
# Expected: single "F" entry capped at max_builds; "N" entry pruned.
546 {"builds": ["3", "2", "1"],
547 "tests": {"001.html": {
548 "results": "[" + max_builds + ",\"F\"]",
549 "times": "[" + max_builds + ",0]"}}},
# Slash-separated test paths ("foo/001.html") build a directory hierarchy;
# merging must align flat and hierarchical representations of the same test.
# NOTE(review): the incremental payload's nested-dict lines are missing from
# this listing.
552 def test_merge_build_directory_hierarchy(self):
555 {"builds": ["2", "1"],
556 "tests": {"foo/001.html": {
557 "results": "[50,\"F\"]",
560 "results": "[100,\"I\"]",
561 "times": "[100,0]"}}},
562 # Incremental results
566 "results": "[1,\"F\"]",
569 "results": "[1,\"I\"]",
# Expected: per-test counts summed across the hierarchy (50+1, 100+1).
573 {"builds": ["3", "2", "1"],
574 "tests": {"foo/001.html": {
575 "results": "[51,\"F\"]",
578 "results": "[101,\"I\"]",
579 "times": "[101,0]"}},
582 # FIXME(aboxhall): Add some tests for xhtml/svg test results.
# get_test_list returns only the bare test names, stripping result/time
# detail and non-test metadata.
584 def test_get_test_name_list(self):
585 # Get test name list only. Don't include non-test-list data and
586 # of test result details.
587 self._test_get_test_list(
589 {"builds": ["3", "2", "1"],
590 "tests": {"001.html": {
591 "results": "[200,\"P\"]",
594 "results": "[10,\"F\"]",
595 "times": "[10,0]"}}},
# Expected: just the sorted list of test names.
597 ["001.html", "002.html"])
# GTest name modifiers (FLAKY_, DISABLED_, ...) are stripped on merge so
# "foo.FLAKY_bar" results fold into the plain "foo.bar" history; a test
# that only ever existed under a modifier appears under its stripped name.
# NOTE(review): several test-name key lines are missing from this listing.
599 def test_remove_gtest_modifiers(self):
602 {"builds": ["2", "1"],
603 "tests": {"foo.bar": {
604 "results": "[50,\"F\"]",
607 "results": "[100,\"I\"]",
610 "results": "[100,\"I\"]",
613 # Incremental results
615 "tests": {"foo.FLAKY_bar": {
616 "results": "[1,\"F\"]",
618 "foo.DISABLED_bar2": {
619 "results": "[1,\"I\"]",
622 "results": "[1,\"I\"]",
625 "results": "[1,\"I\"]",
628 "results": "[1,\"I\"]",
# Expected: modifier-stripped names merged with existing histories; tests
# absent from the incremental run gain a [1,"N"] no-data entry.
632 {"builds": ["3", "2", "1"],
633 "tests": {"foo.bar": {
634 "results": "[51,\"F\"]",
637 "results": "[101,\"I\"]",
640 "results": "[1,\"I\"]",
643 "results": "[1,\"N\"],[100,\"I\"]",
646 "results": "[1,\"I\"]",
650 if __name__ == '__main__':