Commit 4c461ac6 authored by rniwa@webkit.org

run-perf-tests outputs a cryptic error when the config file is missing.

https://bugs.webkit.org/show_bug.cgi?id=96453

Reviewed by Tony Chang.

Add a special error message when a configuration file is missing.

Also update the help message of --source-json-path to clarify that it specifies
the configuration file on performance test bots.
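For reference, the slave configuration file is just a small JSON dictionary whose
keys are merged into the generated results JSON. A minimal sketch of how a bot
might provide one; the path and the {"key": "value"} entry mirror the unit tests
in this patch, and real bots would supply their own keys:

    import json

    # Write a slave configuration file like the one the unit tests create.
    # Its keys are merged into the output JSON produced by run-perf-tests.
    with open('/mock-checkout/slave-config.json', 'w') as config_file:
        json.dump({'key': 'value'}, config_file)

    # run-perf-tests --output-json-path=/mock-checkout/output.json \
    #     --source-json-path=/mock-checkout/slave-config.json \
    #     --test-results-server=some.host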

* Scripts/webkitpy/performance_tests/perftestsrunner.py:
(PerfTestsRunner._parse_args):
(PerfTestsRunner._generate_and_show_results):
(PerfTestsRunner._merge_slave_config_json):
* Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
(_test_run_with_json_output): Make the upload fail by default and assert the result
inside the function so that we can return the logs instead (a generic sketch of this
pattern follows the list below).
(_test_run_with_json_output.mock_upload_json):
(test_run_with_json_output):
(test_run_with_description):
(test_run_respects_no_results):
(test_run_with_slave_config_json):
(test_run_with_bad_slave_config_json):
(test_run_with_multiple_repositories):
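The log-returning helper change follows a common unittest pattern: install a
temporary log handler, assert the expected exit code inside the helper, and hand
the captured log text back so individual tests can assert on specific messages.
A minimal, self-contained sketch of that pattern; the names here are illustrative
and are not the actual webkitpy helper:

    import logging
    from StringIO import StringIO  # Python 2, matching webkitpy at the time

    def run_and_capture_logs(runner, expected_exit_code=0):
        # Capture everything logged while the runner executes.
        stream = StringIO()
        handler = logging.StreamHandler(stream)
        root = logging.getLogger()
        root.addHandler(handler)
        try:
            assert runner.run() == expected_exit_code
        finally:
            root.removeHandler(handler)
        return stream.getvalue()  # callers assert on messages in this text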


git-svn-id: http://svn.webkit.org/repository/webkit/trunk@128511 268f45cc-cd09-0410-ab3c-d52691b4dbfc
parent 21a3fdbd
Tools/ChangeLog
2012-09-13  Ryosuke Niwa  <rniwa@webkit.org>

        run-perf-tests outputs a cryptic error when the config file is missing.
        https://bugs.webkit.org/show_bug.cgi?id=96453

        Reviewed by Tony Chang.

        Add a special error message when a configuration file is missing.

        Also update the help message of --source-json-path to clarify that it
        specifies the configuration file on performance test bots.

        * Scripts/webkitpy/performance_tests/perftestsrunner.py:
        (PerfTestsRunner._parse_args):
        (PerfTestsRunner._generate_and_show_results):
        (PerfTestsRunner._merge_slave_config_json):
        * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
        (_test_run_with_json_output): Make the upload fail by default and assert
        the result inside the function so that we can return the logs instead.
        (_test_run_with_json_output.mock_upload_json):
        (test_run_with_json_output):
        (test_run_with_description):
        (test_run_respects_no_results):
        (test_run_with_slave_config_json):
        (test_run_with_bad_slave_config_json):
        (test_run_with_multiple_repositories):

2012-09-13  Kenneth Rohde Christiansen  <kenneth@webkit.org>

        Evas_Object* is a ref'ed structure, so treat it as such
Scripts/webkitpy/performance_tests/perftestsrunner.py
@@ -100,8 +100,8 @@ class PerfTestsRunner(object):
                 help="Do not generate results JSON and results page."),
             optparse.make_option("--output-json-path",
                 help="Path to generate a JSON file at; may contain previous results if it already exists."),
-            optparse.make_option("--source-json-path",
-                help="Only used on bots. Path to a JSON file to be merged into the JSON file when --output-json-path is present."),
+            optparse.make_option("--source-json-path",  # FIXME: Rename it to signify the fact it's a slave configuration.
+                help="Only used on bots. Path to a slave configuration file."),
             optparse.make_option("--description",
                 help="Add a description to the output JSON file if one is generated"),
             optparse.make_option("--no-show-results", action="store_false", default=True, dest="show_results",
@@ -180,7 +180,7 @@ class PerfTestsRunner(object):
         output = self._generate_results_dict(self._timestamp, options.description, options.platform, options.builder_name, options.build_number)
         if options.source_json_path:
-            output = self._merge_source_json(options.source_json_path, output)
+            output = self._merge_slave_config_json(options.source_json_path, output)
             if not output:
                 return self.EXIT_CODE_BAD_SOURCE_JSON
@@ -215,13 +215,17 @@ class PerfTestsRunner(object):
         return contents

-    def _merge_source_json(self, source_json_path, output):
+    def _merge_slave_config_json(self, slave_config_json_path, output):
+        if not self._host.filesystem.isfile(slave_config_json_path):
+            _log.error("Missing slave configuration JSON file: %s" % slave_config_json_path)
+            return None
         try:
-            source_json_file = self._host.filesystem.open_text_file_for_reading(source_json_path)
-            source_json = json.load(source_json_file)
-            return dict(source_json.items() + output.items())
+            slave_config_json = self._host.filesystem.open_text_file_for_reading(slave_config_json_path)
+            slave_config = json.load(slave_config_json)
+            return dict(slave_config.items() + output.items())
         except Exception, error:
-            _log.error("Failed to merge source JSON file %s: %s" % (source_json_path, error))
+            _log.error("Failed to merge slave configuration JSON file %s: %s" % (slave_config_json_path, error))
             return None

     def _merge_outputs(self, output_json_path, output):
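One subtlety in the merge above: dict(slave_config.items() + output.items()) is
the Python 2 idiom for combining two dictionaries, and because the output pairs
come second, generated results win over slave configuration keys on any
collision. A quick illustration; the "builder-name" key is made up for the
example:

    slave_config = {'builder-name': 'SomeBot', 'timestamp': 0}
    output = {'timestamp': 123456789, 'results': {}}

    # Python 2: items() returns lists, so + concatenates them; when the same
    # key appears twice, the later pair (from output) wins.
    merged = dict(slave_config.items() + output.items())
    assert merged['timestamp'] == 123456789
    assert merged['builder-name'] == 'SomeBot'

    # An equivalent spelling that also works in Python 3 would be:
    #     merged = dict(slave_config, **output)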
Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py
@@ -282,7 +282,7 @@ max 548000 bytes
         self.assertEqual(results['Parser/memory-test:JSHeap'], {'min': 811000.0, 'max': 848000.0, 'median': 829000.0, 'stdev': 15000.0, 'avg': 832000.0, 'unit': 'bytes'})
         self.assertEqual(results['Parser/memory-test:Malloc'], {'min': 511000.0, 'max': 548000.0, 'median': 529000.0, 'stdev': 13000.0, 'avg': 532000.0, 'unit': 'bytes'})

-    def _test_run_with_json_output(self, runner, filesystem, upload_suceeds=True, expected_exit_code=0):
+    def _test_run_with_json_output(self, runner, filesystem, upload_suceeds=False, expected_exit_code=0):
         filesystem.write_text_file(runner._base_path + '/inspector/pass.html', 'some content')
         filesystem.write_text_file(runner._base_path + '/Bindings/event-target-wrapper.html', 'some content')
@@ -291,7 +291,7 @@ max 548000 bytes
         def mock_upload_json(hostname, json_path):
             self.assertEqual(hostname, 'some.host')
             self.assertEqual(json_path, '/mock-checkout/output.json')
-            uploaded[0] = True
+            uploaded[0] = upload_suceeds
             return upload_suceeds
         runner._upload_json = mock_upload_json
@@ -315,7 +315,9 @@ max 548000 bytes
             '',
             '']))
-        return uploaded[0]
+        self.assertEqual(uploaded[0], upload_suceeds)
+        return logs

     _event_target_wrapper_and_inspector_results = {
         "Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
@@ -324,7 +326,7 @@ max 548000 bytes
     def test_run_with_json_output(self):
         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
             '--test-results-server=some.host'])
-        self._test_run_with_json_output(runner, port.host.filesystem)
+        self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=True)
         self.assertEqual(runner.load_output_json(), {
             "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
             "webkit-revision": "5678", "branch": "webkit-trunk"})
@@ -332,7 +334,7 @@ max 548000 bytes
     def test_run_with_description(self):
         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
             '--test-results-server=some.host', '--description', 'some description'])
-        self._test_run_with_json_output(runner, port.host.filesystem)
+        self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=True)
         self.assertEqual(runner.load_output_json(), {
             "timestamp": 123456789, "description": "some description",
             "results": self._event_target_wrapper_and_inspector_results,
@@ -350,7 +352,7 @@ max 548000 bytes
     def test_run_respects_no_results(self):
         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
             '--test-results-server=some.host', '--no-results'])
-        self.assertFalse(self._test_run_with_json_output(runner, port.host.filesystem))
+        self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=False)
         self.assertFalse(port.host.filesystem.isfile('/mock-checkout/output.json'))

     def test_run_generates_json_by_default(self):
@@ -419,29 +421,30 @@ max 548000 bytes
         port.host.filesystem.write_text_file('/mock-checkout/output.json', '{"another bad json": "1"}')
         self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_MERGE)

-    def test_run_with_json_source(self):
+    def test_run_with_slave_config_json(self):
         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
-            '--source-json-path=/mock-checkout/source.json', '--test-results-server=some.host'])
-        port.host.filesystem.write_text_file('/mock-checkout/source.json', '{"key": "value"}')
-        self._test_run_with_json_output(runner, port.host.filesystem)
+            '--source-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
+        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '{"key": "value"}')
+        self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=True)
         self.assertEqual(runner.load_output_json(), {
             "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
             "webkit-revision": "5678", "branch": "webkit-trunk", "key": "value"})

-    def test_run_with_bad_json_source(self):
+    def test_run_with_bad_slave_config_json(self):
         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
-            '--source-json-path=/mock-checkout/source.json', '--test-results-server=some.host'])
-        self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
-        port.host.filesystem.write_text_file('/mock-checkout/source.json', 'bad json')
+            '--source-json-path=/mock-checkout/slave-config.json', '--test-results-server=some.host'])
+        logs = self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
+        self.assertTrue('Missing slave configuration JSON file: /mock-checkout/slave-config.json' in logs)
+        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', 'bad json')
         self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)
-        port.host.filesystem.write_text_file('/mock-checkout/source.json', '["another bad json"]')
+        port.host.filesystem.write_text_file('/mock-checkout/slave-config.json', '["another bad json"]')
         self._test_run_with_json_output(runner, port.host.filesystem, expected_exit_code=PerfTestsRunner.EXIT_CODE_BAD_SOURCE_JSON)

     def test_run_with_multiple_repositories(self):
         runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
             '--test-results-server=some.host'])
         port.repository_paths = lambda: [('webkit', '/mock-checkout'), ('some', '/mock-checkout/some')]
-        self._test_run_with_json_output(runner, port.host.filesystem)
+        self._test_run_with_json_output(runner, port.host.filesystem, upload_suceeds=True)
         self.assertEqual(runner.load_output_json(), {
             "timestamp": 123456789, "results": self._event_target_wrapper_and_inspector_results,
             "webkit-revision": "5678", "some-revision": "5678", "branch": "webkit-trunk"})