From 611001756956124d9fdd7fbd312a1c4adc7d0f19 Mon Sep 17 00:00:00 2001
From: Shing Lyu
Date: Thu, 10 Nov 2016 11:31:07 +0800
Subject: [PATCH 1/5] Use selenium to drive gecko test

---
 etc/ci/performance/README.md       |   7 +-
 etc/ci/performance/gecko_driver.py |  78 +++++++++++++++++
 etc/ci/performance/runner.py       | 130 +++++++++++++++++------------
 etc/ci/performance/test_all.sh     |   5 +-
 etc/ci/performance/test_runner.py  |  29 +------
 5 files changed, 165 insertions(+), 84 deletions(-)
 create mode 100644 etc/ci/performance/gecko_driver.py

diff --git a/etc/ci/performance/README.md b/etc/ci/performance/README.md
index 789236d3c08..8c1966fd19c 100644
--- a/etc/ci/performance/README.md
+++ b/etc/ci/performance/README.md
@@ -64,9 +64,10 @@ If you want to test the data submission code in `submit_to_perfherder.py` withou
 ## For Gecko
 
 * Install Firefox Nightly in your PATH
-* Install [jpm](https://developer.mozilla.org/en-US/Add-ons/SDK/Tools/jpm#Installation)
-* Run `jpm xpi` in the `firefox/addon` folder
-* Install the generated `xpi` file to your Firefox Nightly
+* Download [geckodrive](https://github.com/mozilla/geckodriver/releases) and add it to the `PATH`
+* `pip install selenium`
+* Run `python gecko_driver.py` to test
+
 
 # Troubleshooting
 
diff --git a/etc/ci/performance/gecko_driver.py b/etc/ci/performance/gecko_driver.py
new file mode 100644
index 00000000000..f19b7e683ad
--- /dev/null
+++ b/etc/ci/performance/gecko_driver.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from selenium import webdriver
+from selenium.common.exceptions import TimeoutException
+
+
+def execute_gecko_test(testcase, timeout):
+    firefox_binary = "./firefox/firefox/firefox"
+    driver = webdriver.Firefox(firefox_binary=firefox_binary)
+    driver.set_page_load_timeout(timeout)
+    failed = False
+    try:
+        driver.get(testcase)
+    except TimeoutException:
+        print("Timeout!")
+        driver.quit()
+        failed = True
+
+    timings = {
+        "testcase": testcase,
+        "title": driver.title.replace(",", ",")
+    }
+
+    timing_names = [
+        "navigationStart",
+        "unloadEventStart",
+        "domLoading",
+        "fetchStart",
+        "responseStart",
+        "loadEventEnd",
+        "connectStart",
+        "domainLookupStart",
+        "redirectStart",
+        "domContentLoadedEventEnd",
+        "requestStart",
+        "secureConnectionStart",
+        "connectEnd",
+        "loadEventStart",
+        "domInteractive",
+        "domContentLoadedEventStart",
+        "redirectEnd",
+        "domainLookupEnd",
+        "unloadEventEnd",
+        "responseEnd",
+        "domComplete",
+    ]
+
+    # We could have just run
+    # timings = driver.execute_script("return performance.timing")
+    # But for some test case the webdriver will fail with "too much recursion"
+    # So we need to get the timing fields one by one
+    for name in timing_names:
+        if failed:
+            if name == "navigationStart":
+                timings[name] = 0
+            else:
+                timings[name] = -1
+        else:
+            timings[name] = driver.execute_script(
+                "return performance.timing.{0}".format(name)
+            )
+
+    timings['testcase'] = testcase
+    # driver.quit() gives an "'NoneType' object has no attribute 'path'" error.
+    # Fixed in
+    # https://github.com/SeleniumHQ/selenium/commit/9157c7071f9900c2608f5ca40ae4f518ed373b96
+    driver.quit()
+    return [timings]
+
+if __name__ == '__main__':
+    # Just for manual testing
+    from pprint import pprint
+    url = "http://localhost:8000/page_load_test/tp5n/dailymail.co.uk/www.dailymail.co.uk/ushome/index.html"
+    pprint(execute_gecko_test(url, 15))
diff --git a/etc/ci/performance/runner.py b/etc/ci/performance/runner.py
index 81c4ac6c33b..b36fa575dde 100644
--- a/etc/ci/performance/runner.py
+++ b/etc/ci/performance/runner.py
@@ -9,6 +9,8 @@ import itertools
 import json
 import os
 import subprocess
+import gecko_driver
+from functools import partial
 from statistics import median, StatisticsError
 
 
@@ -25,7 +27,8 @@ def parse_manifest(text):
 
 def execute_test(url, command, timeout):
     try:
-        return subprocess.check_output(command, stderr=subprocess.STDOUT, timeout=timeout)
+        return subprocess.check_output(command, stderr=subprocess.STDOUT,
+                                       timeout=timeout)
     except subprocess.CalledProcessError as e:
         print("Unexpected Fail:")
         print(e)
@@ -37,21 +40,33 @@
 
 
 def get_servo_command(url):
-    ua_script_path = "{}/user-agent-js".format(os.getcwd())
-    return ["../../../target/release/servo", url,
-            "--userscripts", ua_script_path,
-            "--headless",
-            "-x", "-o", "output.png"]
+    def run_servo_test(url, timeout):
+        ua_script_path = "{}/user-agent-js".format(os.getcwd())
+        command = ["../../../target/release/servo", url,
+                   "--userscripts", ua_script_path,
+                   "--headless",
+                   "-x", "-o", "output.png"]
+        log = ""
+        try:
+            log = subprocess.check_output(command,
+                                          stderr=subprocess.STDOUT,
+                                          timeout=timeout)
+        except subprocess.CalledProcessError as e:
+            print("Unexpected Fail:")
+            print(e)
+            print("You may want to re-run the test manually:\n{}"
+                  .format(' '.join(command)))
+        except subprocess.TimeoutExpired:
+            print("Test FAILED due to timeout: {}".format(url))
+        return parse_log(log, url)
+    return run_servo_test
 
 
 def get_gecko_command(url):
-    return ["./firefox/firefox/firefox",
-            " --display=:0", "--no-remote"
-            " -profile", "./firefox/servo",
-            url]
+    return gecko_driver.execute_gecko_test
 
 
-def parse_log(log, testcase=None):
+def parse_log(log, testcase):
     blocks = []
     block = []
     copy = False
@@ -67,48 +82,19 @@
         elif copy and line.strip().startswith("[PERF]"):
             block.append(line)
 
-    # We need to still include the failed tests, otherwise Treeherder will
-    # consider the result to be a new test series, and thus a new graph. So we
-    # use a placeholder with values = -1 to make Treeherder happy, and still be
-    # able to identify failed tests (successful tests have time >=0).
-    placeholder = {
-        "navigationStart": 0,
-        "unloadEventStart": -1,
-        "domLoading": -1,
-        "fetchStart": -1,
-        "responseStart": -1,
-        "loadEventEnd": -1,
-        "connectStart": -1,
-        "domainLookupStart": -1,
-        "redirectStart": -1,
-        "domContentLoadedEventEnd": -1,
-        "requestStart": -1,
-        "secureConnectionStart": -1,
-        "connectEnd": -1,
-        "loadEventStart": -1,
-        "domInteractive": -1,
-        "domContentLoadedEventStart": -1,
-        "redirectEnd": -1,
-        "domainLookupEnd": -1,
-        "unloadEventEnd": -1,
-        "responseEnd": -1,
-        "testcase": testcase,
-        "domComplete": -1,
-    }
-
     def parse_block(block):
         timing = {}
         for line in block:
             try:
                 (_, key, value) = line.split(",")
             except:
-                print("[DEBUG] failed to parse the following block:")
-                print(block)
+                print("[DEBUG] failed to parse the following line:")
+                print(line)
                 print('[DEBUG] log:')
                 print('-----')
                 print(log)
                 print('-----')
-                return placeholder
+                return None
 
             if key == "testcase" or key == "title":
                 timing[key] = value
@@ -117,20 +103,57 @@
 
         return timing
 
-    def valid_timing(timing):
-        return (timing.get('title') != 'Error response') and (testcase is None or timing.get('testcase') == testcase)
+    def valid_timing(timing, testcase=None):
+        if (timing is None or
+                testcase is None or
+                timing.get('title') == 'Error response' or
+                timing.get('testcase') != testcase):
+            return False
+        else:
+            return True
 
-    timings = list(filter(valid_timing, map(parse_block, blocks)))
+    # We need to still include the failed tests, otherwise Treeherder will
+    # consider the result to be a new test series, and thus a new graph. So we
+    # use a placeholder with values = -1 to make Treeherder happy, and still be
+    # able to identify failed tests (successful tests have time >=0).
+    def create_placeholder(testcase):
+        return {
+            "testcase": testcase,
+            "title": "",
+            "navigationStart": 0,
+            "unloadEventStart": -1,
+            "domLoading": -1,
+            "fetchStart": -1,
+            "responseStart": -1,
+            "loadEventEnd": -1,
+            "connectStart": -1,
+            "domainLookupStart": -1,
+            "redirectStart": -1,
+            "domContentLoadedEventEnd": -1,
+            "requestStart": -1,
+            "secureConnectionStart": -1,
+            "connectEnd": -1,
+            "loadEventStart": -1,
+            "domInteractive": -1,
+            "domContentLoadedEventStart": -1,
+            "redirectEnd": -1,
+            "domainLookupEnd": -1,
+            "unloadEventEnd": -1,
+            "responseEnd": -1,
+            "domComplete": -1,
+        }
+
+    valid_timing_for_case = partial(valid_timing, testcase=testcase)
+    timings = list(filter(valid_timing_for_case, map(parse_block, blocks)))
 
     if len(timings) == 0:
         print("Didn't find any perf data in the log, test timeout?")
-        print("Fillng in a dummy perf data")
         print('[DEBUG] log:')
         print('-----')
         print(log)
         print('-----')
 
-        return [placeholder]
+        return [create_placeholder(testcase)]
     else:
         return timings
@@ -237,19 +260,16 @@ def main():
     testcases = load_manifest(args.tp5_manifest)
     results = []
     for testcase in testcases:
-        command = (["timeout", "{}s".format(args.timeout)] +
-                   command_factory(testcase))
+        run_test = command_factory(testcase)
         for run in range(args.runs):
             print("Running test {}/{} on {}".format(run + 1,
                                                     args.runs,
                                                     testcase))
-            log = execute_test(testcase, command, args.timeout)
+            # results will be a mixure of timeings dict and testcase strings
+            # testcase string indicates a failed test
+            results += run_test(testcase, args.timeout)
     print("Finished")
-            result = parse_log(log, testcase)
             # TODO: Record and analyze other performance.timing properties
-            results += result
-            print("To reproduce the above test, run the following command:")
-            print("    {0}\n".format(' '.join(command)))
     print(format_result_summary(results))
 
     save_result_json(results, args.output_file, testcases, args.runs)
 
diff --git a/etc/ci/performance/test_all.sh b/etc/ci/performance/test_all.sh
index fe619006ace..e3011048207 100755
--- a/etc/ci/performance/test_all.sh
+++ b/etc/ci/performance/test_all.sh
@@ -13,9 +13,11 @@ do
     case "${1}" in
       --servo)
         engine="--engine=servo"
+        timeout=60
         ;;
       --gecko)
         engine="--engine=gecko"
+        timeout=15
         ;;
       --submit)
         submit=1
@@ -43,7 +45,8 @@ MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
 PERF_FILE="output/perf-$(date +%s).json"
 
 echo "Running tests"
-python3 runner.py ${engine} --runs 3 "${MANIFEST}" "${PERF_FILE}"
+python3 runner.py ${engine} --runs 3 --timeout "${timeout}"\
+    "${MANIFEST}" "${PERF_FILE}"
 
 if [[ "${submit:-}" ]];
 then
diff --git a/etc/ci/performance/test_runner.py b/etc/ci/performance/test_runner.py
index cdb15869804..c7b0c2b7011 100644
--- a/etc/ci/performance/test_runner.py
+++ b/etc/ci/performance/test_runner.py
@@ -9,6 +9,7 @@ import pytest
 
 
 def test_log_parser():
+    mock_url = "http://localhost:8000/page_load_test/56.com/www.56.com/index.html"
     mock_log = b'''
[PERF] perf block start
[PERF],testcase,http://localhost:8000/page_load_test/56.com/www.56.com/index.html
@@ -61,7 +62,7 @@ Shutting down the Constellation after generating an output file or exit flag spe
         "loadEventStart": None,
         "loadEventEnd": None
     }]
-    result = runner.parse_log(mock_log)
+    result = runner.parse_log(mock_log, mock_url)
 
     assert(expected == list(result))
 
@@ -119,30 +120,8 @@ Some other js error logs here
 [PERF] perf block end
Shutting down the Constellation after generating an output file or exit flag specified
'''
+    mock_url = "http://localhost:8000/page_load_test/56.com/www.56.com/index.html"
     expected = [{
-        "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/content.html",
-        "navigationStart": 1460358300,
-        "unloadEventStart": None,
-        "unloadEventEnd": None,
-        "redirectStart": None,
-        "redirectEnd": None,
-        "fetchStart": None,
-        "domainLookupStart": None,
-        "domainLookupEnd": None,
-        "connectStart": None,
-        "connectEnd": None,
-        "secureConnectionStart": None,
-        "requestStart": None,
-        "responseStart": None,
-        "responseEnd": None,
-        "domLoading": 1460358376000,
-        "domInteractive": 1460358388000,
-        "domContentLoadedEventStart": 1460358388000,
-        "domContentLoadedEventEnd": 1460358388000,
-        "domComplete": 1460358389000,
-        "loadEventStart": None,
-        "loadEventEnd": None
-    }, {
         "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
         "navigationStart": 1460358376,
         "unloadEventStart": None,
         "unloadEventEnd": None,
         "redirectStart": None,
         "redirectEnd": None,
         "fetchStart": None,
         "domainLookupStart": None,
         "domainLookupEnd": None,
         "connectStart": None,
         "connectEnd": None,
         "secureConnectionStart": None,
         "requestStart": None,
         "responseStart": None,
         "responseEnd": None,
         "domLoading": 1460358376000,
         "domInteractive": 1460358388000,
         "domContentLoadedEventStart": 1460358388000,
         "domContentLoadedEventEnd": 1460358388000,
         "domComplete": 1460358389000,
         "loadEventStart": None,
         "loadEventEnd": None
     }]
-    result = runner.parse_log(mock_log)
+    result = runner.parse_log(mock_log, mock_url)
 
     assert(expected == list(result))
 
From d8b862e4d7e643954173984e465edb083a0f9930 Mon Sep 17 00:00:00 2001
From: Shing Lyu
Date: Wed, 16 Nov 2016 17:06:14 +0800
Subject: [PATCH 2/5] Various easy nits

---
 etc/ci/performance/README.md                  |  2 +-
 etc/ci/performance/firefox/addon/README.md    |  2 --
 etc/ci/performance/firefox/addon/data/perf.js |  1 -
 etc/ci/performance/firefox/addon/index.js     |  8 ------
 etc/ci/performance/firefox/addon/package.json | 16 ------------
 etc/ci/performance/gecko_driver.py            | 12 ++++-----
 etc/ci/performance/runner.py                  | 25 +++++++++++--------
 7 files changed, 21 insertions(+), 45 deletions(-)
 delete mode 100644 etc/ci/performance/firefox/addon/README.md
 delete mode 120000 etc/ci/performance/firefox/addon/data/perf.js
 delete mode 100644 etc/ci/performance/firefox/addon/index.js
 delete mode 100644 etc/ci/performance/firefox/addon/package.json

diff --git a/etc/ci/performance/README.md b/etc/ci/performance/README.md
index 8c1966fd19c..b7baeb08453 100644
--- a/etc/ci/performance/README.md
+++ b/etc/ci/performance/README.md
@@ -64,7 +64,7 @@ If you want to test the data submission code in `submit_to_perfherder.py` withou
 ## For Gecko
 
 * Install Firefox Nightly in your PATH
-* Download [geckodrive](https://github.com/mozilla/geckodriver/releases) and add it to the `PATH`
+* Download [geckodriver](https://github.com/mozilla/geckodriver/releases) and add it to the `PATH`
 * `pip install selenium`
 * Run `python gecko_driver.py` to test
 
diff --git a/etc/ci/performance/firefox/addon/README.md b/etc/ci/performance/firefox/addon/README.md
deleted file mode 100644
index e6967bf55b4..00000000000
--- a/etc/ci/performance/firefox/addon/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-#Servo Performance Comparison
-Monitor website rendering performance
\ No newline at end of file
diff --git a/etc/ci/performance/firefox/addon/data/perf.js b/etc/ci/performance/firefox/addon/data/perf.js
deleted file mode 120000
index 9b8adc62230..00000000000
--- a/etc/ci/performance/firefox/addon/data/perf.js
+++ /dev/null
@@ -1 +0,0 @@
-../../../user-agent-js/01.perf-timing.js
\ No newline at end of file
diff --git a/etc/ci/performance/firefox/addon/index.js b/etc/ci/performance/firefox/addon/index.js
deleted file mode 100644
index e4835feb58e..00000000000
--- a/etc/ci/performance/firefox/addon/index.js
+++ /dev/null
@@ -1,8 +0,0 @@
-var self = require("sdk/self");
-var pageMod = require("sdk/page-mod");
-
-pageMod.PageMod({
-  include: "*",
-  contentScriptFile: self.data.url('perf.js'),
-  attachTo: ["top", "existing"]
-});
diff --git a/etc/ci/performance/firefox/addon/package.json b/etc/ci/performance/firefox/addon/package.json
deleted file mode 100644
index 93fd45aea28..00000000000
--- a/etc/ci/performance/firefox/addon/package.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-  "title": "Servo Performance Comparison",
-  "name": "addon",
-  "version": "0.0.1",
-  "description": "Monitor website rendering performance",
-  "main": "index.js",
-  "author": "The Servo team",
-  "engines": {
-    "firefox": ">=38.0a1",
-    "fennec": ">=38.0a1"
-  },
-  "license": "MPL",
-  "keywords": [
-    "jetpack"
-  ]
-}
diff --git a/etc/ci/performance/gecko_driver.py b/etc/ci/performance/gecko_driver.py
index f19b7e683ad..de7a48f2679 100644
--- a/etc/ci/performance/gecko_driver.py
+++ b/etc/ci/performance/gecko_driver.py
@@ -8,7 +8,7 @@ from selenium import webdriver
 from selenium.common.exceptions import TimeoutException
 
 
-def execute_gecko_test(testcase, timeout):
+def run_gecko_test(testcase, timeout):
     firefox_binary = "./firefox/firefox/firefox"
     driver = webdriver.Firefox(firefox_binary=firefox_binary)
     driver.set_page_load_timeout(timeout)
@@ -55,16 +55,16 @@
     # So we need to get the timing fields one by one
     for name in timing_names:
         if failed:
-            if name == "navigationStart":
-                timings[name] = 0
-            else:
-                timings[name] = -1
+            # We need to still include the failed tests, otherwise Treeherder will
+            # consider the result to be a new test series, and thus a new graph. So we
+            # use a placeholder with values = -1 to make Treeherder happy, and still be
+            # able to identify failed tests (successful tests have time >=0).
+            timings[name] = 0 if name == "navigationStart" else -1
         else:
             timings[name] = driver.execute_script(
                 "return performance.timing.{0}".format(name)
             )
 
-    timings['testcase'] = testcase
     # driver.quit() gives an "'NoneType' object has no attribute 'path'" error.
     # Fixed in
     # https://github.com/SeleniumHQ/selenium/commit/9157c7071f9900c2608f5ca40ae4f518ed373b96
diff --git a/etc/ci/performance/runner.py b/etc/ci/performance/runner.py
index b36fa575dde..ed2b7cd78c4 100644
--- a/etc/ci/performance/runner.py
+++ b/etc/ci/performance/runner.py
@@ -27,8 +27,9 @@ def parse_manifest(text):
 
 def execute_test(url, command, timeout):
     try:
-        return subprocess.check_output(command, stderr=subprocess.STDOUT,
-                                       timeout=timeout)
+        return subprocess.check_output(
+            command, stderr=subprocess.STDOUT, timeout=timeout
+        )
     except subprocess.CalledProcessError as e:
         print("Unexpected Fail:")
         print(e)
@@ -42,15 +43,17 @@ def execute_test(url, command, timeout):
 def get_servo_command(url):
     def run_servo_test(url, timeout):
         ua_script_path = "{}/user-agent-js".format(os.getcwd())
-        command = ["../../../target/release/servo", url,
-                   "--userscripts", ua_script_path,
-                   "--headless",
-                   "-x", "-o", "output.png"]
+        command = [
+            "../../../target/release/servo", url,
+            "--userscripts", ua_script_path,
+            "--headless",
+            "-x", "-o", "output.png"
+        ]
         log = ""
         try:
-            log = subprocess.check_output(command,
-                                          stderr=subprocess.STDOUT,
-                                          timeout=timeout)
+            log = subprocess.check_output(
+                command, stderr=subprocess.STDOUT, timeout=timeout
+            )
         except subprocess.CalledProcessError as e:
             print("Unexpected Fail:")
             print(e)
@@ -63,7 +66,7 @@ def get_servo_command(url):
 
 
 def get_gecko_command(url):
-    return gecko_driver.execute_gecko_test
+    return gecko_driver.run_gecko_test
 
 
 def parse_log(log, testcase):
@@ -265,7 +268,7 @@ def main():
             print("Running test {}/{} on {}".format(run + 1,
                                                     args.runs,
                                                     testcase))
-            # results will be a mixure of timeings dict and testcase strings
+            # results will be a mixture of timings dicts and testcase strings
             # testcase string indicates a failed test
             results += run_test(testcase, args.timeout)
     print("Finished")

From 9f0b685268aa6e2ed30a02dfb0d9fe2a944e3c13 Mon Sep 17 00:00:00 2001
From: Shing Lyu
Date: Wed, 16 Nov 2016 17:09:08 +0800
Subject: [PATCH 3/5] Space nit

---
 etc/ci/performance/test_all.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/etc/ci/performance/test_all.sh b/etc/ci/performance/test_all.sh
index e3011048207..35031da0be0 100755
--- a/etc/ci/performance/test_all.sh
+++ b/etc/ci/performance/test_all.sh
@@ -45,7 +45,7 @@ MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
 PERF_FILE="output/perf-$(date +%s).json"
 
 echo "Running tests"
-python3 runner.py ${engine} --runs 3 --timeout "${timeout}"\
+python3 runner.py ${engine} --runs 3 --timeout "${timeout}" \
     "${MANIFEST}" "${PERF_FILE}"
 
 if [[ "${submit:-}" ]];

From d9a7221b0ced86be27955c20a151abc7f16a06f6 Mon Sep 17 00:00:00 2001
From: Shing Lyu
Date: Wed, 16 Nov 2016 17:16:03 +0800
Subject: [PATCH 4/5] Remove a level of unnecessary function call

---
 etc/ci/performance/runner.py | 56 ++++++++++++++++--------------------
 1 file changed, 25 insertions(+), 31 deletions(-)

diff --git a/etc/ci/performance/runner.py b/etc/ci/performance/runner.py
index ed2b7cd78c4..78c8c980b10 100644
--- a/etc/ci/performance/runner.py
+++ b/etc/ci/performance/runner.py
@@ -9,7 +9,6 @@ import itertools
 import json
 import os
 import subprocess
-import gecko_driver
 from functools import partial
 from statistics import median, StatisticsError
@@ -40,33 +39,28 @@
 
 
-def get_servo_command(url):
-    def run_servo_test(url, timeout):
-        ua_script_path = "{}/user-agent-js".format(os.getcwd())
-        command = [
-            "../../../target/release/servo", url,
-            "--userscripts", ua_script_path,
-            "--headless",
-            "-x", "-o", "output.png"
-        ]
-        log = ""
-        try:
-            log = subprocess.check_output(
-                command, stderr=subprocess.STDOUT, timeout=timeout
-            )
-        except subprocess.CalledProcessError as e:
-            print("Unexpected Fail:")
-            print(e)
-            print("You may want to re-run the test manually:\n{}"
-                  .format(' '.join(command)))
-        except subprocess.TimeoutExpired:
-            print("Test FAILED due to timeout: {}".format(url))
-        return parse_log(log, url)
-    return run_servo_test
-
-
-def get_gecko_command(url):
-    return gecko_driver.run_gecko_test
+def run_servo_test(url, timeout):
+    ua_script_path = "{}/user-agent-js".format(os.getcwd())
+    command = [
+        "../../../target/release/servo", url,
+        "--userscripts", ua_script_path,
+        "--headless",
+        "-x", "-o", "output.png"
+    ]
+    log = ""
+    try:
+        log = subprocess.check_output(
+            command, stderr=subprocess.STDOUT, timeout=timeout
+        )
+    except subprocess.CalledProcessError as e:
+        print("Unexpected Fail:")
+        print(e)
+        print("You may want to re-run the test manually:\n{}".format(
+            ' '.join(command)
+        ))
+    except subprocess.TimeoutExpired:
+        print("Test FAILED due to timeout: {}".format(url))
+    return parse_log(log, url)
 
 
 def parse_log(log, testcase):
@@ -255,15 +249,15 @@ def main():
                         " servo and gecko are supported."))
     args = parser.parse_args()
     if args.engine == 'servo':
-        command_factory = get_servo_command
+        run_test = run_servo_test
    elif args.engine == 'gecko':
-        command_factory = get_gecko_command
+        import gecko_driver # Load this only when we need gecko test
+        run_test = gecko_driver.run_gecko_test
     try:
         # Assume the server is up and running
         testcases = load_manifest(args.tp5_manifest)
         results = []
         for testcase in testcases:
-            run_test = command_factory(testcase)
             for run in range(args.runs):
                 print("Running test {}/{} on {}".format(run + 1,
                                                         args.runs,
                                                         testcase))

From ecfabb6e9f6f0c128a911021ee039bfdd4d9ebbf Mon Sep 17 00:00:00 2001
From: Shing Lyu
Date: Wed, 16 Nov 2016 17:53:55 +0800
Subject: [PATCH 5/5] Use resource manager in gecko_driver and nits

---
 etc/ci/performance/gecko_driver.py | 131 +++++++++++++++++------------
 etc/ci/performance/runner.py       |   2 +-
 2 files changed, 76 insertions(+), 57 deletions(-)

diff --git a/etc/ci/performance/gecko_driver.py b/etc/ci/performance/gecko_driver.py
index de7a48f2679..f8772a81c6f 100644
--- a/etc/ci/performance/gecko_driver.py
+++ b/etc/ci/performance/gecko_driver.py
@@ -4,75 +4,94 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from contextlib import contextmanager
+import json
 from selenium import webdriver
 from selenium.common.exceptions import TimeoutException
 
 
-def run_gecko_test(testcase, timeout):
+@contextmanager
+def create_gecko_session():
     firefox_binary = "./firefox/firefox/firefox"
     driver = webdriver.Firefox(firefox_binary=firefox_binary)
+    yield driver
     # driver.quit() gives an "'NoneType' object has no attribute 'path'" error.
     # Fixed in
     # https://github.com/SeleniumHQ/selenium/commit/9157c7071f9900c2608f5ca40ae4f518ed373b96
     driver.quit()
+
+
+def generate_placeholder(testcase):
+    # We need to still include the failed tests, otherwise Treeherder will
+    # consider the result to be a new test series, and thus a new graph. So we
+    # use a placeholder with values = -1 to make Treeherder happy, and still be
+    # able to identify failed tests (successful tests have time >=0).
+
+    timings = {
+        "testcase": testcase,
+        "title": ""
+    }
+
+    timing_names = [
+        "navigationStart",
+        "unloadEventStart",
+        "domLoading",
+        "fetchStart",
+        "responseStart",
+        "loadEventEnd",
+        "connectStart",
+        "domainLookupStart",
+        "redirectStart",
+        "domContentLoadedEventEnd",
+        "requestStart",
+        "secureConnectionStart",
+        "connectEnd",
+        "loadEventStart",
+        "domInteractive",
+        "domContentLoadedEventStart",
+        "redirectEnd",
+        "domainLookupEnd",
+        "unloadEventEnd",
+        "responseEnd",
+        "domComplete",
+    ]
+
+    for name in timing_names:
+        timings[name] = 0 if name == "navigationStart" else -1
+
+    return [timings]
+
+
+def run_gecko_test(testcase, timeout):
+    with create_gecko_session() as driver:
+        driver.set_page_load_timeout(timeout)
+        try:
+            driver.get(testcase)
+        except TimeoutException:
+            print("Timeout!")
+            return generate_placeholder(testcase)
+
+        try:
+            timings = {
+                "testcase": testcase,
+                "title": driver.title.replace(",", ",")
+            }
+
+            timings.update(json.loads(
+                driver.execute_script(
+                    "return JSON.stringify(performance.timing)"
+                )
+            ))
+        except:
+            # We need to return a timing object no matter what happened.
+            # See the comment in generate_placeholder() for explanation
+            print("Failed to get a valid timing measurement.")
+            return generate_placeholder(testcase)
+
+    return [timings]
 
 if __name__ == '__main__':
     # Just for manual testing
     from pprint import pprint
     url = "http://localhost:8000/page_load_test/tp5n/dailymail.co.uk/www.dailymail.co.uk/ushome/index.html"
-    pprint(execute_gecko_test(url, 15))
+    pprint(run_gecko_test(url, 15))
diff --git a/etc/ci/performance/runner.py b/etc/ci/performance/runner.py
index 78c8c980b10..414eb12aeab 100644
--- a/etc/ci/performance/runner.py
+++ b/etc/ci/performance/runner.py
@@ -251,7 +251,7 @@ def main():
     if args.engine == 'servo':
         run_test = run_servo_test
     elif args.engine == 'gecko':
-        import gecko_driver # Load this only when we need gecko test
+        import gecko_driver  # Load this only when we need gecko test
         run_test = gecko_driver.run_gecko_test
     try:
         # Assume the server is up and running
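
After the full series, both engines sit behind the same `(url, timeout)` entry point that returns a list of timing dicts: `run_servo_test` in `runner.py` launches Servo as a subprocess and parses its `[PERF]` log, while `gecko_driver.run_gecko_test` drives Firefox through Selenium. The sketch below is a hand-testing example, not part of the patches above; it assumes it is run from `etc/ci/performance` with selenium installed and the local page server from `test_all.sh` listening on port 8000, and it reuses the URL from `gecko_driver.py`'s `__main__` block.

```python
#!/usr/bin/env python3
# Hypothetical harness check, under the assumptions stated above.
import runner
import gecko_driver

URL = ("http://localhost:8000/page_load_test/tp5n/dailymail.co.uk/"
       "www.dailymail.co.uk/ushome/index.html")

for engine, run_test in [("servo", runner.run_servo_test),
                         ("gecko", gecko_driver.run_gecko_test)]:
    # Both entry points return a list of timing dicts. A failed or timed-out
    # run yields a placeholder (navigationStart == 0, all other fields -1),
    # which keeps the Treeherder series continuous.
    for timing in run_test(URL, 15):
        status = "ok" if timing["navigationStart"] != 0 else "placeholder"
        print("{}: domComplete={} ({})".format(
            engine, timing["domComplete"], status))
```

Keeping the two entry points signature-compatible is what lets `main()` in `runner.py` select an engine with a single assignment and defer `import gecko_driver` (and with it the selenium dependency) until a Gecko run is actually requested.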