mirror of
https://github.com/servo/servo.git
synced 2025-06-06 16:45:39 +00:00
Added async performance test
This commit is contained in:
parent
363f590019
commit
7aa3350d45
9 changed files with 122 additions and 17 deletions
|
@ -39,14 +39,19 @@ Servo Page Load Time Test
|
||||||
|
|
||||||
# Add your own test
|
# Add your own test
|
||||||
|
|
||||||
|
* You can add two types of tests: sync test and async test
|
||||||
|
* sync test: measure the page load time. Exits automatically after page loaded.
|
||||||
|
* async test: measures your custom time markers from JavaScript, see `page_load_test/example/example_async.html` for example.
|
||||||
* Add your test case (html file) to the `page_load_test/` folder. For example we can create a `page_load_test/example/example.html`
|
* Add your test case (html file) to the `page_load_test/` folder. For example we can create a `page_load_test/example/example.html`
|
||||||
* Add a manifest (or modify existing ones) named `page_load_test/example.manifest`
|
* Add a manifest (or modify existing ones) named `page_load_test/example.manifest`
|
||||||
* Add the lines like this to the manifest:
|
* Add the lines like this to the manifest:
|
||||||
|
|
||||||
```
|
```
|
||||||
http://localhost:8000/page_load_test/example/example.html
|
|
||||||
# This is a comment
|
|
||||||
# Pages got served on a local server at localhost:8000
|
# Pages got served on a local server at localhost:8000
|
||||||
|
# Test case without any flag is a sync test
|
||||||
|
http://localhost:8000/page_load_test/example/example_sync.html
|
||||||
|
# Async test must start with a `async` flag
|
||||||
|
async http://localhost:8000/page_load_test/example/example.html
|
||||||
```
|
```
|
||||||
* Modify the `MANIFEST=...` link in `test_all.sh` and point that to the new manifest file.
|
* Modify the `MANIFEST=...` link in `test_all.sh` and point that to the new manifest file.
|
||||||
|
|
||||||
|
|
|
@ -71,7 +71,7 @@ def generate_placeholder(testcase):
|
||||||
return [timings]
|
return [timings]
|
||||||
|
|
||||||
|
|
||||||
def run_gecko_test(testcase, timeout):
|
def run_gecko_test(testcase, timeout, is_async):
|
||||||
with create_gecko_session() as driver:
|
with create_gecko_session() as driver:
|
||||||
driver.set_page_load_timeout(timeout)
|
driver.set_page_load_timeout(timeout)
|
||||||
try:
|
try:
|
||||||
|
@ -97,6 +97,16 @@ def run_gecko_test(testcase, timeout):
|
||||||
print("Failed to get a valid timing measurement.")
|
print("Failed to get a valid timing measurement.")
|
||||||
return generate_placeholder(testcase)
|
return generate_placeholder(testcase)
|
||||||
|
|
||||||
|
if is_async:
|
||||||
|
# TODO: the timeout is hardcoded
|
||||||
|
driver.implicitly_wait(5) # sec
|
||||||
|
driver.find_element_by_id("GECKO_TEST_DONE")
|
||||||
|
timings.update(json.loads(
|
||||||
|
driver.execute_script(
|
||||||
|
"return JSON.stringify(window.customTimers)"
|
||||||
|
)
|
||||||
|
))
|
||||||
|
|
||||||
return [timings]
|
return [timings]
|
||||||
|
|
||||||
|
|
||||||
|
|
22
etc/ci/performance/harness/harness.js
Normal file
22
etc/ci/performance/harness/harness.js
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
window.customTimers = {};
|
||||||
|
// Create a custome timestamp with a custom name
|
||||||
|
function mark(name) {
|
||||||
|
if (window.performance) {
|
||||||
|
// performance.now() is the time after navigationStart
|
||||||
|
// https://developer.mozilla.org/en-US/docs/Web/API/Performance/now
|
||||||
|
var time = performance.now() + performance.timing.navigationStart;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
var time = (new Date()).getTime();
|
||||||
|
}
|
||||||
|
window.customTimers[name] = time;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notifying the test harness that the test has ended, otherwise the test
|
||||||
|
// harness will time out
|
||||||
|
function done() {
|
||||||
|
var elem = document.createElement('span')
|
||||||
|
elem.id = "GECKO_TEST_DONE";
|
||||||
|
document.body.appendChild(elem);
|
||||||
|
}
|
||||||
|
|
3
etc/ci/performance/page_load_test/example.manifest
Normal file
3
etc/ci/performance/page_load_test/example.manifest
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
http://localhost:8000/page_load_test/example/example_sync.html
|
||||||
|
async http://localhost:8000/page_load_test/example/example_async.html
|
||||||
|
|
23
etc/ci/performance/page_load_test/example/example_async.html
Normal file
23
etc/ci/performance/page_load_test/example/example_async.html
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta name="viewport" content="width=device-width" />
|
||||||
|
<title></title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
Hello World
|
||||||
|
<!-- Remember to include the /harness/harness.js file
|
||||||
|
for the mark() and done() functions -->
|
||||||
|
<script src="/harness/harness.js" type="text/javascript" charset="utf-8"></script>
|
||||||
|
<script type="text/javascript" charset="utf-8">
|
||||||
|
// Create a timestamp before the test
|
||||||
|
mark("test start");
|
||||||
|
// Do something slow here
|
||||||
|
// Create another timestamp after the test
|
||||||
|
mark("test end");
|
||||||
|
// Tell the test harness you're done, otherwise it will keep waiting
|
||||||
|
done();
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
13
etc/ci/performance/page_load_test/example/example_sync.html
Normal file
13
etc/ci/performance/page_load_test/example/example_sync.html
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta name="viewport" content="width=device-width" />
|
||||||
|
<title></title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
Hello World
|
||||||
|
<!-- window.performance.timing are recorded.
|
||||||
|
Window closes after onLoad event triggered -->
|
||||||
|
</body>
|
||||||
|
</html>
|
|
@ -20,8 +20,15 @@ def load_manifest(filename):
|
||||||
|
|
||||||
|
|
||||||
def parse_manifest(text):
|
def parse_manifest(text):
|
||||||
return filter(lambda x: x != "" and not x.startswith("#"),
|
lines = filter(lambda x: x != "" and not x.startswith("#"),
|
||||||
map(lambda x: x.strip(), text.splitlines()))
|
map(lambda x: x.strip(), text.splitlines()))
|
||||||
|
output = []
|
||||||
|
for line in lines:
|
||||||
|
if line.split(" ")[0] == "async":
|
||||||
|
output.append((line.split(" ")[1], True))
|
||||||
|
else:
|
||||||
|
output.append((line.split(" ")[0], False))
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
def execute_test(url, command, timeout):
|
def execute_test(url, command, timeout):
|
||||||
|
@ -39,7 +46,12 @@ def execute_test(url, command, timeout):
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
def run_servo_test(url, timeout):
|
def run_servo_test(url, timeout, is_async):
|
||||||
|
if is_async:
|
||||||
|
print("Servo does not support async test!")
|
||||||
|
# Return a placeholder
|
||||||
|
return parse_log("", url)
|
||||||
|
|
||||||
ua_script_path = "{}/user-agent-js".format(os.getcwd())
|
ua_script_path = "{}/user-agent-js".format(os.getcwd())
|
||||||
command = [
|
command = [
|
||||||
"../../../target/release/servo", url,
|
"../../../target/release/servo", url,
|
||||||
|
@ -157,7 +169,7 @@ def parse_log(log, testcase):
|
||||||
|
|
||||||
def filter_result_by_manifest(result_json, manifest):
|
def filter_result_by_manifest(result_json, manifest):
|
||||||
filtered = []
|
filtered = []
|
||||||
for name in manifest:
|
for name, is_async in manifest:
|
||||||
match = [tc for tc in result_json if tc['testcase'] == name]
|
match = [tc for tc in result_json if tc['testcase'] == name]
|
||||||
if len(match) == 0:
|
if len(match) == 0:
|
||||||
raise Exception(("Missing test result: {}. This will cause a "
|
raise Exception(("Missing test result: {}. This will cause a "
|
||||||
|
@ -177,7 +189,7 @@ def take_result_median(result_json, expected_runs):
|
||||||
|
|
||||||
median_result = {}
|
median_result = {}
|
||||||
for k, _ in group[0].items():
|
for k, _ in group[0].items():
|
||||||
if k == "testcase":
|
if k == "testcase" or k == "title":
|
||||||
median_result[k] = group[0][k]
|
median_result[k] = group[0][k]
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
|
@ -257,14 +269,14 @@ def main():
|
||||||
# Assume the server is up and running
|
# Assume the server is up and running
|
||||||
testcases = load_manifest(args.tp5_manifest)
|
testcases = load_manifest(args.tp5_manifest)
|
||||||
results = []
|
results = []
|
||||||
for testcase in testcases:
|
for testcase, is_async in testcases:
|
||||||
for run in range(args.runs):
|
for run in range(args.runs):
|
||||||
print("Running test {}/{} on {}".format(run + 1,
|
print("Running test {}/{} on {}".format(run + 1,
|
||||||
args.runs,
|
args.runs,
|
||||||
testcase))
|
testcase))
|
||||||
# results will be a mixture of timings dict and testcase strings
|
# results will be a mixture of timings dict and testcase strings
|
||||||
# testcase string indicates a failed test
|
# testcase string indicates a failed test
|
||||||
results += run_test(testcase, args.timeout)
|
results += run_test(testcase, args.timeout, is_async)
|
||||||
print("Finished")
|
print("Finished")
|
||||||
# TODO: Record and analyze other performance.timing properties
|
# TODO: Record and analyze other performance.timing properties
|
||||||
|
|
||||||
|
|
|
@ -40,7 +40,8 @@ python3 -m http.server > /dev/null 2>&1 &
|
||||||
# TODO: enable the full manifest when #11087 is fixed
|
# TODO: enable the full manifest when #11087 is fixed
|
||||||
# https://github.com/servo/servo/issues/11087
|
# https://github.com/servo/servo/issues/11087
|
||||||
# MANIFEST="page_load_test/test.manifest"
|
# MANIFEST="page_load_test/test.manifest"
|
||||||
MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
|
# MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
|
||||||
|
MANIFEST="page_load_test/example.manifest" # A manifest that excludes
|
||||||
# timeout test cases
|
# timeout test cases
|
||||||
PERF_FILE="output/perf-$(date +%s).json"
|
PERF_FILE="output/perf-$(date +%s).json"
|
||||||
|
|
||||||
|
|
|
@ -163,6 +163,7 @@ def test_log_parser_empty():
|
||||||
|
|
||||||
expected = [{
|
expected = [{
|
||||||
"testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
"testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
||||||
|
"title": "",
|
||||||
"navigationStart": 0,
|
"navigationStart": 0,
|
||||||
"unloadEventStart": -1,
|
"unloadEventStart": -1,
|
||||||
"unloadEventEnd": -1,
|
"unloadEventEnd": -1,
|
||||||
|
@ -195,6 +196,7 @@ def test_log_parser_error():
|
||||||
|
|
||||||
expected = [{
|
expected = [{
|
||||||
"testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
"testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
||||||
|
"title": "",
|
||||||
"navigationStart": 0,
|
"navigationStart": 0,
|
||||||
"unloadEventStart": -1,
|
"unloadEventStart": -1,
|
||||||
"unloadEventEnd": -1,
|
"unloadEventEnd": -1,
|
||||||
|
@ -254,6 +256,7 @@ Shutting down the Constellation after generating an output file or exit flag spe
|
||||||
|
|
||||||
expected = [{
|
expected = [{
|
||||||
"testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
"testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
||||||
|
"title": "",
|
||||||
"navigationStart": 0,
|
"navigationStart": 0,
|
||||||
"unloadEventStart": -1,
|
"unloadEventStart": -1,
|
||||||
"unloadEventEnd": -1,
|
"unloadEventEnd": -1,
|
||||||
|
@ -290,9 +293,22 @@ http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
|
||||||
# Disabled! http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
|
# Disabled! http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
|
||||||
'''
|
'''
|
||||||
expected = [
|
expected = [
|
||||||
"http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html",
|
("http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html", False),
|
||||||
"http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html",
|
("http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html", False),
|
||||||
"http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html"
|
("http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html", False)
|
||||||
|
]
|
||||||
|
assert(expected == list(runner.parse_manifest(text)))
|
||||||
|
|
||||||
|
|
||||||
|
def test_manifest_loader_async():
|
||||||
|
|
||||||
|
text = '''
|
||||||
|
http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html
|
||||||
|
async http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html
|
||||||
|
'''
|
||||||
|
expected = [
|
||||||
|
("http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html", False),
|
||||||
|
("http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html", True),
|
||||||
]
|
]
|
||||||
assert(expected == list(runner.parse_manifest(text)))
|
assert(expected == list(runner.parse_manifest(text)))
|
||||||
|
|
||||||
|
@ -315,7 +331,7 @@ def test_filter_result_by_manifest():
|
||||||
}]
|
}]
|
||||||
|
|
||||||
manifest = [
|
manifest = [
|
||||||
"http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
|
("http://localhost:8000/page_load_test/56.com/www.56.com/index.html", False)
|
||||||
]
|
]
|
||||||
|
|
||||||
assert(expected == runner.filter_result_by_manifest(input_json, manifest))
|
assert(expected == runner.filter_result_by_manifest(input_json, manifest))
|
||||||
|
@ -328,8 +344,8 @@ def test_filter_result_by_manifest_error():
|
||||||
}]
|
}]
|
||||||
|
|
||||||
manifest = [
|
manifest = [
|
||||||
"1.html",
|
("1.html", False),
|
||||||
"2.html"
|
("2.html", False)
|
||||||
]
|
]
|
||||||
|
|
||||||
with pytest.raises(Exception) as execinfo:
|
with pytest.raises(Exception) as execinfo:
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue