Mirror of https://github.com/servo/servo.git
Use urllib rather than httplib2 when downloading build perf data.
parent 07bd84ecc8
commit 8754695c82
2 changed files with 18 additions and 36 deletions
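For context on the diffs below: the script previously issued requests through an httplib2.Http object and branched on the returned status code, while after this change it calls urllib.request.urlopen() from the standard library and relies on HTTPError being raised for failures, treating a 404 as an empty build record. A minimal sketch of that pattern, assuming a hypothetical fetch_json helper and a placeholder URL rather than the script's real argument handling (the diff builds its URLs from args.index_url and args.build_url):

import json
from urllib.request import urlopen, HTTPError

def fetch_json(url):
    # Download a JSON document; mirror the commit's behaviour of
    # treating a 404 as "no data" and re-raising any other HTTP error.
    try:
        with urlopen(url) as response:
            return json.loads(response.read().decode('utf-8'))
    except HTTPError as e:
        if e.code == 404:
            return {}
        raise

if __name__ == "__main__":
    # Placeholder URL, used here only to exercise the helper.
    print(fetch_json("https://example.org/json/builders"))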
@@ -114,7 +114,7 @@ linux-nightly:
   - ./mach package --release
   - ./mach upload-nightly linux
   - ./mach test-perf
-  - python ./etc/ci/performance/download_buildbot_timings.py --verbose
+  - python3 ./etc/ci/performance/download_buildbot_timings.py --verbose
   - aws s3 sync --size-only --acl public-read ./etc/ci/performance/output s3://servo-perf

 android:
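The interpreter in the step above moves from python to python3 because the rewritten script imports urllib.request, which exists under that name only in Python 3 (Python 2 split the equivalent functionality across urllib and urllib2). An illustrative check that the chosen interpreter provides what the script now needs, not part of the CI config:

python3 -c "from urllib.request import urlopen, HTTPError; print('ok')"

The remaining hunks are the rewrite of the download script itself.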
@@ -7,10 +7,10 @@
 import argparse
 import csv
 from datetime import datetime, date
-import httplib2
 import json
 from math import floor
 import os
+from urllib.request import urlopen, HTTPError

 SCRIPT_PATH = os.path.split(__file__)[0]

@@ -55,8 +55,6 @@ def main():
                         help="print every HTTP request")
     args = parser.parse_args()

-    http = httplib2.Http()
-
     os.makedirs(args.cache_dir, exist_ok=True)
     os.makedirs(args.output_dir, exist_ok=True)

@@ -64,28 +62,20 @@ def main():
     # Note: this isn't cached
     if args.verbose:
         print("Downloading index {}.".format(args.index_url))
-    (index_headers, index_data) = http.request(args.index_url, "GET", headers={'cache-control': 'no-cache'})
-    if args.verbose:
-        print("Response {}.".format(index_headers))
-    index = json.loads(index_data.decode('utf-8'))
+    with urlopen(args.index_url) as response:
+        index = json.loads(response.read().decode('utf-8'))

     builds = []

-    for builder in index["builders"]:
+    for builder in sorted(index["builders"]):
         # The most recent build is at offset -1
         # Fetch it to find out the build number
         # Note: this isn't cached
         recent_build_url = args.build_url.format(builder, -1)
         if args.verbose:
             print("Downloading recent build {}.".format(recent_build_url))
-        (recent_build_headers, recent_build_data) = http.request(
-            recent_build_url,
-            "GET",
-            headers={'cache-control': 'no-cache'}
-        )
-        if args.verbose:
-            print("Respose {}.".format(recent_build_headers))
-        recent_build = json.loads(recent_build_data.decode('utf-8'))
+        with urlopen(recent_build_url) as response:
+            recent_build = json.loads(response.read().decode('utf-8'))
         recent_build_number = recent_build["number"]

         # Download each build, and convert to CSV
@@ -104,27 +94,19 @@ def main():
             build_url = args.build_url.format(builder, build_number)
             if args.verbose:
                 print("Downloading build {}.".format(build_url))
-            (build_headers, build_data) = http.request(
-                build_url,
-                "GET",
-                headers={'cache-control': 'no=cache'}
-            )
-            if args.verbose:
-                print("Response {}.".format(build_headers))
+            try:
+                with urlopen(build_url) as response:
+                    build = json.loads(response.read().decode('utf-8'))
+            except HTTPError as e:
+                if e.code == 404:
+                    build = {}
+                else:
+                    raise

-            # Only parse the JSON if we got back a 200 response.
-            if build_headers.status == 200:
-                build = json.loads(build_data.decode('utf-8'))
-                # Don't cache current builds.
-                if build.get('currentStep'):
-                    continue
-            elif build_headers.status == 404:
-                build = {}
-            else:
-                continue
+            # Don't cache current builds.
+            if build.get('currentStep'):
+                continue

             with open(cache_json, 'w+') as f:
                 json.dump(build, f)