Mirror of https://github.com/servo/servo.git, synced 2025-06-25 17:44:33 +01:00
Auto merge of #5678 - mdamien:patch-1, r=jdm
The previous version used urllib's `FancyURLopener`, but urllib does not handle proxies with SSL well. For example, when adding the proxies to `urllib.FancyURLopener`, I got this SSL error:

```
IOError: [Errno socket error] [Errno 1] _ssl.c:510: error:140770FC:SSL routines:SSL23_GET_SERVER_HELLO:unknown protocol
```

So I switched the function to urllib2. A better solution would be to use `requests`, but I prefer to stay with the "no-dependency" approach. For example, this was my first solution with `requests`:

```python
def download(desc, src, dst):
    print("Downloading(R) %s..." % desc)
    with open(dst, 'wb') as handle:
        response = requests.get(src, stream=True)
        if not response.ok:
            print("something went wrong downloading %s, :(" % desc)
            sys.exit(1)

        size = int(response.headers['content-length'])
        block_size = 1024
        recved = 0
        for block in response.iter_content(block_size):
            recved += len(block)
            pct = recved * 100.0 / size
            print("\rDownloading %s: %5.1f%%" % (desc, pct), end="")
            if not block:
                break
            handle.write(block)
        print()
```
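For context (not part of the patch): urllib2's default opener already includes a `ProxyHandler` that honours the `http_proxy`/`https_proxy` environment variables, which is presumably the behaviour the switch relies on. A minimal sketch of that default behaviour made explicit; the URL is only a placeholder:

```python
import urllib2

# A ProxyHandler built with no arguments reads http_proxy / https_proxy
# from the environment; urllib2's default opener does the same implicitly.
opener = urllib2.build_opener(urllib2.ProxyHandler())
urllib2.install_opener(opener)

# Placeholder URL, just to exercise the proxied opener.
resp = urllib2.urlopen("https://example.com/")
print(resp.getcode())
```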
Commit 43d476eb2b

1 changed file with 30 additions and 23 deletions
```diff
@@ -6,7 +6,7 @@ import shutil
 import subprocess
 import sys
 import tarfile
-import urllib
+import urllib2
 
 from mach.decorators import (
     CommandArgument,
@@ -16,33 +16,40 @@ from mach.decorators import (
 
 from servo.command_base import CommandBase, cd, host_triple
 
-class PanickyUrlOpener(urllib.FancyURLopener):
-    def http_error_default(self, url, fp, errcode, errmsg, headers):
-        print("Download failed (%d): %s - %s" % (errcode, errmsg, url))
-
-        cpu_type = subprocess.check_output(["uname", "-m"]).strip().lower()
-        if errcode == 404 and cpu_type in ["i386", "i486", "i686", "i768", "x86"]:
-            # i686
-            print("Note: Servo does not currently bootstrap 32bit snapshots of Rust")
-            print("See https://github.com/servo/servo/issues/3899")
-
-        sys.exit(1)
 
 def download(desc, src, dst):
-    recved = [0]
-
-    def report(count, bsize, fsize):
-        recved[0] += bsize
-        pct = recved[0] * 100.0 / fsize
-        print("\rDownloading %s: %5.1f%%" % (desc, pct), end="")
-        sys.stdout.flush()
-
     print("Downloading %s..." % desc)
     dumb = (os.environ.get("TERM") == "dumb") or (not sys.stdout.isatty())
-    PanickyUrlOpener().retrieve(src, dst, None if dumb else report)
-    if not dumb:
-        print()
 
+    try:
+        resp = urllib2.urlopen(src)
+        fsize = int(resp.info().getheader('Content-Length').strip())
+        recved = 0
+        chunk_size = 8192
+
+        with open(dst, 'wb') as fd:
+            while True:
+                chunk = resp.read(chunk_size)
+                if not chunk: break
+                recved += len(chunk)
+                if not dumb:
+                    pct = recved * 100.0 / fsize
+                    print("\rDownloading %s: %5.1f%%" % (desc, pct), end="")
+                    sys.stdout.flush()
+                fd.write(chunk)
+
+        if not dumb:
+            print()
+    except urllib2.HTTPError, e:
+        print("Download failed (%d): %s - %s" % (e.code, e.reason, src))
+
+        cpu_type = subprocess.check_output(["uname", "-m"]).strip().lower()
+        if e.code == 404 and cpu_type in ["i386", "i486", "i686", "i768", "x86"]:
+            # i686
+            print("Note: Servo does not currently bootstrap 32bit snapshots of Rust")
+            print("See https://github.com/servo/servo/issues/3899")
+
+        sys.exit(1)
 
 def extract(src, dst, movedir=None):
     tarfile.open(src).extractall(dst)
```