mirror of
https://github.com/servo/servo.git
synced 2025-08-14 18:05:36 +01:00
Update web-platform-tests to revision 4a5223502fa660ce03e470af6a61c8bc26c5a8ee
This commit is contained in:
parent
c5f7c9ccf3
commit
e891345f26
1328 changed files with 36632 additions and 20588 deletions
7
tests/wpt/web-platform-tests/tools/certs/README.md
Normal file
7
tests/wpt/web-platform-tests/tools/certs/README.md
Normal file
|
@ -0,0 +1,7 @@
|
|||
To enable https://web-platform.test:8443/, add cacert.pem to your browser as Certificate Authority.
|
||||
|
||||
For Firefox, go to about:preferences and search for "certificates".
|
||||
|
||||
For browsers that use the Certificate Authorities of the underlying OS, such as Chrome and Safari,
|
||||
you need to adjust the OS. For macOS, go to Keychain Access and add the certificate under
|
||||
**login**.
|
|
@ -3,7 +3,7 @@ import os
|
|||
|
||||
from ..localpaths import repo_root
|
||||
|
||||
from ..serve.serve import load_config, normalise_config, make_hosts_file
|
||||
from ..serve.serve import load_config, make_hosts_file
|
||||
|
||||
def create_parser():
|
||||
parser = argparse.ArgumentParser()
|
||||
|
@ -14,6 +14,4 @@ def run(**kwargs):
|
|||
config = load_config(os.path.join(repo_root, "config.default.json"),
|
||||
os.path.join(repo_root, "config.json"))
|
||||
|
||||
config = normalise_config(config, {})
|
||||
|
||||
print(make_hosts_file(config, kwargs["address"]))
|
||||
|
|
|
@ -13,10 +13,13 @@ settings.register_profile("pypy", settings(suppress_health_check=[HealthCheck.to
|
|||
settings.load_profile(os.getenv("HYPOTHESIS_PROFILE",
|
||||
"default" if impl != "PyPy" else "pypy"))
|
||||
|
||||
# serve can't even be imported on Py3, so totally ignore it even from collection
|
||||
# these can't even be imported on Py3, so totally ignore it even from collection
|
||||
ignore_dirs = ["serve", "wptserve"]
|
||||
|
||||
collect_ignore = []
|
||||
if sys.version_info[0] >= 3:
|
||||
serve = os.path.join(os.path.dirname(__file__), "serve")
|
||||
collect_ignore.extend([os.path.join(root, f)
|
||||
for root, _, files in os.walk(serve)
|
||||
for f in files])
|
||||
for d in ignore_dirs:
|
||||
path = os.path.join(os.path.dirname(__file__), d)
|
||||
collect_ignore.extend([os.path.join(root, f)
|
||||
for root, _, files in os.walk(path)
|
||||
for f in files])
|
||||
|
|
|
@ -32,7 +32,10 @@ def setup_logging(prefix=False):
|
|||
if logger is None:
|
||||
logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0]))
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
logger.addHandler(handler)
|
||||
# Only add a handler if the parent logger is missing a handler
|
||||
if logger.parent and len(logger.parent.handlers) == 0:
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
logger.addHandler(handler)
|
||||
if prefix:
|
||||
format = logging.BASIC_FORMAT
|
||||
else:
|
||||
|
|
|
@ -225,6 +225,9 @@ def load(tests_root, manifest):
|
|||
rv = Manifest.from_json(tests_root, json.load(f))
|
||||
except IOError:
|
||||
return None
|
||||
except ValueError:
|
||||
logger.warning("%r may be corrupted", manifest)
|
||||
return None
|
||||
return rv
|
||||
|
||||
return Manifest.from_json(tests_root, json.load(manifest))
|
||||
|
|
|
@ -225,8 +225,9 @@ class SourceFile(object):
|
|||
# wdspec tests are in subdirectories of /webdriver excluding __init__.py
|
||||
# files.
|
||||
rel_dir_tree = self.rel_path.split(os.path.sep)
|
||||
return (rel_dir_tree[0] == "webdriver" and
|
||||
len(rel_dir_tree) > 1 and
|
||||
return (((rel_dir_tree[0] == "webdriver" and len(rel_dir_tree) > 1) or
|
||||
(rel_dir_tree[:2] == ["infrastructure", "webdriver"] and
|
||||
len(rel_dir_tree) > 2)) and
|
||||
self.filename not in ("__init__.py", "conftest.py") and
|
||||
fnmatch(self.filename, wd_pattern))
|
||||
|
||||
|
|
|
@ -395,7 +395,7 @@ def test_testharness_svg():
|
|||
assert not s.name_is_worker
|
||||
assert not s.name_is_reference
|
||||
|
||||
assert s.root
|
||||
assert s.root is not None
|
||||
assert s.content_is_testharness
|
||||
|
||||
assert items(s) == [("testharness", "/" + filename)]
|
||||
|
@ -424,7 +424,7 @@ def test_relative_testharness_svg():
|
|||
assert not s.name_is_worker
|
||||
assert not s.name_is_reference
|
||||
|
||||
assert s.root
|
||||
assert s.root is not None
|
||||
assert not s.content_is_testharness
|
||||
|
||||
assert items(s) == []
|
||||
|
|
|
@ -23,8 +23,10 @@ import sslutils
|
|||
from manifest.sourcefile import read_script_metadata, js_meta_re
|
||||
from wptserve import server as wptserve, handlers
|
||||
from wptserve import stash
|
||||
from wptserve import config
|
||||
from wptserve.logger import set_logger
|
||||
from wptserve.handlers import filesystem_path, wrap_pipeline
|
||||
from wptserve.utils import get_port
|
||||
from mod_pywebsocket import standalone as pywebsocket
|
||||
|
||||
def replace_end(s, old, new):
|
||||
|
@ -197,14 +199,6 @@ done();
|
|||
|
||||
rewrites = [("GET", "/resources/WebIDLParser.js", "/resources/webidl2/lib/webidl2.js")]
|
||||
|
||||
subdomains = [u"www",
|
||||
u"www1",
|
||||
u"www2",
|
||||
u"天気の良い日",
|
||||
u"élève"]
|
||||
|
||||
not_subdomains = [u"nonexistent-origin"]
|
||||
|
||||
class RoutesBuilder(object):
|
||||
def __init__(self):
|
||||
self.forbidden_override = [("GET", "/tools/runner/*", handlers.file_handler),
|
||||
|
@ -282,105 +276,6 @@ def build_routes(aliases):
|
|||
return builder.get_routes()
|
||||
|
||||
|
||||
def setup_logger(level):
|
||||
import logging
|
||||
global logger
|
||||
logger = logging.getLogger("web-platform-tests")
|
||||
logger.setLevel(getattr(logging, level.upper()))
|
||||
set_logger(logger)
|
||||
|
||||
|
||||
def open_socket(port):
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if port != 0:
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sock.bind(('127.0.0.1', port))
|
||||
sock.listen(5)
|
||||
return sock
|
||||
|
||||
def bad_port(port):
|
||||
"""
|
||||
Bad port as per https://fetch.spec.whatwg.org/#port-blocking
|
||||
"""
|
||||
return port in [
|
||||
1, # tcpmux
|
||||
7, # echo
|
||||
9, # discard
|
||||
11, # systat
|
||||
13, # daytime
|
||||
15, # netstat
|
||||
17, # qotd
|
||||
19, # chargen
|
||||
20, # ftp-data
|
||||
21, # ftp
|
||||
22, # ssh
|
||||
23, # telnet
|
||||
25, # smtp
|
||||
37, # time
|
||||
42, # name
|
||||
43, # nicname
|
||||
53, # domain
|
||||
77, # priv-rjs
|
||||
79, # finger
|
||||
87, # ttylink
|
||||
95, # supdup
|
||||
101, # hostriame
|
||||
102, # iso-tsap
|
||||
103, # gppitnp
|
||||
104, # acr-nema
|
||||
109, # pop2
|
||||
110, # pop3
|
||||
111, # sunrpc
|
||||
113, # auth
|
||||
115, # sftp
|
||||
117, # uucp-path
|
||||
119, # nntp
|
||||
123, # ntp
|
||||
135, # loc-srv / epmap
|
||||
139, # netbios
|
||||
143, # imap2
|
||||
179, # bgp
|
||||
389, # ldap
|
||||
465, # smtp+ssl
|
||||
512, # print / exec
|
||||
513, # login
|
||||
514, # shell
|
||||
515, # printer
|
||||
526, # tempo
|
||||
530, # courier
|
||||
531, # chat
|
||||
532, # netnews
|
||||
540, # uucp
|
||||
556, # remotefs
|
||||
563, # nntp+ssl
|
||||
587, # smtp
|
||||
601, # syslog-conn
|
||||
636, # ldap+ssl
|
||||
993, # imap+ssl
|
||||
995, # pop3+ssl
|
||||
2049, # nfs
|
||||
3659, # apple-sasl
|
||||
4045, # lockd
|
||||
6000, # x11
|
||||
6665, # irc (alternate)
|
||||
6666, # irc (alternate)
|
||||
6667, # irc (default)
|
||||
6668, # irc (alternate)
|
||||
6669, # irc (alternate)
|
||||
]
|
||||
|
||||
def get_port():
|
||||
port = 0
|
||||
while True:
|
||||
free_socket = open_socket(0)
|
||||
port = free_socket.getsockname()[1]
|
||||
free_socket.close()
|
||||
if not bad_port(port):
|
||||
break
|
||||
logger.debug("Going to use port %s" % port)
|
||||
return port
|
||||
|
||||
|
||||
class ServerProc(object):
|
||||
def __init__(self):
|
||||
self.proc = None
|
||||
|
@ -432,9 +327,11 @@ class ServerProc(object):
|
|||
return self.proc.is_alive()
|
||||
|
||||
|
||||
def check_subdomains(host, paths, bind_address, ssl_config, aliases):
|
||||
port = get_port()
|
||||
subdomains = get_subdomains(host)
|
||||
def check_subdomains(domains, paths, bind_address, ssl_config, aliases):
|
||||
domains = domains.copy()
|
||||
host = domains.pop("")
|
||||
port = get_port(host)
|
||||
logger.debug("Going to use port %d to check subdomains" % port)
|
||||
|
||||
wrapper = ServerProc()
|
||||
wrapper.start(start_http_server, host, port, paths, build_routes(aliases), bind_address,
|
||||
|
@ -454,8 +351,7 @@ def check_subdomains(host, paths, bind_address, ssl_config, aliases):
|
|||
"You may need to edit /etc/hosts or similar, see README.md." % (host, port))
|
||||
sys.exit(1)
|
||||
|
||||
for subdomain, (punycode, host) in subdomains.iteritems():
|
||||
domain = "%s.%s" % (punycode, host)
|
||||
for domain in domains.itervalues():
|
||||
try:
|
||||
urllib2.urlopen("http://%s:%d/" % (domain, port))
|
||||
except Exception as e:
|
||||
|
@ -466,18 +362,6 @@ def check_subdomains(host, paths, bind_address, ssl_config, aliases):
|
|||
wrapper.wait()
|
||||
|
||||
|
||||
def get_subdomains(host):
|
||||
#This assumes that the tld is ascii-only or already in punycode
|
||||
return {subdomain: (subdomain.encode("idna"), host)
|
||||
for subdomain in subdomains}
|
||||
|
||||
|
||||
def get_not_subdomains(host):
|
||||
#This assumes that the tld is ascii-only or already in punycode
|
||||
return {subdomain: (subdomain.encode("idna"), host)
|
||||
for subdomain in not_subdomains}
|
||||
|
||||
|
||||
def make_hosts_file(config, host):
|
||||
rv = []
|
||||
|
||||
|
@ -630,80 +514,14 @@ def start_wss_server(host, port, paths, routes, bind_address, config, ssl_config
|
|||
ssl_config)
|
||||
|
||||
|
||||
def get_ports(config, ssl_environment):
|
||||
rv = defaultdict(list)
|
||||
for scheme, ports in config["ports"].iteritems():
|
||||
for i, port in enumerate(ports):
|
||||
if scheme in ["wss", "https"] and not ssl_environment.ssl_enabled:
|
||||
port = None
|
||||
if port == "auto":
|
||||
port = get_port()
|
||||
else:
|
||||
port = port
|
||||
rv[scheme].append(port)
|
||||
return rv
|
||||
|
||||
|
||||
|
||||
def normalise_config(config, ports):
|
||||
if "host" in config:
|
||||
logger.warning("host in config is deprecated; use browser_host instead")
|
||||
host = config["host"]
|
||||
else:
|
||||
host = config["browser_host"]
|
||||
|
||||
domains = get_subdomains(host)
|
||||
not_domains = get_not_subdomains(host)
|
||||
|
||||
ports_ = {}
|
||||
for scheme, ports_used in ports.iteritems():
|
||||
ports_[scheme] = ports_used
|
||||
|
||||
for key, value in domains.iteritems():
|
||||
domains[key] = ".".join(value)
|
||||
|
||||
for key, value in not_domains.iteritems():
|
||||
not_domains[key] = ".".join(value)
|
||||
|
||||
domains[""] = host
|
||||
|
||||
if "bind_hostname" in config:
|
||||
logger.warning("bind_hostname in config is deprecated; use bind_address instead")
|
||||
bind_address = config["bind_hostname"]
|
||||
else:
|
||||
bind_address = config["bind_address"]
|
||||
|
||||
# make a (shallow) copy of the config and update that, so that the
|
||||
# normalized config can be used in place of the original one.
|
||||
config_ = config.copy()
|
||||
config_["domains"] = domains
|
||||
config_["not_domains"] = not_domains
|
||||
config_["ports"] = ports_
|
||||
config_["bind_address"] = bind_address
|
||||
if config.get("server_host", None) is None:
|
||||
config_["server_host"] = host
|
||||
return config_
|
||||
|
||||
|
||||
def get_paths(config):
|
||||
return {"doc_root": config["doc_root"],
|
||||
"ws_doc_root": config["ws_doc_root"]}
|
||||
|
||||
|
||||
def get_ssl_config(config, ssl_environment):
|
||||
external_domains = config["domains"].values()
|
||||
key_path, cert_path = ssl_environment.host_cert_path(external_domains)
|
||||
return {"key_path": key_path,
|
||||
"cert_path": cert_path,
|
||||
"encrypt_after_connect": config["ssl"]["encrypt_after_connect"]}
|
||||
|
||||
|
||||
def start(config, ssl_environment, routes, **kwargs):
|
||||
host = config["server_host"]
|
||||
ports = get_ports(config, ssl_environment)
|
||||
paths = get_paths(config)
|
||||
ports = config.ports
|
||||
paths = config.paths
|
||||
bind_address = config["bind_address"]
|
||||
ssl_config = get_ssl_config(config, ssl_environment)
|
||||
ssl_config = config.ssl_config
|
||||
|
||||
logger.debug("Using ports: %r" % ports)
|
||||
|
||||
servers = start_servers(host, ports, paths, routes, bind_address, config,
|
||||
ssl_config, **kwargs)
|
||||
|
@ -717,49 +535,6 @@ def iter_procs(servers):
|
|||
yield server.proc
|
||||
|
||||
|
||||
def value_set(config, key):
|
||||
return key in config and config[key] is not None
|
||||
|
||||
|
||||
def get_value_or_default(config, key, default=None):
|
||||
return config[key] if value_set(config, key) else default
|
||||
|
||||
|
||||
def set_computed_defaults(config):
|
||||
if not value_set(config, "doc_root"):
|
||||
config["doc_root"] = repo_root
|
||||
|
||||
if not value_set(config, "ws_doc_root"):
|
||||
root = get_value_or_default(config, "doc_root", default=repo_root)
|
||||
config["ws_doc_root"] = os.path.join(root, "websockets", "handlers")
|
||||
|
||||
if not value_set(config, "aliases"):
|
||||
config["aliases"] = []
|
||||
|
||||
|
||||
def merge_json(base_obj, override_obj):
|
||||
rv = {}
|
||||
for key, value in base_obj.iteritems():
|
||||
if key not in override_obj:
|
||||
rv[key] = value
|
||||
else:
|
||||
if isinstance(value, dict):
|
||||
rv[key] = merge_json(value, override_obj[key])
|
||||
else:
|
||||
rv[key] = override_obj[key]
|
||||
return rv
|
||||
|
||||
|
||||
def get_ssl_environment(config):
|
||||
implementation_type = config["ssl"]["type"]
|
||||
cls = sslutils.environments[implementation_type]
|
||||
try:
|
||||
kwargs = config["ssl"][implementation_type].copy()
|
||||
except KeyError:
|
||||
raise ValueError("%s is not a vaid ssl type." % implementation_type)
|
||||
return cls(logger, **kwargs)
|
||||
|
||||
|
||||
def load_config(default_path, override_path=None, **kwargs):
|
||||
if os.path.exists(default_path):
|
||||
with open(default_path) as f:
|
||||
|
@ -767,20 +542,19 @@ def load_config(default_path, override_path=None, **kwargs):
|
|||
else:
|
||||
raise ValueError("Config path %s does not exist" % default_path)
|
||||
|
||||
rv = Config(**base_obj)
|
||||
|
||||
if os.path.exists(override_path):
|
||||
with open(override_path) as f:
|
||||
override_obj = json.load(f)
|
||||
else:
|
||||
override_obj = {}
|
||||
rv = merge_json(base_obj, override_obj)
|
||||
rv.update(override_obj)
|
||||
|
||||
if kwargs.get("config_path"):
|
||||
other_path = os.path.abspath(os.path.expanduser(kwargs.get("config_path")))
|
||||
if os.path.exists(other_path):
|
||||
base_obj = rv
|
||||
with open(other_path) as f:
|
||||
override_obj = json.load(f)
|
||||
rv = merge_json(base_obj, override_obj)
|
||||
rv.update(override_obj)
|
||||
else:
|
||||
raise ValueError("Config path %s does not exist" % other_path)
|
||||
|
||||
|
@ -793,11 +567,30 @@ def load_config(default_path, override_path=None, **kwargs):
|
|||
value = os.path.abspath(os.path.expanduser(value))
|
||||
if not os.path.exists(value):
|
||||
raise ValueError("%s path %s does not exist" % (title, value))
|
||||
rv[key] = value
|
||||
setattr(rv, key, value)
|
||||
|
||||
set_computed_defaults(rv)
|
||||
return rv
|
||||
|
||||
_subdomains = {u"www",
|
||||
u"www1",
|
||||
u"www2",
|
||||
u"天気の良い日",
|
||||
u"élève"}
|
||||
|
||||
_not_subdomains = {u"nonexistent-origin"}
|
||||
|
||||
class Config(config.Config):
|
||||
"""serve config
|
||||
|
||||
this subclasses wptserve.config.Config to add serve config options"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Config, self).__init__(
|
||||
subdomains=_subdomains,
|
||||
not_subdomains=_not_subdomains,
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
def get_parser():
|
||||
parser = argparse.ArgumentParser()
|
||||
|
@ -817,32 +610,31 @@ def run(**kwargs):
|
|||
os.path.join(repo_root, "config.json"),
|
||||
**kwargs)
|
||||
|
||||
setup_logger(config["log_level"])
|
||||
global logger
|
||||
logger = config.logger
|
||||
set_logger(logger)
|
||||
|
||||
with get_ssl_environment(config) as ssl_env:
|
||||
ports = get_ports(config, ssl_env)
|
||||
config = normalise_config(config, ports)
|
||||
host = config["browser_host"]
|
||||
bind_address = config["bind_address"]
|
||||
bind_address = config["bind_address"]
|
||||
|
||||
if config["check_subdomains"]:
|
||||
paths = get_paths(config)
|
||||
ssl_config = get_ssl_config(config, ssl_env)
|
||||
check_subdomains(host, paths, bind_address, ssl_config, config["aliases"])
|
||||
if config["check_subdomains"]:
|
||||
paths = config.paths
|
||||
ssl_config = config.ssl_config
|
||||
check_subdomains(config.domains, paths, bind_address, ssl_config, config["aliases"])
|
||||
|
||||
stash_address = None
|
||||
if bind_address:
|
||||
stash_address = (host, get_port())
|
||||
stash_address = None
|
||||
if bind_address:
|
||||
stash_address = (config.server_host, get_port(config.server_host))
|
||||
logger.debug("Going to use port %d for stash" % stash_address[1])
|
||||
|
||||
with stash.StashServer(stash_address, authkey=str(uuid.uuid4())):
|
||||
servers = start(config, ssl_env, build_routes(config["aliases"]), **kwargs)
|
||||
with stash.StashServer(stash_address, authkey=str(uuid.uuid4())):
|
||||
servers = start(config, config.ssl_env, build_routes(config["aliases"]), **kwargs)
|
||||
|
||||
try:
|
||||
while any(item.is_alive() for item in iter_procs(servers)):
|
||||
for item in iter_procs(servers):
|
||||
item.join(1)
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Shutting down")
|
||||
try:
|
||||
while any(item.is_alive() for item in iter_procs(servers)):
|
||||
for item in iter_procs(servers):
|
||||
item.join(1)
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Shutting down")
|
||||
|
||||
|
||||
def main():
|
||||
|
|
|
@ -136,7 +136,7 @@ certs = $dir
|
|||
new_certs_dir = $certs
|
||||
crl_dir = $dir%(sep)scrl
|
||||
database = $dir%(sep)sindex.txt
|
||||
private_key = $dir%(sep)scakey.pem
|
||||
private_key = $dir%(sep)scacert.key
|
||||
certificate = $dir%(sep)scacert.pem
|
||||
serial = $dir%(sep)sserial
|
||||
crldir = $dir%(sep)scrl
|
||||
|
@ -294,7 +294,7 @@ class OpenSSLEnvironment(object):
|
|||
return self._ca_cert_path
|
||||
|
||||
def _load_ca_cert(self):
|
||||
key_path = self.path("cakey.pem")
|
||||
key_path = self.path("cacert.key")
|
||||
cert_path = self.path("cacert.pem")
|
||||
|
||||
if self.check_key_cert(key_path, cert_path, None):
|
||||
|
@ -327,7 +327,7 @@ class OpenSSLEnvironment(object):
|
|||
path = self.path
|
||||
self.logger.info("Generating new CA in %s" % self.base_path)
|
||||
|
||||
key_path = path("cakey.pem")
|
||||
key_path = path("cacert.key")
|
||||
req_path = path("careq.pem")
|
||||
cert_path = path("cacert.pem")
|
||||
|
||||
|
|
|
@ -367,6 +367,9 @@ class Session(object):
|
|||
self.alert = UserPrompt(self)
|
||||
self.actions = Actions(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s %s>" % (self.__class__.__name__, self.session_id or "(disconnected)")
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.session_id is not None and isinstance(other, Session) and
|
||||
self.session_id == other.session_id)
|
||||
|
@ -628,6 +631,9 @@ class Element(object):
|
|||
assert id not in self.session._element_cache
|
||||
self.session._element_cache[self.id] = self
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s %s>" % (self.__class__.__name__, self.id)
|
||||
|
||||
def __eq__(self, other):
|
||||
return (isinstance(other, Element) and self.id == other.id and
|
||||
self.session == other.session)
|
||||
|
|
|
@ -179,22 +179,70 @@ class Firefox(Browser):
|
|||
def find_webdriver(self):
|
||||
return find_executable("geckodriver")
|
||||
|
||||
def install_prefs(self, dest=None):
|
||||
def get_version_number(self, binary):
|
||||
version_re = re.compile("Mozilla Firefox (\d+\.\d+(?:\.\d+)?)(a|b)?")
|
||||
proc = subprocess.Popen([binary, "--version"], stdout=subprocess.PIPE)
|
||||
stdout, _ = proc.communicate()
|
||||
stdout.strip()
|
||||
m = version_re.match(stdout)
|
||||
if not m:
|
||||
return None, "nightly"
|
||||
version, status = m.groups()
|
||||
channel = {"a": "nightly", "b": "beta"}
|
||||
return version, channel.get(status, "stable")
|
||||
|
||||
def get_prefs_url(self, version, channel):
|
||||
if channel == "stable":
|
||||
repo = "https://hg.mozilla.org/releases/mozilla-release"
|
||||
tag = "FIREFOX_%s_RELEASE" % version.replace(".", "_")
|
||||
else:
|
||||
repo = "https://hg.mozilla.org/mozilla-central"
|
||||
if channel == "beta":
|
||||
tag = "FIREFOX_%s_BETA" % version.split(".", 1)[0]
|
||||
else:
|
||||
# Always use tip as the tag for nightly; this isn't quite right
|
||||
# but to do better we need the actual build revision, which we
|
||||
# can get if we have an application.ini file
|
||||
tag = "tip"
|
||||
|
||||
return "%s/raw-file/%s/testing/profiles/prefs_general.js" % (repo, tag)
|
||||
|
||||
def install_prefs(self, binary, dest=None):
|
||||
version, channel = self.get_version_number(binary)
|
||||
|
||||
if dest is None:
|
||||
dest = os.pwd
|
||||
|
||||
dest = os.path.join(dest, "profiles")
|
||||
if not os.path.exists(dest):
|
||||
os.makedirs(dest)
|
||||
prefs_path = os.path.join(dest, "prefs_general.js")
|
||||
prefs_file = os.path.join(dest, "prefs_general.js")
|
||||
cache_file = os.path.join(dest,
|
||||
"%s-%s.cache" % (version, channel)
|
||||
if channel != "nightly"
|
||||
else "nightly.cache")
|
||||
|
||||
now = datetime.now()
|
||||
if (not os.path.exists(prefs_path) or
|
||||
(datetime.fromtimestamp(os.stat(prefs_path).st_mtime) <
|
||||
now - timedelta(days=2))):
|
||||
with open(prefs_path, "wb") as f:
|
||||
resp = get("https://hg.mozilla.org/mozilla-central/raw-file/tip/testing/profiles/prefs_general.js")
|
||||
have_cache = False
|
||||
if os.path.exists(cache_file):
|
||||
if channel != "nightly":
|
||||
have_cache = True
|
||||
else:
|
||||
now = datetime.now()
|
||||
have_cache = (datetime.fromtimestamp(os.stat(cache_file).st_mtime) >
|
||||
now - timedelta(days=1))
|
||||
|
||||
# If we don't have a recent download, grab the url
|
||||
if not have_cache:
|
||||
url = self.get_prefs_url(version, channel)
|
||||
|
||||
with open(cache_file, "wb") as f:
|
||||
print("Installing test prefs from %s" % url)
|
||||
resp = get(url)
|
||||
f.write(resp.content)
|
||||
else:
|
||||
print("Using cached test prefs from %s" % cache_file)
|
||||
|
||||
shutil.copyfile(cache_file, prefs_file)
|
||||
|
||||
return dest
|
||||
|
||||
|
|
|
@ -98,7 +98,6 @@ def check_environ(product):
|
|||
if product not in ("firefox", "servo"):
|
||||
config = serve.load_config(os.path.join(wpt_root, "config.default.json"),
|
||||
os.path.join(wpt_root, "config.json"))
|
||||
config = serve.normalise_config(config, {})
|
||||
expected_hosts = (set(config["domains"].itervalues()) ^
|
||||
set(config["not_domains"].itervalues()))
|
||||
missing_hosts = set(expected_hosts)
|
||||
|
@ -202,8 +201,7 @@ Consider installing certutil via your OS package manager or directly.""")
|
|||
kwargs["test_types"].remove("wdspec")
|
||||
|
||||
if kwargs["prefs_root"] is None:
|
||||
print("Downloading gecko prefs")
|
||||
prefs_root = self.browser.install_prefs(self.venv.path)
|
||||
prefs_root = self.browser.install_prefs(kwargs["binary"], self.venv.path)
|
||||
kwargs["prefs_root"] = prefs_root
|
||||
|
||||
|
||||
|
|
|
@ -183,13 +183,14 @@ def test_files_changed_ignore_rules():
|
|||
def test_tests_affected(capsys, manifest_dir):
|
||||
# This doesn't really work properly for random commits because we test the files in
|
||||
# the current working directory for references to the changed files, not the ones at
|
||||
# that specific commit. But we can at least test it returns something sensible
|
||||
commit = "9047ac1d9f51b1e9faa4f9fad9c47d109609ab09"
|
||||
# that specific commit. But we can at least test it returns something sensible.
|
||||
# The test will fail if the file we assert is renamed, so we choose a stable one.
|
||||
commit = "3a055e818218f548db240c316654f3cc1aeeb733"
|
||||
with pytest.raises(SystemExit) as excinfo:
|
||||
wpt.main(argv=["tests-affected", "--metadata", manifest_dir, "%s~..%s" % (commit, commit)])
|
||||
assert excinfo.value.code == 0
|
||||
out, err = capsys.readouterr()
|
||||
assert "html/browsers/offline/appcache/workers/appcache-worker.html" in out
|
||||
assert "infrastructure/reftest-wait.html" in out
|
||||
|
||||
|
||||
@pytest.mark.slow
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
marionette_driver == 2.5.0
|
||||
mozprofile == 0.29
|
||||
marionette_driver==2.6.0
|
||||
mozprofile==1.0.0
|
||||
mozprocess == 0.26
|
||||
mozcrash == 1.0
|
||||
mozrunner == 6.14
|
||||
mozrunner==6.15
|
||||
mozleak == 0.1
|
||||
mozinstall == 1.15
|
||||
mozdownload == 1.23
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
xfail_strict=true
|
||||
|
||||
[tox]
|
||||
envlist = {py27,pypy}-{base,chrome,edge,firefox,ie,opera,safari,sauce,servo},py27-flake8
|
||||
envlist = {py27}-{base,chrome,edge,firefox,ie,opera,safari,sauce,servo},py27-flake8
|
||||
|
||||
[testenv]
|
||||
deps =
|
||||
|
|
|
@ -102,7 +102,7 @@ def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
|
|||
if kwargs["binary_args"]:
|
||||
options["args"] = kwargs["binary_args"]
|
||||
options["prefs"] = {
|
||||
"network.dns.localDomains": ",".join(server_config['domains'].values())
|
||||
"network.dns.localDomains": ",".join(server_config.domains.itervalues())
|
||||
}
|
||||
capabilities["moz:firefoxOptions"] = options
|
||||
if kwargs["certutil_binary"] is None:
|
||||
|
@ -133,8 +133,8 @@ def run_info_extras(**kwargs):
|
|||
|
||||
|
||||
def update_properties():
|
||||
return (["debug", "stylo", "e10s", "os", "version", "processor", "bits"],
|
||||
{"debug", "e10s", "stylo"})
|
||||
return (["debug", "webrender", "e10s", "os", "version", "processor", "bits"],
|
||||
{"debug", "e10s", "webrender"})
|
||||
|
||||
|
||||
class FirefoxBrowser(Browser):
|
||||
|
@ -198,7 +198,7 @@ class FirefoxBrowser(Browser):
|
|||
self.profile = FirefoxProfile(preferences=preferences)
|
||||
self.profile.set_preferences({"marionette.port": self.marionette_port,
|
||||
"dom.disable_open_during_load": False,
|
||||
"network.dns.localDomains": ",".join(self.config['domains'].values()),
|
||||
"network.dns.localDomains": ",".join(self.config.domains.itervalues()),
|
||||
"network.proxy.type": 0,
|
||||
"places.history.enabled": False,
|
||||
"dom.send_after_paint_to_content": True,
|
||||
|
|
|
@ -1,2 +1,9 @@
|
|||
@echo off
|
||||
reg add "HKCU\Software\Classes\Local Settings\Software\Microsoft\Windows\CurrentVersion\AppContainer\Storage\microsoft.microsoftedge_8wekyb3d8bbwe\MicrosoftEdge\New Windows" /v "PopupMgr" /t REG_SZ /d no
|
||||
|
||||
|
||||
REM Download and install the Ahem font
|
||||
REM - https://wiki.saucelabs.com/display/DOCS/Downloading+Files+to+a+Sauce+Labs+Virtual+Machine+Prior+to+Testing
|
||||
REM - https://superuser.com/questions/201896/how-do-i-install-a-font-from-the-windows-command-prompt
|
||||
bitsadmin.exe /transfer "JobName" https://github.com/w3c/web-platform-tests/raw/master/fonts/Ahem.ttf "%WINDIR%\Fonts\Ahem.ttf"
|
||||
reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Fonts" /v "Ahem (TrueType)" /t REG_SZ /d Ahem.ttf /f
|
||||
|
|
|
@ -1,2 +1,3 @@
|
|||
#!/bin/bash
|
||||
curl https://raw.githubusercontent.com/w3c/web-platform-tests/master/fonts/Ahem.ttf > ~/Library/Fonts/Ahem.ttf
|
||||
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaScriptCanOpenWindowsAutomatically -bool true
|
||||
|
|
|
@ -94,8 +94,6 @@ class TestEnvironment(object):
|
|||
|
||||
self.config = self.load_config()
|
||||
self.setup_server_logging()
|
||||
ports = serve.get_ports(self.config, self.ssl_env)
|
||||
self.config = serve.normalise_config(self.config, ports)
|
||||
|
||||
assert self.env_extras_cms is None, (
|
||||
"A TestEnvironment object cannot be nested")
|
||||
|
@ -137,43 +135,37 @@ class TestEnvironment(object):
|
|||
|
||||
def load_config(self):
|
||||
default_config_path = os.path.join(serve_path(self.test_paths), "config.default.json")
|
||||
local_config = {
|
||||
"ports": {
|
||||
"http": [8000, 8001],
|
||||
"https": [8443],
|
||||
"ws": [8888]
|
||||
},
|
||||
"check_subdomains": False,
|
||||
"ssl": {}
|
||||
}
|
||||
|
||||
if "browser_host" in self.options:
|
||||
local_config["browser_host"] = self.options["browser_host"]
|
||||
|
||||
if "bind_address" in self.options:
|
||||
local_config["bind_address"] = self.options["bind_address"]
|
||||
override_path = os.path.join(serve_path(self.test_paths), "config.json")
|
||||
|
||||
with open(default_config_path) as f:
|
||||
default_config = json.load(f)
|
||||
|
||||
local_config["server_host"] = self.options.get("server_host", None)
|
||||
local_config["ssl"]["encrypt_after_connect"] = self.options.get("encrypt_after_connect", False)
|
||||
config = serve.Config(override_ssl_env=self.ssl_env, **default_config)
|
||||
|
||||
config = serve.merge_json(default_config, local_config)
|
||||
config["doc_root"] = serve_path(self.test_paths)
|
||||
config.ports = {
|
||||
"http": [8000, 8001],
|
||||
"https": [8443],
|
||||
"ws": [8888],
|
||||
"wss": [8889],
|
||||
}
|
||||
|
||||
if not self.ssl_env.ssl_enabled:
|
||||
config["ports"]["https"] = [None]
|
||||
if os.path.exists(override_path):
|
||||
with open(override_path) as f:
|
||||
override_obj = json.load(f)
|
||||
config.update(override_obj)
|
||||
|
||||
host = config["browser_host"]
|
||||
hosts = [host]
|
||||
hosts.extend("%s.%s" % (item[0], host) for item in serve.get_subdomains(host).values())
|
||||
key_file, certificate = self.ssl_env.host_cert_path(hosts)
|
||||
config.check_subdomains = False
|
||||
config.ssl = {}
|
||||
|
||||
config["key_file"] = key_file
|
||||
config["certificate"] = certificate
|
||||
if "browser_host" in self.options:
|
||||
config.browser_host = self.options["browser_host"]
|
||||
|
||||
serve.set_computed_defaults(config)
|
||||
if "bind_address" in self.options:
|
||||
config.bind_address = self.options["bind_address"]
|
||||
|
||||
config.server_host = self.options.get("server_host", None)
|
||||
config.ssl["encrypt_after_connect"] = self.options.get("encrypt_after_connect", False)
|
||||
config.doc_root = serve_path(self.test_paths)
|
||||
|
||||
return config
|
||||
|
||||
|
|
|
@ -193,7 +193,7 @@ class TestExecutor(object):
|
|||
if hasattr(e, "status") and e.status in test.result_cls.statuses:
|
||||
status = e.status
|
||||
else:
|
||||
status = "ERROR"
|
||||
status = "INTERNAL-ERROR"
|
||||
message = unicode(getattr(e, "message", ""))
|
||||
if message:
|
||||
message += "\n"
|
||||
|
@ -406,7 +406,7 @@ class WdspecRun(object):
|
|||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
self.result = False, ("ERROR", message)
|
||||
self.result = False, ("INTERNAL-ERROR", message)
|
||||
finally:
|
||||
self.result_flag.set()
|
||||
|
||||
|
@ -438,6 +438,8 @@ class ConnectionlessProtocol(Protocol):
|
|||
class WebDriverProtocol(Protocol):
|
||||
server_cls = None
|
||||
|
||||
implements = [ConnectionlessBaseProtocolPart]
|
||||
|
||||
def __init__(self, executor, browser):
|
||||
Protocol.__init__(self, executor, browser)
|
||||
self.webdriver_binary = executor.webdriver_binary
|
||||
|
@ -501,7 +503,8 @@ class CallbackHandler(object):
|
|||
}
|
||||
|
||||
self.actions = {
|
||||
"click": ClickAction(self.logger, self.protocol)
|
||||
"click": ClickAction(self.logger, self.protocol),
|
||||
"send_keys": SendKeysAction(self.logger, self.protocol)
|
||||
}
|
||||
|
||||
def __call__(self, result):
|
||||
|
@ -544,7 +547,6 @@ class CallbackHandler(object):
|
|||
def _send_message(self, message_type, status, message=None):
|
||||
self.protocol.testdriver.send_message(message_type, status, message=message)
|
||||
|
||||
|
||||
class ClickAction(object):
|
||||
def __init__(self, logger, protocol):
|
||||
self.logger = logger
|
||||
|
@ -559,3 +561,19 @@ class ClickAction(object):
|
|||
raise ValueError("Selector matches multiple elements")
|
||||
self.logger.debug("Clicking element: %s" % selector)
|
||||
self.protocol.click.element(elements[0])
|
||||
|
||||
class SendKeysAction(object):
|
||||
def __init__(self, logger, protocol):
|
||||
self.logger = logger
|
||||
self.protocol = protocol
|
||||
|
||||
def __call__(self, payload):
|
||||
selector = payload["selector"]
|
||||
keys = payload["keys"]
|
||||
elements = self.protocol.select.elements_by_selector(selector)
|
||||
if len(elements) == 0:
|
||||
raise ValueError("Selector matches no elements")
|
||||
elif len(elements) > 1:
|
||||
raise ValueError("Selector matches multiple elements")
|
||||
self.logger.debug("Sending keys to element: %s" % selector)
|
||||
self.protocol.send_keys.send_keys(elements[0], keys)
|
||||
|
|
|
@ -32,6 +32,7 @@ from .protocol import (BaseProtocolPart,
|
|||
StorageProtocolPart,
|
||||
SelectorProtocolPart,
|
||||
ClickProtocolPart,
|
||||
SendKeysProtocolPart,
|
||||
TestDriverProtocolPart)
|
||||
from ..testrunner import Stop
|
||||
from ..webdriver_server import GeckoDriverServer
|
||||
|
@ -307,6 +308,12 @@ class MarionetteClickProtocolPart(ClickProtocolPart):
|
|||
def element(self, element):
|
||||
return element.click()
|
||||
|
||||
class MarionetteSendKeysProtocolPart(SendKeysProtocolPart):
|
||||
def setup(self):
|
||||
self.marionette = self.parent.marionette
|
||||
|
||||
def send_keys(self, element, keys):
|
||||
return element.send_keys(keys)
|
||||
|
||||
class MarionetteTestDriverProtocolPart(TestDriverProtocolPart):
|
||||
def setup(self):
|
||||
|
@ -329,6 +336,7 @@ class MarionetteProtocol(Protocol):
|
|||
MarionetteStorageProtocolPart,
|
||||
MarionetteSelectorProtocolPart,
|
||||
MarionetteClickProtocolPart,
|
||||
MarionetteSendKeysProtocolPart,
|
||||
MarionetteTestDriverProtocolPart]
|
||||
|
||||
def __init__(self, executor, browser, capabilities=None, timeout_multiplier=1):
|
||||
|
@ -395,7 +403,7 @@ class MarionetteProtocol(Protocol):
|
|||
self.prefs.set(name, value)
|
||||
|
||||
for name, value in new_environment.get("prefs", {}).iteritems():
|
||||
self.executor.original_pref_values[name] = self.get_pref(name)
|
||||
self.executor.original_pref_values[name] = self.prefs.get(name)
|
||||
self.prefs.set(name, value)
|
||||
|
||||
|
||||
|
@ -446,7 +454,7 @@ class ExecuteAsyncScriptRun(object):
|
|||
# We didn't get any data back from the test, so check if the
|
||||
# browser is still responsive
|
||||
if self.protocol.is_alive:
|
||||
self.result = False, ("ERROR", None)
|
||||
self.result = False, ("INTERNAL-ERROR", None)
|
||||
else:
|
||||
self.result = False, ("CRASH", None)
|
||||
return self.result
|
||||
|
@ -467,7 +475,7 @@ class ExecuteAsyncScriptRun(object):
|
|||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
self.result = False, ("ERROR", e)
|
||||
self.result = False, ("INTERNAL-ERROR", e)
|
||||
|
||||
finally:
|
||||
self.result_flag.set()
|
||||
|
@ -631,12 +639,12 @@ class MarionetteRefTestExecutor(RefTestExecutor):
|
|||
test_url,
|
||||
timeout).run()
|
||||
|
||||
def _screenshot(self, marionette, url, timeout):
|
||||
marionette.navigate(url)
|
||||
def _screenshot(self, protocol, url, timeout):
|
||||
protocol.marionette.navigate(url)
|
||||
|
||||
marionette.execute_async_script(self.wait_script)
|
||||
protocol.base.execute_script(self.wait_script, async=True)
|
||||
|
||||
screenshot = marionette.screenshot(full=False)
|
||||
screenshot = protocol.marionette.screenshot(full=False)
|
||||
# strip off the data:img/png, part of the url
|
||||
if screenshot.startswith("data:image/png;base64,"):
|
||||
screenshot = screenshot.split(",", 1)[1]
|
||||
|
|
|
@ -19,6 +19,7 @@ from .protocol import (BaseProtocolPart,
|
|||
Protocol,
|
||||
SelectorProtocolPart,
|
||||
ClickProtocolPart,
|
||||
SendKeysProtocolPart,
|
||||
TestDriverProtocolPart)
|
||||
from ..testrunner import Stop
|
||||
|
||||
|
@ -134,6 +135,13 @@ class SeleniumClickProtocolPart(ClickProtocolPart):
|
|||
def element(self, element):
|
||||
return element.click()
|
||||
|
||||
class SeleniumSendKeysProtocolPart(SendKeysProtocolPart):
|
||||
def setup(self):
|
||||
self.webdriver = self.parent.webdriver
|
||||
|
||||
def send_keys(self, element, keys):
|
||||
return element.send_keys(keys)
|
||||
|
||||
|
||||
class SeleniumTestDriverProtocolPart(TestDriverProtocolPart):
|
||||
def setup(self):
|
||||
|
@ -154,6 +162,7 @@ class SeleniumProtocol(Protocol):
|
|||
SeleniumTestharnessProtocolPart,
|
||||
SeleniumSelectorProtocolPart,
|
||||
SeleniumClickProtocolPart,
|
||||
SeleniumSendKeysProtocolPart,
|
||||
SeleniumTestDriverProtocolPart]
|
||||
|
||||
def __init__(self, executor, browser, capabilities, **kwargs):
|
||||
|
@ -236,7 +245,7 @@ class SeleniumRun(object):
|
|||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
self.result = False, ("ERROR", e)
|
||||
self.result = False, ("INTERNAL-ERROR", e)
|
||||
finally:
|
||||
self.result_flag.set()
|
||||
|
||||
|
|
|
@ -109,7 +109,7 @@ class ServoWebDriverRun(object):
|
|||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
self.result = False, ("ERROR", e)
|
||||
self.result = False, ("INTERNAL-ERROR", e)
|
||||
finally:
|
||||
self.result_flag.set()
|
||||
|
||||
|
@ -214,7 +214,7 @@ class ServoWebDriverRefTestExecutor(RefTestExecutor):
|
|||
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
return test.result_cls("ERROR", message), []
|
||||
return test.result_cls("INTERNAL-ERROR", message), []
|
||||
|
||||
def screenshot(self, test, viewport_size, dpi):
|
||||
# https://github.com/w3c/wptrunner/issues/166
|
||||
|
|
|
@ -259,6 +259,20 @@ class ClickProtocolPart(ProtocolPart):
|
|||
:param element: A protocol-specific handle to an element."""
|
||||
pass
|
||||
|
||||
class SendKeysProtocolPart(ProtocolPart):
|
||||
"""Protocol part for performing trusted clicks"""
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
name = "send_keys"
|
||||
|
||||
@abstractmethod
|
||||
def send_keys(self, element, keys):
|
||||
"""Send keys to a specific element.
|
||||
|
||||
:param element: A protocol-specific handle to an element.
|
||||
:param keys: A protocol-specific handle to a string of input keys."""
|
||||
pass
|
||||
|
||||
|
||||
class TestDriverProtocolPart(ProtocolPart):
|
||||
"""Protocol part that implements the basic functionality required for
|
||||
|
|
|
@ -44,7 +44,7 @@ def run(path, server_config, session_config, timeout=0):
|
|||
os.environ["WD_HOST"] = session_config["host"]
|
||||
os.environ["WD_PORT"] = str(session_config["port"])
|
||||
os.environ["WD_CAPABILITIES"] = json.dumps(session_config["capabilities"])
|
||||
os.environ["WD_SERVER_CONFIG"] = json.dumps(server_config)
|
||||
os.environ["WD_SERVER_CONFIG"] = json.dumps(server_config.as_dict())
|
||||
|
||||
harness = HarnessResultRecorder()
|
||||
subtests = SubtestResultRecorder()
|
||||
|
@ -62,7 +62,7 @@ def run(path, server_config, session_config, timeout=0):
|
|||
path],
|
||||
plugins=[harness, subtests])
|
||||
except Exception as e:
|
||||
harness.outcome = ("ERROR", str(e))
|
||||
harness.outcome = ("INTERNAL-ERROR", str(e))
|
||||
|
||||
return (harness.outcome, subtests.results)
|
||||
|
||||
|
|
|
@ -60,4 +60,14 @@
|
|||
window.opener.postMessage({"type": "action", "action": "click", "selector": selector}, "*");
|
||||
return pending_promise;
|
||||
};
|
||||
|
||||
window.test_driver_internal.send_keys = function(element, keys) {
|
||||
const selector = get_selector(element);
|
||||
const pending_promise = new Promise(function(resolve, reject) {
|
||||
pending_resolve = resolve;
|
||||
pending_reject = reject;
|
||||
});
|
||||
window.opener.postMessage({"type": "action", "action": "send_keys", "selector": selector, "keys": keys}, "*");
|
||||
return pending_promise;
|
||||
};
|
||||
})();
|
||||
|
|
|
@ -560,13 +560,18 @@ class TestRunnerManager(threading.Thread):
|
|||
expected=expected,
|
||||
stack=result.stack)
|
||||
|
||||
# TODO: consider changing result if there is a crash dump file
|
||||
|
||||
# Write the result of the test harness
|
||||
# We have a couple of status codes that are used internally, but not exposed to the
|
||||
# user. These are used to indicate that some possibly-broken state was reached
|
||||
# and we should restart the runner before the next test.
|
||||
# INTERNAL-ERROR indicates a Python exception was caught in the harness
|
||||
# EXTERNAL-TIMEOUT indicates we had to forcibly kill the browser from the harness
|
||||
# because the test didn't return a result after reaching the test-internal timeout
|
||||
status_subns = {"INTERNAL-ERROR": "ERROR",
|
||||
"EXTERNAL-TIMEOUT": "TIMEOUT"}
|
||||
expected = test.expected()
|
||||
status = file_result.status if file_result.status != "EXTERNAL-TIMEOUT" else "TIMEOUT"
|
||||
status = status_subns.get(file_result.status, file_result.status)
|
||||
|
||||
if file_result.status in ("TIMEOUT", "EXTERNAL-TIMEOUT"):
|
||||
if file_result.status in ("TIMEOUT", "EXTERNAL-TIMEOUT", "INTERNAL-ERROR"):
|
||||
if self.browser.check_for_crashes():
|
||||
status = "CRASH"
|
||||
|
||||
|
@ -585,7 +590,7 @@ class TestRunnerManager(threading.Thread):
|
|||
extra=file_result.extra)
|
||||
|
||||
restart_before_next = (test.restart_after or
|
||||
file_result.status in ("CRASH", "EXTERNAL-TIMEOUT") or
|
||||
file_result.status in ("CRASH", "EXTERNAL-TIMEOUT", "INTERNAL-ERROR") or
|
||||
((subtest_unexpected or is_unexpected) and
|
||||
self.restart_on_unexpected))
|
||||
|
||||
|
|
|
@ -8,6 +8,8 @@ sys.path.insert(0, join(dirname(__file__), "..", "..", ".."))
|
|||
|
||||
sauce = pytest.importorskip("wptrunner.browsers.sauce")
|
||||
|
||||
from wptserve.config import Config
|
||||
|
||||
|
||||
def test_sauceconnect_success():
|
||||
with mock.patch.object(sauce.SauceConnect, "upload_prerun_exec"),\
|
||||
|
@ -25,9 +27,7 @@ def test_sauceconnect_success():
|
|||
sauce_tunnel_id="ccc",
|
||||
sauce_connect_binary="ddd")
|
||||
|
||||
env_config = {
|
||||
"domains": {"": "example.net"}
|
||||
}
|
||||
env_config = Config(browser_host="example.net")
|
||||
sauce_connect(None, env_config)
|
||||
with sauce_connect:
|
||||
pass
|
||||
|
@ -56,9 +56,7 @@ def test_sauceconnect_failure_exit(readyfile, returncode):
|
|||
sauce_tunnel_id="ccc",
|
||||
sauce_connect_binary="ddd")
|
||||
|
||||
env_config = {
|
||||
"domains": {"": "example.net"}
|
||||
}
|
||||
env_config = Config(browser_host="example.net")
|
||||
sauce_connect(None, env_config)
|
||||
with pytest.raises(sauce.SauceException):
|
||||
with sauce_connect:
|
||||
|
@ -83,9 +81,7 @@ def test_sauceconnect_failure_never_ready():
|
|||
sauce_tunnel_id="ccc",
|
||||
sauce_connect_binary="ddd")
|
||||
|
||||
env_config = {
|
||||
"domains": {"": "example.net"}
|
||||
}
|
||||
env_config = Config(browser_host="example.net")
|
||||
sauce_connect(None, env_config)
|
||||
with pytest.raises(sauce.SauceException):
|
||||
with sauce_connect:
|
||||
|
@ -113,9 +109,9 @@ def test_sauceconnect_tunnel_domains():
|
|||
sauce_tunnel_id="ccc",
|
||||
sauce_connect_binary="ddd")
|
||||
|
||||
env_config = {
|
||||
"domains": {"foo": "foo.bar.example.com", "": "example.net"}
|
||||
}
|
||||
env_config = Config(browser_host="example.net",
|
||||
subdomains={"a", "b"},
|
||||
not_subdomains={"x", "y"})
|
||||
sauce_connect(None, env_config)
|
||||
with sauce_connect:
|
||||
Popen.assert_called_once()
|
||||
|
@ -127,4 +123,6 @@ def test_sauceconnect_tunnel_domains():
|
|||
assert len(rest) >= 1
|
||||
if len(rest) > 1:
|
||||
assert rest[1].startswith("-"), "--tunnel-domains takes a comma separated list (not a space separated list)"
|
||||
assert set(rest[0].split(",")) == {"foo.bar.example.com", "example.net"}
|
||||
assert set(rest[0].split(",")) == {'example.net',
|
||||
'a.example.net',
|
||||
'b.example.net'}
|
||||
|
|
|
@ -334,7 +334,7 @@ def set_from_config(kwargs):
|
|||
kwargs["test_paths"]["/"] = {}
|
||||
kwargs["test_paths"]["/"]["metadata_path"] = kwargs["metadata_root"]
|
||||
|
||||
if kwargs["manifest_path"]:
|
||||
if kwargs.get("manifest_path"):
|
||||
if "/" not in kwargs["test_paths"]:
|
||||
kwargs["test_paths"]["/"] = {}
|
||||
kwargs["test_paths"]["/"]["manifest_path"] = kwargs["manifest_path"]
|
||||
|
@ -342,6 +342,8 @@ def set_from_config(kwargs):
|
|||
kwargs["suite_name"] = kwargs["config"].get("web-platform-tests", {}).get("name", "web-platform-tests")
|
||||
|
||||
|
||||
check_paths(kwargs)
|
||||
|
||||
def get_test_paths(config):
|
||||
# Set up test_paths
|
||||
test_paths = OrderedDict()
|
||||
|
@ -400,8 +402,6 @@ def check_paths(kwargs):
|
|||
def check_args(kwargs):
|
||||
set_from_config(kwargs)
|
||||
|
||||
check_paths(kwargs)
|
||||
|
||||
if kwargs["product"] is None:
|
||||
kwargs["product"] = "firefox"
|
||||
|
||||
|
@ -493,8 +493,6 @@ def check_args(kwargs):
|
|||
def check_args_update(kwargs):
|
||||
set_from_config(kwargs)
|
||||
|
||||
check_paths(kwargs)
|
||||
|
||||
if kwargs["product"] is None:
|
||||
kwargs["product"] = "firefox"
|
||||
if kwargs["patch"] is None:
|
||||
|
|
|
@ -36,7 +36,7 @@ class SubtestResult(object):
|
|||
|
||||
class TestharnessResult(Result):
|
||||
default_expected = "OK"
|
||||
statuses = set(["OK", "ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"])
|
||||
statuses = set(["OK", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"])
|
||||
|
||||
|
||||
class TestharnessSubtestResult(SubtestResult):
|
||||
|
@ -46,12 +46,13 @@ class TestharnessSubtestResult(SubtestResult):
|
|||
|
||||
class ReftestResult(Result):
|
||||
default_expected = "PASS"
|
||||
statuses = set(["PASS", "FAIL", "ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"])
|
||||
statuses = set(["PASS", "FAIL", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT",
|
||||
"CRASH"])
|
||||
|
||||
|
||||
class WdspecResult(Result):
|
||||
default_expected = "OK"
|
||||
statuses = set(["OK", "ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"])
|
||||
statuses = set(["OK", "ERROR", "INTERNAL-ERROR", "TIMEOUT", "EXTERNAL-TIMEOUT", "CRASH"])
|
||||
|
||||
|
||||
class WdspecSubtestResult(SubtestResult):
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
md5: {{file_hash(md5, sub_file_hash_subject.txt)}}
|
||||
sha1: {{file_hash(sha1, sub_file_hash_subject.txt)}}
|
||||
sha224: {{file_hash(sha224, sub_file_hash_subject.txt)}}
|
||||
sha256: {{file_hash(sha256, sub_file_hash_subject.txt)}}
|
||||
sha384: {{file_hash(sha384, sub_file_hash_subject.txt)}}
|
||||
sha512: {{file_hash(sha512, sub_file_hash_subject.txt)}}
|
|
@ -0,0 +1,2 @@
|
|||
This file is used to verify expected behavior of the `file_hash` "sub"
|
||||
function.
|
|
@ -0,0 +1 @@
|
|||
{{file_hash(sha007, sub_file_hash_subject.txt)}}
|
|
@ -0,0 +1,8 @@
|
|||
host: {{location[host]}}
|
||||
hostname: {{location[hostname]}}
|
||||
path: {{location[path]}}
|
||||
pathname: {{location[pathname]}}
|
||||
port: {{location[port]}}
|
||||
query: {{location[query]}}
|
||||
scheme: {{location[scheme]}}
|
||||
server: {{location[server]}}
|
|
@ -0,0 +1 @@
|
|||
Before {{url_base}} After
|
|
@ -0,0 +1 @@
|
|||
Before {{uuid()}} After
|
|
@ -0,0 +1 @@
|
|||
{{$first:host}} {{$second:ports[http][0]}} A {{$second}} B {{$first}} C
|
|
@ -0,0 +1,2 @@
|
|||
def module_function():
|
||||
return [("Content-Type", "text/plain")], "PASS"
|
|
@ -0,0 +1,5 @@
|
|||
import example_module
|
||||
|
||||
|
||||
def main(request, response):
|
||||
return example_module.module_function()
|
|
@ -1,5 +1,6 @@
|
|||
import json
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
|
@ -228,6 +229,7 @@ class TestJSONHandler(TestUsingServer):
|
|||
self.assertEqual("test-value", resp.info()["test-header"])
|
||||
self.assertEqual({"data": "test data"}, json.load(resp))
|
||||
|
||||
|
||||
class TestPythonHandler(TestUsingServer):
|
||||
def test_string(self):
|
||||
resp = self.request("/test_string.py")
|
||||
|
@ -250,6 +252,17 @@ class TestPythonHandler(TestUsingServer):
|
|||
self.assertEqual("PASS", resp.info()["X-Test"])
|
||||
self.assertEqual("PASS", resp.read())
|
||||
|
||||
def test_import(self):
|
||||
dir_name = os.path.join(doc_root, "subdir")
|
||||
assert dir_name not in sys.path
|
||||
assert "test_module" not in sys.modules
|
||||
resp = self.request("/subdir/import_handler.py")
|
||||
assert dir_name not in sys.path
|
||||
assert "test_module" not in sys.modules
|
||||
self.assertEqual(200, resp.getcode())
|
||||
self.assertEqual("text/plain", resp.info()["Content-Type"])
|
||||
self.assertEqual("PASS", resp.read())
|
||||
|
||||
def test_no_main(self):
|
||||
with pytest.raises(HTTPError) as cm:
|
||||
self.request("/no_main.py")
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import os
|
||||
import unittest
|
||||
import urllib2
|
||||
import time
|
||||
import json
|
||||
|
||||
|
@ -58,16 +59,59 @@ class TestSub(TestUsingServer):
|
|||
expected = "localhost localhost %i" % self.server.port
|
||||
self.assertEqual(resp.read().rstrip(), expected)
|
||||
|
||||
def test_sub_file_hash(self):
|
||||
resp = self.request("/sub_file_hash.sub.txt")
|
||||
expected = """
|
||||
md5: JmI1W8fMHfSfCarYOSxJcw==
|
||||
sha1: nqpWqEw4IW8NjD6R375gtrQvtTo=
|
||||
sha224: RqQ6fMmta6n9TuA/vgTZK2EqmidqnrwBAmQLRQ==
|
||||
sha256: G6Ljg1uPejQxqFmvFOcV/loqnjPTW5GSOePOfM/u0jw=
|
||||
sha384: lkXHChh1BXHN5nT5BYhi1x67E1CyYbPKRKoF2LTm5GivuEFpVVYtvEBHtPr74N9E
|
||||
sha512: r8eLGRTc7ZznZkFjeVLyo6/FyQdra9qmlYCwKKxm3kfQAswRS9+3HsYk3thLUhcFmmWhK4dXaICz
|
||||
JwGFonfXwg=="""
|
||||
self.assertEqual(resp.read().rstrip(), expected.strip())
|
||||
|
||||
def test_sub_file_hash_unrecognized(self):
|
||||
with self.assertRaises(urllib2.HTTPError):
|
||||
self.request("/sub_file_hash_unrecognized.sub.txt")
|
||||
|
||||
def test_sub_headers(self):
|
||||
resp = self.request("/sub_headers.txt", query="pipe=sub", headers={"X-Test": "PASS"})
|
||||
expected = "PASS"
|
||||
self.assertEqual(resp.read().rstrip(), expected)
|
||||
|
||||
def test_sub_location(self):
|
||||
resp = self.request("/sub_location.sub.txt?query_string")
|
||||
expected = """
|
||||
host: localhost:{0}
|
||||
hostname: localhost
|
||||
path: /sub_location.sub.txt
|
||||
pathname: /sub_location.sub.txt
|
||||
port: {0}
|
||||
query: ?query_string
|
||||
scheme: http
|
||||
server: http://localhost:{0}""".format(self.server.port)
|
||||
self.assertEqual(resp.read().rstrip(), expected.strip())
|
||||
|
||||
def test_sub_params(self):
|
||||
resp = self.request("/sub_params.txt", query="test=PASS&pipe=sub")
|
||||
expected = "PASS"
|
||||
self.assertEqual(resp.read().rstrip(), expected)
|
||||
|
||||
def test_sub_url_base(self):
|
||||
resp = self.request("/sub_url_base.sub.txt")
|
||||
self.assertEqual(resp.read().rstrip(), "Before / After")
|
||||
|
||||
def test_sub_uuid(self):
|
||||
resp = self.request("/sub_uuid.sub.txt")
|
||||
self.assertRegexpMatches(resp.read().rstrip(), r"Before [a-f0-9-]+ After")
|
||||
|
||||
def test_sub_var(self):
|
||||
resp = self.request("/sub_var.sub.txt")
|
||||
port = self.server.port
|
||||
expected = "localhost %s A %s B localhost C" % (port, port)
|
||||
self.assertEqual(resp.read().rstrip(), expected)
|
||||
|
||||
class TestTrickle(TestUsingServer):
|
||||
def test_trickle(self):
|
||||
#Actually testing that the response trickles in is not that easy
|
||||
|
|
346
tests/wpt/web-platform-tests/tools/wptserve/tests/test_config.py
Normal file
346
tests/wpt/web-platform-tests/tools/wptserve/tests/test_config.py
Normal file
|
@ -0,0 +1,346 @@
|
|||
import logging
|
||||
import os
|
||||
import pickle
|
||||
from logging import handlers
|
||||
|
||||
import pytest
|
||||
|
||||
import localpaths
|
||||
|
||||
config = pytest.importorskip("wptserve.config")
|
||||
|
||||
|
||||
def test_renamed_are_renamed():
|
||||
assert len(set(config._renamed_props.viewkeys()) & set(config.Config._default.viewkeys())) == 0
|
||||
|
||||
|
||||
def test_renamed_exist():
|
||||
assert set(config._renamed_props.viewvalues()).issubset(set(config.Config._default.viewkeys()))
|
||||
|
||||
|
||||
@pytest.mark.parametrize("base, override, expected", [
|
||||
({"a": 1}, {"a": 2}, {"a": 2}),
|
||||
({"a": 1}, {"b": 2}, {"a": 1}),
|
||||
({"a": {"b": 1}}, {"a": {}}, {"a": {"b": 1}}),
|
||||
({"a": {"b": 1}}, {"a": {"b": 2}}, {"a": {"b": 2}}),
|
||||
({"a": {"b": 1}}, {"a": {"b": 2, "c": 3}}, {"a": {"b": 2}}),
|
||||
pytest.param({"a": {"b": 1}}, {"a": 2}, {"a": 1}, marks=pytest.mark.xfail),
|
||||
pytest.param({"a": 1}, {"a": {"b": 2}}, {"a": 1}, marks=pytest.mark.xfail),
|
||||
])
|
||||
def test_merge_dict(base, override, expected):
|
||||
assert expected == config._merge_dict(base, override)
|
||||
|
||||
|
||||
def test_logger_created():
|
||||
c = config.Config()
|
||||
assert c.logger is not None
|
||||
|
||||
|
||||
def test_logger_preserved():
|
||||
logger = logging.getLogger("test_logger_preserved")
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
c = config.Config(logger=logger)
|
||||
assert c.logger is logger
|
||||
|
||||
|
||||
def test_init_basic_prop():
|
||||
c = config.Config(browser_host="foo.bar")
|
||||
assert c.browser_host == "foo.bar"
|
||||
|
||||
|
||||
def test_init_prefixed_prop():
|
||||
c = config.Config(doc_root="/")
|
||||
assert c._doc_root == "/"
|
||||
|
||||
|
||||
def test_init_renamed_host():
|
||||
logger = logging.getLogger("test_init_renamed_host")
|
||||
logger.setLevel(logging.DEBUG)
|
||||
handler = handlers.BufferingHandler(100)
|
||||
logger.addHandler(handler)
|
||||
|
||||
c = config.Config(logger=logger, host="foo.bar")
|
||||
assert c.logger is logger
|
||||
assert len(handler.buffer) == 1
|
||||
assert "browser_host" in handler.buffer[0].getMessage() # check we give the new name in the message
|
||||
assert not hasattr(c, "host")
|
||||
assert c.browser_host == "foo.bar"
|
||||
|
||||
|
||||
def test_init_bogus():
|
||||
with pytest.raises(TypeError) as e:
|
||||
config.Config(foo=1, bar=2)
|
||||
assert "foo" in e.value.message
|
||||
assert "bar" in e.value.message
|
||||
|
||||
|
||||
def test_getitem():
|
||||
c = config.Config(browser_host="foo.bar")
|
||||
assert c["browser_host"] == "foo.bar"
|
||||
|
||||
|
||||
def test_no_setitem():
|
||||
c = config.Config()
|
||||
with pytest.raises(TypeError):
|
||||
c["browser_host"] = "foo.bar"
|
||||
|
||||
|
||||
def test_iter():
|
||||
c = config.Config()
|
||||
s = set(iter(c))
|
||||
assert "browser_host" in s
|
||||
assert "host" not in s
|
||||
assert "__getitem__" not in s
|
||||
assert "_browser_host" not in s
|
||||
|
||||
|
||||
def test_assignment():
|
||||
c = config.Config()
|
||||
c.browser_host = "foo.bar"
|
||||
assert c.browser_host == "foo.bar"
|
||||
|
||||
|
||||
def test_update_basic():
|
||||
c = config.Config()
|
||||
c.update({"browser_host": "foo.bar"})
|
||||
assert c.browser_host == "foo.bar"
|
||||
|
||||
|
||||
def test_update_prefixed():
|
||||
c = config.Config()
|
||||
c.update({"doc_root": "/"})
|
||||
assert c._doc_root == "/"
|
||||
|
||||
|
||||
def test_update_renamed_host():
|
||||
logger = logging.getLogger("test_update_renamed_host")
|
||||
logger.setLevel(logging.DEBUG)
|
||||
handler = handlers.BufferingHandler(100)
|
||||
logger.addHandler(handler)
|
||||
|
||||
c = config.Config(logger=logger)
|
||||
assert c.logger is logger
|
||||
assert len(handler.buffer) == 0
|
||||
|
||||
c.update({"host": "foo.bar"})
|
||||
|
||||
assert len(handler.buffer) == 1
|
||||
assert "browser_host" in handler.buffer[0].getMessage() # check we give the new name in the message
|
||||
assert not hasattr(c, "host")
|
||||
assert c.browser_host == "foo.bar"
|
||||
|
||||
|
||||
def test_update_bogus():
|
||||
c = config.Config()
|
||||
with pytest.raises(KeyError):
|
||||
c.update({"foobar": 1})
|
||||
|
||||
|
||||
def test_ports_auto():
|
||||
c = config.Config(ports={"http": ["auto"]},
|
||||
ssl={"type": "none"})
|
||||
ports = c.ports
|
||||
assert set(ports.keys()) == {"http"}
|
||||
assert len(ports["http"]) == 1
|
||||
assert isinstance(ports["http"][0], int)
|
||||
|
||||
|
||||
def test_ports_auto_mutate():
|
||||
c = config.Config(ports={"http": [1001]},
|
||||
ssl={"type": "none"})
|
||||
orig_ports = c.ports
|
||||
assert set(orig_ports.keys()) == {"http"}
|
||||
assert orig_ports["http"] == [1001]
|
||||
|
||||
c.ports = {"http": ["auto"]}
|
||||
new_ports = c.ports
|
||||
assert set(new_ports.keys()) == {"http"}
|
||||
assert len(new_ports["http"]) == 1
|
||||
assert isinstance(new_ports["http"][0], int)
|
||||
|
||||
|
||||
def test_ports_auto_roundtrip():
|
||||
c = config.Config(ports={"http": ["auto"]},
|
||||
ssl={"type": "none"})
|
||||
old_ports = c.ports
|
||||
c.ports = old_ports
|
||||
new_ports = c.ports
|
||||
assert old_ports == new_ports
|
||||
|
||||
|
||||
def test_ports_idempotent():
|
||||
c = config.Config(ports={"http": ["auto"]},
|
||||
ssl={"type": "none"})
|
||||
ports_a = c.ports
|
||||
ports_b = c.ports
|
||||
assert ports_a == ports_b
|
||||
|
||||
|
||||
def test_ports_explicit():
|
||||
c = config.Config(ports={"http": [1001]},
|
||||
ssl={"type": "none"})
|
||||
ports = c.ports
|
||||
assert set(ports.keys()) == {"http"}
|
||||
assert ports["http"] == [1001]
|
||||
|
||||
|
||||
def test_ports_no_ssl():
|
||||
c = config.Config(ports={"http": [1001], "https": [1002], "ws": [1003], "wss": [1004]},
|
||||
ssl={"type": "none"})
|
||||
ports = c.ports
|
||||
assert set(ports.keys()) == {"http", "https", "ws", "wss"}
|
||||
assert ports["http"] == [1001]
|
||||
assert ports["https"] == [None]
|
||||
assert ports["ws"] == [1003]
|
||||
assert ports["wss"] == [None]
|
||||
|
||||
|
||||
def test_ports_openssl():
|
||||
c = config.Config(ports={"http": [1001], "https": [1002], "ws": [1003], "wss": [1004]},
|
||||
ssl={"type": "openssl"})
|
||||
ports = c.ports
|
||||
assert set(ports.keys()) == {"http", "https", "ws", "wss"}
|
||||
assert ports["http"] == [1001]
|
||||
assert ports["https"] == [1002]
|
||||
assert ports["ws"] == [1003]
|
||||
assert ports["wss"] == [1004]
|
||||
|
||||
|
||||
def test_doc_root_default():
|
||||
c = config.Config()
|
||||
assert c.doc_root == localpaths.repo_root
|
||||
|
||||
|
||||
def test_init_doc_root():
|
||||
c = config.Config(doc_root="/")
|
||||
assert c._doc_root == "/"
|
||||
assert c.doc_root == "/"
|
||||
|
||||
|
||||
def test_set_doc_root():
|
||||
c = config.Config()
|
||||
c.doc_root = "/"
|
||||
assert c._doc_root == "/"
|
||||
assert c.doc_root == "/"
|
||||
|
||||
|
||||
def test_ws_doc_root_default():
|
||||
c = config.Config()
|
||||
assert c.ws_doc_root == os.path.join(localpaths.repo_root, "websockets", "handlers")
|
||||
|
||||
|
||||
def test_ws_doc_root_from_doc_root():
|
||||
c = config.Config(doc_root="/foo")
|
||||
assert c.ws_doc_root == os.path.join("/foo", "websockets", "handlers")
|
||||
|
||||
|
||||
def test_init_ws_doc_root():
|
||||
c = config.Config(ws_doc_root="/")
|
||||
assert c.doc_root == localpaths.repo_root # check this hasn't changed
|
||||
assert c._ws_doc_root == "/"
|
||||
assert c.ws_doc_root == "/"
|
||||
|
||||
|
||||
def test_set_ws_doc_root():
|
||||
c = config.Config()
|
||||
c.ws_doc_root = "/"
|
||||
assert c.doc_root == localpaths.repo_root # check this hasn't changed
|
||||
assert c._ws_doc_root == "/"
|
||||
assert c.ws_doc_root == "/"
|
||||
|
||||
|
||||
def test_server_host_from_browser_host():
|
||||
c = config.Config(browser_host="foo.bar")
|
||||
assert c.server_host == "foo.bar"
|
||||
|
||||
|
||||
def test_init_server_host():
|
||||
c = config.Config(server_host="foo.bar")
|
||||
assert c.browser_host == "web-platform.test" # check this hasn't changed
|
||||
assert c._server_host == "foo.bar"
|
||||
assert c.server_host == "foo.bar"
|
||||
|
||||
|
||||
def test_set_server_host():
|
||||
c = config.Config()
|
||||
c.server_host = "/"
|
||||
assert c.browser_host == "web-platform.test" # check this hasn't changed
|
||||
assert c._server_host == "/"
|
||||
assert c.server_host == "/"
|
||||
|
||||
|
||||
def test_domains():
|
||||
c = config.Config(browser_host="foo.bar",
|
||||
subdomains={"a", "b"},
|
||||
not_subdomains={"x", "y"})
|
||||
domains = c.domains
|
||||
assert domains == {
|
||||
"": "foo.bar",
|
||||
"a": "a.foo.bar",
|
||||
"b": "b.foo.bar",
|
||||
}
|
||||
|
||||
|
||||
def test_not_domains():
|
||||
c = config.Config(browser_host="foo.bar",
|
||||
subdomains={"a", "b"},
|
||||
not_subdomains={"x", "y"})
|
||||
not_domains = c.not_domains
|
||||
assert not_domains == {
|
||||
"x": "x.foo.bar",
|
||||
"y": "y.foo.bar",
|
||||
}
|
||||
|
||||
|
||||
def test_domains_not_domains_intersection():
|
||||
c = config.Config(browser_host="foo.bar",
|
||||
subdomains={"a", "b"},
|
||||
not_subdomains={"x", "y"})
|
||||
domains = c.domains
|
||||
not_domains = c.not_domains
|
||||
assert len(set(domains.iterkeys()) & set(not_domains.iterkeys())) == 0
|
||||
assert len(set(domains.itervalues()) & set(not_domains.itervalues())) == 0
|
||||
|
||||
|
||||
def test_all_domains():
|
||||
c = config.Config(browser_host="foo.bar",
|
||||
subdomains={"a", "b"},
|
||||
not_subdomains={"x", "y"})
|
||||
all_domains = c.all_domains
|
||||
assert all_domains == {
|
||||
"": "foo.bar",
|
||||
"a": "a.foo.bar",
|
||||
"b": "b.foo.bar",
|
||||
"x": "x.foo.bar",
|
||||
"y": "y.foo.bar",
|
||||
}
|
||||
|
||||
|
||||
def test_ssl_env_override():
|
||||
c = config.Config(override_ssl_env="foobar")
|
||||
assert c.ssl_env == "foobar"
|
||||
|
||||
|
||||
def test_ssl_env_none():
|
||||
c = config.Config(ssl={"type": "none"})
|
||||
assert c.ssl_env is not None
|
||||
assert c.ssl_env.ssl_enabled is False
|
||||
|
||||
|
||||
def test_ssl_env_openssl():
|
||||
c = config.Config(ssl={"type": "openssl", "openssl": {"openssl_binary": "foobar"}})
|
||||
assert c.ssl_env is not None
|
||||
assert c.ssl_env.ssl_enabled is True
|
||||
assert c.ssl_env.binary == "foobar"
|
||||
|
||||
|
||||
def test_ssl_env_bogus():
|
||||
c = config.Config(ssl={"type": "foobar"})
|
||||
with pytest.raises(ValueError):
|
||||
c.ssl_env
|
||||
|
||||
|
||||
def test_pickle():
|
||||
# Ensure that the config object can be pickled
|
||||
pickle.dumps(config.Config())
|
|
@ -0,0 +1,37 @@
|
|||
import pytest
|
||||
|
||||
from wptserve.pipes import ReplacementTokenizer
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"content,expected",
|
||||
[
|
||||
["aaa", [('ident', 'aaa')]],
|
||||
["bbb()", [('ident', 'bbb'), ('arguments', [])]],
|
||||
["$ccc:ddd", [('var', '$ccc'), ('ident', 'ddd')]],
|
||||
["$eee", [('ident', '$eee')]],
|
||||
["fff[0]", [('ident', 'fff'), ('index', 0)]],
|
||||
["ggg[hhh]", [('ident', 'ggg'), ('index', u'hhh')]],
|
||||
["[iii]", [('index', u'iii')]],
|
||||
["jjj['kkk']", [('ident', 'jjj'), ('index', u"'kkk'")]],
|
||||
["lll[]", [('ident', 'lll'), ('index', u"")]],
|
||||
["111", [('ident', u'111')]],
|
||||
["$111", [('ident', u'$111')]],
|
||||
]
|
||||
)
|
||||
def test_tokenizer(content, expected):
|
||||
tokenizer = ReplacementTokenizer()
|
||||
tokens = tokenizer.tokenize(content)
|
||||
assert expected == tokens
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"content,expected",
|
||||
[
|
||||
["/", []],
|
||||
["$aaa: BBB", [('var', '$aaa')]],
|
||||
]
|
||||
)
|
||||
def test_tokenizer_errors(content, expected):
|
||||
tokenizer = ReplacementTokenizer()
|
||||
tokens = tokenizer.tokenize(content)
|
||||
assert expected == tokens
|
245
tests/wpt/web-platform-tests/tools/wptserve/wptserve/config.py
Normal file
245
tests/wpt/web-platform-tests/tools/wptserve/wptserve/config.py
Normal file
|
@ -0,0 +1,245 @@
|
|||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from collections import defaultdict, Mapping
|
||||
|
||||
import sslutils
|
||||
|
||||
from localpaths import repo_root
|
||||
|
||||
from .utils import get_port
|
||||
|
||||
|
||||
# Deprecated config-key names mapped to their current replacements; used to
# warn about and translate old-style keyword arguments / overrides.
_renamed_props = {
    "host": "browser_host",
    "bind_hostname": "bind_address",
    "external_host": "server_host",
    "host_ip": "server_host",
}
|
||||
|
||||
|
||||
def _merge_dict(base_dict, override_dict):
|
||||
rv = base_dict.copy()
|
||||
for key, value in base_dict.iteritems():
|
||||
if key in override_dict:
|
||||
if isinstance(value, dict):
|
||||
rv[key] = _merge_dict(value, override_dict[key])
|
||||
else:
|
||||
rv[key] = override_dict[key]
|
||||
return rv
|
||||
|
||||
|
||||
class Config(Mapping):
    """wptserve config.

    Inherits from Mapping for backwards compatibility with the old
    dict-based config.

    Defaults are loaded once, at class-definition time, from
    config.default.json in the repository root; keyword arguments to
    __init__ override individual defaults.
    """

    with open(os.path.join(repo_root, "config.default.json"), "rb") as _fp:
        _default = json.load(_fp)

    def __init__(self,
                 logger=None,
                 subdomains=None,
                 not_subdomains=None,
                 **kwargs):
        # None sentinels instead of mutable default arguments (set()) so a
        # shared default set can never leak between Config instances.
        if subdomains is None:
            subdomains = set()
        if not_subdomains is None:
            not_subdomains = set()

        self.log_level = kwargs.get("log_level", "DEBUG")

        if logger is None:
            self._logger_name = "web-platform-tests"
        else:
            # Inherit the level from an explicitly supplied logger, unless
            # the logger has no level set.
            level_name = logging.getLevelName(logger.level)
            if level_name != "NOTSET":
                self.log_level = level_name
            self._logger_name = logger.name

        # Every key in the defaults file becomes an attribute, overridable
        # via a keyword argument of the same name.
        for k, v in self._default.items():
            setattr(self, k, kwargs.pop(k, v))

        self.subdomains = subdomains
        self.not_subdomains = not_subdomains

        # Accept deprecated option names, warning and translating them to
        # the current names.
        for k, new_k in _renamed_props.items():
            if k in kwargs:
                self.logger.warning(
                    "%s in config is deprecated; use %s instead" % (
                        k,
                        new_k
                    )
                )
                setattr(self, new_k, kwargs.pop(k))

        self.override_ssl_env = kwargs.pop("override_ssl_env", None)

        if kwargs:
            raise TypeError("__init__() got unexpected keyword arguments %r" % (tuple(kwargs),))

    def __getitem__(self, k):
        try:
            return getattr(self, k)
        except AttributeError:
            raise KeyError(k)

    def __iter__(self):
        return iter([x for x in dir(self) if not x.startswith("_")])

    def __len__(self):
        return len([x for x in dir(self) if not x.startswith("_")])

    def update(self, override):
        """Load an overrides dict to override config values"""
        override = override.copy()

        for k in self._default:
            if k in override:
                self._set_override(k, override.pop(k))

        for k, new_k in _renamed_props.items():
            if k in override:
                self.logger.warning(
                    "%s in config is deprecated; use %s instead" % (
                        k,
                        new_k
                    )
                )
                self._set_override(new_k, override.pop(k))

        if override:
            k = next(iter(override))
            raise KeyError("unknown config override '%s'" % k)

    def _set_override(self, k, v):
        # Dict-valued settings are merged recursively rather than replaced
        # wholesale, so partial overrides keep the remaining defaults.
        old_v = getattr(self, k)
        if isinstance(old_v, dict):
            setattr(self, k, _merge_dict(old_v, v))
        else:
            setattr(self, k, v)

    @property
    def ports(self):
        # Resolve "auto" ports lazily, re-using previously allocated ports
        # where possible so repeated accesses return stable values.
        try:
            old_ports = self._computed_ports
        except AttributeError:
            old_ports = {}

        self._computed_ports = defaultdict(list)

        for scheme, ports in self._ports.items():
            for i, port in enumerate(ports):
                # TLS-based schemes get no port at all when SSL is disabled.
                if scheme in ["wss", "https"] and not self.ssl_env.ssl_enabled:
                    port = None
                if port == "auto":
                    try:
                        port = old_ports[scheme][i]
                    except (KeyError, IndexError):
                        port = get_port(self.server_host)
                self._computed_ports[scheme].append(port)

        return self._computed_ports

    @ports.setter
    def ports(self, v):
        self._ports = v

    @property
    def doc_root(self):
        return self._doc_root if self._doc_root is not None else repo_root

    @doc_root.setter
    def doc_root(self, v):
        self._doc_root = v

    @property
    def ws_doc_root(self):
        if self._ws_doc_root is not None:
            return self._ws_doc_root
        else:
            return os.path.join(self.doc_root, "websockets", "handlers")

    @ws_doc_root.setter
    def ws_doc_root(self, v):
        self._ws_doc_root = v

    @property
    def server_host(self):
        return self._server_host if self._server_host is not None else self.browser_host

    @server_host.setter
    def server_host(self, v):
        self._server_host = v

    @property
    def domains(self):
        """Mapping of subdomain label to fully-qualified domain; "" maps to
        the bare browser host."""
        # NOTE(review): str.encode("idna") comparing equal to the host
        # assumes a Python 2 byte string; confirm behaviour under Python 3.
        assert self.browser_host.encode("idna") == self.browser_host
        domains = {subdomain: (subdomain.encode("idna") + u"." + self.browser_host)
                   for subdomain in self.subdomains}
        domains[""] = self.browser_host
        return domains

    @property
    def not_domains(self):
        """Mapping of excluded subdomain label to fully-qualified domain."""
        assert self.browser_host.encode("idna") == self.browser_host
        domains = {subdomain: (subdomain.encode("idna") + u"." + self.browser_host)
                   for subdomain in self.not_subdomains}
        return domains

    @property
    def all_domains(self):
        domains = self.domains.copy()
        domains.update(self.not_domains)
        return domains

    @property
    def ssl_env(self):
        """The sslutils environment selected by self.ssl["type"].

        Raises ValueError for an unknown ssl type.
        """
        try:
            if self.override_ssl_env is not None:
                return self.override_ssl_env
        except AttributeError:
            pass

        implementation_type = self.ssl["type"]

        try:
            cls = sslutils.environments[implementation_type]
        except KeyError:
            # Typo fix: message previously read "vaid".
            raise ValueError("%s is not a valid ssl type." % implementation_type)
        kwargs = self.ssl.get(implementation_type, {}).copy()
        return cls(self.logger, **kwargs)

    @property
    def paths(self):
        return {"doc_root": self.doc_root,
                "ws_doc_root": self.ws_doc_root}

    @property
    def ssl_config(self):
        key_path, cert_path = self.ssl_env.host_cert_path(self.domains.values())
        return {"key_path": key_path,
                "cert_path": cert_path,
                "encrypt_after_connect": self.ssl["encrypt_after_connect"]}

    @property
    def log_level(self):
        # Stored as an upper-cased name; returned as the numeric logging level.
        return getattr(logging, self._log_level)

    @log_level.setter
    def log_level(self, value):
        self._log_level = value.upper()

    @property
    def logger(self):
        logger = logging.getLogger(self._logger_name)
        logger.setLevel(self.log_level)
        return logger

    def as_dict(self):
        """Return a plain-dict snapshot of the config."""
        rv = {
            "domains": list(self.domains),
            # Typo fix: this key was previously misspelled "sundomains".
            "subdomains": list(self.subdomains),
        }
        for item in self._default:
            rv[item] = getattr(self, item)
        return rv
|
|
@ -1,6 +1,7 @@
|
|||
import cgi
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from six.moves.urllib.parse import parse_qs, quote, unquote, urljoin
|
||||
|
@ -231,8 +232,11 @@ class PythonScriptHandler(object):
|
|||
def __call__(self, request, response):
|
||||
path = filesystem_path(self.base_path, request, self.url_base)
|
||||
|
||||
sys_path = sys.path[:]
|
||||
sys_modules = sys.modules.copy()
|
||||
try:
|
||||
environ = {"__file__": path}
|
||||
sys.path.insert(0, os.path.dirname(path))
|
||||
execfile(path, environ, environ)
|
||||
if "main" in environ:
|
||||
handler = FunctionHandler(environ["main"])
|
||||
|
@ -242,6 +246,10 @@ class PythonScriptHandler(object):
|
|||
raise HTTPException(500, "No main function in script %s" % path)
|
||||
except IOError:
|
||||
raise HTTPException(404)
|
||||
finally:
|
||||
sys.path = sys_path
|
||||
sys.modules = sys_modules
|
||||
|
||||
|
||||
python_script_handler = PythonScriptHandler()
|
||||
|
||||
|
@ -252,6 +260,8 @@ class FunctionHandler(object):
|
|||
def __call__(self, request, response):
|
||||
try:
|
||||
rv = self.func(request, response)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception:
|
||||
msg = traceback.format_exc()
|
||||
raise HTTPException(500, message=msg)
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
from cgi import escape
|
||||
import gzip as gzip_module
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import types
|
||||
|
@ -277,6 +279,10 @@ def slice(request, response, start, end=None):
|
|||
|
||||
|
||||
class ReplacementTokenizer(object):
|
||||
def arguments(self, token):
|
||||
unwrapped = token[1:-1]
|
||||
return ("arguments", re.split(r",\s*", token[1:-1]) if unwrapped else [])
|
||||
|
||||
def ident(self, token):
|
||||
return ("ident", token)
|
||||
|
||||
|
@ -296,8 +302,9 @@ class ReplacementTokenizer(object):
|
|||
return self.scanner.scan(string)[0]
|
||||
|
||||
scanner = re.Scanner([(r"\$\w+:", var),
|
||||
(r"\$?\w+(?:\(\))?", ident),
|
||||
(r"\[[^\]]*\]", index)])
|
||||
(r"\$?\w+", ident),
|
||||
(r"\[[^\]]*\]", index),
|
||||
(r"\([^)]*\)", arguments)])
|
||||
|
||||
|
||||
class FirstWrapper(object):
|
||||
|
@ -339,6 +346,11 @@ def sub(request, response, escape_type="html"):
|
|||
A dictionary of query parameters supplied with the request.
|
||||
uuid()
|
||||
A pseudo-random UUID suitable for usage with stash
|
||||
file_hash(algorithm, filepath)
|
||||
The cryptographic hash of a file. Supported algorithms: md5, sha1,
|
||||
sha224, sha256, sha384, and sha512. For example:
|
||||
|
||||
{{file_hash(md5, dom/interfaces.html)}}
|
||||
|
||||
So for example in a setup running on localhost with a www
|
||||
subdomain and a http server on ports 80 and 81::
|
||||
|
@ -351,7 +363,7 @@ def sub(request, response, escape_type="html"):
|
|||
It is also possible to assign a value to a variable name, which must start with
|
||||
the $ character, using the ":" syntax e.g.
|
||||
|
||||
{{$id:uuid()}
|
||||
{{$id:uuid()}}
|
||||
|
||||
Later substitutions in the same file may then refer to the variable
|
||||
by name e.g.
|
||||
|
@ -365,6 +377,39 @@ def sub(request, response, escape_type="html"):
|
|||
response.content = new_content
|
||||
return response
|
||||
|
||||
class SubFunctions(object):
    """Functions callable from {{...}} template substitutions."""

    @staticmethod
    def uuid(request):
        """Return a pseudo-random UUID string."""
        return str(uuid.uuid4())

    # Maintain a whitelist of supported algorithms, restricted to those that
    # are available on all platforms [1]. This ensures that test authors do not
    # unknowingly introduce platform-specific tests.
    #
    # [1] https://docs.python.org/2/library/hashlib.html
    supported_algorithms = ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")

    @staticmethod
    def file_hash(request, algorithm, path):
        """Return the base64-encoded hash of the file at *path*.

        *path* is resolved relative to request.doc_root.  Raises ValueError
        for an unsupported algorithm.
        """
        import base64

        if algorithm not in SubFunctions.supported_algorithms:
            raise ValueError("Unsupported encryption algorithm: '%s'" % algorithm)

        hash_obj = getattr(hashlib, algorithm)()
        absolute_path = os.path.join(request.doc_root, path)

        try:
            # Open in binary mode so the digest covers the raw bytes and is
            # unaffected by platform newline translation.
            with open(absolute_path, "rb") as f:
                hash_obj.update(f.read())
        except IOError:
            # In this context, an unhandled IOError will be interpreted by the
            # server as an indication that the template file is non-existent.
            # Although the generic "Exception" is less precise, it avoids
            # triggering a potentially-confusing HTTP 404 error in cases where
            # the path to the file to be hashed is invalid.
            raise Exception('Cannot open file for hash computation: "%s"' % absolute_path)

        # bytes.encode('base64') is Python-2-only; base64.b64encode works on
        # both Python 2 and 3 and adds no trailing newline.
        return base64.b64encode(hash_obj.digest()).decode("ascii")
|
||||
|
||||
def template(request, content, escape_type="html"):
|
||||
#TODO: There basically isn't any error handling here
|
||||
tokenizer = ReplacementTokenizer()
|
||||
|
@ -382,12 +427,15 @@ def template(request, content, escape_type="html"):
|
|||
else:
|
||||
variable = None
|
||||
|
||||
assert tokens[0][0] == "ident" and all(item[0] == "index" for item in tokens[1:]), tokens
|
||||
assert tokens[0][0] == "ident", tokens
|
||||
assert all(item[0] in ("index", "arguments") for item in tokens[1:]), tokens
|
||||
|
||||
field = tokens[0][1]
|
||||
|
||||
if field in variables:
|
||||
value = variables[field]
|
||||
elif hasattr(SubFunctions, field):
|
||||
value = getattr(SubFunctions, field)
|
||||
elif field == "headers":
|
||||
value = request.headers
|
||||
elif field == "GET":
|
||||
|
@ -414,15 +462,16 @@ def template(request, content, escape_type="html"):
|
|||
"path": request.url_parts.path,
|
||||
"pathname": request.url_parts.path,
|
||||
"query": "?%s" % request.url_parts.query}
|
||||
elif field == "uuid()":
|
||||
value = str(uuid.uuid4())
|
||||
elif field == "url_base":
|
||||
value = request.url_base
|
||||
else:
|
||||
raise Exception("Undefined template variable %s" % field)
|
||||
|
||||
for item in tokens[1:]:
|
||||
value = value[item[1]]
|
||||
if item[0] == "index":
|
||||
value = value[item[1]]
|
||||
else:
|
||||
value = value(request, *item[1])
|
||||
|
||||
assert isinstance(value, (int,) + types.StringTypes), tokens
|
||||
|
||||
|
|
|
@ -400,9 +400,10 @@ class WebTestHttpd(object):
|
|||
server_cls = WebTestServer
|
||||
|
||||
if use_ssl:
|
||||
if key_file is not None:
|
||||
assert os.path.exists(key_file)
|
||||
assert certificate is not None and os.path.exists(certificate)
|
||||
if not os.path.exists(key_file):
|
||||
raise ValueError("SSL certificate not found: {}".format(key_file))
|
||||
if not os.path.exists(certificate):
|
||||
raise ValueError("SSL key not found: {}".format(certificate))
|
||||
|
||||
try:
|
||||
self.httpd = server_cls((host, port),
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import socket
|
||||
|
||||
def invert_dict(dict):
|
||||
rv = {}
|
||||
for key, values in dict.iteritems():
|
||||
|
@ -12,3 +14,93 @@ class HTTPException(Exception):
|
|||
def __init__(self, code, message=""):
    # HTTP status code to send (e.g. 404) and an optional human-readable
    # message for the response.
    self.code = code
    self.message = message
|
||||
|
||||
|
||||
def _open_socket(host, port):
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if port != 0:
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sock.bind((host, port))
|
||||
sock.listen(5)
|
||||
return sock
|
||||
|
||||
def is_bad_port(port):
    """
    Bad port as per https://fetch.spec.whatwg.org/#port-blocking
    """
    # Set literal rather than a list: constant-time membership, and CPython's
    # peephole optimiser folds a constant set in an `in` test to a frozenset.
    return port in {
        1,     # tcpmux
        7,     # echo
        9,     # discard
        11,    # systat
        13,    # daytime
        15,    # netstat
        17,    # qotd
        19,    # chargen
        20,    # ftp-data
        21,    # ftp
        22,    # ssh
        23,    # telnet
        25,    # smtp
        37,    # time
        42,    # name
        43,    # nicname
        53,    # domain
        77,    # priv-rjs
        79,    # finger
        87,    # ttylink
        95,    # supdup
        101,   # hostriame
        102,   # iso-tsap
        103,   # gppitnp
        104,   # acr-nema
        109,   # pop2
        110,   # pop3
        111,   # sunrpc
        113,   # auth
        115,   # sftp
        117,   # uucp-path
        119,   # nntp
        123,   # ntp
        135,   # loc-srv / epmap
        139,   # netbios
        143,   # imap2
        179,   # bgp
        389,   # ldap
        465,   # smtp+ssl
        512,   # print / exec
        513,   # login
        514,   # shell
        515,   # printer
        526,   # tempo
        530,   # courier
        531,   # chat
        532,   # netnews
        540,   # uucp
        556,   # remotefs
        563,   # nntp+ssl
        587,   # smtp
        601,   # syslog-conn
        636,   # ldap+ssl
        993,   # imap+ssl
        995,   # pop3+ssl
        2049,  # nfs
        3659,  # apple-sasl
        4045,  # lockd
        6000,  # x11
        6665,  # irc (alternate)
        6666,  # irc (alternate)
        6667,  # irc (default)
        6668,  # irc (alternate)
        6669,  # irc (alternate)
    }
|
||||
|
||||
def get_port(host):
    """Return a free ephemeral port on *host* that is not fetch-blocked.

    Repeatedly asks the OS for a free port (by binding port 0) until one
    outside the https://fetch.spec.whatwg.org/#port-blocking list comes up.
    """
    while True:
        probe = _open_socket(host, 0)
        candidate = probe.getsockname()[1]
        probe.close()
        if not is_bad_port(candidate):
            return candidate
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue