Mirror of https://github.com/servo/servo.git, synced 2025-07-03 05:23:38 +01:00
Auto merge of #13614 - jdm:wptrunnerup3, r=Ms2ger
Upgrade wptrunner. This gets us back to a pristine local copy and allows us to start experimenting with webdriver tests.
This commit is contained in: commit 80447a79c4

24 changed files with 481 additions and 143 deletions
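The harness changes below teach wptrunner about a new `wdspec` test type (pytest-based WebDriver specification tests) and a `ServoWdspecExecutor` to drive them. As a rough sketch of the experiment this enables, pieced together from flags that appear later in this diff rather than from any documented command line, a run would look something like `wptrunner --product servo --binary target/release/servo --test-types wdspec`.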
@@ -170,25 +170,28 @@ class MachCommands(CommandBase):
         print(cargo_path)
         call(["cargo", "fetch"], env=self.build_env())

-    @Command('wpt-upgrade',
+    @Command('wptrunner-upgrade',
              description='upgrade wptrunner.',
              category='devenv')
     def upgrade_wpt_runner(self):
+        env = self.build_env()
         with cd(path.join(self.context.topdir, 'tests', 'wpt', 'harness')):
-            code = call(["git", "init"], env=self.build_env())
+            code = call(["git", "init"], env=env)
             if code:
                 return code
+            # No need to report an error if this fails, as it will for the first use
+            call(["git", "remote", "rm", "upstream"], env=env)
             code = call(
-                ["git", "remote", "add", "upstream", "https://github.com/w3c/wptrunner.git"], env=self.build_env())
+                ["git", "remote", "add", "upstream", "https://github.com/w3c/wptrunner.git"], env=env)
             if code:
                 return code
-            code = call(["git", "fetch", "upstream"], env=self.build_env())
+            code = call(["git", "fetch", "upstream"], env=env)
             if code:
                 return code
-            code = call(["git", "reset", "--hard", "remotes/upstream/master"], env=self.build_env())
+            code = call(["git", "reset", "--hard", "remotes/upstream/master"], env=env)
             if code:
                 return code
-            code = call(["rm", "-rf", ".git"], env=self.build_env())
+            code = call(["rm", "-rf", ".git"], env=env)
             if code:
                 return code
             return 0
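With this change the helper is invoked as `./mach wptrunner-upgrade` instead of `./mach wpt-upgrade`. The flow above temporarily turns tests/wpt/harness into a git checkout of upstream wptrunner, hard-resets it to `remotes/upstream/master`, then deletes the throwaway `.git` directory so the result is plain vendored files again.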
tests/wpt/harness/.travis.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
+language: python
+python: 2.7
+
+sudo: false
+
+cache:
+  directories:
+    - $HOME/.cache/pip
+
+env:
+  - TOXENV="{py27,pypy}-base"
+  - TOXENV="{py27,pypy}-chrome"
+  - TOXENV="{py27,pypy}-firefox"
+  - TOXENV="{py27,pypy}-servo"
+
+install:
+  - pip install -U tox
+
+script:
+  - tox
tests/wpt/harness/tox.ini (new file, 15 lines)

@@ -0,0 +1,15 @@
+[pytest]
+xfail_strict=true
+
+[tox]
+envlist = {py27,pypy}-{base,b2g,chrome,firefox,servo}
+
+[testenv]
+deps =
+  pytest>=2.9
+  -r{toxinidir}/requirements.txt
+  chrome: -r{toxinidir}/requirements_chrome.txt
+  firefox: -r{toxinidir}/requirements_firefox.txt
+  servo: -r{toxinidir}/requirements_servo.txt
+
+commands = py.test []
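With tox installed (`pip install -U tox`, as the Travis config above does), a single cell of the matrix can be run locally, e.g. `tox -e py27-servo` to exercise the harness's own tests against the servo requirements set. The environment names come straight from the envlist above; a local Python 2.7 and the pinned requirements files are assumed.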
@@ -26,8 +26,8 @@ All classes and functions named in the above dict must be imported into the
 module global scope.
 """

-product_list = ["b2g",
-                "chrome",
+product_list = ["chrome",
+                "edge",
                 "firefox",
                 "servo",
                 "servodriver"]
tests/wpt/harness/wptrunner/browsers/edge.py (new file, 71 lines)

@@ -0,0 +1,71 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .base import Browser, ExecutorBrowser, require_arg
+from ..webdriver_server import EdgeDriverServer
+from ..executors import executor_kwargs as base_executor_kwargs
+from ..executors.executorselenium import (SeleniumTestharnessExecutor,
+                                          SeleniumRefTestExecutor)
+
+__wptrunner__ = {"product": "edge",
+                 "check_args": "check_args",
+                 "browser": "EdgeBrowser",
+                 "executor": {"testharness": "SeleniumTestharnessExecutor",
+                              "reftest": "SeleniumRefTestExecutor"},
+                 "browser_kwargs": "browser_kwargs",
+                 "executor_kwargs": "executor_kwargs",
+                 "env_options": "env_options"}
+
+
+def check_args(**kwargs):
+    require_arg(kwargs, "webdriver_binary")
+
+
+def browser_kwargs(**kwargs):
+    return {"webdriver_binary": kwargs["webdriver_binary"]}
+
+
+def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
+                    **kwargs):
+    from selenium.webdriver import DesiredCapabilities
+
+    executor_kwargs = base_executor_kwargs(test_type, server_config,
+                                           cache_manager, **kwargs)
+    executor_kwargs["close_after_done"] = True
+    executor_kwargs["capabilities"] = dict(DesiredCapabilities.EDGE.items())
+    return executor_kwargs
+
+
+def env_options():
+    return {"host": "web-platform.test",
+            "bind_hostname": "true",
+            "supports_debugger": False}
+
+
+class EdgeBrowser(Browser):
+    used_ports = set()
+
+    def __init__(self, logger, webdriver_binary):
+        Browser.__init__(self, logger)
+        self.server = EdgeDriverServer(self.logger, binary=webdriver_binary)
+        self.webdriver_host = "localhost"
+        self.webdriver_port = self.server.port
+
+    def start(self):
+        print self.server.url
+        self.server.start()
+
+    def stop(self):
+        self.server.stop()
+
+    def pid(self):
+        return self.server.pid
+
+    def is_alive(self):
+        # TODO(ato): This only indicates the server is alive,
+        # and doesn't say anything about whether a browser session
+        # is active.
+        return self.server.is_alive()
+
+    def cleanup(self):
+        self.stop()
+
+    def executor_browser(self):
+        return ExecutorBrowser, {"webdriver_url": self.server.url}
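For orientation, the `__wptrunner__` dict at the top of edge.py is the registry wptrunner uses to discover a product's entry points by name. A minimal sketch of that lookup, simplified and assumed rather than copied from the harness's actual loader:

    import importlib

    def load_product(name):
        # e.g. name = "edge" resolves to the module added above
        module = importlib.import_module("wptrunner.browsers." + name)
        data = module.__wptrunner__
        check_args = getattr(module, data["check_args"])
        browser_cls = getattr(module, data["browser"])
        executors = {test_type: getattr(module, cls)
                     for test_type, cls in data["executor"].items()}
        return check_args, browser_cls, executors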
@@ -3,6 +3,7 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

 import os
+import platform
 import subprocess
 import sys

@@ -128,10 +129,16 @@ class FirefoxBrowser(Browser):
         self.profile.set_preferences({"marionette.defaultPrefs.enabled": True,
                                       "marionette.defaultPrefs.port": self.marionette_port,
                                       "dom.disable_open_during_load": False,
-                                      "network.dns.localDomains": ",".join(hostnames)})
+                                      "network.dns.localDomains": ",".join(hostnames),
+                                      "places.history.enabled": False})
         if self.e10s:
             self.profile.set_preferences({"browser.tabs.remote.autostart": True})

+        # Bug 1262954: winxp + e10s, disable hwaccel
+        if (self.e10s and platform.system() in ("Windows", "Microsoft") and
+                '5.1' in platform.version()):
+            self.profile.set_preferences({"layers.acceleration.disabled": True})
+
         if self.ca_certificate_path is not None:
             self.setup_ssl()
@@ -6,7 +6,7 @@ import os

 from .base import NullBrowser, ExecutorBrowser, require_arg
 from ..executors import executor_kwargs as base_executor_kwargs
-from ..executors.executorservo import ServoTestharnessExecutor, ServoRefTestExecutor
+from ..executors.executorservo import ServoTestharnessExecutor, ServoRefTestExecutor, ServoWdspecExecutor

 here = os.path.join(os.path.split(__file__)[0])

@@ -14,7 +14,8 @@ __wptrunner__ = {"product": "servo",
                  "check_args": "check_args",
                  "browser": "ServoBrowser",
                  "executor": {"testharness": "ServoTestharnessExecutor",
-                              "reftest": "ServoRefTestExecutor"},
+                              "reftest": "ServoRefTestExecutor",
+                              "wdspec": "ServoWdspecExecutor"},
                  "browser_kwargs": "browser_kwargs",
                  "executor_kwargs": "executor_kwargs",
                  "env_options": "env_options",
@@ -17,11 +17,11 @@ from ..wpttest import WdspecResult, WdspecSubtestResult

 errors = None
 marionette = None
+pytestrunner = None
 webdriver = None

 here = os.path.join(os.path.split(__file__)[0])

-from . import pytestrunner
 from .base import (ExecutorException,
                    Protocol,
                    RefTestExecutor,

@@ -41,7 +41,7 @@ extra_timeout = 5  # seconds


 def do_delayed_imports():
-    global errors, marionette, webdriver
+    global errors, marionette

     # Marionette client used to be called marionette, recently it changed
     # to marionette_driver for unfathomable reasons

@@ -51,8 +51,6 @@ def do_delayed_imports():
     except ImportError:
         from marionette_driver import marionette, errors

-    import webdriver
-

 class MarionetteProtocol(Protocol):
     def __init__(self, executor, browser):

@@ -292,7 +290,7 @@ class RemoteMarionetteProtocol(Protocol):
 class ExecuteAsyncScriptRun(object):
     def __init__(self, logger, func, marionette, url, timeout):
         self.logger = logger
-        self.result = None
+        self.result = (None, None)
         self.marionette = marionette
         self.func = func
         self.url = url

@@ -323,11 +321,9 @@ class ExecuteAsyncScriptRun(object):
             wait_timeout = None

         flag = self.result_flag.wait(wait_timeout)
-        if self.result is None:
+        if self.result[1] is None:
             self.logger.debug("Timed out waiting for a result")
-            assert not flag
             self.result = False, ("EXTERNAL-TIMEOUT", None)

         return self.result

     def _run(self):

@@ -409,7 +405,8 @@ class MarionetteTestharnessExecutor(TestharnessExecutor):
                     "timeout": timeout_ms,
                     "explicit_timeout": timeout is None}

-        return marionette.execute_async_script(script, new_sandbox=False)
+        rv = marionette.execute_async_script(script, new_sandbox=False)
+        return rv


 class MarionetteRefTestExecutor(RefTestExecutor):

@@ -487,7 +484,7 @@ class MarionetteRefTestExecutor(RefTestExecutor):
 class WdspecRun(object):
     def __init__(self, func, session, path, timeout):
         self.func = func
-        self.result = None
+        self.result = (None, None)
         self.session = session
         self.path = path
         self.timeout = timeout

@@ -504,8 +501,7 @@ class WdspecRun(object):
         executor.start()

         flag = self.result_flag.wait(self.timeout)
-        if self.result is None:
-            assert not flag
+        if self.result[1] is None:
             self.result = False, ("EXTERNAL-TIMEOUT", None)

         return self.result

@@ -528,6 +524,7 @@ class WdspecRun(object):
 class MarionetteWdspecExecutor(WdspecExecutor):
     def __init__(self, browser, server_config, webdriver_binary,
                  timeout_multiplier=1, close_after_done=True, debug_info=None):
+        self.do_delayed_imports()
         WdspecExecutor.__init__(self, browser, server_config,
                                 timeout_multiplier=timeout_multiplier,
                                 debug_info=debug_info)

@@ -557,3 +554,8 @@ class MarionetteWdspecExecutor(WdspecExecutor):
         harness_result = ("OK", None)
         subtest_results = pytestrunner.run(path, session, timeout=timeout)
         return (harness_result, subtest_results)
+
+    def do_delayed_imports(self):
+        global pytestrunner, webdriver
+        from . import pytestrunner
+        import webdriver
|
@ -22,20 +22,21 @@ from .base import (ExecutorException,
|
||||||
strip_server)
|
strip_server)
|
||||||
from ..testrunner import Stop
|
from ..testrunner import Stop
|
||||||
|
|
||||||
|
|
||||||
here = os.path.join(os.path.split(__file__)[0])
|
here = os.path.join(os.path.split(__file__)[0])
|
||||||
|
|
||||||
webdriver = None
|
webdriver = None
|
||||||
exceptions = None
|
exceptions = None
|
||||||
|
RemoteConnection = None
|
||||||
|
|
||||||
extra_timeout = 5
|
extra_timeout = 5
|
||||||
|
|
||||||
def do_delayed_imports():
|
def do_delayed_imports():
|
||||||
global webdriver
|
global webdriver
|
||||||
global exceptions
|
global exceptions
|
||||||
|
global RemoteConnection
|
||||||
from selenium import webdriver
|
from selenium import webdriver
|
||||||
from selenium.common import exceptions
|
from selenium.common import exceptions
|
||||||
|
from selenium.webdriver.remote.remote_connection import RemoteConnection
|
||||||
|
|
||||||
class SeleniumProtocol(Protocol):
|
class SeleniumProtocol(Protocol):
|
||||||
def __init__(self, executor, browser, capabilities, **kwargs):
|
def __init__(self, executor, browser, capabilities, **kwargs):
|
||||||
|
@ -53,8 +54,9 @@ class SeleniumProtocol(Protocol):
|
||||||
|
|
||||||
session_started = False
|
session_started = False
|
||||||
try:
|
try:
|
||||||
self.webdriver = webdriver.Remote(
|
self.webdriver = webdriver.Remote(command_executor=RemoteConnection(self.url.strip("/"),
|
||||||
self.url, desired_capabilities=self.capabilities)
|
resolve_ip=False),
|
||||||
|
desired_capabilities=self.capabilities)
|
||||||
except:
|
except:
|
||||||
self.logger.warning(
|
self.logger.warning(
|
||||||
"Connecting to Selenium failed:\n%s" % traceback.format_exc())
|
"Connecting to Selenium failed:\n%s" % traceback.format_exc())
|
||||||
|
@ -231,17 +233,7 @@ class SeleniumRefTestExecutor(RefTestExecutor):
|
||||||
def do_test(self, test):
|
def do_test(self, test):
|
||||||
self.logger.info("Test requires OS-level window focus")
|
self.logger.info("Test requires OS-level window focus")
|
||||||
|
|
||||||
if self.close_after_done and self.has_window:
|
self.protocol.webdriver.set_window_size(600, 600)
|
||||||
self.protocol.webdriver.close()
|
|
||||||
self.protocol.webdriver.switch_to_window(
|
|
||||||
self.protocol.webdriver.window_handles[-1])
|
|
||||||
self.has_window = False
|
|
||||||
|
|
||||||
if not self.has_window:
|
|
||||||
self.protocol.webdriver.execute_script(self.script)
|
|
||||||
self.protocol.webdriver.switch_to_window(
|
|
||||||
self.protocol.webdriver.window_handles[-1])
|
|
||||||
self.has_window = True
|
|
||||||
|
|
||||||
result = self.implementation.run_test(test)
|
result = self.implementation.run_test(test)
|
||||||
|
|
||||||
|
|
|
@@ -4,11 +4,13 @@

 import base64
 import hashlib
+import httplib
 import json
 import os
 import subprocess
 import tempfile
 import threading
+import traceback
 import urlparse
 import uuid
 from collections import defaultdict

@@ -19,11 +21,19 @@ from .base import (ExecutorException,
                    Protocol,
                    RefTestImplementation,
                    testharness_result_converter,
-                   reftest_result_converter)
+                   reftest_result_converter,
+                   WdspecExecutor)
 from .process import ProcessTestExecutor
 from ..browsers.base import browser_command
-render_arg = None
+from ..wpttest import WdspecResult, WdspecSubtestResult
+from ..webdriver_server import ServoDriverServer
+from .executormarionette import WdspecRun
+
+pytestrunner = None
+render_arg = None
+webdriver = None
+
+extra_timeout = 5  # seconds

 def do_delayed_imports():
     global render_arg

@@ -205,7 +215,7 @@ class ServoRefTestExecutor(ProcessTestExecutor):
             self.binary,
             [render_arg(self.browser.render_backend), "--hard-fail", "--exit",
              "-u", "Servo/wptrunner", "-Z", "disable-text-aa,load-webfonts-synchronously,replace-surrogates",
-             "--output=%s" % output_path, full_url],
+             "--output=%s" % output_path, full_url] + self.browser.binary_args,
             self.debug_info)

         for stylesheet in self.browser.user_stylesheets:

@@ -214,10 +224,7 @@ class ServoRefTestExecutor(ProcessTestExecutor):
         for pref, value in test.environment.get('prefs', {}).iteritems():
             command += ["--pref", "%s=%s" % (pref, value)]

-        if viewport_size:
-            command += ["--resolution", viewport_size]
-        else:
-            command += ["--resolution", "800x600"]
+        command += ["--resolution", viewport_size or "800x600"]

         if dpi:
             command += ["--device-pixel-ratio", dpi]

@@ -278,3 +285,83 @@ class ServoRefTestExecutor(ProcessTestExecutor):
             self.logger.process_output(self.proc.pid,
                                        line,
                                        " ".join(self.command))
+
+
+class ServoWdspecProtocol(Protocol):
+    def __init__(self, executor, browser):
+        self.do_delayed_imports()
+        Protocol.__init__(self, executor, browser)
+        self.session = None
+        self.server = None
+
+    def setup(self, runner):
+        try:
+            self.server = ServoDriverServer(self.logger, binary=self.browser.binary, binary_args=self.browser.binary_args, render_backend=self.browser.render_backend)
+            self.server.start(block=False)
+            self.logger.info(
+                "WebDriver HTTP server listening at %s" % self.server.url)
+
+            self.logger.info(
+                "Establishing new WebDriver session with %s" % self.server.url)
+            self.session = webdriver.Session(
+                self.server.host, self.server.port, self.server.base_path)
+        except Exception:
+            self.logger.error(traceback.format_exc())
+            self.executor.runner.send_message("init_failed")
+        else:
+            self.executor.runner.send_message("init_succeeded")
+
+    def teardown(self):
+        if self.server is not None:
+            try:
+                if self.session.session_id is not None:
+                    self.session.end()
+            except Exception:
+                pass
+            if self.server.is_alive:
+                self.server.stop()
+
+    @property
+    def is_alive(self):
+        conn = httplib.HTTPConnection(self.server.host, self.server.port)
+        conn.request("HEAD", self.server.base_path + "invalid")
+        res = conn.getresponse()
+        return res.status == 404
+
+    def do_delayed_imports(self):
+        global pytestrunner, webdriver
+        from . import pytestrunner
+        import webdriver
+
+
+class ServoWdspecExecutor(WdspecExecutor):
+    def __init__(self, browser, server_config,
+                 timeout_multiplier=1, close_after_done=True, debug_info=None,
+                 **kwargs):
+        WdspecExecutor.__init__(self, browser, server_config,
+                                timeout_multiplier=timeout_multiplier,
+                                debug_info=debug_info)
+        self.protocol = ServoWdspecProtocol(self, browser)
+
+    def is_alive(self):
+        return self.protocol.is_alive
+
+    def on_environment_change(self, new_environment):
+        pass
+
+    def do_test(self, test):
+        timeout = test.timeout * self.timeout_multiplier + extra_timeout
+
+        success, data = WdspecRun(self.do_wdspec,
+                                  self.protocol.session,
+                                  test.path,
+                                  timeout).run()
+
+        if success:
+            return self.convert_result(test, data)
+
+        return (test.result_cls(*data), [])
+
+    def do_wdspec(self, session, path, timeout):
+        harness_result = ("OK", None)
+        subtest_results = pytestrunner.run(path, session, timeout=timeout)
+        return (harness_result, subtest_results)
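A side note on the `--resolution` change in the ServoRefTestExecutor hunk above: the four-line if/else collapses to one expression because Python's `or` returns its first truthy operand, so `None` or an empty string falls through to the default. A self-contained illustration (not code from the diff):

    def resolution(viewport_size):
        # mirrors: command += ["--resolution", viewport_size or "800x600"]
        return viewport_size or "800x600"

    assert resolution(None) == "800x600"
    assert resolution("") == "800x600"
    assert resolution("1024x768") == "1024x768"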
|
@ -14,7 +14,6 @@ from .base import (Protocol,
|
||||||
RefTestImplementation,
|
RefTestImplementation,
|
||||||
TestharnessExecutor,
|
TestharnessExecutor,
|
||||||
strip_server)
|
strip_server)
|
||||||
from .. import webdriver
|
|
||||||
from ..testrunner import Stop
|
from ..testrunner import Stop
|
||||||
|
|
||||||
webdriver = None
|
webdriver = None
|
||||||
|
@ -26,7 +25,7 @@ extra_timeout = 5
|
||||||
|
|
||||||
def do_delayed_imports():
|
def do_delayed_imports():
|
||||||
global webdriver
|
global webdriver
|
||||||
import webdriver
|
from tools import webdriver
|
||||||
|
|
||||||
|
|
||||||
class ServoWebDriverProtocol(Protocol):
|
class ServoWebDriverProtocol(Protocol):
|
||||||
|
|
|
@@ -3,6 +3,10 @@
 # You can obtain one at http://mozilla.org/MPL/2.0/.

 import pytest
+import webdriver
+
+import contextlib
+import httplib


 """pytest fixtures for use in Python-based WPT tests.

@@ -17,7 +21,7 @@ class Session(object):
     in tests.

     The session is not created by default to enable testing of session
-    creation. However, a module-scoped session will be implicitly created
+    creation. However, a function-scoped session will be implicitly created
     at the first call to a WebDriver command. This means methods such as
     `session.send_command` and `session.session_id` are possible to use
     without having a session.

@@ -45,14 +49,88 @@ class Session(object):
         def test_something(setup, session):
             assert session.url == "https://example.org"

-    The session is closed when the test module goes out of scope by an
-    implicit call to `session.end`.
+    When the test function goes out of scope, any remaining user prompts
+    and opened windows are closed, and the current browsing context is
+    switched back to the top-level browsing context.
     """

     def __init__(self, client):
         self.client = client

-    @pytest.fixture(scope="module")
+    @pytest.fixture(scope="function")
     def session(self, request):
-        request.addfinalizer(self.client.end)
+        # finalisers are popped off a stack,
+        # making their ordering reverse
+        request.addfinalizer(self.switch_to_top_level_browsing_context)
+        request.addfinalizer(self.restore_windows)
+        request.addfinalizer(self.dismiss_user_prompts)
+
         return self.client

+    def dismiss_user_prompts(self):
+        """Dismisses any open user prompts in windows."""
+        current_window = self.client.window_handle
+
+        for window in self.windows():
+            self.client.window_handle = window
+            try:
+                self.client.alert.dismiss()
+            except webdriver.NoSuchAlertException:
+                pass
+
+        self.client.window_handle = current_window
+
+    def restore_windows(self):
+        """Closes superfluous windows opened by the test without ending
+        the session implicitly by closing the last window.
+        """
+        current_window = self.client.window_handle
+
+        for window in self.windows(exclude=[current_window]):
+            self.client.window_handle = window
+            if len(self.client.window_handles) > 1:
+                self.client.close()
+
+        self.client.window_handle = current_window
+
+    def switch_to_top_level_browsing_context(self):
+        """If the current browsing context selected by WebDriver is a
+        `<frame>` or an `<iframe>`, switch it back to the top-level
+        browsing context.
+        """
+        self.client.switch_frame(None)
+
+    def windows(self, exclude=None):
+        """Set of window handles, filtered by an `exclude` list if
+        provided.
+        """
+        if exclude is None:
+            exclude = []
+        wins = [w for w in self.client.handles if w not in exclude]
+        return set(wins)
+
+
+class HTTPRequest(object):
+    def __init__(self, host, port):
+        self.host = host
+        self.port = port
+
+    def head(self, path):
+        return self._request("HEAD", path)
+
+    def get(self, path):
+        return self._request("GET", path)
+
+    @contextlib.contextmanager
+    def _request(self, method, path):
+        conn = httplib.HTTPConnection(self.host, self.port)
+        try:
+            conn.request(method, path)
+            yield conn.getresponse()
+        finally:
+            conn.close()
+
+
+@pytest.fixture(scope="module")
+def http(session):
+    return HTTPRequest(session.transport.host, session.transport.port)
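As a usage sketch, a wdspec test written against these fixtures might look like the following; the `session` fixture and `window_handles` attribute appear in the code above, while the test itself is hypothetical:

    def test_leaves_windows_tidy(session):
        # any extra windows this test opened would be closed again by the
        # restore_windows() finaliser registered in the session fixture
        assert len(session.window_handles) >= 1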
@@ -45,6 +45,7 @@ def run(path, session, timeout=0):

     recorder = SubtestResultRecorder()
     plugins = [recorder,
+               fixtures,
                fixtures.Session(session)]

     # TODO(ato): Deal with timeouts
@@ -5,14 +5,18 @@
 var callback = arguments[arguments.length - 1];
 window.timeout_multiplier = %(timeout_multiplier)d;

-window.addEventListener("message", function(event) {
-    var tests = event.data[0];
-    var status = event.data[1];
+window.addEventListener("message", function f(event) {
+    if (event.data.type != "complete") {
+        return;
+    }
+    window.removeEventListener("message", f);
+
+    var tests = event.data.tests;
+    var status = event.data.status;

     var subtest_results = tests.map(function(x) {
         return [x.name, x.status, x.message, x.stack]
     });

     clearTimeout(timer);
     callback(["%(url)s",
               status.status,
@@ -27,6 +27,7 @@ class TestChunker(object):
         self.chunk_number = chunk_number
         assert self.chunk_number <= self.total_chunks
         self.logger = structured.get_default_logger()
+        assert self.logger

     def __call__(self, manifest):
         raise NotImplementedError
@@ -168,7 +168,7 @@ class TestRunnerManager(threading.Thread):

     def __init__(self, suite_name, test_queue, test_source_cls, browser_cls, browser_kwargs,
                  executor_cls, executor_kwargs, stop_flag, pause_after_test=False,
-                 pause_on_unexpected=False, debug_info=None):
+                 pause_on_unexpected=False, restart_on_unexpected=True, debug_info=None):
         """Thread that owns a single TestRunner process and any processes required
         by the TestRunner (e.g. the Firefox binary).

@@ -207,6 +207,7 @@ class TestRunnerManager(threading.Thread):

         self.pause_after_test = pause_after_test
         self.pause_on_unexpected = pause_on_unexpected
+        self.restart_on_unexpected = restart_on_unexpected
         self.debug_info = debug_info

         self.manager_number = next_manager_number()

@@ -526,7 +527,8 @@ class TestRunnerManager(threading.Thread):

         restart_before_next = (test.restart_after or
                                file_result.status in ("CRASH", "EXTERNAL-TIMEOUT") or
-                               subtest_unexpected or is_unexpected)
+                               ((subtest_unexpected or is_unexpected)
+                                and self.restart_on_unexpected))

         if (self.pause_after_test or
             (self.pause_on_unexpected and (subtest_unexpected or is_unexpected))):

@@ -593,6 +595,7 @@ class ManagerGroup(object):
                  executor_cls, executor_kwargs,
                  pause_after_test=False,
                  pause_on_unexpected=False,
+                 restart_on_unexpected=True,
                  debug_info=None):
         """Main thread object that owns all the TestManager threads."""
         self.suite_name = suite_name

@@ -605,6 +608,7 @@ class ManagerGroup(object):
         self.executor_kwargs = executor_kwargs
         self.pause_after_test = pause_after_test
         self.pause_on_unexpected = pause_on_unexpected
+        self.restart_on_unexpected = restart_on_unexpected
         self.debug_info = debug_info

         self.pool = set()

@@ -643,6 +647,7 @@ class ManagerGroup(object):
                                        self.stop_flag,
                                        self.pause_after_test,
                                        self.pause_on_unexpected,
+                                       self.restart_on_unexpected,
                                        self.debug_info)
             manager.start()
             self.pool.add(manager)
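The effect of the new restart_on_unexpected flag is easiest to see with the restart condition pulled out on its own; this is a self-contained reduction of the expression above, not harness code:

    def restart_before_next(restart_after, status, unexpected, restart_on_unexpected):
        # crashes and external timeouts always force a browser restart;
        # unexpected results only do so when restart_on_unexpected is set
        return (restart_after or
                status in ("CRASH", "EXTERNAL-TIMEOUT") or
                (unexpected and restart_on_unexpected))

    assert restart_before_next(False, "OK", True, True)
    assert not restart_before_next(False, "OK", True, False)  # --no-restart-on-unexpected
    assert restart_before_next(False, "CRASH", False, False)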
@@ -4,9 +4,16 @@

 import unittest
 import sys
-sys.path.insert(0, "..")
+from os.path import join, dirname

-from wptrunner import wptrunner
+from mozlog import structured
+
+import pytest
+
+sys.path.insert(0, join(dirname(__file__), "..", ".."))
+
+from wptrunner.testloader import EqualTimeChunker
+
+structured.set_default_logger(structured.structuredlog.StructuredLogger("TestChunker"))

 class MockTest(object):
     def __init__(self, id, timeout=10):

@@ -28,9 +35,9 @@ class TestEqualTimeChunker(unittest.TestCase):
     def test_include_all(self):
         tests = make_mock_manifest(("a", 10), ("a/b", 10), ("c", 10))

-        chunk_1 = list(wptrunner.EqualTimeChunker(3, 1)(tests))
-        chunk_2 = list(wptrunner.EqualTimeChunker(3, 2)(tests))
-        chunk_3 = list(wptrunner.EqualTimeChunker(3, 3)(tests))
+        chunk_1 = list(EqualTimeChunker(3, 1)(tests))
+        chunk_2 = list(EqualTimeChunker(3, 2)(tests))
+        chunk_3 = list(EqualTimeChunker(3, 3)(tests))

         self.assertEquals(tests[:10], chunk_1)
         self.assertEquals(tests[10:20], chunk_2)

@@ -39,9 +46,9 @@ class TestEqualTimeChunker(unittest.TestCase):
     def test_include_all_1(self):
         tests = make_mock_manifest(("a", 5), ("a/b", 5), ("c", 10), ("d", 10))

-        chunk_1 = list(wptrunner.EqualTimeChunker(3, 1)(tests))
-        chunk_2 = list(wptrunner.EqualTimeChunker(3, 2)(tests))
-        chunk_3 = list(wptrunner.EqualTimeChunker(3, 3)(tests))
+        chunk_1 = list(EqualTimeChunker(3, 1)(tests))
+        chunk_2 = list(EqualTimeChunker(3, 2)(tests))
+        chunk_3 = list(EqualTimeChunker(3, 3)(tests))

         self.assertEquals(tests[:10], chunk_1)
         self.assertEquals(tests[10:20], chunk_2)

@@ -50,9 +57,9 @@ class TestEqualTimeChunker(unittest.TestCase):
     def test_long(self):
         tests = make_mock_manifest(("a", 100), ("a/b", 1), ("c", 1))

-        chunk_1 = list(wptrunner.EqualTimeChunker(3, 1)(tests))
-        chunk_2 = list(wptrunner.EqualTimeChunker(3, 2)(tests))
-        chunk_3 = list(wptrunner.EqualTimeChunker(3, 3)(tests))
+        chunk_1 = list(EqualTimeChunker(3, 1)(tests))
+        chunk_2 = list(EqualTimeChunker(3, 2)(tests))
+        chunk_3 = list(EqualTimeChunker(3, 3)(tests))

         self.assertEquals(tests[:100], chunk_1)
         self.assertEquals(tests[100:101], chunk_2)

@@ -61,9 +68,9 @@ class TestEqualTimeChunker(unittest.TestCase):
     def test_long_1(self):
         tests = make_mock_manifest(("a", 1), ("a/b", 100), ("c", 1))

-        chunk_1 = list(wptrunner.EqualTimeChunker(3, 1)(tests))
-        chunk_2 = list(wptrunner.EqualTimeChunker(3, 2)(tests))
-        chunk_3 = list(wptrunner.EqualTimeChunker(3, 3)(tests))
+        chunk_1 = list(EqualTimeChunker(3, 1)(tests))
+        chunk_2 = list(EqualTimeChunker(3, 2)(tests))
+        chunk_3 = list(EqualTimeChunker(3, 3)(tests))

         self.assertEquals(tests[:1], chunk_1)
         self.assertEquals(tests[1:101], chunk_2)

@@ -72,7 +79,7 @@ class TestEqualTimeChunker(unittest.TestCase):
     def test_too_few_dirs(self):
         with self.assertRaises(ValueError):
             tests = make_mock_manifest(("a", 1), ("a/b", 100), ("c", 1))
-            list(wptrunner.EqualTimeChunker(4, 1)(tests))
+            list(EqualTimeChunker(4, 1)(tests))


 if __name__ == "__main__":
@@ -4,11 +4,12 @@

 import unittest
 import sys
+from os.path import join, dirname
 from cStringIO import StringIO

-sys.path.insert(0, "..")
+sys.path.insert(0, join(dirname(__file__), "..", ".."))

-import hosts
+from wptrunner import hosts


 class HostsTest(unittest.TestCase):
@@ -5,6 +5,8 @@
 import unittest
 import StringIO

+import pytest
+
 from .. import metadata, manifestupdate
 from mozlog import structuredlog, handlers, formatters

@@ -51,6 +53,7 @@ class TestExpectedUpdater(unittest.TestCase):
             subtest.coalesce_expected()
         test.coalesce_expected()

+    @pytest.mark.xfail
     def test_update_0(self):
         prev_data = [("path/to/test.htm.ini", ["/path/to/test.htm"], """[test.htm]
             type: testharness

@@ -71,6 +74,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.coalesce_results([new_manifest])
         self.assertTrue(new_manifest.is_empty)

+    @pytest.mark.xfail
     def test_update_1(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]

@@ -93,6 +97,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.assertFalse(new_manifest.is_empty)
         self.assertEquals(new_manifest.get_test(test_id).children[0].get("expected"), "FAIL")

+    @pytest.mark.xfail
     def test_new_subtest(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]

@@ -120,6 +125,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.assertEquals(new_manifest.get_test(test_id).children[0].get("expected"), "FAIL")
         self.assertEquals(new_manifest.get_test(test_id).children[1].get("expected"), "FAIL")

+    @pytest.mark.xfail
     def test_update_multiple_0(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]

@@ -159,6 +165,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.assertEquals(new_manifest.get_test(test_id).children[0].get(
             "expected", {"debug": False, "os": "linux"}), "TIMEOUT")

+    @pytest.mark.xfail
     def test_update_multiple_1(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]

@@ -200,6 +207,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.assertEquals(new_manifest.get_test(test_id).children[0].get(
             "expected", {"debug": False, "os": "windows"}), "FAIL")

+    @pytest.mark.xfail
     def test_update_multiple_2(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]

@@ -239,6 +247,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.assertEquals(new_manifest.get_test(test_id).children[0].get(
             "expected", {"debug": True, "os": "osx"}), "TIMEOUT")

+    @pytest.mark.xfail
     def test_update_multiple_3(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]

@@ -280,6 +289,7 @@ class TestExpectedUpdater(unittest.TestCase):
         self.assertEquals(new_manifest.get_test(test_id).children[0].get(
             "expected", {"debug": True, "os": "osx"}), "TIMEOUT")

+    @pytest.mark.xfail
     def test_update_ignore_existing(self):
         test_id = "/path/to/test.htm"
         prev_data = [("path/to/test.htm.ini", [test_id], """[test.htm]
@@ -16,7 +16,8 @@ import mozprocess

 __all__ = ["SeleniumServer", "ChromeDriverServer",
-           "GeckoDriverServer", "WebDriverServer"]
+           "GeckoDriverServer", "ServoDriverServer",
+           "WebDriverServer"]


 class WebDriverServer(object):

@@ -44,7 +45,7 @@ class WebDriverServer(object):
     def make_command(self):
         """Returns the full command for starting the server process as a list."""

-    def start(self, block=True):
+    def start(self, block=False):
         try:
             self._run(block)
         except KeyboardInterrupt:

@@ -86,9 +87,7 @@ class WebDriverServer(object):

     @property
     def is_alive(self):
-        return (self._proc is not None and
-                self._proc.proc is not None and
-                self._proc.poll() is None)
+        return hasattr(self._proc, "proc") and self._proc.poll() is None

     def on_output(self, line):
         self.logger.process_output(self.pid,

@@ -138,6 +137,17 @@ class ChromeDriverServer(WebDriverServer):
             cmd_arg("url-base", self.base_path) if self.base_path else ""]


+class EdgeDriverServer(WebDriverServer):
+    def __init__(self, logger, binary="MicrosoftWebDriver.exe", port=None,
+                 base_path="", host="localhost"):
+        WebDriverServer.__init__(
+            self, logger, binary, host=host, port=port)
+
+    def make_command(self):
+        return [self.binary,
+                "--port=%s" % str(self.port)]
+
+
 class GeckoDriverServer(WebDriverServer):
     def __init__(self, logger, marionette_port=2828, binary="wires",
                  host="127.0.0.1", port=None):

@@ -150,8 +160,30 @@ class GeckoDriverServer(WebDriverServer):
         return [self.binary,
                 "--connect-existing",
                 "--marionette-port", str(self.marionette_port),
-                "--webdriver-host", self.host,
-                "--webdriver-port", str(self.port)]
+                "--host", self.host,
+                "--port", str(self.port)]


+class ServoDriverServer(WebDriverServer):
+    def __init__(self, logger, binary="servo", binary_args=None, host="127.0.0.1", port=None, render_backend=None):
+        env = os.environ.copy()
+        env["RUST_BACKTRACE"] = "1"
+        WebDriverServer.__init__(self, logger, binary, host=host, port=port, env=env)
+        self.binary_args = binary_args
+        self.render_backend = render_backend
+
+    def make_command(self):
+        command = [self.binary,
+                   "--webdriver", str(self.port),
+                   "--hard-fail",
+                   "--headless"]
+        if self.binary_args:
+            command += self.binary_args
+        if self.render_backend == "cpu":
+            command += ["--cpu"]
+        elif self.render_backend == "webrender":
+            command += ["--webrender"]
+        return command


 def cmd_arg(name, value=None):
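Taken together with the executor changes, make_command above means a ServoDriverServer started with the cpu backend launches something along the lines of `servo --webdriver <port> --hard-fail --headless --cpu` (the port is picked by WebDriverServer, so the exact value varies). Note also that start() now defaults to block=False, matching how ServoWdspecProtocol.setup calls it.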
@ -45,62 +45,34 @@ def create_parser(product_choices=None):
|
||||||
config_data = config.load()
|
config_data = config.load()
|
||||||
product_choices = products.products_enabled(config_data)
|
product_choices = products.products_enabled(config_data)
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description="Runner for web-platform-tests tests.")
|
parser = argparse.ArgumentParser(description="""Runner for web-platform-tests tests.""",
|
||||||
parser.add_argument("--metadata", action="store", type=abs_path, dest="metadata_root",
|
usage="""%(prog)s [OPTION]... [TEST]...
|
||||||
help="Path to the folder containing test metadata"),
|
|
||||||
parser.add_argument("--tests", action="store", type=abs_path, dest="tests_root",
|
|
||||||
help="Path to test files"),
|
|
||||||
parser.add_argument("--run-info", action="store", type=abs_path,
|
|
||||||
help="Path to directory containing extra json files to add to run info")
|
|
||||||
parser.add_argument("--config", action="store", type=abs_path, dest="config",
|
|
||||||
help="Path to config file")
|
|
||||||
|
|
||||||
|
TEST is either the full path to a test file to run, or the URL of a test excluding
|
||||||
|
scheme host and port.""")
|
||||||
parser.add_argument("--manifest-update", action="store_true", default=False,
|
parser.add_argument("--manifest-update", action="store_true", default=False,
|
||||||
help="Force regeneration of the test manifest")
|
help="Regenerate the test manifest.")
|
||||||
|
|
||||||
parser.add_argument("--binary", action="store",
|
|
||||||
type=abs_path, help="Binary to run tests against")
|
|
||||||
parser.add_argument('--binary-arg',
|
|
||||||
default=[], action="append", dest="binary_args",
|
|
||||||
help="Extra argument for the binary (servo)")
|
|
||||||
parser.add_argument("--webdriver-binary", action="store", metavar="BINARY",
|
|
||||||
type=abs_path, help="WebDriver server binary to use")
|
|
||||||
parser.add_argument("--processes", action="store", type=int, default=None,
|
|
||||||
help="Number of simultaneous processes to use")
|
|
||||||
|
|
||||||
|
parser.add_argument("--timeout-multiplier", action="store", type=float, default=None,
|
||||||
|
help="Multiplier relative to standard test timeout to use")
|
||||||
parser.add_argument("--run-by-dir", type=int, nargs="?", default=False,
|
parser.add_argument("--run-by-dir", type=int, nargs="?", default=False,
|
||||||
help="Split run into groups by directories. With a parameter,"
|
help="Split run into groups by directories. With a parameter,"
|
||||||
"limit the depth of splits e.g. --run-by-dir=1 to split by top-level"
|
"limit the depth of splits e.g. --run-by-dir=1 to split by top-level"
|
||||||
"directory")
|
"directory")
|
||||||
|
parser.add_argument("--processes", action="store", type=int, default=None,
|
||||||
parser.add_argument("--timeout-multiplier", action="store", type=float, default=None,
|
help="Number of simultaneous processes to use")
|
||||||
help="Multiplier relative to standard test timeout to use")
|
|
||||||
parser.add_argument("--repeat", action="store", type=int, default=1,
|
|
||||||
help="Number of times to run the tests")
|
|
||||||
parser.add_argument("--repeat-until-unexpected", action="store_true", default=None,
|
|
||||||
help="Run tests in a loop until one returns an unexpected result")
|
|
||||||
|
|
||||||
parser.add_argument("--no-capture-stdio", action="store_true", default=False,
|
parser.add_argument("--no-capture-stdio", action="store_true", default=False,
|
||||||
help="Don't capture stdio and write to logging")
|
help="Don't capture stdio and write to logging")
|
||||||
|
|
||||||
parser.add_argument("--product", action="store", choices=product_choices,
|
mode_group = parser.add_argument_group("Mode")
|
||||||
default=None, help="Browser against which to run tests")
|
mode_group.add_argument("--list-test-groups", action="store_true",
|
||||||
|
|
||||||
parser.add_argument("--list-test-groups", action="store_true",
|
|
||||||
default=False,
|
default=False,
|
||||||
help="List the top level directories containing tests that will run.")
|
help="List the top level directories containing tests that will run.")
|
||||||
parser.add_argument("--list-disabled", action="store_true",
|
mode_group.add_argument("--list-disabled", action="store_true",
|
||||||
default=False,
|
default=False,
|
||||||
help="List the tests that are disabled on the current platform")
|
help="List the tests that are disabled on the current platform")
|
||||||
|
|
||||||
build_type = parser.add_mutually_exclusive_group()
|
|
||||||
build_type.add_argument("--debug-build", dest="debug", action="store_true",
|
|
||||||
default=None,
|
|
||||||
help="Build is a debug build (overrides any mozinfo file)")
|
|
||||||
build_type.add_argument("--release-build", dest="debug", action="store_false",
|
|
||||||
default=None,
|
|
||||||
help="Build is a release (overrides any mozinfo file)")
|
|
||||||
|
|
||||||
test_selection_group = parser.add_argument_group("Test Selection")
|
test_selection_group = parser.add_argument_group("Test Selection")
|
||||||
test_selection_group.add_argument("--test-types", action="store",
|
test_selection_group.add_argument("--test-types", action="store",
|
||||||
nargs="*", default=wpttest.enabled_tests,
|
nargs="*", default=wpttest.enabled_tests,
|
||||||
|
@ -119,7 +91,10 @@ def create_parser(product_choices=None):
|
||||||
debugging_group.add_argument('--debugger', const="__default__", nargs="?",
|
debugging_group.add_argument('--debugger', const="__default__", nargs="?",
|
||||||
help="run under a debugger, e.g. gdb or valgrind")
|
help="run under a debugger, e.g. gdb or valgrind")
|
||||||
debugging_group.add_argument('--debugger-args', help="arguments to the debugger")
|
debugging_group.add_argument('--debugger-args', help="arguments to the debugger")
|
||||||
|
debugging_group.add_argument("--repeat", action="store", type=int, default=1,
|
||||||
|
help="Number of times to run the tests")
|
||||||
|
debugging_group.add_argument("--repeat-until-unexpected", action="store_true", default=None,
|
||||||
|
help="Run tests in a loop until one returns an unexpected result")
|
||||||
debugging_group.add_argument('--pause-after-test', action="store_true", default=None,
|
debugging_group.add_argument('--pause-after-test', action="store_true", default=None,
|
||||||
help="Halt the test runner after each test (this happens by default if only a single test is run)")
|
help="Halt the test runner after each test (this happens by default if only a single test is run)")
|
||||||
debugging_group.add_argument('--no-pause-after-test', dest="pause_after_test", action="store_false",
|
debugging_group.add_argument('--no-pause-after-test', dest="pause_after_test", action="store_false",
|
||||||
|
@@ -127,12 +102,47 @@ def create_parser(product_choices=None):
     debugging_group.add_argument('--pause-on-unexpected', action="store_true",
                                  help="Halt the test runner when an unexpected result is encountered")
+    debugging_group.add_argument('--no-restart-on-unexpected', dest="restart_on_unexpected",
+                                 default=True, action="store_false",
+                                 help="Don't restart on an unexpected result")
     debugging_group.add_argument("--symbols-path", action="store", type=url_or_path,
                                  help="Path or url to symbols file used to analyse crash minidumps.")
     debugging_group.add_argument("--stackwalk-binary", action="store", type=abs_path,
                                  help="Path to stackwalker program used to analyse minidumps.")
+    debugging_group.add_argument("--pdb", action="store_true",
+                                 help="Drop into pdb on python exception")
+
+    config_group = parser.add_argument_group("Configuration")
+    config_group.add_argument("--binary", action="store",
+                              type=abs_path, help="Binary to run tests against")
+    config_group.add_argument('--binary-arg',
+                              default=[], action="append", dest="binary_args",
+                              help="Extra argument for the binary (servo)")
+    config_group.add_argument("--webdriver-binary", action="store", metavar="BINARY",
+                              type=abs_path, help="WebDriver server binary to use")
+
+    config_group.add_argument("--metadata", action="store", type=abs_path, dest="metadata_root",
+                              help="Path to root directory containing test metadata"),
+    config_group.add_argument("--tests", action="store", type=abs_path, dest="tests_root",
+                              help="Path to root directory containing test files"),
+    config_group.add_argument("--run-info", action="store", type=abs_path,
+                              help="Path to directory containing extra json files to add to run info")
+    config_group.add_argument("--product", action="store", choices=product_choices,
+                              default=None, help="Browser against which to run tests")
+    config_group.add_argument("--config", action="store", type=abs_path, dest="config",
+                              help="Path to config file")
+
+    build_type = parser.add_mutually_exclusive_group()
+    build_type.add_argument("--debug-build", dest="debug", action="store_true",
+                            default=None,
+                            help="Build is a debug build (overrides any mozinfo file)")
+    build_type.add_argument("--release-build", dest="debug", action="store_false",
+                            default=None,
+                            help="Build is a release (overrides any mozinfo file)")
+
     chunking_group = parser.add_argument_group("Test Chunking")
     chunking_group.add_argument("--total-chunks", action="store", type=int, default=1,
                                 help="Total number of chunks to use")
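--debug-build and --release-build share a single dest inside a mutually exclusive group, so args.debug ends up tri-state: None (unspecified), True, or False. A standalone sketch of that argparse pattern:

    import argparse

    parser = argparse.ArgumentParser()
    build_type = parser.add_mutually_exclusive_group()
    # Both flags write to the same dest; default=None marks "not given".
    build_type.add_argument("--debug-build", dest="debug", action="store_true", default=None)
    build_type.add_argument("--release-build", dest="debug", action="store_false", default=None)

    assert parser.parse_args([]).debug is None
    assert parser.parse_args(["--debug-build"]).debug is True
    assert parser.parse_args(["--release-build"]).debug is False
    # Supplying both flags at once makes argparse exit with a usage error.
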
@@ -164,10 +174,6 @@ def create_parser(product_choices=None):
     gecko_group.add_argument("--disable-e10s", dest="gecko_e10s", action="store_false", default=True,
                              help="Run tests without electrolysis preferences")
 
-    b2g_group = parser.add_argument_group("B2G-specific")
-    b2g_group.add_argument("--b2g-no-backup", action="store_true", default=False,
-                           help="Don't backup device before testrun with --product=b2g")
-
     servo_group = parser.add_argument_group("Servo-specific")
     servo_group.add_argument("--user-stylesheet",
                              default=[], action="append", dest="user_stylesheets",
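--user-stylesheet above (like the new --binary-arg) uses action="append", so the option can be repeated on the command line and the values accumulate into a single list:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--user-stylesheet", default=[], action="append",
                        dest="user_stylesheets")

    # Each occurrence appends to the same list.
    args = parser.parse_args(["--user-stylesheet", "a.css",
                              "--user-stylesheet", "b.css"])
    assert args.user_stylesheets == ["a.css", "b.css"]
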
@@ -2,10 +2,13 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
+import sys
 import unittest
 
 from cStringIO import StringIO
 
+import pytest
+
 from .. import parser, serializer
@@ -196,6 +199,7 @@ class TokenizerTest(unittest.TestCase):
                      r"""key: "#"
 """)
 
+    @pytest.mark.xfail(sys.maxunicode == 0xFFFF, reason="narrow unicode")
     def test_escape_9(self):
         self.compare(r"""key: \U10FFFFabc""",
                      u"""key: \U0010FFFFabc
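The xfail guard above keys off sys.maxunicode: on "narrow" Python 2 builds it is 0xFFFF and characters outside the Basic Multilingual Plane are stored as surrogate pairs, so \U0010FFFF does not round-trip as a single code point. A quick illustration:

    import sys

    c = u"\U0010FFFF"
    if sys.maxunicode == 0xFFFF:
        assert len(c) == 2  # narrow build: UTF-16 surrogate pair
    else:
        assert len(c) == 1  # wide build: a single code point
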
@@ -204,6 +204,7 @@ def run_tests(config, test_paths, product, **kwargs):
                              executor_kwargs,
                              kwargs["pause_after_test"],
                              kwargs["pause_on_unexpected"],
+                             kwargs["restart_on_unexpected"],
                              kwargs["debug_info"]) as manager_group:
         try:
             manager_group.run(test_type, test_loader.tests)
@@ -68,9 +68,6 @@ class WdspecSubtestResult(SubtestResult):
 
 
 def get_run_info(metadata_root, product, **kwargs):
-    if product == "b2g":
-        return B2GRunInfo(metadata_root, product, **kwargs)
-    else:
-        return RunInfo(metadata_root, product, **kwargs)
+    return RunInfo(metadata_root, product, **kwargs)
@@ -101,12 +98,6 @@ class RunInfo(dict):
         mozinfo.find_and_update_from_json(*dirs)
 
 
-class B2GRunInfo(RunInfo):
-    def __init__(self, *args, **kwargs):
-        RunInfo.__init__(self, *args, **kwargs)
-        self["os"] = "b2g"
-
-
 class Test(object):
     result_cls = None
     subtest_result_cls = None
@@ -131,7 +122,7 @@ class Test(object):
                    inherit_metadata,
                    test_metadata,
                    timeout=timeout,
-                   path=manifest_item.path,
+                   path=manifest_item.source_file.path,
                    protocol="https" if hasattr(manifest_item, "https") and manifest_item.https else "http")
 
     @property
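Two observations on this last hunk: the change reads the path from the item's source_file object rather than manifest_item.path directly, presumably tracking an API change in the upstream manifest code; and the protocol= context line uses the hasattr-and idiom, which can be written more directly as getattr with a default. An equivalent sketch (ManifestItem is a hypothetical stand-in for the real manifest item class):

    class ManifestItem(object):
        # Hypothetical stand-in for a manifest item that opts into https.
        https = True

    manifest_item = ManifestItem()
    protocol = "https" if getattr(manifest_item, "https", False) else "http"
    assert protocol == "https"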