mirror of
https://github.com/servo/servo.git
synced 2025-08-10 16:05:43 +01:00
Update web-platform-tests to revision fb15e14b52049f952612623ee0d7fb7a620a57c9
This commit is contained in:
parent
200cc8aa6b
commit
4a942c982f
141 changed files with 2563 additions and 1589 deletions
64
tests/wpt/web-platform-tests/.azure-pipelines.yml
Normal file
64
tests/wpt/web-platform-tests/.azure-pipelines.yml
Normal file
|
@ -0,0 +1,64 @@
|
|||
# This is the configuration file for Azure Pipelines, used to run tests on
|
||||
# macOS. Documentation to help understand this setup:
|
||||
# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema
|
||||
|
||||
trigger: none # disable builds for branches
|
||||
|
||||
jobs:
|
||||
- job: macOS
|
||||
|
||||
pool:
|
||||
vmImage: 'macOS-10.13'
|
||||
|
||||
steps:
|
||||
- checkout: self
|
||||
fetchDepth: 50
|
||||
submodules: false
|
||||
|
||||
- script: |
|
||||
echo "Test jobs:"
|
||||
./wpt test-jobs | while read job; do
|
||||
echo "$job"
|
||||
echo "##vso[task.setvariable variable=run_$job]true";
|
||||
done
|
||||
displayName: 'List test jobs'
|
||||
|
||||
- script: |
|
||||
sudo easy_install pip
|
||||
sudo pip install -U virtualenv
|
||||
displayName: 'Install Python packages'
|
||||
condition: variables.run_wptrunner_infrastructure
|
||||
|
||||
# Installig Ahem in /Library/Fonts instead of using --install-fonts is a
|
||||
# workaround for https://github.com/web-platform-tests/wpt/issues/13803.
|
||||
- script: sudo cp fonts/Ahem.ttf /Library/Fonts
|
||||
displayName: 'Install Ahem font'
|
||||
condition: variables.run_wptrunner_infrastructure
|
||||
|
||||
- script: |
|
||||
# https://github.com/web-platform-tests/results-collection/blob/master/src/scripts/trust-root-ca.sh
|
||||
sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain tools/certs/cacert.pem
|
||||
displayName: 'Install web-platform.test certificate'
|
||||
condition: variables.run_wptrunner_infrastructure
|
||||
|
||||
- script: |
|
||||
# Pin to STP 67, as SafariDriver isn't working in 68:
|
||||
# https://github.com/web-platform-tests/wpt/issues/13800
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 brew cask install https://raw.githubusercontent.com/Homebrew/homebrew-cask-versions/23fae0a88868911913c2ee7d527c89164b6d5720/Casks/safari-technology-preview.rb
|
||||
# https://web-platform-tests.org/running-tests/safari.html
|
||||
sudo "/Applications/Safari Technology Preview.app/Contents/MacOS/safaridriver" --enable
|
||||
defaults write com.apple.Safari WebKitJavaScriptCanOpenWindowsAutomatically 1
|
||||
displayName: 'Install Safari Technology Preview'
|
||||
condition: variables.run_wptrunner_infrastructure
|
||||
|
||||
- script: ./wpt make-hosts-file | sudo tee -a /etc/hosts
|
||||
displayName: 'Update /etc/hosts'
|
||||
condition: variables.run_wptrunner_infrastructure
|
||||
|
||||
- script: ./wpt manifest
|
||||
displayName: 'Update manifest'
|
||||
condition: variables.run_wptrunner_infrastructure
|
||||
|
||||
- script: no_proxy='*' ./wpt run --yes --no-manifest-update --manifest MANIFEST.json --metadata infrastructure/metadata/ --channel=preview safari_webdriver infrastructure/
|
||||
displayName: 'Run infrastructure/ tests'
|
||||
condition: variables.run_wptrunner_infrastructure
|
|
@ -1,89 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<meta charset=utf-8>
|
||||
<title>Access-Control-Allow-Headers handling</title>
|
||||
<script src=/resources/testharness.js></script>
|
||||
<script src=/resources/testharnessreport.js></script>
|
||||
<script src=support.js?pipe=sub></script>
|
||||
|
||||
<h1>Access-Control-Allow-Headers handling</h1>
|
||||
|
||||
<div id=log></div>
|
||||
|
||||
<script>
|
||||
|
||||
/*
|
||||
* Origin header
|
||||
*/
|
||||
function shouldPass(origin) {
|
||||
test(function () {
|
||||
var client = new XMLHttpRequest()
|
||||
client.open('GET', CROSSDOMAIN
|
||||
+ '/resources/cors-makeheader.py?origin='
|
||||
+ encodeURIComponent(origin),
|
||||
false)
|
||||
client.send()
|
||||
r = JSON.parse(client.response)
|
||||
var host = location.protocol + "//" + location.host
|
||||
assert_equals(r['origin'], host, 'Request Origin: should be ' + host)
|
||||
}, 'Allow origin: ' + origin.replace(/\t/g, "[tab]").replace(/ /g, '_'));
|
||||
}
|
||||
|
||||
shouldPass('*');
|
||||
shouldPass(' * ');
|
||||
shouldPass(' *');
|
||||
shouldPass(location.protocol + "//" + location.host);
|
||||
shouldPass(" "+location.protocol + "//" + location.host);
|
||||
shouldPass(" "+location.protocol + "//" + location.host + " ");
|
||||
shouldPass(" "+location.protocol + "//" + location.host);
|
||||
|
||||
|
||||
function shouldFail(origin) {
|
||||
test(function () {
|
||||
var client = new XMLHttpRequest()
|
||||
client.open('GET', CROSSDOMAIN
|
||||
+ '/resources/cors-makeheader.py?origin='
|
||||
+ encodeURIComponent(origin),
|
||||
false)
|
||||
assert_throws("NetworkError", function() { client.send() }, 'send')
|
||||
}, 'Disallow origin: ' + origin.replace('\0', '\\0'));
|
||||
}
|
||||
|
||||
shouldFail(location.protocol + "//" + SUBDOMAIN + "." + location.host)
|
||||
shouldFail("//" + location.host)
|
||||
shouldFail("://" + location.host)
|
||||
shouldFail("ftp://" + location.host)
|
||||
shouldFail("http:://" + location.host)
|
||||
shouldFail("http:/" + location.host)
|
||||
shouldFail("http:" + location.host)
|
||||
shouldFail(location.host)
|
||||
shouldFail(location.protocol + "//" + location.host + "?")
|
||||
shouldFail(location.protocol + "//" + location.host + "/")
|
||||
shouldFail(location.protocol + "//" + location.host + " /")
|
||||
shouldFail(location.protocol + "//" + location.host + "#")
|
||||
shouldFail(location.protocol + "//" + location.host + "%23")
|
||||
shouldFail(location.protocol + "//" + location.host + ":80")
|
||||
shouldFail(location.protocol + "//" + location.host + ", *")
|
||||
shouldFail(location.protocol + "//" + location.host + "\0")
|
||||
shouldFail((location.protocol + "//" + location.host).toUpperCase())
|
||||
shouldFail(location.protocol.toUpperCase() + "//" + location.host)
|
||||
shouldFail("-")
|
||||
shouldFail("**")
|
||||
shouldFail("\0*")
|
||||
shouldFail("*\0")
|
||||
shouldFail("'*'")
|
||||
shouldFail('"*"')
|
||||
shouldFail("* *")
|
||||
shouldFail("*" + location.protocol + "//" + "*")
|
||||
shouldFail("*" + location.protocol + "//" + location.host)
|
||||
shouldFail("* " + location.protocol + "//" + location.host)
|
||||
shouldFail("*, " + location.protocol + "//" + location.host)
|
||||
shouldFail("\0" + location.protocol + "//" + location.host)
|
||||
shouldFail("null " + location.protocol + "//" + location.host)
|
||||
shouldFail('http://example.net')
|
||||
shouldFail('null')
|
||||
shouldFail('')
|
||||
shouldFail(location.href)
|
||||
shouldFail(dirname(location.href))
|
||||
shouldFail(CROSSDOMAIN)
|
||||
|
||||
</script>
|
|
@ -122,8 +122,13 @@ function test_response_header(allow) {
|
|||
test_response_header('TRUE')
|
||||
test_response_header('True')
|
||||
test_response_header('"true"')
|
||||
test_response_header("'true'");
|
||||
test_response_header('false')
|
||||
test_response_header('1')
|
||||
test_response_header('0')
|
||||
test_response_header(',true');
|
||||
test_response_header('true,');
|
||||
test_response_header('true%0B');
|
||||
test_response_header('true%0C');
|
||||
|
||||
</script>
|
||||
|
|
|
@ -71,8 +71,14 @@ shouldFail((location.protocol + "//" + location.host).toUpperCase())
|
|||
shouldFail(location.protocol.toUpperCase() + "//" + location.host)
|
||||
shouldFail("-")
|
||||
shouldFail("**")
|
||||
shouldFail(",*");
|
||||
shouldFail("*,");
|
||||
shouldFail("\0*")
|
||||
shouldFail("\u000B*");
|
||||
shouldFail("\u000C*");
|
||||
shouldFail("*\0")
|
||||
shouldFail("*\u000B");
|
||||
shouldFail("*\u000C");
|
||||
shouldFail("'*'")
|
||||
shouldFail('"*"')
|
||||
shouldFail("* *")
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
<!DOCTYPE html>
|
||||
<link rel="author" title="Morten Stenshorne" href="mstensho@chromium.org">
|
||||
<link rel="help" href="https://www.w3.org/TR/CSS22/visudet.html#containing-block-details">
|
||||
<link rel="help" href="https://www.w3.org/TR/CSS22/visuren.html#anonymous-block-level">
|
||||
<p>There should be a green square below, and no red.</p>
|
||||
<div style="position:relative; width:100px; height:100px; background:red;">
|
||||
<span id="posMe">
|
||||
<div id="removeMe"></div>
|
||||
</span>
|
||||
<span>
|
||||
<div>
|
||||
<div id="target" style="position:absolute; width:100%; height:100%; background:green;"></div>
|
||||
</div>
|
||||
</span>
|
||||
</div>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script>
|
||||
test(()=> {
|
||||
document.body.offsetTop;
|
||||
removeMe.style.display = "none";
|
||||
document.body.offsetTop;
|
||||
posMe.style.position = "relative";
|
||||
assert_equals(document.getElementById("target").offsetWidth, 100);
|
||||
assert_equals(document.getElementById("target").offsetHeight, 100);
|
||||
assert_equals(document.getElementById("target").offsetLeft, 0);
|
||||
assert_equals(document.getElementById("target").offsetTop, 0);
|
||||
}, "Make sure that we're sized by the right ancestor");
|
||||
</script>
|
|
@ -32,13 +32,7 @@
|
|||
margin-top: 0em;
|
||||
}
|
||||
|
||||
div#X
|
||||
{
|
||||
float: left;
|
||||
margin-left: 50%;
|
||||
}
|
||||
|
||||
div#after-X
|
||||
div
|
||||
{
|
||||
background-color: aqua;
|
||||
width: 25%;
|
||||
|
@ -58,9 +52,7 @@
|
|||
the others should all be<br />
|
||||
<em>aligned on the left</em> of the window.</p>
|
||||
|
||||
<div id="X">X</div>
|
||||
|
||||
<div id="after-X"><br />The X on the previous line should be centered across the window.</div>
|
||||
<div><span style="padding-left:50%">X The first X in this sentence should be indented to the center of this block.</span></div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -27,7 +27,7 @@
|
|||
<em>aligned on the left</em> of the window.
|
||||
</p>
|
||||
<div>
|
||||
X The X on the previous line should be centered across the window.
|
||||
X The first X in this sentence should be indented to the center of this block.
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
<title>text-indent test</title>
|
||||
<style type="text/css">
|
||||
div { width: 500px; }
|
||||
p { width: 300px; text-indent: 50px; }
|
||||
p { width: 300px; text-indent: 30px; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
@ -11,4 +11,4 @@ p { width: 300px; text-indent: 50px; }
|
|||
</div>
|
||||
|
||||
|
||||
</body></html>
|
||||
</body></html>
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Animations: getComputedValue().animationDelay</title>
|
||||
<link rel="help" href="https://drafts.csswg.org/css-animations/#propdef-animation-delay">
|
||||
<meta name="assert" content="animation-delay converts to seconds.">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/css/support/computed-testcommon.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="target"></div>
|
||||
<script>
|
||||
test_computed_value("animation-delay", "-500ms", "-0.5s");
|
||||
test_computed_value("animation-delay", "calc(2 * 3s)", "6s");
|
||||
test_computed_value("animation-delay", "20s, 10s");
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,20 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Animations: getComputedValue().animationDuration</title>
|
||||
<link rel="help" href="https://drafts.csswg.org/css-animations/#propdef-animation-duration">
|
||||
<meta name="assert" content="animation-duration converts to seconds.">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/css/support/computed-testcommon.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="target"></div>
|
||||
<script>
|
||||
test_computed_value("animation-duration", "500ms", "0.5s");
|
||||
test_computed_value("animation-duration", "calc(2 * 3s)", "6s");
|
||||
test_computed_value("animation-duration", "20s, 10s");
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,15 @@
|
|||
<!DOCTYPE html>
|
||||
<meta charset="utf-8">
|
||||
<title>Tests that we consider main axis sizes to be definite in column flexboxes
|
||||
when the height is an explicit definite height, even if the container does
|
||||
not have a definite main axis size.</title>
|
||||
<link rel="author" title="Google Inc." href="https://www.google.com/">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-flexbox/#definite-sizes">
|
||||
<link rel="match" href="../reference/ref-filled-green-100px-square.xht" />
|
||||
|
||||
<p>Test passes if there is a filled green square and <strong>no red</strong>.</p>
|
||||
<div style="display:flex; flex-direction:column;">
|
||||
<div style="width:100px; height:100px; background:red;">
|
||||
<div style="height:100%; background:green;"></div>
|
||||
</div>
|
||||
</div>
|
|
@ -0,0 +1,39 @@
|
|||
<!DOCTYPE html>
|
||||
<title>CSS Reference</title>
|
||||
|
||||
<style>
|
||||
@font-face {
|
||||
font-family: TestWeight;
|
||||
src: url(resources/csstest-weights-100-kerned.ttf);
|
||||
font-weight: 200 300;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestWeight;
|
||||
src: url(resources/csstest-weights-900-kerned.ttf);
|
||||
font-weight: 300 400;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStyle;
|
||||
src: url(resources/csstest-weights-100-kerned.ttf);
|
||||
font-style: oblique 20deg 30deg;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStyle;
|
||||
src: url(resources/csstest-weights-900-kerned.ttf);
|
||||
font-style: oblique 30deg 40deg;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStretch;
|
||||
src: url(resources/csstest-weights-100-kerned.ttf);
|
||||
font-stretch: 110% 120%;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStretch;
|
||||
src: url(resources/csstest-weights-900-kerned.ttf);
|
||||
font-stretch: 120% 130%;
|
||||
}
|
||||
</style>
|
||||
|
||||
<p style="font-family: TestWeight; font-weight: 250;">A</p>
|
||||
<p style="font-family: TestStyle; font-style: oblique 25deg;">A</p>
|
||||
<p style="font-family: TestStretch; font-stretch: 115%;">A</p>
|
|
@ -0,0 +1,56 @@
|
|||
<!DOCTYPE html>
|
||||
<html class="reftest-wait">
|
||||
<title>CSS Test: Matching @font-face font-weight, font-style, and font-stretch descriptors with reversed ranges</title>
|
||||
<link rel="help" href="https://drafts.csswg.org/css-fonts-4/#font-prop-desc">
|
||||
<link rel="match" href="font-descriptor-range-reversed-ref.html">
|
||||
|
||||
<!-- Using csstest-weights-{100,900}-kerned.ttf just as two convenient
|
||||
different fonts here with different "A" glyphs -->
|
||||
|
||||
<style>
|
||||
@font-face {
|
||||
font-family: TestWeight;
|
||||
src: url(resources/csstest-weights-100-kerned.ttf);
|
||||
font-weight: 300 200;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestWeight;
|
||||
src: url(resources/csstest-weights-900-kerned.ttf);
|
||||
font-weight: 300 400;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStyle;
|
||||
src: url(resources/csstest-weights-100-kerned.ttf);
|
||||
font-style: oblique 30deg 20deg;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStyle;
|
||||
src: url(resources/csstest-weights-900-kerned.ttf);
|
||||
font-style: oblique 30deg 40deg;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStretch;
|
||||
src: url(resources/csstest-weights-100-kerned.ttf);
|
||||
font-stretch: 120% 110%;
|
||||
}
|
||||
@font-face {
|
||||
font-family: TestStretch;
|
||||
src: url(resources/csstest-weights-900-kerned.ttf);
|
||||
font-stretch: 120% 130%;
|
||||
}
|
||||
</style>
|
||||
|
||||
<!-- Matches `font-weight: 300 200;` -->
|
||||
<p style="font-family: TestWeight; font-weight: 250;">A</p>
|
||||
|
||||
<!-- Matches `font-style: oblique 30deg 20deg;` -->
|
||||
<p style="font-family: TestStyle; font-style: oblique 25deg;">A</p>
|
||||
|
||||
<!-- Matches `font-style: oblique 120% 110%;` -->
|
||||
<p style="font-family: TestStretch; font-stretch: 115%;">A</p>
|
||||
|
||||
<script>
|
||||
document.fonts.ready.then(function() {
|
||||
document.documentElement.className = "";
|
||||
});
|
||||
</script>
|
|
@ -2,6 +2,8 @@
|
|||
<link rel="help" href="https://drafts.css-houdini.org/css-properties-values-api/#register-a-custom-property" />
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="./resources/utils.js"></script>
|
||||
<div id=target></div>
|
||||
<script>
|
||||
// Tests for error checking during property registration
|
||||
|
||||
|
@ -45,4 +47,29 @@ test(function(){
|
|||
CSS.registerProperty({name: '--inherit-test-2', syntax: '<length>', initialValue: '0px', inherits: false});
|
||||
assert_throws(new TypeError(), () => CSS.registerProperty({name: '--inherit-test-3', syntax: '<length>', initialValue: '0px'}));
|
||||
}, "registerProperty requires inherits");
|
||||
|
||||
test(function(){
|
||||
try {
|
||||
let name = generate_name();
|
||||
|
||||
target.style.setProperty(name, 'green');
|
||||
target.style.transitionProperty = name;
|
||||
target.style.transitionDuration = '1s';
|
||||
target.style.transitionTimingFunction = 'steps(1, end)';
|
||||
|
||||
assert_equals(getComputedStyle(target).getPropertyValue(name), 'green');
|
||||
|
||||
CSS.registerProperty({
|
||||
name: name,
|
||||
syntax: '<color>',
|
||||
initialValue: 'red',
|
||||
inherits: false
|
||||
});
|
||||
|
||||
assert_equals(getComputedStyle(target).getPropertyValue(name), 'rgb(0, 128, 0)');
|
||||
} finally {
|
||||
target.style = '';
|
||||
}
|
||||
}, 'Registering a property should not cause a transition');
|
||||
|
||||
</script>
|
||||
|
|
|
@ -0,0 +1,19 @@
|
|||
<!DOCTYPE html>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<link rel="help" href="https://drafts.csswg.org/css-scoping/#slots-in-shadow-tree">
|
||||
<div id="host"><span id="slotted">This text should be green.</span></div>
|
||||
<script>
|
||||
const root = host.attachShadow({mode:"open"});
|
||||
root.innerHTML = '<slot name="nomatch" style="color:green"></slot><slot style="color:red"></slot>';
|
||||
|
||||
test(() => {
|
||||
assert_equals(getComputedStyle(slotted).color, "rgb(255, 0, 0)");
|
||||
}, "Initial computed color.");
|
||||
|
||||
test(() => {
|
||||
root.querySelector("slot").removeAttribute("name");
|
||||
assert_equals(getComputedStyle(slotted).color, "rgb(0, 128, 0)");
|
||||
|
||||
}, "Computed color after re-slotting.");
|
||||
</script>
|
|
@ -5,3 +5,4 @@
|
|||
<div>PASS</div>
|
||||
<div>PASS</div>
|
||||
<div style="color:green">PASS</div>
|
||||
<div style="color:green">PASS</div>
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
<div id="host1"><span></span></div>
|
||||
<div id="host2"><span></span></div>
|
||||
<div id="host3"><span></span></div>
|
||||
<div id="host4"><span></span></div>
|
||||
<style>
|
||||
#host3 > span::before { content: "PASS" }
|
||||
</style>
|
||||
|
@ -19,4 +20,12 @@
|
|||
attachShadowWithSlottedStyle(host1, "::slotted(span)::before { content: 'PASS' }");
|
||||
attachShadowWithSlottedStyle(host2, "::slotted(span)::after { content: 'PASS' }");
|
||||
attachShadowWithSlottedStyle(host3, "::slotted(span)::before { content: 'FAIL'; color: green }");
|
||||
attachShadowWithSlottedStyle(host4, `
|
||||
::slotted(span)::before { content: 'PASS'; color: red }
|
||||
::slotted(.foo)::before { color: green }
|
||||
`);
|
||||
onload = function() {
|
||||
host4.offsetTop;
|
||||
host4.firstElementChild.classList.add("foo");
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
<!DOCTYPE html>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Text Test: hanging punctuation is scrollable overflow</title>
|
||||
<link rel="author" title="Florian Rivoal" href="http://florian.rivoal.net/">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-text-3/#hanging-punctuation-property">
|
||||
<meta name="flags" content="">
|
||||
<link rel="match" href="reference/hanging-scrollable-001-ref.html">
|
||||
<meta name="assert" content="Hanging characters that overflow their block container are treated as scrollable overflow. ">
|
||||
<style>
|
||||
div {
|
||||
font-family: monospace;
|
||||
font-size: 50px;
|
||||
hanging-punctuation: last;
|
||||
overflow: hidden;
|
||||
color: red;
|
||||
}
|
||||
span {color: white; }
|
||||
</style>
|
||||
|
||||
<p>Test passes if there is no red below.
|
||||
<table><tr><td><div id="t">X<span>”</span></div></table>
|
||||
<!--
|
||||
The table is to do sizing based on the min content size.
|
||||
A simpler test could be written using `div { width: min-content; }`,
|
||||
but that is not widely supported yet.
|
||||
-->
|
||||
|
||||
<script>
|
||||
document.getElementById("t").scrollLeft=100;
|
||||
</script>
|
|
@ -0,0 +1,6 @@
|
|||
<!DOCTYPE html>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Text Test reference</title>
|
||||
<link rel="author" title="Florian Rivoal" href="http://florian.rivoal.net/">
|
||||
|
||||
<p>Test passes if there is no red below.
|
|
@ -19,7 +19,7 @@
|
|||
<body>
|
||||
<p>Test passes if there is no red visible on the page.</p>
|
||||
<div id="parent">
|
||||
<div>X</div>
|
||||
<div style="padding-left: 100px">X</div>
|
||||
</div>
|
||||
<p>Test passes if the following two text blocks look same in terms of margin-left and text-indent respectively.</p>
|
||||
<div>
|
||||
|
@ -31,4 +31,4 @@
|
|||
ABCDEFGHIJKLMNOPQRSTUVWXYZ ABCDEFGHIJKLMNOPQRSTUVWXYZ ABCDEFGHIJKLMNOPQRSTUVWXYZ ABCDEFGHIJKLMNOPQRSTUVWXYZ ABCDEFGHIJKLMNOPQRSTUVWXYZ ABCDEFGHIJKLMNOPQRSTUVWXYZ.<br />
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
|
|
@ -4,11 +4,11 @@
|
|||
<title>CSS Text Test reference</title>
|
||||
<link rel="author" title="Florian Rivoal" href="http://florian.rivoal.net/">
|
||||
<style>
|
||||
body { background: white; }
|
||||
div {
|
||||
padding-left: 50px;
|
||||
font-family: Ahem;
|
||||
}
|
||||
</style>
|
||||
|
||||
<p>Test passes if there is a single black X below and no red.
|
||||
<div>X</div>
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
#reference1
|
||||
{
|
||||
color: red;
|
||||
left: 0;
|
||||
left: 100px; /* see comments for #test1 below */
|
||||
position: absolute;
|
||||
top: 0;
|
||||
z-index: -1;
|
||||
|
@ -29,8 +29,8 @@
|
|||
}
|
||||
#test1
|
||||
{
|
||||
text-indent: 50%;
|
||||
margin-left: -50%;
|
||||
margin-left: -50%; /* -50% * 400px = -200px which makes the inline-size of this block 600px */
|
||||
text-indent: 50%; /* 50% * 600px = 300px (which is 100px from the start of #parent due to the negative margin) */
|
||||
}
|
||||
#test2
|
||||
{
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<link rel="match" href="reference/text-indent-percentage-002-ref.html">
|
||||
<meta name="assert" content="Percentages in text-indent refer to width of the element's content box">
|
||||
<style>
|
||||
section { position: absolute; }
|
||||
body { background: white; }
|
||||
section, div {
|
||||
border-right: 10px solid white;
|
||||
margin-right: 10px;
|
||||
|
@ -19,10 +19,7 @@ div {
|
|||
box-sizing: border-box;
|
||||
width: 120px;
|
||||
}
|
||||
.test div { text-indent: 50%; color: red; }
|
||||
.ref div { text-indent: 50px; }
|
||||
.test div { text-indent: 50%; }
|
||||
</style>
|
||||
|
||||
<p>Test passes if there is a single black X below and no red.
|
||||
<section class=test><div>X</div></section>
|
||||
<section class=ref><div>X</div></section>
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<link rel="match" href="reference/text-indent-percentage-002-ref.html">
|
||||
<meta name="assert" content="Percentages in text-indent refer to width of the element's content box">
|
||||
<style>
|
||||
section { position: absolute; }
|
||||
body { background: white; }
|
||||
section, div {
|
||||
border-right: 10px solid white;
|
||||
margin-right: 10px;
|
||||
|
@ -19,10 +19,7 @@ div {
|
|||
box-sizing: border-box;
|
||||
width: 120px;
|
||||
}
|
||||
.test div { text-indent: 50%; color: red; overflow: hidden; } /* overflow:hidden should not make any difference, but it does in some browsers */
|
||||
.ref div { text-indent: 50px; }
|
||||
.test div { text-indent: 50%; overflow: hidden; } /* overflow:hidden should not make any difference, but it does in some browsers */
|
||||
</style>
|
||||
|
||||
<p>Test passes if there is a single black X below and no red.
|
||||
<section class=test><div>X</div></section>
|
||||
<section class=ref><div>X</div></section>
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<link rel="match" href="reference/text-indent-percentage-002-ref.html">
|
||||
<meta name="assert" content="Percentages in text-indent refer to width of the element's content box, when used in a calc expression">
|
||||
<style>
|
||||
section { position: absolute; }
|
||||
body { background: white; }
|
||||
section, div {
|
||||
border-right: 10px solid white;
|
||||
margin-right: 10px;
|
||||
|
@ -19,10 +19,7 @@ div {
|
|||
box-sizing: border-box;
|
||||
width: 120px;
|
||||
}
|
||||
.test div { text-indent: calc(25px + 25%); color: red; }
|
||||
.ref div { text-indent: 50px; }
|
||||
.test div { text-indent: calc(25px + 25%); }
|
||||
</style>
|
||||
|
||||
<p>Test passes if there is a single black X below and no red.
|
||||
<section class=test><div>X</div></section>
|
||||
<section class=ref><div>X</div></section>
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>CSS Text test: hanging trailing spaces with white-space:pre-wrap</title>
|
||||
<link rel="author" title="Florian Rivoal" href="https://florian.rivoal.net/">
|
||||
|
||||
<style>
|
||||
div {
|
||||
white-space: pre-wrap;
|
||||
font-family: monospace;
|
||||
}
|
||||
</style>
|
||||
|
||||
<p>This test passes if the 4 letters below are verticaly aligned.
|
||||
|
||||
<div> P</div>
|
||||
<div> A</div>
|
||||
<div> S</div>
|
||||
<div> S</div>
|
|
@ -0,0 +1,8 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>CSS test Reference</title>
|
||||
<link rel="author" title="Florian Rivoal" href="https://florian.rivoal.net/">
|
||||
|
||||
<p>This test passes if you can see the word PASS below, without any extra spaces.
|
||||
|
||||
<div>PASS</div>
|
|
@ -0,0 +1,31 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>CSS Text test: hanging trailing spaces with white-space:pre-wrap</title>
|
||||
<link rel="author" title="Florian Rivoal" href="https://florian.rivoal.net/">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-text-3/#white-space-phase-2">
|
||||
<link rel="match" href="reference/white-space-pre-wrap-trailing-spaces-001-ref.html">
|
||||
<meta name="assert" content="Preserved white space at the end of the line is hanged when white-space is pre-wrap.">
|
||||
|
||||
<style>
|
||||
div {
|
||||
white-space: pre-wrap;
|
||||
font-family: monospace;
|
||||
}
|
||||
div:nth-of-type(1),
|
||||
div:nth-of-type(2) {
|
||||
width: 5ch;
|
||||
text-align: right;
|
||||
}
|
||||
div:nth-of-type(3),
|
||||
div:nth-of-type(4) {
|
||||
width: 9ch;
|
||||
text-align: center;
|
||||
}
|
||||
</style>
|
||||
|
||||
<p>This test passes if the 4 letters below are verticaly aligned.
|
||||
|
||||
<div>P</div>
|
||||
<div>A </div>
|
||||
<div>S</div>
|
||||
<div>S </div>
|
|
@ -0,0 +1,20 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>CSS Text test: intrinsic maximum sizing of trailing spaces with white-space:pre-wrap</title>
|
||||
<link rel="author" title="Florian Rivoal" href="https://florian.rivoal.net/">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-text-3/#white-space-phase-2">
|
||||
<link rel="match" href="reference/white-space-pre-wrap-trailing-spaces-002-ref.html">
|
||||
<meta name="assert" content="Preserved white space at the end of the line is hanged when white-space is pre-wrap, and therefore does not count when computing the (maximum) intrinsic size.">
|
||||
|
||||
<style>
|
||||
span {
|
||||
display: inline-block;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
#s1 { text-align: right; }
|
||||
#s2 { text-align: center; }
|
||||
</style>
|
||||
|
||||
<p>This test passes if you can see the word PASS below, without any extra spaces.
|
||||
|
||||
<div><span id=s1>P </span><span id=s2>A </span>SS</div>
|
|
@ -0,0 +1,27 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>CSS Text test: intrinsic minimum sizing of trailing spaces with white-space:pre-wrap</title>
|
||||
<link rel="author" title="Florian Rivoal" href="https://florian.rivoal.net/">
|
||||
<link rel="help" href="https://drafts.csswg.org/css-text-3/#white-space-phase-2">
|
||||
<link rel="match" href="reference/white-space-pre-wrap-trailing-spaces-002-ref.html">
|
||||
<meta name="assert" content="Preserved white space at the end of the line is hanged when white-space is pre-wrap, and therefore does not count when computing the (minimum) intrinsic size.">
|
||||
|
||||
<style>
|
||||
/* Make the table invisible */
|
||||
table { border-collapse: collapse; }
|
||||
td { padding: 0; }
|
||||
|
||||
/* for the content of the table to be sized to their mininum intrinsic size */
|
||||
table { width: 0; }
|
||||
|
||||
span {
|
||||
display: inline-block;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
#s1 { text-align: right; }
|
||||
#s2 { text-align: center; }
|
||||
</style>
|
||||
|
||||
<p>This test passes if you can see the word PASS below, without any extra spaces.
|
||||
|
||||
<table><td><span id=s1>P </span><td><span id=s2>A </span><td>SS</table>
|
|
@ -11,57 +11,56 @@
|
|||
</head>
|
||||
<body>
|
||||
<script>
|
||||
// none -> none
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'none',
|
||||
to: 'none',
|
||||
},
|
||||
[{ at: 0.25, expect: 'none' }]
|
||||
[{ at: 0.25, expect: 'none' }],
|
||||
'none -> none'
|
||||
);
|
||||
|
||||
// none -> something
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'none',
|
||||
to: 'translate(200px) rotate(720deg)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'translate(50px) rotate(180deg)' }]
|
||||
[{ at: 0.25, expect: 'translate(50px) rotate(180deg)' }],
|
||||
'none -> something'
|
||||
);
|
||||
|
||||
// something -> none
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'translate(200px) rotate(720deg)',
|
||||
to: 'none',
|
||||
},
|
||||
[{ at: 0.25, expect: 'translate(150px) rotate(540deg)' }]
|
||||
[{ at: 0.25, expect: 'translate(150px) rotate(540deg)' }],
|
||||
'something -> none'
|
||||
);
|
||||
|
||||
// Mismatched lengths (from is shorter), common part matches
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'translate(100px)',
|
||||
to: 'translate(200px) rotate(720deg)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'translate(125px) rotate(180deg)' }]
|
||||
[{ at: 0.25, expect: 'translate(125px) rotate(180deg)' }],
|
||||
'Mismatched lengths (from is shorter), common part matches'
|
||||
);
|
||||
|
||||
// Mismatched lengths (to is shorter), common part matches
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'translate(100px) rotate(720deg)',
|
||||
to: 'translate(200px)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'translate(125px) rotate(540deg)' }]
|
||||
[{ at: 0.25, expect: 'translate(125px) rotate(540deg)' }],
|
||||
'Mismatched lengths (to is shorter), common part matches'
|
||||
);
|
||||
|
||||
// Perfect match
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
|
@ -73,47 +72,68 @@ test_interpolation(
|
|||
at: 0.25,
|
||||
expect: 'scale(2.25) rotate(540deg) translate(125px) matrix(1, 0, 0, 1, 75, 50) skew(180deg)',
|
||||
},
|
||||
]
|
||||
],
|
||||
'Perfect match'
|
||||
);
|
||||
|
||||
// Matches on primitives
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'translateX(100px) scaleX(3) translate(500px) scale(2)',
|
||||
to: 'translateY(200px) scale(5) translateX(100px) scaleY(3)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'translate(75px, 50px) scale(3.5, 2) translate(400px, 0px) scale(1.75, 2.25)' }]
|
||||
[{ at: 0.25, expect: 'translate(75px, 50px) scale(3.5, 2) translate(400px, 0px) scale(1.75, 2.25)' }],
|
||||
'Matches on primitives'
|
||||
);
|
||||
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'rotateX(90deg) translateX(100px)',
|
||||
to: 'rotate3d(50, 0, 0, 180deg) translateY(200px)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'rotateX(112.5deg) translate(75px, 50px)' }],
|
||||
'Match on rotation vector'
|
||||
);
|
||||
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'rotateX(90deg) translateX(100px)',
|
||||
to: 'rotateY(0deg) translateY(200px)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'rotateX(67.5deg) translate(75px, 50px)' }],
|
||||
'Match on rotation due to 0deg angle'
|
||||
);
|
||||
|
||||
// Common prefix
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'rotate(0deg) translate(100px)',
|
||||
to: 'rotate(720deg) scale(2) translate(200px)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'rotate(180deg) matrix(1.25, 0, 0, 1.25, 175, 0)' }]
|
||||
[{ at: 0.25, expect: 'rotate(180deg) matrix(1.25, 0, 0, 1.25, 175, 0)' }],
|
||||
'Common prefix'
|
||||
);
|
||||
|
||||
// Complete mismatch (except length)
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'scale(2) rotate(0deg) translate(100px)',
|
||||
to: 'rotate(720deg) scale(2) translate(200px)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'matrix(2, 0, 0, 2, 250, 0)' }]
|
||||
[{ at: 0.25, expect: 'matrix(2, 0, 0, 2, 250, 0)' }],
|
||||
'Complete mismatch (except length)'
|
||||
);
|
||||
|
||||
// Complete mismatch including length
|
||||
test_interpolation(
|
||||
{
|
||||
property: 'transform',
|
||||
from: 'scale(2) rotate(0deg)',
|
||||
to: 'rotate(720deg) scale(2) translate(200px)',
|
||||
},
|
||||
[{ at: 0.25, expect: 'matrix(2, 0, 0, 2, 100, 0)' }]
|
||||
[{ at: 0.25, expect: 'matrix(2, 0, 0, 2, 100, 0)' }],
|
||||
'Complete mismatch including length'
|
||||
);
|
||||
</script>
|
||||
</body>
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
'use strict';
|
||||
function test_interpolation(settings, expectations) {
|
||||
function test_interpolation(settings, expectations, name) {
|
||||
var message_prefix = name ? name + ': ' : '';
|
||||
// Returns a timing function that at 0.5 evaluates to progress.
|
||||
function timingFunction(progress) {
|
||||
if (progress === 0)
|
||||
|
@ -13,7 +14,7 @@ function test_interpolation(settings, expectations) {
|
|||
test(function(){
|
||||
assert_true(CSS.supports(settings.property, settings.from), 'Value "' + settings.from + '" is supported by ' + settings.property);
|
||||
assert_true(CSS.supports(settings.property, settings.to), 'Value "' + settings.to + '" is supported by ' + settings.property);
|
||||
}, '"' + settings.from + '" and "' + settings.to + '" are valid ' + settings.property + ' values');
|
||||
}, message_prefix + '"' + settings.from + '" and "' + settings.to + '" are valid ' + settings.property + ' values');
|
||||
|
||||
for (var i = 0; i < expectations.length; ++i) {
|
||||
var progress = expectations[i].at;
|
||||
|
@ -49,6 +50,6 @@ function test_interpolation(settings, expectations) {
|
|||
reference.style = '';
|
||||
|
||||
assert_equals(getComputedStyle(target)[settings.property], getComputedStyle(reference)[settings.property]);
|
||||
}, 'Animation between "' + settings.from + '" and "' + settings.to + '" at progress ' + progress);
|
||||
}, message_prefix + 'Animation between "' + settings.from + '" and "' + settings.to + '" at progress ' + progress);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>CSS Transitions: getComputedValue().transitionDuration</title>
|
||||
<link rel="help" href="https://drafts.csswg.org/css-transitions/#propdef-transition-duration">
|
||||
<meta name="assert" content="transition-duration converts to seconds.">
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/css/support/computed-testcommon.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="target"></div>
|
||||
<script>
|
||||
test_computed_value("transition-duration", "500ms", "0.5s");
|
||||
test_computed_value("transition-duration", "calc(2 * 3s)", "6s");
|
||||
test_computed_value("transition-duration", "20s, 10s");
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,22 @@
|
|||
<!DOCTYPE html>
|
||||
<title>When non-empty placeholder becomes empty, :placeholder-shown test</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<link rel="help" href="https://drafts.csswg.org/selectors-4/#placeholder">
|
||||
<input id="myinput" type="text" placeholder="FAIL">
|
||||
<textarea id="mytextarea" placeholder="FAIL"></textarea>
|
||||
<script>
|
||||
test(() => {
|
||||
const input = document.querySelector("input");
|
||||
input.placeholder = "";
|
||||
input.value = "NO RED";
|
||||
assert_false(input.matches(":placeholder-shown"));
|
||||
}, "input:placeholder-shown should not be matched");
|
||||
|
||||
test(() => {
|
||||
const textarea = document.querySelector("textarea");
|
||||
textarea.placeholder = "";
|
||||
textarea.value = "No RED";
|
||||
assert_false(textarea.matches(":placeholder-shown"));
|
||||
}, "textarea:placeholder-shown should not be matched");
|
||||
</script>
|
|
@ -9,6 +9,10 @@ these are especially using for [visual][] tests which need to be manually
|
|||
judged and following common patterns makes it easier to correctly tell if a
|
||||
given test passed or not.
|
||||
|
||||
## Make tests self-describing
|
||||
|
||||
Tests should make it obvious when they pass and when they fail. It shouldn't be necessary to consult the specification to figure out whether a test has passed of failed.
|
||||
|
||||
## Indicating success
|
||||
|
||||
Success is largely indicated by the color green; typically in one of
|
||||
|
|
|
@ -758,6 +758,9 @@ asserts that the class string of `object` as returned in
|
|||
### `assert_own_property(object, property_name, description)`
|
||||
assert that object has own property `property_name`
|
||||
|
||||
### `assert_not_own_property(object, property_name, description)`
|
||||
assert that object does not have an own property named `property_name`
|
||||
|
||||
### `assert_inherits(object, property_name, description)`
|
||||
assert that object does not have an own property named
|
||||
`property_name` but that `property_name` is present in the prototype
|
||||
|
|
|
@ -0,0 +1,132 @@
|
|||
<!doctype html>
|
||||
<meta charset="utf8">
|
||||
<title>Events must dispatch on disabled elements</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="/resources/testdriver.js"></script>
|
||||
<script src="/resources/testdriver-vendor.js"></script>
|
||||
<body>
|
||||
<script>
|
||||
// HTML elements that can be disabled
|
||||
const formElements = ["button", "fieldset", "input", "select", "textarea"];
|
||||
|
||||
test(() => {
|
||||
for (const localName of formElements) {
|
||||
const elem = document.createElement(localName);
|
||||
elem.disabled = true;
|
||||
// pass becomes true if the event is called and it's the right type.
|
||||
let pass = false;
|
||||
const listener = ({ type }) => {
|
||||
pass = type === "click";
|
||||
};
|
||||
elem.addEventListener("click", listener, { once: true });
|
||||
elem.dispatchEvent(new Event("click"));
|
||||
assert_true(
|
||||
pass,
|
||||
`Untrusted "click" Event didn't dispatch on ${elem.constructor.name}.`
|
||||
);
|
||||
}
|
||||
}, "Can dispatch untrusted 'click' Events at disabled HTML elements.");
|
||||
|
||||
test(() => {
|
||||
for (const localName of formElements) {
|
||||
const elem = document.createElement(localName);
|
||||
elem.disabled = true;
|
||||
// pass becomes true if the event is called and it's the right type.
|
||||
let pass = false;
|
||||
const listener = ({ type }) => {
|
||||
pass = type === "pass";
|
||||
};
|
||||
elem.addEventListener("pass", listener, { once: true });
|
||||
elem.dispatchEvent(new Event("pass"));
|
||||
assert_true(
|
||||
pass,
|
||||
`Untrusted "pass" Event didn't dispatch on ${elem.constructor.name}`
|
||||
);
|
||||
}
|
||||
}, "Can dispatch untrusted Events at disabled HTML elements.");
|
||||
|
||||
test(() => {
|
||||
for (const localName of formElements) {
|
||||
const elem = document.createElement(localName);
|
||||
elem.disabled = true;
|
||||
// pass becomes true if the event is called and it's the right type.
|
||||
let pass = false;
|
||||
const listener = ({ type }) => {
|
||||
pass = type === "custom-pass";
|
||||
};
|
||||
elem.addEventListener("custom-pass", listener, { once: true });
|
||||
elem.dispatchEvent(new CustomEvent("custom-pass"));
|
||||
assert_true(
|
||||
pass,
|
||||
`CustomEvent "custom-pass" didn't dispatch on ${elem.constructor.name}`
|
||||
);
|
||||
}
|
||||
}, "Can dispatch CustomEvents at disabled HTML elements.");
|
||||
|
||||
test(() => {
|
||||
for (const localName of formElements) {
|
||||
const elem = document.createElement(localName);
|
||||
|
||||
// Element is disabled... so this click() MUST NOT fire an event.
|
||||
elem.disabled = true;
|
||||
let pass = true;
|
||||
elem.onclick = e => {
|
||||
pass = false;
|
||||
};
|
||||
elem.click();
|
||||
assert_true(
|
||||
pass,
|
||||
`.click() must not dispatch "click" event on disabled ${
|
||||
elem.constructor.name
|
||||
}.`
|
||||
);
|
||||
|
||||
// Element is (re)enabled... so this click() fires an event.
|
||||
elem.disabled = false;
|
||||
pass = false;
|
||||
elem.onclick = e => {
|
||||
pass = true;
|
||||
};
|
||||
elem.click();
|
||||
assert_true(
|
||||
pass,
|
||||
`.click() must dispatch "click" event on enabled ${
|
||||
elem.constructor.name
|
||||
}.`
|
||||
);
|
||||
}
|
||||
}, "Calling click() on disabled elements must not dispatch events.");
|
||||
|
||||
promise_test(async () => {
|
||||
for (const localName of formElements) {
|
||||
const elem = document.createElement(localName);
|
||||
elem.disabled = true;
|
||||
document.body.appendChild(elem);
|
||||
|
||||
// Element is disabled, so clicking must not fire events
|
||||
let pass = true;
|
||||
elem.onclick = e => {
|
||||
pass = false;
|
||||
};
|
||||
await test_driver.click(elem); // triggers "onclick"
|
||||
assert_true(
|
||||
pass,
|
||||
`${elem.constructor.name} is disabled, so onclick must not fire.`
|
||||
);
|
||||
|
||||
// Element is (re)enabled... so this click() will fire an event.
|
||||
pass = false;
|
||||
elem.disabled = false;
|
||||
elem.onclick = () => {
|
||||
pass = true;
|
||||
};
|
||||
await test_driver.click(elem); // triggers "onclick"
|
||||
assert_true(
|
||||
pass,
|
||||
`${elem.constructor.name} is enabled, so onclick must fire.`
|
||||
);
|
||||
elem.remove();
|
||||
}
|
||||
}, "Real clicks on disabled elements must not dispatch events.");
|
||||
</script>
|
|
@ -0,0 +1,13 @@
|
|||
<!doctype html>
|
||||
<title>Ref: vertical-scroll test for scrollbar</title>
|
||||
<iframe src="/feature-policy/experimental-features/resources/vertical-scroll-scrollable-content.html"></iframe>
|
||||
<script>
|
||||
let iframe = document.querySelector("iframe");
|
||||
let overflow_y = "visible";
|
||||
if (window.location.search.indexOf("no-vertical-scrollbar") !== -1)
|
||||
overflow_y = "hidden"
|
||||
iframe.addEventListener("load", () => {
|
||||
iframe.contentDocument.body.style.overflowY = overflow_y;
|
||||
});
|
||||
</script>
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
<!doctype html>
|
||||
<title>vertical-scroll test for vertical scrollbar</title>
|
||||
<link rel="match" href="/feature-policy/experimental-features/resources/vertical-scroll-scrollbar-ref.html?no-vertical-scrollbar">
|
||||
<iframe src="/feature-policy/experimental-features/resources/vertical-scroll-scrollable-content.html" allow="vertical-scroll 'none'"></iframe>
|
|
@ -1,11 +1,15 @@
|
|||
// META: global=window,worker
|
||||
|
||||
promise_test(async t => {
|
||||
const response = await fetch("../../../xhr/resources/headers-basic.asis");
|
||||
assert_equals(response.headers.get("foo-test"), "1, 2, 3");
|
||||
}, "response.headers.get('foo-test')");
|
||||
|
||||
promise_test(async t => {
|
||||
const response = await fetch("../../../xhr/resources/headers-www-authenticate.asis");
|
||||
assert_equals(response.headers.get("www-authenticate"), "1, 2, 3, 4");
|
||||
}, "response.headers.get('www-authenticate')");
|
||||
[
|
||||
["content-length", "0", "header-content-length"],
|
||||
["content-length", "0, 0", "header-content-length-twice"],
|
||||
["double-trouble", ", ", "headers-double-empty"],
|
||||
["foo-test", "1, 2, 3", "headers-basic"],
|
||||
["heya", ", \u000B\u000C, 1, , , 2", "headers-some-are-empty"],
|
||||
["www-authenticate", "1, 2, 3, 4", "headers-www-authenticate"],
|
||||
].forEach(testValues => {
|
||||
promise_test(async t => {
|
||||
const response = await fetch("../../../xhr/resources/" + testValues[2] + ".asis");
|
||||
assert_equals(response.headers.get(testValues[0]), testValues[1]);
|
||||
}, "response.headers.get('" + testValues[0] + "') expects " + testValues[1]);
|
||||
});
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
<script src=/resources/testharness.js></script>
|
||||
<script src=/resources/testharnessreport.js></script>
|
||||
<div id=log></div>
|
||||
<script>
|
||||
var fails = ["first", "uppercase"],
|
||||
passes = ["last", "quoted", "quoted-single", "no-x"]
|
||||
|
||||
fails.forEach(function(urlpart) {
|
||||
async_test(function(t) {
|
||||
var script = document.createElement("script")
|
||||
script.onerror = t.step_func_done(function(){})
|
||||
script.onload = t.unreached_func("Unexpected load event")
|
||||
script.src = "resources/nosniff-" + urlpart + ".asis"
|
||||
document.body.appendChild(script)
|
||||
}, "URL query: " + urlpart)
|
||||
})
|
||||
|
||||
passes.forEach(function(urlpart) {
|
||||
async_test(function(t) {
|
||||
var script = document.createElement("script")
|
||||
script.onerror = t.unreached_func("Unexpected error event")
|
||||
script.onload = t.step_func_done(function(){})
|
||||
script.src = "resources/nosniff-" + urlpart + ".asis"
|
||||
document.body.appendChild(script)
|
||||
}, "URL query: " + urlpart)
|
||||
})
|
||||
|
||||
</script>
|
|
@ -0,0 +1,24 @@
|
|||
promise_test(() => fetch("resources/x-content-type-options.json").then(res => res.json()).then(runTests), "Loading JSON…");
|
||||
|
||||
function runTests(allTestData) {
|
||||
for (let i = 0; i < allTestData.length; i++) {
|
||||
const testData = allTestData[i],
|
||||
input = encodeURIComponent(testData.input);
|
||||
async_test(t => {
|
||||
const script = document.createElement("script");
|
||||
t.add_cleanup(() => script.remove());
|
||||
// A <script> element loading a classic script does not care about the MIME type, unless
|
||||
// X-Content-Type-Options: nosniff is specified, in which case a JavaScript MIME type is
|
||||
// enforced, which x/x is not.
|
||||
if (testData.nosniff) {
|
||||
script.onerror = t.step_func_done();
|
||||
script.onload = t.unreached_func("Script should not have loaded");
|
||||
} else {
|
||||
script.onerror = t.unreached_func("Script should have loaded");
|
||||
script.onload = t.step_func_done();
|
||||
}
|
||||
script.src = "resources/nosniff.py?nosniff=" + input;
|
||||
document.body.appendChild(script);
|
||||
}, input);
|
||||
}
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
HTTP/1.1 200 YOU HAVE NO POWER HERE
|
||||
Content-Length: 22
|
||||
Content-Type: x/x
|
||||
X-Content-Type-options: nosniff
|
||||
X-Content-Type-Options: no
|
||||
|
||||
// nothing to see here
|
|
@ -1,7 +0,0 @@
|
|||
HTTP/1.1 200 YOU HAVE NO POWER HERE
|
||||
Content-Length: 22
|
||||
Content-Type: x/x
|
||||
X-Content-Type-Options: no
|
||||
X-Content-Type-options: nosniff
|
||||
|
||||
// nothing to see here
|
|
@ -1,6 +0,0 @@
|
|||
HTTP/1.1 200 YOU HAVE NO POWER HERE
|
||||
Content-Length: 22
|
||||
Content-Type: x/x
|
||||
Content-Type-Options: nosniff
|
||||
|
||||
// nothing to see here
|
|
@ -1,6 +0,0 @@
|
|||
HTTP/1.1 200 YOU HAVE NO POWER HERE
|
||||
Content-Length: 22
|
||||
Content-Type: x/x
|
||||
X-Content-Type-Options: 'NosniFF'
|
||||
|
||||
// nothing to see here
|
|
@ -1,6 +0,0 @@
|
|||
HTTP/1.1 200 YOU HAVE NO POWER HERE
|
||||
Content-Length: 22
|
||||
Content-Type: x/x
|
||||
X-Content-Type-Options: "nosniFF"
|
||||
|
||||
// nothing to see here
|
|
@ -1,6 +0,0 @@
|
|||
HTTP/1.1 200 YOU HAVE NO POWER HERE
|
||||
Content-Length: 22
|
||||
Content-Type: x/x
|
||||
X-Content-Type-Options: NOSNIFF
|
||||
|
||||
// nothing to see here
|
|
@ -0,0 +1,10 @@
|
|||
def main(request, response):
|
||||
response.add_required_headers = False
|
||||
output = "HTTP/1.1 220 YOU HAVE NO POWER HERE\r\n"
|
||||
output += "Content-Length: 22\r\n"
|
||||
output += "Content-Type: x/x\r\n"
|
||||
output += request.GET.first("nosniff") + "\r\n"
|
||||
output += "\r\n"
|
||||
output += "// nothing to see here"
|
||||
response.writer.write(output)
|
||||
response.close_connection = True
|
|
@ -0,0 +1,58 @@
|
|||
[
|
||||
{
|
||||
"input": "X-Content-Type-Options: NOSNIFF",
|
||||
"nosniff": true
|
||||
},
|
||||
{
|
||||
"input": "x-content-type-OPTIONS: nosniff",
|
||||
"nosniff": true
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: nosniff,,@#$#%%&^&^*()()11!",
|
||||
"nosniff": true
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: @#$#%%&^&^*()()11!,nosniff",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: nosniff\r\nX-Content-Type-Options: no",
|
||||
"nosniff": true
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: no\r\nX-Content-Type-Options: nosniff",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options:\r\nX-Content-Type-Options: nosniff",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: ,nosniff",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: nosniff\u000C",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: nosniff\u000B",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: nosniff\u000B,nosniff",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: 'NosniFF'",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "X-Content-Type-Options: \"nosniFF\"",
|
||||
"nosniff": false
|
||||
},
|
||||
{
|
||||
"input": "Content-Type-Options: nosniff",
|
||||
"nosniff": false
|
||||
}
|
||||
]
|
|
@ -0,0 +1,86 @@
|
|||
test(() => {
|
||||
const frame = document.body.appendChild(document.createElement("iframe")),
|
||||
win = frame.contentWindow,
|
||||
loc = win.location;
|
||||
frame.remove();
|
||||
assert_equals(win.location, loc);
|
||||
}, "Window and Location are 1:1 after browsing context removal");
|
||||
|
||||
function bcLessLocation() {
|
||||
const frame = document.body.appendChild(document.createElement("iframe")),
|
||||
win = frame.contentWindow,
|
||||
loc = win.location;
|
||||
frame.remove();
|
||||
return loc;
|
||||
}
|
||||
|
||||
[
|
||||
{
|
||||
"property": "href",
|
||||
"expected": "about:blank",
|
||||
"values": ["https://example.com/", "/", "http://test:test/", "test test", "test:test", "chrome:fail"]
|
||||
},
|
||||
{
|
||||
"property": "protocol",
|
||||
"expected": "about:",
|
||||
"values": ["http", "about", "test"]
|
||||
},
|
||||
{
|
||||
"property": "host",
|
||||
"expected": "",
|
||||
"values": ["example.com", "test test", "()"]
|
||||
},
|
||||
{
|
||||
"property": "hostname",
|
||||
"expected": "",
|
||||
"values": ["example.com"]
|
||||
},
|
||||
{
|
||||
"property": "port",
|
||||
"expected": "",
|
||||
"values": ["80", "", "443", "notaport"]
|
||||
},
|
||||
{
|
||||
"property": "pathname",
|
||||
"expected": "blank",
|
||||
"values": ["/", "x"]
|
||||
},
|
||||
{
|
||||
"property": "search",
|
||||
"expected": "",
|
||||
"values": ["test"]
|
||||
},
|
||||
{
|
||||
"property": "hash",
|
||||
"expected": "",
|
||||
"values": ["test", "#"]
|
||||
}
|
||||
].forEach(testSetup => {
|
||||
testSetup.values.forEach(value => {
|
||||
test(() => {
|
||||
const loc = bcLessLocation();
|
||||
loc[testSetup.property] = value;
|
||||
assert_equals(loc[testSetup.property], testSetup.expected);
|
||||
}, "Setting `" + testSetup.property + "` to `" + value + "` of a `Location` object sans browsing context is a no-op");
|
||||
});
|
||||
});
|
||||
|
||||
test(() => {
|
||||
const loc = bcLessLocation();
|
||||
assert_equals(loc.origin, "null");
|
||||
}, "Getting `origin` of a `Location` object sans browsing context should be \"null\"");
|
||||
|
||||
["assign", "replace", "reload"].forEach(method => {
|
||||
["about:blank", "https://example.com/", "/", "http://test:test/", "test test", "test:test", "chrome:fail"].forEach(value => {
|
||||
test(() => {
|
||||
const loc = bcLessLocation();
|
||||
loc[method](value);
|
||||
assert_equals(loc.href, "about:blank");
|
||||
}, "Invoking `" + method + "` with `" + value + "` on a `Location` object sans browsing context is a no-op");
|
||||
});
|
||||
});
|
||||
|
||||
test(() => {
|
||||
const loc = bcLessLocation();
|
||||
assert_array_equals(loc.ancestorOrigins, []);
|
||||
}, "Getting `ancestorOrigins` of a `Location` object sans browsing context should be []");
|
|
@ -22,7 +22,7 @@
|
|||
if(script2) {
|
||||
head.removeChild(script2);
|
||||
}
|
||||
var script3 = createScript('data:text\/javascript, log("Script %233 ran"); createScript(\'\', \'log("Script #4 ran")\')');
|
||||
var script3 = createScript('data:text\/javascript, log("Script %233 ran"); createScript(\'\', \'log("Script %234 ran")\')');
|
||||
if(script3) {
|
||||
head.removeChild(script3);
|
||||
}
|
||||
|
|
|
@ -50,9 +50,16 @@ interface IDBFactory {
|
|||
optional [EnforceRange] unsigned long long version);
|
||||
[NewObject] IDBOpenDBRequest deleteDatabase(DOMString name);
|
||||
|
||||
Promise<sequence<IDBDatabaseInfo>> databases();
|
||||
|
||||
short cmp(any first, any second);
|
||||
};
|
||||
|
||||
dictionary IDBDatabaseInfo {
|
||||
DOMString name;
|
||||
unsigned long long version;
|
||||
};
|
||||
|
||||
[Exposed=(Window,Worker)]
|
||||
interface IDBDatabase : EventTarget {
|
||||
readonly attribute DOMString name;
|
||||
|
|
|
@ -13,6 +13,5 @@ interface Performance : EventTarget {
|
|||
};
|
||||
|
||||
partial interface mixin WindowOrWorkerGlobalScope {
|
||||
[Replaceable]
|
||||
readonly attribute Performance performance;
|
||||
[Replaceable] readonly attribute Performance performance;
|
||||
};
|
||||
|
|
|
@ -59,6 +59,10 @@ TRAILING WHITESPACE, INDENT TABS, CR AT EOL: *.ico
|
|||
TRAILING WHITESPACE, INDENT TABS, CR AT EOL: *.wasm
|
||||
TRAILING WHITESPACE, INDENT TABS, CR AT EOL: *.bmp
|
||||
|
||||
## Whitespace needed for testing
|
||||
|
||||
TRAILING WHITESPACE: xhr/resources/headers-some-are-empty.asis
|
||||
|
||||
## Documentation ##
|
||||
|
||||
W3C-TEST.ORG: README.md
|
||||
|
@ -313,10 +317,10 @@ SET TIMEOUT: resources/testharness.js
|
|||
# setTimeout use in reftests
|
||||
SET TIMEOUT: acid/acid3/test.html
|
||||
|
||||
# Travis
|
||||
# CI configuration
|
||||
WEB-PLATFORM.TEST: .azure-pipelines.yml
|
||||
WEB-PLATFORM.TEST: .travis.yml
|
||||
|
||||
|
||||
# Third party code
|
||||
*: css/tools/apiclient/*
|
||||
*: css/tools/w3ctestlib/*
|
||||
|
|
|
@ -74,7 +74,7 @@
|
|||
}, "Width of scripted elements");
|
||||
|
||||
test(function() {
|
||||
var e = 3;
|
||||
var e = 3.2;
|
||||
for (var i = 0; i <= 3; i++) {
|
||||
assert_approx_equals(getBox("under" + i).height, getBox("under" + i + "base").height + getBox("under" + i + "under").height + e, e, "munder " + i + ": height is determined by the sum of heights of base and script plus some spacing.");
|
||||
assert_approx_equals(getBox("over" + i).height, getBox("over" + i + "base").height + getBox("over" + i + "over").height + e, e, "mover " + i + ": height is determined by the sum of heights of base and script plus some spacing.");
|
||||
|
|
|
@ -71,40 +71,146 @@
|
|||
var stream = await navigator.mediaDevices.getUserMedia(constraints);
|
||||
assert_equals(stream.getTracks()[0].getSettings().groupId,
|
||||
device.groupId);
|
||||
assert_true(device.groupId.length > 0);
|
||||
assert_greater_than(device.groupId.length, 0);
|
||||
}
|
||||
});
|
||||
}, 'groupId is correctly reported by getSettings() for all devices');
|
||||
|
||||
promise_test(t => {
|
||||
return navigator.mediaDevices.getUserMedia({audio: true}).then(stream => {
|
||||
let settings = stream.getAudioTracks()[0].getSettings();
|
||||
assert_equals(typeof(settings.deviceId), "string",
|
||||
"deviceId should exist and it should be a string.");
|
||||
assert_equals(typeof(settings.groupId), "string",
|
||||
"groupId should exist and it should be a string.");
|
||||
assert_equals(typeof(settings.volume), "number",
|
||||
"volume should exist and it should be a number.");
|
||||
assert_true(settings.volume >= 0.0 && settings.volume <= 1.0,
|
||||
"volume should be a number in the range [0.0, 1.0].");
|
||||
assert_equals(typeof(settings.sampleRate), "number",
|
||||
"sampleRate should exist and it should be a number.");
|
||||
assert_true(settings.sampleRate > 0, "sampleRate should be positive.");
|
||||
assert_equals(typeof(settings.sampleSize), "number",
|
||||
"sampleSize should exist and it should be a number.");
|
||||
assert_true(settings.sampleSize > 0, "sampleSize should be positive.");
|
||||
assert_equals(typeof(settings.echoCancellation), "boolean",
|
||||
"echoCancellation should exist and it should be a boolean.");
|
||||
assert_equals(typeof(settings.autoGainControl), "boolean",
|
||||
"autoGainControl should exist and it should be a boolean.");
|
||||
assert_equals(typeof(settings.noiseSuppression), "boolean",
|
||||
"noiseSuppression should exist and it should be a boolean.");
|
||||
assert_equals(typeof(settings.latency), "number",
|
||||
"latency should exist and it should be a number.");
|
||||
assert_true(settings.latency >= 0, "latency should not be negative.");
|
||||
assert_equals(typeof(settings.channelCount), "number",
|
||||
"channelCount should exist and it should be a number.");
|
||||
assert_true(settings.channelCount > 0, "channelCount should be positive.");
|
||||
});
|
||||
}, 'audio properties are reported by getSettings()');
|
||||
async function createAudioStreamAndGetSettings(t) {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({audio: true});
|
||||
t.add_cleanup(() => stream.getAudioTracks()[0].stop());
|
||||
return stream.getAudioTracks()[0].getSettings();
|
||||
}
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.deviceId), "string",
|
||||
"deviceId should exist and it should be a string.");
|
||||
}, 'deviceId is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.groupId), "string",
|
||||
"groupId should exist and it should be a string.");
|
||||
}, 'groupId is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.volume), "number",
|
||||
"volume should exist and it should be a number.");
|
||||
assert_between_inclusive(settings.volume, 0.0, 1.0);
|
||||
}, 'volume is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.sampleRate), "number",
|
||||
"sampleRate should exist and it should be a number.");
|
||||
assert_greater_than(settings.sampleRate, 0);
|
||||
}, 'sampleRate is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.sampleSize), "number",
|
||||
"sampleSize should exist and it should be a number.");
|
||||
assert_greater_than(settings.sampleSize, 0);
|
||||
}, 'sampleSize is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.echoCancellation), "boolean",
|
||||
"echoCancellation should exist and it should be a boolean.");
|
||||
}, 'echoCancellation is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.autoGainControl), "boolean",
|
||||
"autoGainControl should exist and it should be a boolean.");
|
||||
}, 'autoGainControl is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.noiseSuppression), "boolean",
|
||||
"noiseSuppression should exist and it should be a boolean.");
|
||||
}, 'noiseSuppression is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.latency), "number",
|
||||
"latency should exist and it should be a number.");
|
||||
assert_greater_than_equal(settings.latency,0);
|
||||
}, 'latency is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createAudioStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.channelCount), "number",
|
||||
"channelCount should exist and it should be a number.");
|
||||
assert_greater_than(settings.channelCount, 0);
|
||||
}, 'channelCount is reported by getSettings() for getUserMedia() audio tracks');
|
||||
|
||||
async function createVideoStreamAndGetSettings(t) {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({video: true});
|
||||
t.add_cleanup(() => stream.getVideoTracks()[0].stop());
|
||||
return stream.getVideoTracks()[0].getSettings();
|
||||
}
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.deviceId), "string",
|
||||
"deviceId should exist and it should be a string.");
|
||||
}, 'deviceId is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.groupId), "string",
|
||||
"groupId should exist and it should be a string.");
|
||||
}, 'groupId is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.width), "number",
|
||||
"width should exist and it should be a number.");
|
||||
assert_true(Number.isInteger(settings.width), "width should be an integer.");
|
||||
assert_greater_than_equal(settings.width, 0);;
|
||||
}, 'width is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.height), "number",
|
||||
"height should exist and it should be a number.");
|
||||
assert_true(Number.isInteger(settings.height), "height should be an integer.");
|
||||
assert_greater_than_equal(settings.height, 0);
|
||||
}, 'height is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.aspectRatio), "number",
|
||||
"aspectRatio should exist and it should be a number.");
|
||||
assert_greater_than_equal(settings.aspectRatio, 0);
|
||||
}, 'aspectRatio is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.frameRate), "number",
|
||||
"frameRate should exist and it should be a number.");
|
||||
assert_greater_than_equal(settings.frameRate, 0);
|
||||
}, 'frameRate is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
// facingMode not treated as mandatory because not all platforms provide
|
||||
// this information.
|
||||
if (settings.facingMode) {
|
||||
assert_equals(typeof(settings.facingMode), "string",
|
||||
"If facingMode is provided it should be a string.");
|
||||
assert_in_array(settings.facingMode,
|
||||
['user', 'environment', 'left', 'right']);
|
||||
}
|
||||
}, 'facingMode is reported by getSettings() for getUserMedia() video tracks');
|
||||
|
||||
promise_test(async t => {
|
||||
const settings = await createVideoStreamAndGetSettings(t);
|
||||
assert_equals(typeof(settings.resizeMode), "string",
|
||||
"resizeMode should exist and it should be a string.");
|
||||
assert_in_array(settings.resizeMode, ['none', 'crop-and-scale']);
|
||||
}, 'resizeMode is reported by getSettings() for getUserMedia() video tracks');
|
||||
</script>
|
||||
|
|
|
@@ -33,29 +33,6 @@ self.addEventListener('paymentrequest', event => {
return;
|
||||
}
|
||||
|
||||
const supportedTypes = methodData.data.supportedTypes;
|
||||
if (!supportedTypes) {
|
||||
const msg = 'Expected supported types in payment method specific data';
|
||||
event.respondWith(Promise.reject(new Error(msg)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (supportedTypes.length !== 1) {
|
||||
const msg = `Expected one supported type, but got ${
|
||||
supportedTypes.length
|
||||
} instead`;
|
||||
event.respondWith(Promise.reject(new Error(msg)));
|
||||
return;
|
||||
}
|
||||
|
||||
const supportedType = supportedTypes[0];
|
||||
const expectedSupportedType = 'prepaid';
|
||||
if (supportedType !== expectedSupportedType) {
|
||||
const msg = `Expected supported type "${expectedSupportedType}", but got "${supportedType}"`;
|
||||
event.respondWith(Promise.reject(new Error(msg)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (methodData.displayItems) {
|
||||
const msg = 'Expected no display items';
|
||||
event.respondWith(Promise.reject(new Error(msg)));
|
||||
|
|
|
@@ -264,14 +264,12 @@ promise_test(async t => {
promise_test(async t => {
|
||||
const methodName = 'basic-card';
|
||||
await registerApp(methodName);
|
||||
const cardType = 'prepaid';
|
||||
const cardNetwork = 'mir';
|
||||
const request = new PaymentRequest(
|
||||
[
|
||||
{
|
||||
supportedMethods: methodName,
|
||||
data: {
|
||||
supportedTypes: [cardType],
|
||||
supportedNetworks: [cardNetwork],
|
||||
},
|
||||
},
|
||||
|
@@ -305,14 +303,12 @@ promise_test(async t => {
|
||||
promise_test(async t => {
|
||||
const methodName = 'basic-card';
|
||||
const cardType = 'prepaid';
|
||||
const cardNetwork = 'mir';
|
||||
const registration = await registerApp(methodName);
|
||||
await registration.paymentManager.instruments.set(instrumentKey, {
|
||||
name: 'Test Payment Method',
|
||||
method: methodName,
|
||||
capabilities: {
|
||||
supportedTypes: [cardType],
|
||||
supportedNetworks: [cardNetwork],
|
||||
},
|
||||
});
|
||||
|
@@ -321,7 +317,6 @@ promise_test(async t => {
{
|
||||
supportedMethods: methodName,
|
||||
data: {
|
||||
supportedTypes: [cardType],
|
||||
supportedNetworks: [cardNetwork],
|
||||
},
|
||||
},
|
||||
|
@@ -355,14 +350,12 @@ promise_test(async t => {
|
||||
promise_test(async t => {
|
||||
const methodName = 'basic-card';
|
||||
const cardType = 'prepaid';
|
||||
const cardNetwork = 'mir';
|
||||
const registration = await registerApp(methodName);
|
||||
await registration.paymentManager.instruments.set(instrumentKey, {
|
||||
name: 'Test Payment Method',
|
||||
method: methodName,
|
||||
capabilities: {
|
||||
supportedTypes: [cardType],
|
||||
supportedNetworks: [cardNetwork],
|
||||
},
|
||||
});
|
||||
|
|
|
@@ -77,7 +77,7 @@ function runTests(registration) {
},
|
||||
],
|
||||
method: 'basic-card',
|
||||
capabilities: {supportedNetworks: ['mir'], supportedTypes: ['prepaid']},
|
||||
capabilities: {supportedNetworks: ['mir']},
|
||||
},
|
||||
);
|
||||
const result = await registration.paymentManager.instruments.get(
|
||||
|
@@ -104,7 +104,7 @@ function runTests(registration) {
{src: '/images/green-16x16.png', sizes: '16x16', type: 'image/png'},
|
||||
],
|
||||
method: 'basic-card',
|
||||
capabilities: {supportedNetworks: ['mir'], supportedTypes: ['prepaid']},
|
||||
capabilities: {supportedNetworks: ['mir']},
|
||||
},
|
||||
);
|
||||
let result = await registration.paymentManager.instruments.get(
|
||||
|
@@ -120,7 +120,6 @@ function runTests(registration) {
assert_equals(result.icons[0].type, 'image/png');
|
||||
assert_equals(result.method, 'basic-card');
|
||||
assert_array_equals(result.capabilities.supportedNetworks, ['mir']);
|
||||
assert_array_equals(result.capabilities.supportedTypes, ['prepaid']);
|
||||
await registration.paymentManager.instruments.set(
|
||||
'existing-instrument-key',
|
||||
{
|
||||
|
@@ -133,7 +132,7 @@ function runTests(registration) {
},
|
||||
],
|
||||
method: 'basic-card',
|
||||
capabilities: {supportedNetworks: ['visa'], supportedTypes: ['credit']},
|
||||
capabilities: {supportedNetworks: ['visa']},
|
||||
},
|
||||
);
|
||||
result = await registration.paymentManager.instruments.get(
|
||||
|
@@ -149,7 +148,6 @@ function runTests(registration) {
assert_equals(result.icons[0].type, 'image/png');
|
||||
assert_equals(result.method, 'basic-card');
|
||||
assert_array_equals(result.capabilities.supportedNetworks, ['visa']);
|
||||
assert_array_equals(result.capabilities.supportedTypes, ['credit']);
|
||||
}, 'Resetting an existing instrument updates the instrument');
|
||||
|
||||
promise_test(async t => {
|
||||
|
@@ -166,7 +164,7 @@ function runTests(registration) {
},
|
||||
],
|
||||
method: 'basic-card',
|
||||
capabilities: {supportedNetworks: ['mir'], supportedTypes: ['prepaid']},
|
||||
capabilities: {supportedNetworks: ['mir']},
|
||||
},
|
||||
);
|
||||
await registration.paymentManager.instruments.clear();
|
||||
|
|
|
@@ -16,7 +16,7 @@ async function setInstrumentsAndRunTests(registration) {
{src: '/images/rgrg-256x256.png', sizes: '256x256', type: 'image/png'},
|
||||
],
|
||||
method: 'basic-card',
|
||||
capabilities: {supportedNetworks: ['mir'], supportedTypes: ['prepaid']},
|
||||
capabilities: {supportedNetworks: ['mir']},
|
||||
});
|
||||
runTests();
|
||||
}
|
||||
|
@@ -25,7 +25,7 @@ function runTests() {
promise_test(async t => {
|
||||
const response = await new PaymentRequest(
|
||||
[
|
||||
{supportedMethods: 'basic-card', data: {supportedTypes: ['prepaid']}},
|
||||
{supportedMethods: 'basic-card', data: {}},
|
||||
{supportedMethods: 'interledger', data: {supportedNetworks: ['mir']}},
|
||||
],
|
||||
{
|
||||
|
@@ -60,7 +60,7 @@ function runTests() {
},
|
||||
{
|
||||
supportedMethods: 'interledger',
|
||||
data: {supportedTypes: ['prepaid']},
|
||||
data: {},
|
||||
total: {
|
||||
label: 'Prepaid total',
|
||||
amount: {currency: 'USD', value: '0.0097'},
|
||||
|
|
|
@@ -77,7 +77,7 @@ function runPromiseTest(button, data, expectedCard = visaCredit, expectedAddress
The test expects the following credit card.
|
||||
</p>
|
||||
<ol>
|
||||
<li>Add credit card:
|
||||
<li>Add card:
|
||||
<dl>
|
||||
<dt>Cardholder name:</dt>
|
||||
<dd>web platform test</dd>
|
||||
|
@@ -112,22 +112,12 @@ function runPromiseTest(button, data, expectedCard = visaCredit, expectedAddress
<ol>
|
||||
<li>
|
||||
<button onclick="runPromiseTest(this, {});">
|
||||
When passed BasicCardRequest without members, allow the user to input of any credit card type.
|
||||
</button>
|
||||
</li>
|
||||
<li>
|
||||
<button onclick="runPromiseTest(this, { supportedNetworks: [], supportedTypes: [] });">
|
||||
Returns any card type on any network, because zero length supportedNetworks and supportedTypes.
|
||||
When passed BasicCardRequest without members, allow the user to input a card on any network.
|
||||
</button>
|
||||
</li>
|
||||
<li>
|
||||
<button onclick="runPromiseTest(this, { supportedNetworks: [] });">
|
||||
Returns any card type on any network, because supportedNetworks is missing and supportedTypes is empty.
|
||||
</button>
|
||||
</li>
|
||||
<li>
|
||||
<button onclick="runPromiseTest(this, { supportedTypes: [] });">
|
||||
Returns any card type on any network missing supportedTypes, and empty supportedNetwork.
|
||||
Returns a card on any network, because zero length supportedNetworks.
|
||||
</button>
|
||||
</li>
|
||||
</ol>
|
||||
|
|
|
@@ -0,0 +1,25 @@
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>Historical Basic Card Changes</title>
|
||||
<script src=/resources/testharness.js></script>
|
||||
<script src=/resources/testharnessreport.js></script>
|
||||
<script>
|
||||
// https://github.com/w3c/payment-method-basic-card/pull/62
|
||||
test(() => {
|
||||
try {
|
||||
new PaymentRequest(
|
||||
[
|
||||
{
|
||||
supportedMethods: "basic-card",
|
||||
supportedTypes: [
|
||||
"this was an enum value once - so this would have thrown",
|
||||
],
|
||||
},
|
||||
],
|
||||
{ total: { label: "bar", amount: { currency: "BAZ", value: "0" } } }
|
||||
);
|
||||
} catch (err) {
|
||||
assert_unreached("Unexpected error");
|
||||
}
|
||||
}, "supportedTypes and BasicCardType enum were removed from the spec");
|
||||
</script>
|
|
@@ -18,7 +18,7 @@
}, 'Call PresentationRequest constructor with an empty sequence. NotSupportedError Exception expected.');
|
||||
|
||||
assert_throws('SyntaxError', () => {
|
||||
new PresentationRequest('http://@');
|
||||
new PresentationRequest('https://@');
|
||||
}, 'Call PresentationRequest constructor with an invalid URL. SyntaxError Exception expected.');
|
||||
|
||||
assert_throws('NotSupportedError', () => {
|
||||
|
@@ -26,7 +26,7 @@
}, 'Call PresentationRequest constructor with an unsupported URL. NotSupportedError expected.');
|
||||
|
||||
assert_throws('SyntaxError', function() {
|
||||
new PresentationRequest(['presentation.html', 'http://@']);
|
||||
new PresentationRequest(['presentation.html', 'https://@']);
|
||||
}, 'Call PresentationRequest constructor with a sequence of URLs, one of them invalid. SyntaxError Exception expected.');
|
||||
|
||||
assert_throws('NotSupportedError', function() {
|
||||
|
|
|
@@ -148,10 +148,10 @@
A.prototype = {b:"b"}
|
||||
var a = new A();
|
||||
assert_own_property(a, "a");
|
||||
assert_false(a.hasOwnProperty("b"), "unexpected property found: \"b\"");
|
||||
assert_not_own_property(a, "b", "unexpected property found: \"b\"");
|
||||
assert_inherits(a, "b");
|
||||
}
|
||||
test(testAssertInherits, "test for assert[_not]_exists and insert_inherits")
|
||||
test(testAssertInherits, "test for assert[_not]_own_property and insert_inherits")
|
||||
|
||||
test(function()
|
||||
{
|
||||
|
@@ -398,7 +398,7 @@
},
|
||||
{
|
||||
"status_string": "PASS",
|
||||
"name": "test for assert[_not]_exists and insert_inherits",
|
||||
"name": "test for assert[_not]_own_property and insert_inherits",
|
||||
"message": null,
|
||||
"properties": {}
|
||||
},
|
||||
|
|
|
@@ -1262,6 +1262,13 @@ policies and contribution forms [3].
}
|
||||
expose(assert_own_property, "assert_own_property");
|
||||
|
||||
function assert_not_own_property(object, property_name, description) {
|
||||
assert(!object.hasOwnProperty(property_name),
|
||||
"assert_not_own_property", description,
|
||||
"unexpected property ${p} is found on object", {p:property_name});
|
||||
}
|
||||
expose(assert_not_own_property, "assert_not_own_property");
|
||||
|
||||
function _assert_inherits(name) {
|
||||
return function (object, property_name, description)
|
||||
{
|
||||
|
|
|
@@ -0,0 +1,30 @@
<!DOCTYPE html>
|
||||
<meta charset="utf-8">
|
||||
<title>Tests for importScripts: MIME types</title>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="resources/test-helpers.sub.js"></script>
|
||||
<body>
|
||||
<script>
|
||||
/**
|
||||
* Test that a Service Worker's importScript() only accepts valid MIME types.
|
||||
*/
|
||||
let serviceWorker = null;
|
||||
|
||||
promise_test(async t => {
|
||||
const scope = 'resources/import-scripts-mime-types';
|
||||
const registration = await service_worker_unregister_and_register(t,
|
||||
'resources/import-scripts-mime-types-worker.js', scope);
|
||||
|
||||
add_completion_callback(() => { registration.unregister(); });
|
||||
|
||||
await wait_for_state(t, registration.installing, 'activated');
|
||||
|
||||
serviceWorker = registration.active;
|
||||
}, 'Global setup');
|
||||
|
||||
promise_test(async t => {
|
||||
await fetch_tests_from_worker(serviceWorker);
|
||||
}, 'Fetch importScripts tests from service worker')
|
||||
</script>
|
||||
</body>
|
|
@@ -0,0 +1,49 @@
const badMimeTypes = [
|
||||
null, // no MIME type
|
||||
'text/plain',
|
||||
];
|
||||
|
||||
const validMimeTypes = [
|
||||
'application/ecmascript',
|
||||
'application/javascript',
|
||||
'application/x-ecmascript',
|
||||
'application/x-javascript',
|
||||
'text/ecmascript',
|
||||
'text/javascript',
|
||||
'text/javascript1.0',
|
||||
'text/javascript1.1',
|
||||
'text/javascript1.2',
|
||||
'text/javascript1.3',
|
||||
'text/javascript1.4',
|
||||
'text/javascript1.5',
|
||||
'text/jscript',
|
||||
'text/livescript',
|
||||
'text/x-ecmascript',
|
||||
'text/x-javascript',
|
||||
];
|
||||
|
||||
function importScriptsWithMimeType(mimeType) {
|
||||
importScripts(`./mime-type-worker.py${mimeType ? '?mime=' + mimeType : ''}`);
|
||||
}
|
||||
|
||||
importScripts('/resources/testharness.js');
|
||||
|
||||
for (const mimeType of badMimeTypes) {
|
||||
test(() => {
|
||||
assert_throws(
|
||||
'NetworkError',
|
||||
() => { importScriptsWithMimeType(mimeType); },
|
||||
`importScripts with ${mimeType ? 'bad' : 'no'} MIME type ${mimeType || ''} throws NetworkError`,
|
||||
);
|
||||
}, `Importing script with ${mimeType ? 'bad' : 'no'} MIME type ${mimeType || ''}`);
|
||||
}
|
||||
|
||||
for (const mimeType of validMimeTypes) {
|
||||
test(() => {
|
||||
try {
|
||||
importScriptsWithMimeType(mimeType);
|
||||
} catch {
|
||||
assert_unreached(`importScripts with MIME type ${mimeType} should not throw`);
|
||||
}
|
||||
}, `Importing script with valid JavaScript MIME type ${mimeType}`);
|
||||
}
|
|
@@ -21,11 +21,21 @@ function registration_tests_mime_types(register_method, check_error_types) {
'Registration of plain text script should fail.');
|
||||
}, 'Registering script with bad MIME type');
|
||||
|
||||
/**
|
||||
* ServiceWorkerContainer.register() should throw a TypeError, according to
|
||||
* step 17.1 of https://w3c.github.io/ServiceWorker/#importscripts
|
||||
*
|
||||
* "[17] If an uncaught runtime script error occurs during the above step, then:
|
||||
* [17.1] Invoke Reject Job Promise with job and TypeError"
|
||||
*
|
||||
* (Where the "uncaught runtime script error" is thrown by an unsuccessful
|
||||
* importScripts())
|
||||
*/
|
||||
promise_test(function(t) {
|
||||
var script = 'resources/import-mime-type-worker.py';
|
||||
var scope = 'resources/scope/no-mime-type-worker/';
|
||||
return promise_rejects(t,
|
||||
check_error_types ? 'SecurityError' : null,
|
||||
check_error_types ? new TypeError() : null,
|
||||
register_method(script, {scope: scope}),
|
||||
'Registration of no MIME type imported script should fail.');
|
||||
}, 'Registering script that imports script with no MIME type');
|
||||
|
@@ -34,7 +44,7 @@ function registration_tests_mime_types(register_method, check_error_types) {
var script = 'resources/import-mime-type-worker.py?mime=text/plain';
|
||||
var scope = 'resources/scope/bad-mime-type-worker/';
|
||||
return promise_rejects(t,
|
||||
check_error_types ? 'SecurityError' : null,
|
||||
check_error_types ? new TypeError() : null,
|
||||
register_method(script, {scope: scope}),
|
||||
'Registration of plain text imported script should fail.');
|
||||
}, 'Registering script that imports script with bad MIME type');
|
||||
|
|
|
@@ -15,7 +15,7 @@ regenerate these files by running `generate-test-sxgs.sh` in the
`resource` directory before running the tests.
|
||||
|
||||
`generate-test-sxgs.sh` requires command-line tools in the
|
||||
[webpackage repository][https://github.com/WICG/webpackage].
|
||||
[webpackage repository](https://github.com/WICG/webpackage).
|
||||
To install them, run:
|
||||
```
|
||||
go get -u github.com/WICG/webpackage/go/signedexchange/cmd/...
|
||||
|
|
|
@@ -1,3 +1,4 @@
spec: https://w3c.github.io/speech-api/
|
||||
suggested_reviewers:
|
||||
- foolip
|
||||
- gshires
|
||||
|
|
|
@@ -1,15 +1,13 @@
#!/bin/bash
|
||||
set -e
|
||||
|
||||
RELEVANT_JOBS=$(./wpt test-jobs)
|
||||
RELEVANT_CHANGES=$(echo "$RELEVANT_JOBS" | grep $JOB || true)
|
||||
if [[ -z ${RUN_JOB+x} && ! -z $RELEVANT_CHANGES ]] || [[ $RUN_JOB -eq 1 ]]; then
|
||||
if [[ $RUN_JOB -eq 1 ]] || ./wpt test-jobs --includes $JOB; then
|
||||
export RUN_JOB=1
|
||||
git submodule update --init --recursive 1>&2
|
||||
export DISPLAY=:99.0
|
||||
sh -e /etc/init.d/xvfb start 1>&2
|
||||
# For uploading the manifest
|
||||
export WPT_MANIFEST_FILE=$HOME/meta/MANIFEST-$(git rev-parse HEAD).json
|
||||
elif [[ -z ${RUN_JOB+x} ]]; then
|
||||
else
|
||||
export RUN_JOB=0
|
||||
fi
|
||||
|
|
|
@@ -18,10 +18,7 @@ run_applicable_tox () {
export TOXENV="$OLD_TOXENV"
|
||||
}
|
||||
|
||||
RELEVANT_JOBS=$(./wpt test-jobs)
|
||||
|
||||
RELEVANT_CHANGES_TOOLS=$(echo "$RELEVANT_JOBS" | grep "tools_unittest" || true)
|
||||
if [[ ! -z $RELEVANT_CHANGES_TOOLS ]]; then
|
||||
if ./wpt test-jobs --includes tools_unittest; then
|
||||
pip install -U tox codecov
|
||||
cd tools
|
||||
run_applicable_tox
|
||||
|
@@ -30,12 +27,10 @@ else
echo "Skipping tools unittest"
|
||||
fi
|
||||
|
||||
RELEVANT_CHANGES_WPTRUNNER=$(echo "$RELEVANT_JOBS" | grep "wptrunner_unittest" || true)
|
||||
if [[ ! -z $RELEVANT_CHANGES_WPTRUNNER ]]; then
|
||||
if ./wpt test-jobs --includes wptrunner_unittest; then
|
||||
cd tools/wptrunner
|
||||
run_applicable_tox
|
||||
cd $WPT_ROOT
|
||||
else
|
||||
echo "Skipping wptrunner unittest"
|
||||
fi
|
||||
|
||||
|
|
|
@@ -90,6 +90,9 @@ def get_paths(**kwargs):
|
||||
|
||||
def get_jobs(paths, **kwargs):
|
||||
if kwargs.get("all"):
|
||||
return set(job_path_map.keys())
|
||||
|
||||
jobs = set()
|
||||
|
||||
rules = {}
|
||||
|
@@ -121,6 +124,7 @@ def get_jobs(paths, **kwargs):
def create_parser():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("revish", default=None, help="Commits to consider. Defaults to the commits on the current branch", nargs="?")
|
||||
parser.add_argument("--all", help="List all jobs unconditionally.", action="store_true")
|
||||
parser.add_argument("--includes", default=None, help="Jobs to check for. Return code is 0 if all jobs are found, otherwise 1", nargs="*")
|
||||
return parser
|
||||
|
||||
|
|
|
@@ -1,8 +1,29 @@
from tools.ci import jobs
|
||||
|
||||
all_jobs = set([
|
||||
"build_css",
|
||||
"lint",
|
||||
"manifest_upload",
|
||||
"resources_unittest",
|
||||
"stability",
|
||||
"tools_unittest",
|
||||
"update_built",
|
||||
"wpt_integration",
|
||||
"wptrunner_infrastructure",
|
||||
"wptrunner_unittest",
|
||||
])
|
||||
|
||||
default_jobs = set(["lint", "manifest_upload"])
|
||||
|
||||
|
||||
def test_all():
|
||||
assert jobs.get_jobs(["README.md"], all=True) == all_jobs
|
||||
|
||||
|
||||
def test_default():
|
||||
assert jobs.get_jobs(["README.md"]) == default_jobs
|
||||
|
||||
|
||||
def test_testharness():
|
||||
assert jobs.get_jobs(["resources/testharness.js"]) == default_jobs | set(["resources_unittest"])
|
||||
assert jobs.get_jobs(["resources/testharness.js"],
|
||||
|
@@ -39,10 +60,6 @@ def test_stability():
includes=["stability"]) == set(["stability"])
|
||||
|
||||
|
||||
def test_default():
|
||||
assert jobs.get_jobs(["README.md"]) == default_jobs
|
||||
|
||||
|
||||
def test_tools_unittest():
|
||||
assert jobs.get_jobs(["tools/ci/test/test_jobs.py"],
|
||||
includes=["tools_unittest"]) == set(["tools_unittest"])
|
||||
|
@@ -83,6 +100,7 @@ def test_wpt_integration():
assert jobs.get_jobs(["tools/wptrunner/wptrunner/wptrunner.py"],
|
||||
includes=["wpt_integration"]) == set(["wpt_integration"])
|
||||
|
||||
|
||||
def test_wpt_infrastructure():
|
||||
assert jobs.get_jobs(["tools/hammer.html"],
|
||||
includes=["wptrunner_infrastructure"]) == set(["wptrunner_infrastructure"])
|
||||
|
|
|
@@ -1,26 +1,32 @@
<!DOCTYPE html>
|
||||
<html lang=en>
|
||||
<meta charset=UTF-8>
|
||||
<title>Web tests</title>
|
||||
<title>web-platform-tests Runner</title>
|
||||
<link rel='stylesheet' href='css/bootstrap.min.css'>
|
||||
<link rel='stylesheet' href='css/bootstrap-theme.min.css'>
|
||||
<link rel='stylesheet' href='runner.css'>
|
||||
<script src='/common/get-host-info.sub.js'></script>
|
||||
<script src='runner.js'></script>
|
||||
|
||||
|
||||
<header class="navbar navbar-inverse navbar-fixed-top">
|
||||
<div class="container">
|
||||
<div class="navbar-header">
|
||||
<a class="navbar-brand" href="#">
|
||||
<img src='logo.svg' width='50' height='50' alt='Logo for the WPT Runner'>
|
||||
Web Platform Tests Runner
|
||||
<img src='/images/wpt-logo/wpt-logo-lightblue-bg.svg' width='50' height='50' style='border-radius: 50%' alt='WPT Logo'>
|
||||
web-platform-tests Runner
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<div class="container">
|
||||
<div class="alert alert-warning">
|
||||
This runner does not fully support all test types and has a number of
|
||||
<a href="https://github.com/web-platform-tests/wpt/labels/runner">known issues</a>.
|
||||
<code>./wpt run</code> is a more well-supported runner, see the documentation on
|
||||
<a href="https://web-platform-tests.org/running-tests/">running tests</a>.
|
||||
</div>
|
||||
|
||||
<div id="testControl" class="panel panel-default">
|
||||
<div class="panel-body">
|
||||
<form id='options' class='horizontal-form' onsubmit='return false;'>
|
||||
|
|
|
@@ -1,8 +0,0 @@
<svg width="453px" height="453px" viewBox="0 0 453 453" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle stroke="#ed5565" stroke-width="20" fill="#fff" cx="226.5" cy="226.5" r="213.5"></circle>
|
||||
<g transform="translate(160 250)">
|
||||
<g transform="matrix(2.25 0 0 -2.25 0 0)">
|
||||
<path d="m0 0c0.252-0.333 0.508-0.659 0.766-0.974-1.269-3.764-2.368-7.749-3.447-11.383-0.166-0.053-0.329-0.116-0.486-0.194-8.572 8.211-2.056 11.364 3.167 12.551m59.809 45.57c2.899 20.118-10.084 38.635-29 41.362-18.915 2.726-36.601-11.371-39.5-31.489-2.418-16.774 6.212-32.427 20.112-38.75l-0.05 0.014-0.323-2.242c0.219-0.049 0.427-0.09 0.64-0.134-0.122-0.027-0.245-0.053-0.365-0.086l-0.004 0.013s-0.042-0.017-0.106-0.042c-0.465-0.137-0.915-0.319-1.349-0.544-3.502-1.424-12.285-5.095-14.287-6.867 0 0 0.927-1.665 2.254-3.704-17.335-2.421-6.522-15.63-3.176-19.26-0.015-0.296 0.004-0.592 0.039-0.888-2.562-1.677-5.772-4.195-7.772-7.29 0 0 0.552-0.182 1.458-0.536v-0.001l0.016-0.005c1.182-0.463 2.96-1.219 4.914-2.248-3.004-13.557-7.542-9.677-10.589-4.683 0.745 2.232 0.741 4.23-0.202 5.393-0.66 0.811-1.681 1.101-2.889 0.931-0.087 0.222-0.135 0.349-0.135 0.349l-0.108-0.397c-0.057-0.01-0.115-0.021-0.173-0.034 0.005 0.003 0.008 0.005 0.013 0.008 0 0-0.032-0.008-0.078-0.021-0.508-0.116-1.042-0.306-1.593-0.566-2.759-1.158-8.023-4.248-8.639-11.088-0.208-1.056-0.212-2.015 0.002-2.812 0.001-0.014 0.001-0.026 0.003-0.04 0 0 0.527-4.561 4.288-1.894l-0.002 0.003c1.861 0.536 4.034 2.003 5.989 4.226 0.664 0.755 1.256 1.545 1.768 2.343 8.537-16.768 14.974 3.409 15.81 6.23 2.309-1.538 4.528-3.425 6.019-5.64 0 0 1.182 3.458 3.942 6.312 5.984-8.956 13.374-10.465 13.374-10.465l3.802 6.152c11.328-5.569 7.382-10.385 2.713-13.253-1.757 1.198-3.428 1.485-4.537 0.55-0.775-0.653-1.166-1.807-1.2-3.246-0.199-0.069-0.312-0.106-0.312-0.106l0.316-0.185c0.001-0.069 0.002-0.139 0.004-0.21-0.002 0.006-0.004 0.012-0.006 0.019 0 0 0.003-0.044 0.007-0.112 0.024-0.604 0.104-1.247 0.239-1.92 0.564-3.395 2.378-10.019 8.013-11.741 0.851-0.396 1.652-0.542 2.349-0.407 0.012 0 0.023-0.003 0.035-0.003 0 0 3.891-0.048 2.21 4.745l-0.004-0.004c-0.176 2.258-1.086 5.015-2.659 7.628-0.535 0.888-1.109 1.695-1.701 2.413 16.374 8.095-3.15 19.567-3.156 19.57l2.062 3.336-4.584 1.028c-0.516 0.116-1.446 0.458-2.639 0.949 0.61-0.116 1.218-0.225 1.821-0.322 0.221 0.615 0.432 1.249 0.631 1.918 1.715 5.766 2.34 12.577 1.803 18.76l1.544-3.601s0.655 0.404 1.612 0.827l-0.088-0.167c3.832-26.906 14.631-10.666 17.407-5.924 1.445 0.125 2.819 1.27 3.448 3.074 0.864 2.475 0.002 5.242-1.926 6.183-1.927 0.942-4.188-0.301-5.05-2.774-0.533-1.524-0.406-3.158 0.218-4.41-6.67-13.044-10.36-1.016-11.647 4.81 0.669 0.143 1.355 0.21 1.998 0.135 0 0-4.185 11.234-11.743 15.618-0.097 0.136-0.192 0.275-0.291 0.405-0.056-0.017-0.116-0.029-0.174-0.044l0.345 2.832c0.567 0.046 0.871 0.099 0.871 0.099l0.021 0.146-0.65 0.181c18.572-2.158 35.744 11.797 38.597 31.593" fill="#ed5565"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
|
@@ -1,2 +1,2 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
|
|
|
@@ -1,2 +1,2 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
|
|
|
@@ -1,2 +1,2 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
|
|
|
@@ -1,2 +1,2 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
|
|
|
@@ -1,2 +1,2 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
|
|
|
@@ -1,2 +1,2 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
|
|
|
@@ -1,3 +1,3 @@
mozprocess == 0.26
|
||||
selenium==3.14.1
|
||||
selenium==3.141.0
|
||||
requests==2.20.0
|
||||
|
|
|
@@ -266,7 +266,7 @@ class SeleniumRun(object):
if message:
|
||||
message += "\n"
|
||||
message += traceback.format_exc(e)
|
||||
self.result = False, ("INTERNAL-ERROR", e)
|
||||
self.result = False, ("INTERNAL-ERROR", message)
|
||||
finally:
|
||||
self.result_flag.set()
|
||||
|
||||
|
|
|
@@ -281,9 +281,9 @@ def run_tests(config, test_paths, product, **kwargs):
test_total += test_count
|
||||
unexpected_total += unexpected_count
|
||||
logger.info("Got %i unexpected results" % unexpected_count)
|
||||
logger.suite_end()
|
||||
if repeat_until_unexpected and unexpected_total > 0:
|
||||
break
|
||||
logger.suite_end()
|
||||
|
||||
if test_total == 0:
|
||||
if skipped_tests > 0:
|
||||
|
|
|
@@ -0,0 +1,121 @@
# META: timeout=long
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.asserts import (
|
||||
assert_error,
|
||||
assert_same_element,
|
||||
assert_success,
|
||||
assert_dialog_handled,
|
||||
)
|
||||
from tests.support.inline import inline
|
||||
|
||||
|
||||
def find_element(session, using, value):
|
||||
return session.transport.send(
|
||||
"POST", "session/{session_id}/element".format(**vars(session)),
|
||||
{"using": using, "value": value})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_without_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_without_exception(dialog_type, retval):
|
||||
session.url = inline("<p>bar</p>")
|
||||
element = session.find.css("p", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_element(session, "css selector", "p")
|
||||
value = assert_success(response)
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
assert_same_element(session, value, element)
|
||||
|
||||
return check_user_prompt_closed_without_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_with_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_with_exception(dialog_type, retval):
|
||||
session.url = inline("<p>bar</p>")
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_element(session, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
return check_user_prompt_closed_with_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_not_closed_but_exception(session, create_dialog):
|
||||
def check_user_prompt_not_closed_but_exception(dialog_type):
|
||||
session.url = inline("<p>bar</p>")
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_element(session, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert session.alert.text == dialog_type
|
||||
session.alert.dismiss()
|
||||
|
||||
return check_user_prompt_not_closed_but_exception
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
|
||||
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
|
||||
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
|
||||
check_user_prompt_not_closed_but_exception(dialog_type)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
|
@@ -0,0 +1,126 @@
# META: timeout=long
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.asserts import (
|
||||
assert_error,
|
||||
assert_same_element,
|
||||
assert_success,
|
||||
assert_dialog_handled,
|
||||
)
|
||||
from tests.support.inline import inline
|
||||
|
||||
|
||||
def find_element(session, element_id, using, value):
|
||||
return session.transport.send(
|
||||
"POST", "session/{session_id}/element/{element_id}/element".format(
|
||||
session_id=session.session_id,
|
||||
element_id=element_id),
|
||||
{"using": using, "value": value})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_without_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_without_exception(dialog_type, retval):
|
||||
session.url = inline("<div><p>bar</p><div>")
|
||||
outer_element = session.find.css("div", all=False)
|
||||
inner_element = session.find.css("p", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_element(session, outer_element.id, "css selector", "p")
|
||||
value = assert_success(response)
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
assert_same_element(session, value, inner_element)
|
||||
|
||||
return check_user_prompt_closed_without_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_with_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_with_exception(dialog_type, retval):
|
||||
session.url = inline("<div><p>bar</p><div>")
|
||||
outer_element = session.find.css("div", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_element(session, outer_element.id, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
return check_user_prompt_closed_with_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_not_closed_but_exception(session, create_dialog):
|
||||
def check_user_prompt_not_closed_but_exception(dialog_type):
|
||||
session.url = inline("<div><p>bar</p><div>")
|
||||
outer_element = session.find.css("div", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_element(session, outer_element.id, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert session.alert.text == dialog_type
|
||||
session.alert.dismiss()
|
||||
|
||||
return check_user_prompt_not_closed_but_exception
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
|
||||
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
|
||||
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
|
||||
check_user_prompt_not_closed_but_exception(dialog_type)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
|
@@ -0,0 +1,123 @@
# META: timeout=long
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.asserts import (
|
||||
assert_error,
|
||||
assert_same_element,
|
||||
assert_success,
|
||||
assert_dialog_handled,
|
||||
)
|
||||
from tests.support.inline import inline
|
||||
|
||||
|
||||
def find_elements(session, using, value):
|
||||
return session.transport.send(
|
||||
"POST", "session/{session_id}/elements".format(**vars(session)),
|
||||
{"using": using, "value": value})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_without_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_without_exception(dialog_type, retval):
|
||||
session.url = inline("<p>bar</p>")
|
||||
element = session.find.css("p", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_elements(session, "css selector", "p")
|
||||
value = assert_success(response)
|
||||
assert isinstance(value, list)
|
||||
assert len(value) == 1
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
assert_same_element(session, value[0], element)
|
||||
|
||||
return check_user_prompt_closed_without_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_with_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_with_exception(dialog_type, retval):
|
||||
session.url = inline("<p>bar</p>")
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_elements(session, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
return check_user_prompt_closed_with_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_not_closed_but_exception(session, create_dialog):
|
||||
def check_user_prompt_not_closed_but_exception(dialog_type):
|
||||
session.url = inline("<p>bar</p>")
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_elements(session, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert session.alert.text == dialog_type
|
||||
session.alert.dismiss()
|
||||
|
||||
return check_user_prompt_not_closed_but_exception
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
|
||||
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
|
||||
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
|
||||
check_user_prompt_not_closed_but_exception(dialog_type)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
|
@@ -0,0 +1,128 @@
# META: timeout=long
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.asserts import (
|
||||
assert_error,
|
||||
assert_same_element,
|
||||
assert_success,
|
||||
assert_dialog_handled,
|
||||
)
|
||||
from tests.support.inline import inline
|
||||
|
||||
|
||||
def find_elements(session, element_id, using, value):
|
||||
return session.transport.send(
|
||||
"POST", "session/{session_id}/element/{element_id}/elements".format(
|
||||
session_id=session.session_id,
|
||||
element_id=element_id),
|
||||
{"using": using, "value": value})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_without_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_without_exception(dialog_type, retval):
|
||||
session.url = inline("<div><p>bar</p><div>")
|
||||
outer_element = session.find.css("div", all=False)
|
||||
inner_element = session.find.css("p", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_elements(session, outer_element.id, "css selector", "p")
|
||||
value = assert_success(response)
|
||||
assert isinstance(value, list)
|
||||
assert len(value) == 1
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
assert_same_element(session, value[0], inner_element)
|
||||
|
||||
return check_user_prompt_closed_without_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_with_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_with_exception(dialog_type, retval):
|
||||
session.url = inline("<div><p>bar</p><div>")
|
||||
outer_element = session.find.css("div", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_elements(session, outer_element.id, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
return check_user_prompt_closed_with_exception
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_not_closed_but_exception(session, create_dialog):
|
||||
def check_user_prompt_not_closed_but_exception(dialog_type):
|
||||
session.url = inline("<div><p>bar</p><div>")
|
||||
outer_element = session.find.css("div", all=False)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
response = find_elements(session, outer_element.id, "css selector", "p")
|
||||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert session.alert.text == dialog_type
|
||||
session.alert.dismiss()
|
||||
|
||||
return check_user_prompt_not_closed_but_exception
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", True),
|
||||
("prompt", ""),
|
||||
])
|
||||
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_without_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
||||
|
||||
|
||||
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
|
||||
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
|
||||
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
|
||||
check_user_prompt_not_closed_but_exception(dialog_type)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dialog_type, retval", [
|
||||
("alert", None),
|
||||
("confirm", False),
|
||||
("prompt", None),
|
||||
])
|
||||
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
|
||||
check_user_prompt_closed_with_exception(dialog_type, retval)
|
|
@@ -1,4 +1,5 @@
from tests.support.asserts import assert_error, assert_success
|
||||
from tests.support.helpers import is_fullscreen
|
||||
|
||||
|
||||
def fullscreen(session):
|
||||
|
@@ -6,17 +7,6 @@ def fullscreen(session):
"POST", "session/{session_id}/window/fullscreen".format(**vars(session)))
|
||||
|
||||
|
||||
def is_fullscreen(session):
|
||||
# At the time of writing, WebKit does not conform to the
|
||||
# Fullscreen API specification.
|
||||
#
|
||||
# Remove the prefixed fallback when
|
||||
# https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
|
||||
return session.execute_script("""
|
||||
return !!(window.fullScreen || document.webkitIsFullScreen)
|
||||
""")
|
||||
|
||||
|
||||
def test_no_browsing_context(session, closed_window):
|
||||
response = fullscreen(session)
|
||||
assert_error(response, "no such window")
|
||||
|
@@ -26,7 +16,7 @@ def test_fullscreen(session):
response = fullscreen(session)
|
||||
assert_success(response)
|
||||
|
||||
assert is_fullscreen(session) is True
|
||||
assert is_fullscreen(session)
|
||||
|
||||
|
||||
def test_payload(session):
|
||||
|
@@ -47,12 +37,12 @@ def test_payload(session):
|
||||
|
||||
def test_fullscreen_twice_is_idempotent(session):
|
||||
assert is_fullscreen(session) is False
|
||||
assert not is_fullscreen(session)
|
||||
|
||||
first_response = fullscreen(session)
|
||||
assert_success(first_response)
|
||||
assert is_fullscreen(session) is True
|
||||
assert is_fullscreen(session)
|
||||
|
||||
second_response = fullscreen(session)
|
||||
assert_success(second_response)
|
||||
assert is_fullscreen(session) is True
|
||||
assert is_fullscreen(session)
|
||||
|
|
|
@@ -3,6 +3,7 @@
import pytest
|
||||
|
||||
from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
|
||||
from tests.support.helpers import is_fullscreen
|
||||
|
||||
|
||||
def fullscreen(session):
|
||||
|
@@ -10,21 +11,10 @@ def fullscreen(session):
"POST", "session/{session_id}/window/fullscreen".format(**vars(session)))
|
||||
|
||||
|
||||
def is_fullscreen(session):
|
||||
# At the time of writing, WebKit does not conform to the
|
||||
# Fullscreen API specification.
|
||||
#
|
||||
# Remove the prefixed fallback when
|
||||
# https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
|
||||
return session.execute_script("""
|
||||
return !!(window.fullScreen || document.webkitIsFullScreen)
|
||||
""")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_user_prompt_closed_without_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_without_exception(dialog_type, retval):
|
||||
assert is_fullscreen(session) is False
|
||||
assert not is_fullscreen(session)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
|
@@ -32,8 +22,7 @@ def check_user_prompt_closed_without_exception(session, create_dialog):
assert_success(response)
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
assert is_fullscreen(session) is True
|
||||
assert is_fullscreen(session)
|
||||
|
||||
return check_user_prompt_closed_without_exception
|
||||
|
||||
|
@@ -41,7 +30,7 @@ def check_user_prompt_closed_without_exception(session, create_dialog):
@pytest.fixture
|
||||
def check_user_prompt_closed_with_exception(session, create_dialog):
|
||||
def check_user_prompt_closed_with_exception(dialog_type, retval):
|
||||
assert is_fullscreen(session) is False
|
||||
assert not is_fullscreen(session)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
|
@ -49,8 +38,7 @@ def check_user_prompt_closed_with_exception(session, create_dialog):
|
|||
assert_error(response, "unexpected alert open")
|
||||
|
||||
assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
|
||||
|
||||
assert is_fullscreen(session) is False
|
||||
assert not is_fullscreen(session)
|
||||
|
||||
return check_user_prompt_closed_with_exception
|
||||
|
||||
|
@ -58,7 +46,7 @@ def check_user_prompt_closed_with_exception(session, create_dialog):
|
|||
@pytest.fixture
|
||||
def check_user_prompt_not_closed_but_exception(session, create_dialog):
|
||||
def check_user_prompt_not_closed_but_exception(dialog_type):
|
||||
assert is_fullscreen(session) is False
|
||||
assert not is_fullscreen(session)
|
||||
|
||||
create_dialog(dialog_type, text=dialog_type)
|
||||
|
||||
|
@ -68,7 +56,7 @@ def check_user_prompt_not_closed_but_exception(session, create_dialog):
|
|||
assert session.alert.text == dialog_type
|
||||
session.alert.dismiss()
|
||||
|
||||
assert is_fullscreen(session) is False
|
||||
assert not is_fullscreen(session)
|
||||
|
||||
return check_user_prompt_not_closed_but_exception
|
||||
|
||||
|
|
|
@@ -1,4 +1,5 @@
 from tests.support.asserts import assert_error, assert_success
+from tests.support.helpers import document_hidden, is_fullscreen
 
 
 def maximize(session):
@@ -6,17 +7,6 @@ def maximize(session):
         "POST", "session/{session_id}/window/maximize".format(**vars(session)))
 
 
-def is_fullscreen(session):
-    # At the time of writing, WebKit does not conform to the
-    # Fullscreen API specification.
-    #
-    # Remove the prefixed fallback when
-    # https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
-    return session.execute_script("""
-        return !!(window.fullScreen || document.webkitIsFullScreen)
-        """)
-
-
 def test_no_browsing_context(session, closed_window):
     response = maximize(session)
     assert_error(response, "no such window")
@@ -24,16 +14,16 @@ def test_no_browsing_context(session, closed_window):
 
 def test_fully_exit_fullscreen(session):
     session.window.fullscreen()
-    assert is_fullscreen(session) is True
+    assert is_fullscreen(session)
 
     response = maximize(session)
     assert_success(response)
-    assert is_fullscreen(session) is False
+    assert not is_fullscreen(session)
 
 
 def test_restore_the_window(session):
     session.window.minimize()
-    assert session.execute_script("return document.hidden") is True
+    assert document_hidden(session)
 
     response = maximize(session)
     assert_success(response)

@@ -1,4 +1,7 @@
+# META: timeout=long
+
 from tests.support.asserts import assert_error, assert_success
+from tests.support.helpers import document_hidden, is_fullscreen
 
 
 def minimize(session):
@@ -6,21 +9,6 @@ def minimize(session):
         "POST", "session/{session_id}/window/minimize".format(**vars(session)))
 
 
-def is_fullscreen(session):
-    # At the time of writing, WebKit does not conform to the
-    # Fullscreen API specification.
-    #
-    # Remove the prefixed fallback when
-    # https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
-    return session.execute_script("""
-        return !!(window.fullScreen || document.webkitIsFullScreen)
-        """)
-
-
-def is_minimized(session):
-    return session.execute_script("return document.hidden")
-
-
 def test_no_browsing_context(session, closed_window):
     response = minimize(session)
     assert_error(response, "no such window")
@@ -33,19 +21,19 @@ def test_fully_exit_fullscreen(session):
     response = minimize(session)
     assert_success(response)
     assert not is_fullscreen(session)
-    assert is_minimized(session)
+    assert document_hidden(session)
 
 
 def test_minimize(session):
-    assert not is_minimized(session)
+    assert not document_hidden(session)
 
     response = minimize(session)
     assert_success(response)
-    assert is_minimized(session)
+    assert document_hidden(session)
 
 
 def test_payload(session):
-    assert not is_minimized(session)
+    assert not document_hidden(session)
 
     response = minimize(session)
     value = assert_success(response)
@@ -61,16 +49,16 @@ def test_payload(session):
     assert isinstance(value["x"], int)
     assert isinstance(value["y"], int)
 
-    assert is_minimized(session)
+    assert document_hidden(session)
 
 
 def test_minimize_twice_is_idempotent(session):
-    assert not is_minimized(session)
+    assert not document_hidden(session)
 
     first_response = minimize(session)
     assert_success(first_response)
-    assert is_minimized(session)
+    assert document_hidden(session)
 
     second_response = minimize(session)
     assert_success(second_response)
-    assert is_minimized(session)
+    assert document_hidden(session)

@@ -3,6 +3,7 @@
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
+from tests.support.helpers import document_hidden
 
 
 def minimize(session):
@@ -10,23 +11,17 @@ def minimize(session):
         "POST", "session/{session_id}/window/minimize".format(**vars(session)))
 
 
-def is_minimized(session):
-    return session.execute_script("return document.hidden")
-
-
 @pytest.fixture
 def check_user_prompt_closed_without_exception(session, create_dialog):
     def check_user_prompt_closed_without_exception(dialog_type, retval):
-        assert not is_minimized(session)
-
+        assert not document_hidden(session)
         create_dialog(dialog_type, text=dialog_type)
 
         response = minimize(session)
         assert_success(response)
 
         assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
 
-        assert is_minimized(session)
+        assert document_hidden(session)
 
     return check_user_prompt_closed_without_exception
@@ -34,16 +29,14 @@ def check_user_prompt_closed_without_exception(session, create_dialog):
 @pytest.fixture
 def check_user_prompt_closed_with_exception(session, create_dialog):
     def check_user_prompt_closed_with_exception(dialog_type, retval):
-        assert not is_minimized(session)
-
+        assert not document_hidden(session)
         create_dialog(dialog_type, text=dialog_type)
 
         response = minimize(session)
         assert_error(response, "unexpected alert open")
 
         assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
 
-        assert not is_minimized(session)
+        assert not document_hidden(session)
 
     return check_user_prompt_closed_with_exception
@@ -51,8 +44,7 @@ def check_user_prompt_closed_with_exception(session, create_dialog):
 @pytest.fixture
 def check_user_prompt_not_closed_but_exception(session, create_dialog):
     def check_user_prompt_not_closed_but_exception(dialog_type):
-        assert not is_minimized(session)
-
+        assert not document_hidden(session)
         create_dialog(dialog_type, text=dialog_type)
 
         response = minimize(session)
@@ -61,7 +53,7 @@ def check_user_prompt_not_closed_but_exception(session, create_dialog):
         assert session.alert.text == dialog_type
         session.alert.dismiss()
 
-        assert not is_minimized(session)
+        assert not document_hidden(session)
 
     return check_user_prompt_not_closed_but_exception

@@ -5,6 +5,7 @@ import pytest
 from webdriver.transport import Response
 
 from tests.support.asserts import assert_error, assert_success
+from tests.support.helpers import document_hidden, is_fullscreen
 
 
 def set_window_rect(session, rect):
@@ -13,17 +14,6 @@ def set_window_rect(session, rect):
         rect)
 
 
-def is_fullscreen(session):
-    # At the time of writing, WebKit does not conform to the
-    # Fullscreen API specification.
-    #
-    # Remove the prefixed fallback when
-    # https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
-    return session.execute_script("""
-        return !!(window.fullScreen || document.webkitIsFullScreen)
-        """)
-
-
 def test_null_parameter_value(session, http):
     path = "/session/{session_id}/window/rect".format(**vars(session))
     with http.post(path, None) as response:
@@ -142,26 +132,26 @@ def test_no_change(session, rect):
 
 def test_fully_exit_fullscreen(session):
     session.window.fullscreen()
-    assert is_fullscreen(session) is True
+    assert is_fullscreen(session)
 
     response = set_window_rect(session, {"width": 400, "height": 400})
     value = assert_success(response)
     assert value["width"] == 400
     assert value["height"] == 400
 
-    assert is_fullscreen(session) is False
+    assert not is_fullscreen(session)
 
 
 def test_restore_from_minimized(session):
     session.window.minimize()
-    assert session.execute_script("return document.hidden") is True
+    assert document_hidden(session)
 
     response = set_window_rect(session, {"width": 450, "height": 450})
     value = assert_success(response)
     assert value["width"] == 450
     assert value["height"] == 450
 
-    assert session.execute_script("return document.hidden") is False
+    assert not document_hidden(session)
 
 
 def test_restore_from_maximized(session):

@@ -5,6 +5,8 @@ import sys
 import webdriver
 
 from tests.support import defaults
+from tests.support.sync import Poll
 
 
 def ignore_exceptions(f):
     def inner(*args, **kwargs):
@@ -119,6 +121,24 @@ def is_element_in_viewport(session, element):
     """, args=(element,))
 
 
+def document_hidden(session):
+    """Polls for the document to become hidden."""
+    def hidden(session):
+        return session.execute_script("return document.hidden")
+    return Poll(session, timeout=3, raises=None).until(hidden)
+
+
+def is_fullscreen(session):
+    # At the time of writing, WebKit does not conform to the
+    # Fullscreen API specification.
+    #
+    # Remove the prefixed fallback when
+    # https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
+    return session.execute_script("""
+        return !!(window.fullScreen || document.webkitIsFullScreen)
+        """)
+
+
 def document_dimensions(session):
     return tuple(session.execute_script("""
         let {devicePixelRatio} = window;

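The hunk above adds the shared helpers that the per-file copies in the earlier diffs were replaced with. A minimal usage sketch follows (not part of this commit; it assumes the harness-provided `session` fixture and the `session.window` API already used in the tests above):

# Sketch only -- not taken from the diff. Assumes a wptrunner-provided
# `session` fixture, as in the webdriver tests shown earlier.
from tests.support.helpers import document_hidden, is_fullscreen


def test_fullscreen_then_minimize(session):
    session.window.fullscreen()
    # Plain truthiness checks, matching the assertion style used in this diff.
    assert is_fullscreen(session)

    session.window.minimize()
    # document_hidden() is backed by Poll(timeout=3, raises=None), so a slow
    # window manager does not immediately fail the assertion.
    assert document_hidden(session)
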
@@ -4,16 +4,6 @@
 <script src=/resources/testharnessreport.js></script>
 <div id="log"></div>
 <script>
-async_test((t) => {
-  const client = new XMLHttpRequest()
-  client.onload = t.step_func_done(() => {
-    assert_equals(client.getAllResponseHeaders(), "foo-test: 1, 2, 3\r\n")
-  })
-  client.onerror = t.unreached_func("unexpected error")
-  client.open("GET", "resources/headers-basic.asis")
-  client.send(null)
-})
-
 async_test((t) => {
   const client = new XMLHttpRequest()
   client.onload = t.step_func_done(() => {
@@ -22,22 +12,24 @@ async_test((t) => {
   client.onerror = t.unreached_func("unexpected error")
   client.open("GET", "resources/headers.asis")
   client.send(null)
-})
-
-test(() => {
-  const client = new XMLHttpRequest
-  client.open("GET", "resources/header-content-length.asis", false)
-  client.send()
-  assert_equals(client.getAllResponseHeaders(), "content-length: 0\r\n")
-})
-
-async_test(t => {
-  const client = new XMLHttpRequest();
-  client.onload = t.step_func_done(() => {
-    assert_equals(client.getAllResponseHeaders(), "www-authenticate: 1, 2, 3, 4\r\n");
-  });
-  client.onerror = t.unreached_func("unexpected error");
-  client.open("GET", "resources/headers-www-authenticate.asis");
-  client.send();
+});
+
+[
+  ["content-length", "0", "header-content-length"],
+  ["content-length", "0, 0", "header-content-length-twice"],
+  ["double-trouble", ", ", "headers-double-empty"],
+  ["foo-test", "1, 2, 3", "headers-basic"],
+  ["heya", ", \u000B\u000C, 1, , , 2", "headers-some-are-empty"],
+  ["www-authenticate", "1, 2, 3, 4", "headers-www-authenticate"],
+].forEach(testValues => {
+  async_test(t => {
+    const client = new XMLHttpRequest();
+    client.onload = t.step_func_done(() => {
+      assert_equals(client.getAllResponseHeaders(), testValues[0] + ": " + testValues[1] + "\r\n");
+    });
+    client.onerror = t.unreached_func("unexpected error");
+    client.open("GET", "resources/" + testValues[2] + ".asis");
+    client.send();
+  });
 });
 </script>

@@ -1,19 +1,18 @@
-async_test(t => {
-  const client = new XMLHttpRequest();
-  client.onload = t.step_func_done(() => {
-    assert_equals(client.getResponseHeader("foo-test"), "1, 2, 3");
-  });
-  client.onerror = t.unreached_func("unexpected error");
-  client.open("GET", "resources/headers-basic.asis");
-  client.send();
-}, "getResponseHeader('foo-test')");
-
-async_test(t => {
-  const client = new XMLHttpRequest();
-  client.onload = t.step_func_done(() => {
-    assert_equals(client.getResponseHeader("www-authenticate"), "1, 2, 3, 4");
-  });
-  client.onerror = t.unreached_func("unexpected error");
-  client.open("GET", "resources/headers-www-authenticate.asis");
-  client.send();
-}, "getResponseHeader('www-authenticate')");
+[
+  ["content-length", "0", "header-content-length"],
+  ["content-length", "0, 0", "header-content-length-twice"],
+  ["double-trouble", ", ", "headers-double-empty"],
+  ["foo-test", "1, 2, 3", "headers-basic"],
+  ["heya", ", \u000B\u000C, 1, , , 2", "headers-some-are-empty"],
+  ["www-authenticate", "1, 2, 3, 4", "headers-www-authenticate"],
+].forEach(testValues => {
+  async_test(t => {
+    const client = new XMLHttpRequest();
+    client.onload = t.step_func_done(() => {
+      assert_equals(client.getResponseHeader(testValues[0]), testValues[1]);
+    });
+    client.onerror = t.unreached_func("unexpected error");
+    client.open("GET", "resources/" + testValues[2] + ".asis");
+    client.send();
+  }, "getResponseHeader('" + testValues[0] + "') expects " + testValues[1]);
+});

@@ -0,0 +1,12 @@
+async_test(t => {
+  const client = new XMLHttpRequest();
+  client.overrideMimeType('text/plain;charset=Shift-JIS');
+  client.onreadystatechange = t.step_func(() => {
+    if (client.readyState === 4) {
+      assert_equals( client.responseText, 'テスト' );
+      t.done();
+    }
+  });
+  client.open("GET", "resources/status.py?type="+encodeURIComponent('text/html;charset=iso-8859-1')+'&content=%83%65%83%58%83%67');
+  client.send( '' );
+}, "XMLHttpRequest: overrideMimeType() in unsent state, enforcing Shift-JIS encoding");
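For reference, the percent-encoded `content` parameter in the new test above is the Shift-JIS encoding of the string the test expects back once overrideMimeType() forces Shift-JIS decoding. A quick standard-library check (illustrative only, not part of the commit):

# Illustrative check only -- not part of the commit.
import urllib.parse

raw = urllib.parse.unquote_to_bytes("%83%65%83%58%83%67")
assert raw == b"\x83\x65\x83\x58\x83\x67"
# Decoded as Shift-JIS, these bytes spell the string asserted in responseText.
assert raw.decode("shift_jis") == "テスト"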