Update web-platform-tests to revision 58eb04cecbbec2e18531ab440225e38944a9c444

parent 25e8bf69e6
commit 665817d2a6

35333 changed files with 1818077 additions and 16036 deletions
@@ -0,0 +1,17 @@
test(function() {
    self.performance.mark("mark1");
    self.performance.mark("mark2");

    // test that two marks have been created
    var entries = self.performance.getEntriesByType("mark");
    assert_equals(entries.length, 2, "Two marks have been created for this test.");

    // clear all marks
    self.performance.clearMarks();

    // test that all marks were cleared
    entries = self.performance.getEntriesByType("mark");

    assert_equals(entries.length, 0, "All marks have been cleared.");

}, "Clearing all marks removes all of them.");
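Reviewer note, not part of the patch: a minimal sketch of the contract this new test pins down, runnable in any User Timing-capable window or worker:

    // Marks accumulate on the performance timeline until cleared;
    // clearMarks() with no argument removes every mark at once.
    performance.mark("a");
    performance.mark("b");
    console.log(performance.getEntriesByType("mark").length); // 2
    performance.clearMarks();
    console.log(performance.getEntriesByType("mark").length); // 0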
@@ -0,0 +1,21 @@
test(function()
{
    self.performance.mark("mark1", "responseStart");
    self.performance.measure("measure1", "mark1");
    self.performance.mark("mark2", "responseStart");
    self.performance.measure("measure2", "mark2");

    // test that two measures have been created
    var entries = self.performance.getEntriesByType("measure");
    assert_equals(entries.length, 2, "Two measures have been created for this test.");

    // clear all measures
    self.performance.clearMeasures();

    // test that all measures were cleared
    entries = self.performance.getEntriesByType("measure");
    assert_equals(entries.length, 0,
        "After a call to self.performance.clearMeasures(), " +
        "self.performance.getEntriesByType(\"measure\") returns an empty object.");

}, "Clearing all measures removes all of them.");
@@ -0,0 +1,26 @@
test(function() {
    self.performance.mark("mark1");
    self.performance.mark("mark2");

    // test that two marks have been created
    var entries = self.performance.getEntriesByType("mark");
    assert_equals(entries.length, 2, "Two marks have been created for this test.");

    // clear non-existent mark
    self.performance.clearMarks("mark3");

    // test that "mark1" still exists
    entries = self.performance.getEntriesByName("mark1");
    assert_equals(entries[0].name, "mark1",
        "After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" +
        "\" is a non-existent mark, self.performance.getEntriesByName(\"mark1\") " +
        "returns an object containing the \"mark1\" mark.");

    // test that "mark2" still exists
    entries = self.performance.getEntriesByName("mark2");
    assert_equals(entries[0].name, "mark2",
        "After a call to self.performance.clearMarks(\"mark3\"), where \"mark3" +
        "\" is a non-existent mark, self.performance.getEntriesByName(\"mark2\") " +
        "returns an object containing the \"mark2\" mark.");

}, "Clearing a non-existent mark doesn't affect existing marks");
@@ -0,0 +1,29 @@
test(function()
{
    self.performance.mark("mark1", "responseStart");
    self.performance.measure("measure1", "mark1");
    self.performance.mark("mark2", "responseStart");
    self.performance.measure("measure2", "mark2");

    // test that two measures have been created
    var entries = self.performance.getEntriesByType("measure");
    assert_equals(entries.length, 2, "Two measures have been created for this test.");

    // clear non-existent measure
    self.performance.clearMeasures("measure3");

    // test that "measure1" still exists
    entries = self.performance.getEntriesByName("measure1");
    assert_equals(entries[0].name, "measure1",
        "After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" +
        "\" is a non-existent measure, self.performance.getEntriesByName(\"measure1\") " +
        "returns an object containing the \"measure1\" measure.");

    // test that "measure2" still exists
    entries = self.performance.getEntriesByName("measure2");
    assert_equals(entries[0].name, "measure2",
        "After a call to self.performance.clearMeasures(\"measure3\"), where \"measure3" +
        "\" is a non-existent measure, self.performance.getEntriesByName(\"measure2\") " +
        "returns an object containing the \"measure2\" measure.");

}, "Clearing a non-existent measure doesn't affect existing measures");
@@ -0,0 +1,26 @@
test(function() {
    self.performance.mark("mark1");
    self.performance.mark("mark2");

    // test that two marks have been created
    var entries = self.performance.getEntriesByType("mark");
    assert_equals(entries.length, 2, "Two marks have been created for this test.");

    // clear existent mark
    self.performance.clearMarks("mark1");

    // test that "mark1" was cleared
    entries = self.performance.getEntriesByName("mark1");

    assert_equals(entries.length, 0,
        "After a call to self.performance.clearMarks(\"mark1\"), " +
        "self.performance.getEntriesByName(\"mark1\") returns an empty object.");

    // test that "mark2" still exists
    entries = self.performance.getEntriesByName("mark2");
    assert_equals(entries[0].name, "mark2",
        "After a call to self.performance.clearMarks(\"mark1\"), " +
        "self.performance.getEntriesByName(\"mark2\") returns an object containing the " +
        "\"mark2\" mark.");

}, "Clearing an existent mark doesn't affect other existing marks");
@@ -0,0 +1,29 @@
test(function()
{
    self.performance.mark("mark1", "responseStart");
    self.performance.measure("measure1", "mark1");
    self.performance.mark("mark2", "responseStart");
    self.performance.measure("measure2", "mark2");

    // test that two measures have been created
    var entries = self.performance.getEntriesByType("measure");
    assert_equals(entries.length, 2, "Two measures have been created for this test.");

    // clear existent measure
    self.performance.clearMeasures("measure1");

    // test that "measure1" was cleared
    entries = self.performance.getEntriesByName("measure1");

    assert_equals(entries.length, 0,
        "After a call to self.performance.clearMeasures(\"measure1\"), " +
        "self.performance.getEntriesByName(\"measure1\") returns an empty object.");

    // test that "measure2" still exists
    entries = self.performance.getEntriesByName("measure2");
    assert_equals(entries[0].name, "measure2",
        "After a call to self.performance.clearMeasures(\"measure1\"), " +
        "self.performance.getEntriesByName(\"measure2\") returns an object containing the " +
        "\"measure2\" measure.");

}, "Clearing an existent measure doesn't affect other existing measures");
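Reviewer note: the four tests above fix the selective-clear semantics; the same behavior as a standalone sketch:

    performance.mark("keep");
    performance.mark("drop");
    performance.clearMarks("drop");          // only the matching mark is removed
    performance.clearMarks("no-such-mark");  // clearing an unknown name is a no-op
    console.log(performance.getEntriesByName("keep").length); // 1
    console.log(performance.getEntriesByName("drop").length); // 0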
13 tests/wpt/web-platform-tests/user-timing/entry_type.any.js Normal file
@@ -0,0 +1,13 @@
test(function () {
    self.performance.mark('mark');
    var mark_entry = self.performance.getEntriesByName('mark')[0];

    assert_equals(Object.prototype.toString.call(mark_entry), '[object PerformanceMark]', 'Class name of mark entry should be PerformanceMark.');
}, "Validate the user timing entry type PerformanceMark");

test(function () {
    self.performance.measure('measure');
    var measure_entry = self.performance.getEntriesByName('measure')[0];

    assert_equals(Object.prototype.toString.call(measure_entry), '[object PerformanceMeasure]', 'Class name of measure entry should be PerformanceMeasure.');
}, "Validate the user timing entry type PerformanceMeasure");
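Reviewer note: the Object.prototype.toString call here is the usual brand check; unlike instanceof, it also works on entries that came from another global. A sketch:

    performance.mark("m");
    var entry = performance.getEntriesByName("m")[0];
    // Logs "[object PerformanceMark]" in a conforming implementation.
    console.log(Object.prototype.toString.call(entry));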
@@ -17,9 +17,11 @@
<div id="log"></div>

<pre id='untested_idl' style='display:none'>
[Exposed=(Window,Worker)]
interface Performance {
};

[Exposed=(Window,Worker)]
interface PerformanceEntry {
};
</pre>
@@ -33,9 +35,11 @@ partial interface Performance {
    void clearMeasures(optional DOMString measureName);
};

[Exposed=(Window,Worker)]
interface PerformanceMark : PerformanceEntry {
};

[Exposed=(Window,Worker)]
interface PerformanceMeasure : PerformanceEntry {
};

@@ -3,30 +3,33 @@
<head>
    <meta charset="utf-8" />
    <title>exception test of performance.mark and performance.measure</title>
    <meta rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
    <meta rel="help" href="https://w3c.github.io/user-timing/#extensions-performance-interface"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>
</head>
<body>
    <script>
    setup({explicit_done: true});
    test_namespace();

    test(function() {
        for (var i in timingAttributes) {
            assert_throws("SyntaxError", function() { window.performance.mark(timingAttributes[i]); });
            assert_throws("SyntaxError", function() { window.performance.measure(timingAttributes[i]); });
        }
    }, "performance.mark and performance.measure should throw if used with timing attribute values");

    fetch_tests_from_worker(new Worker("test_user_timing_mark_and_measure_exception_when_invoke_with_timing_attributes.js"));

    done();

    </script>
    <h1>Description</h1>
    <p>This test validates exception scenarios of invoking mark() and measure() with timing attributes as value.</p>
    <div id="log"></div>
    <script>
    function emit_test(attrName) {
        test(function() {
            assert_throws("SyntaxError", function() { window.performance.mark(attrName); });
        }, "performance.mark should throw if used with timing attribute " + attrName);
    }
    for (var i in timingAttributes) {
        emit_test(timingAttributes[i]);
    }

    function emit_test2(attrName) {
        test(function() {
            assert_throws("SyntaxError", function() { window.performance.measure(attrName); });
        }, "performance.measure should throw if used with timing attribute " + attrName);
    }
    for (var i in timingAttributes) {
        emit_test2(timingAttributes[i]);
    }
    </script>
</body>
</html>
@@ -0,0 +1,25 @@
importScripts("/resources/testharness.js");
importScripts("resources/webperftestharness.js");

function emit_test(attrName) {
    test(function() {
        performance.mark(attrName);
        performance.clearMarks(attrName);
    }, "performance.mark should not throw if used with timing attribute " + attrName
       + " in workers");
}
for (var i in timingAttributes) {
    emit_test(timingAttributes[i]);
}

function emit_test2(attrName) {
    test(function() {
        performance.measure(attrName);
        performance.clearMeasures(attrName);
    }, "performance.measure should not throw if used with timing attribute " + attrName
       + " in workers");
}
for (var i in timingAttributes) {
    emit_test2(timingAttributes[i]);
}
done();
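Reviewer note: the asymmetry this worker test relies on, sketched under the User Timing Level 1 rules exercised by this suite: a Window has PerformanceTiming, so navigation-timing attribute names are reserved there; a worker does not, so the same names are legal mark names.

    // In a Window context:
    try {
        performance.mark("responseStart");
    } catch (e) {
        console.log(e.name); // "SyntaxError" -- the name collides with a timing attribute
    }
    // In a worker, performance.mark("responseStart") succeeds,
    // which is exactly what the tests above assert.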
@@ -0,0 +1,26 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
    <title>exception test of performance.mark and performance.measure</title>
    <link rel="author" title="Intel" href="http://www.intel.com/" />
    <link rel="help" href="https://w3c.github.io/user-timing/#extensions-performance-interface"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>
</head>
<body>
    <h1>Description</h1>
    <p>This test validates exception scenarios of invoking mark() and measure() without parameter.</p>
    <div id="log"></div>
    <script>
    test(function () {
        assert_throws(new TypeError(), function () { window.performance.mark() });
    }, "window.performance.mark() throws a TypeError exception when invoked without a parameter.");

    test(function () {
        assert_throws(new TypeError(), function () { window.performance.measure(); });
    }, "window.performance.measure() throws a TypeError exception when invoked without a parameter.");
    </script>
</body>
</html>
118 tests/wpt/web-platform-tests/user-timing/mark.any.js Normal file
@@ -0,0 +1,118 @@
// test data
var testThreshold = 20;

var expectedTimes = new Array();

function match_entries(entries, index)
{
    var entry = entries[index];
    var match = self.performance.getEntriesByName("mark")[index];
    assert_equals(entry.name, match.name, "entry.name");
    assert_equals(entry.startTime, match.startTime, "entry.startTime");
    assert_equals(entry.entryType, match.entryType, "entry.entryType");
    assert_equals(entry.duration, match.duration, "entry.duration");
}

function filter_entries_by_type(entryList, entryType)
{
    var testEntries = new Array();

    // filter entryList
    for (var i in entryList)
    {
        if (entryList[i].entryType == entryType)
        {
            testEntries.push(entryList[i]);
        }
    }

    return testEntries;
}

test(function () {
    // create first mark
    self.performance.mark("mark");

    expectedTimes[0] = self.performance.now();

    entries = self.performance.getEntriesByName("mark");
    assert_equals(entries.length, 1);
}, "Entry 0 is properly created");

test(function () {
    // create second, duplicate mark
    self.performance.mark("mark");

    expectedTimes[1] = self.performance.now();

    entries = self.performance.getEntriesByName("mark");
    assert_equals(entries.length, 2);

}, "Entry 1 is properly created");

function test_mark(index) {
    test(function () {
        entries = self.performance.getEntriesByName("mark");
        assert_equals(entries[index].name, "mark", "Entry has the proper name");
    }, "Entry " + index + " has the proper name");

    test(function () {
        entries = self.performance.getEntriesByName("mark");
        assert_approx_equals(entries[index].startTime, expectedTimes[index], testThreshold);
    }, "Entry " + index + " startTime is approximately correct (up to " + testThreshold +
       "ms difference allowed)");

    test(function () {
        entries = self.performance.getEntriesByName("mark");
        assert_equals(entries[index].entryType, "mark");
    }, "Entry " + index + " has the proper entryType");

    test(function () {
        entries = self.performance.getEntriesByName("mark");
        assert_equals(entries[index].duration, 0);
    }, "Entry " + index + " duration == 0");

    test(function () {
        entries = self.performance.getEntriesByName("mark", "mark");
        assert_equals(entries[index].name, "mark");
    }, "getEntriesByName(\"mark\", \"mark\")[" + index + "] returns an " +
       "object containing a \"mark\" mark");

    test(function () {
        entries = self.performance.getEntriesByName("mark", "mark");
        match_entries(entries, index);
    }, "The mark returned by getEntriesByName(\"mark\", \"mark\")[" + index
       + "] matches the mark returned by " +
       "getEntriesByName(\"mark\")[" + index + "]");

    test(function () {
        entries = filter_entries_by_type(self.performance.getEntries(), "mark");
        assert_equals(entries[index].name, "mark");
    }, "getEntries()[" + index + "] returns an " +
       "object containing a \"mark\" mark");

    test(function () {
        entries = filter_entries_by_type(self.performance.getEntries(), "mark");
        match_entries(entries, index);
    }, "The mark returned by getEntries()[" + index
       + "] matches the mark returned by " +
       "getEntriesByName(\"mark\")[" + index + "]");

    test(function () {
        entries = self.performance.getEntriesByType("mark");
        assert_equals(entries[index].name, "mark");
    }, "getEntriesByType(\"mark\")[" + index + "] returns an " +
       "object containing a \"mark\" mark");

    test(function () {
        entries = self.performance.getEntriesByType("mark");
        match_entries(entries, index);
    }, "The mark returned by getEntriesByType(\"mark\")[" + index
       + "] matches the mark returned by " +
       "getEntriesByName(\"mark\")[" + index + "]");

}

for (var i = 0; i < expectedTimes.length; i++) {
    test_mark(i);
}
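Reviewer note: the 20 ms testThreshold above hedges against scheduling jitter between mark() and the later performance.now() sample; the comparison reduces to this sketch:

    performance.mark("m");
    var expected = performance.now(); // sampled shortly after the mark
    var startTime = performance.getEntriesByName("m")[0].startTime;
    // assert_approx_equals(startTime, expected, 20) passes whenever the two
    // samples were taken within 20 ms of each other.
    console.log(Math.abs(startTime - expected) <= 20); // true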
@@ -0,0 +1,35 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing mark() method is throwing the proper exceptions</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://w3c.github.io/user-timing/#dom-performance-mark"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script>
    function test_exception(attrName) {
        test(function () {
            assert_throws(new SyntaxError(), function () {
                window.performance.mark(attrName);
            })
        }, "window.performance.mark(\"" + attrName + "\") throws a SyntaxError exception.");
    }

    // loop through mark scenarios
    for (var i in timingAttributes) {
        test_exception(timingAttributes[i]);
    }
    </script>
</head>
<body>
    <h1>Description</h1>
    <p>This test validates that the performance.mark() method throws a SYNTAX_ERR exception whenever a navigation
    timing attribute is provided for the name parameter.
    </p>

    <div id="log"></div>
</body>
</html>
@@ -1,15 +1,16 @@

<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing measure() method is working properly</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
    <link rel="help" href="https://w3c.github.io/user-timing/#dom-performance-measure"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script type="text/javascript">
    <script>
    // test data
    var startMarkName = "mark_start";
    var startMarkValue;
@@ -135,7 +136,7 @@

        // when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
        // the current time with a timebase of the navigationStart attribute is used
        scenario.duration = ((new Date()) - window.performance.timing.navigationStart) -
        scenario.duration = window.performance.now() -
            startMarkValue;
    }
    else if (scenario.startMark != undefined && scenario.endMark != undefined)
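Reviewer note on the change above: Date arithmetic mixes the wall clock with the navigation timebase, while performance.now() already returns a DOMHighResTimeStamp relative to navigationStart, so it is directly comparable to mark values. A sketch of the difference:

    var viaDate = (new Date()) - window.performance.timing.navigationStart; // wall clock, ms resolution
    var viaNow  = window.performance.now(); // monotonic, sub-millisecond resolution
    // Usually close, but viaDate can jump if the system clock is adjusted.
    console.log(Math.abs(viaDate - viaNow));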
@@ -0,0 +1,70 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing measure() method is throwing the proper exceptions</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="https://w3c.github.io/user-timing/#dom-performance-measure"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script>
    // test data
    var zeroedNavTimingAtt = undefined;

    setup(function () {
        // for testing the INVALID_ACCESS_ERR exception, find a navigation timing attribute with a value of zero
        for (var i in timingAttributes) {
            if (window.performance.timing[timingAttributes[i]] == 0) {
                zeroedNavTimingAtt = timingAttributes[i];
            }
        }
        if (zeroedNavTimingAtt == undefined) {
            throw new Error("A navigation timing attribute with a value of 0 was not found to test for the " +
                            "INVALID_ACCESS_ERR exception thrown by window.performance.measure().")
        }
    });

    test(function () {
        assert_throws("InvalidAccessError", function () {
            window.performance.measure("measure", zeroedNavTimingAtt);
        });
    }, "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
       zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, throws an " +
       "InvalidAccessError exception.");

    test(function () {
        assert_throws("InvalidAccessError", function () {
            window.performance.measure("measure", zeroedNavTimingAtt, "responseEnd");
        });
    }, "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
       "\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
       "attribute with a value of 0, throws an InvalidAccessError exception.");

    test(function () {
        assert_throws("InvalidAccessError", function () {
            window.performance.measure("measure", "navigationStart", zeroedNavTimingAtt);
        });
    }, "window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
       "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
       "value of 0, throws an InvalidAccessError exception.");

    test(function () {
        assert_throws("InvalidAccessError", function () {
            window.performance.measure("measure", zeroedNavTimingAtt, zeroedNavTimingAtt);
        });
    }, "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
       zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
       "attribute with a value of 0, throws an InvalidAccessError exception.");
    </script>
</head>
<body onload="onload_test();">
    <h1>Description</h1>
    <p><code>window.performance.measure()</code> method throws an InvalidAccessError
    whenever a navigation timing attribute with a value of zero is provided as the startMark or endMark.
    </p>

    <div id="log"></div>
</body>
</html>
@@ -1,3 +1,4 @@

<!DOCTYPE html>
<html>
<head>
@@ -5,12 +6,12 @@
    <title>window.performance User Timing clearMeasures() method is working properly with navigation timing
    attributes</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
    <link rel="help" href="https://w3c.github.io/user-timing/#dom-performance-measure"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script type="text/javascript">
    <script>
    // test data
    var startMarkName = "mark_start";
    var startMarkValue;
@@ -0,0 +1,33 @@
test(function () {
    self.performance.mark("existing_mark");
    var entries = self.performance.getEntriesByName("existing_mark");
    assert_equals(entries.length, 1);
    self.performance.measure("measure", "existing_mark");
}, "Create a mark \"existing_mark\"");
test(function () {
    assert_throws(new SyntaxError(), function () {
        self.performance.measure("measure", "mark");
    });
}, "self.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
   "throws a SyntaxError exception.");

test(function () {
    assert_throws(new SyntaxError(), function () {
        self.performance.measure("measure", "mark", "existing_mark");
    });
}, "self.performance.measure(\"measure\", \"mark\", \"existing_mark\"), where \"mark\" is a " +
   "non-existent mark, throws a SyntaxError exception.");

test(function () {
    assert_throws(new SyntaxError(), function () {
        self.performance.measure("measure", "existing_mark", "mark");
    });
}, "self.performance.measure(\"measure\", \"existing_mark\", \"mark\"), where \"mark\" " +
   "is a non-existent mark, throws a SyntaxError exception.");

test(function () {
    assert_throws(new SyntaxError(), function () {
        self.performance.measure("measure", "mark", "mark");
    });
}, "self.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
   "non-existent mark, throws a SyntaxError exception.");
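Reviewer note: for contrast with the failure cases above, the happy path of measure() with explicit marks, as a sketch:

    performance.mark("start");
    // ... work to be timed ...
    performance.mark("end");
    // Both named marks exist, so no SyntaxError is thrown here.
    performance.measure("work", "start", "end");
    console.log(performance.getEntriesByName("work")[0].duration); // elapsed ms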
@@ -14,26 +14,27 @@ policies and contribution forms [3].

var performanceNamespace = self.performance;
var timingAttributes = [
    'connectEnd',
    'connectStart',
    'domComplete',
    'domContentLoadedEventEnd',
    'domContentLoadedEventStart',
    'domInteractive',
    'domLoading',
    'domainLookupEnd',
    'domainLookupStart',
    'fetchStart',
    'loadEventEnd',
    'loadEventStart',
    'navigationStart',
    'redirectEnd',
    'redirectStart',
    'requestStart',
    'responseEnd',
    'responseStart',
    'unloadEventEnd',
    'unloadEventStart'
    "navigationStart",
    "unloadEventStart",
    "unloadEventEnd",
    "redirectStart",
    "redirectEnd",
    "fetchStart",
    "domainLookupStart",
    "domainLookupEnd",
    "connectStart",
    "connectEnd",
    "secureConnectionStart",
    "requestStart",
    "responseStart",
    "responseEnd",
    "domLoading",
    "domInteractive",
    "domContentLoadedEventStart",
    "domContentLoadedEventEnd",
    "domComplete",
    "loadEventStart",
    "loadEventEnd"
];

var namespace_check = false;

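Reviewer note: the replacement list follows the navigation processing-model order rather than alphabetical order, and adds secureConnectionStart; a loop over it now walks the attributes chronologically, e.g.:

    for (var i in timingAttributes) {
        console.log(timingAttributes[i], window.performance.timing[timingAttributes[i]]);
    }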
@@ -1,128 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing clearMarks() method is working properly</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-clearmarks"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script type="text/javascript">
    // test marks
    var markName1 = "mark1";
    var markName2 = "mark2";
    var markName3 = "markUndefined";
    var markTestDelay = 200;
    var entries;
    var pass;

    setup({explicit_done: true});

    test_namespace();

    function onload_test()
    {
        // test for existance of User Timing and Performance Timeline interface
        if (!has_required_interfaces())
        {
            test_true(false,
                "The User Timing and Performance Timeline interfaces, which are required for this test, " +
                "are defined.");

            done();
        }
        else
        {
            // create a mark using the test delay; the mark's value should be equivalent to the loadEventStart
            // navigation timing attribute plus the test delay
            setTimeout(mark_test_cb, markTestDelay);
        }
    }

    function mark_test_cb()
    {
        // create the test marks; only create "mark1" and "mark2", "markUndefined" is a non-existent mark
        window.performance.mark(markName1);
        window.performance.mark(markName2);

        // test that two marks have been created
        entries = window.performance.getEntriesByType("mark");
        test_equals(entries.length, 2, "Two marks have been created for this test.");

        // clear non-existent mark
        window.performance.clearMarks(markName3);

        // test that "mark1" still exists
        entries = window.performance.getEntriesByName(markName1);
        test_true(entries[0].name == markName1,
            "After a call to window.performance.clearMarks(\"" + markName3 + "\"), where \"" + markName3 +
            "\" is a non-existent mark, window.performance.getEntriesByName(\"" + markName1 + "\") " +
            "returns an object containing the \"" + markName1 + "\" mark.");

        // test that "mark2" still exists
        entries = window.performance.getEntriesByName(markName2);
        test_true(entries[0].name == markName2,
            "After a call to window.performance.clearMarks(\"" + markName3 + "\"), where \"" + markName3 +
            "\" is a non-existent mark, window.performance.getEntriesByName(\"" + markName2 + "\") " +
            "returns an object containing the \"" + markName2 + "\" mark.");

        // clear existent mark
        window.performance.clearMarks(markName1);

        // test that "mark1" was cleared
        entries = window.performance.getEntriesByName(markName1);
        pass = true;
        for (var i in entries)
        {
            pass = false;
        }
        test_true(pass,
            "After a call to window.performance.clearMarks(\"" + markName1 + "\"), " +
            "window.performance.getEntriesByName(\"" + markName1 + "\") returns an empty object.");

        // test that "mark2" still exists
        entries = window.performance.getEntriesByName(markName2);
        test_true(entries[0].name == markName2,
            "After a call to window.performance.clearMarks(\"" + markName1 + "\"), " +
            "window.performance.getEntriesByName(\"" + markName2 + "\") returns an object containing the " +
            "\"" + markName2 + "\" mark.");

        // clear all marks
        window.performance.clearMarks();

        // test that all marks were cleared
        entries = window.performance.getEntriesByType("mark");
        pass = true;
        for (var i in entries)
        {
            pass = false;
        }
        test_true(pass,
            "After a call to window.performance.clearMarks(), " +
            "window.performance.getEntriesByType(\"mark\") returns an empty object.");

        done();
    }
    </script>
</head>
<body onload="onload_test();">
    <h1>Description</h1>
    <p>This test validates that the performance.clearMarks() method is working properly. This test creates the
    following marks to test this method:
    <ul>
        <li>"mark1"</li>
        <li>"mark2"</li>
    </ul>
    After creating each mark, performance.clearMarks() is called three times. First, it is provided with a name
    of "markUndefined", a non-existent mark, which shouldn't change the state of the Performance Timeline. Next,
    it is provided with a name of "mark2", after which, this mark should no longer be present in the Performance
    Timeline. Finally, performance.clearMarks() is called without any name provided. After this call, no marks
    should be present in the Performance Timeline. The state of the Performance Timeline is tested with the
    performance.getEntriesByType() and performance.getEntries() methods.
    </p>

    <div id="log"></div>
</body>
</html>
@@ -1,130 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing clearMeasures() method is working properly</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-clearmeasures"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script type="text/javascript">
    // test measures
    var measureName1 = "measure1";
    var measureName2 = "measure2";
    var measureName3 = "measureUndefined";
    var measureTestDelay = 200;
    var measureEntryNames;
    var entries;

    setup({explicit_done: true});

    test_namespace();

    function onload_test()
    {
        // test for existance of User Timing and Performance Timeline interface
        if (!has_required_interfaces())
        {
            test_true(false,
                "The User Timing and Performance Timeline interfaces, which are required for this test, " +
                "are defined.");

            done();
        }
        else
        {
            // create measures using the test delay
            setTimeout(measure_test_cb, measureTestDelay);
        }
    }

    function measure_test_cb()
    {
        // create the test measures; only create "measure1" and "measure2", "measureUndefined" is a non-existent
        // measure; give "measure1" a startMark of "navigationStart" and "measure2" a startMark of
        // "responseEnd", this way, "measure1" always come first in a PerformanceEntryList returned from a
        // Performance Timeline accessor
        window.performance.measure(measureName1, "navigationStart");
        window.performance.measure(measureName2, "responseEnd");

        // test that two measures have been created
        entries = window.performance.getEntriesByType("measure");
        test_equals(entries.length, 2, "Two measures have been created for this test.");

        // clear non-existent measure
        window.performance.clearMeasures(measureName3);

        // test that "measure1" still exists
        entries = window.performance.getEntriesByName(measureName1);
        test_true(entries[0].name == measureName1,
            "After a call to window.performance.clearMeasures(\"" + measureName3 + "\"), where \"" + measureName3 +
            "\" is a non-existent measure, window.performance.getEntriesByName(\"" + measureName1 + "\") " +
            "returns an object containing the \"" + measureName1 + "\" measure.");

        // test that "measure2" still exists
        entries = window.performance.getEntriesByName(measureName2);
        test_true(entries[0].name == measureName2,
            "After a call to window.performance.clearMeasures(\"" + measureName3 + "\"), where \"" + measureName3 +
            "\" is a non-existent measure, window.performance.getEntriesByName(\"" + measureName2 + "\") " +
            "returns an object containing the \"" + measureName2 + "\" measure.");

        // clear existent measure
        window.performance.clearMeasures(measureName1);

        // test that "measure1" was cleared
        entries = window.performance.getEntriesByName(measureName1);
        pass = true;
        for (var i in entries)
        {
            pass = false;
        }
        test_true(pass,
            "After a call to window.performance.clearMeasures(\"" + measureName1 + "\"), " +
            "window.performance.getEntriesByName(\"" + measureName1 + "\") returns an empty object.");

        // test that "measure2" still exists
        entries = window.performance.getEntriesByName(measureName2);
        test_true(entries[0].name == measureName2,
            "After a call to window.performance.clearMeasures(\"" + measureName1 + "\"), " +
            "window.performance.getEntriesByName(\"" + measureName2 + "\") returns an object containing the " +
            "\"" + measureName2 + "\" measure.");

        // clear all measures
        window.performance.clearMeasures();

        // test that all measures were cleared
        entries = window.performance.getEntriesByType("measure");
        pass = true;
        for (var i in entries)
        {
            pass = false;
        }
        test_true(pass,
            "After a call to window.performance.clearMeasures(), " +
            "window.performance.getEntriesByType(\"measure\") returns an empty object.");

        done();
    }
    </script>
</head>
<body onload="onload_test();">
    <h1>Description</h1>
    <p>This test validates that the performance.clearMeasures() method is working properly. This test creates the
    following measures to test this method:
    <ul>
        <li>"measure1"</li>
        <li>"measure2"</li>
    </ul>
    After creating each measure, performance.clearMeasures() is called three times. First, it is provided with a
    name of "measureUndefined", a non-existent measure, which shouldn't change the state of the Performance
    Timeline. Next, it is provided with a name of "measure2", after which, this measure should no longer be
    present in the Performance Timeline. Finally, performance.clearMeasures() is called without any name
    provided. After this call, no measures should be present in the Performance Timeline. The state of the
    Performance Timeline is tested with the performance.getEntriesByType() and performance.getEntries() methods.
    </p>

    <div id="log"></div>
</body>
</html>
@@ -1,29 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
    <title>user timing entry type</title>
    <link rel="author" title="Intel" href="http://www.intel.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>
    <script src="resources/webperftestharnessextension.js"></script>
</head>
<body>
    <h1>Description</h1>
    <p>This test validates the user timing entry type, PerformanceMark and PerformanceMeasure.</p>

    <div id="log"></div>
    <script>
    var context = new PerformanceContext(window.performance);
    context.mark('mark');
    context.measure('measure');
    var mark_entry = context.getEntriesByName('mark')[0];
    var measure_entry = context.getEntriesByName('measure')[0];

    test_equals(Object.prototype.toString.call(mark_entry), '[object PerformanceMark]', 'Class name of mark entry should be PerformanceMark.');
    test_equals(Object.prototype.toString.call(measure_entry), '[object PerformanceMeasure]', 'Class name of measure entry should be PerformanceMeasure.');
    </script>
</body>
</html>
@@ -1,28 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing exists</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

</head>
<body>
    <h1>Description</h1>
    <p>This test validates that all of the methods used to interact with the User Timing API are defined.</p>

    <div id="log"></div>

    <script>
    test_namespace();

    test_true(window.performance.mark !== undefined, "window.performance.mark is defined.");
    test_true(window.performance.clearMarks !== undefined, "window.performance.clearMarks is defined.");
    test_true(window.performance.measure !== undefined, "window.performance.measure is defined.");
    test_true(window.performance.clearMeasures !== undefined, "window.performance.clearMeasures is defined.");
    </script>
</body>
</html>
@@ -1,222 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing mark() method is working properly</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-mark"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script type="text/javascript">
    // test data
    var markTestDelay = 200;
    var testThreshold = 20;
    var marks;

    var TEST_MARKS =
    [
        {
            name: "mark1",
            expectedStartTime: undefined,
            entryMatch: undefined
        },
        {
            name: "mark1",
            expectedStartTime: undefined,
            entryMatch: undefined
        }
    ];

    setup({explicit_done: true});

    test_namespace();

    function onload_test()
    {
        // test for existance of User Timing and Performance Timeline interface
        if (!has_required_interfaces())
        {
            test_true(false,
                "The User Timing and Performance Timeline interfaces, which are required for this test, " +
                "are defined.");

            done();
        }
        else
        {
            // create first mark
            window.performance.mark(TEST_MARKS[0].name);

            // record the time that this mark is created; this should correspond to the mark's startTime
            TEST_MARKS[0].expectedStartTime = (new Date()) - window.performance.timing.navigationStart;

            // create the duplicate mark using the test delay; the duplicate mark's value should be equivalent to
            // the loadEventStart navigation timing attribute plus the test delay
            setTimeout(mark_test_cb, markTestDelay);
        }
    }

    function mark_test_cb()
    {
        var getByNameScenarios = new Array();

        // create second, duplicate mark
        window.performance.mark(TEST_MARKS[1].name);

        // record the time that this mark is created; this should correspond to the mark's startTime
        TEST_MARKS[1].expectedStartTime = (new Date()) - window.performance.timing.navigationStart;

        // test the test marks are returned by getEntriesByName
        entries = window.performance.getEntriesByName(TEST_MARKS[0].name);
        test_mark(entries[0],
            "window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")[0]",
            TEST_MARKS[0].name,
            TEST_MARKS[0].expectedStartTime);
        TEST_MARKS[0].entryMatch = entries[0];

        test_mark(entries[1],
            "window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\")[1]",
            TEST_MARKS[1].name,
            TEST_MARKS[1].expectedStartTime);
        TEST_MARKS[1].entryMatch = entries[1];

        // test the test marks are returned by getEntriesByName with the entryType parameter provided
        entries = window.performance.getEntriesByName(TEST_MARKS[0].name, "mark");
        test_equals(entries[0].name, TEST_MARKS[0].name,
            "window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\", \"mark\") returns an " +
            "object containing the \"" + TEST_MARKS[0].name + "\" mark in the correct order");

        test_equals(entries[1].name, TEST_MARKS[1].name,
            "window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\", \"mark\") returns an " +
            "object containing the duplicate \"" + TEST_MARKS[1].name + "\" mark in the correct order");

        test_true(match_entries(entries[0], TEST_MARKS[0].entryMatch),
            "The \"" + TEST_MARKS[0].name + "\" mark returned by " +
            "window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\", \"mark\") matches the " +
            "the \"" + TEST_MARKS[0].name + "\" mark returned by " +
            "window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")");

        test_true(match_entries(entries[1], TEST_MARKS[1].entryMatch),
            "The duplicate \"" + TEST_MARKS[1].name + "\" mark returned by " +
            "window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\", \"mark\") matches the " +
            "the duplicate \"" + TEST_MARKS[1].name + "\" mark returned by " +
            "window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\")");

        // test the test marks are returned by getEntries
        entries = get_test_entries(window.performance.getEntries(), "mark");

        test_equals(entries[0].name, TEST_MARKS[0].name,
            "window.performance.getEntries() returns an object containing the original \"" +
            TEST_MARKS[0].name + "\" mark in the correct order");

        test_equals(entries[1].name, TEST_MARKS[1].name,
            "window.performance.getEntries() returns an object containing the duplicate \"" +
            TEST_MARKS[1].name + "\" mark in the correct order");

        test_true(match_entries(entries[0], TEST_MARKS[0].entryMatch),
            "The \"" + TEST_MARKS[0].name + "\" mark returned by " +
            "window.performance.getEntries() matches the the \"" + TEST_MARKS[0].name + "\" mark returned " +
            "by window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")");

        test_true(match_entries(entries[1], TEST_MARKS[1].entryMatch),
            "The \"" + TEST_MARKS[1].name + "\" mark returned by " +
            "window.performance.getEntries() matches the the duplicate \"" + TEST_MARKS[1].name + "\" mark " +
            "returned by window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\")");

        // test the test marks are returned by getEntriesByType
        entries = window.performance.getEntriesByType("mark");

        test_equals(entries[0].name, TEST_MARKS[0].name,
            "window.performance.getEntriesByType(\"mark\") returns an object containing the original \"" +
            TEST_MARKS[0].name + "\" mark in the correct order");

        test_equals(entries[1].name, TEST_MARKS[1].name,
            "window.performance.getEntriesByType(\"mark\") returns an object containing the duplicate \"" +
            TEST_MARKS[1].name + "\" mark in the correct order");

        test_true(match_entries(entries[0], TEST_MARKS[0].entryMatch),
            "The \"" + TEST_MARKS[0].name + "\" mark returned by " +
            "window.performance.getEntriesByType(\"mark\") matches the the \"" + TEST_MARKS[0].name +
            "\" mark returned by window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")");

        test_true(match_entries(entries[1], TEST_MARKS[1].entryMatch),
            "The \"" + TEST_MARKS[1].name + "\" mark returned by " +
            "window.performance.getEntriesByType(\"mark\") matches the the duplicate \"" +
            TEST_MARKS[1].name + "\" mark returned by window.performance.getEntriesByName(\"" +
            TEST_MARKS[1].name + "\")");

        done();
    }

    function match_entries(entry1, entry2)
    {
        var pass = true;

        // match name
        pass = pass && (entry1.name == entry2.name);

        // match startTime
        pass = pass && (entry1.startTime == entry2.startTime);

        // match entryType
        pass = pass && (entry1.entryType == entry2.entryType);

        // match duration
        pass = pass && (entry1.duration == entry2.duration);

        return pass;
    }

    function test_mark(markEntry, markEntryCommand, expectedName, expectedStartTime)
    {
        // test name
        test_equals(markEntry.name, expectedName, markEntryCommand + ".name == \"" + expectedName + "\"");

        // test startTime, allow for an acceptable threshold in the difference between the startTime and the
        // expected value for the startTime (loadEventStart + markTestDelay)
        test_true(Math.abs(markEntry.startTime - expectedStartTime) <= testThreshold,
            markEntryCommand + ".startTime is approximately correct (up to " + testThreshold +
            "ms difference allowed)");

        // verify entryType
        test_equals(markEntry.entryType, "mark", markEntryCommand + ".entryType == \"mark\"");

        // verify duration
        test_equals(markEntry.duration, 0, markEntryCommand + ".duration == 0");
    }

    function get_test_entries(entryList, entryType)
    {
        var testEntries = new Array();

        // filter entryList
        for (var i in entryList)
        {
            if (entryList[i].entryType == entryType)
            {
                testEntries.push(entryList[i]);
            }
        }

        return testEntries;
    }
    </script>
</head>
<body onload="onload_test();">
    <h1>Description</h1>
    <p>This test validates that the performance.mark() method is working properly. This test creates the
    following marks to test this method:
    <ul>
        <li>"mark1": created using a normal mark() call</li>
        <li>"mark1": duplicate of the first mark, used to confirm names can be re-used</li>
    </ul>
    After creating each mark, the existence of these marks is validated by calling
    performance.getEntriesByName() (both with and without the entryType parameter provided),
    performance.getEntriesByType(), and performance.getEntries()
    </p>

    <div id="log"></div>
</body>
</html>
@@ -1,14 +0,0 @@
importScripts("/resources/testharness.js");
importScripts("resources/webperftestharness.js");

test(function() {
    for (var i in timingAttributes) {
        performance.mark(timingAttributes[i]);
        performance.clearMarks(timingAttributes[i]);

        performance.measure(timingAttributes[i]);
        performance.clearMeasures(timingAttributes[i]);
    }
}, "performance.mark and performance.measure should not throw if used with timing attribute values in workers");

done();
@@ -1,67 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
    <title>exception test of performance.mark and performance.measure</title>
    <link rel="author" title="Intel" href="http://www.intel.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>
    <script>
    setup({explicit_done: true});
    test_namespace();

    function onload_test() {
        if (window.performance !== undefined && window.performance.mark !== undefined)
        {
            try
            {
                window.performance.mark();
                test_true(false, "window.performance.mark() threw an exception when invoke without a parameter.");
            }
            catch(e)
            {
                test_true(true, "window.performance.mark() threw an exception when invoke without a parameter.");

                test_equals(e.name,
                    "TypeError",
                    "window.performance.mark() threw a TYPE_ERR exception when invoke without a parameter.");
            }
        }
        else
        {
            test_true(false, "window.performance.mark() interface is not supported!");
        }

        if (window.performance !== undefined && window.performance.measure !== undefined)
        {
            try
            {
                window.performance.measure();
                test_true(false, "window.performance.measure() threw an exception when invoke without a parameter.");
            }
            catch(e)
            {
                test_true(true, "window.performance.measure() threw an exception when invoke without a parameter.");

                test_equals(e.name,
                    "TypeError",
                    "window.performance.measure() threw a TYPE_ERR exception when invoke without a parameter.");
            }
        }
        else
        {
            test_true(false, "window.performance.measure() interface is not supported!");
        }

        done();
    }
    </script>
</head>
<body onload="onload_test();">
    <h1>Description</h1>
    <p>This test validates exception scenarios of invoking mark() and measure() without parameter.</p>
    <div id="log"></div>
</body>
</html>
@@ -1,75 +0,0 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8" />
    <title>window.performance User Timing mark() method is throwing the proper exceptions</title>
    <link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
    <link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-mark"/>
    <script src="/resources/testharness.js"></script>
    <script src="/resources/testharnessreport.js"></script>
    <script src="resources/webperftestharness.js"></script>

    <script type="text/javascript">
    // test data
    var markExceptionThrown = false;

    setup({explicit_done: true});

    test_namespace();

    function onload_test()
    {
        // test for existance of User Timing and Performance Timeline interface
        if (!has_required_interfaces())
        {
            test_true(false,
                "The User Timing and Performance Timeline interfaces, which are required for this test, " +
                "are defined.");

            done();
        }
        else
        {
            test_mark_exceptions();
        }
    }

    function test_mark_exceptions()
    {
        // loop through mark scenarios
        for (var i in timingAttributes)
        {
            try
            {
                // create the mark
                window.performance.mark(timingAttributes[i]);

                test_true(false,
                    "window.performance.mark(\"" + timingAttributes[i] + "\") threw an exception.");
            }
            catch(e)
            {
                test_true(true,
                    "window.performance.mark(\"" + timingAttributes[i] + "\") threw an exception.");

                // confirm that a SYNTAX_ERR exception is thrown and not any other exception
                test_equals(e.code,
                    e.SYNTAX_ERR,
                    "window.performance.mark(\"" + timingAttributes[i] + "\") threw a SYNTAX_ERR " +
                    "exception.");
            }
        }

        done();
    }
    </script>
</head>
<body onload="onload_test();">
    <h1>Description</h1>
    <p>This test validates that the performance.mark() method throws a SYNTAX_ERR exception whenever a navigation
    timing attribute is provided for the name parameter.
    </p>

    <div id="log"></div>
</body>
</html>
@ -1,55 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>exception test of performance.mark</title>
|
||||
<link rel="author" title="Intel" href="http://www.intel.com/" />
|
||||
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
<script src="resources/webperftestharness.js"></script>
|
||||
<script src="resources/webperftestharnessextension.js"></script>
|
||||
<script>
|
||||
setup({explicit_done: true});
|
||||
test_namespace();
|
||||
|
||||
function onload_test() {
|
||||
if (window.performance !== undefined && window.performance.timing !== undefined && window.performance.timing.secureConnectionStart !== undefined)
|
||||
{
|
||||
var context = new PerformanceContext(window.performance);
|
||||
var optionalAttribute = "secureConnectionStart";
|
||||
try
|
||||
{
|
||||
context.mark(optionalAttribute);
|
||||
test_true(false,
|
||||
"window.performance.mark(\"" + optionalAttribute + "\") threw an exception when " +
|
||||
optinalAttribute + " attribute of Navigation Timing is supported.");
|
||||
}
|
||||
catch(e)
|
||||
{
|
||||
test_true(true,
|
||||
"window.performance.mark(\"" + optionalAttribute + "\") threw an exception when " +
|
||||
optionalAttribute + " attribute of Navigation Timing is supported.");
|
||||
|
||||
// confirm that a SYNTAX_ERR exception is thrown and not any other exception
|
||||
test_equals(e.code,
|
||||
e.SYNTAX_ERR,
|
||||
"window.performance.mark(\"" + optionalAttribute + "\") threw a SYNTAX_ERR when " +
|
||||
optionalAttribute + " attribute of Navigation Timing is supported.");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
test_true(true,
|
||||
"This test is ignored when secureConnectionStart attribute of Navigation Timing is not supported.");
|
||||
}
|
||||
done();
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
<body onload=onload_test()>
|
||||
<h1>Description</h1>
|
||||
<p>This test validates exception scenarios of invoking performance.mark() with param of "secureConnectionStart".</p>
|
||||
<div id="log"></div>
|
||||
</body>
|
||||
</html>
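A brief sketch of the guard pattern this test relies on: secureConnectionStart is an optional Navigation Timing attribute, so the reserved-name behaviour is only exercised when the attribute is actually present. This is illustrative only; the comments describe the expectation under the older semantics the test targets:

    // Feature-detect the optional attribute before testing against it.
    var timing = window.performance && window.performance.timing;
    if (timing && timing.secureConnectionStart !== undefined) {
        // Supported: marking with this reserved name is expected to
        // throw a SYNTAX_ERR under the older User Timing semantics.
    } else {
        // Not supported: the scenario is skipped rather than failed.
    }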

@@ -1,252 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing measure() method is throwing the proper exceptions</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>

<script type="text/javascript">
    // test data
    var zeroedNavTimingAtt = undefined;

    setup({explicit_done: true});

    test_namespace();

    function onload_test()
    {
        // test for existence of the User Timing and Performance Timeline interfaces
        if (!has_required_interfaces())
        {
            test_true(false,
                      "The User Timing and Performance Timeline interfaces, which are required for this test, " +
                      "are defined.");

            done();
        }
        else
        {
            test_measure_exceptions();
        }
    }

    function test_measure_exceptions()
    {
        // test scenarios for the SYNTAX_ERR exception
        try
        {
            // create the measure
            window.performance.measure("measure", "mark");

            test_true(false,
                      "window.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
                      "threw an exception.");
        }
        catch(e)
        {
            test_true(true,
                      "window.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
                      "threw an exception.");

            test_equals(e.code,
                        e.SYNTAX_ERR,
                        "window.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent " +
                        "mark, threw a SYNTAX_ERR exception.");
        }

        try
        {
            // create the measure
            window.performance.measure("measure", "mark", "responseEnd");

            test_true(false,
                      "window.performance.measure(\"measure\", \"mark\", \"responseEnd\"), where \"mark\" is a " +
                      "non-existent mark, threw an exception.");
        }
        catch(e)
        {
            test_true(true,
                      "window.performance.measure(\"measure\", \"mark\", \"responseEnd\"), where \"mark\" is a " +
                      "non-existent mark, threw an exception.");

            test_equals(e.code,
                        e.SYNTAX_ERR,
                        "window.performance.measure(\"measure\", \"mark\", \"responseEnd\"), where \"mark\" is a " +
                        "non-existent mark, threw a SYNTAX_ERR exception.");
        }

        try
        {
            // create the measure
            window.performance.measure("measure", "navigationStart", "mark");

            test_true(false,
                      "window.performance.measure(\"measure\", \"navigationStart\", \"mark\"), where \"mark\" is " +
                      "a non-existent mark, threw an exception.");
        }
        catch(e)
        {
            test_true(true,
                      "window.performance.measure(\"measure\", \"navigationStart\", \"mark\"), where \"mark\" is " +
                      "a non-existent mark, threw an exception.");

            test_equals(e.code,
                        e.SYNTAX_ERR,
                        "window.performance.measure(\"measure\", \"navigationStart\", \"mark\"), where \"mark\" " +
                        "is a non-existent mark, threw a SYNTAX_ERR exception.");
        }

        try
        {
            // create the measure
            window.performance.measure("measure", "mark", "mark");

            test_true(false,
                      "window.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
                      "non-existent mark, threw an exception.");
        }
        catch(e)
        {
            test_true(true,
                      "window.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
                      "non-existent mark, threw an exception.");

            test_equals(e.code,
                        e.SYNTAX_ERR,
                        "window.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
                        "non-existent mark, threw a SYNTAX_ERR exception.");
        }


        // for testing the INVALID_ACCESS_ERR exception, find a navigation timing attribute with a value of zero
        for (var i in timingAttributes)
        {
            if (window.performance.timing[timingAttributes[i]] == 0)
            {
                zeroedNavTimingAtt = timingAttributes[i];
            }
        }

        if (zeroedNavTimingAtt == undefined)
        {
            test_true(false,
                      "A navigation timing attribute with a value of 0 was not found to test for the " +
                      "INVALID_ACCESS_ERR exception thrown by window.performance.measure().");
        }
        else
        {
            try
            {
                // create the measure
                window.performance.measure("measure", zeroedNavTimingAtt);

                test_true(false,
                          "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
                          zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, threw an " +
                          "exception.");
            }
            catch(e)
            {
                test_true(true,
                          "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
                          zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, threw an " +
                          "exception.");

                test_equals(e.code,
                            e.INVALID_ACCESS_ERR,
                            "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
                            zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, threw " +
                            "an INVALID_ACCESS_ERR exception.");
            }

            try
            {
                // create the measure
                window.performance.measure("measure", zeroedNavTimingAtt, "responseEnd");

                test_true(false,
                          "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
                          "\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
                          "attribute with a value of 0, threw an exception.");
            }
            catch(e)
            {
                test_true(true,
                          "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
                          "\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
                          "attribute with a value of 0, threw an exception.");

                test_equals(e.code,
                            e.INVALID_ACCESS_ERR,
                            "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
                            "\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
                            "attribute with a value of 0, threw an INVALID_ACCESS_ERR exception.");
            }

            try
            {
                // create the measure
                window.performance.measure("measure", "navigationStart", zeroedNavTimingAtt);

                test_true(false,
                          "window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
                          "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
                          "value of 0, threw an exception.");
            }
            catch(e)
            {
                test_true(true,
                          "window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
                          "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
                          "value of 0, threw an exception.");

                test_equals(e.code,
                            e.INVALID_ACCESS_ERR,
                            "window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
                            "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
                            "value of 0, threw an INVALID_ACCESS_ERR exception.");
            }

            try
            {
                // create the measure
                window.performance.measure("measure", zeroedNavTimingAtt, zeroedNavTimingAtt);

                test_true(false,
                          "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
                          zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
                          "attribute with a value of 0, threw an exception.");
            }
            catch(e)
            {
                test_true(true,
                          "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
                          zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
                          "attribute with a value of 0, threw an exception.");

                test_equals(e.code,
                            e.INVALID_ACCESS_ERR,
                            "window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
                            zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation " +
                            "timing attribute with a value of 0, threw an INVALID_ACCESS_ERR exception.");
            }
        }

        done();
    }
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.measure() method throws a SYNTAX_ERR exception whenever a
non-existent mark is provided as the startMark or endMark, and the method also throws an INVALID_ACCESS_ERR
whenever a navigation timing attribute with a value of zero is provided as the startMark or endMark.
</p>

<div id="log"></div>
</body>
</html>
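For reference, a minimal sketch of the two failure modes the test above covers. The names "m1", "m2", and "noSuchMark" are illustrative, and "redirectStart" is assumed to be 0 because no redirect occurred:

    // 1) A non-existent mark name as startMark: SYNTAX_ERR.
    try {
        window.performance.measure("m1", "noSuchMark");
    } catch (e) {
        console.log(e.name); // expected: "SyntaxError"
    }

    // 2) A Navigation Timing attribute whose value is 0: INVALID_ACCESS_ERR.
    try {
        window.performance.measure("m2", "redirectStart");
    } catch (e) {
        console.log(e.name); // expected: "InvalidAccessError"
    }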

@@ -0,0 +1,12 @@
test(function() {
  assert_not_equals(self.performance.mark, undefined);
}, "self.performance.mark is defined.");
test(function() {
  assert_not_equals(self.performance.clearMarks, undefined);
}, "self.performance.clearMarks is defined.");
test(function() {
  assert_not_equals(self.performance.measure, undefined);
}, "self.performance.measure is defined.");
test(function() {
  assert_not_equals(self.performance.clearMeasures, undefined);
}, "self.performance.clearMeasures is defined.");