Update web-platform-tests to revision 0d318188757a9c996e20b82db201fd04de5aa255

James Graham 2015-03-27 09:15:38 +00:00
parent b2a5225831
commit 1a81b18b9f
12321 changed files with 544385 additions and 6 deletions

View file

@@ -0,0 +1,59 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>User Timing IDL tests</title>
<link rel="author" title="W3C" href="http://www.w3.org/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<link rel="help" href="http://www.w3.org/TR/user-timing/#performancemark"/>
<link rel="help" href="http://www.w3.org/TR/user-timing/#performancemeasure"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/resources/idlharness.js"></script>
</head>
<body>
<h1>User Timing IDL tests</h1>
<div id="log"></div>
<pre id='untested_idl' style='display:none'>
interface Performance {
};
interface PerformanceEntry {
};
</pre>
<pre id='idl'>
partial interface Performance {
void mark(DOMString markName);
void clearMarks(optional DOMString markName);
void measure(DOMString measureName, optional DOMString startMark, optional DOMString endMark);
void clearMeasures(optional DOMString measureName);
};
interface PerformanceMark : PerformanceEntry {
};
interface PerformanceMeasure : PerformanceEntry {
};
</pre>
<script>
(function() {
var idl_array = new IdlArray();
idl_array.add_untested_idls(document.getElementById("untested_idl").textContent);
idl_array.add_idls(document.getElementById("idl").textContent);
idl_array.add_objects({Performance: ["window.performance"]});
idl_array.test();
})();
</script>
</body>
</html>

View file

@@ -0,0 +1,155 @@
/*
Distributed under both the W3C Test Suite License [1] and the W3C
3-clause BSD License [2]. To contribute to a W3C Test Suite, see the
policies and contribution forms [3].
[1] http://www.w3.org/Consortium/Legal/2008/04-testsuite-license
[2] http://www.w3.org/Consortium/Legal/2008/03-bsd-license
[3] http://www.w3.org/2004/10/27-testcases
*/
//
// Helper Functions for NavigationTiming W3C tests
//
var performanceNamespace = window.performance;
var timingAttributes = [
'connectEnd',
'connectStart',
'domComplete',
'domContentLoadedEventEnd',
'domContentLoadedEventStart',
'domInteractive',
'domLoading',
'domainLookupEnd',
'domainLookupStart',
'fetchStart',
'loadEventEnd',
'loadEventStart',
'navigationStart',
'redirectEnd',
'redirectStart',
'requestStart',
'responseEnd',
'responseStart',
'unloadEventEnd',
'unloadEventStart'
];
var namespace_check = false;
//
// All tests in the WebPerf test suite should use wp_test() instead of test().
//
// wp_test() validates that the window.performance namespace exists prior to running tests and
// immediately shows a single failure if it does not.
//
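// Example usage (illustrative only; the properties object mirrors the metadata
// fields this suite already passes and is not required by the harness):
//
//   wp_test(function() { assert_true(performanceNamespace !== undefined, "window.performance exists"); },
//           "window.performance exists",
//           {author: "W3C http://www.w3.org/", help: "http://www.w3.org/TR/navigation-timing/", assert: "..."});
//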
function wp_test(func, msg, properties)
{
// only run the namespace check once
if (!namespace_check)
{
namespace_check = true;
if (performanceNamespace === undefined || performanceNamespace == null)
{
// show a single error that window.performance is undefined
test(function() { assert_true(performanceNamespace !== undefined && performanceNamespace != null, "window.performance is defined and not null"); }, "window.performance is defined and not null.", {author:"W3C http://www.w3.org/",help:"http://www.w3.org/TR/navigation-timing/#sec-window.performance-attribute",assert:"The window.performance attribute provides a hosting area for performance related attributes. "});
}
}
test(func, msg, properties);
}
function test_namespace(child_name, skip_root)
{
if (skip_root === undefined) {
var msg = 'window.performance is defined';
wp_test(function () { assert_true(performanceNamespace !== undefined, msg); }, msg,{author:"W3C http://www.w3.org/",help:"http://www.w3.org/TR/navigation-timing/#sec-window.performance-attribute",assert:"The window.performance attribute provides a hosting area for performance related attributes. "});
}
if (child_name !== undefined) {
var msg2 = 'window.performance.' + child_name + ' is defined';
wp_test(function() { assert_true(performanceNamespace[child_name] !== undefined, msg2); }, msg2,{author:"W3C http://www.w3.org/",help:"http://www.w3.org/TR/navigation-timing/#sec-window.performance-attribute",assert:"The window.performance attribute provides a hosting area for performance related attributes. "});
}
}
function test_attribute_exists(parent_name, attribute_name, properties)
{
var msg = 'window.performance.' + parent_name + '.' + attribute_name + ' is defined.';
wp_test(function() { assert_true(performanceNamespace[parent_name][attribute_name] !== undefined, msg); }, msg, properties);
}
function test_enum(parent_name, enum_name, value, properties)
{
var msg = 'window.performance.' + parent_name + '.' + enum_name + ' is defined.';
wp_test(function() { assert_true(performanceNamespace[parent_name][enum_name] !== undefined, msg); }, msg, properties);
msg = 'window.performance.' + parent_name + '.' + enum_name + ' = ' + value;
wp_test(function() { assert_equals(performanceNamespace[parent_name][enum_name], value, msg); }, msg, properties);
}
function test_timing_order(attribute_name, greater_than_attribute, properties)
{
// ensure it's not 0 first
var msg = "window.performance.timing." + attribute_name + " > 0";
wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] > 0, msg); }, msg, properties);
// ensure it's in the right order
msg = "window.performance.timing." + attribute_name + " >= window.performance.timing." + greater_than_attribute;
wp_test(function() { assert_true(performanceNamespace.timing[attribute_name] >= performanceNamespace.timing[greater_than_attribute], msg); }, msg, properties);
}
function test_timing_greater_than(attribute_name, greater_than, properties)
{
var msg = "window.performance.timing." + attribute_name + " > " + greater_than;
test_greater_than(performanceNamespace.timing[attribute_name], greater_than, msg, properties);
}
function test_timing_equals(attribute_name, equals, msg, properties)
{
var test_msg = msg || "window.performance.timing." + attribute_name + " == " + equals;
test_equals(performanceNamespace.timing[attribute_name], equals, test_msg, properties);
}
//
// Non-test related helper functions
//
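// Note: sleep_milliseconds() below is a synchronous busy-wait; it blocks the main
// thread for n milliseconds rather than yielding the way setTimeout would.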
function sleep_milliseconds(n)
{
var start = new Date().getTime();
while (true) {
if ((new Date().getTime() - start) >= n) break;
}
}
//
// Common helper functions
//
function test_true(value, msg, properties)
{
wp_test(function () { assert_true(value, msg); }, msg, properties);
}
function test_equals(value, equals, msg, properties)
{
wp_test(function () { assert_equals(value, equals, msg); }, msg, properties);
}
function test_greater_than(value, greater_than, msg, properties)
{
wp_test(function () { assert_true(value > greater_than, msg); }, msg, properties);
}
function test_greater_or_equals(value, greater_than, msg, properties)
{
wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
}
function test_not_equals(value, notequals, msg, properties)
{
wp_test(function() { assert_true(value !== notequals, msg); }, msg, properties);
}

View file

@@ -0,0 +1,199 @@
/*
Distributed under both the W3C Test Suite License [1] and the W3C
3-clause BSD License [2]. To contribute to a W3C Test Suite, see the
policies and contribution forms [3].
[1] http://www.w3.org/Consortium/Legal/2008/04-testsuite-license
[2] http://www.w3.org/Consortium/Legal/2008/03-bsd-license
[3] http://www.w3.org/2004/10/27-testcases
*/
var mark_names = [
'',
'1',
'abc',
];
var measures = [
[''],
['2', 1],
['aaa', 'navigationStart', ''],
];
function test_method_exists(method, method_name, properties)
{
var msg;
if (typeof method === 'function')
msg = 'performance.' + method.name + ' is supported!';
else
msg = 'performance.' + method_name + ' is supported!';
wp_test(function() { assert_true(typeof method === 'function', msg); }, msg, properties);
}
function test_method_throw_exception(func_str, exception, msg)
{
var exception_name = typeof exception === "object" ? exception.name : exception;
msg = msg || 'Invocation of ' + func_str + ' should throw ' + exception_name + ' Exception.';
wp_test(function() { assert_throws(exception, function() {eval(func_str)}, msg); }, msg);
}
function test_noless_than(value, greater_than, msg, properties)
{
wp_test(function () { assert_true(value >= greater_than, msg); }, msg, properties);
}
function test_fail(msg, properties)
{
wp_test(function() { assert_unreached(); }, msg, properties);
}
function test_resource_entries(entries, expected_entries)
{
// This is slightly convoluted so that we can sort the output.
var actual_entries = {};
var origin = window.location.protocol + "//" + window.location.host;
for (var i = 0; i < entries.length; ++i) {
var entry = entries[i];
var found = false;
for (var expected_entry in expected_entries) {
if (entry.name == origin + expected_entry) {
found = true;
if (expected_entry in actual_entries) {
test_fail(expected_entry + ' is not expected to have duplicate entries');
}
actual_entries[expected_entry] = entry;
break;
}
}
if (!found) {
test_fail(entries[i].name + ' is not expected to be in the Resource Timing buffer');
}
}
var sorted_urls = [];
for (var i in actual_entries) {
sorted_urls.push(i);
}
sorted_urls.sort();
for (var i in sorted_urls) {
var url = sorted_urls[i];
test_equals(actual_entries[url].initiatorType,
expected_entries[url],
origin + url + ' is expected to have initiatorType ' + expected_entries[url]);
}
for (var j in expected_entries) {
if (!(j in actual_entries)) {
test_fail(origin + j + ' is expected to be in the Resource Timing buffer');
}
}
}
function performance_entrylist_checker(type)
{
var entryType = type;
function entry_check(entry, expectedNames)
{
var msg = 'Entry \"' + entry.name + '\" should be one that we have set.';
wp_test(function() { assert_in_array(entry.name, expectedNames, msg); }, msg);
test_equals(entry.entryType, entryType, 'entryType should be \"' + entryType + '\".');
if (type === "measure") {
test_true(isFinite(entry.startTime), 'startTime should be a number.');
test_true(isFinite(entry.duration), 'duration should be a number.');
} else if (type === "mark") {
test_greater_than(entry.startTime, 0, 'startTime should be greater than 0.');
test_equals(entry.duration, 0, 'duration of mark should be 0.');
}
}
function entrylist_order_check(entryList)
{
var inOrder = true;
for (var i = 0; i < entryList.length - 1; ++i)
{
if (entryList[i + 1].startTime < entryList[i].startTime) {
inOrder = false;
break;
}
}
return inOrder;
}
function entrylist_check(entryList, expectedLength, expectedNames)
{
test_equals(entryList.length, expectedLength, 'There should be ' + expectedLength + ' entries.');
test_true(entrylist_order_check(entryList), 'Entries in entrylist should be in order.');
for (var i = 0; i < entryList.length; ++i)
{
entry_check(entryList[i], expectedNames);
}
}
return {"entrylist_check": entrylist_check};
}
function PerformanceContext(context)
{
this.performanceContext = context;
}
PerformanceContext.prototype =
{
initialMeasures: function(item, index, array)
{
this.performanceContext.measure.apply(this.performanceContext, item);
},
mark: function()
{
this.performanceContext.mark.apply(this.performanceContext, arguments);
},
measure: function()
{
this.performanceContext.measure.apply(this.performanceContext, arguments);
},
clearMarks: function()
{
this.performanceContext.clearMarks.apply(this.performanceContext, arguments);
},
clearMeasures: function()
{
this.performanceContext.clearMeasures.apply(this.performanceContext, arguments);
},
getEntries: function()
{
return this.performanceContext.getEntries.apply(this.performanceContext, arguments);
},
getEntriesByType: function()
{
return this.performanceContext.getEntriesByType.apply(this.performanceContext, arguments);
},
getEntriesByName: function()
{
return this.performanceContext.getEntriesByName.apply(this.performanceContext, arguments);
},
setResourceTimingBufferSize: function()
{
return this.performanceContext.setResourceTimingBufferSize.apply(this.performanceContext, arguments);
},
registerResourceTimingBufferFullCallback: function(func)
{
this.performanceContext.onresourcetimingbufferfull = func;
},
clearResourceTimings: function()
{
this.performanceContext.clearResourceTimings.apply(this.performanceContext, arguments);
}
};

View file

@@ -0,0 +1,134 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing clearMarks() method is working properly</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-clearmarks"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// test marks
var markName1 = "mark1";
var markName2 = "mark2";
var markName3 = "markUndefined";
var markTestDelay = 200;
var entries;
var pass;
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
// create a mark using the test delay; the mark's value should be equivalent to the loadEventStart
// navigation timing attribute plus the test delay
setTimeout(mark_test_cb, markTestDelay);
}
}
function mark_test_cb()
{
// create the test marks; only "mark1" and "mark2" are created, while "markUndefined" remains a non-existent mark
window.performance.mark(markName1);
window.performance.mark(markName2);
// test that two marks have been created
entries = window.performance.getEntriesByType("mark");
test_equals(entries.length, 2, "Two marks have been created for this test.");
// clear non-existent mark
window.performance.clearMarks(markName3);
// test that "mark1" still exists
entries = window.performance.getEntriesByName(markName1);
test_true(entries[0].name == markName1,
"After a call to window.performance.clearMarks(\"" + markName3 + "\"), where \"" + markName3 +
"\" is a non-existent mark, window.performance.getEntriesByName(\"" + markName1 + "\") " +
"returns an object containing the \"" + markName1 + "\" mark.");
// test that "mark2" still exists
entries = window.performance.getEntriesByName(markName2);
test_true(entries[0].name == markName2,
"After a call to window.performance.clearMarks(\"" + markName3 + "\"), where \"" + markName3 +
"\" is a non-existent mark, window.performance.getEntriesByName(\"" + markName2 + "\") " +
"returns an object containing the \"" + markName2 + "\" mark.");
// clear existent mark
window.performance.clearMarks(markName1);
// test that "mark1" was cleared
entries = window.performance.getEntriesByName(markName1);
pass = true;
for (var i in entries)
{
pass = false;
}
test_true(pass,
"After a call to window.performance.clearMarks(\"" + markName1 + "\"), " +
"window.performance.getEntriesByName(\"" + markName1 + "\") returns an empty object.");
// test that "mark2" still exists
entries = window.performance.getEntriesByName(markName2);
test_true(entries[0].name == markName2,
"After a call to window.performance.clearMarks(\"" + markName1 + "\"), " +
"window.performance.getEntriesByName(\"" + markName2 + "\") returns an object containing the " +
"\"" + markName2 + "\" mark.");
// clear all marks
window.performance.clearMarks();
// test that all marks were cleared
entries = window.performance.getEntriesByType("mark");
pass = true;
for (var i in entries)
{
pass = false;
}
test_true(pass,
"After a call to window.performance.clearMarks(), " +
"window.performance.getEntriesByType(\"mark\") returns an empty object.");
done();
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.clearMarks() method is working properly. This test creates the
following marks to test this method:
<ul>
<li>"mark1"</li>
<li>"mark2"</li>
</ul>
After the marks are created, performance.clearMarks() is called three times. First, it is provided with a
name of "markUndefined", a non-existent mark, which shouldn't change the state of the Performance Timeline.
Next, it is provided with a name of "mark1", after which this mark should no longer be present in the
Performance Timeline. Finally, performance.clearMarks() is called without any name provided. After this
call, no marks should be present in the Performance Timeline. The state of the Performance Timeline is
verified with the performance.getEntriesByName() and performance.getEntriesByType() methods.
</p>
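<!-- Minimal sketch of the behavior asserted above (illustrative, not executed by this test):

     performance.mark("mark1");
     performance.mark("mark2");
     performance.clearMarks("markUndefined"); // non-existent name: timeline unchanged
     performance.clearMarks("mark1");         // removes only "mark1"
     performance.clearMarks();                // removes all remaining marks
-->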
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,136 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing clearMeasures() method is working properly</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-clearmeasures"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// test measures
var measureName1 = "measure1";
var measureName2 = "measure2";
var measureName3 = "measureUndefined";
var measureTestDelay = 200;
var measureEntryNames;
var entries;
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
// create measures using the test delay
setTimeout(measure_test_cb, measureTestDelay);
}
}
function measure_test_cb()
{
// create the test measures; only "measure1" and "measure2" are created, while "measureUndefined" remains
// a non-existent measure; give "measure1" a startMark of "navigationStart" and "measure2" a startMark of
// "responseEnd" so that "measure1" always comes first in a PerformanceEntryList returned from a
// Performance Timeline accessor
window.performance.measure(measureName1, "navigationStart");
window.performance.measure(measureName2, "responseEnd");
// test that two measures have been created
entries = window.performance.getEntriesByType("measure");
test_equals(entries.length, 2, "Two measures have been created for this test.");
// clear non-existent measure
window.performance.clearMeasures(measureName3);
// test that "measure1" still exists
entries = window.performance.getEntriesByName(measureName1);
test_true(entries[0].name == measureName1,
"After a call to window.performance.clearMeasures(\"" + measureName3 + "\"), where \"" + measureName3 +
"\" is a non-existent measure, window.performance.getEntriesByName(\"" + measureName1 + "\") " +
"returns an object containing the \"" + measureName1 + "\" measure.");
// test that "measure2" still exists
entries = window.performance.getEntriesByName(measureName2);
test_true(entries[0].name == measureName2,
"After a call to window.performance.clearMeasures(\"" + measureName3 + "\"), where \"" + measureName3 +
"\" is a non-existent measure, window.performance.getEntriesByName(\"" + measureName2 + "\") " +
"returns an object containing the \"" + measureName2 + "\" measure.");
// clear existent measure
window.performance.clearMeasures(measureName1);
// test that "measure1" was cleared
entries = window.performance.getEntriesByName(measureName1);
var pass = true;
for (var i in entries)
{
pass = false;
}
test_true(pass,
"After a call to window.performance.clearMeasures(\"" + measureName1 + "\"), " +
"window.performance.getEntriesByName(\"" + measureName1 + "\") returns an empty object.");
// test that "measure2" still exists
entries = window.performance.getEntriesByName(measureName2);
test_true(entries[0].name == measureName2,
"After a call to window.performance.clearMeasures(\"" + measureName1 + "\"), " +
"window.performance.getEntriesByName(\"" + measureName2 + "\") returns an object containing the " +
"\"" + measureName2 + "\" measure.");
// clear all measures
window.performance.clearMeasures();
// test that all measures were cleared
entries = window.performance.getEntriesByType("measure");
pass = true;
for (var i in entries)
{
pass = false;
}
test_true(pass,
"After a call to window.performance.clearMeasures(), " +
"window.performance.getEntriesByType(\"measure\") returns an empty object.");
done();
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.clearMeasures() method is working properly. This test creates the
following measures to test this method:
<ul>
<li>"measure1"</li>
<li>"measure2"</li>
</ul>
After the measures are created, performance.clearMeasures() is called three times. First, it is provided
with a name of "measureUndefined", a non-existent measure, which shouldn't change the state of the
Performance Timeline. Next, it is provided with a name of "measure1", after which this measure should no
longer be present in the Performance Timeline. Finally, performance.clearMeasures() is called without any
name provided. After this call, no measures should be present in the Performance Timeline. The state of
the Performance Timeline is verified with the performance.getEntriesByName() and
performance.getEntriesByType() methods.
</p>
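<!-- Minimal sketch of the behavior asserted above (illustrative, not executed by this test):

     performance.measure("measure1", "navigationStart");
     performance.measure("measure2", "responseEnd");
     performance.clearMeasures("measureUndefined"); // non-existent name: timeline unchanged
     performance.clearMeasures("measure1");         // removes only "measure1"
     performance.clearMeasures();                   // removes all remaining measures
-->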
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,29 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>user timing entry type</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
</head>
<body>
<h1>Description</h1>
<p>This test validates the user timing entry type, PerformanceMark and PerformanceMeasure.</p>
<div id="log"></div>
<script>
var context = new PerformanceContext(window.performance);
context.mark('mark');
context.measure('measure');
var mark_entry = context.getEntriesByName('mark')[0];
var measure_entry = context.getEntriesByName('measure')[0];
test_equals(Object.prototype.toString.call(mark_entry), '[object PerformanceMark]', 'Class name of mark entry should be PerformanceMark.');
test_equals(Object.prototype.toString.call(measure_entry), '[object PerformanceMeasure]', 'Class name of measure entry should be PerformanceMeasure.');
</script>
</body>
</html>

View file

@@ -0,0 +1,28 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing exists</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
</head>
<body>
<h1>Description</h1>
<p>This test validates that all of the methods used to interact with the User Timing API are defined.</p>
<div id="log"></div>
<script>
test_namespace();
test_true(window.performance.mark !== undefined, "window.performance.mark is defined.");
test_true(window.performance.clearMarks !== undefined, "window.performance.clearMarks is defined.");
test_true(window.performance.measure !== undefined, "window.performance.measure is defined.");
test_true(window.performance.clearMeasures !== undefined, "window.performance.clearMeasures is defined.");
</script>
</body>
</html>

View file

@@ -0,0 +1,228 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing mark() method is working properly</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-mark"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// test data
var markTestDelay = 200;
var testThreshold = 20;
var marks;
var TEST_MARKS =
[
{
name: "mark1",
expectedStartTime: undefined,
entryMatch: undefined
},
{
name: "mark1",
expectedStartTime: undefined,
entryMatch: undefined
}
];
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
// create first mark
window.performance.mark(TEST_MARKS[0].name);
// record the time that this mark is created; this should correspond to the mark's startTime
TEST_MARKS[0].expectedStartTime = (new Date()) - window.performance.timing.navigationStart;
// create the second, duplicate mark using the test delay; its startTime should be roughly the first
// mark's startTime plus the test delay
setTimeout(mark_test_cb, markTestDelay);
}
}
function mark_test_cb()
{
// create second, duplicate mark
window.performance.mark(TEST_MARKS[1].name);
// record the time that this mark is created; this should correspond to the mark's startTime
TEST_MARKS[1].expectedStartTime = (new Date()) - window.performance.timing.navigationStart;
// test that the test marks are returned by getEntriesByName
var entries = window.performance.getEntriesByName(TEST_MARKS[0].name);
test_mark(entries[0],
"window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")[0]",
TEST_MARKS[0].name,
TEST_MARKS[0].expectedStartTime);
TEST_MARKS[0].entryMatch = entries[0];
test_mark(entries[1],
"window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\")[1]",
TEST_MARKS[1].name,
TEST_MARKS[1].expectedStartTime);
TEST_MARKS[1].entryMatch = entries[1];
// test that the test marks are returned by getEntriesByName with the entryType parameter provided
entries = window.performance.getEntriesByName(TEST_MARKS[0].name, "mark");
test_equals(entries[0].name, TEST_MARKS[0].name,
"window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\", \"mark\") returns an " +
"object containing the \"" + TEST_MARKS[0].name + "\" mark in the correct order");
test_equals(entries[1].name, TEST_MARKS[1].name,
"window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\", \"mark\") returns an " +
"object containing the duplicate \"" + TEST_MARKS[1].name + "\" mark in the correct order");
test_true(match_entries(entries[0], TEST_MARKS[0].entryMatch),
"The \"" + TEST_MARKS[0].name + "\" mark returned by " +
"window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\", \"mark\") matches the " +
"the \"" + TEST_MARKS[0].name + "\" mark returned by " +
"window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")");
test_true(match_entries(entries[1], TEST_MARKS[1].entryMatch),
"The duplicate \"" + TEST_MARKS[1].name + "\" mark returned by " +
"window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\", \"mark\") matches the " +
"the duplicate \"" + TEST_MARKS[1].name + "\" mark returned by " +
"window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\")");
// test that the test marks are returned by getEntries
entries = get_test_entries(window.performance.getEntries(), "mark");
test_equals(entries[0].name, TEST_MARKS[0].name,
"window.performance.getEntries() returns an object containing the original \"" +
TEST_MARKS[0].name + "\" mark in the correct order");
test_equals(entries[1].name, TEST_MARKS[1].name,
"window.performance.getEntries() returns an object containing the duplicate \"" +
TEST_MARKS[1].name + "\" mark in the correct order");
test_true(match_entries(entries[0], TEST_MARKS[0].entryMatch),
"The \"" + TEST_MARKS[0].name + "\" mark returned by " +
"window.performance.getEntries() matches the the \"" + TEST_MARKS[0].name + "\" mark returned " +
"by window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")");
test_true(match_entries(entries[1], TEST_MARKS[1].entryMatch),
"The \"" + TEST_MARKS[1].name + "\" mark returned by " +
"window.performance.getEntries() matches the the duplicate \"" + TEST_MARKS[1].name + "\" mark " +
"returned by window.performance.getEntriesByName(\"" + TEST_MARKS[1].name + "\")");
// test that the test marks are returned by getEntriesByType
entries = window.performance.getEntriesByType("mark");
test_equals(entries[0].name, TEST_MARKS[0].name,
"window.performance.getEntriesByType(\"mark\") returns an object containing the original \"" +
TEST_MARKS[0].name + "\" mark in the correct order");
test_equals(entries[1].name, TEST_MARKS[1].name,
"window.performance.getEntriesByType(\"mark\") returns an object containing the duplicate \"" +
TEST_MARKS[1].name + "\" mark in the correct order");
test_true(match_entries(entries[0], TEST_MARKS[0].entryMatch),
"The \"" + TEST_MARKS[0].name + "\" mark returned by " +
"window.performance.getEntriesByType(\"mark\") matches the the \"" + TEST_MARKS[0].name +
"\" mark returned by window.performance.getEntriesByName(\"" + TEST_MARKS[0].name + "\")");
test_true(match_entries(entries[1], TEST_MARKS[1].entryMatch),
"The \"" + TEST_MARKS[1].name + "\" mark returned by " +
"window.performance.getEntriesByType(\"mark\") matches the the duplicate \"" +
TEST_MARKS[1].name + "\" mark returned by window.performance.getEntriesByName(\"" +
TEST_MARKS[1].name + "\")");
done();
}
function match_entries(entry1, entry2)
{
var pass = true;
// match name
pass = pass && (entry1.name == entry2.name);
// match startTime
pass = pass && (entry1.startTime == entry2.startTime);
// match entryType
pass = pass && (entry1.entryType == entry2.entryType);
// match duration
pass = pass && (entry1.duration == entry2.duration);
return pass;
}
function test_mark(markEntry, markEntryCommand, expectedName, expectedStartTime)
{
// test name
test_equals(markEntry.name, expectedName, markEntryCommand + ".name == \"" + expectedName + "\"");
// test startTime, allow for an acceptable threshold in the difference between the startTime and the
// expected value for the startTime (loadEventStart + markTestDelay)
test_true(Math.abs(markEntry.startTime - expectedStartTime) <= testThreshold,
markEntryCommand + ".startTime ~== " + expectedStartTime + " (up to " + testThreshold +
"ms difference allowed)");
// verify entryType
test_equals(markEntry.entryType, "mark", markEntryCommand + ".entryType == \"mark\"");
// verify duration
test_equals(markEntry.duration, 0, markEntryCommand + ".duration == 0");
}
function get_test_entries(entryList, entryType)
{
var testEntries = new Array();
// filter entryList
for (var i in entryList)
{
if (entryList[i].entryType == entryType)
{
testEntries.push(entryList[i]);
}
}
return testEntries;
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.mark() method is working properly. This test creates the
following marks to test this method:
<ul>
<li>"mark1": created using a normal mark() call</li>
<li>"mark1": duplicate of the first mark, used to confirm names can be re-used</li>
</ul>
After both marks are created, their existence is validated by calling
performance.getEntriesByName() (both with and without the entryType parameter provided),
performance.getEntriesByType(), and performance.getEntries().
</p>
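<!-- Minimal sketch of the behavior asserted above (illustrative, not executed by this test):

     performance.mark("mark1");
     performance.mark("mark1"); // duplicate names are allowed
     performance.getEntriesByName("mark1").length;         // 2, in creation order
     performance.getEntriesByName("mark1", "mark").length; // 2, filtered by entryType
-->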
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,67 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>exception test of performance.mark and performance.measure</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script>
setup({explicit_done: true});
test_namespace();
function onload_test() {
if (window.performance !== undefined && window.performance.mark !== undefined)
{
try
{
window.performance.mark();
test_true(false, "window.performance.mark() threw an exception when invoke without a parameter.");
}
catch(e)
{
test_true(true, "window.performance.mark() threw an exception when invoke without a parameter.");
test_equals(e.name,
"TypeError",
"window.performance.mark() threw a TYPE_ERR exception when invoke without a parameter.");
}
}
else
{
test_true(false, "window.performance.mark() interface is not supported!");
}
if (window.performance !== undefined && window.performance.measure !== undefined)
{
try
{
window.performance.measure();
test_true(false, "window.performance.measure() threw an exception when invoke without a parameter.");
}
catch(e)
{
test_true(true, "window.performance.measure() threw an exception when invoke without a parameter.");
test_equals(e.name,
"TypeError",
"window.performance.measure() threw a TYPE_ERR exception when invoke without a parameter.");
}
}
else
{
test_true(false, "window.performance.measure() interface is not supported!");
}
done();
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates exception scenarios of invoking mark() and measure() without a parameter.</p>
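<!-- Minimal sketch of the behavior asserted above (illustrative, not executed by this test):

     try {
         performance.mark(); // required argument missing
     } catch (e) {
         // e.name === "TypeError"; the same applies to performance.measure()
     }
-->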
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,105 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing mark() method is throwing the proper exceptions</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-mark"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// navigation timing attributes
var timingAttributes = [
'connectEnd',
'connectStart',
'domComplete',
'domContentLoadedEventEnd',
'domContentLoadedEventStart',
'domInteractive',
'domLoading',
'domainLookupEnd',
'domainLookupStart',
'fetchStart',
'loadEventEnd',
'loadEventStart',
'navigationStart',
'redirectEnd',
'redirectStart',
'requestStart',
'responseEnd',
'responseStart',
'unloadEventEnd',
'unloadEventStart'
];
// test data
var markExceptionThrown = false;
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
test_mark_exceptions();
}
}
function test_mark_exceptions()
{
// loop through mark scenarios
for (var i in timingAttributes)
{
try
{
// create the mark
window.performance.mark(timingAttributes[i]);
test_true(false,
"window.performance.mark(\"" + timingAttributes[i] + "\") threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.mark(\"" + timingAttributes[i] + "\") threw an exception.");
// confirm that a SYNTAX_ERR exception is thrown and not any other exception
test_equals(e.code,
e.SYNTAX_ERR,
"window.performance.mark(\"" + timingAttributes[i] + "\") threw a SYNTAX_ERR " +
"exception.");
}
}
done();
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.mark() method throws a SYNTAX_ERR exception whenever a navigation
timing attribute is provided for the name parameter.
</p>
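<!-- Minimal sketch of the behavior asserted above (illustrative, not executed by this test):

     try {
         performance.mark("navigationStart"); // navigation timing attribute names are reserved
     } catch (e) {
         // e.code === e.SYNTAX_ERR
     }
-->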
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,55 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>exception test of performance.mark</title>
<link rel="author" title="Intel" href="http://www.intel.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#extensions-performance-interface"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script src="resources/webperftestharnessextension.js"></script>
<script>
setup({explicit_done: true});
test_namespace();
function onload_test() {
if (window.performance !== undefined && window.performance.timing !== undefined && window.performance.timing.secureConnectionStart !== undefined)
{
var context = new PerformanceContext(window.performance);
var optionalAttribute = "secureConnectionStart";
try
{
context.mark(optionalAttribute);
test_true(false,
"window.performance.mark(\"" + optionalAttribute + "\") threw an exception when " +
optinalAttribute + " attribute of Navigation Timing is supported.");
}
catch(e)
{
test_true(true,
"window.performance.mark(\"" + optionalAttribute + "\") threw an exception when " +
optionalAttribute + " attribute of Navigation Timing is supported.");
// confirm that a SYNTAX_ERR exception is thrown and not any other exception
test_equals(e.code,
e.SYNTAX_ERR,
"window.performance.mark(\"" + optionalAttribute + "\") threw a SYNTAX_ERR when " +
optionalAttribute + " attribute of Navigation Timing is supported.");
}
}
else
{
test_true(true,
"This test is ignored when secureConnectionStart attribute of Navigation Timing is not supported.");
}
done();
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates exception scenarios of invoking performance.mark() with the parameter "secureConnectionStart".</p>
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,334 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing measure() method is working properly</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// test data
var startMarkName = "mark_start";
var startMarkValue;
var endMarkName = "mark_end";
var endMarkValue;
var measures;
var testThreshold = 20;
// test measures
var measureTestDelay = 200;
var TEST_MEASURES =
[
{
name: "measure_no_start_no_end",
startMark: undefined,
endMark: undefined,
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
},
{
name: "measure_start_no_end",
startMark: "mark_start",
endMark: undefined,
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
},
{
name: "measure_start_end",
startMark: "mark_start",
endMark: "mark_end",
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
},
{
name: "measure_no_start_no_end",
startMark: undefined,
endMark: undefined,
startTime: undefined,
duration: undefined,
entryType: "measure",
entryMatch: undefined,
order: undefined,
found: false
}
];
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
// create the start mark for the test measures
window.performance.mark(startMarkName);
// get the start mark's value
startMarkValue = window.performance.getEntriesByName(startMarkName)[0].startTime;
// create the test end mark using the test delay; this will allow for a significant difference between
// the mark values that should be represented in the duration of measures using these marks
setTimeout(measure_test_cb, measureTestDelay);
}
}
function measure_test_cb()
{
// create the end mark for the test measures
window.performance.mark(endMarkName);
// get the end mark's value
endMarkValue = window.performance.getEntriesByName(endMarkName)[0].startTime;
// loop through all measure scenarios and create the corresponding measures
for (var i in TEST_MEASURES)
{
var scenario = TEST_MEASURES[i];
if (scenario.startMark == undefined && scenario.endMark == undefined)
{
// both startMark and endMark are undefined, don't provide either parameters
window.performance.measure(scenario.name);
// when startMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding
// to the navigationStart attribute with a timebase of the same attribute is used; this is
// equivalent to 0
scenario.startTime = 0;
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
// the current time with a timebase of the navigationStart attribute is used
scenario.duration = (new Date()) - window.performance.timing.navigationStart;
}
else if (scenario.startMark != undefined && scenario.endMark == undefined)
{
// only startMark is defined, provide startMark and don't provide endMark
window.performance.measure(scenario.name, scenario.startMark);
// when startMark is provided to the measure() call, the value of the mark whose name is
// provided is used for the startMark
scenario.startTime = startMarkValue;
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
// the current time with a timebase of the navigationStart attribute is used
scenario.duration = ((new Date()) - window.performance.timing.navigationStart) -
startMarkValue;
}
else if (scenario.startMark != undefined && scenario.endMark != undefined)
{
// both startMark and endMark are defined, provide both parameters
window.performance.measure(scenario.name, scenario.startMark, scenario.endMark);
// when startMark is provided to the measure() call, the value of the mark whose name is
// provided is used for the startMark
scenario.startTime = startMarkValue;
// when endMark is provided to the measure() call, the value of the mark whose name is
// provided is used for the endMark
scenario.duration = endMarkValue - startMarkValue;
}
}
// test that expected measures are returned by getEntriesByName
for (var i in TEST_MEASURES)
{
var entries = window.performance.getEntriesByName(TEST_MEASURES[i].name);
// for all test measures, the test will validate the test measure against the first entry returned
// by getEntriesByName(), except for the last measure, which is a duplicate and is therefore
// validated against the second entry returned by getEntriesByName()
test_measure(entries[(i == 3 ? 1 : 0)],
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\")[" +
(i == 3 ? 1 : 0) + "]",
TEST_MEASURES[i].name,
TEST_MEASURES[i].startTime,
TEST_MEASURES[i].duration);
TEST_MEASURES[i].entryMatch = entries[(i == 3 ? 1 : 0)];
}
// test that expected measures are returned by getEntriesByName with the entryType parameter provided
for (var i in TEST_MEASURES)
{
entries = window.performance.getEntriesByName(TEST_MEASURES[i].name, "measure");
test_true(match_entries(entries[(i == 3 ? 1 : 0)], TEST_MEASURES[i].entryMatch),
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\", \"measure\")[" +
(i == 3 ? 1 : 0) + "] returns an object containing the \"" + TEST_MEASURES[i].name +
"\" measure in the correct order, and its value matches the \"" + TEST_MEASURES[i].name +
"\" measure returned by window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name +
"\")");
}
// test that expected measures are returned by getEntries
entries = get_test_entries(window.performance.getEntries(), "measure");
test_measure_list(entries, "window.performance.getEntries()", TEST_MEASURES);
// test that expected measures are returned by getEntriesByType
entries = window.performance.getEntriesByType("measure");
test_measure_list(entries, "window.performance.getEntriesByType(\"measure\")", TEST_MEASURES);
done();
}
function match_entries(entry1, entry2, threshold)
{
// default to the shared test threshold when no explicit threshold is provided
if (threshold == undefined)
{
threshold = testThreshold;
}
var pass = true;
// match name
pass = pass && (entry1.name == entry2.name);
// match startTime
pass = pass && (Math.abs(entry1.startTime - entry2.startTime) <= threshold);
// match entryType
pass = pass && (entry1.entryType == entry2.entryType);
// match duration
pass = pass && (Math.abs(entry1.duration - entry2.duration) <= threshold);
return pass;
}
function test_measure(measureEntry, measureEntryCommand, expectedName, expectedStartTime, expectedDuration)
{
// test name
test_true(measureEntry.name == expectedName, measureEntryCommand + ".name == \"" + expectedName + "\"");
// test startTime; since a measure's startTime is always taken from a mark's value or the value of a
// navigation timing attribute, the actual startTime should match the expected value exactly
test_true(Math.abs(measureEntry.startTime - expectedStartTime) == 0,
measureEntryCommand + ".startTime == " + expectedStartTime);
// test entryType
test_true(measureEntry.entryType == "measure", measureEntryCommand + ".entryType == \"measure\"");
// test duration, allow for an acceptable threshold in the difference between the actual duration and the
// expected value for the duration
test_true(Math.abs(measureEntry.duration - expectedDuration) <= testThreshold, measureEntryCommand +
".duration ~== " + expectedDuration + " (up to " + testThreshold + "ms difference allowed)");
}
function test_measure_list(measureEntryList, measureEntryListCommand, measureScenarios)
{
// give all entries a "found" flag so that no entry is matched twice
for (var i in measureEntryList)
{
measureEntryList[i].found = false;
}
for (var i in measureScenarios)
{
measureScenarios[i].found = false;
for (var j in measureEntryList)
{
if (match_entries(measureEntryList[j], measureScenarios[i]) && !measureEntryList[j].found)
{
test_true(match_entries(measureEntryList[j], measureScenarios[i].entryMatch),
measureEntryListCommand + " returns an object containing the \"" +
measureScenarios[i].name + "\" measure, and it's value matches the measure " +
"returned by window.performance.getEntriesByName(\"" + measureScenarios[i].name +
"\")[" + (i == 3 ? 1 : 0) + "].");
measureEntryList[j].found = true;
measureScenarios[i].found = true;
break;
}
}
if (!measureScenarios[i].found)
{
test_true(false,
measureEntryListCommand + " returns an object containing the \"" +
measureScenarios[i].name + "\" measure.");
}
}
// verify that the entries are returned in chronological startTime order
var startTimeCurr = 0;
var pass = true;
for (var i in measureEntryList)
{
if (measureEntryList[i].startTime < startTimeCurr)
{
pass = false;
}
startTimeCurr = measureEntryList[i].startTime;
}
test_true(pass,
measureEntryListCommand + " returns an object containing all test " +
"measures in order.");
}
function get_test_entries(entryList, entryType)
{
var testEntries = new Array();
// filter entryList
for (var i in entryList)
{
if (entryList[i].entryType == entryType)
{
testEntries.push(entryList[i]);
}
}
return testEntries;
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.measure() method is working properly. This test creates the
following measures to test this method:
<ul>
<li>"measure_no_start_no_end": created using a measure() call without a startMark or endMark
provided</li>
<li>"measure_start_no_end": created using a measure() call with only the startMark provided</li>
<li>"measure_start_end": created using a measure() call with both a startMark or endMark provided</li>
<li>"measure_no_start_no_end": duplicate of the first measure, used to confirm names can be re-used</li>
</ul>
After the measures are created, their existence is validated by calling
performance.getEntriesByName() (both with and without the entryType parameter provided),
performance.getEntriesByType(), and performance.getEntries().
</p>
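<!-- Minimal sketch of the measure() variants under test (illustrative, not executed by this test):

     performance.mark("mark_start");
     performance.mark("mark_end");
     performance.measure("m1");                            // navigationStart .. now
     performance.measure("m2", "mark_start");              // mark_start .. now
     performance.measure("m3", "mark_start", "mark_end");  // mark_start .. mark_end
-->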
<div id="log"></div>
</body>
</html>

View file

@@ -0,0 +1,282 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing measure() method is throwing the proper exceptions</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// navigation timing attributes
var timingAttributes = [
'connectEnd',
'connectStart',
'domComplete',
'domContentLoadedEventEnd',
'domContentLoadedEventStart',
'domInteractive',
'domLoading',
'domainLookupEnd',
'domainLookupStart',
'fetchStart',
'loadEventEnd',
'loadEventStart',
'navigationStart',
'redirectEnd',
'redirectStart',
'requestStart',
'responseEnd',
'responseStart',
'unloadEventEnd',
'unloadEventStart'
];
// test data
var zeroedNavTimingAtt = undefined;
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
test_measure_exceptions();
}
}
function test_measure_exceptions()
{
// test scenarios for the SYNTAX_ERR exception
try
{
// create the measure
window.performance.measure("measure", "mark");
test_true(false,
"window.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
"threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent mark, " +
" threw an exception.");
test_equals(e.code,
e.SYNTAX_ERR,
"window.performance.measure(\"measure\", \"mark\"), where \"mark\" is a non-existent " +
"mark, threw a SYNTAX_ERR exception.");
}
try
{
// create the measure
window.performance.measure("measure", "mark", "responseEnd");
test_true(false,
"window.performance.measure(\"measure\", \"mark\", \"responseEnd\"), where \"mark\" is a " +
"non-existent mark, threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"mark\", \"responseEnd\"), where \"mark\" is a " +
"non-existent mark, threw an exception.");
test_equals(e.code,
e.SYNTAX_ERR,
"window.performance.measure(\"measure\", \"mark\", \"responseEnd\"), where \"mark\" is a " +
"non-existent mark, threw a SYNTAX_ERR exception.");
}
try
{
// create the measure
window.performance.measure("measure", "navigationStart", "mark");
test_true(false,
"window.performance.measure(\"measure\", \"navigationStart\", \"mark\"), where \"mark\" is " +
"a non-existent mark, threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"navigationStart\", \"mark\"), where \"mark\" is " +
"a non-existent mark, threw an exception.");
test_equals(e.code,
e.SYNTAX_ERR,
"window.performance.measure(\"measure\", \"navigationStart\", \"mark\"), where \"mark\" " +
"is a non-existent mark, threw a SYNTAX_ERR exception.");
}
try
{
// create the measure
window.performance.measure("measure", "mark", "mark");
test_true(false,
"window.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
"non-existent mark, threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
"non-existent mark, threw an exception.");
test_equals(e.code,
e.SYNTAX_ERR,
"window.performance.measure(\"measure\", \"mark\", \"mark\"), where \"mark\" is a " +
"non-existent mark, threw a SYNTAX_ERR exception.");
}
// for testing the INVALID_ACCESS_ERR exception, find a navigation timing attribute with a value of zero
for (var i in timingAttributes)
{
if (window.performance.timing[timingAttributes[i]] == 0)
{
zeroedNavTimingAtt = timingAttributes[i];
}
}
if (zeroedNavTimingAtt == undefined)
{
test_true(false,
"A navigation timing attribute with a value of 0 was not found to test for the " +
"INVALID_ACCESS_ERR exception thrown by window.performance.measure().");
}
else
{
try
{
// create the measure
window.performance.measure("measure", zeroedNavTimingAtt);
test_true(false,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, threw an " +
"exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, threw an " +
"exception.");
test_equals(e.code,
e.INVALID_ACCESS_ERR,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\"), where \"" +
zeroedNavTimingAtt + "\" is a navigation timing attribute with a value of 0, threw " +
"an INVALID_ACCESS_ERR exception.");
}
try
{
// create the measure
window.performance.measure("measure", zeroedNavTimingAtt, "responseEnd");
test_true(false,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
"\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
"attribute with a value of 0, threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
"\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
"attribute with a value of 0, threw an exception.");
test_equals(e.code,
e.INVALID_ACCESS_ERR,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", " +
"\"responseEnd\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
"attribute with a value of 0, threw an INVALID_ACCESS_ERR exception.");
}
try
{
// create the measure
window.performance.measure("measure", "navigationStart", zeroedNavTimingAtt);
test_true(false,
"window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
"\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
"value of 0, threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
"\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
"value of 0, threw an exception.");
test_equals(e.code,
e.INVALID_ACCESS_ERR,
"window.performance.measure(\"measure\", \"navigationStart\", \"" + zeroedNavTimingAtt +
"\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing attribute with a " +
"value of 0, threw an INVALID_ACCESS_ERR exception.");
}
try
{
// create the measure
window.performance.measure("measure", zeroedNavTimingAtt, zeroedNavTimingAtt);
test_true(false,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
"attribute with a value of 0, threw an exception.");
}
catch(e)
{
test_true(true,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation timing " +
"attribute with a value of 0, threw an exception.");
test_equals(e.code,
e.INVALID_ACCESS_ERR,
"window.performance.measure(\"measure\", \"" + zeroedNavTimingAtt + "\", \"" +
zeroedNavTimingAtt + "\"), where \"" + zeroedNavTimingAtt + "\" is a navigation " +
"timing attribute with a value of 0, threw an INVALID_ACCESS_ERR exception.");
}
}
done();
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.measure() method throws a SYNTAX_ERR exception whenever a
non-existent mark is provided as the startMark or endMark, and that it throws an INVALID_ACCESS_ERR
exception whenever a navigation timing attribute with a value of zero is provided as the startMark or
endMark.
</p>
<div id="log"></div>
</body>
</html>

View file

@ -0,0 +1,233 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
<title>window.performance User Timing measure() method is working properly with navigation timing
attributes</title>
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
<link rel="help" href="http://www.w3.org/TR/user-timing/#dom-performance-measure"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="resources/webperftestharness.js"></script>
<script type="text/javascript">
// navigation timing attributes
var timingAttributes = [
'connectEnd',
'connectStart',
'domComplete',
'domContentLoadedEventEnd',
'domContentLoadedEventStart',
'domInteractive',
'domLoading',
'domainLookupEnd',
'domainLookupStart',
'fetchStart',
'loadEventEnd',
'loadEventStart',
'navigationStart',
'redirectEnd',
'redirectStart',
'requestStart',
'responseEnd',
'responseStart',
'unloadEventEnd',
'unloadEventStart'
];
// test data
var startMarkName = "mark_start";
var startMarkValue;
var endMarkName = "mark_end";
var endMarkValue;
var measures;
var testThreshold = 20;
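// allowable difference, in milliseconds, between an expected duration (computed with Date())
// and the actual duration reported by the measure entry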
// test measures
var measureTestDelay = 200;
var TEST_MEASURES =
[
{
name: "measure_nav_start_no_end",
startMark: "navigationStart",
endMark: undefined,
exceptionTestMessage: "window.performance.measure(\"measure_nav_start_no_end\", " +
"\"navigationStart\") ran without throwing any exceptions.",
expectedStartTime: undefined,
expectedDuration: undefined,
entryMatch: undefined
},
{
name: "measure_nav_start_mark_end",
startMark: "navigationStart",
endMark: "mark_end",
exceptionTestMessage: "window.performance.measure(\"measure_nav_start_end\", \"navigationStart\", " +
"\"mark_end\") ran without throwing any exceptions.",
expectedStartTime: undefined,
expectedDuration: undefined,
entryMatch: undefined
},
{
name: "measure_mark_start_nav_end",
startMark: "mark_start",
endMark: "responseEnd",
exceptionTestMessage: "window.performance.measure(\"measure_start_nav_end\", \"mark_start\", " +
"\"responseEnd\") ran without throwing any exceptions.",
expectedStartTime: undefined,
expectedDuration: undefined,
entryMatch: undefined
},
{
name: "measure_nav_start_nav_end",
startMark: "navigationStart",
endMark: "responseEnd",
exceptionTestMessage: "window.performance.measure(\"measure_nav_start_nav_end\", " +
"\"navigationStart\", \"responseEnd\") ran without throwing any exceptions.",
expectedStartTime: undefined,
expectedDuration: undefined,
entryMatch: undefined
}
];
setup({explicit_done: true});
test_namespace();
function onload_test()
{
// test for existence of the User Timing and Performance Timeline interfaces
if (window.performance.mark == undefined ||
window.performance.clearMarks == undefined ||
window.performance.measure == undefined ||
window.performance.clearMeasures == undefined ||
window.performance.getEntriesByName == undefined ||
window.performance.getEntriesByType == undefined ||
window.performance.getEntries == undefined)
{
test_true(false,
"The User Timing and Performance Timeline interfaces, which are required for this test, " +
"are defined.");
done();
}
else
{
// create the start mark for the test measures
window.performance.mark(startMarkName);
// get the start mark's value
startMarkValue = window.performance.getEntriesByName(startMarkName)[0].startTime;
// create the test end mark using the test delay; this will allow for a significant difference between
// the mark values that should be represented in the duration of measures using these marks
setTimeout(measure_test_cb, measureTestDelay);
}
}
function measure_test_cb()
{
// create the end mark for the test measures
window.performance.mark(endMarkName);
// get the end mark's value
endMarkValue = window.performance.getEntriesByName(endMarkName)[0].startTime;
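// note: mark startTime values are DOMHighResTimeStamps measured relative to navigationStart,
// whereas window.performance.timing attributes are absolute Unix-epoch milliseconds; the
// expected values computed below subtract navigationStart to put both on the same timebase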
// loop through measure scenarios
for (var i in TEST_MEASURES)
{
var scenario = TEST_MEASURES[i];
if (scenario.startMark != undefined && scenario.endMark == undefined)
{
// only startMark is defined, provide startMark and don't provide endMark
window.performance.measure(scenario.name, scenario.startMark);
// when startMark is provided to the measure() call, the value of the mark or navigation
// timing attribute whose name is provided is used for the startMark
scenario.expectedStartTime = (timingAttributes.indexOf(scenario.startMark) != -1 ?
window.performance.timing[scenario.startMark] -
window.performance.timing.navigationStart :
startMarkValue);
// when endMark isn't provided to the measure() call, a DOMHighResTimeStamp corresponding to
// the current time with a timebase of the navigationStart attribute is used
scenario.expectedDuration = ((new Date()) - window.performance.timing.navigationStart) -
scenario.expectedStartTime;
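// (new Date()) is coerced to epoch milliseconds here, while the measure's actual duration
// is based on DOMHighResTimeStamps; the small discrepancy is absorbed by the testThreshold
// allowance applied in test_measure()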
}
else if (scenario.startMark != undefined && scenario.endMark != undefined)
{
// both startMark and endMark are defined, provide both parameters
window.performance.measure(scenario.name, scenario.startMark, scenario.endMark);
// when startMark is provided to the measure() call, the value of the mark or navigation
// timing attribute whose name is provided is used for the startMark
scenario.expectedStartTime = (timingAttributes.indexOf(scenario.startMark) != -1 ?
window.performance.timing[scenario.startMark] -
window.performance.timing.navigationStart :
startMarkValue);
// when endMark is provided to the measure() call, the value of the mark or navigation
// timing attribute whose name is provided is used for the endMark
scenario.expectedDuration = (timingAttributes.indexOf(scenario.endMark) != -1 ?
window.performance.timing[scenario.endMark] -
window.performance.timing.navigationStart :
endMarkValue) - scenario.expectedStartTime;
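// worked example: for measure("measure_nav_start_nav_end", "navigationStart", "responseEnd"),
// expectedStartTime is 0 and expectedDuration is responseEnd - navigationStart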
}
}
// test the test measures are returned by getEntriesByName
for (var i in TEST_MEASURES)
{
var entries = window.performance.getEntriesByName(TEST_MEASURES[i].name);
test_measure(entries[0],
"window.performance.getEntriesByName(\"" + TEST_MEASURES[i].name + "\")[0]",
TEST_MEASURES[i].name,
TEST_MEASURES[i].expectedStartTime,
TEST_MEASURES[i].expectedDuration);
TEST_MEASURES[i].entryMatch = entries[0];
}
done();
}
function test_measure(measureEntry, measureEntryCommand, expectedName, expectedStartTime, expectedDuration)
{
// test name
test_true(measureEntry.name == expectedName, measureEntryCommand + ".name == \"" + expectedName + "\"");
// test startTime; since a measure's startTime is always equal to the value of the mark or
// navigation timing attribute used as its startMark, the actual startTime should match the
// expected value exactly
test_true(measureEntry.startTime == expectedStartTime,
measureEntryCommand + ".startTime == " + expectedStartTime);
// test entryType
test_true(measureEntry.entryType == "measure", measureEntryCommand + ".entryType == \"measure\"");
// test duration, allow for an acceptable threshold in the difference between the actual duration and the
// expected value for the duration
test_true(Math.abs(measureEntry.duration - expectedDuration) <= testThreshold, measureEntryCommand +
".duration ~== " + expectedDuration + " (up to " + testThreshold + "ms difference allowed)");
}
</script>
</head>
<body onload="onload_test();">
<h1>Description</h1>
<p>This test validates that the performance.measure() method is working properly when navigation timing
attributes are used in place of mark names. This test creates the following measures to test this method:
<ul>
<li>"measure_nav_start_no_end": created using a measure() call with a navigation timing attribute
provided as the startMark and nothing provided as the endMark</li>
<li>"measure_nav_start_mark_end": created using a measure() call with a navigation timing attribute
provided as the startMark and a mark name provided as the endMark</li>
<li>"measure_mark_start_nav_end": created using a measure() call with a mark name provided as the
startMark and a navigation timing attribute provided as the endMark</li>
<li>"measure_nav_start_nav_end":created using a measure() call with a navigation timing attribute
provided as both the startMark and endMark</li>
</ul>
After each measure is created, its existence is validated by calling
performance.getEntriesByName() with the measure name.
</p>
<div id="log"></div>
</body>
</html>