Update web-platform-tests to revision c8a23aed99841887e72f883ab94a45d928a5820f

parent 00b98796bd, commit ca2fd0f290
196 changed files with 2356 additions and 1494 deletions
|
@ -0,0 +1,126 @@
|
|||
'use strict';
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const handles = await create_file_system_handles(t, root_dir);
|
||||
|
||||
const db = await createDatabase(t, db => {
|
||||
const store = db.createObjectStore('store');
|
||||
});
|
||||
t.add_cleanup(() => deleteAllDatabases(t));
|
||||
|
||||
const value = handles;
|
||||
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const store = tx.objectStore('store');
|
||||
await promiseForRequest(t, store.put(value, 'key'));
|
||||
const result = await promiseForRequest(t, store.get('key'));
|
||||
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
assert_true(Array.isArray(result), 'Result should be an array');
|
||||
assert_equals(result.length, value.length);
|
||||
await assert_equals_cloned_handles(result, value);
|
||||
}, 'Store handle in IndexedDB and read from pending transaction.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const handles = await create_file_system_handles(t, root_dir);
|
||||
|
||||
const db = await createDatabase(t, db => {
|
||||
const store = db.createObjectStore('store');
|
||||
});
|
||||
t.add_cleanup(() => deleteAllDatabases(t));
|
||||
|
||||
const value = handles;
|
||||
|
||||
let tx = db.transaction('store', 'readwrite');
|
||||
let store = tx.objectStore('store');
|
||||
await promiseForRequest(t, store.put(value, 'key'));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
tx = db.transaction('store', 'readonly');
|
||||
store = tx.objectStore('store');
|
||||
const result = await promiseForRequest(t, store.get('key'));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
assert_true(Array.isArray(result), 'Result should be an array');
|
||||
assert_equals(result.length, value.length);
|
||||
await assert_equals_cloned_handles(result, value);
|
||||
}, 'Store handle in IndexedDB and read from new transaction.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const handles = await create_file_system_handles(t, root_dir);
|
||||
|
||||
const db = await createDatabase(t, db => {
|
||||
const store = db.createObjectStore('store');
|
||||
});
|
||||
t.add_cleanup(() => deleteAllDatabases(t));
|
||||
|
||||
const value = {handles, blob: new Blob(["foobar"])};
|
||||
|
||||
let tx = db.transaction('store', 'readwrite');
|
||||
let store = tx.objectStore('store');
|
||||
await promiseForRequest(t, store.put(value, 'key'));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
tx = db.transaction('store', 'readonly');
|
||||
store = tx.objectStore('store');
|
||||
const result = await promiseForRequest(t, store.get('key'));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
assert_true(Array.isArray(result.handles), 'Result should be an array');
|
||||
assert_equals(result.handles.length, value.handles.length);
|
||||
await assert_equals_cloned_handles(result.handles, value.handles);
|
||||
|
||||
assert_equals(await result.blob.text(), await value.blob.text());
|
||||
}, 'Store handles and blobs in IndexedDB.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const handles = await create_file_system_handles(t, root_dir);
|
||||
|
||||
const db = await createDatabase(t, db => {
|
||||
const store = db.createObjectStore('store');
|
||||
});
|
||||
t.add_cleanup(() => deleteAllDatabases(t));
|
||||
|
||||
const value = handles;
|
||||
|
||||
let tx = db.transaction('store', 'readwrite');
|
||||
let store = tx.objectStore('store');
|
||||
await promiseForRequest(t, store.put(value, 'key'));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
tx = db.transaction('store', 'readonly');
|
||||
store = tx.objectStore('store');
|
||||
let cursor_request = store.openCursor();
|
||||
await requestWatcher(t, cursor_request).wait_for('success');
|
||||
const result = cursor_request.result.value;
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
assert_true(Array.isArray(result), 'Result should be an array');
|
||||
assert_equals(result.length, value.length);
|
||||
await assert_equals_cloned_handles(result, value);
|
||||
}, 'Store handle in IndexedDB and read using a cursor.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const handles = await create_file_system_handles(t, root_dir);
|
||||
|
||||
const db = await createDatabase(t, db => {
|
||||
const store = db.createObjectStore('store', {keyPath: 'key'});
|
||||
});
|
||||
t.add_cleanup(() => deleteAllDatabases(t));
|
||||
|
||||
const value = handles;
|
||||
let tx = db.transaction('store', 'readwrite');
|
||||
let store = tx.objectStore('store');
|
||||
await promiseForRequest(t, store.put({key: 'key', value}));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
tx = db.transaction('store', 'readonly');
|
||||
store = tx.objectStore('store');
|
||||
const result = await promiseForRequest(t, store.get('key'));
|
||||
await promiseForTransaction(t, tx);
|
||||
|
||||
assert_true(Array.isArray(result.value), 'Result should be an array');
|
||||
assert_equals(result.value.length, value.length);
|
||||
await assert_equals_cloned_handles(result.value, value);
|
||||
}, 'Store handle in IndexedDB using inline keys.');
|
|
@ -0,0 +1,62 @@
'use strict';

directory_test(async (t, root_dir) => {
  assert_true(await root_dir.isSameEntry(root_dir));

  const subdir = await createDirectory(t, 'subdir-name', root_dir);
  assert_true(await subdir.isSameEntry(subdir));
}, 'isSameEntry for identical directory handles returns true');

directory_test(async (t, root_dir) => {
  const subdir = await createDirectory(t, 'subdir-name', root_dir);

  assert_false(await root_dir.isSameEntry(subdir));
  assert_false(await subdir.isSameEntry(root_dir));
}, 'isSameEntry for different directories returns false');

directory_test(async (t, root_dir) => {
  const subdir = await createDirectory(t, 'subdir-name', root_dir);
  const subdir2 = await root_dir.getDirectoryHandle('subdir-name');

  assert_true(await subdir.isSameEntry(subdir2));
  assert_true(await subdir2.isSameEntry(subdir));
}, 'isSameEntry for different handles for the same directory');

directory_test(async (t, root_dir) => {
  const handle = await createEmptyFile(t, 'mtime.txt', root_dir);

  assert_true(await handle.isSameEntry(handle));
}, 'isSameEntry for identical file handles returns true');

directory_test(async (t, root_dir) => {
  const handle1 = await createEmptyFile(t, 'mtime.txt', root_dir);
  const handle2 = await createEmptyFile(t, 'foo.txt', root_dir);

  assert_false(await handle1.isSameEntry(handle2));
  assert_false(await handle2.isSameEntry(handle1));
}, 'isSameEntry for different files returns false');

directory_test(async (t, root_dir) => {
  const handle1 = await createEmptyFile(t, 'mtime.txt', root_dir);
  const handle2 = await root_dir.getFileHandle('mtime.txt');

  assert_true(await handle1.isSameEntry(handle2));
  assert_true(await handle2.isSameEntry(handle1));
}, 'isSameEntry for different handles for the same file');

directory_test(async (t, root_dir) => {
  const handle1 = await createEmptyFile(t, 'mtime.txt', root_dir);
  const subdir = await createDirectory(t, 'subdir-name', root_dir);
  const handle2 = await createEmptyFile(t, 'mtime.txt', subdir);

  assert_false(await handle1.isSameEntry(handle2));
  assert_false(await handle2.isSameEntry(handle1));
}, 'isSameEntry comparing a file to a file in a different directory returns false');

directory_test(async (t, root_dir) => {
  const handle1 = await createEmptyFile(t, 'mtime.txt', root_dir);
  const handle2 = await createDirectory(t, 'subdir-name', root_dir);

  assert_false(await handle1.isSameEntry(handle2));
  assert_false(await handle2.isSameEntry(handle1));
}, 'isSameEntry comparing a file to a directory returns false');
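// Usage sketch (not part of the test file above): the isSameEntry() behaviour
// these tests exercise is what an application would rely on to deduplicate
// handles, e.g. when adding a user-picked file to a "recent files" list.
// This is only an illustration; the function name `addToRecentFiles` and the
// `recentFiles` array are assumptions made up for the example.
async function addToRecentFiles(recentFiles) {
  const [picked] = await self.showOpenFilePicker();
  for (const existing of recentFiles) {
    // Two handles compare equal when they point at the same underlying entry,
    // even if they were obtained through different calls.
    if (await existing.isSameEntry(picked)) {
      return recentFiles;
    }
  }
  recentFiles.push(picked);
  return recentFiles;
}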
@ -0,0 +1,82 @@
|
|||
'use strict';
|
||||
|
||||
// This script depends on the following scripts:
|
||||
// /file-system-access/resources/messaging-helpers.js
|
||||
// /file-system-access/resources/messaging-serialize-helpers.js
|
||||
// /file-system-access/resources/test-helpers.js
|
||||
// /service-workers/service-worker/resources/test-helpers.sub.js
|
||||
|
||||
// Sets up a new broadcast channel in |target|. Posts a message instructing
|
||||
// |target| to open the broadcast channel using |broadcast_channel_name|.
|
||||
async function create_broadcast_channel(
|
||||
test, broadcast_channel_name, receiver, target, target_origin) {
|
||||
target.postMessage(
|
||||
{ type: 'create-broadcast-channel', broadcast_channel_name },
|
||||
{ targetOrigin: target_origin });
|
||||
const event_watcher = new EventWatcher(test, receiver, 'message');
|
||||
|
||||
// Wait until |target| is listening to the broadcast channel.
|
||||
const message_event = await event_watcher.wait_for('message');
|
||||
assert_equals(message_event.data.type, 'broadcast-channel-created',
|
||||
'The message target must receive a "broadcast-channel-created" message ' +
|
||||
'response.');
|
||||
}
|
||||
|
||||
// This test is very similar to 'FileSystemBaseHandle-postMessage.js'. It
|
||||
// starts by creating three message targets for the broadcast channel:
|
||||
// an iframe, dedicated worker and a service worker. After setup, an array
|
||||
// of FileSystemHandles is sent across the broadcast channel. The test
|
||||
// expects three responses -- one from each message target.
|
||||
directory_test(async (t, root) => {
|
||||
const broadcast_channel_name = 'file-system-file-handle-channel';
|
||||
const broadcast_channel = new BroadcastChannel(broadcast_channel_name);
|
||||
const broadcast_channel_event_watcher =
|
||||
new EventWatcher(t, broadcast_channel, 'message');
|
||||
|
||||
const iframe = await add_iframe(t, { src: kDocumentMessageTarget });
|
||||
await create_broadcast_channel(
|
||||
t, broadcast_channel_name, self, iframe.contentWindow, '*');
|
||||
|
||||
const scope = `${kServiceWorkerMessageTarget}` +
|
||||
'?post-message-to-broadcast-channel-with-file-handle';
|
||||
|
||||
const registration = await create_service_worker(
|
||||
t, kServiceWorkerMessageTarget, scope);
|
||||
|
||||
await create_broadcast_channel(
|
||||
t, broadcast_channel_name,
|
||||
navigator.serviceWorker, registration.installing);
|
||||
|
||||
const dedicated_worker =
|
||||
create_dedicated_worker(t, kDedicatedWorkerMessageTarget);
|
||||
|
||||
await create_broadcast_channel(
|
||||
t, broadcast_channel_name, dedicated_worker, dedicated_worker);
|
||||
|
||||
const handles = await create_file_system_handles(t, root);
|
||||
|
||||
broadcast_channel.postMessage(
|
||||
{ type: 'receive-file-system-handles', cloned_handles: handles });
|
||||
|
||||
const expected_response_count = 3;
|
||||
const responses = [];
|
||||
for (let i = 0; i < expected_response_count; ++i) {
|
||||
const message_event =
|
||||
await broadcast_channel_event_watcher.wait_for('message');
|
||||
responses.push(message_event.data);
|
||||
}
|
||||
|
||||
const expected_serialized_handles = await serialize_handles(handles);
|
||||
|
||||
for (let i = 0; i < responses.length; ++i) {
|
||||
assert_equals(responses[i].type, 'receive-serialized-file-system-handles',
|
||||
'The test runner must receive a "serialized-file-system-handles" ' +
|
||||
`message response. Actual response: ${responses[i]}`);
|
||||
|
||||
assert_equals_serialized_handles(
|
||||
responses[i].serialized_handles, expected_serialized_handles);
|
||||
|
||||
await assert_equals_cloned_handles(responses[i].cloned_handles, handles);
|
||||
}
|
||||
}, 'Send and receive messages using a broadcast channel in an iframe, ' +
|
||||
'dedicated worker and service worker.');
|
|
@ -0,0 +1,244 @@
|
|||
'use strict';
|
||||
|
||||
// This script depends on the following scripts:
|
||||
// /file-system-access/resources/messaging-helpers.js
|
||||
// /file-system-access/resources/messaging-blob-helpers.js
|
||||
// /file-system-access/resources/messaging-serialize-helpers.js
|
||||
// /file-system-access/resources/test-helpers.js
|
||||
// /common/get-host-info.sub.js
|
||||
// /service-workers/service-worker/resources/test-helpers.sub.js
|
||||
|
||||
// Define URL constants for cross origin windows.
|
||||
const kRemoteOrigin = get_host_info().HTTPS_REMOTE_ORIGIN;
|
||||
const kRemoteOriginDocumentMessageTarget = `${kRemoteOrigin}${base_path()}` +
|
||||
kDocumentMessageTarget;
|
||||
|
||||
// Sending a FileSystemHandle to a cross origin |target| through postMessage()
|
||||
// must dispatch the 'messageerror' event.
|
||||
//
|
||||
// This test sends a FileSystemHandle to |target|. |target| responds with a
|
||||
// serialized MessageEvent from the 'messageerror' event, allowing the test
|
||||
// runner to verify MessageEvent properties.
|
||||
async function do_send_message_error_test(
|
||||
test,
|
||||
root_dir,
|
||||
receiver,
|
||||
target,
|
||||
target_origin,
|
||||
// False when the MessageEvent's source is null.
|
||||
expected_has_source,
|
||||
// The origin of MessageEvents received by |target|.
|
||||
expected_origin) {
|
||||
const message_watcher = new EventWatcher(test, receiver, 'message');
|
||||
|
||||
// Send a file to |target|.
|
||||
const file = await createFileWithContents(
|
||||
test, 'test-error-file', 'test-error-file-contents', root_dir);
|
||||
target.postMessage(
|
||||
{ type: 'receive-file-system-handles', cloned_file_system_handles: [file] },
|
||||
{ targetOrigin: target_origin });
|
||||
|
||||
// Wait for |target| to respond with results.
|
||||
let message_event = await message_watcher.wait_for('message');
|
||||
const first_response = message_event.data;
|
||||
assert_equals(first_response.type, 'serialized-message-error',
|
||||
'The test runner must receive a "serialized-message-error" message ' +
|
||||
'in response to a FileSystemFileHandle message.');
|
||||
|
||||
// Verify the results.
|
||||
assert_equals_serialized_message_error_event(
|
||||
first_response.serialized_message_error_event,
|
||||
expected_origin, expected_has_source);
|
||||
|
||||
// Send a directory to |target|.
|
||||
const directory = await createDirectory(
|
||||
test, 'test-error-directory', root_dir);
|
||||
|
||||
target.postMessage(
|
||||
{
|
||||
type: 'receive-file-system-handles',
|
||||
cloned_file_system_handles: [directory]
|
||||
}, { targetOrigin: target_origin });
|
||||
|
||||
// Wait for |target| to respond with results.
|
||||
message_event = await message_watcher.wait_for('message');
|
||||
const second_response = message_event.data;
|
||||
assert_equals(second_response.type, 'serialized-message-error',
|
||||
'The test runner must receive a "serialized-message-error" message ' +
|
||||
'in response to a FileSystemDirectoryHandle message.');
|
||||
|
||||
// Verify the results.
|
||||
assert_equals_serialized_message_error_event(
|
||||
second_response.serialized_message_error_event,
|
||||
expected_origin, expected_has_source);
|
||||
}
|
||||
|
||||
// This test receives a FileSystemHandle from |target|. This test runner
|
||||
// must dispatch the 'messageerror' event after receiving a handle from target.
|
||||
async function do_receive_message_error_test(
|
||||
test,
|
||||
receiver,
|
||||
target,
|
||||
target_origin,
|
||||
// False when the MessageEvent's source is null.
|
||||
expected_has_source,
|
||||
// The origin of MessageEvents received by this test runner.
|
||||
expected_origin) {
|
||||
const error_watcher = new EventWatcher(test, receiver, 'messageerror');
|
||||
|
||||
// Receive a file from |target|.
|
||||
target.postMessage(
|
||||
{ type: 'create-file' }, { targetOrigin: target_origin });
|
||||
const first_error = await error_watcher.wait_for('messageerror');
|
||||
const serialized_first_error = serialize_message_error_event(first_error);
|
||||
assert_equals_serialized_message_error_event(
|
||||
serialized_first_error, expected_origin, expected_has_source);
|
||||
|
||||
// Receive a directory from |target|.
|
||||
target.postMessage(
|
||||
{ type: 'create-directory' }, { targetOrigin: target_origin });
|
||||
const second_error = await error_watcher.wait_for('messageerror');
|
||||
const serialized_second_error = serialize_message_error_event(second_error);
|
||||
assert_equals_serialized_message_error_event(
|
||||
serialized_second_error, expected_origin, expected_has_source);
|
||||
}
|
||||
|
||||
// Performs the send message error test followed by the receive message error
|
||||
// test.
|
||||
async function do_send_and_receive_message_error_test(
|
||||
test,
|
||||
root_dir,
|
||||
receiver,
|
||||
target,
|
||||
target_origin,
|
||||
// False when the MessageEvent's source is null.
|
||||
expected_has_source,
|
||||
// The origin of MessageEvents received by |target|.
|
||||
expected_origin,
|
||||
// The origin of MessageEvents received by this test runner.
|
||||
expected_remote_origin) {
|
||||
await do_send_message_error_test(
|
||||
test, root_dir, receiver, target, target_origin, expected_has_source,
|
||||
expected_origin);
|
||||
await do_receive_message_error_test(
|
||||
test, receiver, target, target_origin, expected_has_source,
|
||||
expected_remote_origin);
|
||||
}
|
||||
|
||||
// Runs the same test as do_send_message_error_test(), but uses a MessagePort.
|
||||
// This test starts by establishing a message channel between the test runner
|
||||
// and |target|.
|
||||
async function do_send_message_port_error_test(
|
||||
test, root_dir, target, target_origin) {
|
||||
const message_port = create_message_channel(target, target_origin);
|
||||
await do_send_message_error_test(
|
||||
test, root_dir, /*receiver=*/message_port, /*target=*/message_port,
|
||||
/*target_origin=*/undefined, /*expected_has_source=*/false,
|
||||
/*expected_origin=*/'', /*expected_remote_origin=*/'');
|
||||
}
|
||||
|
||||
// Runs the same test as do_receive_message_error_test(), but uses a MessagePort.
|
||||
async function do_receive_message_port_error_test(
|
||||
test, target, target_origin) {
|
||||
const message_port = create_message_channel(target, target_origin);
|
||||
await do_receive_message_error_test(
|
||||
test, /*receiver=*/message_port, /*target=*/message_port,
|
||||
/*target_origin=*/undefined, /*expected_has_source=*/false,
|
||||
/*expected_origin=*/'');
|
||||
}
|
||||
|
||||
// Runs the same test as do_send_and_receive_message_error_test(), but uses a
|
||||
// MessagePort.
|
||||
async function do_send_and_receive_message_port_error_test(
|
||||
test, root_dir, target, target_origin) {
|
||||
await do_send_message_port_error_test(
|
||||
test, root_dir, target, target_origin);
|
||||
await do_receive_message_port_error_test(
|
||||
test, target, target_origin);
|
||||
}
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe = await add_iframe(
|
||||
t, { src: kRemoteOriginDocumentMessageTarget });
|
||||
await do_send_and_receive_message_error_test(
|
||||
t, root_dir, /*receiver=*/self, /*target=*/iframe.contentWindow,
|
||||
/*target_origin=*/'*', /*expected_has_source=*/true,
|
||||
/*expected_origin=*/location.origin,
|
||||
/*expected_remote_origin=*/kRemoteOrigin);
|
||||
}, 'Fail to send and receive messages using a cross origin iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe = await add_iframe(t, { src: kRemoteOriginDocumentMessageTarget });
|
||||
await do_send_and_receive_message_port_error_test(
|
||||
t, root_dir, /*target=*/iframe.contentWindow, /*target_origin=*/'*');
|
||||
}, 'Fail to send and receive messages using a cross origin message port in ' +
|
||||
'an iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe = await add_iframe(
|
||||
t, { src: kDocumentMessageTarget, sandbox: 'allow-scripts' });
|
||||
|
||||
await do_send_message_error_test(
|
||||
t, root_dir, /*receiver=*/self, /*target=*/iframe.contentWindow,
|
||||
/*target_origin=*/'*', /*expected_has_source*/true,
|
||||
/*expected_origin=*/location.origin);
|
||||
}, 'Fail to send to a sandboxed iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe = await add_iframe(
|
||||
t, { src: kDocumentMessageTarget, sandbox: 'allow-scripts' });
|
||||
await do_send_message_port_error_test(
|
||||
t, root_dir, /*target=*/iframe.contentWindow, /*target_origin=*/'*');
|
||||
}, 'Fail to send messages using a message port to a sandboxed ' +
|
||||
'iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe_data_uri = await create_message_target_data_uri(t);
|
||||
const iframe = await add_iframe(t, { src: iframe_data_uri });
|
||||
await do_send_message_error_test(t, root_dir, /*receiver=*/self,
|
||||
/*target=*/iframe.contentWindow, /*target_origin=*/'*',
|
||||
/*expected_has_source*/true, /*expected_origin=*/location.origin);
|
||||
// Do not test receiving FileSystemHandles from the data URI iframe. Data URI
|
||||
// iframes are insecure and do not expose the File System Access APIs.
|
||||
}, 'Fail to send messages to a data URI iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe_data_uri = await create_message_target_data_uri(t);
|
||||
const iframe = await add_iframe(t, { src: iframe_data_uri });
|
||||
await do_send_message_port_error_test(
|
||||
t, root_dir, /*target=*/iframe.contentWindow, /*target_origin=*/'*');
|
||||
}, 'Fail to send messages using a message port in a data URI iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const child_window = await open_window(t, kRemoteOriginDocumentMessageTarget);
|
||||
await do_send_and_receive_message_error_test(
|
||||
t, root_dir, /*receiver=*/self, /*target=*/child_window, /*target_origin=*/'*',
|
||||
/*expected_has_source=*/true, /*expected_origin=*/location.origin,
|
||||
/*expected_remote_origin=*/kRemoteOrigin);
|
||||
}, 'Fail to send and receive messages using a cross origin window.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const child_window = await open_window(t, kRemoteOriginDocumentMessageTarget);
|
||||
await do_send_message_port_error_test(
|
||||
t, root_dir, /*target=*/child_window, /*target_origin=*/'*');
|
||||
}, 'Fail to send and receive messages using a cross origin message port in ' +
|
||||
'a window.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const url = `${kDocumentMessageTarget}?pipe=header(Content-Security-Policy` +
|
||||
', sandbox allow-scripts)';
|
||||
const child_window = await open_window(t, url);
|
||||
await do_send_message_error_test(
|
||||
t, root_dir, /*receiver=*/self, /*target=*/child_window,
|
||||
/*target_origin=*/'*', /*expected_has_source*/true,
|
||||
/*expected_origin=*/location.origin);
|
||||
}, 'Fail to send messages to a sandboxed window.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const url = `${kDocumentMessageTarget}?pipe=header(Content-Security-Policy` +
|
||||
', sandbox allow-scripts)';
|
||||
const child_window = await open_window(t, url);
|
||||
await do_send_message_port_error_test(
|
||||
t, root_dir, /*target=*/child_window, /*target_origin=*/'*');
|
||||
}, 'Fail to send messages using a message port to a sandboxed ' +
|
||||
'window.');
|
|
@ -0,0 +1,44 @@
|
|||
'use strict';
|
||||
|
||||
// This script depends on the following scripts:
|
||||
// /file-system-access/resources/messaging-helpers.js
|
||||
// /file-system-access/resources/messaging-blob-helpers.js
|
||||
// /file-system-access/resources/messaging-serialize-helpers.js
|
||||
// /file-system-access/resources/test-helpers.js
|
||||
|
||||
directory_test(
|
||||
async (t, root_dir) => {
|
||||
const iframe = await add_iframe(t, {src: kDocumentMessageTarget});
|
||||
await do_message_port_test(
|
||||
t, root_dir, /*target=*/ iframe.contentWindow,
|
||||
/*target_origin=*/ '*');
|
||||
},
|
||||
'Send and receive messages using a message port in a same origin ' +
|
||||
'iframe.');
|
||||
|
||||
directory_test(
|
||||
async (t, root_dir) => {
|
||||
const iframe = await add_iframe(t, {
|
||||
src: kDocumentMessageTarget,
|
||||
sandbox: 'allow-scripts allow-same-origin'
|
||||
});
|
||||
await do_message_port_test(
|
||||
t, root_dir, /*target=*/ iframe.contentWindow,
|
||||
/*target_origin=*/ '*');
|
||||
},
|
||||
'Send and receive messages using a message port in a sandboxed same ' +
|
||||
'origin iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const blob_url = await create_message_target_blob_url(t);
|
||||
const iframe = await add_iframe(t, {src: blob_url});
|
||||
await do_message_port_test(
|
||||
t, root_dir, /*target=*/ iframe.contentWindow, /*target_origin=*/ '*');
|
||||
}, 'Send and receive messages using a message port in a blob iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe_html = await create_message_target_html_without_subresources(t);
|
||||
const iframe = await add_iframe(t, {srcdoc: iframe_html});
|
||||
await do_message_port_test(
|
||||
t, root_dir, /*target=*/ iframe.contentWindow, /*target_origin=*/ '*');
|
||||
}, 'Send and receive messages using a message port in an iframe srcdoc.');
|
|
@ -0,0 +1,35 @@
'use strict';

// This script depends on the following scripts:
// /file-system-access/resources/messaging-helpers.js
// /file-system-access/resources/messaging-blob-helpers.js
// /file-system-access/resources/messaging-serialize-helpers.js
// /file-system-access/resources/test-helpers.js

directory_test(
    async (t, root_dir) => {
      const child_window = await open_window(t, kDocumentMessageTarget);
      await do_message_port_test(
          t, root_dir, /*target=*/ child_window, /*target_origin=*/ '*');
    },
    'Send and receive messages using a message port in a same origin ' +
        'window.');

directory_test(async (t, root_dir) => {
  const blob_url = await create_message_target_blob_url(t);
  const child_window = await open_window(t, blob_url);
  await do_message_port_test(
      t, root_dir, /*target=*/ child_window, /*target_origin=*/ '*');
}, 'Send and receive messages using a message port in a blob window.');

directory_test(
    async (t, root_dir) => {
      const url =
          `${kDocumentMessageTarget}?pipe=header(Content-Security-Policy` +
          ', sandbox allow-scripts allow-same-origin)';
      const child_window = await open_window(t, url);
      await do_message_port_test(
          t, root_dir, /*target=*/ child_window, /*target_origin=*/ '*');
    },
    'Send and receive messages using a message port in a sandboxed same ' +
        'origin window.');
@ -0,0 +1,40 @@
|
|||
'use strict';
|
||||
|
||||
// This script depends on the following scripts:
|
||||
// /file-system-access/resources/messaging-helpers.js
|
||||
// /file-system-access/resources/messaging-blob-helpers.js
|
||||
// /file-system-access/resources/messaging-serialize-helpers.js
|
||||
// /file-system-access/resources/test-helpers.js
|
||||
// /service-workers/service-worker/resources/test-helpers.sub.js
|
||||
|
||||
directory_test(
|
||||
async (t, root_dir) => {
|
||||
const dedicated_worker =
|
||||
create_dedicated_worker(t, kDedicatedWorkerMessageTarget);
|
||||
await do_message_port_test(t, root_dir, /*target=*/ dedicated_worker);
|
||||
},
|
||||
'Send and receive messages using a message port in a dedicated ' +
|
||||
'worker.');
|
||||
|
||||
directory_test(
|
||||
async (t, root_dir) => {
|
||||
const scope = `${kServiceWorkerMessageTarget}` +
|
||||
'?post-message-to-message-port-with-file-handle';
|
||||
const registration =
|
||||
await create_service_worker(t, kServiceWorkerMessageTarget, scope);
|
||||
await do_message_port_test(
|
||||
t, root_dir, /*target=*/ registration.installing);
|
||||
},
|
||||
'Send and receive messages using a message port in a service ' +
|
||||
'worker.');
|
||||
|
||||
if (self.SharedWorker !== undefined) {
|
||||
directory_test(
|
||||
async (t, root_dir) => {
|
||||
const shared_worker = new SharedWorker(kSharedWorkerMessageTarget);
|
||||
shared_worker.port.start();
|
||||
await do_message_port_test(t, root_dir, /*target=*/ shared_worker.port);
|
||||
},
|
||||
'Send and receive messages using a message port in a shared ' +
|
||||
'worker.');
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
'use strict';
|
||||
|
||||
// This script depends on the following scripts:
|
||||
// /file-system-access/resources/messaging-helpers.js
|
||||
// /file-system-access/resources/messaging-blob-helpers.js
|
||||
// /file-system-access/resources/messaging-serialize-helpers.js
|
||||
// /file-system-access/resources/test-helpers.js
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe = await add_iframe(t, {src: kDocumentMessageTarget});
|
||||
await do_post_message_test(
|
||||
t, root_dir, /*receiver=*/ self, /*target=*/ iframe.contentWindow,
|
||||
/*target_origin=*/ '*');
|
||||
}, 'Send and receive messages using a same origin iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe = await add_iframe(t, {
|
||||
src: kDocumentMessageTarget,
|
||||
sandbox: 'allow-scripts allow-same-origin'
|
||||
});
|
||||
await do_post_message_test(
|
||||
t, root_dir, /*receiver=*/ self, /*target=*/ iframe.contentWindow,
|
||||
/*target_origin=*/ '*');
|
||||
}, 'Send and receive messages using a sandboxed same origin iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const blob_url = await create_message_target_blob_url(t);
|
||||
const iframe = await add_iframe(t, {src: blob_url});
|
||||
await do_post_message_test(
|
||||
t, root_dir, /*receiver=*/ self, /*target=*/ iframe.contentWindow,
|
||||
/*target_origin=*/ '*');
|
||||
}, 'Send and receive messages using a blob iframe.');
|
||||
|
||||
directory_test(async (t, root_dir) => {
|
||||
const iframe_html = await create_message_target_html_without_subresources(t);
|
||||
const iframe = await add_iframe(t, {srcdoc: iframe_html});
|
||||
await do_post_message_test(
|
||||
t, root_dir, /*receiver=*/ self, /*target=*/ iframe.contentWindow,
|
||||
/*target_origin=*/ '*');
|
||||
}, 'Send and receive messages using an iframe srcdoc.');
|
|
@ -0,0 +1,31 @@
'use strict';

// This script depends on the following scripts:
// /file-system-access/resources/messaging-helpers.js
// /file-system-access/resources/messaging-blob-helpers.js
// /file-system-access/resources/messaging-serialize-helpers.js
// /file-system-access/resources/test-helpers.js

directory_test(async (t, root_dir) => {
  const child_window = await open_window(t, kDocumentMessageTarget);
  await do_post_message_test(
      t, root_dir, /*receiver=*/ self, /*target=*/ child_window,
      /*target_origin=*/ '*');
}, 'Send and receive messages using a same origin window.');

directory_test(async (t, root_dir) => {
  const blob_url = await create_message_target_blob_url(t);
  const child_window = await open_window(t, blob_url);
  await do_post_message_test(
      t, root_dir, /*receiver=*/ self, /*target=*/ child_window,
      /*target_origin=*/ '*');
}, 'Send and receive messages using a blob window.');

directory_test(async (t, root_dir) => {
  const url = `${kDocumentMessageTarget}?pipe=header(Content-Security-Policy` +
      ', sandbox allow-scripts allow-same-origin)';
  const child_window = await open_window(t, url);
  await do_post_message_test(
      t, root_dir, /*receiver=*/ self, /*target=*/ child_window,
      /*target_origin=*/ '*');
}, 'Send and receive messages using a sandboxed same origin window.');
@ -0,0 +1,35 @@
'use strict';

// This script depends on the following scripts:
// /file-system-access/resources/messaging-helpers.js
// /file-system-access/resources/messaging-blob-helpers.js
// /file-system-access/resources/messaging-serialize-helpers.js
// /file-system-access/resources/test-helpers.js
// /service-workers/service-worker/resources/test-helpers.sub.js

directory_test(async (t, root_dir) => {
  const dedicated_worker =
      create_dedicated_worker(t, kDedicatedWorkerMessageTarget);
  await do_post_message_test(
      t, root_dir, /*receiver=*/ dedicated_worker,
      /*target=*/ dedicated_worker);
}, 'Send and receive messages using a dedicated worker.');

directory_test(async (t, root_dir) => {
  const scope = `${kServiceWorkerMessageTarget}?post-message-with-file-handle`;
  const registration =
      await create_service_worker(t, kServiceWorkerMessageTarget, scope);
  await do_post_message_test(
      t, root_dir, /*receiver=*/ navigator.serviceWorker,
      /*target=*/ registration.installing);
}, 'Send and receive messages using a service worker.');

if (self.SharedWorker !== undefined) {
  directory_test(async (t, root_dir) => {
    const shared_worker = new SharedWorker(kSharedWorkerMessageTarget);
    shared_worker.port.start();
    await do_post_message_test(
        t, root_dir, /*receiver=*/ shared_worker.port,
        /*target=*/ shared_worker.port);
  }, 'Send and receive messages using a shared worker.');
}
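// Usage sketch (not part of the tests above): FileSystemHandle objects are
// serializable, so a page can hand one to a same-origin worker with a plain
// postMessage(), which is the behaviour these tests exercise. The worker
// script name 'handle-worker.js' and the message shape are assumptions made
// up for illustration.
async function sendHandleToWorker() {
  const [handle] = await self.showOpenFilePicker();
  const worker = new Worker('handle-worker.js');
  // The handle is structured-cloned; both sides refer to the same entry.
  worker.postMessage({type: 'file-handle', handle});
  // Inside handle-worker.js the handle arrives as a FileSystemFileHandle:
  //   self.onmessage = async (e) => {
  //     const file = await e.data.handle.getFile();
  //     console.log(file.name, file.size);
  //   };
}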
@ -0,0 +1,115 @@
|
|||
directory_test(async (t, root) => {
|
||||
await promise_rejects_dom(
|
||||
t, 'NotFoundError', root.getDirectoryHandle('non-existing-dir'));
|
||||
}, 'getDirectoryHandle(create=false) rejects for non-existing directories');
|
||||
|
||||
directory_test(async (t, root) => {
|
||||
const handle =
|
||||
await root.getDirectoryHandle('non-existing-dir', {create: true});
|
||||
t.add_cleanup(() => root.removeEntry('non-existing-dir', {recursive: true}));
|
||||
|
||||
assert_equals(handle.kind, 'directory');
|
||||
assert_equals(handle.name, 'non-existing-dir');
|
||||
assert_equals(await getDirectoryEntryCount(handle), 0);
|
||||
assert_array_equals(
|
||||
await getSortedDirectoryEntries(root), ['non-existing-dir/']);
|
||||
}, 'getDirectoryHandle(create=true) creates an empty directory');
|
||||
|
||||
directory_test(async (t, root) => {
|
||||
const existing_handle =
|
||||
await root.getDirectoryHandle('dir-with-contents', {create: true});
|
||||
t.add_cleanup(() => root.removeEntry('dir-with-contents', {recursive: true}));
|
||||
const file_handle = await createEmptyFile(t, 'test-file', existing_handle);
|
||||
|
||||
const handle =
|
||||
await root.getDirectoryHandle('dir-with-contents', {create: false});
|
||||
|
||||
assert_equals(handle.kind, 'directory');
|
||||
assert_equals(handle.name, 'dir-with-contents');
|
||||
assert_array_equals(await getSortedDirectoryEntries(handle), ['test-file']);
|
||||
}, 'getDirectoryHandle(create=false) returns existing directories');
|
||||
|
||||
directory_test(async (t, root) => {
|
||||
const existing_handle =
|
||||
await root.getDirectoryHandle('dir-with-contents', {create: true});
|
||||
t.add_cleanup(() => root.removeEntry('dir-with-contents', {recursive: true}));
|
||||
const file_handle =
|
||||
await existing_handle.getFileHandle('test-file', {create: true});
|
||||
|
||||
const handle =
|
||||
await root.getDirectoryHandle('dir-with-contents', {create: true});
|
||||
|
||||
assert_equals(handle.kind, 'directory');
|
||||
assert_equals(handle.name, 'dir-with-contents');
|
||||
assert_array_equals(await getSortedDirectoryEntries(handle), ['test-file']);
|
||||
}, 'getDirectoryHandle(create=true) returns existing directories without erasing');
|
||||
|
||||
directory_test(async (t, root) => {
|
||||
await createEmptyFile(t, 'file-name', root);
|
||||
|
||||
await promise_rejects_dom(
|
||||
t, 'TypeMismatchError', root.getDirectoryHandle('file-name'));
|
||||
await promise_rejects_dom(
|
||||
t, 'TypeMismatchError',
|
||||
root.getDirectoryHandle('file-name', {create: false}));
|
||||
await promise_rejects_dom(
|
||||
t, 'TypeMismatchError',
|
||||
root.getDirectoryHandle('file-name', {create: true}));
|
||||
}, 'getDirectoryHandle() when a file already exists with the same name');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getDirectoryHandle('', {create: true}));
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getDirectoryHandle('', {create: false}));
|
||||
}, 'getDirectoryHandle() with empty name');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getDirectoryHandle(kCurrentDirectory));
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getDirectoryHandle(kCurrentDirectory, {create: true}));
|
||||
}, `getDirectoryHandle() with "${kCurrentDirectory}" name`);
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const subdir = await createDirectory(t, 'subdir-name', /*parent=*/ dir);
|
||||
|
||||
await promise_rejects_js(
|
||||
t, TypeError, subdir.getDirectoryHandle(kParentDirectory));
|
||||
await promise_rejects_js(
|
||||
t, TypeError,
|
||||
subdir.getDirectoryHandle(kParentDirectory, {create: true}));
|
||||
}, `getDirectoryHandle() with "${kParentDirectory}" name`);
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const first_subdir_name = 'first-subdir-name';
|
||||
const first_subdir =
|
||||
await createDirectory(t, first_subdir_name, /*parent=*/ dir);
|
||||
|
||||
const second_subdir_name = 'second-subdir-name';
|
||||
const second_subdir =
|
||||
await createDirectory(t, second_subdir_name, /*parent=*/ first_subdir);
|
||||
|
||||
for (let i = 0; i < kPathSeparators.length; ++i) {
|
||||
const path_with_separator =
|
||||
`${first_subdir_name}${kPathSeparators[i]}${second_subdir_name}`;
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getDirectoryHandle(path_with_separator),
|
||||
`getDirectoryHandle() must reject names containing "${
|
||||
kPathSeparators[i]}"`);
|
||||
}
|
||||
}, 'getDirectoryHandle(create=false) with a path separator when the directory exists');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const subdir_name = 'subdir-name';
|
||||
const subdir = await createDirectory(t, subdir_name, /*parent=*/ dir);
|
||||
|
||||
for (let i = 0; i < kPathSeparators.length; ++i) {
|
||||
const path_with_separator = `${subdir_name}${kPathSeparators[i]}file_name`;
|
||||
await promise_rejects_js(
|
||||
t, TypeError,
|
||||
dir.getDirectoryHandle(path_with_separator, {create: true}),
|
||||
`getDirectoryHandle(true) must reject names containing "${
|
||||
kPathSeparators[i]}"`);
|
||||
}
|
||||
}, 'getDirectoryHandle(create=true) with a path separator');
|
|
@ -0,0 +1,102 @@
|
|||
directory_test(async (t, dir) => {
|
||||
await promise_rejects_dom(
|
||||
t, 'NotFoundError', dir.getFileHandle('non-existing-file'));
|
||||
}, 'getFileHandle(create=false) rejects for non-existing files');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const handle = await dir.getFileHandle('non-existing-file', {create: true});
|
||||
t.add_cleanup(() => dir.removeEntry('non-existing-file'));
|
||||
|
||||
assert_equals(handle.kind, 'file');
|
||||
assert_equals(handle.name, 'non-existing-file');
|
||||
assert_equals(await getFileSize(handle), 0);
|
||||
assert_equals(await getFileContents(handle), '');
|
||||
}, 'getFileHandle(create=true) creates an empty file for non-existing files');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const existing_handle = await createFileWithContents(
|
||||
t, 'existing-file', '1234567890', /*parent=*/ dir);
|
||||
const handle = await dir.getFileHandle('existing-file');
|
||||
|
||||
assert_equals(handle.kind, 'file');
|
||||
assert_equals(handle.name, 'existing-file');
|
||||
assert_equals(await getFileSize(handle), 10);
|
||||
assert_equals(await getFileContents(handle), '1234567890');
|
||||
}, 'getFileHandle(create=false) returns existing files');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const existing_handle = await createFileWithContents(
|
||||
t, 'file-with-contents', '1234567890', /*parent=*/ dir);
|
||||
const handle = await dir.getFileHandle('file-with-contents', {create: true});
|
||||
|
||||
assert_equals(handle.kind, 'file');
|
||||
assert_equals(handle.name, 'file-with-contents');
|
||||
assert_equals(await getFileSize(handle), 10);
|
||||
assert_equals(await getFileContents(handle), '1234567890');
|
||||
}, 'getFileHandle(create=true) returns existing files without erasing');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const dir_handle = await dir.getDirectoryHandle('dir-name', {create: true});
|
||||
t.add_cleanup(() => dir.removeEntry('dir-name', {recursive: true}));
|
||||
|
||||
await promise_rejects_dom(
|
||||
t, 'TypeMismatchError', dir.getFileHandle('dir-name'));
|
||||
}, 'getFileHandle(create=false) when a directory already exists with the same name');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const dir_handle = await dir.getDirectoryHandle('dir-name', {create: true});
|
||||
t.add_cleanup(() => dir.removeEntry('dir-name', {recursive: true}));
|
||||
|
||||
await promise_rejects_dom(
|
||||
t, 'TypeMismatchError', dir.getFileHandle('dir-name', {create: true}));
|
||||
}, 'getFileHandle(create=true) when a directory already exists with the same name');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
await promise_rejects_js(t, TypeError, dir.getFileHandle('', {create: true}));
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getFileHandle('', {create: false}));
|
||||
}, 'getFileHandle() with empty name');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
await promise_rejects_js(t, TypeError, dir.getFileHandle(kCurrentDirectory));
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getFileHandle(kCurrentDirectory, {create: true}));
|
||||
}, `getFileHandle() with "${kCurrentDirectory}" name`);
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const subdir = await createDirectory(t, 'subdir-name', /*parent=*/ dir);
|
||||
|
||||
await promise_rejects_js(
|
||||
t, TypeError, subdir.getFileHandle(kParentDirectory));
|
||||
await promise_rejects_js(
|
||||
t, TypeError, subdir.getFileHandle(kParentDirectory, {create: true}));
|
||||
}, `getFileHandle() with "${kParentDirectory}" name`);
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const subdir_name = 'subdir-name';
|
||||
const subdir = await createDirectory(t, subdir_name, /*parent=*/ dir);
|
||||
|
||||
const file_name = 'file-name';
|
||||
await createEmptyFile(t, file_name, /*parent=*/ subdir);
|
||||
|
||||
for (let i = 0; i < kPathSeparators.length; ++i) {
|
||||
const path_with_separator =
|
||||
`${subdir_name}${kPathSeparators[i]}${file_name}`;
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getFileHandle(path_with_separator),
|
||||
`getFileHandle() must reject names containing "${kPathSeparators[i]}"`);
|
||||
}
|
||||
}, 'getFileHandle(create=false) with a path separator when the file exists.');
|
||||
|
||||
directory_test(async (t, dir) => {
|
||||
const subdir_name = 'subdir-name';
|
||||
const subdir = await createDirectory(t, subdir_name, /*parent=*/ dir);
|
||||
|
||||
for (let i = 0; i < kPathSeparators.length; ++i) {
|
||||
const path_with_separator = `${subdir_name}${kPathSeparators[i]}file_name`;
|
||||
await promise_rejects_js(
|
||||
t, TypeError, dir.getFileHandle(path_with_separator, {create: true}),
|
||||
`getFileHandle(create=true) must reject names containing "${
|
||||
kPathSeparators[i]}"`);
|
||||
}
|
||||
}, 'getFileHandle(create=true) with a path separator');
|
|
@ -0,0 +1,98 @@
directory_test(async (t, root) => {
  const file_name1 = 'foo1.txt';
  const file_name2 = 'foo2.txt';
  await createFileWithContents(t, file_name1, 'contents', /*parent=*/ root);
  await createFileWithContents(t, file_name2, 'contents', /*parent=*/ root);

  for await (let entry of root) {
    break;
  }

}, 'returning early from an iteration doesn\'t crash');

directory_test(async (t, root) => {
  const file_name1 = 'foo1.txt';
  const file_name2 = 'foo2.txt';
  await createFileWithContents(t, file_name1, 'contents', /*parent=*/ root);
  await createFileWithContents(t, file_name2, 'contents', /*parent=*/ root);

  let names = [];
  for await (let entry of root) {
    assert_true(Array.isArray(entry));
    assert_equals(entry.length, 2);
    assert_equals(typeof entry[0], 'string');
    assert_true(entry[1] instanceof FileSystemFileHandle);
    assert_equals(entry[0], entry[1].name);
    names.push(entry[0]);
  }
  names.sort();
  assert_array_equals(names, [file_name1, file_name2]);

}, '@@asyncIterator: full iteration works');

directory_test(async (t, root) => {
  const file_name1 = 'foo1.txt';
  const file_name2 = 'foo2.txt';
  await createFileWithContents(t, file_name1, 'contents', /*parent=*/ root);
  await createFileWithContents(t, file_name2, 'contents', /*parent=*/ root);

  let names = [];
  for await (let entry of root.entries()) {
    assert_true(Array.isArray(entry));
    assert_equals(entry.length, 2);
    assert_equals(typeof entry[0], 'string');
    assert_true(entry[1] instanceof FileSystemFileHandle);
    assert_equals(entry[0], entry[1].name);
    names.push(entry[0]);
  }
  names.sort();
  assert_array_equals(names, [file_name1, file_name2]);
}, 'entries: full iteration works');

directory_test(async (t, root) => {
  const file_name1 = 'foo1.txt';
  const file_name2 = 'foo2.txt';
  await createFileWithContents(t, file_name1, 'contents', /*parent=*/ root);
  await createFileWithContents(t, file_name2, 'contents', /*parent=*/ root);

  let names = [];
  for await (let entry of root.values()) {
    assert_true(entry instanceof FileSystemFileHandle);
    names.push(entry.name);
  }
  names.sort();
  assert_array_equals(names, [file_name1, file_name2]);
}, 'values: full iteration works');

directory_test(async (t, root) => {
  const file_name1 = 'foo1.txt';
  const file_name2 = 'foo2.txt';
  await createFileWithContents(t, file_name1, 'contents', /*parent=*/ root);
  await createFileWithContents(t, file_name2, 'contents', /*parent=*/ root);

  let names = [];
  for await (let entry of root.keys()) {
    assert_equals(typeof entry, 'string');
    names.push(entry);
  }
  names.sort();
  assert_array_equals(names, [file_name1, file_name2]);
}, 'keys: full iteration works');

directory_test(async (t, root) => {
  const file_name1 = 'foo1.txt';
  await createFileWithContents(t, file_name1, 'contents', /*parent=*/ root);

  const next = (() => {
    const iterator = root.entries();
    return iterator.next();
  })();
  garbageCollect();
  let entry = await next;
  assert_false(entry.done);
  assert_true(Array.isArray(entry.value));
  assert_equals(entry.value.length, 2);
  assert_equals(entry.value[0], file_name1);
  assert_true(entry.value[1] instanceof FileSystemFileHandle);
  assert_equals(entry.value[1].name, file_name1);
}, 'iteration while iterator gets garbage collected');
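// Usage sketch (not part of the tests above): the async-iteration protocol
// covered here ([Symbol.asyncIterator], entries(), keys(), values()) is what a
// recursive directory walk looks like in application code. Illustration only;
// `listAllFiles` is a made-up helper name.
async function listAllFiles(dirHandle, prefix = '') {
  const paths = [];
  for await (const [name, handle] of dirHandle.entries()) {
    if (handle.kind === 'file') {
      paths.push(`${prefix}${name}`);
    } else {
      // Directory handles can be descended into recursively.
      paths.push(...await listAllFiles(handle, `${prefix}${name}/`));
    }
  }
  return paths;
}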
@ -0,0 +1,69 @@

directory_test(async (t, root) => {
  const handle =
      await createFileWithContents(t, 'file-to-remove', '12345', root);
  await createFileWithContents(t, 'file-to-keep', 'abc', root);
  await root.removeEntry('file-to-remove');

  assert_array_equals(await getSortedDirectoryEntries(root), ['file-to-keep']);
  await promise_rejects_dom(t, 'NotFoundError', getFileContents(handle));
}, 'removeEntry() to remove a file');

directory_test(async (t, root) => {
  const handle =
      await createFileWithContents(t, 'file-to-remove', '12345', root);
  await root.removeEntry('file-to-remove');

  await promise_rejects_dom(t, 'NotFoundError', root.removeEntry('file-to-remove'));
}, 'removeEntry() on an already removed file should fail');

directory_test(async (t, root) => {
  const dir = await root.getDirectoryHandle('dir-to-remove', {create: true});
  await createFileWithContents(t, 'file-to-keep', 'abc', root);
  await root.removeEntry('dir-to-remove');

  assert_array_equals(await getSortedDirectoryEntries(root), ['file-to-keep']);
  await promise_rejects_dom(t, 'NotFoundError', getSortedDirectoryEntries(dir));
}, 'removeEntry() to remove an empty directory');

directory_test(async (t, root) => {
  const dir = await root.getDirectoryHandle('dir-to-remove', {create: true});
  t.add_cleanup(() => root.removeEntry('dir-to-remove', {recursive: true}));
  await createEmptyFile(t, 'file-in-dir', dir);

  await promise_rejects_dom(
      t, 'InvalidModificationError', root.removeEntry('dir-to-remove'));
  assert_array_equals(
      await getSortedDirectoryEntries(root), ['dir-to-remove/']);
  assert_array_equals(await getSortedDirectoryEntries(dir), ['file-in-dir']);
}, 'removeEntry() on a non-empty directory should fail');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'dir', root);
  await promise_rejects_js(t, TypeError, dir.removeEntry(''));
}, 'removeEntry() with empty name should fail');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'dir', root);
  await promise_rejects_js(t, TypeError, dir.removeEntry(kCurrentDirectory));
}, `removeEntry() with "${kCurrentDirectory}" name should fail`);

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'dir', root);
  await promise_rejects_js(t, TypeError, dir.removeEntry(kParentDirectory));
}, `removeEntry() with "${kParentDirectory}" name should fail`);

directory_test(async (t, root) => {
  const dir_name = 'dir-name';
  const dir = await createDirectory(t, dir_name, root);

  const file_name = 'file-name';
  await createEmptyFile(t, file_name, dir);

  for (let i = 0; i < kPathSeparators.length; ++i) {
    const path_with_separator = `${dir_name}${kPathSeparators[i]}${file_name}`;
    await promise_rejects_js(
        t, TypeError, root.removeEntry(path_with_separator),
        `removeEntry() must reject names containing "${kPathSeparators[i]}"`);
  }
}, 'removeEntry() with a path separator should fail.');
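// Usage sketch (not part of the tests above): as the tests show, removeEntry()
// on a non-empty directory rejects with InvalidModificationError unless
// {recursive: true} is passed. A cautious delete helper might look like this;
// the function name and the confirm() prompt are illustrative assumptions.
async function deleteChild(parentDir, name) {
  try {
    await parentDir.removeEntry(name);
  } catch (e) {
    if (e.name === 'InvalidModificationError' &&
        confirm(`"${name}" is not empty. Delete it and all of its contents?`)) {
      await parentDir.removeEntry(name, {recursive: true});
    } else {
      throw e;
    }
  }
}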
@ -0,0 +1,27 @@
'use strict';

directory_test(async (t, root_dir) => {
  assert_array_equals(await root_dir.resolve(root_dir), []);
}, 'Resolve returns empty array for same directory');

directory_test(async (t, root_dir) => {
  const subdir = await createDirectory(t, 'subdir-name', root_dir);
  const file = await createEmptyFile(t, 'file-name', subdir);

  assert_array_equals(await root_dir.resolve(file), ['subdir-name', 'file-name']);
}, 'Resolve returns correct path');

directory_test(async (t, root_dir) => {
  const subdir = await createDirectory(t, 'subdir😊', root_dir);
  const file = await createEmptyFile(t, 'file-name', subdir);

  assert_array_equals(await root_dir.resolve(file), ['subdir😊', 'file-name']);
  assert_array_equals(await root_dir.resolve(subdir), ['subdir😊']);
}, 'Resolve returns correct path with non-ascii characters');

directory_test(async (t, root_dir) => {
  const subdir = await createDirectory(t, 'subdir-name', root_dir);
  const file = await createEmptyFile(t, 'file-name', root_dir);

  assert_equals(await subdir.resolve(file), null);
}, 'Resolve returns null when entry is not a child');
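// Usage sketch (not part of the tests above): resolve() returns the path from
// one handle to another as an array of names (or null when the argument is not
// a descendant), which applications typically join into a display path. The
// helper name and separator choice here are assumptions for illustration.
async function displayPath(rootDir, handle) {
  const components = await rootDir.resolve(handle);
  if (components === null) {
    return null;  // |handle| does not live under |rootDir|.
  }
  return components.join('/');  // [] for rootDir itself, which yields ''.
}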
@ -0,0 +1,40 @@
directory_test(async (t, root) => {
  const fileContents = 'awesome content';
  let handle = await createFileWithContents(t, 'foo.txt', fileContents, /*parent=*/ root);
  let file = await handle.getFile();
  let slice = file.slice(1, file.size);
  let actualContents = await slice.text();
  assert_equals(actualContents, fileContents.slice(1, fileContents.length));
}, 'getFile() provides a file that can be sliced');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'mtime.txt', root);
  let file = await handle.getFile();
  const first_mtime = file.lastModified;

  // We wait for 2s here to ensure that the files do not have the
  // same modification time. Some filesystems have low resolutions
  // for modification timestamps.
  let timeout = new Promise(resolve => {
    t.step_timeout(resolve, 2000);
  });
  await timeout;

  const writer = await handle.createWritable({keepExistingData: false});
  await writer.write(new Blob(['foo']));
  await writer.close();

  file = await handle.getFile();
  const second_mtime = file.lastModified;

  // We wait for 5 ms here to ensure that `lastModified`
  // from the File objects is stable between getFile invocations.
  timeout = new Promise(resolve => {
    t.step_timeout(resolve, 5);
  });
  await timeout;
  let fileReplica = await handle.getFile();
  assert_equals(second_mtime, fileReplica.lastModified);

  assert_less_than(first_mtime, second_mtime);
}, 'getFile() returns last modified time');
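// Usage sketch (not part of the tests above): getFile() returns a snapshot
// File object, so lastModified only advances when getFile() is called again
// after a write, which is what the test above relies on. Illustration only;
// the polling approach and interval are arbitrary assumptions.
async function waitForExternalChange(fileHandle, intervalMs = 1000) {
  const original = (await fileHandle.getFile()).lastModified;
  while (true) {
    await new Promise(resolve => setTimeout(resolve, intervalMs));
    const current = await fileHandle.getFile();
    if (current.lastModified !== original) {
      return current;  // The file was modified since we started watching.
    }
  }
}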
@ -0,0 +1,135 @@
|
|||
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'foo_string.txt', root);
  const wfs = await handle.createWritable();

  const rs = recordingReadableStream({
    start(controller) {
      controller.enqueue('foo_string');
      controller.close();
    }
  });

  await rs.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'foo_string');
  assert_equals(await getFileSize(handle), 10);
}, 'can be piped to with a string');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'foo_arraybuf.txt', root);
  const wfs = await handle.createWritable();
  const buf = new ArrayBuffer(3);
  const intView = new Uint8Array(buf);
  intView[0] = 0x66;
  intView[1] = 0x6f;
  intView[2] = 0x6f;

  const rs = recordingReadableStream({
    start(controller) {
      controller.enqueue(buf);
      controller.close();
    }
  });

  await rs.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);
}, 'can be piped to with an ArrayBuffer');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'foo_blob.txt', root);
  const wfs = await handle.createWritable();

  const rs = recordingReadableStream({
    start(controller) {
      controller.enqueue(new Blob(['foo']));
      controller.close();
    }
  });

  await rs.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);
}, 'can be piped to with a Blob');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'foo_write_param.txt', root);
  const wfs = await handle.createWritable();

  const rs = recordingReadableStream({
    start(controller) {
      controller.enqueue({type: 'write', data: 'foobar'});
      controller.close();
    }
  });

  await rs.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'foobar');
  assert_equals(await getFileSize(handle), 6);
}, 'can be piped to with a param object with write command');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'foo_write_param.txt', root);
  const wfs = await handle.createWritable();

  const rs = recordingReadableStream({
    start(controller) {
      controller.enqueue({type: 'write', data: 'foobar'});
      controller.enqueue({type: 'truncate', size: 10});
      controller.enqueue({type: 'write', position: 0, data: 'baz'});
      controller.close();
    }
  });

  await rs.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'bazbar\0\0\0\0');
  assert_equals(await getFileSize(handle), 10);
}, 'can be piped to with a param object with multiple commands');
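
// Walking through the expected result above: the queued commands write
// 'foobar' (6 bytes), truncate the file to 10 bytes (padding with NUL
// bytes), then overwrite the first 3 bytes with 'baz' at position 0,
// which yields 'bazbar' followed by four NUL bytes.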

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'foo_write_queued.txt', root);
  const wfs = await handle.createWritable();

  const rs = recordingReadableStream({
    start(controller) {
      controller.enqueue('foo');
      controller.enqueue('bar');
      controller.enqueue('baz');
      controller.close();
    }
  });

  await rs.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'foobarbaz');
  assert_equals(await getFileSize(handle), 9);
}, 'multiple operations can be queued');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'fetched.txt', root);
  const wfs = await handle.createWritable();

  const response = await fetch('data:text/plain,fetched from far');
  const body = await response.body;
  await body.pipeTo(wfs, { preventCancel: true });
  assert_equals(await getFileContents(handle), 'fetched from far');
  assert_equals(await getFileSize(handle), 16);
}, 'plays well with fetch');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'aborted should_be_empty.txt', root);
  const wfs = await handle.createWritable();

  const response = await fetch('data:text/plain,fetched from far');
  const body = await response.body;

  const abortController = new AbortController();
  const signal = abortController.signal;

  const promise = body.pipeTo(wfs, { signal });
  await abortController.abort();

  await promise_rejects_dom(t, 'AbortError', promise, 'stream is aborted');
  await promise_rejects_js(
      t, TypeError, wfs.close(), 'stream cannot be closed to flush writes');

  assert_equals(await getFileContents(handle), '');
  assert_equals(await getFileSize(handle), 0);
}, 'abort() aborts write');

@@ -0,0 +1,354 @@
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'empty_blob', root);
  const stream = await handle.createWritable();

  await stream.write(new Blob([]));
  await stream.close();

  assert_equals(await getFileContents(handle), '');
  assert_equals(await getFileSize(handle), 0);
}, 'write() with an empty blob to an empty file');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'valid_blob', root);
  const stream = await handle.createWritable();

  await stream.write(new Blob(['1234567890']));
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() a blob to an empty file');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'write_param_empty', root);
  const stream = await handle.createWritable();

  await stream.write({type: 'write', data: '1234567890'});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() with WriteParams without position to an empty file');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'string_zero_offset', root);
  const stream = await handle.createWritable();

  await stream.write({type: 'write', position: 0, data: '1234567890'});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() a string to an empty file with zero offset');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'blob_zero_offset', root);
  const stream = await handle.createWritable();

  await stream.write(
      {type: 'write', position: 0, data: new Blob(['1234567890'])});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() a blob to an empty file with zero offset');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'write_appends', root);
  const stream = await handle.createWritable();

  await stream.write('12345');
  await stream.write('67890');
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() called consecutively appends');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'write_appends_object_string', root);
  const stream = await handle.createWritable();

  await stream.write('12345');
  await stream.write({type: 'write', data: '67890'});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() WriteParams without position and string appends');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'write_appends_object_blob', root);
  const stream = await handle.createWritable();

  await stream.write('12345');
  await stream.write({type: 'write', data: new Blob(['67890'])});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234567890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() WriteParams without position and blob appends');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'string_with_offset', root);
  const stream = await handle.createWritable();

  await stream.write('1234567890');
  await stream.write({type: 'write', position: 4, data: 'abc'});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234abc890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() called with a string and a valid offset');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'blob_with_offset', root);
  const stream = await handle.createWritable();

  await stream.write('1234567890');
  await stream.write({type: 'write', position: 4, data: new Blob(['abc'])});
  await stream.close();

  assert_equals(await getFileContents(handle), '1234abc890');
  assert_equals(await getFileSize(handle), 10);
}, 'write() called with a blob and a valid offset');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'bad_offset', root);
  const stream = await handle.createWritable();

  await promise_rejects_dom(
      t, 'InvalidStateError',
      stream.write({type: 'write', position: 4, data: new Blob(['abc'])}));
  await promise_rejects_js(
      t, TypeError, stream.close(), 'stream is already closed');

  assert_equals(await getFileContents(handle), '');
  assert_equals(await getFileSize(handle), 0);
}, 'write() called with an invalid offset');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'empty_string', root);
  const stream = await handle.createWritable();

  await stream.write('');
  await stream.close();
  assert_equals(await getFileContents(handle), '');
  assert_equals(await getFileSize(handle), 0);
}, 'write() with an empty string to an empty file');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'valid_utf8_string', root);
  const stream = await handle.createWritable();

  await stream.write('foo🤘');
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo🤘');
  assert_equals(await getFileSize(handle), 7);
}, 'write() with a valid utf-8 string');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'string_with_unix_line_ending', root);
  const stream = await handle.createWritable();

  await stream.write('foo\n');
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo\n');
  assert_equals(await getFileSize(handle), 4);
}, 'write() with a string with unix line ending preserved');

directory_test(async (t, root) => {
  const handle =
      await createEmptyFile(t, 'string_with_windows_line_ending', root);
  const stream = await handle.createWritable();

  await stream.write('foo\r\n');
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo\r\n');
  assert_equals(await getFileSize(handle), 5);
}, 'write() with a string with windows line ending preserved');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'empty_array_buffer', root);
  const stream = await handle.createWritable();

  const buf = new ArrayBuffer(0);
  await stream.write(buf);
  await stream.close();
  assert_equals(await getFileContents(handle), '');
  assert_equals(await getFileSize(handle), 0);
}, 'write() with an empty array buffer to an empty file');

directory_test(async (t, root) => {
  const handle =
      await createEmptyFile(t, 'valid_string_typed_byte_array', root);
  const stream = await handle.createWritable();

  const buf = new ArrayBuffer(3);
  const intView = new Uint8Array(buf);
  intView[0] = 0x66;
  intView[1] = 0x6f;
  intView[2] = 0x6f;
  await stream.write(buf);
  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);
}, 'write() with a valid typed array buffer');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'close_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  const stream = await handle.createWritable();
  await stream.write('foo');

  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects_dom(t, 'NotFoundError', stream.close());
}, 'atomic writes: close() fails when parent directory is removed');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'atomic_writes.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foox');

  const stream2 = await handle.createWritable();
  await stream2.write('bar');

  assert_equals(await getFileSize(handle), 0);

  await stream2.close();
  assert_equals(await getFileContents(handle), 'bar');
  assert_equals(await getFileSize(handle), 3);

  await stream.close();
  assert_equals(await getFileContents(handle), 'foox');
  assert_equals(await getFileSize(handle), 4);
}, 'atomic writes: writable file streams make atomic changes on close');
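
// The assertions above only hold because each writable stream created by
// createWritable() buffers its own copy of the data: nothing is visible
// through the handle until that stream's close() commits it, and whichever
// stream closes last determines the final file contents.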

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'atomic_write_after_close.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');

  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);

  await promise_rejects_js(t, TypeError, stream.write('abc'));
}, 'atomic writes: write() after close() fails');

directory_test(async (t, root) => {
  const handle =
      await createEmptyFile(t, 'atomic_truncate_after_close.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');

  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);

  await promise_rejects_js(t, TypeError, stream.truncate(0));
}, 'atomic writes: truncate() after close() fails');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'atomic_close_after_close.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');

  await stream.close();
  assert_equals(await getFileContents(handle), 'foo');
  assert_equals(await getFileSize(handle), 3);

  await promise_rejects_js(t, TypeError, stream.close());
}, 'atomic writes: close() after close() fails');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'there_can_be_only_one.txt', root);
  const stream = await handle.createWritable();
  await stream.write('foo');

  // This test might be flaky if there is a race condition allowing
  // close() to be called multiple times.
  const success_promises =
      [...Array(100)].map(() => stream.close().then(() => 1).catch(() => 0));
  const close_attempts = await Promise.all(success_promises);
  const success_count = close_attempts.reduce((x, y) => x + y);
  assert_equals(success_count, 1);
}, 'atomic writes: only one close() operation may succeed');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'atomic_writable_file_stream_persists_removed.txt';
  const handle = await createFileWithContents(t, file_name, 'foo', dir);

  const stream = await handle.createWritable();
  await stream.write('bar');

  await dir.removeEntry(file_name);
  await promise_rejects_dom(t, 'NotFoundError', getFileContents(handle));

  await stream.close();
  assert_equals(await getFileContents(handle), 'bar');
  assert_equals(await getFileSize(handle), 3);
}, 'atomic writes: writable file stream persists file on close, even if file is removed');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'writer_written', root);
  const stream = await handle.createWritable();
  assert_false(stream.locked);
  const writer = stream.getWriter();
  assert_true(stream.locked);

  await writer.write('foo');
  await writer.write(new Blob(['bar']));
  await writer.write({type: 'seek', position: 0});
  await writer.write({type: 'write', data: 'baz'});
  await writer.close();

  assert_equals(await getFileContents(handle), 'bazbar');
  assert_equals(await getFileSize(handle), 6);
}, 'getWriter() can be used');

directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'content.txt', 'very long string', root);
  const stream = await handle.createWritable();

  await promise_rejects_dom(
      t, 'SyntaxError', stream.write({type: 'truncate'}),
      'truncate without size');
}, 'WriteParams: truncate missing size param');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'content.txt', root);
  const stream = await handle.createWritable();

  await promise_rejects_dom(
      t, 'SyntaxError', stream.write({type: 'write'}), 'write without data');
}, 'WriteParams: write missing data param');

directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'content.txt', 'seekable', root);
  const stream = await handle.createWritable();

  await promise_rejects_dom(
      t, 'SyntaxError', stream.write({type: 'seek'}), 'seek without position');
}, 'WriteParams: seek missing position param');

directory_test(async (t, root) => {
  const source_file =
      await createFileWithContents(t, 'source_file', 'source data', root);
  const source_blob = await source_file.getFile();
  await root.removeEntry(source_file.name);

  const handle = await createEmptyFile(t, 'invalid_blob_test', root);
  const stream = await handle.createWritable();
  await promise_rejects_dom(t, 'NotFoundError', stream.write(source_blob));
  await promise_rejects_js(t, TypeError, stream.close());

  assert_equals(await getFileContents(handle), '');
  assert_equals(await getFileSize(handle), 0);
}, 'write() with an invalid blob to an empty file should reject');

@@ -0,0 +1,123 @@
directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'trunc_shrink', root);
  const stream = await handle.createWritable();

  await stream.write('1234567890');
  await stream.truncate(5);
  await stream.close();

  assert_equals(await getFileContents(handle), '12345');
  assert_equals(await getFileSize(handle), 5);
}, 'truncate() to shrink a file');

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'trunc_grow', root);
  const stream = await handle.createWritable();

  await stream.write('abc');
  await stream.truncate(5);
  await stream.close();

  assert_equals(await getFileContents(handle), 'abc\0\0');
  assert_equals(await getFileSize(handle), 5);
}, 'truncate() to grow a file');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'create_writable_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);

  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects_dom(t, 'NotFoundError', handle.createWritable());
}, 'createWritable() fails when parent directory is removed');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'write_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  const stream = await handle.createWritable();

  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects_dom(t, 'NotFoundError', stream.write('foo'));
}, 'write() fails when parent directory is removed');

directory_test(async (t, root) => {
  const dir = await createDirectory(t, 'parent_dir', root);
  const file_name = 'truncate_fails_when_dir_removed.txt';
  const handle = await createEmptyFile(t, file_name, dir);
  const stream = await handle.createWritable();

  await root.removeEntry('parent_dir', {recursive: true});
  await promise_rejects_dom(t, 'NotFoundError', stream.truncate(0));
}, 'truncate() fails when parent directory is removed');

directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'atomic_file_is_copied.txt', 'fooks', root);
  const stream = await handle.createWritable({keepExistingData: true});

  await stream.write('bar');
  await stream.close();
  assert_equals(await getFileContents(handle), 'barks');
  assert_equals(await getFileSize(handle), 5);
}, 'createWritable({keepExistingData: true}): atomic writable file stream initialized with source contents');

directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'atomic_file_is_not_copied.txt', 'very long string', root);
  const stream = await handle.createWritable({keepExistingData: false});

  await stream.write('bar');
  assert_equals(await getFileContents(handle), 'very long string');
  await stream.close();
  assert_equals(await getFileContents(handle), 'bar');
  assert_equals(await getFileSize(handle), 3);
}, 'createWritable({keepExistingData: false}): atomic writable file stream initialized with empty file');

directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'trunc_smaller_offset.txt', '1234567890', root);
  const stream = await handle.createWritable({keepExistingData: true});

  await stream.truncate(5);
  await stream.write('abc');
  await stream.close();

  assert_equals(await getFileContents(handle), 'abc45');
  assert_equals(await getFileSize(handle), 5);
}, 'cursor position: truncate size > offset');

directory_test(async (t, root) => {
  const handle = await createFileWithContents(
      t, 'trunc_bigger_offset.txt', '1234567890', root);
  const stream = await handle.createWritable({keepExistingData: true});

  await stream.seek(6);
  await stream.truncate(5);
  await stream.write('abc');
  await stream.close();

  assert_equals(await getFileContents(handle), '12345abc');
  assert_equals(await getFileSize(handle), 8);
}, 'cursor position: truncate size < offset');
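
// Reading the expected contents above: in the first test the write cursor
// stays at 0, so 'abc' overwrites the start of the truncated '12345',
// giving 'abc45'. In the second, the cursor (6) ends up past the new size
// (5), so it is clamped to the end of the file and 'abc' is appended,
// giving '12345abc'.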

directory_test(async (t, root) => {
  const handle = await createEmptyFile(t, 'contents', root);
  const stream = await handle.createWritable();
  assert_false(stream.locked);

  stream.write('abc');
  assert_false(stream.locked);
  stream.write('def');
  assert_false(stream.locked);
  stream.truncate(9);
  assert_false(stream.locked);
  stream.seek(0);
  assert_false(stream.locked);
  stream.write('xyz');
  assert_false(stream.locked);
  await stream.close();

  assert_equals(await getFileContents(handle), 'xyzdef\0\0\0');
  assert_equals(await getFileSize(handle), 9);
}, 'commands are queued, stream is unlocked after each operation');
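
// Note that these write()/truncate()/seek() calls go through the stream's
// convenience methods rather than a writer obtained via getWriter(), which
// is why `stream.locked` stays false even while the operations are queued.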