Index: content/test/data/blob_storage/blob_creation_and_slicing.html
diff --git a/content/test/data/blob_storage/blob_creation_and_slicing.html b/content/test/data/blob_storage/blob_creation_and_slicing.html
new file mode 100644
index 0000000000000000000000000000000000000000..2498eb092400b7b195bf7835e4f37268bd4bf88d
--- /dev/null
+++ b/content/test/data/blob_storage/blob_creation_and_slicing.html
@@ -0,0 +1,156 @@
+<html>
+  <head>
+    <title>Blob Creation & Slicing</title>
+    <script type="text/javascript" src="common.js"></script>
+    <script type="text/javascript">
+// We create < 2000 bytes of data, as that is the max for the browsertest.
|
pwnall
2016/12/01 01:12:13
Instead of having a comment here, how about adding

+
+var numRawBlobs = 100;
|
pwnall
2016/12/01 01:12:13
Can you please document the test parameters?
dmurph
2016/12/01 20:40:58
Done.
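
One way to address this, sketched here for reference (the comment wording is illustrative, not quoted from the later patchset):

    // Number of blobs built directly from typed-array data.
    var numRawBlobs = 100;
    // Number of blobs made by slicing one of the raw blobs in half.
    var numSlices = 100;
    // Number of blobs that wrap a slice between 'pre' and 'post' strings.
    var numSandwiches = 100;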

+var numSlices = 100;
+var numSandwiches = 100;
+
+var test = function() {
+  var blobs = [];
+
+  // This should cause us to go straight to file.
+  var veryLargeDataSize = 190;
|
pwnall
2016/12/01 01:12:13
How about naming the buffers according to the goal
dmurph
2016/12/01 20:40:59
Done.

+  var veryLargeData = new Uint8Array(veryLargeDataSize);
+  veryLargeData.fill(2);
|
pwnall
2016/12/01 01:12:13
Any reason for these values? Are they set up so th
dmurph
2016/12/01 20:40:59
Done.

+  veryLargeData.fill(8, veryLargeDataSize / 2);
+
+  // This should require multiple shared memory segments.
+  var largeDataSize = 15;
|
pwnall
2016/12/01 01:12:13
sharedMemoryData{Size}?
dmurph
2016/12/01 20:40:59
Done.

+  var largeData = new Uint8Array(largeDataSize);
+  largeData.fill(4);
+  largeData.fill(5, largeDataSize / 2);
+
+  // This should fit in IPC.
+  var smallDataSize = 2;
|
pwnall
2016/12/01 01:12:13
ipcData{Size}?
dmurph
2016/12/01 20:40:59
Done.
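
Reading the three naming comments together (this one, sharedMemoryData above, and the earlier "name the buffers according to the goal" note), one possible set of goal-based names would be the following; the exact identifiers are illustrative, not taken from the CL:

    // Large enough that the backend should spill it straight to a file.
    var fileBackedDataSize = 190;
    var fileBackedData = new Uint8Array(fileBackedDataSize);
    // Sized to need more than one shared memory segment.
    var sharedMemoryDataSize = 15;
    var sharedMemoryData = new Uint8Array(sharedMemoryDataSize);
    // Small enough to ride along inline in the IPC message.
    var ipcDataSize = 2;
    var ipcData = new Uint8Array(ipcDataSize);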

+  var smallData = new Uint8Array(2);
+  smallData[1] = 2;
+
+  var i = 0;
|
pwnall
2016/12/01 01:12:13
I think you can use let i inside loops so i is block-scoped.
dmurph
2016/12/01 20:40:59
Done.
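
For reference, a counter declared with let in the for header is block-scoped to that loop, so it cannot leak out or collide with a later declaration; a minimal sketch:

    for (let i = 0; i < numRawBlobs; ++i) {
      // Each iteration gets its own block-scoped i.
      smallData[0] = i;
      blobs.push(new Blob([smallData]));
    }
    // i is not visible here, so a later loop can declare its own counter.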

+  var blob;
+  for (i = 0; i < numRawBlobs; ++i) {
+    var data = [];
+    if (i % 5 == 0) {
+      data.push(largeData);
+    } else if (i % 13 == 0) {
+      data.push(veryLargeData);
+    } else {
+      smallData[0] = i;
+      data.push(smallData);
+    }
+    blob = new Blob(data, { content_type: "text/plain" });
+    blobs.push(blob);
+  }
+
+  for (i = 0; i < numSlices; ++i) {
+    var originalSize;
+    var rawIndex = i % numRawBlobs;
+    if (rawIndex % 5 == 0) {
+      originalSize = largeDataSize;
+    } else if (rawIndex % 13 == 0) {
+      originalSize = veryLargeDataSize;
+    } else {
+      originalSize = smallDataSize;
+    }
+    if (i % 2 == 0) {
+      blob = blobs[i].slice(originalSize / 2);
+    } else {
+      blob = blobs[i].slice(0, originalSize / 2);
+    }
+    blobs.push(blob);
+  }
+
+  for (i = 0; i < numSandwiches; ++i) {
+    var sliceIndex = numRawBlobs + (i % numSlices);
+    blobs.push(new Blob(['pre', blobs[sliceIndex], 'post'], { content_type: "text/plain" }));
+  }
+
+  var getBytes = function(string) {
+    var bytes = [];
+    for (var i = 0; i < string.length; ++i) {
+      bytes.push(string.charCodeAt(i));
+    }
+    return bytes;
+  }
+
+  var i = 0;
|
pwnall
2016/12/01 01:12:13
If you don't want to switch to let, you should at

+  var numRead = 0;
+  for (; i < blobs.length; i++) {
+    var genReader = function(index, d) {
|
pwnall
2016/12/01 01:12:13
This seems to be defined so you can capture i into the closure.
dmurph
2016/12/01 20:40:58
isn't that what I do?
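
For context: with a var-declared counter every callback would see the same final i, which is why the CL pins the value by passing it through genReader(index, d). The same effect falls out of a let loop header, roughly (a sketch, not the CL's code):

    for (let i = 0; i < blobs.length; ++i) {
      let reader = new FileReader();
      reader.onloadend = function() {
        // With let, this closure sees the i of its own iteration,
        // so no wrapper function is needed to pin the value.
        debug('Finished reading blob ' + i);
      };
      reader.readAsArrayBuffer(blobs[i]);
    }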

+      var reader = new FileReader();
+      reader.onerror = function(e) {
+        fail('Error when reading blob ' + index + ': ' + reader.error);
+      }
+      reader.onloadend = function(e) {
+        if (reader.error) {
+          fail('Error when reading blob ' + index + ': ' + reader.error);
+          return;
+        }
+        numRead++;
+        debug('Finished reading blob ' + index);
+        shouldBe('event.target.result.byteLength', d.length + '');
+        shouldBe('new Uint8Array(event.target.result)',
+                 '[' + d + ']');
+        if (numRead >= blobs.length) {
+          done('Done!');
+        }
+      }
+      return reader;
+    }
+    var data;
|
pwnall
2016/12/01 01:12:13
Can you please define these closer to where you assign them?
dmurph
2016/12/01 20:40:58
Done.
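
A compact illustration of the declare-at-first-use shape being asked for; the data here is made up and only the structure matters:

    // Instead of declaring slicedData in a block at the top of the loop body:
    var original = new Uint8Array([1, 2, 3, 4]);
    var slicedData = original.slice(original.length / 2);  // declared where computed
    var slicedDataSize = slicedData.length;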

+    var origData;
+    var originalSize;
+    var slicedData;
+    var slicedDataSize;
+    var sandwichedData;
+    var sandwichedDataSize;
+
+    var rawIndex = i % numRawBlobs;
+    var sliceIndex = i % numSlices;
+
+    if (rawIndex % 5 == 0) {
+      originalSize = largeDataSize;
+      origData = largeData;
|
pwnall
2016/12/01 01:12:13
Why originalSize but origData?
dmurph
2016/12/01 20:40:59
Done.

+    } else if (rawIndex % 13 == 0) {
+      originalSize = veryLargeDataSize;
+      origData = veryLargeData;
+    } else {
+      originalSize = smallDataSize;
+      origData = new Uint8Array(smallDataSize);
+      origData[0] = rawIndex;
+      origData[1] = 2;
+    }
+
+    if (sliceIndex % 2 == 0) {
+      slicedData = origData.slice(originalSize / 2);
+    } else {
+      slicedData = origData.slice(0, originalSize / 2);
+    }
+    slicedDataSize = slicedData.length;
+
+    sandwichedDataSize = 7 + slicedDataSize;
+    var sandwichedData = new Uint8Array(sandwichedDataSize);
+    sandwichedData.set(getBytes("pre"), 0);
+    sandwichedData.set(slicedData, 3);
+    sandwichedData.set(getBytes("post"), 3 + slicedDataSize);
+
+    var data;
+    if (i < numRawBlobs) {
+      data = origData;
+    } else if (i < numRawBlobs + numSlices) {
+      data = slicedData;
+    } else {
+      data = sandwichedData;
+    }
+    genReader(i, data).readAsArrayBuffer(blobs[i]);
+  }
+};
+    </script>
+  </head>
+  <body onLoad="test()">
+    <div id="status">Starting...<br/></div>
+  </body>
+</html>