<!DOCTYPE html>
<html>
<head>
<title>Test dimensions of last frame with short frame durations</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="mediasource-util.js"></script>
</head>
<body>
<script>
const frames_per_keyframe = 10;
const default_sample_duration = 512;
// The frame rate is very high, to verify that the frame-dropping logic does
// not drop the final frame, but low enough that the "plus 1 microsecond"
// adjustment on the "remove window timestamp" does not cause the first frame
// to be removed.
const fps = 0.5e6;
const timescale = default_sample_duration * fps;
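// The media's earliest presentation time in timescale units, and the same
// value expressed in whole frame durations; used below to place
// appendWindowEnd at frame boundaries.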
const earliest_presentation_time = 1024;
const frame0_offset = earliest_presentation_time / default_sample_duration;
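// Byte offsets of the 32-bit big-endian timescale fields (media timescale
// and segment index timescale) inside the test media files.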
const media_timescale_start = 0x182;
const segment_index_timescale_start = 0x353;

let media_320, media_640;
promise_test(async t => {
  media_320 = await new Promise(
      r => MediaSourceUtil.fetchManifestAndData(
          t,
          `mp4/test-v-128k-320x240-30fps-${frames_per_keyframe}kfr-manifest.json`,
          (type, data) => r({type, data})));
  // Overwrite timescale to shorten frame durations.
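  // Both the media timescale and the segment index timescale are patched so
  // the two stay consistent.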
  MediaSourceUtil.WriteBigEndianInteger32ToUint8Array(
      timescale, media_320.data.subarray(media_timescale_start));
  MediaSourceUtil.WriteBigEndianInteger32ToUint8Array(
      timescale, media_320.data.subarray(segment_index_timescale_start));

  media_640 = await new Promise(
      r => MediaSourceUtil.fetchManifestAndData(
          t,
          `mp4/test-v-128k-640x480-30fps-${frames_per_keyframe}kfr-manifest.json`,
          (type, data) => r({type, data})));
  // Overwrite timescale to shorten frame durations.
  MediaSourceUtil.WriteBigEndianInteger32ToUint8Array(
      timescale, media_640.data.subarray(media_timescale_start));
  MediaSourceUtil.WriteBigEndianInteger32ToUint8Array(
      timescale, media_640.data.subarray(segment_index_timescale_start));
}, 'setup');

promise_test(async t => {
  assert_implements_optional(MediaSource.isTypeSupported(media_320.type),
                             'type supported');

  const v = document.createElement('video');
  const v_watcher = new EventWatcher(t, v, ['error', 'ended']);
  document.body.appendChild(v);
  const media_source = new MediaSource();
  const media_source_watcher =
      new EventWatcher(t, media_source, ['sourceopen']);
  v.src = URL.createObjectURL(media_source);
  await media_source_watcher.wait_for('sourceopen');

  const source_buffer = media_source.addSourceBuffer(media_320.type);
  assert_equals(source_buffer.mode, 'segments', 'source_buffer.mode');
  const source_buffer_watcher =
      new EventWatcher(t, source_buffer, ['updateend']);

  // Append the first 320x240 frame.
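  // With the shortened timescale the first frame spans
  // [frame0_offset / fps, (frame0_offset + 1) / fps); ending the append
  // window exactly at that frame's end keeps it and excludes every later
  // frame.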
  source_buffer.appendWindowEnd = (frame0_offset + 1) / fps;
  source_buffer.appendBuffer(media_320.data);
  await source_buffer_watcher.wait_for('updateend');
  assert_true(true, 'updateend');

  // Append the first 640x480 frame.
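  // Shift the 640x480 stream one frame duration later so its first frame
  // begins exactly where the buffered 320x240 frame ends, and widen the
  // append window by one more frame duration so only that single frame is
  // kept.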
  source_buffer.timestampOffset = 1 / fps;
  source_buffer.appendWindowEnd = (1 + frame0_offset + 1) / fps;
  source_buffer.appendBuffer(media_640.data);
  // Listen for canplaythrough directly rather than via v_watcher to work
  // around an extra canplaythrough event fired by Chrome.
  const canplaythrough_promise = new Promise(r => v.oncanplaythrough = r);
  await source_buffer_watcher.wait_for('updateend');

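  // endOfStream() adjusts the duration down to the end of the buffered
  // media, which here equals the final appendWindowEnd (checked below).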
  media_source.endOfStream();
  await canplaythrough_promise;
  assert_equals(v.videoWidth, 320, 'videoWidth 320');
  assert_approx_equals(v.duration,
                       source_buffer.appendWindowEnd,
                       1e-6,
                       'duration');

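  // Play to the end; once 'ended' fires, the intrinsic dimensions should
  // reflect the last rendered frame, i.e. the 640x480 one.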
  v.play();
  await v_watcher.wait_for('ended');
  assert_equals(v.currentTime, v.duration, 'v.currentTime');
  assert_equals(v.videoWidth, 640, 'videoWidth 640');
}, 'last frame dimensions');
</script>
</body>
</html>