<!doctype html>
<html>
| <head> |
| <script type="text/javascript" src="webrtc_test_utilities.js"></script> |
| <script type="text/javascript" src="depth_stream_test_utilities.js"></script> |
| <script type="text/javascript"> |
| |
// Returns the six cube map face targets of |gl|, ordered
// +X, -X, +Y, -Y, +Z, -Z.
function cubemapFaces(gl) {
  return ['POSITIVE_X', 'NEGATIVE_X',
          'POSITIVE_Y', 'NEGATIVE_Y',
          'POSITIVE_Z', 'NEGATIVE_Z'].map(function(face) {
    return gl['TEXTURE_CUBE_MAP_' + face];
  });
}
| |
// Shorthand for document.getElementById.
$ = id => document.getElementById(id);
| |
// Handler registered with the harness in webrtc_test_utilities.js;
// presumably invoked once all expected events have occurred -- TODO confirm.
// When it fires, report overall test success.
setAllEventsOccuredHandler(function() {
  reportTestSuccess();
});
| |
| // testVideoToImageBitmap and the tests below are layout tests that we |
| // run here because they require --use-fake-device-for-media-capture. |
// Gets a fake 16-bit depth stream, waits for it to play in 'local-view-1',
// then runs the createImageBitmap tests and finally stops the track.
function getDepthStreamAndCallCreateImageBitmap() {
  console.log('Calling getDepthStreamAndCallCreateImageBitmap');
  getFake16bitStream().then(function(stream) {
    var stopStream = function() {
      stream.getVideoTracks()[0].stop();
      waitForVideoToStop('local-view-1');
    };
    detectVideoInLocalView1(stream, function() {
      testVideoToImageBitmap('local-view-1', stopStream, failedCallback);
    });
  }, failedCallback);
}
| |
// Gets a fake 16-bit depth stream and verifies that the depth track's
// getSettings() reports the expected camera calibration values.
function getDepthStreamAndCameraCalibration() {
  console.log('Calling getDepthStreamAndCameraCalibration');
  getFake16bitStream().then(function(stream) {
    var depth_track = stream.getVideoTracks()[0];
    if (!depth_track)
      return failTest("No depth_track");
    var settings = depth_track.getSettings();
    var calibrated =
        settings && settings.depthNear == 0 && settings.depthFar == 65.535 &&
        settings.focalLengthX == 135.0 && settings.focalLengthY == 135.6;
    if (calibrated) {
      reportTestSuccess();
    } else {
      failTest("Unexpected depth_track settings:" + JSON.stringify(settings));
    }
  }, failedCallback);
}
| |
// Opens both a regular video stream and a fake 16-bit depth stream and
// verifies which constrainable properties are exposed per track: the
// explicitly constrained fields (deviceId, height, width) must appear on
// both tracks, while depth-specific fields must appear only on the depth
// track's getSettings().
function getBothStreamsAndCheckForFeaturesPresence() {
  console.log('Calling getBothStreamsAndCheckForFeaturesPresence');
  getConstraintsForDevice("fake_device_0")
  .then(function(constraints) {
    if (!constraints)
      return failTest("No fake video device found.");
    return navigator.mediaDevices.getUserMedia(constraints);
  }).then(function(video_stream) {
    getFake16bitStream().then(function(depth_stream) {
      if (video_stream.getVideoTracks().length != 1) {
        return failTest("Expected one video track, got " +
                        video_stream.getVideoTracks().length);
      }
      if (depth_stream.getVideoTracks().length != 1) {
        return failTest("Expected one depth track, got " +
                        depth_stream.getVideoTracks().length);
      }
      var video_track = video_stream.getVideoTracks()[0];
      var depth_track = depth_stream.getVideoTracks()[0];

      // We have specified the fields in getUserMedia constraints. Expect that
      // both tracks have them in constraints and settings.
      // (for-of replaces the original for-in: it iterates array values
      // directly rather than string index keys or inherited enumerables.)
      var expected_fields = ["deviceId", "height", "width"];
      for (var expected_field of expected_fields) {
        if (video_track.getSettings()[expected_field] === undefined) {
          return failTest(expected_field +
                          " missing from video track getSettings().");
        }
        if (video_track.getConstraints()[expected_field] === undefined) {
          return failTest(expected_field +
                          " missing from video track getConstraints().");
        }
        if (depth_track.getSettings()[expected_field] === undefined) {
          return failTest(expected_field +
                          " missing from depth track getSettings().");
        }
        if (depth_track.getConstraints()[expected_field] === undefined) {
          return failTest(expected_field +
                          " missing from depth track getConstraints().");
        }
      }

      // Depth-specific settings must be absent from the video track and
      // present on the depth track.
      var depth_fields = ["depthNear", "depthFar", "focalLengthX",
                          "focalLengthY"];
      for (var depth_field of depth_fields) {
        if (video_track.getSettings()[depth_field] !== undefined) {
          return failTest(depth_field +
                          " present in video track getSettings().");
        }
        if (depth_track.getSettings()[depth_field] === undefined) {
          return failTest(depth_field +
                          " missing from depth track getSettings().");
        }
      }
      reportTestSuccess();
    },
    failedCallback);
  },
  failedCallback);
}
| |
// Streams fake 16-bit depth to 'local-view-1' and runs the
// RGBA/UNSIGNED_BYTE texture upload tests on the playing video.
function depthStreamToRGBAUint8Texture() {
  console.log('Calling depthStreamToRGBAUint8Texture');
  getFake16bitStream().then(function(stream) {
    var stopStream = function() {
      stream.getVideoTracks()[0].stop();
      waitForVideoToStop('local-view-1');
    };
    detectVideoInLocalView1(stream, function() {
      testVideoToRGBA8Texture('local-view-1', stopStream, failedCallback);
    });
  }, failedCallback);
}
| |
// Streams fake 16-bit depth to 'local-view-1' and runs the RGBA/FLOAT
// texture upload tests on the playing video.
function depthStreamToRGBAFloatTexture() {
  console.log('Calling depthStreamToRGBAFloatTexture');
  getFake16bitStream().then(function(stream) {
    var stopStream = function() {
      stream.getVideoTracks()[0].stop();
      waitForVideoToStop('local-view-1');
    };
    detectVideoInLocalView1(stream, function() {
      testVideoToRGBA32FTexture('local-view-1', stopStream, failedCallback);
    });
  }, failedCallback);
}
| |
// Streams fake 16-bit depth to 'local-view-1' and runs the R32F texture
// upload tests (WebGL2) on the playing video.
function depthStreamToR32FloatTexture() {
  console.log('Calling depthStreamToR32FloatTexture');
  getFake16bitStream().then(function(stream) {
    var stopStream = function() {
      stream.getVideoTracks()[0].stop();
      waitForVideoToStop('local-view-1');
    };
    detectVideoInLocalView1(stream, function() {
      testVideoToR32FTexture('local-view-1', stopStream, failedCallback);
    });
  }, failedCallback);
}
| |
// Shared rejection handler: fails the whole test, reporting the error name.
function failedCallback(error) {
  var message = 'GetUserMedia call failed with error name ' + error.name;
  failTest(message);
}
| |
// Renders |stream| in the <video> element whose id is |videoElement|.
function attachMediaStream(stream, videoElement) {
  var video = $(videoElement);
  if ('srcObject' in video) {
    // Modern path: MediaStream is assigned directly.
    video.srcObject = stream;
  } else {
    // Legacy fallback only: URL.createObjectURL(MediaStream) was deprecated
    // and removed from modern browsers (Chrome 71+).
    video.src = URL.createObjectURL(stream);
  }
}
| |
// Attaches |stream| to the 'local-view-1' element and invokes |callback|
// once video playback is detected there.
function detectVideoInLocalView1(stream, callback) {
  var viewId = 'local-view-1';
  attachMediaStream(stream, viewId);
  detectVideoPlaying(viewId, callback);
}
| |
// Creates ImageBitmaps from the playing video under every combination of
// imageOrientation and premultiplyAlpha options, draws each into a canvas,
// and verifies the expected (possibly flipped) gradient. Calls |success| when
// all verifications resolve, |error({name: reason})| on the first failure.
function testVideoToImageBitmap(videoElementName, success, error) {
  var video = $(videoElementName);
  var canvas = document.createElement('canvas');
  canvas.width = 96;
  canvas.height = 96;
  document.body.appendChild(canvas);
  // Each case: createImageBitmap options (null = defaults) and whether the
  // produced bitmap should be vertically flipped.
  var cases = [
    {options: null, flipped: false},
    {options: {imageOrientation: "none", premultiplyAlpha: "premultiply"},
     flipped: false},
    {options: {imageOrientation: "none", premultiplyAlpha: "default"},
     flipped: false},
    {options: {imageOrientation: "none", premultiplyAlpha: "none"},
     flipped: false},
    {options: {imageOrientation: "flipY", premultiplyAlpha: "premultiply"},
     flipped: true},
    {options: {imageOrientation: "flipY", premultiplyAlpha: "default"},
     flipped: true},
    {options: {imageOrientation: "flipY", premultiplyAlpha: "none"},
     flipped: true}
  ];
  var tests = cases.map(function(c) {
    var promise = c.options ? createImageBitmap(video, c.options)
                            : createImageBitmap(video);
    return promise.then(function(imageBitmap) {
      return runImageBitmapTest(imageBitmap, canvas, c.flipped);
    });
  });
  // BUG FIX: pass |success| as the fulfillment handler instead of invoking it
  // immediately. The original `.then(success(), ...)` called success() before
  // any bitmap test had settled, reporting success prematurely.
  return Promise.all(tests).then(success, function(reason) {
    return error({name: reason});
  });
}
| |
// Draws |bitmap| into |canvas| and verifies the expected gradient, flipped
// vertically when |flip_y| is set. The fake capture device produces a 96x96
// depth gradient; see Draw16BitGradient in fake_video_capture_device.cc.
function runImageBitmapTest(bitmap, canvas, flip_y) {
  var context = canvas.getContext('2d');
  context.drawImage(bitmap, 0, 0);
  var pixels =
      context.getImageData(0, 0, canvas.width, canvas.height).data;
  var color_step = 255.0 / (canvas.width + canvas.height);
  return verifyPixels(pixels, canvas.width, canvas.height, flip_y,
                      color_step, 255, 2, "ImageBitmap");
}
| |
// Runs the texture upload tests with RGBA/FLOAT; requires WebGL1 plus the
// OES_texture_float extension.
function testVideoToRGBA32FTexture(videoElementName, success, error) {
  var video = $(videoElementName);
  var canvas = document.createElement('canvas');
  canvas.width = canvas.height = 96;
  var gl = canvas.getContext('webgl');
  if (!gl)
    return error({name:"WebGL is not available."});
  if (!gl.getExtension("OES_texture_float"))
    return error({name:"OES_texture_float extension is not available."});
  return testVideoToTexture(gl, video, gl.RGBA, gl.RGBA, gl.FLOAT,
                            readAndVerifyRGBA32F, success, error);
}
| |
// Runs the texture upload tests with RGBA/UNSIGNED_BYTE on a WebGL1 context.
function testVideoToRGBA8Texture(videoElementName, success, error) {
  var video = $(videoElementName);
  var canvas = document.createElement('canvas');
  canvas.width = canvas.height = 96;
  var gl = canvas.getContext('webgl');
  if (!gl)
    return error({name:"WebGL is not available."});
  return testVideoToTexture(gl, video, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE,
                            readAndVerifyRGBA8, success, error);
}
| |
// Runs the texture upload tests with R32F/RED/FLOAT; requires WebGL2 plus
// the EXT_color_buffer_float extension for float readback.
function testVideoToR32FTexture(videoElementName, success, error) {
  var video = $(videoElementName);
  var canvas = document.createElement('canvas');
  canvas.width = canvas.height = 96;
  var gl = canvas.getContext('webgl2');
  if (!gl)
    return error({name:"WebGL2 is not available."});
  if (!gl.getExtension('EXT_color_buffer_float'))
    return error({name:"EXT_color_buffer_float extension is not available."});
  return testVideoToTexture(gl, video, gl.R32F, gl.RED, gl.FLOAT,
                            readAndVerifyR32F, success, error);
}
| |
// Uploads |video| to TEXTURE_2D and cube map textures, via both texImage2D
// and texSubImage2D and under both flip_y settings, attaches each texture to
// a framebuffer and reads it back with |readAndVerifyFunction|. Calls
// |success| when every verification resolves, |error({name: reason})| on the
// first failure.
function testVideoToTexture(gl, video, internalformat, format, type,
                            readAndVerifyFunction, success, error) {
  // Create framebuffer that we will use for reading back the texture.
  var fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  // Hoisted: cubemapFaces(gl) is pure, no need to recompute per iteration.
  var faces = cubemapFaces(gl);
  var tests = [];
  // Premultiply alpha is ignored but we just test both values.
  var cases = [
    {flip_y: false, premultiply_alpha: true},
    {flip_y: true, premultiply_alpha: false}
  ];
  // BUG FIX: the original used `for (var i in cases)` and then reused `var i`
  // for the inner face loops -- with `var` these share one binding, so the
  // outer index was clobbered every iteration. Use distinct names.
  for (var c = 0; c < cases.length; ++c) {
    var flip_y = cases[c].flip_y;
    var premultiply = cases[c].premultiply_alpha;
    uploadVideoToTexture2D(gl, video, internalformat, format, type, flip_y,
                           premultiply);
    tests.push(readAndVerifyFunction(gl, video.width, video.height, flip_y,
                                     "TexImage_TEXTURE_2D"));
    uploadVideoToSubTexture2D(gl, video, internalformat, format, type, flip_y,
                              premultiply);
    tests.push(readAndVerifyFunction(gl, video.width, video.height, flip_y,
                                     "TexSubImage_TEXTURE_2D"));

    // cubemap texImage2D.
    var tex = uploadVideoToTextureCubemap(gl, video, internalformat, format,
                                          type, flip_y, premultiply);
    for (var i = 0; i < faces.length; ++i) {
      // Attach the texture to framebuffer for readback.
      gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                              faces[i], tex, 0);
      tests.push(readAndVerifyFunction(gl, video.width, video.height,
                                       flip_y,
                                       "TexImage_" + faces[i]));
    }

    // cubemap texSubImage2D.
    tex = uploadVideoToSubTextureCubemap(gl, video, internalformat, format,
                                         type, flip_y, premultiply);
    for (var i = 0; i < faces.length; ++i) {
      // Attach the texture to framebuffer for readback.
      gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                              faces[i], tex, 0);
      tests.push(readAndVerifyFunction(gl, video.width, video.height,
                                       flip_y,
                                       "TexSubImage_" + faces[i]));
    }
  }
  // BUG FIX: pass |success| as the fulfillment handler instead of invoking it
  // immediately; `.then(success(), ...)` reported success before any
  // verification promise had settled.
  return Promise.all(tests).then(success, function(reason) {
    return error({name: reason});
  });
}
| |
// Test setup helper method: creates a texture bound to |target|
// (gl.TEXTURE_2D or gl.TEXTURE_CUBE_MAP), with clamped wrapping and linear
// filtering, and sets the unpack flip/premultiply state for the following
// upload. |video| is unused but kept for interface compatibility.
function createTexture(gl, target, video, flip_y, premultiply_alpha) {
  var texture = gl.createTexture();
  gl.bindTexture(target, texture);
  [[gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE],
   [gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
   [gl.TEXTURE_MAG_FILTER, gl.LINEAR],
   [gl.TEXTURE_MIN_FILTER, gl.LINEAR]].forEach(function(entry) {
    gl.texParameteri(target, entry[0], entry[1]);
  });
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flip_y);
  gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, premultiply_alpha);
  return texture;
}
| |
// Uploads |video| into a new TEXTURE_2D via texImage2D, leaving the texture
// attached to the currently bound framebuffer for readback.
function uploadVideoToTexture2D(gl, video, internalformat, format, type,
                                flip_y, premultiply_alpha) {
  var texture = createTexture(gl, gl.TEXTURE_2D, video, flip_y,
                              premultiply_alpha);
  // Attach the texture to framebuffer for readback.
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
                          texture, 0);
  gl.texImage2D(gl.TEXTURE_2D, 0, internalformat, format, type, video);
  return texture;
}
| |
// Uploads |video| into a new TEXTURE_2D via texSubImage2D: the texture
// storage is first allocated empty with texImage2D, then the video frame is
// written into it. The texture stays framebuffer-attached for readback.
function uploadVideoToSubTexture2D(gl, video, internalformat, format, type,
                                   flip_y, premultiply_alpha) {
  var texture = createTexture(gl, gl.TEXTURE_2D, video, flip_y,
                              premultiply_alpha);
  // Attach the texture to framebuffer for readback.
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
                          texture, 0);
  // Allocate storage, then fill it from the video.
  gl.texImage2D(gl.TEXTURE_2D, 0, internalformat, video.width, video.height,
                0, format, type, null);
  gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, format, type, video);
  return texture;
}
| |
// Uploads |video| into every face of a new cube map texture via texImage2D.
function uploadVideoToTextureCubemap(gl, video, internalformat, format, type,
                                     flip_y, premultiply_alpha) {
  var texture = createTexture(gl, gl.TEXTURE_CUBE_MAP, video, flip_y,
                              premultiply_alpha);
  cubemapFaces(gl).forEach(function(face) {
    gl.texImage2D(face, 0, internalformat, format, type, video);
  });
  return texture;
}
| |
// Uploads |video| into every face of a new cube map texture via
// texSubImage2D, allocating each face's storage empty first.
function uploadVideoToSubTextureCubemap(gl, video, internalformat, format,
                                        type, flip_y, premultiply_alpha) {
  var texture = createTexture(gl, gl.TEXTURE_CUBE_MAP, video, flip_y,
                              premultiply_alpha);
  cubemapFaces(gl).forEach(function(face) {
    gl.texImage2D(face, 0, internalformat, video.width,
                  video.height, 0, format, type, null);
    gl.texSubImage2D(face, 0, 0, 0, format, type, video);
  });
  return texture;
}
| |
// Reads back the current framebuffer as RGBA/UNSIGNED_BYTE and verifies the
// expected gradient pixels.
function readAndVerifyRGBA8(gl, width, height, flip_y, test_name) {
  var pixels = new Uint8Array(4 * width * height);
  gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
  var step = 255.0 / (width + height);
  return verifyPixels(pixels, width, height, flip_y, step,
                      255 /*wrap_around*/, 2 /*tolerance*/, test_name);
}
| |
// Reads back the current framebuffer as RGBA/FLOAT and verifies the expected
// normalized ([0, 1]) gradient pixels.
function readAndVerifyRGBA32F(gl, width, height, flip_y, test_name) {
  var pixels = new Float32Array(4 * width * height);
  gl.readPixels(0, 0, width, height, gl.RGBA, gl.FLOAT, pixels);
  var step = 1.0 / (width + height);
  return verifyPixels(pixels, width, height, flip_y, step,
                      1.0 /*wrap_around*/, 1.5/65535 /*tolerance*/,
                      test_name);
}
| |
// Reads back the current (R32F-attached) framebuffer with the RGBA/FLOAT
// readback format and verifies only the red channel against the expected
// normalized gradient.
function readAndVerifyR32F(gl, width, height, flip_y, test_name) {
  var pixels = new Float32Array(4 * width * height);
  gl.readPixels(0, 0, width, height, gl.RGBA, gl.FLOAT, pixels);
  var step = 1.0 / (width + height);
  return verifyPixelsRed(pixels, width, height, flip_y, step,
                         1.0 /*wrap_around*/, 1.5 / 65535 /*tolerance*/,
                         test_name);
}
| |
// Page entry point: dispatches to a texture test selected by the
// `query=...` parameter in the page URL; does nothing when absent/unknown.
function onLoad() {
  var match = /query=(.*)/.exec(window.location.href);
  if (!match)
    return;
  var tests = {
    "RGBAUint8": depthStreamToRGBAUint8Texture,
    "RGBAFloat": depthStreamToRGBAFloatTexture,
    "R32Float": depthStreamToR32FloatTexture
  };
  var test = tests[match[1]];
  if (test)
    test();
}
| </script> |
| </head> |
| <body onload="onLoad()"> |
| <table border="0"> |
| <tr> |
| <td><video id="local-view-1" width="96" height="96" autoplay |
| style="display:none"></video></td> |
| <!-- The canvas is used to detect when video starts and stops. --> |
| <td><canvas id="local-view-1-canvas" width="96" height="96" |
| style="display:none"></canvas></td> |
| </tr> |
| </table> |
| </body> |
| </html> |