(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
module.exports = {
vertex: "precision lowp float;\n\nattribute vec2 aPosition;\nattribute vec2 aLumaPosition;\nattribute vec2 aChromaPosition;\nvarying vec2 vLumaPosition;\nvarying vec2 vChromaPosition;\nvoid main() {\n gl_Position = vec4(aPosition, 0, 1);\n vLumaPosition = aLumaPosition;\n vChromaPosition = aChromaPosition;\n}\n",
fragment: "// inspired by https://github.com/mbebenita/Broadway/blob/master/Player/canvas.js\n\nprecision lowp float;\n\nuniform sampler2D uTextureY;\nuniform sampler2D uTextureCb;\nuniform sampler2D uTextureCr;\nvarying vec2 vLumaPosition;\nvarying vec2 vChromaPosition;\nvoid main() {\n // Y, Cb, and Cr planes are uploaded as LUMINANCE textures.\n float fY = texture2D(uTextureY, vLumaPosition).x;\n float fCb = texture2D(uTextureCb, vChromaPosition).x;\n float fCr = texture2D(uTextureCr, vChromaPosition).x;\n\n // Premultipy the Y...\n float fYmul = fY * 1.1643828125;\n\n // And convert that to RGB!\n gl_FragColor = vec4(\n fYmul + 1.59602734375 * fCr - 0.87078515625,\n fYmul - 0.39176171875 * fCb - 0.81296875 * fCr + 0.52959375,\n fYmul + 2.017234375 * fCb - 1.081390625,\n 1\n );\n}\n",
vertexStripe: "precision lowp float;\n\nattribute vec2 aPosition;\nattribute vec2 aTexturePosition;\nvarying vec2 vTexturePosition;\n\nvoid main() {\n gl_Position = vec4(aPosition, 0, 1);\n vTexturePosition = aTexturePosition;\n}\n",
fragmentStripe: "// extra 'stripe' texture fiddling to work around IE 11's poor performance on gl.LUMINANCE and gl.ALPHA textures\n\nprecision lowp float;\n\nuniform sampler2D uStripe;\nuniform sampler2D uTexture;\nvarying vec2 vTexturePosition;\nvoid main() {\n // Y, Cb, and Cr planes are mapped into a pseudo-RGBA texture\n // so we can upload them without expanding the bytes on IE 11\n // which doesn't allow LUMINANCE or ALPHA textures\n // The stripe textures mark which channel to keep for each pixel.\n // Each texture extraction will contain the relevant value in one\n // channel only.\n\n float fLuminance = dot(\n texture2D(uStripe, vTexturePosition),\n texture2D(uTexture, vTexturePosition)\n );\n\n gl_FragColor = vec4(fLuminance, fLuminance, fLuminance, 1);\n}\n"
};
},{}],2:[function(require,module,exports){
(function() {
"use strict";
/**
* Create a YUVCanvas and attach it to an HTML5 canvas element.
*
* This will take over the drawing context of the canvas and may turn
* it into a WebGL 3d canvas if possible. Do not attempt to use the
* drawing context directly after this.
*
* @param {HTMLCanvasElement} canvas - HTML canvas element to attach to
* @param {YUVCanvasOptions} options - map of options
* @throws exception if WebGL requested but unavailable
* @constructor
* @abstract
*/
function FrameSink(canvas, options) {
throw new Error('abstract');
}
/**
* Draw a single YUV frame on the underlying canvas, converting to RGB.
* If necessary the canvas will be resized to the optimal pixel size
* for the given buffer's format.
*
* @param {YUVBuffer} buffer - the YUV buffer to draw
* @see {@link https://www.npmjs.com/package/yuv-buffer|yuv-buffer} for format
*/
FrameSink.prototype.drawFrame = function(buffer) {
throw new Error('abstract');
};
/**
* Clear the canvas using appropriate underlying 2d or 3d context.
*/
FrameSink.prototype.clear = function() {
throw new Error('abstract');
};
module.exports = FrameSink;
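// For reference, the frame object drawFrame expects looks like the sketch
// below, matching the fields read elsewhere in this bundle (format.width,
// buffer.y.bytes, buffer.y.stride, ...); see the yuv-buffer package for the
// authoritative definition:
//
// {
//   format: {
//     width, height,               // encoded frame size
//     chromaWidth, chromaHeight,   // chroma plane size
//     cropLeft, cropTop,           // crop origin within the encoded frame
//     cropWidth, cropHeight,       // visible region
//     displayWidth, displayHeight  // intended display size
//   },
//   y: { bytes: Uint8Array, stride: number },
//   u: { bytes: Uint8Array, stride: number },  // Cb plane
//   v: { bytes: Uint8Array, stride: number }   // Cr plane
// }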
})();
},{}],3:[function(require,module,exports){
/*
Copyright (c) 2014-2016 Brion Vibber <brion@pobox.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
(function() {
"use strict";
var FrameSink = require('./FrameSink.js'),
YCbCr = require('./YCbCr.js');
/**
* @param {HTMLCanvasElement} canvas - HTML canvas element to attach to
* @constructor
*/
function SoftwareFrameSink(canvas) {
var self = this,
ctx = canvas.getContext('2d'),
imageData = null,
resampleCanvas = null,
resampleContext = null;
function initImageData(width, height) {
imageData = ctx.createImageData(width, height);
// Prefill the alpha to opaque
var data = imageData.data,
pixelCount = width * height * 4;
for (var i = 0; i < pixelCount; i += 4) {
data[i + 3] = 255;
}
}
function initResampleCanvas(cropWidth, cropHeight) {
resampleCanvas = document.createElement('canvas');
resampleCanvas.width = cropWidth;
resampleCanvas.height = cropHeight;
resampleContext = resampleCanvas.getContext('2d');
}
/**
* Actually draw a frame into the canvas.
* @param {YUVFrame} buffer - YUV frame buffer object to draw
*/
self.drawFrame = function drawFrame(buffer) {
var format = buffer.format;
if (canvas.width !== format.displayWidth || canvas.height !== format.displayHeight) {
// Keep the canvas at the right size...
canvas.width = format.displayWidth;
canvas.height = format.displayHeight;
}
if (imageData === null ||
imageData.width != format.width ||
imageData.height != format.height) {
initImageData(format.width, format.height);
}
// YUV -> RGB over the entire encoded frame
YCbCr.convertYCbCr(buffer, imageData.data);
var resample = (format.cropWidth != format.displayWidth || format.cropHeight != format.displayHeight);
var drawContext;
if (resample) {
// hack for non-square aspect-ratio
// putImageData doesn't resample, so we have to draw in two steps.
if (!resampleCanvas) {
initResampleCanvas(format.cropWidth, format.cropHeight);
}
drawContext = resampleContext;
} else {
drawContext = ctx;
}
// Draw cropped frame to either the final or temporary canvas
drawContext.putImageData(imageData,
-format.cropLeft, -format.cropTop, // must offset the offset
format.cropLeft, format.cropTop,
format.cropWidth, format.cropHeight);
if (resample) {
ctx.drawImage(resampleCanvas, 0, 0, format.displayWidth, format.displayHeight);
}
};
self.clear = function() {
ctx.clearRect(0, 0, canvas.width, canvas.height);
};
return self;
}
SoftwareFrameSink.prototype = Object.create(FrameSink.prototype);
module.exports = SoftwareFrameSink;
})();
},{"./FrameSink.js":2,"./YCbCr.js":5}],4:[function(require,module,exports){
/*
Copyright (c) 2014-2016 Brion Vibber <brion@pobox.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
(function() {
"use strict";
var FrameSink = require('./FrameSink.js'),
shaders = require('../build/shaders.js');
/**
* Warning: canvas must not have been used for 2d drawing prior!
*
* @param {HTMLCanvasElement} canvas - HTML canvas element to attach to
* @constructor
*/
function WebGLFrameSink(canvas) {
var self = this,
gl = WebGLFrameSink.contextForCanvas(canvas),
debug = false; // swap this to enable more error checks, which can slow down rendering
if (gl === null) {
throw new Error('WebGL unavailable');
}
// GL!
function checkError() {
if (debug) {
err = gl.getError();
if (err !== 0) {
throw new Error("GL error " + err);
}
}
}
function compileShader(type, source) {
var shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
var err = gl.getShaderInfoLog(shader);
gl.deleteShader(shader);
throw new Error('GL shader compilation for ' + type + ' failed: ' + err);
}
return shader;
}
var program,
unpackProgram,
err;
// In the world of GL there are no rectangles.
// There are only triangles.
// THERE IS NO SPOON.
var rectangle = new Float32Array([
// First triangle (top left, clockwise)
-1.0, -1.0,
+1.0, -1.0,
-1.0, +1.0,
// Second triangle (bottom right, clockwise)
-1.0, +1.0,
+1.0, -1.0,
+1.0, +1.0
]);
var textures = {};
var framebuffers = {};
var stripes = {};
var buf, positionLocation, unpackPositionLocation;
var unpackTexturePositionBuffer, unpackTexturePositionLocation;
var stripeLocation, unpackTextureLocation;
var lumaPositionBuffer, lumaPositionLocation;
var chromaPositionBuffer, chromaPositionLocation;
function createOrReuseTexture(name) {
if (!textures[name]) {
textures[name] = gl.createTexture();
}
return textures[name];
}
function uploadTexture(name, width, height, data) {
var texture = createOrReuseTexture(name);
gl.activeTexture(gl.TEXTURE0);
if (WebGLFrameSink.stripe) {
var uploadTemp = !textures[name + '_temp'];
var tempTexture = createOrReuseTexture(name + '_temp');
gl.bindTexture(gl.TEXTURE_2D, tempTexture);
if (uploadTemp) {
// new texture
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
gl.RGBA, // internal format
width / 4,
height,
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
data // data!
);
} else {
// update texture
gl.texSubImage2D(
gl.TEXTURE_2D,
0, // mip level
0, // x offset
0, // y offset
width / 4,
height,
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
data // data!
);
}
var stripeTexture = textures[name + '_stripe'];
var uploadStripe = !stripeTexture;
if (uploadStripe) {
stripeTexture = createOrReuseTexture(name + '_stripe');
}
gl.bindTexture(gl.TEXTURE_2D, stripeTexture);
if (uploadStripe) {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
gl.RGBA, // internal format
width,
1,
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, //type
buildStripe(width, 1) // data!
);
}
} else {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
gl.LUMINANCE, // internal format
width,
height,
0, // border
gl.LUMINANCE, // format
gl.UNSIGNED_BYTE, //type
data // data!
);
}
}
function unpackTexture(name, width, height) {
var texture = textures[name];
// Upload to a temporary RGBA texture, then unpack it.
// This is faster than CPU-side swizzling in ANGLE on Windows.
gl.useProgram(unpackProgram);
var fb = framebuffers[name];
if (!fb) {
// Create a framebuffer and an empty target size
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
gl.RGBA, // internal format
width,
height,
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, //type
null // data!
);
fb = framebuffers[name] = gl.createFramebuffer();
}
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
var tempTexture = textures[name + '_temp'];
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, tempTexture);
gl.uniform1i(unpackTextureLocation, 1);
var stripeTexture = textures[name + '_stripe'];
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_2D, stripeTexture);
gl.uniform1i(stripeLocation, 2);
// Rectangle geometry
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
// Set up the texture geometry...
gl.bindBuffer(gl.ARRAY_BUFFER, unpackTexturePositionBuffer);
gl.enableVertexAttribArray(unpackTexturePositionLocation);
gl.vertexAttribPointer(unpackTexturePositionLocation, 2, gl.FLOAT, false, 0, 0);
// Draw into the target texture...
gl.viewport(0, 0, width, height);
gl.drawArrays(gl.TRIANGLES, 0, rectangle.length / 2);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
function attachTexture(name, register, index) {
gl.activeTexture(register);
gl.bindTexture(gl.TEXTURE_2D, textures[name]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.uniform1i(gl.getUniformLocation(program, name), index);
}
function buildStripe(width) {
if (stripes[width]) {
return stripes[width];
}
var len = width,
out = new Uint32Array(len);
for (var i = 0; i < len; i += 4) {
out[i ] = 0x000000ff;
out[i + 1] = 0x0000ff00;
out[i + 2] = 0x00ff0000;
out[i + 3] = 0xff000000;
}
return stripes[width] = new Uint8Array(out.buffer);
}
function initProgram(vertexShaderSource, fragmentShaderSource) {
var vertexShader = compileShader(gl.VERTEX_SHADER, vertexShaderSource);
var fragmentShader = compileShader(gl.FRAGMENT_SHADER, fragmentShaderSource);
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
var err = gl.getProgramInfoLog(program);
gl.deleteProgram(program);
throw new Error('GL program linking failed: ' + err);
}
return program;
}
function init() {
if (WebGLFrameSink.stripe) {
unpackProgram = initProgram(shaders.vertexStripe, shaders.fragmentStripe);
unpackPositionLocation = gl.getAttribLocation(unpackProgram, 'aPosition');
unpackTexturePositionBuffer = gl.createBuffer();
var textureRectangle = new Float32Array([
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, unpackTexturePositionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, textureRectangle, gl.STATIC_DRAW);
unpackTexturePositionLocation = gl.getAttribLocation(unpackProgram, 'aTexturePosition');
stripeLocation = gl.getUniformLocation(unpackProgram, 'uStripe');
unpackTextureLocation = gl.getUniformLocation(unpackProgram, 'uTexture');
}
program = initProgram(shaders.vertex, shaders.fragment);
buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, rectangle, gl.STATIC_DRAW);
positionLocation = gl.getAttribLocation(program, 'aPosition');
lumaPositionBuffer = gl.createBuffer();
lumaPositionLocation = gl.getAttribLocation(program, 'aLumaPosition');
chromaPositionBuffer = gl.createBuffer();
chromaPositionLocation = gl.getAttribLocation(program, 'aChromaPosition');
}
/**
* Actually draw a frame.
* @param {YUVFrame} buffer - YUV frame buffer object
*/
self.drawFrame = function(buffer) {
var format = buffer.format;
var formatUpdate = (!program || canvas.width !== format.displayWidth || canvas.height !== format.displayHeight);
if (formatUpdate) {
// Keep the canvas at the right size...
canvas.width = format.displayWidth;
canvas.height = format.displayHeight;
self.clear();
}
if (!program) {
init();
}
if (formatUpdate) {
var setupTexturePosition = function(buffer, location, texWidth) {
// Warning: assumes that the stride for Cb and Cr is the same size in output pixels
var textureX0 = format.cropLeft / texWidth;
var textureX1 = (format.cropLeft + format.cropWidth) / texWidth;
var textureY0 = (format.cropTop + format.cropHeight) / format.height;
var textureY1 = format.cropTop / format.height;
var textureRectangle = new Float32Array([
textureX0, textureY0,
textureX1, textureY0,
textureX0, textureY1,
textureX0, textureY1,
textureX1, textureY0,
textureX1, textureY1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, textureRectangle, gl.STATIC_DRAW);
};
setupTexturePosition(
lumaPositionBuffer,
lumaPositionLocation,
buffer.y.stride);
setupTexturePosition(
chromaPositionBuffer,
chromaPositionLocation,
buffer.u.stride * format.width / format.chromaWidth);
}
// Create or update the textures...
uploadTexture('uTextureY', buffer.y.stride, format.height, buffer.y.bytes);
uploadTexture('uTextureCb', buffer.u.stride, format.chromaHeight, buffer.u.bytes);
uploadTexture('uTextureCr', buffer.v.stride, format.chromaHeight, buffer.v.bytes);
if (WebGLFrameSink.stripe) {
// Unpack the textures after upload to avoid blocking on GPU
unpackTexture('uTextureY', buffer.y.stride, format.height);
unpackTexture('uTextureCb', buffer.u.stride, format.chromaHeight);
unpackTexture('uTextureCr', buffer.v.stride, format.chromaHeight);
}
// Set up the rectangle and draw it
gl.useProgram(program);
gl.viewport(0, 0, canvas.width, canvas.height);
attachTexture('uTextureY', gl.TEXTURE0, 0);
attachTexture('uTextureCb', gl.TEXTURE1, 1);
attachTexture('uTextureCr', gl.TEXTURE2, 2);
// Set up geometry
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
// Set up the texture geometry...
gl.bindBuffer(gl.ARRAY_BUFFER, lumaPositionBuffer);
gl.enableVertexAttribArray(lumaPositionLocation);
gl.vertexAttribPointer(lumaPositionLocation, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, chromaPositionBuffer);
gl.enableVertexAttribArray(chromaPositionLocation);
gl.vertexAttribPointer(chromaPositionLocation, 2, gl.FLOAT, false, 0, 0);
// Aaaaand draw stuff.
gl.drawArrays(gl.TRIANGLES, 0, rectangle.length / 2);
};
self.clear = function() {
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clear(gl.COLOR_BUFFER_BIT);
};
self.clear();
return self;
}
// For Windows; luminance and alpha textures are ssllooww to upload,
// so we pack into RGBA and unpack in the shaders.
//
// This seems to affect all browsers on Windows, probably due to fun
// mismatches between GL and D3D.
WebGLFrameSink.stripe = (function() {
if (navigator.userAgent.indexOf('Windows') !== -1) {
return true;
}
return false;
})();
WebGLFrameSink.contextForCanvas = function(canvas) {
var options = {
// Don't trigger discrete GPU in multi-GPU systems
preferLowPowerToHighPerformance: true,
powerPreference: 'low-power',
// Don't try to use software GL rendering!
failIfMajorPerformanceCaveat: true,
// In case we need to capture the resulting output.
preserveDrawingBuffer: true
};
return canvas.getContext('webgl', options) || canvas.getContext('experimental-webgl', options);
};
/**
* Static function to check if WebGL will be available with appropriate features.
*
* @returns {boolean} - true if available
*/
WebGLFrameSink.isAvailable = function() {
var canvas = document.createElement('canvas'),
gl;
canvas.width = 1;
canvas.height = 1;
try {
gl = WebGLFrameSink.contextForCanvas(canvas);
} catch (e) {
return false;
}
if (gl) {
var register = gl.TEXTURE0,
width = 4,
height = 4,
texture = gl.createTexture(),
data = new Uint8Array(width * height),
texWidth = WebGLFrameSink.stripe ? (width / 4) : width,
format = WebGLFrameSink.stripe ? gl.RGBA : gl.LUMINANCE,
filter = WebGLFrameSink.stripe ? gl.NEAREST : gl.LINEAR;
gl.activeTexture(register);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, filter);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, filter);
gl.texImage2D(
gl.TEXTURE_2D,
0, // mip level
format, // internal format
texWidth,
height,
0, // border
format, // format
gl.UNSIGNED_BYTE, //type
data // data!
);
var err = gl.getError();
if (err) {
// Doesn't support luminance textures?
return false;
} else {
return true;
}
} else {
return false;
}
};
WebGLFrameSink.prototype = Object.create(FrameSink.prototype);
module.exports = WebGLFrameSink;
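// Usage sketch — feature-check first, and note the constructor warning above:
// the canvas must not have been used for 2d drawing beforehand.
//
// if (WebGLFrameSink.isAvailable()) {
//   var sink = new WebGLFrameSink(canvas);
//   sink.drawFrame(frame); // frame: a YUVFrame object as described in FrameSink
// }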
})();
},{"../build/shaders.js":1,"./FrameSink.js":2}],5:[function(require,module,exports){
/*
Copyright (c) 2014-2019 Brion Vibber <brion@pobox.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
(function() {
"use strict";
var depower = require('./depower.js');
/**
* Basic YCbCr->RGB conversion
*
* @author Brion Vibber <brion@pobox.com>
* @copyright 2014-2019
* @license MIT-style
*
* @param {YUVFrame} buffer - input frame buffer
* @param {Uint8ClampedArray} output - array to draw RGBA into
* Assumes that the output array already has alpha channel set to opaque.
*/
function convertYCbCr(buffer, output) {
var width = buffer.format.width | 0,
height = buffer.format.height | 0,
hdec = depower(buffer.format.width / buffer.format.chromaWidth) | 0,
vdec = depower(buffer.format.height / buffer.format.chromaHeight) | 0,
bytesY = buffer.y.bytes,
bytesCb = buffer.u.bytes,
bytesCr = buffer.v.bytes,
strideY = buffer.y.stride | 0,
strideCb = buffer.u.stride | 0,
strideCr = buffer.v.stride | 0,
outStride = width << 2,
YPtr = 0, Y0Ptr = 0, Y1Ptr = 0,
CbPtr = 0, CrPtr = 0,
outPtr = 0, outPtr0 = 0, outPtr1 = 0,
colorCb = 0, colorCr = 0,
multY = 0, multCrR = 0, multCbCrG = 0, multCbB = 0,
x = 0, y = 0, xdec = 0, ydec = 0;
if (hdec == 1 && vdec == 1) {
// Optimize for 4:2:0, which is most common
outPtr0 = 0;
outPtr1 = outStride;
ydec = 0;
for (y = 0; y < height; y += 2) {
Y0Ptr = y * strideY | 0;
Y1Ptr = Y0Ptr + strideY | 0;
CbPtr = ydec * strideCb | 0;
CrPtr = ydec * strideCr | 0;
for (x = 0; x < width; x += 2) {
colorCb = bytesCb[CbPtr++] | 0;
colorCr = bytesCr[CrPtr++] | 0;
// Quickie YUV conversion
// https://en.wikipedia.org/wiki/YCbCr#ITU-R_BT.601_conversion
// multiplied by 256 for integer-friendliness
multCrR = (409 * colorCr | 0) - 57088 | 0;
multCbCrG = (100 * colorCb | 0) + (208 * colorCr | 0) - 34816 | 0;
multCbB = (516 * colorCb | 0) - 70912 | 0;
multY = 298 * bytesY[Y0Ptr++] | 0;
output[outPtr0 ] = (multY + multCrR) >> 8;
output[outPtr0 + 1] = (multY - multCbCrG) >> 8;
output[outPtr0 + 2] = (multY + multCbB) >> 8;
outPtr0 += 4;
multY = 298 * bytesY[Y0Ptr++] | 0;
output[outPtr0 ] = (multY + multCrR) >> 8;
output[outPtr0 + 1] = (multY - multCbCrG) >> 8;
output[outPtr0 + 2] = (multY + multCbB) >> 8;
outPtr0 += 4;
multY = 298 * bytesY[Y1Ptr++] | 0;
output[outPtr1 ] = (multY + multCrR) >> 8;
output[outPtr1 + 1] = (multY - multCbCrG) >> 8;
output[outPtr1 + 2] = (multY + multCbB) >> 8;
outPtr1 += 4;
multY = 298 * bytesY[Y1Ptr++] | 0;
output[outPtr1 ] = (multY + multCrR) >> 8;
output[outPtr1 + 1] = (multY - multCbCrG) >> 8;
output[outPtr1 + 2] = (multY + multCbB) >> 8;
outPtr1 += 4;
}
outPtr0 += outStride;
outPtr1 += outStride;
ydec++;
}
} else {
outPtr = 0;
for (y = 0; y < height; y++) {
xdec = 0;
ydec = y >> vdec;
YPtr = y * strideY | 0;
CbPtr = ydec * strideCb | 0;
CrPtr = ydec * strideCr | 0;
for (x = 0; x < width; x++) {
xdec = x >> hdec;
colorCb = bytesCb[CbPtr + xdec] | 0;
colorCr = bytesCr[CrPtr + xdec] | 0;
// Quickie YUV conversion
// https://en.wikipedia.org/wiki/YCbCr#ITU-R_BT.601_conversion
// multiplied by 256 for integer-friendliness
multCrR = (409 * colorCr | 0) - 57088 | 0;
multCbCrG = (100 * colorCb | 0) + (208 * colorCr | 0) - 34816 | 0;
multCbB = (516 * colorCb | 0) - 70912 | 0;
multY = 298 * bytesY[YPtr++] | 0;
output[outPtr ] = (multY + multCrR) >> 8;
output[outPtr + 1] = (multY - multCbCrG) >> 8;
output[outPtr + 2] = (multY + multCbB) >> 8;
outPtr += 4;
}
}
}
}
module.exports = {
convertYCbCr: convertYCbCr
};
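// Usage sketch — mirrors what SoftwareFrameSink does with this function:
// allocate an RGBA ImageData at the encoded frame size, prefill alpha to
// opaque, then convert into it (ctx here is assumed to be a 2d canvas context):
//
// var imageData = ctx.createImageData(frame.format.width, frame.format.height);
// for (var i = 3; i < imageData.data.length; i += 4) {
//   imageData.data[i] = 255; // opaque alpha; convertYCbCr only writes R, G, B
// }
// convertYCbCr(frame, imageData.data);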
})();
},{"./depower.js":6}],6:[function(require,module,exports){
/*
Copyright (c) 2014-2016 Brion Vibber <brion@pobox.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
(function() {
"use strict";
/**
* Convert a ratio into a bit-shift count; for instance a ratio of 2
* becomes a bit-shift of 1, while a ratio of 1 is a bit-shift of 0.
*
* @author Brion Vibber <brion@pobox.com>
* @copyright 2016
* @license MIT-style
*
* @param {number} ratio - the integer ratio to convert.
* @returns {number} - number of bits to shift to multiply/divide by the ratio.
* @throws exception if given a non-power-of-two
*/
function depower(ratio) {
var shiftCount = 0,
n = ratio >> 1;
while (n != 0) {
n = n >> 1;
shiftCount++;
}
if (ratio !== (1 << shiftCount)) {
throw new Error('chroma plane dimensions must be power of 2 ratio to luma plane dimensions; got ' + ratio);
}
return shiftCount;
}
module.exports = depower;
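// Examples, for reference: depower(1) === 0, depower(2) === 1, depower(4) === 2;
// depower(3) throws because 3 is not a power of two.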
})();
},{}],7:[function(require,module,exports){
/*
Copyright (c) 2014-2016 Brion Vibber <brion@pobox.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
(function() {
"use strict";
var FrameSink = require('./FrameSink.js'),
SoftwareFrameSink = require('./SoftwareFrameSink.js'),
WebGLFrameSink = require('./WebGLFrameSink.js');
/**
* @typedef {Object} YUVCanvasOptions
* @property {boolean} webGL - Whether to use WebGL to draw to the canvas and accelerate color space conversion. If left out, defaults to auto-detect.
*/
var YUVCanvas = {
FrameSink: FrameSink,
SoftwareFrameSink: SoftwareFrameSink,
WebGLFrameSink: WebGLFrameSink,
/**
* Attach a suitable FrameSink instance to an HTML5 canvas element.
*
* This will take over the drawing context of the canvas and may turn
* it into a WebGL 3d canvas if possible. Do not attempt to use the
* drawing context directly after this.
*
* @param {HTMLCanvasElement} canvas - HTML canvas element to attach to
* @param {YUVCanvasOptions} options - map of options
* @returns {FrameSink} - instance of suitable subclass.
*/
attach: function(canvas, options) {
options = options || {};
var webGL = ('webGL' in options) ? options.webGL : WebGLFrameSink.isAvailable();
if (webGL) {
return new WebGLFrameSink(canvas, options);
} else {
return new SoftwareFrameSink(canvas, options);
}
}
};
module.exports = YUVCanvas;
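// Usage sketch — attach auto-detects WebGL support, or force the software
// path by passing { webGL: false } (frame is a YUVFrame object as described
// in FrameSink):
//
// var sink = YUVCanvas.attach(document.querySelector('canvas'));
// sink.drawFrame(frame);
//
// var softwareSink = YUVCanvas.attach(canvas, { webGL: false });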
})();
},{"./FrameSink.js":2,"./SoftwareFrameSink.js":3,"./WebGLFrameSink.js":4}],8:[function(require,module,exports){
const YUVCanvas = require('yuv-canvas');
// Attach a YUV frame sink to the target element (which must be an HTML5
// <canvas>; YUVCanvas.attach takes over its drawing context).
const video = document.getElementById('video');
const url = `ws://localhost:9999/ws/test`;
const yuv = YUVCanvas.attach(video);
// Receive frames over a WebSocket as binary ArrayBuffers.
const ws = new WebSocket(url);
ws.binaryType = 'arraybuffer';
const render = (buff) => {
yuv.drawFrame(buff);
};
ws.onmessage = evt => {
const data = evt.data;
if (typeof data !== 'string') {
// Binary message: frame data from the server.
render(new Uint8ClampedArray(data));
} else {
// Text message: JSON control payload.
const payload = JSON.parse(data);
switch (payload.type) {
case 'initial':
console.log('initialize the canvas');
// update size
break;
}
}
};
},{"yuv-canvas":7}]},{},[8]);