提交 6acebefc 编写于 作者: J Jerome Etienne

more on wasm

上级 cb1d2a8a
# GOAL
- filtering : convert2Grey + adaptiveThresholding
- code this in multiple versions : webworkers - webassembly - gpu
- mix them together - which combination?
# Step1 Basic Demo
- read the webcam
- display the original image
- filter the image - use jsaruco function
- display the filtered image
# Step2 Implement webworkers + jsaruco
- aka all normal javascript - no webassembly so more stable
# Step3 Implement webassembly
- code in C the convert2Grey yourself first
- see about getting a horizontal/vertical blur in C
- then do an adaptive thresholding
- result must be the same as the jsaruco version
# Step4 Implement a gpu version
- convert2Grey may be done in shader
- horizontal/vertical blur may be done in shader
- which resolution for the texture ?
- how many passes ? 4 different shaders or larger ones ?
# Step5 Mix them together
- what is possible ? what is buggy ?
- if all is running as expected, any combination would work.
- it is a matter of picking the fastest
- so try and measure :)
# GOAL
- filtering : convert2Grey + adaptiveThresholding
- code this in multiple versions : webworkers - webassembly - gpu
- mix them together - which combination?
- thresholding is working in webassembly
- now lets make it fast
- do a linear time with the incremental technique
- do a pure average first
- you know how to code it
- later: do a gaussian approximation - boxstackblur stuff which is the trick
- so you get a good idea of the speed
- so you get a webassembly version from optimised C - this is the fastest it can be on the web at the moment
- good to bench webassembly
- test multiple browsers - multiple resolutions
- see how hard it would be to incorporate it in threex-aruco.js
# Step1 Basic Demo
- read the webcam
- display the original image
- filter the image - use jsaruco function
- display the filtered image
# Step2 Implement webworkers + jsaruco
- aka all normal javascript - no webassembly so more stable
# Step3 Implement webassembly
- code in C the convert2Grey yourself first
- see about getting a horizontal/vertical blur in C
- then do an adaptive thresholding
- result must be the same as the jsaruco version
- source ~/webwork/emsdk/emsdk_env.sh
# Step4 Implement a gpu version
- convert2Grey may be done in shader
- horizontal/vertical blur may be done in shader
- which resolution for the texture ?
- how many passes ? 4 different shaders or larger ones ?
# Step5 Mix them together
- what is possible ? what is buggy ?
- if all is running as expected, any combination would work.
- it is a matter of picking the fastest
- so try and measure :)
Found issues
- jsaruco uses a kernel size of 2 in adaptive thresholding
- could I reduce the resolution of the source image and use a kernel size of 1 ?
- it would produce more fps. what would the difference be ? would it create errors ?
- jsaruco - adaptiveThreshold is doing it on ALL bytes - so all channels ???
- it uses a blackwhite image - it only needs 1 channel - 8 bits is already a lot to store blackwhite
- this means 4 times more work than needed
# Rebuild automatically whenever the C source changes.
# FIX: fswatch previously watched cv-threshold.cpp while the build rule
# compiles cv-threshold.c, so edits to the actual source never triggered
# a rebuild.
watch: build
	fswatch -0 cv-threshold.c | xargs -0 -n 1 -I {} make build

# Compile the C source to wasm (size-optimised, modularized output).
build:
	emcc cv-threshold.c -Os -s WASM=1 -s MODULARIZE=1 -o module.js
#include <emscripten.h>
#include <stdlib.h>
#include <alloca.h>
#include <stdint.h>
// Dimensions of the image currently backed by canvasBuffer (0 = none allocated).
uint32_t canvasWidth = 0;
uint32_t canvasHeight = 0;
// RGBA layout: 4 bytes per pixel, matching the canvas ImageData format.
#define NCHANNELS 4
// Pixel buffer shared with the JavaScript side; allocated by canvasAllocate().
unsigned char *canvasBuffer = NULL;
EMSCRIPTEN_KEEPALIVE
int canvasProcess(){
	// Saturate channel 0 (red) of every pixel in the shared canvas buffer.
	const int nBytes = canvasWidth * canvasHeight * sizeof(canvasBuffer[0]) * NCHANNELS;
	unsigned char *pixel = canvasBuffer;
	const unsigned char *end = canvasBuffer + nBytes;
	for( ; pixel < end; pixel += NCHANNELS ){
		*pixel = 255;
	}
	// fixed marker value returned to the caller (original behavior)
	return 42;
}
////////////////////////////////////////////////////////////////////////////////
//
////////////////////////////////////////////////////////////////////////////////
EMSCRIPTEN_KEEPALIVE
unsigned char *canvasAllocate(int width, int height){
EM_ASM(Module.print('canvasAllocate'));
// FIX: release any buffer from a previous call so repeated calls do not leak
if( canvasBuffer != NULL ){
	free(canvasBuffer);
	canvasBuffer = NULL;
}
// allocate the memory: one byte per channel, NCHANNELS (RGBA) per pixel
canvasBuffer = malloc((size_t)width * (size_t)height * sizeof(canvasBuffer[0]) * NCHANNELS);
if( canvasBuffer == NULL ){
	// FIX: allocation failure now reported with NULL instead of returning
	// a NULL buffer while still recording stale dimensions
	canvasWidth = 0;
	canvasHeight = 0;
	return NULL;
}
// remember the dimensions backing the buffer
canvasWidth = width;
canvasHeight = height;
// return the wasm-heap address so JS can build a typed-array view on it
return canvasBuffer;
}
EMSCRIPTEN_KEEPALIVE
void canvasFree(){
	// Release the shared pixel buffer and reset the recorded dimensions.
	// Guard clause: nothing to do when no buffer is currently allocated.
	if( canvasBuffer == NULL ) return;
	free(canvasBuffer);
	canvasBuffer = NULL;
	canvasWidth = 0;
	canvasHeight = 0;
}
// Accessors so the JavaScript side can query the shared canvas state.
EMSCRIPTEN_KEEPALIVE
unsigned char *getCanvasBuffer(){
return canvasBuffer;
}
EMSCRIPTEN_KEEPALIVE
uint32_t getCanvasWidth(){
return canvasWidth;
}
EMSCRIPTEN_KEEPALIVE
uint32_t getCanvasHeight(){
return canvasHeight;
}
<script src="module.js"></script>
<body><script>
var videoElement = document.createElement('video')
document.body.appendChild(videoElement)
videoElement.autoplay = true
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
var userMediaContraints = {
	video: true,
}
navigator.mediaDevices.getUserMedia(userMediaContraints).then(function(stream){
	// FIX: srcObject is the standard way to attach a MediaStream to a <video>;
	// URL.createObjectURL(MediaStream) has been removed from modern browsers.
	videoElement.srcObject = stream
}).catch(function(error){
	console.assert(false, 'getUserMedia failed due to')
	console.dir(error)
})
var canvas = document.createElement('canvas')
canvas.width = 640
canvas.height = 480
document.body.appendChild(canvas)
var context = canvas.getContext('2d')
// route the wasm module's stdout into the devtools console
Module['print'] = function(text) { console.log('stdout: ' + text) };
var module = Module({
	wasmBinaryFile: 'module.wasm'
})
// NOTE(review): a fixed 100ms delay is a race against asynchronous wasm
// compilation; the assert below fires when the module is not ready yet.
// TODO: hook the module's ready/run callback instead.
setTimeout(onModuleLoaded, 100)
function onModuleLoaded(){
	console.assert(module.asm._canvasAllocate)
	// canvasBuffer is the wasm-heap address of the shared RGBA pixel buffer
	var canvasBuffer = module.asm._canvasAllocate(canvas.width, canvas.height);
	var bufferWasmLength = canvas.width * canvas.height * 4
	requestAnimationFrame(function callback(){
		context.drawImage(videoElement, 0, 0)
		var imageData = context.getImageData(0, 0, canvas.width, canvas.height)
		console.log('address', module.asm._getCanvasBuffer() )
		module.asm._canvasProcess()
		requestAnimationFrame(callback)
	})
}
</script></body>
......@@ -8,17 +8,23 @@
// document.body.appendChild(videoElement)
videoElement.autoplay = true
navigator.mediaDevices.getUserMedia({
video:true
}).then(function(stream){
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
var userMediaContraints = {
// video: true,
video : {
width: 640,
height:480
}
}
navigator.mediaDevices.getUserMedia(userMediaContraints).then(function(stream){
videoElement.src = window.URL.createObjectURL(stream);
}).catch(function(error){
console.assert(false)
console.assert(false, 'getUserMedia failed due to')
console.dir(error)
})
var canvas = document.createElement('canvas')
canvas.width = 640
canvas.width = 640
canvas.height = 480
document.body.appendChild(canvas)
var context = canvas.getContext('2d')
......@@ -32,8 +38,7 @@
var imageData = context.getImageData(0, 0, canvas.width, canvas.height)
CV.grayscale(imageData, greyCVImage)
CV.adaptiveThreshold(greyCVImage, thresCVImage, 2, 7);
CV.adaptiveThreshold(greyCVImage, thresCVImage, 2, 10);
var imageData = context.createImageData(canvas.width, canvas.height);
copyCVImage2ImageData(thresCVImage, imageData)
......
# Rebuild automatically on every change of the C++ source.
watch: build
fswatch -0 cv-threshold.cpp | xargs -0 -n 1 -I {} make build
# Compile with embind support, producing module.js + module.wasm.
build:
emcc --bind -o module.js cv-threshold.cpp
<script src="module.js"></script>
<body><script type="text/javascript">
// https://stackoverflow.com/questions/34307692/how-to-pass-canvas-imagedata-to-emscripten-c-program-without-copying-it
var srcCanvas = document.createElement('canvas')
srcCanvas.width = 640
srcCanvas.height = 480
var srcContext = srcCanvas.getContext('2d')
// draw a few colored rectangles as static test content
srcContext.fillStyle = 'red';
srcContext.fillRect (0, 0, srcCanvas.width, srcCanvas.height);
srcContext.fillStyle = 'pink';
srcContext.fillRect (50, 50, 150, 150);
srcContext.fillStyle = 'green';
srcContext.fillRect (250, 250, 350, 50);
document.body.appendChild(srcCanvas)
// copy the canvas pixels into a wasm-heap buffer, run the filter in place,
// then copy the result back into the canvas
var srcImageData = srcContext.getImageData(0, 0, srcCanvas.width, srcCanvas.height)
var wasmBufferPtr = Module._malloc(srcImageData.data.length);
var wasmBufferArray = new Uint8Array(Module.HEAPU8.buffer, wasmBufferPtr, srcImageData.data.length);
wasmBufferArray.set(new Uint8Array(srcImageData.data))
Module.asm._convertToGray(wasmBufferArray.byteOffset, srcCanvas.width, srcCanvas.height)
srcImageData.data.set(wasmBufferArray);
srcContext.putImageData(srcImageData, 0, 0)
// FIX: one-shot demo — release the wasm-heap allocation (was leaked before)
Module._free(wasmBufferPtr)
</script></body>
#include <emscripten/bind.h>
#include <emscripten.h>
#include <stdint.h>
extern "C" {
EMSCRIPTEN_KEEPALIVE
void convertToGray(uint8_t *buffer, int imageW, int imageH){
	// Rec.601 luma conversion over an RGBA buffer. The grey value is written
	// only into channel 0 of each pixel; channels 1-3 are left untouched
	// (the later pipeline stages read channel 0 only).
	const int nPixels = imageW * imageH;
	uint8_t *px = buffer;
	for(int p = 0; p < nPixels; p++, px += 4){
		px[0] = (uint8_t)(px[0] * 0.299 + px[1] * 0.587 + px[2] * 0.114 + 0.5);
	}
}
EMSCRIPTEN_KEEPALIVE
void meanBlurHorizontal(uint8_t *srcBuffer, uint8_t *dstBuffer, int imageW, int imageH, int windowW){
	// Box blur of channel 0 along x, window of 2*windowW+1 pixels.
	// FIX: samples are clamped to the row edges so every pixel of dstBuffer
	// is written. The previous version skipped the first/last windowW
	// columns entirely, leaving uninitialized bytes there that
	// adaptativeThreshold later read.
	for(int y = 0; y < imageH; y++){
		for(int x = 0; x < imageW; x++){
			uint32_t sum = 0;
			for(int d = -windowW; d <= windowW; d++){
				// clamp-to-edge replication at the row borders
				int sx = x + d;
				if( sx < 0 ) sx = 0;
				else if( sx >= imageW ) sx = imageW - 1;
				sum += srcBuffer[(y * imageW + sx) * 4];
			}
			dstBuffer[(y * imageW + x) * 4] = sum / (windowW*2+1);
		}
	}
}
EMSCRIPTEN_KEEPALIVE
void meanBlurVertical(uint8_t *srcBuffer, uint8_t *dstBuffer, int imageW, int imageH, int windowH){
	// Box blur of channel 0 along y, window of 2*windowH+1 pixels.
	// FIX: samples are clamped to the column edges so every pixel of
	// dstBuffer is written. The previous version skipped the first/last
	// windowH rows entirely, leaving uninitialized bytes there that
	// adaptativeThreshold later read.
	for(int y = 0; y < imageH; y++){
		for(int x = 0; x < imageW; x++){
			uint32_t sum = 0;
			for(int d = -windowH; d <= windowH; d++){
				// clamp-to-edge replication at the column borders
				int sy = y + d;
				if( sy < 0 ) sy = 0;
				else if( sy >= imageH ) sy = imageH - 1;
				sum += srcBuffer[(sy * imageW + x) * 4];
			}
			dstBuffer[(y * imageW + x) * 4] = sum / (windowH*2+1);
		}
	}
}
EMSCRIPTEN_KEEPALIVE
void adaptativeThreshold(uint8_t *srcBuffer, uint8_t *bluredBuffer, uint8_t *dstBuffer, int imageW, int imageH, int threshold){
	// Binarize channel 0: a pixel becomes white (255) when it is at least
	// `threshold` darker than the local mean in bluredBuffer, black (0)
	// otherwise. The direct comparison below is equivalent to the original
	// 768-entry lookup table tab[(src - blured) + 255] where
	// tab[i] = (i - 255 <= -threshold) ? 255 : 0.
	const int nBytes = imageW * imageH * 4;
	for(int i = 0; i < nBytes; i += 4){
		const int diff = (int)srcBuffer[i] - (int)bluredBuffer[i];
		dstBuffer[i] = (diff <= -threshold) ? 255 : 0;
	}
}
}
<script src="module.js"></script>
<body><script>
var videoElement = document.createElement('video')
// document.body.appendChild(videoElement)
videoElement.autoplay = true
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
var userMediaContraints = {
	video: true,
}
navigator.mediaDevices.getUserMedia(userMediaContraints).then(function(stream){
	// FIX: srcObject is the standard way to attach a MediaStream to a <video>;
	// URL.createObjectURL(MediaStream) has been removed from modern browsers.
	videoElement.srcObject = stream
}).catch(function(error){
	console.assert(false, 'getUserMedia failed due to')
	console.dir(error)
})
var canvas = document.createElement('canvas')
canvas.width = 640
canvas.height = 480
document.body.appendChild(canvas)
var context = canvas.getContext('2d')
context.fillStyle = 'rgba(255,0,0,1)';
context.fillRect (0, 0, canvas.width, canvas.height);
// Allocate three RGBA-sized scratch buffers on the wasm heap, each
// pre-filled with the current (red) canvas pixels so any bytes the blur
// passes leave untouched still hold defined values.
// buffer1 = grey source, buffer2 = h-blur then final output, buffer3 = full blur.
// They live for the whole page, so they are never freed.
// allocate wasmBuffer1Array
var tmpImageData = context.getImageData(0, 0, canvas.width, canvas.height)
var wasmBuffer1Ptr = Module._malloc(tmpImageData.data.length);
var wasmBuffer1Array = new Uint8Array(Module.HEAPU8.buffer, wasmBuffer1Ptr, tmpImageData.data.length);
wasmBuffer1Array.set(new Uint8Array(tmpImageData.data))
// allocate wasmBuffer2Array
var tmpImageData = context.getImageData(0, 0, canvas.width, canvas.height)
var wasmBuffer2Ptr = Module._malloc(tmpImageData.data.length);
var wasmBuffer2Array = new Uint8Array(Module.HEAPU8.buffer, wasmBuffer2Ptr, tmpImageData.data.length);
wasmBuffer2Array.set(new Uint8Array(tmpImageData.data))
// allocate wasmBuffer3Array
var tmpImageData = context.getImageData(0, 0, canvas.width, canvas.height)
var wasmBuffer3Ptr = Module._malloc(tmpImageData.data.length);
var wasmBuffer3Array = new Uint8Array(Module.HEAPU8.buffer, wasmBuffer3Ptr, tmpImageData.data.length);
wasmBuffer3Array.set(new Uint8Array(tmpImageData.data))
requestAnimationFrame(function callback(){
	// copy video, get its imageData and copy it in wasmBuffer1Array
	context.drawImage(videoElement, 0, 0)
	var tmpImageData = context.getImageData(0, 0, canvas.width, canvas.height)
	wasmBuffer1Array.set(new Uint8Array(tmpImageData.data))
	// pipeline: grey -> horizontal blur -> vertical blur -> threshold
	// (all stages operate on channel 0 only)
	Module.asm._convertToGray(wasmBuffer1Array.byteOffset, canvas.width, canvas.height)
	var windowW = 5
	var windowH = 5
	Module.asm._meanBlurHorizontal (wasmBuffer1Array.byteOffset, wasmBuffer2Array.byteOffset, canvas.width, canvas.height, windowW)
	Module.asm._meanBlurVertical (wasmBuffer2Array.byteOffset, wasmBuffer3Array.byteOffset, canvas.width, canvas.height, windowH)
	Module.asm._adaptativeThreshold(wasmBuffer1Array.byteOffset, wasmBuffer3Array.byteOffset, wasmBuffer2Array.byteOffset, canvas.width, canvas.height, 7)
	// copy processed image in context
	tmpImageData.data.set(wasmBuffer2Array);
	context.putImageData(tmpImageData, 0, 0)
	requestAnimationFrame(callback)
})
</script></body>
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册