File-Input working with Source & PixelCapture

feature/image-source
Christoph Oberhofer 8 years ago
parent 5173ffc8fd
commit 242c275571

@@ -55,12 +55,16 @@ $(function() {
         },
         decode: function(file) {
             this.detachListeners();
+            var size = this.state.inputStream.size;
             console.log("decode...");
-            var scanner = Quagga
-                .config(this.state)
-                .fromSource(file, {size: this.state.inputStream.size});
-            scanner
-                .toPromise()
+            Quagga.fromImage(file, {
+                constraints: {width: size, height: size},
+                locator: this.state.locator,
+                decoder: this.state.decoder,
+            })
+            .then(function(scanner) {
+                scanner.detect()
                 .then(function(result) {
                     console.log(result);
                     addToResults(scanner, result);
@@ -74,6 +78,11 @@ $(function() {
                 drawResult(scanner, result);
                 this.attachListeners();
             }.bind(this));
+            }.bind(this));
+
+            // Quagga.fromCamera(constraints)
+            // Quagga.fromSource();
+            // Quagga.fromPixelCapture();
         },
         setState: function(path, value) {
             var self = this;

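For reference, a minimal sketch of the promise-based flow the hunk above switches to. Only Quagga.fromImage and scanner.detect() come from the diff; the file-input wiring, selector, and result handling are illustrative placeholders.

// Hedged sketch: wire a file input to the new API.
document.querySelector('#fileInput').addEventListener('change', function(e) {
    var file = e.target.files[0];
    Quagga.fromImage(file, {constraints: {width: 800, height: 800}})
        .then(function(scanner) {
            return scanner.detect();
        })
        .then(function(result) {
            console.log('detected', result);
        })
        .catch(function(err) {
            console.error(err);
        });
});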
@@ -0,0 +1,23 @@
export const PORTRAIT = "portrait";
export const LANDSCAPE = "landscape";
const matchingScreens = {
[PORTRAIT]: /portrait/i,
[LANDSCAPE]: /landscape/i,
};
export function determineOrientation() {
var orientationType = screen.msOrientation || screen.mozOrientation;
if (typeof orientationType !== 'string') {
orientationType = screen.orientation;
if (typeof orientationType === 'object' && orientationType.type) {
orientationType = orientationType.type;
}
}
if (orientationType) {
return Object.keys(matchingScreens)
.filter(orientation => matchingScreens[orientation].test(orientationType))[0];
}
console.log(`Failed to determine orientation, defaults to ${PORTRAIT}`);
return PORTRAIT;
}

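A hedged usage sketch for the helper above; the constraint-swapping caller is an assumption, mirroring what fromCamera does later in this commit.

import {determineOrientation, PORTRAIT} from '../common/device';

// Illustrative only: swap requested width/height when the device
// reports portrait, the same trick fromCamera applies below.
const base = {width: {ideal: 640}, height: {ideal: 480}};
const constraints = determineOrientation() === PORTRAIT
    ? Object.assign({}, base, {width: base.height, height: base.width})
    : base;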
@@ -0,0 +1,6 @@
export function sleep(millis) {
return new Promise(function(resolve) {
window.setTimeout(resolve, millis);
});
}

@@ -1,14 +1,15 @@
 module.exports = {
-    inputStream: {
-        name: "Live",
-        type: "LiveStream",
+    numOfWorkers: 0,
+    locate: true,
+    target: '#interactive.viewport',
     constraints: {
         width: 640,
-        height: 480,
+        height: 640,
         // aspectRatio: 640/480, // optional
         facingMode: "environment", // or user
         // deviceId: "38745983457387598375983759834"
     },
+    detector: {
         area: {
             top: "0%",
             right: "0%",
@@ -17,8 +18,6 @@ module.exports = {
         },
         singleChannel: false // true: only the red color-channel is read
     },
-    locate: true,
-    numOfWorkers: 2,
     decoder: {
         readers: [
             'code_128_reader'
@@ -31,7 +30,7 @@ module.exports = {
         }
     },
     locator: {
-        halfSample: true,
+        halfSample: false,
         patchSize: "medium", // x-small, small, medium, large, x-large
         debug: {
             showCanvas: false,

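The restructured config above is the default that Quagga.fromImage merges per-call options into (see the quagga.js hunk further down). A hedged example of overriding individual fields; the reader choice and sizes are placeholders.

// Illustrative override: merge({}, Config, options) in Quagga.fromImage
// lets callers replace individual defaults per scan.
function scanFile(file) {
    return Quagga.fromImage(file, {
        constraints: {width: 800, height: 800},
        decoder: {readers: ['ean_reader']},
    });
}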
@@ -0,0 +1,120 @@
import {
computeGray
} from '../common/cv_utils';
import {sleep} from '../common/utils';
function adjustCanvasSize(input, canvas) {
if (input instanceof HTMLVideoElement) {
if (canvas.height !== input.videoHeight || canvas.width !== input.videoWidth) {
console.log('adjusting canvas size', input.videoHeight, input.videoWidth);
canvas.height = input.videoHeight;
canvas.width = input.videoWidth;
return true;
}
return false;
} else if (typeof input.width !== 'undefined') {
if (canvas.height !== input.height || canvas.width !== input.width) {
console.log('adjusting canvas size', input.height, input.width);
canvas.height = input.height;
canvas.width = input.width;
return true;
}
return false;
} else {
throw new Error('Not a video element!');
}
}
function getViewPort(target) {
if (target && target.nodeName && target.nodeType === 1) {
return target;
} else {
// Use '#interactive.viewport' as a fallback selector (backwards compatibility)
var selector = typeof target === 'string' ? target : '#interactive.viewport';
return document.querySelector(selector);
}
}
function getOrCreateCanvas(source, target) {
const $viewport = getViewPort(target);
let $canvas = $viewport.querySelector("canvas.imgBuffer");
if (!$canvas) {
$canvas = document.createElement("canvas");
$canvas.className = "imgBuffer";
if ($viewport && source.type === "IMAGE") {
$viewport.appendChild($canvas);
}
}
return $canvas;
}
export function fromSource(source, {target = "#interactive.viewport"} = {}) {
var drawable = source.getDrawable();
var $canvas = null;
var ctx = null;
var bytePool = [];
if (drawable instanceof HTMLVideoElement
|| drawable instanceof HTMLImageElement) {
$canvas = getOrCreateCanvas(source, target);
ctx = $canvas.getContext('2d');
}
if (drawable instanceof HTMLCanvasElement) {
$canvas = drawable;
ctx = drawable.getContext('2d');
}
function nextAvailableBuffer() {
var i;
var buffer;
var bytesRequired = ($canvas.height * $canvas.width);
for (i = 0; i < bytePool.length; i++) {
buffer = bytePool[i];
if (buffer && buffer.buffer.byteLength === bytesRequired) {
return bytePool[i];
}
}
buffer = new Uint8Array(bytesRequired);
bytePool.push(buffer);
console.log("Added new entry to bufferPool", bytesRequired);
return buffer;
}
return {
grabFrameData: function grabFrameData() {
const {viewport, canvas: canvasSize} = source.getDimensions();
const sx = viewport.x;
const sy = viewport.y;
const sWidth = viewport.width;
const sHeight = viewport.height;
const dx = 0;
const dy = 0;
const dWidth = canvasSize.width;
const dHeight = canvasSize.height;
adjustCanvasSize(canvasSize, $canvas);
if ($canvas.height < 10 || $canvas.width < 10) {
console.log('$canvas not initialized. Waiting 100ms and then continuing');
return sleep(100).then(grabFrameData);
}
if (!(drawable instanceof HTMLCanvasElement)) {
ctx.drawImage(drawable, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight);
}
var imageData = ctx.getImageData(0, 0, $canvas.width, $canvas.height).data;
var buffer = nextAvailableBuffer();
computeGray(imageData, buffer);
return Promise.resolve({
width: $canvas.width,
height: $canvas.height,
data: buffer,
});
},
getSource: function() {
return source;
},
getCanvas: function() {
return $canvas;
},
};
}

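A hedged usage sketch for the capturer above, assuming a source produced by Source.fromImage further down; the image path and target selector are placeholders.

import * as PixelCapture from './input/PixelCapture';
import * as Source from './input/Source';

// Illustrative: grab a single grayscale frame from an image source.
Source.fromImage('/img/sample.jpg', {width: 800, height: 800})
    .then(source => PixelCapture.fromSource(source, {target: '#interactive.viewport'}))
    .then(capturer => capturer.grabFrameData())
    .then(({width, height, data}) => {
        console.log('frame', width, 'x', height, 'gray bytes:', data.length);
    });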
@@ -0,0 +1,180 @@
import {determineOrientation, PORTRAIT} from '../common/device';
import CameraAccess from './camera_access';
export function fromCamera(constraints) {
if (!constraints) {
constraints = {width: {ideal: 540}, height: {ideal: 540}, aspectRatio: {ideal: 1}, facingMode: 'environment'};
}
var orientation = determineOrientation();
var videoConstraints = constraints;
if (orientation === PORTRAIT) {
constraints = Object.assign({}, constraints, {
width: constraints.height,
height: constraints.width,
});
}
const video = document.querySelector('video');
// Return the promise so callers receive the resolved camera source.
return CameraAccess.request(videoConstraints, video)
.then(function(mediastream) {
const track = mediastream.getVideoTracks()[0];
return {
type: "CAMERA",
getDimensions() {
const viewport = {
x: 0,
y: 0,
width: video.videoWidth,
height: video.videoHeight,
};
if (constraints.zoom && constraints.zoom.exact > 1) {
const zoom = constraints.zoom.exact;
viewport.width = Math.floor(video.videoWidth / zoom);
viewport.height = Math.floor(video.videoHeight / zoom);
viewport.x = Math.floor((video.videoWidth - viewport.width) / 2);
viewport.y = Math.floor((video.videoHeight - viewport.height) / 2);
}
return {
viewport,
canvas: {
width: constraints.width, // AR
height: constraints.height, // AR
},
};
},
getConstraints: function() {
return videoConstraints;
},
getDrawable: function() {
return video;
},
applyConstraints: function(constraints) {
track.stop();
videoConstraints = Object.assign({}, constraints);
if (determineOrientation() === PORTRAIT) {
constraints = Object.assign({}, constraints, {
width: constraints.height,
height: constraints.width,
});
}
console.log(videoConstraints, constraints);
if (constraints.zoom && constraints.zoom.exact > 1) {
constraints.width.ideal = Math.floor(constraints.width.ideal * constraints.zoom.exact);
constraints.height.ideal = Math.floor(constraints.height.ideal * constraints.zoom.exact);
delete constraints.zoom;
}
return CameraAccess.request(videoConstraints, video);
},
getLabel: function() {
return track.label;
}
};
});
}
export function fromCanvas(input) {
var $canvas = null;
if (typeof input === 'string') {
$canvas = document.querySelector(input);
} else if (input instanceof HTMLCanvasElement) {
$canvas = input;
} else {
return Promise.reject("fromCanvas needs a selector or HTMLCanvasElement");
}
return Promise.resolve({
type: "CANVAS",
getWidth: function() {
return $canvas.width;
},
getHeight: function() {
return $canvas.height;
},
getDrawable: function() {
return $canvas;
},
getLabel: function() {
return $canvas.getAttribute('id');
},
getConstraints: function() {
return {};
},
applyConstraints: function() {
console.log('CanvasSource.applyConstraints not implemented');
}
});
}
export function fromImage(input, constraints = {width: 800, height: 800}) {
var $image = null;
var src = null;
if (typeof input === 'string') {
// data or url, or queryString
$image = new Image();
src = input;
} else if (input instanceof HTMLImageElement) {
$image = input;
} else if (input instanceof File) {
$image = new Image();
src = URL.createObjectURL(input);
} else {
return Promise.reject("fromImage needs a src, HTMLImageElement or File");
}
return new Promise(function(resolve, reject) {
if (src || !$image.complete) {
console.log('Adding eventlistener');
$image.addEventListener('load', function() {
resolve();
}, false);
$image.addEventListener('error', function(e) {
reject(e);
}, false);
if (src) {
console.log(`Setting src = ${src}`);
$image.src = src;
}
} else {
return resolve();
}
})
.then(() => {
const width = $image.naturalWidth;
const height = $image.naturalHeight;
const imageAR = width / height;
const calculatedWidth = imageAR > 1 ? constraints.width : Math.floor((imageAR) * constraints.width);
const calculatedHeight = imageAR > 1 ? Math.floor((1 / imageAR) * constraints.width) : constraints.width;
return {
type: "IMAGE",
getDimensions() {
return {
viewport: {
width: $image.naturalWidth, // AR
height: $image.naturalHeight, // AR
x: 0, // AR
y: 0, // AR
},
canvas: {
width: calculatedWidth, // AR
height: calculatedHeight, // AR
},
};
},
getDrawable: function() {
return $image;
},
getLabel: function() {
return $image.src;
},
getConstraints: function() {
return constraints;
},
applyConstraints: function() {
console.log('ImageSource.applyConstraints not implemented');
}
};
});
}

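A short hedged sketch of the three source constructors above. Selectors, URLs, and constraints are placeholders, and fromCamera is assumed to hand back the CameraAccess.request promise.

import {fromCamera, fromCanvas, fromImage} from './input/Source';

// Camera source: resolves once the <video> element is streaming.
fromCamera({width: {ideal: 640}, height: {ideal: 480}, facingMode: 'environment'})
    .then(source => console.log('camera:', source.getLabel()));

// Canvas source: wraps an existing canvas element or selector.
fromCanvas('#my-canvas')
    .then(source => console.log('canvas:', source.getWidth(), 'x', source.getHeight()));

// Image source: accepts a URL/data URI, an HTMLImageElement, or a File.
fromImage('/img/sample.jpg', {width: 800, height: 800})
    .then(source => console.log('image:', source.getDimensions()));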
@@ -8,7 +8,7 @@ const facingMatching = {
 var streamRef;
 
-function waitForVideo(video) {
+function waitForVideo(video, stream) {
     return new Promise((resolve, reject) => {
         let attempts = 10;
@@ -18,7 +18,7 @@ function waitForVideo(video) {
             if (ENV.development) {
                 console.log(video.videoWidth + "px x " + video.videoHeight + "px");
             }
-            resolve();
+            resolve(stream);
         } else {
             window.setTimeout(checkVideo, 500);
         }
@@ -46,7 +46,7 @@ function initCamera(video, constraints) {
             video.srcObject = stream;
             video.addEventListener('loadedmetadata', () => {
                 video.play();
-                resolve();
+                resolve(stream);
             });
         });
     })

@@ -36,7 +36,7 @@ export function createConfigFromSource(config, sourceConfig, source) {
     }
 }
 
-function createConfigForImage(config, source, inputConfig = {}) {
+export function createConfigForImage(config, source, inputConfig = {}) {
     const staticImageConfig = {
         inputStream: merge({
             type: "ImageStream",

@@ -5,10 +5,13 @@ import ImageDebug from './common/image_debug';
 import ResultCollector from './analytics/result_collector';
 import Config from './config/config';
 import {merge} from 'lodash';
-import {createConfigFromSource} from './input/config_factory';
+import {createConfigForImage} from './input/config_factory';
+import * as PixelCapture from './input/PixelCapture';
+import * as Source from './input/Source';
 
-function fromConfig(config) {
-    const scanner = createScanner();
+function fromConfig(pixelCapturer, config) {
+    const scanner = createScanner(pixelCapturer);
+    const source = pixelCapturer.getSource();
     let pendingStart = null;
     let initialized = false;
     return {
@@ -50,9 +53,9 @@ function fromConfig(config) {
             initialized = false;
             return this;
         },
-        toPromise() {
-            if (config.inputStream.type === 'LiveStream'
-                    || config.inputStream.type === 'VideoStream') {
+        detect() {
+            if (source.type === 'CAMERA'
+                    || source.type === 'VIDEO') {
                 let cancelRequested = false;
                 return {
                     cancel() {
@@ -90,14 +93,14 @@ function fromConfig(config) {
            scanner.registerResultCollector(resultCollector);
        },
        getCanvas() {
-            return scanner.canvas.dom.image;
+            return pixelCapturer.getCanvas();
        },
    };
}

-function fromSource(config, source, inputConfig = {}) {
-    config = createConfigFromSource(config, inputConfig, source);
-    return fromConfig(config);
+function fromSource(config, source) {
+    const pixelCapturer = PixelCapture.fromSource(source, {target: config.target});
+    return fromConfig(pixelCapturer, config);
 }
 
 function setConfig(configuration = {}, key, config = {}) {
@@ -105,8 +108,14 @@ function setConfig(configuration = {}, key, config = {}) {
     return createApi(mergedConfig);
 }
 
-function createApi(configuration = Config) {
+function createApi() {
     return {
+        fromImage(image, options) {
+            const config = merge({}, Config, options);
+            return Source
+                .fromImage(image, config.constraints)
+                .then(fromSource.bind(null, config));
+        },
         fromSource(src, inputConfig) {
             return fromSource(configuration, src, inputConfig);
         },

@@ -14,7 +14,7 @@ const vec2 = {
 };
 
-function createScanner() {
+function createScanner(pixelCapturer) {
     var _inputStream,
         _framegrabber,
         _stopped = true,
@@ -35,58 +35,19 @@ function createScanner() {
         _config = {},
         _events = createEventedElement(),
         _locator;
+    const source = pixelCapturer.getSource();
 
     function initializeData(imageWrapper) {
         initBuffers(imageWrapper);
         _decoder = BarcodeDecoder.create(_config.decoder, _inputImageWrapper);
     }
 
-    function initInputStream(cb) {
-        var video;
-        if (_config.inputStream.type === "VideoStream") {
-            video = document.createElement("video");
-            _inputStream = InputStream.createVideoStream(video);
-        } else if (_config.inputStream.type === "ImageStream") {
-            _inputStream = InputStream.createImageStream();
-        } else if (_config.inputStream.type === "LiveStream") {
-            var $viewport = getViewPort();
-            if ($viewport) {
-                video = $viewport.querySelector("video");
-                if (!video) {
-                    video = document.createElement("video");
-                    $viewport.appendChild(video);
-                }
-            }
-            _inputStream = InputStream.createLiveStream(video);
-            CameraAccess.request(video, _config.inputStream.constraints)
-                .then(() => {
-                    _inputStream.trigger("canrecord");
-                }).catch((err) => {
-                    return cb(err);
-                });
-        }
-
-        _inputStream.setAttribute("preload", "auto");
-        _inputStream.setInputStream(_config.inputStream);
-        _inputStream.addEventListener("canrecord", canRecord.bind(undefined, cb));
-    }
-
-    function getViewPort() {
-        var target = _config.inputStream.target;
-        // Check if target is already a DOM element
-        if (target && target.nodeName && target.nodeType === 1) {
-            return target;
-        } else {
-            // Use '#interactive.viewport' as a fallback selector (backwards compatibility)
-            var selector = typeof target === 'string' ? target : '#interactive.viewport';
-            return document.querySelector(selector);
-        }
-    }
-
     function canRecord(cb) {
-        checkImageConstraints(_inputStream, _config.locator);
-        initCanvas(_config);
-        _framegrabber = FrameGrabber.create(_inputStream, _canvasContainer.dom.image);
+        // checkImageConstraints(_inputStream, _config.locator);
+        // initCanvas();
+        // _framegrabber = FrameGrabber.create(_inputStream, _canvasContainer.dom.image);
 
         adjustWorkerPool(_config.numOfWorkers, function() {
             if (_config.numOfWorkers === 0) {
@@ -97,46 +58,30 @@ function createScanner() {
     }
 
     function ready(cb){
-        _inputStream.play();
+        // _inputStream.play();
         cb();
     }
 
-    function initCanvas() {
-        if (typeof document !== "undefined") {
-            var $viewport = getViewPort();
-            _canvasContainer.dom.image = document.querySelector("canvas.imgBuffer");
-            if (!_canvasContainer.dom.image) {
-                _canvasContainer.dom.image = document.createElement("canvas");
-                _canvasContainer.dom.image.className = "imgBuffer";
-                if ($viewport && _config.inputStream.type === "ImageStream") {
-                    $viewport.appendChild(_canvasContainer.dom.image);
-                }
-            }
-            _canvasContainer.ctx.image = _canvasContainer.dom.image.getContext("2d");
-            _canvasContainer.dom.image.width = _inputStream.getCanvasSize().x;
-            _canvasContainer.dom.image.height = _inputStream.getCanvasSize().y;
-        }
-    }
-
     function initBuffers(imageWrapper) {
-        if (imageWrapper) {
-            _inputImageWrapper = imageWrapper;
-        } else {
+        // if (imageWrapper) {
+        //     _inputImageWrapper = imageWrapper;
+        // } else {
+        const {canvas} = source.getDimensions();
         _inputImageWrapper = new ImageWrapper({
-            x: _inputStream.getWidth(),
-            y: _inputStream.getHeight()
+            x: canvas.width,
+            y: canvas.height,
         });
-        }
-
-        if (ENV.development) {
-            console.log(_inputImageWrapper.size);
-        }
-        _boxSize = [
-            vec2.clone([0, 0]),
-            vec2.clone([0, _inputImageWrapper.size.y]),
-            vec2.clone([_inputImageWrapper.size.x, _inputImageWrapper.size.y]),
-            vec2.clone([_inputImageWrapper.size.x, 0])
-        ];
+        // }
+        //
+        // if (ENV.development) {
+        //     console.log(_inputImageWrapper.size);
+        // }
+        // _boxSize = [
+        //     vec2.clone([0, 0]),
+        //     vec2.clone([0, _inputImageWrapper.size.y]),
+        //     vec2.clone([_inputImageWrapper.size.x, _inputImageWrapper.size.y]),
+        //     vec2.clone([_inputImageWrapper.size.x, 0])
+        // ];
         _locator = createLocator(_inputImageWrapper, _config.locator);
     }
@@ -153,9 +98,9 @@ function createScanner() {
     }
 
     function transformResult(result) {
-        var topRight = _inputStream.getTopRight(),
-            xOffset = topRight.x,
-            yOffset = topRight.y,
+        const {viewport} = source.getDimensions();
+        let xOffset = viewport.x,
+            yOffset = viewport.y,
             i;
 
         if (xOffset === 0 && yOffset === 0) {
@@ -257,14 +202,17 @@ function createScanner() {
                 return !workerThread.busy;
             })[0];
             if (availableWorker) {
-                _framegrabber.attachData(availableWorker.imageData);
+                //_framegrabber.attachData(availableWorker.imageData);
             } else {
                 return; // all workers are busy
             }
         } else {
-            _framegrabber.attachData(_inputImageWrapper.data);
+            //_framegrabber.attachData(_inputImageWrapper.data);
         }
-        if (_framegrabber.grab()) {
+        pixelCapturer.grabFrameData()
+            .then((bitmap) => {
+                _inputImageWrapper.data = bitmap.data;
+                if (bitmap) {
                     if (availableWorker) {
                         availableWorker.busy = true;
                         availableWorker.worker.postMessage({
@@ -275,6 +223,10 @@ function createScanner() {
                         locateAndDecode();
                     }
                 }
+            })
+            .catch(err => {
+                console.error(err);
+            })
     } else {
         locateAndDecode();
     }
@@ -298,7 +250,7 @@ function createScanner() {
     }
 
     function start() {
-        if (_onUIThread && _config.inputStream.type === "LiveStream") {
+        if (_onUIThread && source.type === "CAMERA") {
            startContinuousUpdate();
        } else {
            update();
@@ -457,7 +409,7 @@ function createScanner() {
                initializeData(imageWrapper);
                return cb();
            } else {
-                initInputStream(cb);
+                canRecord(cb);
            }
        },
        start: function() {
@@ -469,10 +421,9 @@ function createScanner() {
        stop: function() {
            _stopped = true;
            adjustWorkerPool(0);
-            if (_config.inputStream.type === "LiveStream") {
+            if (source.type === "CAMERA") {
                CameraAccess.release();
            }
-            _inputStream.clearEventHandlers();
        },
        pause: function() {
            _stopped = true;

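The scanner above now pulls frames through the injected pixelCapturer rather than the removed frame grabber. A hedged stand-in showing the minimal contract the refactored code relies on; all concrete values are illustrative.

// Minimal fake capturer matching the calls made above:
// getSource(), getCanvas(), and grabFrameData() resolving {width, height, data}.
const capturerStub = {
    getSource: () => ({
        type: 'IMAGE',
        getDimensions: () => ({
            viewport: {x: 0, y: 0, width: 800, height: 600},
            canvas: {width: 800, height: 600},
        }),
    }),
    getCanvas: () => document.createElement('canvas'),
    grabFrameData: () => Promise.resolve({
        width: 800,
        height: 600,
        data: new Uint8Array(800 * 600), // one gray byte per pixel
    }),
};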