Merge pull request #1001 from mzgoddard/motion-detect

Motion detect
Michael "Z" Goddard 2018-04-03 16:19:26 -04:00 committed by GitHub
commit 0a58a6d806
10 changed files with 1046 additions and 17 deletions

@@ -9,10 +9,13 @@ const BlockType = require('./block-type');
const Scratch3PenBlocks = require('../extensions/scratch3_pen');
const Scratch3WeDo2Blocks = require('../extensions/scratch3_wedo2');
const Scratch3MusicBlocks = require('../extensions/scratch3_music');
const Scratch3VideoSensingBlocks = require('../extensions/scratch3_video_sensing');
const builtinExtensions = {
pen: Scratch3PenBlocks,
wedo2: Scratch3WeDo2Blocks,
music: Scratch3MusicBlocks,
videoSensing: Scratch3VideoSensingBlocks
};
/**


@@ -0,0 +1,7 @@
const VideoMotion = require('./lib');
const VideoMotionView = require('./view');
module.exports = {
VideoMotion,
VideoMotionView
};
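Not part of the diff, but for orientation: this debug entry point is what the playground build exposes globally through expose-loader (see the webpack.config.js hunk at the end of this diff), so the playground page can reach both classes without a module loader:

// Inside the playground bundle this module is exposed as
// window.Scratch3MotionDetect (see the webpack change below), so the
// playground script can simply do:
const {VideoMotion, VideoMotionView} = window.Scratch3MotionDetect;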


@@ -0,0 +1,336 @@
const ArgumentType = require('../../extension-support/argument-type');
const BlockType = require('../../extension-support/block-type');
const Clone = require('../../util/clone');
const log = require('../../util/log');
const Timer = require('../../util/timer');
const VideoMotion = require('./lib');
/**
* Icon svg to be displayed at the left edge of each extension block, encoded as a data URI.
* @type {string}
*/
// eslint-disable-next-line max-len
const blockIconURI = 'data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNDAiIGhlaWdodD0iNDAiIHZpZXdCb3g9IjAgMCA0MCA0MCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayI+PHRpdGxlPm11c2ljLWJsb2NrLWljb248L3RpdGxlPjxkZWZzPjxwYXRoIGQ9Ik0zMi4xOCAyNS44NzRDMzIuNjM2IDI4LjE1NyAzMC41MTIgMzAgMjcuNDMzIDMwYy0zLjA3IDAtNS45MjMtMS44NDMtNi4zNzItNC4xMjYtLjQ1OC0yLjI4NSAxLjY2NS00LjEzNiA0Ljc0My00LjEzNi42NDcgMCAxLjI4My4wODQgMS44OS4yMzQuMzM4LjA4Ni42MzcuMTguOTM4LjMwMi44Ny0uMDItLjEwNC0yLjI5NC0xLjgzNS0xMi4yMy0yLjEzNC0xMi4zMDIgMy4wNi0xLjg3IDguNzY4LTIuNzUyIDUuNzA4LS44ODUuMDc2IDQuODItMy42NSAzLjg0NC0zLjcyNC0uOTg3LTQuNjUtNy4xNTMuMjYzIDE0LjczOHptLTE2Ljk5OCA1Ljk5QzE1LjYzIDM0LjE0OCAxMy41MDcgMzYgMTAuNDQgMzZjLTMuMDcgMC01LjkyMi0xLjg1Mi02LjM4LTQuMTM2LS40NDgtMi4yODQgMS42NzQtNC4xMzUgNC43NS00LjEzNSAxLjAwMyAwIDEuOTc1LjE5NiAyLjg1NS41NDMuODIyLS4wNTUtLjE1LTIuMzc3LTEuODYyLTEyLjIyOC0yLjEzMy0xMi4zMDMgMy4wNi0xLjg3IDguNzY0LTIuNzUzIDUuNzA2LS44OTQuMDc2IDQuODItMy42NDggMy44MzQtMy43MjQtLjk4Ny00LjY1LTcuMTUyLjI2MiAxNC43Mzh6IiBpZD0iYSIvPjwvZGVmcz48ZyBmaWxsPSJub25lIiBmaWxsLXJ1bGU9ImV2ZW5vZGQiPjx1c2UgZmlsbD0iI0ZGRiIgeGxpbms6aHJlZj0iI2EiLz48cGF0aCBzdHJva2Utb3BhY2l0eT0iLjEiIHN0cm9rZT0iIzAwMCIgZD0iTTI4LjQ1NiAyMS42NzVjLS4wMS0uMzEyLS4wODctLjgyNS0uMjU2LTEuNzAyLS4wOTYtLjQ5NS0uNjEyLTMuMDIyLS43NTMtMy43My0uMzk1LTEuOTgtLjc2LTMuOTItMS4xNDItNi4xMTMtLjczMi00LjIyMy0uNjkzLTYuMDUuMzQ0LTYuNTI3LjUtLjIzIDEuMDYtLjA4IDEuODQuMzUuNDE0LjIyNyAyLjE4MiAxLjM2NSAyLjA3IDEuMjk2IDEuOTk0IDEuMjQyIDMuNDY0IDEuNzc0IDQuOTMgMS41NDggMS41MjYtLjIzNyAyLjUwNC0uMDYgMi44NzYuNjE4LjM0OC42MzUuMDE1IDEuNDE2LS43MyAyLjE4LTEuNDcyIDEuNTE2LTMuOTc1IDIuNTE0LTUuODQ4IDIuMDIzLS44MjItLjIyLTEuMjM4LS40NjUtMi4zOC0xLjI2N2wtLjA5NS0uMDY2Yy4wNDcuNTkzLjI2NCAxLjc0LjcxNyAzLjgwMy4yOTQgMS4zMzYgMi4wOCA5LjE4NyAyLjYzNyAxMS42NzRsLjAwMi4wMTJjLjUyOCAyLjYzNy0xLjg3MyA0LjcyNC01LjIzNiA0LjcyNC0zLjI5IDAtNi4zNjMtMS45ODgtNi44NjItNC41MjgtLjUzLTIuNjQgMS44NzMtNC43MzQgNS4yMzMtNC43MzQuNjcyIDAgMS4zNDcuMDg1IDIuMDE0LjI1LjIyNy4wNTcuNDM2LjExOC42MzYuMTg3em0tMTYuOTk2IDUuOTljLS4wMS0uMzE4LS4wOS0uODM4LS4yNjYtMS43MzctLjA5LS40Ni0uNTk1LTIuOTM3LS43NTMtMy43MjctLjM5LTEuOTYtLjc1LTMuODktMS4xMy02LjA3LS43MzItNC4yMjMtLjY5Mi02LjA1LjM0NC02LjUyNi41MDItLjIzIDEuMDYtLjA4MiAxLjg0LjM1LjQxNS4yMjcgMi4xODIgMS4zNjQgMi4wNyAxLjI5NSAxLjk5MyAxLjI0MiAzLjQ2MiAxLjc3NCA0LjkyNiAxLjU0OCAxLjUyNS0uMjQgMi41MDQtLjA2NCAyLjg3Ni42MTQuMzQ4LjYzNS4wMTUgMS40MTUtLjcyOCAyLjE4LTEuNDc0IDEuNTE3LTMuOTc3IDIuNTEzLTUuODQ3IDIuMDE3LS44Mi0uMjItMS4yMzYtLjQ2NC0yLjM3OC0xLjI2N2wtLjA5NS0uMDY1Yy4wNDcuNTkzLjI2NCAxLjc0LjcxNyAzLjgwMi4yOTQgMS4zMzcgMi4wNzggOS4xOSAyLjYzNiAxMS42NzVsLjAwMy4wMTNjLjUxNyAyLjYzOC0xLjg4NCA0LjczMi01LjIzNCA0LjczMi0zLjI4NyAwLTYuMzYtMS45OTMtNi44Ny00LjU0LS41Mi0yLjY0IDEuODg0LTQuNzMgNS4yNC00LjczLjkwNSAwIDEuODAzLjE1IDIuNjUuNDM2eiIvPjwvZz48L3N2Zz4=';
/**
* Icon svg to be displayed in the category menu, encoded as a data URI.
* @type {string}
*/
// eslint-disable-next-line max-len
const menuIconURI = 'data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjAiIGhlaWdodD0iMjAiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PHBhdGggZD0iTTE2LjA5IDEyLjkzN2MuMjI4IDEuMTQxLS44MzMgMi4wNjMtMi4zNzMgMi4wNjMtMS41MzUgMC0yLjk2Mi0uOTIyLTMuMTg2LTIuMDYzLS4yMy0xLjE0Mi44MzMtMi4wNjggMi4zNzItMi4wNjguMzIzIDAgLjY0MS4wNDIuOTQ1LjExN2EzLjUgMy41IDAgMCAxIC40NjguMTUxYy40MzUtLjAxLS4wNTItMS4xNDctLjkxNy02LjExNC0xLjA2Ny02LjE1MiAxLjUzLS45MzUgNC4zODQtMS4zNzcgMi44NTQtLjQ0Mi4wMzggMi40MS0xLjgyNSAxLjkyMi0xLjg2Mi0uNDkzLTIuMzI1LTMuNTc3LjEzMiA3LjM3ek03LjQ2IDguNTYzYy0xLjg2Mi0uNDkzLTIuMzI1LTMuNTc2LjEzIDcuMzdDNy44MTYgMTcuMDczIDYuNzU0IDE4IDUuMjIgMThjLTEuNTM1IDAtMi45NjEtLjkyNi0zLjE5LTIuMDY4LS4yMjQtMS4xNDIuODM3LTIuMDY3IDIuMzc1LTIuMDY3LjUwMSAwIC45ODcuMDk4IDEuNDI3LjI3Mi40MTItLjAyOC0uMDc0LTEuMTg5LS45My02LjExNEMzLjgzNCAxLjg3IDYuNDMgNy4wODcgOS4yODIgNi42NDZjMi44NTQtLjQ0Ny4wMzggMi40MS0xLjgyMyAxLjkxN3oiIGZpbGw9IiM1NzVFNzUiIGZpbGwtcnVsZT0iZXZlbm9kZCIvPjwvc3ZnPg==';
/**
* Class for the motion-related blocks in Scratch 3.0
* @param {Runtime} runtime - the runtime instantiating this block package.
* @constructor
*/
class Scratch3VideoSensingBlocks {
constructor (runtime) {
/**
* The runtime instantiating this block package.
* @type {Runtime}
*/
this.runtime = runtime;
this.detect = new VideoMotion();
this._lastUpdate = null;
this._skinId = -1;
this._skin = null;
this._drawable = -1;
this._setupVideo();
this._setupSampleCanvas();
this._setupPreview();
this._loop();
}
static get INTERVAL () {
return 33;
}
static get DIMENSIONS () {
return [480, 360];
}
static get ORDER () {
return 1;
}
_setupVideo () {
this._video = document.createElement('video');
navigator.getUserMedia({
audio: false,
video: {
width: {min: 480, ideal: 640},
height: {min: 360, ideal: 480}
}
}, stream => {
this._video.src = window.URL.createObjectURL(stream);
// Hint to the stream that it should load. A standard way to do this
// is to add the video tag to the DOM. Since this extension wants to
// hide the video tag and instead render a sample of the stream into
// the WebGL-rendered Scratch canvas, another hint like this one is
// needed.
this._track = stream.getTracks()[0];
}, err => {
// @todo Properly handle errors
log(err);
});
}
_setupSampleCanvas () {
// Create low-resolution image to sample video for analysis and preview
const canvas = this._sampleCanvas = document.createElement('canvas');
canvas.width = Scratch3VideoSensingBlocks.DIMENSIONS[0];
canvas.height = Scratch3VideoSensingBlocks.DIMENSIONS[1];
this._sampleContext = canvas.getContext('2d');
}
_setupPreview () {
if (this._skinId !== -1) return;
if (this._skin !== null) return;
if (this._drawable !== -1) return;
if (!this.runtime.renderer) return;
this._skinId = this.runtime.renderer.createPenSkin();
this._skin = this.runtime.renderer._allSkins[this._skinId];
this._drawable = this.runtime.renderer.createDrawable();
this.runtime.renderer.setDrawableOrder(
this._drawable,
Scratch3VideoSensingBlocks.ORDER
);
this.runtime.renderer.updateDrawableProperties(this._drawable, {
skinId: this._skinId
});
}
_loop () {
setTimeout(this._loop.bind(this), this.runtime.currentStepTime);
// Ensure video stream is established
if (!this._video) return;
if (!this._track) return;
if (typeof this._video.videoWidth !== 'number') return;
if (typeof this._video.videoHeight !== 'number') return;
// Bail if the camera is *still* not ready
const nativeWidth = this._video.videoWidth;
const nativeHeight = this._video.videoHeight;
if (nativeWidth === 0) return;
if (nativeHeight === 0) return;
const ctx = this._sampleContext;
// Mirror
ctx.scale(-1, 1);
// Generate video thumbnail for analysis
ctx.drawImage(
this._video,
0,
0,
nativeWidth,
nativeHeight,
Scratch3VideoSensingBlocks.DIMENSIONS[0] * -1,
0,
Scratch3VideoSensingBlocks.DIMENSIONS[0],
Scratch3VideoSensingBlocks.DIMENSIONS[1]
);
// Restore the canvas transform
ctx.resetTransform();
// Render to preview layer
if (this._skin !== null) {
const xOffset = Scratch3VideoSensingBlocks.DIMENSIONS[0] / 2 * -1;
const yOffset = Scratch3VideoSensingBlocks.DIMENSIONS[1] / 2;
this._skin.drawStamp(this._sampleCanvas, xOffset, yOffset);
this.runtime.requestRedraw();
}
// Add frame to detector
const time = Date.now();
if (this._lastUpdate === null) this._lastUpdate = time;
const offset = time - this._lastUpdate;
if (offset > Scratch3VideoSensingBlocks.INTERVAL) {
this._lastUpdate = time;
const data = ctx.getImageData(
0, 0, Scratch3VideoSensingBlocks.DIMENSIONS[0], Scratch3VideoSensingBlocks.DIMENSIONS[1]
);
this.detect.addFrame(data.data);
}
}
/**
* Create data for a menu in scratch-blocks format, consisting of an array of objects with text and
* value properties. The text is a translated string, and the value is one-indexed.
* @param {object[]} info - An array of info objects each having a name property.
* @return {array} - An array of objects with text and value properties.
* @private
*/
_buildMenu (info) {
return info.map((entry, index) => {
const obj = {};
obj.text = entry.name;
obj.value = String(index + 1);
return obj;
});
}
/**
* The key to load & store a target's motion-related state.
* @type {string}
*/
static get STATE_KEY () {
return 'Scratch.videoSensing';
}
/**
* The default motion-related state, to be used when a target has no existing motion state.
* @type {MotionState}
*/
static get DEFAULT_MOTION_STATE () {
return {
motionFrameNumber: 0,
motionAmount: 0,
motionDirection: 0
};
}
/**
* @param {Target} target - collect motion state for this target.
* @returns {MotionState} the mutable motion state associated with that target. This will be created if necessary.
* @private
*/
_getMotionState (target) {
let motionState = target.getCustomState(Scratch3VideoSensingBlocks.STATE_KEY);
if (!motionState) {
motionState = Clone.simple(Scratch3VideoSensingBlocks.DEFAULT_MOTION_STATE);
target.setCustomState(Scratch3VideoSensingBlocks.STATE_KEY, motionState);
}
return motionState;
}
/**
* An array of info on the motion/direction options, used to build the MOTION_DIRECTION menu.
* @type {object[]} an array of objects.
* @param {string} name - the translatable name to display in the menu.
*/
get MOTION_DIRECTION_INFO () {
return [
{
name: 'motion'
},
{
name: 'direction'
}
];
}
/**
* An array of info on the stage/sprite options, used to build the STAGE_SPRITE menu.
* @type {object[]} an array of objects.
* @param {string} name - the translatable name to display in the menu.
*/
get STAGE_SPRITE_INFO () {
return [
{
name: 'stage'
},
{
name: 'sprite'
}
];
}
/**
* @returns {object} metadata for this extension and its blocks.
*/
getInfo () {
return {
id: 'videoSensing',
name: 'Video Sensing',
menuIconURI: menuIconURI,
blockIconURI: blockIconURI,
blocks: [
{
opcode: 'videoOn',
blockType: BlockType.REPORTER,
text: 'video [MOTION_DIRECTION] on [STAGE_SPRITE]',
arguments: {
MOTION_DIRECTION: {
type: ArgumentType.NUMBER,
menu: 'MOTION_DIRECTION',
defaultValue: 1
},
STAGE_SPRITE: {
type: ArgumentType.NUMBER,
menu: 'STAGE_SPRITE',
defaultValue: 1
}
}
}
],
menus: {
MOTION_DIRECTION: this._buildMenu(this.MOTION_DIRECTION_INFO),
STAGE_SPRITE: this._buildMenu(this.STAGE_SPRITE_INFO)
}
};
}
videoOn (args, util) {
this.detect.analyzeFrame();
let state = this.detect;
if (Number(args.STAGE_SPRITE) === 2) {
const drawable = this.runtime.renderer._allDrawables[util.target.drawableID];
state = this._getMotionState(util.target);
this.detect.getLocalMotion(drawable, state);
}
if (Number(args.MOTION_DIRECTION) === 1) {
return state.motionAmount;
}
return state.motionDirection;
}
/**
* Check if the stack timer needs initialization.
* @param {object} util - utility object provided by the runtime.
* @return {boolean} - true if the stack timer needs to be initialized.
* @private
*/
_stackTimerNeedsInit (util) {
return !util.stackFrame.timer;
}
/**
* Start the stack timer and then yield the thread if necessary.
* @param {object} util - utility object provided by the runtime.
* @param {number} duration - a duration in seconds to set the timer for.
* @private
*/
_startStackTimer (util, duration) {
util.stackFrame.timer = new Timer();
util.stackFrame.timer.start();
util.stackFrame.duration = duration;
util.yield();
}
/**
* Check the stack timer, and if its time is not up yet, yield the thread.
* @param {object} util - utility object provided by the runtime.
* @private
*/
_checkStackTimer (util) {
const timeElapsed = util.stackFrame.timer.timeElapsed();
if (timeElapsed < util.stackFrame.duration * 1000) {
util.yield();
}
}
}
module.exports = Scratch3VideoSensingBlocks;
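Not part of this commit, but for reference: a minimal sketch of the same camera setup on the promise-based navigator.mediaDevices.getUserMedia API and the srcObject property, which browsers now favor over the callback-style navigator.getUserMedia and window.URL.createObjectURL used in _setupVideo above (the constraint object is identical):

// Sketch only - assumes a browser with navigator.mediaDevices and
// HTMLMediaElement.srcObject support.
const setupVideo = () => {
    const video = document.createElement('video');
    navigator.mediaDevices.getUserMedia({
        audio: false,
        video: {
            width: {min: 480, ideal: 640},
            height: {min: 360, ideal: 480}
        }
    }).then(stream => {
        // Attaching the stream directly replaces the deprecated object URL.
        video.srcObject = stream;
        return video.play();
    }).catch(err => {
        // @todo Properly handle errors (same todo as _setupVideo).
        console.warn(err);
    });
    return video;
};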


@@ -0,0 +1,248 @@
/**
* lib.js
*
* Tony Hwang and John Maloney, January 2011
* Michael "Z" Goddard, March 2018
*
* Video motion sensing primitives.
*/
const TO_DEGREE = 180 / Math.PI;
const WIDTH = 480;
const HEIGHT = 360;
// chosen empirically to give a range of roughly 0-100
const AMOUNT_SCALE = 100;
// note 2e-4 * activePixelNum is an experimentally tuned threshold for my
// logitech Pro 9000 webcam - TTH
const LOCAL_AMOUNT_SCALE = AMOUNT_SCALE * 2e-4;
const THRESHOLD = 10;
const WINSIZE = 8;
const LOCAL_MAX_AMOUNT = 100;
const LOCAL_THRESHOLD = THRESHOLD / 3;
const STATE_KEY = 'Scratch.videoSensing';
class VideoMotion {
constructor () {
this.frameNumber = 0;
this.motionAmount = 0;
this.motionDirection = 0;
this.analysisDone = false;
this.curr = null;
this.prev = null;
this._arrays = new ArrayBuffer(WIDTH * HEIGHT * 2 * 1);
this._curr = new Uint8ClampedArray(this._arrays, WIDTH * HEIGHT * 0 * 1, WIDTH * HEIGHT);
this._prev = new Uint8ClampedArray(this._arrays, WIDTH * HEIGHT * 1 * 1, WIDTH * HEIGHT);
}
reset () {
this.prev = this.curr = null;
this.motionAmount = this.motionDirection = 0;
this.analysisDone = true;
const targets = this.runtime.targets;
for (let i = 0; i < targets.length; i++) {
targets[i].getCustomState(STATE_KEY).motionAmount = 0;
targets[i].getCustomState(STATE_KEY).motionDirection = 0;
}
}
addFrame (source) {
this.frameNumber++;
this.prev = this.curr;
this.curr = new Uint32Array(source.buffer.slice());
const _tmp = this._prev;
this._prev = this._curr;
this._curr = _tmp;
for (let i = 0; i < this.curr.length; i++) {
this._curr[i] = this.curr[i] & 0xff;
}
this.analysisDone = false;
}
analyzeFrame () {
if (!this.curr || !this.prev) {
this.motionAmount = this.motionDirection = -1;
// don't have two frames to analyze yet
return;
}
const {
_curr: curr,
_prev: prev
} = this;
const winStep = (WINSIZE * 2) + 1;
const wmax = WIDTH - WINSIZE - 1;
const hmax = HEIGHT - WINSIZE - 1;
let uu = 0;
let vv = 0;
let n = 0;
for (let i = WINSIZE + 1; i < hmax; i += winStep) {
for (let j = WINSIZE + 1; j < wmax; j += winStep) {
let A2 = 0;
let A1B2 = 0;
let B1 = 0;
let C1 = 0;
let C2 = 0;
let address = ((i - WINSIZE) * WIDTH) + j - WINSIZE;
let nextAddress = address + winStep;
const maxAddress = ((i + WINSIZE) * WIDTH) + j + WINSIZE;
for (; address <= maxAddress; address += WIDTH - winStep, nextAddress += WIDTH) {
for (; address <= nextAddress; address += 1) {
const gradT = ((prev[address]) - (curr[address]));
const gradX = ((curr[address - 1]) - (curr[address + 1]));
const gradY = ((curr[address - WIDTH]) - (curr[address + WIDTH]));
A2 += gradX * gradX;
A1B2 += gradX * gradY;
B1 += gradY * gradY;
C2 += gradX * gradT;
C1 += gradY * gradT;
}
}
const delta = ((A1B2 * A1B2) - (A2 * B1));
let u = 0;
let v = 0;
if (delta) {
// system is not singular - solve by Cramer's rule
const deltaX = -((C1 * A1B2) - (C2 * B1));
const deltaY = -((A1B2 * C2) - (A2 * C1));
const Idelta = 8 / delta;
u = deltaX * Idelta;
v = deltaY * Idelta;
} else {
// singular system - find optical flow in gradient direction
const Norm = ((A1B2 + A2) * (A1B2 + A2)) + ((B1 + A1B2) * (B1 + A1B2));
if (Norm) {
const IGradNorm = 8 / Norm;
const temp = -(C1 + C2) * IGradNorm;
u = (A1B2 + A2) * temp;
v = (B1 + A1B2) * temp;
}
}
if (-winStep < u && u < winStep && -winStep < v && v < winStep) {
uu += u;
vv += v;
n++;
}
}
}
uu /= n;
vv /= n;
this.motionAmount = Math.round(AMOUNT_SCALE * Math.hypot(uu, vv));
if (this.motionAmount > THRESHOLD) {
// Scratch direction
this.motionDirection = (((Math.atan2(vv, uu) * TO_DEGREE) + 270) % 360) - 180;
}
this.analysisDone = true;
}
getLocalMotion (drawable, state) {
if (!this.curr || !this.prev) {
state.motionAmount = state.motionDirection = -1;
// don't have two frames to analyze yet
return;
}
if (state.motionFrameNumber !== this.frameNumber) {
const {
_prev: prev,
_curr: curr
} = this;
const boundingRect = drawable.getFastBounds();
const xmin = Math.floor(boundingRect.left + (WIDTH / 2));
const xmax = Math.floor(boundingRect.right + (WIDTH / 2));
const ymin = Math.floor((HEIGHT / 2) - boundingRect.top);
const ymax = Math.floor((HEIGHT / 2) - boundingRect.bottom);
let A2 = 0;
let A1B2 = 0;
let B1 = 0;
let C1 = 0;
let C2 = 0;
let scaleFactor = 0;
const position = [0, 0, 0];
for (let i = ymin; i < ymax; i++) {
for (let j = xmin; j < xmax; j++) {
position[0] = j - (WIDTH / 2);
position[1] = (HEIGHT / 2) - i;
if (
j > 0 && (j < WIDTH - 1) &&
i > 0 && (i < HEIGHT - 1) &&
drawable.isTouching(position)
) {
const address = (i * WIDTH) + j;
const gradT = ((prev[address]) - (curr[address]));
const gradX = ((curr[address - 1]) - (curr[address + 1]));
const gradY = ((curr[address - WIDTH]) - (curr[address + WIDTH]));
A2 += gradX * gradX;
A1B2 += gradX * gradY;
B1 += gradY * gradY;
C2 += gradX * gradT;
C1 += gradY * gradT;
scaleFactor++;
}
}
}
const delta = ((A1B2 * A1B2) - (A2 * B1));
let u = 0;
let v = 0;
if (delta) {
// system is not singular - solve by Cramer's rule
const deltaX = -((C1 * A1B2) - (C2 * B1));
const deltaY = -((A1B2 * C2) - (A2 * C1));
const Idelta = 8 / delta;
u = deltaX * Idelta;
v = deltaY * Idelta;
} else {
// singular system - find optical flow in gradient direction
const Norm = ((A1B2 + A2) * (A1B2 + A2)) + ((B1 + A1B2) * (B1 + A1B2));
if (Norm) {
const IGradNorm = 8 / Norm;
const temp = -(C1 + C2) * IGradNorm;
u = (A1B2 + A2) * temp;
v = (B1 + A1B2) * temp;
}
}
let activePixelNum = 0;
if (scaleFactor) {
// store the area of the sprite in pixels
activePixelNum = scaleFactor;
scaleFactor /= (2 * WINSIZE * 2 * WINSIZE);
u = u / scaleFactor;
v = v / scaleFactor;
}
state.motionAmount = Math.round(LOCAL_AMOUNT_SCALE * activePixelNum * Math.hypot(u, v));
if (state.motionAmount > LOCAL_MAX_AMOUNT) {
// clip all magnitudes greater than 100
state.motionAmount = LOCAL_MAX_AMOUNT;
}
if (state.motionAmount > LOCAL_THRESHOLD) {
// Scratch direction
state.motionDirection = (((Math.atan2(v, u) * TO_DEGREE) + 270) % 360) - 180;
}
state.motionFrameNumber = this.frameNumber;
}
}
}
module.exports = VideoMotion;
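Not part of the diff, but a short summary for readers new to the algorithm: each analysis window in analyzeFrame (and each sprite's bounding box in getLocalMotion) accumulates the entries of the Lucas-Kanade least-squares system for optical flow. With g_x, g_y, g_t the unnormalized pixel differences gradX, gradY, gradT from the inner loop,

A2 = \sum g_x^2, \quad A1B2 = \sum g_x g_y, \quad B1 = \sum g_y^2, \quad C2 = \sum g_x g_t, \quad C1 = \sum g_y g_t,

\begin{bmatrix} A2 & A1B2 \\ A1B2 & B1 \end{bmatrix} \begin{bmatrix} u \\ v \end{bmatrix} \approx \begin{bmatrix} C2 \\ C1 \end{bmatrix}

The delta/deltaX/deltaY branch solves this 2x2 system by Cramer's rule (the factor of 8 and the sign conventions absorb the unnormalized differences and Scratch's coordinate orientation); when the determinant is zero, the Norm branch falls back to a flow estimate along the gradient direction. Per-window estimates bounded by ±winStep are then averaged into motionAmount and motionDirection.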


@@ -0,0 +1,263 @@
const WIDTH = 480;
const HEIGHT = 360;
const WINSIZE = 8;
const AMOUNT_SCALE = 100;
const THRESHOLD = 10;
const OUTPUT = {
INPUT: -1,
XYT: 0,
XYT_CELL: 1,
XY: 2,
XY_CELL: 3,
T: 4,
T_CELL: 5,
C: 6,
AB: 7,
UV: 8
};
class VideoMotionView {
constructor (motion, output = OUTPUT.XYT) {
this.motion = motion;
const canvas = this.canvas = document.createElement('canvas');
canvas.width = WIDTH;
canvas.height = HEIGHT;
this.context = canvas.getContext('2d');
this.output = output;
this.buffer = new Uint32Array(WIDTH * HEIGHT);
}
static get OUTPUT () {
return OUTPUT;
}
_eachAddress (xStart, yStart, xStop, yStop, fn) {
for (let i = yStart; i < yStop; i++) {
for (let j = xStart; j < xStop; j++) {
const address = (i * WIDTH) + j;
fn(address, j, i);
}
}
}
_eachCell (xStart, yStart, xStop, yStop, xStep, yStep, fn) {
const xStep2 = (xStep / 2) | 0;
const yStep2 = (yStep / 2) | 0;
for (let i = yStart; i < yStop; i += yStep) {
for (let j = xStart; j < xStop; j += xStep) {
fn(
_fn => this._eachAddress(j - xStep2 - 1, i - yStep2 - 1, j + xStep2, i + yStep2, _fn),
j - xStep2 - 1,
i - yStep2 - 1,
j + xStep2,
i + yStep2
);
}
}
}
_grads (address) {
const {curr, prev} = this.motion;
const gradX = (curr[address - 1] & 0xff) - (curr[address + 1] & 0xff);
const gradY = (curr[address - WIDTH] & 0xff) - (curr[address + WIDTH] & 0xff);
const gradT = (prev[address] & 0xff) - (curr[address] & 0xff);
return {gradX, gradY, gradT};
}
draw () {
if (!(this.motion.prev && this.motion.curr)) {
return;
}
const {buffer} = this;
if (this.output === OUTPUT.INPUT) {
const {curr} = this.motion;
this._eachAddress(1, 1, WIDTH - 1, HEIGHT - 1, address => {
buffer[address] = curr[address];
});
}
if (this.output === OUTPUT.XYT) {
this._eachAddress(1, 1, WIDTH - 1, HEIGHT - 1, address => {
const {gradX, gradY, gradT} = this._grads(address);
const over1 = gradT / 0xcf;
buffer[address] =
(0xff << 24) +
(Math.floor((((gradY * over1) & 0xff) + 0xff) / 2) << 8) +
Math.floor((((gradX * over1) & 0xff) + 0xff) / 2);
});
}
if (this.output === OUTPUT.XYT_CELL) {
const winStep = (WINSIZE * 2) + 1;
const wmax = WIDTH - WINSIZE - 1;
const hmax = HEIGHT - WINSIZE - 1;
this._eachCell(WINSIZE + 1, WINSIZE + 1, wmax, hmax, winStep, winStep, eachAddress => {
let C1 = 0;
let C2 = 0;
let n = 0;
eachAddress(address => {
const {gradX, gradY, gradT} = this._grads(address);
C2 += (Math.max(Math.min(gradX / 0x0f, 1), -1)) * (gradT / 0xff);
C1 += (Math.max(Math.min(gradY / 0x0f, 1), -1)) * (gradT / 0xff);
n += 1;
});
C1 /= n;
C2 /= n;
C1 = Math.log(C1 + (1 * Math.sign(C1))) / Math.log(2);
C2 = Math.log(C2 + (1 * Math.sign(C2))) / Math.log(2);
eachAddress(address => {
buffer[address] = (0xff << 24) +
(((((C1 * 0x7f) | 0) + 0x80) << 8) & 0xff00) +
(((((C2 * 0x7f) | 0) + 0x80) << 0) & 0xff);
});
});
}
if (this.output === OUTPUT.XY) {
this._eachAddress(1, 1, WIDTH - 1, HEIGHT - 1, address => {
const {gradX, gradY} = this._grads(address);
buffer[address] = (0xff << 24) + (((gradY + 0xff) / 2) << 8) + ((gradX + 0xff) / 2);
});
}
if (this.output === OUTPUT.XY_CELL) {
const winStep = (WINSIZE * 2) + 1;
const wmax = WIDTH - WINSIZE - 1;
const hmax = HEIGHT - WINSIZE - 1;
this._eachCell(WINSIZE + 1, WINSIZE + 1, wmax, hmax, winStep, winStep, eachAddress => {
let C1 = 0;
let C2 = 0;
let n = 0;
eachAddress(address => {
const {gradX, gradY} = this._grads(address);
C2 += Math.max(Math.min(gradX / 0x1f, 1), -1);
C1 += Math.max(Math.min(gradY / 0x1f, 1), -1);
n += 1;
});
C1 /= n;
C2 /= n;
C1 = Math.log(C1 + (1 * Math.sign(C1))) / Math.log(2);
C2 = Math.log(C2 + (1 * Math.sign(C2))) / Math.log(2);
eachAddress(address => {
buffer[address] = (0xff << 24) +
(((((C1 * 0x7f) | 0) + 0x80) << 8) & 0xff00) +
(((((C2 * 0x7f) | 0) + 0x80) << 0) & 0xff);
});
});
} else if (this.output === OUTPUT.T) {
this._eachAddress(1, 1, WIDTH - 1, HEIGHT - 1, address => {
const {gradT} = this._grads(address);
buffer[address] = (0xff << 24) + ((gradT + 0xff) / 2 << 16);
});
}
if (this.output === OUTPUT.T_CELL) {
const winStep = (WINSIZE * 2) + 1;
const wmax = WIDTH - WINSIZE - 1;
const hmax = HEIGHT - WINSIZE - 1;
this._eachCell(WINSIZE + 1, WINSIZE + 1, wmax, hmax, winStep, winStep, eachAddress => {
let T = 0;
let n = 0;
eachAddress(address => {
const {gradT} = this._grads(address);
T += gradT / 0xff;
n += 1;
});
T /= n;
eachAddress(address => {
buffer[address] = (0xff << 24) +
(((((T * 0x7f) | 0) + 0x80) << 16) & 0xff0000);
});
});
} else if (this.output === OUTPUT.C) {
this._eachAddress(1, 1, WIDTH - 1, HEIGHT - 1, address => {
const {gradX, gradY, gradT} = this._grads(address);
buffer[address] =
(0xff << 24) +
((gradY * gradT) << 8) +
(gradX * gradT);
});
} else if (this.output === OUTPUT.AB) {
this._eachAddress(1, 1, WIDTH - 1, HEIGHT - 1, address => {
const {gradX, gradY} = this._grads(address);
buffer[address] =
(0xff << 24) +
((gradX * gradY) << 16) +
((gradY * gradY) << 8) +
(gradX * gradX);
});
} else if (this.output === OUTPUT.UV) {
const winStep = (WINSIZE * 2) + 1;
const wmax = WIDTH - WINSIZE - 1;
const hmax = HEIGHT - WINSIZE - 1;
this._eachCell(WINSIZE + 1, WINSIZE + 1, wmax, hmax, winStep, winStep, eachAddress => {
let A2 = 0;
let A1B2 = 0;
let B1 = 0;
let C2 = 0;
let C1 = 0;
eachAddress(address => {
const {gradX, gradY, gradT} = this._grads(address);
A2 += gradX * gradX;
A1B2 += gradX * gradY;
B1 += gradY * gradY;
C2 += gradX * gradT;
C1 += gradY * gradT;
});
const delta = ((A1B2 * A1B2) - (A2 * B1));
let u = 0;
let v = 0;
if (delta) {
/* system is not singular - solve by Cramer's rule */
const deltaX = -((C1 * A1B2) - (C2 * B1));
const deltaY = -((A1B2 * C2) - (A2 * C1));
const Idelta = 8 / delta;
u = deltaX * Idelta;
v = deltaY * Idelta;
} else {
/* singular system - find optical flow in gradient direction */
const Norm = ((A1B2 + A2) * (A1B2 + A2)) + ((B1 + A1B2) * (B1 + A1B2));
if (Norm) {
const IGradNorm = 8 / Norm;
const temp = -(C1 + C2) * IGradNorm;
u = (A1B2 + A2) * temp;
v = (B1 + A1B2) * temp;
}
}
const inRange = (-winStep < u && u < winStep && -winStep < v && v < winStep);
const hypot = Math.hypot(u, v);
const amount = AMOUNT_SCALE * hypot;
eachAddress(address => {
buffer[address] =
(0xff << 24) +
(inRange && amount > THRESHOLD ?
(((((v / winStep) + 1) / 2 * 0xff) << 8) & 0xff00) +
(((((u / winStep) + 1) / 2 * 0xff) << 0) & 0xff) :
0x8080
);
});
});
}
const data = new ImageData(new Uint8ClampedArray(this.buffer.buffer), WIDTH, HEIGHT);
this.context.putImageData(data, 0, 0);
}
}
module.exports = VideoMotionView;
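Not part of the diff: a minimal usage sketch of the view class, assuming 480x360 camera frames are already being sampled into an ImageData (the playground script in src/playground/motion.js below does this end to end):

// Assumes this runs next to lib.js and view.js, as debug.js does.
const VideoMotion = require('./lib');
const VideoMotionView = require('./view');

const motion = new VideoMotion();
const view = new VideoMotionView(motion, VideoMotionView.OUTPUT.UV);
document.body.appendChild(view.canvas);

// Call once per sampled frame; imageData is a 480x360 ImageData.
const step = imageData => {
    motion.addFrame(imageData.data);
    motion.analyzeFrame();
    // draw() is a no-op until two frames have been added.
    view.draw();
};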


@@ -0,0 +1,18 @@
<!DOCTYPE html>
<html>
<head>
<title>Video Motion Test Playground</title>
</head>
<body>
<!-- FPS counter, Syntax highlighter, Blocks, Renderer -->
<script src="./vendor.js"></script>
<!-- Storage module -->
<script src="./scratch-storage.js"></script>
<!-- Stage rendering -->
<script src="./scratch-render.js"></script>
<!-- Extension -->
<script src="./motion-extension.js"></script>
<!-- Motion -->
<script src="./motion.js"></script>
</body>
</html>

src/playground/motion.js Normal file

@@ -0,0 +1,118 @@
(function () {
const video = document.createElement('video');
navigator.getUserMedia({
audio: false,
video: {
width: {min: 480, ideal: 640},
height: {min: 360, ideal: 480}
}
}, stream => {
video.autoplay = true;
video.src = window.URL.createObjectURL(stream);
// Get the track to hint to the browser the stream needs to be running
// even though we don't add the video tag to the DOM.
stream.getTracks();
video.addEventListener('play', () => {
video.width = video.videoWidth;
video.height = video.videoHeight;
});
}, err => {
/* eslint no-console:0 */
console.log(err);
});
const VideoMotion = window.Scratch3MotionDetect.VideoMotion;
const VideoMotionView = window.Scratch3MotionDetect.VideoMotionView;
// Create motion detector
const motion = new VideoMotion();
// Create debug views that will render different slices of how the detector
// uses a frame of input.
const OUTPUT = VideoMotionView.OUTPUT;
const outputKeys = Object.keys(OUTPUT);
const outputValues = Object.values(OUTPUT);
const views = outputValues
.map(output => new VideoMotionView(motion, output));
const view = views[0];
const defaultViews = [OUTPUT.INPUT, OUTPUT.XY_CELL, OUTPUT.T_CELL, OUTPUT.UV];
const activators = document.createElement('div');
activators.style.userSelect = 'none';
outputValues.forEach((output, index) => {
const checkboxLabel = document.createElement('label');
const checkbox = document.createElement('input');
checkbox.type = 'checkbox';
checkbox.checked = defaultViews.indexOf(output) !== -1;
const checkboxSpan = document.createElement('span');
checkboxSpan.innerText = outputKeys[index];
checkboxLabel.appendChild(checkbox);
checkboxLabel.appendChild(checkboxSpan);
const _view = views[index];
_view.canvas.style.display = checkbox.checked ? '' : 'none';
_view.active = checkbox.checked;
checkbox.onchange = event => {
_view.canvas.style.display = checkbox.checked ? '' : 'none';
_view.active = checkbox.checked;
event.preventDefault();
return false;
};
activators.appendChild(checkboxLabel);
});
document.body.appendChild(activators);
// Add a text line to display the time spent adding and analyzing a frame
// (in microseconds), the motion value, and the motion direction
const textEl = document.createElement('div');
document.body.appendChild(textEl);
let textTimer = Date.now();
// Add the motion debug views to the dom after the text line, so the text
// appears first.
views.forEach(_view => document.body.appendChild(_view.canvas));
// Create a temporary canvas the video will be drawn to so the video's
// bitmap data can be transformed into a TypedArray.
const tempCanvas = document.createElement('canvas');
tempCanvas.width = view.canvas.width;
tempCanvas.height = view.canvas.height;
const ctx = tempCanvas.getContext('2d');
const loop = function () {
const timeoutId = setTimeout(loop, 33);
try {
// Get the bitmap data for the video frame
ctx.scale(-1, 1);
ctx.drawImage(
video,
0, 0, video.width || video.clientWidth, video.height || video.clientHeight,
-480, 0, tempCanvas.width, tempCanvas.height
);
ctx.resetTransform();
const data = ctx.getImageData(0, 0, tempCanvas.width, tempCanvas.height);
const b = performance.now();
motion.addFrame(data.data);
motion.analyzeFrame();
if (Date.now() - textTimer > 250) {
const e = performance.now();
const analyzeDuration = ((e - b) * 1000).toFixed(0);
const motionAmount = motion.motionAmount.toFixed(1);
const motionDirection = motion.motionDirection.toFixed(1);
textEl.innerText = `${analyzeDuration} :: ${motionAmount} :: ${motionDirection}`;
textTimer = Date.now();
}
views.forEach(_view => _view.active && _view.draw());
} catch (error) {
/* eslint no-console:0 */
console.error(error.stack || error);
clearTimeout(timeoutId);
}
};
loop();
}());


@@ -562,6 +562,22 @@ const parseBlock = function (sb2block, addBroadcastMsg, getVariableId, extension
if (shadowObscured) {
fieldValue = 1;
}
} else if (expectedArg.inputOp === 'videoSensing.menu.MOTION_DIRECTION') {
if (shadowObscured) {
fieldValue = 1;
} else if (fieldValue === 'motion') {
fieldValue = 1;
} else if (fieldValue === 'direction') {
fieldValue = 2;
}
} else if (expectedArg.inputOp === 'videoSensing.menu.STAGE_SPRITE') {
if (shadowObscured) {
fieldValue = 2;
} else if (fieldValue === 'Stage') {
fieldValue = 1;
} else if (fieldValue === 'this sprite') {
fieldValue = 2;
}
} else if (shadowObscured) {
// Filled drop-down menu.
fieldValue = '';
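Not part of the diff: put together with the specMap entry below, this mapping means a Scratch 2.0 block like the following (the project.json shape is illustrative; the value mapping is taken from the code above) imports as a videoSensing.videoOn block:

// ['senseVideoMotion', 'motion', 'this sprite']   // Scratch 2.0 project.json (illustrative)
// becomes videoSensing.videoOn with
//   MOTION_DIRECTION = 1   // 'motion' -> 1, 'direction' -> 2
//   STAGE_SPRITE     = 2   // 'Stage'  -> 1, 'this sprite' -> 2
// Obscured shadow inputs fall back to 1 and 2 respectively.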


@@ -609,6 +609,21 @@ const specMap = {
}
]
},
'senseVideoMotion': {
opcode: 'videoSensing.videoOn',
argMap: [
{
type: 'input',
inputOp: 'videoSensing.menu.MOTION_DIRECTION',
inputName: 'MOTION_DIRECTION'
},
{
type: 'input',
inputOp: 'videoSensing.menu.STAGE_SPRITE',
inputName: 'STAGE_SPRITE'
}
]
},
'whenGreenFlag': {
opcode: 'event_whenflagclicked',
argMap: [
@@ -905,21 +920,21 @@ const specMap = {
argMap: [
]
},
// 'senseVideoMotion': {
// opcode: 'sensing_videoon',
// argMap: [
// {
// type: 'input',
// inputOp: 'sensing_videoonmenuone',
// inputName: 'VIDEOONMENU1'
// },
// {
// type: 'input',
// inputOp: 'sensing_videoonmenutwo',
// inputName: 'VIDEOONMENU2'
// }
// ]
// },
'setVideoState': {
opcode: 'sensing_videotoggle',
argMap: [


@@ -86,7 +86,8 @@ module.exports = [
'scratch-storage',
// Renderer
'scratch-render'
],
'motion-extension': './src/extensions/scratch3_video_sensing/debug'
},
output: {
path: path.resolve(__dirname, 'playground'),
@@ -98,6 +99,10 @@ module.exports = [
{
test: require.resolve('./src/index.js'),
loader: 'expose-loader?VirtualMachine'
},
{
test: require.resolve('./src/extensions/scratch3_video_sensing/debug.js'),
loader: 'expose-loader?Scratch3MotionDetect'
},
{
test: require.resolve('stats.js/build/stats.min.js'),
loader: 'script-loader'