Merge pull request #1016 from gnarf/io-video

Video IO Device Implementation
This commit is contained in:
Michael "Z" Goddard 2018-04-11 10:14:56 -04:00 committed by GitHub
commit 044370790c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 504 additions and 162 deletions

View file

@ -19,6 +19,7 @@ const DeviceManager = require('../io/deviceManager');
const Keyboard = require('../io/keyboard'); const Keyboard = require('../io/keyboard');
const Mouse = require('../io/mouse'); const Mouse = require('../io/mouse');
const MouseWheel = require('../io/mouseWheel'); const MouseWheel = require('../io/mouseWheel');
const Video = require('../io/video');
const defaultBlockPackages = { const defaultBlockPackages = {
scratch3_control: require('../blocks/scratch3_control'), scratch3_control: require('../blocks/scratch3_control'),
@ -248,7 +249,8 @@ class Runtime extends EventEmitter {
deviceManager: new DeviceManager(), deviceManager: new DeviceManager(),
keyboard: new Keyboard(this), keyboard: new Keyboard(this),
mouse: new Mouse(this), mouse: new Mouse(this),
mouseWheel: new MouseWheel(this) mouseWheel: new MouseWheel(this),
video: new Video(this)
}; };
/** /**

View file

@ -3,10 +3,26 @@ const Runtime = require('../../engine/runtime');
const ArgumentType = require('../../extension-support/argument-type'); const ArgumentType = require('../../extension-support/argument-type');
const BlockType = require('../../extension-support/block-type'); const BlockType = require('../../extension-support/block-type');
const Clone = require('../../util/clone'); const Clone = require('../../util/clone');
const log = require('../../util/log'); const Video = require('../../io/video');
const VideoMotion = require('./library'); const VideoMotion = require('./library');
/**
* States the video sensing activity can be set to.
* @readonly
* @enum {string}
*/
const VideoState = {
/** Video turned off. */
OFF: 'off',
/** Video turned on with default y axis mirroring. */
ON: 'on',
/** Video turned on without default y axis mirroring. */
ON_FLIPPED: 'on-flipped'
};
/** /**
* Class for the motion-related blocks in Scratch 3.0 * Class for the motion-related blocks in Scratch 3.0
* @param {Runtime} runtime - the runtime instantiating this block package. * @param {Runtime} runtime - the runtime instantiating this block package.
@ -34,39 +50,6 @@ class Scratch3VideoSensingBlocks {
*/ */
this._lastUpdate = null; this._lastUpdate = null;
/**
* Id representing a Scratch Renderer skin the video is rendered to for
* previewing.
* @type {number}
*/
this._skinId = -1;
/**
* The Scratch Renderer Skin object.
* @type {Skin}
*/
this._skin = null;
/**
* Id for a drawable using the video's skin that will render as a video
* preview.
* @type {Drawable}
*/
this._drawable = -1;
/**
* Canvas DOM element video is rendered to down or up sample to the
* expected resolution.
* @type {HTMLCanvasElement}
*/
this._sampleCanvas = null;
/**
* Canvas 2D Context to render to the _sampleCanvas member.
* @type {CanvasRenderingContext2D}
*/
this._sampleContext = null;
if (this.runtime.ioDevices) { if (this.runtime.ioDevices) {
// Clear target motion state values when the project starts. // Clear target motion state values when the project starts.
this.runtime.on(Runtime.PROJECT_RUN_START, this.reset.bind(this)); this.runtime.on(Runtime.PROJECT_RUN_START, this.reset.bind(this));
@ -74,9 +57,6 @@ class Scratch3VideoSensingBlocks {
// Boot up the video, canvas to down/up sample the video stream, the // Boot up the video, canvas to down/up sample the video stream, the
// preview skin and drawable, and kick off looping the analysis // preview skin and drawable, and kick off looping the analysis
// logic. // logic.
this._setupVideo();
this._setupSampleCanvas();
this._setupPreview();
this._loop(); this._loop();
} }
} }
@ -99,14 +79,6 @@ class Scratch3VideoSensingBlocks {
return [480, 360]; return [480, 360];
} }
/**
* Order preview drawable is inserted at in the renderer.
* @type {number}
*/
static get ORDER () {
return 1;
}
/** /**
* The key to load & store a target's motion-related state. * The key to load & store a target's motion-related state.
* @type {string} * @type {string}
@ -145,127 +117,29 @@ class Scratch3VideoSensingBlocks {
} }
} }
/**
* Setup a video element connected to a user media stream.
* @private
*/
_setupVideo () {
this._video = document.createElement('video');
navigator.getUserMedia({
audio: false,
video: {
width: {min: 480, ideal: 640},
height: {min: 360, ideal: 480}
}
}, stream => {
this._video.src = window.URL.createObjectURL(stream);
// Hint to the stream that it should load. A standard way to do this
// is add the video tag to the DOM. Since this extension wants to
// hide the video tag and instead render a sample of the stream into
// the webgl rendered Scratch canvas, another hint like this one is
// needed.
this._track = stream.getTracks()[0];
}, err => {
// @todo Properly handle errors
log(err);
});
}
/**
* Create a canvas to render the user media video to down/up sample to the
* needed resolution.
* @private
*/
_setupSampleCanvas () {
// Create low-resolution image to sample video for analysis and preview
const canvas = this._sampleCanvas = document.createElement('canvas');
canvas.width = Scratch3VideoSensingBlocks.DIMENSIONS[0];
canvas.height = Scratch3VideoSensingBlocks.DIMENSIONS[1];
this._sampleContext = canvas.getContext('2d');
}
/**
* Create a Scratch Renderer Skin and Drawable to preview the user media
* video stream.
* @private
*/
_setupPreview () {
if (this._skinId !== -1) return;
if (this._skin !== null) return;
if (this._drawable !== -1) return;
if (!this.runtime.renderer) return;
this._skinId = this.runtime.renderer.createPenSkin();
this._skin = this.runtime.renderer._allSkins[this._skinId];
this._drawable = this.runtime.renderer.createDrawable();
this.runtime.renderer.setDrawableOrder(
this._drawable,
Scratch3VideoSensingBlocks.ORDER
);
this.runtime.renderer.updateDrawableProperties(this._drawable, {
skinId: this._skinId
});
}
/** /**
* Occasionally step a loop to sample the video, stamp it to the preview * Occasionally step a loop to sample the video, stamp it to the preview
* skin, and add a TypedArray copy of the canvas's pixel data. * skin, and add a TypedArray copy of the canvas's pixel data.
* @private * @private
*/ */
_loop () { _loop () {
setTimeout(this._loop.bind(this), this.runtime.currentStepTime); setTimeout(this._loop.bind(this), Math.max(this.runtime.currentStepTime, Scratch3VideoSensingBlocks.INTERVAL));
// Ensure video stream is established
if (!this._video) return;
if (!this._track) return;
if (typeof this._video.videoWidth !== 'number') return;
if (typeof this._video.videoHeight !== 'number') return;
// Bail if the camera is *still* not ready
const nativeWidth = this._video.videoWidth;
const nativeHeight = this._video.videoHeight;
if (nativeWidth === 0) return;
if (nativeHeight === 0) return;
const ctx = this._sampleContext;
// Mirror
ctx.scale(-1, 1);
// Generate video thumbnail for analysis
ctx.drawImage(
this._video,
0,
0,
nativeWidth,
nativeHeight,
Scratch3VideoSensingBlocks.DIMENSIONS[0] * -1,
0,
Scratch3VideoSensingBlocks.DIMENSIONS[0],
Scratch3VideoSensingBlocks.DIMENSIONS[1]
);
// Restore the canvas transform
ctx.resetTransform();
// Render to preview layer
if (this._skin !== null) {
const xOffset = Scratch3VideoSensingBlocks.DIMENSIONS[0] / 2 * -1;
const yOffset = Scratch3VideoSensingBlocks.DIMENSIONS[1] / 2;
this._skin.drawStamp(this._sampleCanvas, xOffset, yOffset);
this.runtime.requestRedraw();
}
// Add frame to detector // Add frame to detector
const time = Date.now(); const time = Date.now();
if (this._lastUpdate === null) this._lastUpdate = time; if (this._lastUpdate === null) {
this._lastUpdate = time;
}
const offset = time - this._lastUpdate; const offset = time - this._lastUpdate;
if (offset > Scratch3VideoSensingBlocks.INTERVAL) { if (offset > Scratch3VideoSensingBlocks.INTERVAL) {
const frame = this.runtime.ioDevices.video.getFrame({
format: Video.FORMAT_IMAGE_DATA,
dimensions: Scratch3VideoSensingBlocks.DIMENSIONS
});
if (frame) {
this._lastUpdate = time; this._lastUpdate = time;
const data = ctx.getImageData( this.detect.addFrame(frame.data);
0, 0, Scratch3VideoSensingBlocks.DIMENSIONS[0], Scratch3VideoSensingBlocks.DIMENSIONS[1] }
);
this.detect.addFrame(data.data);
} }
} }
@ -282,7 +156,7 @@ class Scratch3VideoSensingBlocks {
return info.map((entry, index) => { return info.map((entry, index) => {
const obj = {}; const obj = {};
obj.text = entry.name; obj.text = entry.name;
obj.value = String(index + 1); obj.value = entry.value || String(index + 1);
return obj; return obj;
}); });
} }
@ -355,6 +229,38 @@ class Scratch3VideoSensingBlocks {
return 2; return 2;
} }
/**
* States the video sensing activity can be set to.
* @readonly
* @enum {string}
*/
static get VideoState () {
return VideoState;
}
/**
* An array of info on video state options for the "turn video [STATE]" block.
* @type {object[]} an array of objects
* @param {string} name - the translatable name to display in the video state menu
* @param {string} value - the serializable value stored in the block
*/
get VIDEO_STATE_INFO () {
return [
{
name: 'off',
value: VideoState.OFF
},
{
name: 'on',
value: VideoState.ON
},
{
name: 'on flipped',
value: VideoState.ON_FLIPPED
}
];
}
/** /**
* @returns {object} metadata for this extension and its blocks. * @returns {object} metadata for this extension and its blocks.
*/ */
@ -392,11 +298,33 @@ class Scratch3VideoSensingBlocks {
defaultValue: 10 defaultValue: 10
} }
} }
},
{
opcode: 'videoToggle',
text: 'turn video [VIDEO_STATE]',
arguments: {
VIDEO_STATE: {
type: ArgumentType.NUMBER,
menu: 'VIDEO_STATE',
defaultValue: VideoState.ON
}
}
},
{
opcode: 'setVideoTransparency',
text: 'set video transparency to [TRANSPARENCY]',
arguments: {
TRANSPARENCY: {
type: ArgumentType.NUMBER,
defaultValue: 0
}
}
} }
], ],
menus: { menus: {
MOTION_DIRECTION: this._buildMenu(this.MOTION_DIRECTION_INFO), MOTION_DIRECTION: this._buildMenu(this.MOTION_DIRECTION_INFO),
STAGE_SPRITE: this._buildMenu(this.STAGE_SPRITE_INFO) STAGE_SPRITE: this._buildMenu(this.STAGE_SPRITE_INFO),
VIDEO_STATE: this._buildMenu(this.VIDEO_STATE_INFO)
} }
}; };
} }
@ -449,6 +377,34 @@ class Scratch3VideoSensingBlocks {
const state = this._analyzeLocalMotion(util.target); const state = this._analyzeLocalMotion(util.target);
return state.motionAmount > Number(args.REFERENCE); return state.motionAmount > Number(args.REFERENCE);
} }
/**
* A scratch command block handle that configures the video state from
* passed arguments.
* @param {object} args - the block arguments
* @param {VideoState} args.VIDEO_STATE - the video state to set the device to
*/
videoToggle (args) {
const state = args.VIDEO_STATE;
if (state === VideoState.OFF) {
this.runtime.ioDevices.video.disableVideo();
} else {
this.runtime.ioDevices.video.enableVideo();
// Mirror if state is ON. Do not mirror if state is ON_FLIPPED.
this.runtime.ioDevices.video.mirror = state === VideoState.ON;
}
}
/**
* A scratch command block handle that configures the video preview's
* transparency from passed arguments.
* @param {object} args - the block arguments
* @param {number} args.TRANSPARENCY - the transparency to set the video
* preview to
*/
setVideoTransparency (args) {
this.runtime.ioDevices.video.setPreviewGhost(Number(args.TRANSPARENCY));
}
} }
module.exports = Scratch3VideoSensingBlocks; module.exports = Scratch3VideoSensingBlocks;

380
src/io/video.js Normal file
View file

@ -0,0 +1,380 @@
const log = require('../util/log');
class Video {
    /**
     * Video IO device. Manages a single getUserMedia camera stream, renders a
     * stage preview of it through the Scratch Renderer, and hands sampled
     * frames to consumers (e.g. the video sensing extension) via getFrame().
     * @param {Runtime} runtime - the runtime owning this IO device
     */
    constructor (runtime) {
        /**
         * Reference to the owning Runtime.
         * @type {!Runtime}
         */
        this.runtime = runtime;

        /**
         * Default mirror state used when a getFrame() caller does not request
         * one explicitly.
         * @type {boolean}
         */
        this.mirror = true;

        /**
         * Reuse a sampled frame for this many milliseconds before redrawing
         * from the video element.
         * @type {number}
         */
        this._frameCacheTimeout = 16;

        /**
         * DOM video element the camera stream is attached to, or null while
         * video is off.
         * @type {?HTMLVideoElement}
         * @private
         */
        this._video = null;

        /**
         * User media stream track; kept so the camera can be released by
         * stopping it on disable.
         * @private
         */
        this._track = null;

        /**
         * Canvas/context/cache records, one per (dimensions, mirror) pair
         * requested through getFrame().
         * @type {Array.<object>}
         * @private
         */
        this._workspace = [];

        /**
         * Id of the Scratch Renderer skin the preview is stamped to, or -1
         * before the preview has been created.
         * @type {number}
         * @private
         */
        this._skinId = -1;

        /**
         * The Scratch Renderer Skin object backing the preview.
         * @type {?Skin}
         * @private
         */
        this._skin = null;

        /**
         * Id of the drawable that renders the preview skin, or -1 before the
         * preview has been created.
         * @type {number}
         * @private
         */
        this._drawable = -1;

        /**
         * Last requested ghost (transparency) effect for the preview,
         * remembered so it survives disable/enable cycles.
         * @type {number}
         * @private
         */
        this._ghost = 0;
    }

    /**
     * getFrame() format that returns an ImageData object.
     * @type {string}
     */
    static get FORMAT_IMAGE_DATA () {
        return 'image-data';
    }

    /**
     * getFrame() format that returns the sampling canvas itself.
     * @type {string}
     */
    static get FORMAT_CANVAS () {
        return 'canvas';
    }

    /**
     * Dimensions the video stream is analyzed at after it is rendered to the
     * sample canvas.
     * @type {Array.<number>}
     */
    static get DIMENSIONS () {
        return [480, 360];
    }

    /**
     * Order the preview drawable is inserted at in the renderer.
     * @type {number}
     */
    static get ORDER () {
        return 1;
    }

    /**
     * Request video be enabled. Sets up video, creates video skin and enables
     * preview.
     *
     * ioDevices.video.requestVideo()
     *
     * @return {Promise.<Video>} resolves a promise to this IO device when
     * video is ready.
     */
    enableVideo () {
        this.enabled = true;
        return this._setupVideo();
    }

    /**
     * Disable video stream (turn video off).
     */
    disableVideo () {
        this.enabled = false;
        // If we have begun a setup process, call _teardown after it completes.
        if (this._singleSetup) {
            this._singleSetup
                .then(this._teardown.bind(this))
                .catch(err => this.onError(err));
        }
    }

    /**
     * Async part of disableVideo.
     * @private
     */
    _teardown () {
        // We might be asked to re-enable before _teardown is called; just
        // ignore it in that case.
        if (this.enabled === false) {
            this._disablePreview();
            this._singleSetup = null;
            // By clearing refs to video and track, we should lose our hold
            // over the camera.
            this._video = null;
            if (this._track) {
                this._track.stop();
            }
            this._track = null;
        }
    }

    /**
     * Return frame data from the video feed in specified dimensions, format,
     * and mirroring.
     *
     * @param {object} frameInfo A descriptor of the frame you would like to receive.
     * @param {Array.<number>} frameInfo.dimensions [width, height] array of numbers. Defaults to [480,360]
     * @param {boolean} frameInfo.mirror If you specifically want a mirror/non-mirror frame, defaults to the global
     *                                   mirror state (ioDevices.video.mirror)
     * @param {string} frameInfo.format Requested video format, available formats are 'image-data' and 'canvas'.
     * @param {number} frameInfo.cacheTimeout Will reuse previous image data if the time since capture is less than
     *                                        the cacheTimeout. Defaults to 16ms.
     *
     * @return {ArrayBuffer|Canvas|string|null} Frame data in requested format, null when errors.
     */
    getFrame ({
        dimensions = Video.DIMENSIONS,
        mirror = this.mirror,
        format = Video.FORMAT_IMAGE_DATA,
        cacheTimeout = this._frameCacheTimeout
    }) {
        if (!this.videoReady) {
            return null;
        }
        const [width, height] = dimensions;
        const workspace = this._getWorkspace({dimensions, mirror: Boolean(mirror)});
        const {videoWidth, videoHeight} = this._video;
        const {canvas, context, lastUpdate, cacheData} = workspace;
        const now = Date.now();

        // Redraw the canvas only when the cached image has gone stale.
        if (lastUpdate + cacheTimeout < now) {
            if (mirror) {
                context.scale(-1, 1);
                context.translate(width * -1, 0);
            }
            context.drawImage(this._video,
                // source x, y, width, height
                0, 0, videoWidth, videoHeight,
                // dest x, y, width, height
                0, 0, width, height
            );
            context.resetTransform();
            workspace.lastUpdate = now;
        }

        // Each data type has its own data cache, but the canvas is the same.
        if (!cacheData[format]) {
            cacheData[format] = {lastUpdate: 0};
        }
        const formatCache = cacheData[format];

        if (formatCache.lastUpdate + cacheTimeout < now) {
            if (format === Video.FORMAT_IMAGE_DATA) {
                formatCache.lastData = context.getImageData(0, 0, width, height);
            } else if (format === Video.FORMAT_CANVAS) {
                // This will never change.
                formatCache.lastUpdate = Infinity;
                formatCache.lastData = canvas;
            } else {
                log.error(`video io error - unimplemented format ${format}`);
                // Cache the null result forever, don't log about it again..
                formatCache.lastUpdate = Infinity;
                formatCache.lastData = null;
            }
            // Rather than set to now, this data is as stale as its canvas is.
            formatCache.lastUpdate = Math.max(workspace.lastUpdate, formatCache.lastUpdate);
        }

        return formatCache.lastData;
    }

    /**
     * Set the preview ghost effect.
     * @param {number} ghost from 0 (visible) to 100 (invisible) - ghost effect
     */
    setPreviewGhost (ghost) {
        this._ghost = ghost;
        // Only touch the renderer once the preview drawable exists; _drawable
        // is the truthy sentinel -1 until _setupPreview runs, and the renderer
        // may be absent. The requested value is remembered either way and
        // applied when the preview is (re)created.
        if (this._drawable !== -1 && this.runtime.renderer) {
            this.runtime.renderer.updateDrawableProperties(this._drawable, {ghost});
        }
    }

    /**
     * Method called when an error happens. Default implementation is just to
     * log the error.
     *
     * @abstract
     * @param {Error} error An error object from getUserMedia or other source of error.
     */
    onError (error) {
        log.error('Unhandled video io device error', error);
    }

    /**
     * Create a video stream.
     * Should probably be moved to -render or somewhere similar later.
     * @private
     * @return {Promise} When video has been received, rejected if video is not received
     */
    _setupVideo () {
        // We cache the result of this setup so that we can only ever have a
        // single video/getUserMedia request happen at a time.
        if (this._singleSetup) {
            return this._singleSetup;
        }

        this._singleSetup = new Promise((resolve, reject) => {
            navigator.getUserMedia({
                audio: false,
                video: {
                    width: {min: 480, ideal: 640},
                    height: {min: 360, ideal: 480}
                }
            }, resolve, reject);
        })
            .then(stream => {
                this._video = document.createElement('video');
                this._video.src = window.URL.createObjectURL(stream);
                // Hint to the stream that it should load. A standard way to do this
                // is add the video tag to the DOM. Since this extension wants to
                // hide the video tag and instead render a sample of the stream into
                // the webgl rendered Scratch canvas, another hint like this one is
                // needed.
                this._track = stream.getTracks()[0];
                this._setupPreview();
                return this;
            })
            .catch(error => {
                // Allow a later enableVideo() call to retry setup.
                this._singleSetup = null;
                this.onError(error);
            });

        return this._singleSetup;
    }

    /**
     * Hide the preview skin/drawable and stop the preview render loop.
     * @private
     */
    _disablePreview () {
        if (this._skin) {
            this._skin.clear();
            this.runtime.renderer.updateDrawableProperties(this._drawable, {visible: false});
        }
        // Clearing the loop function makes the pending timeout a no-op.
        this._renderPreviewFrame = null;
    }

    /**
     * Create (once) the preview skin/drawable and start a render loop that
     * stamps the current video frame to the stage every step.
     * @private
     */
    _setupPreview () {
        const {renderer} = this.runtime;
        if (!renderer) return;

        if (this._skinId === -1 && this._skin === null && this._drawable === -1) {
            this._skinId = renderer.createPenSkin();
            this._skin = renderer._allSkins[this._skinId];
            this._drawable = renderer.createDrawable();
            renderer.setDrawableOrder(
                this._drawable,
                Video.ORDER
            );
            renderer.updateDrawableProperties(this._drawable, {
                skinId: this._skinId
            });
        }

        // If we haven't already created and started a preview frame render
        // loop, do so.
        if (!this._renderPreviewFrame) {
            renderer.updateDrawableProperties(this._drawable, {
                ghost: this._ghost,
                visible: true
            });

            this._renderPreviewFrame = () => {
                clearTimeout(this._renderPreviewTimeout);
                if (!this._renderPreviewFrame) {
                    return;
                }

                this._renderPreviewTimeout = setTimeout(this._renderPreviewFrame, this.runtime.currentStepTime);

                const canvas = this.getFrame({format: Video.FORMAT_CANVAS});

                if (!canvas) {
                    this._skin.clear();
                    return;
                }

                const xOffset = Video.DIMENSIONS[0] / -2;
                const yOffset = Video.DIMENSIONS[1] / 2;
                this._skin.drawStamp(canvas, xOffset, yOffset);
                this.runtime.requestRedraw();
            };

            this._renderPreviewFrame();
        }
    }

    /**
     * Whether the video stream is established and delivering actual pixels.
     * @type {boolean}
     */
    get videoReady () {
        if (!this.enabled) {
            return false;
        }
        if (!this._video) {
            return false;
        }
        if (!this._track) {
            return false;
        }
        const {videoWidth, videoHeight} = this._video;
        if (typeof videoWidth !== 'number' || typeof videoHeight !== 'number') {
            return false;
        }
        if (videoWidth === 0 || videoHeight === 0) {
            return false;
        }
        return true;
    }

    /**
     * Get an internal workspace for canvas/context/caches.
     * This uses some document stuff to create a canvas and what not, probably
     * needs abstraction into the renderer layer?
     * @private
     * @return {object} A workspace for canvas/data storage. Internal format not documented intentionally
     */
    _getWorkspace ({dimensions, mirror}) {
        let workspace = this._workspace.find(space => (
            space.dimensions.join('-') === dimensions.join('-') &&
            space.mirror === mirror
        ));
        if (!workspace) {
            workspace = {
                dimensions,
                mirror,
                canvas: document.createElement('canvas'),
                lastUpdate: 0,
                cacheData: {}
            };
            workspace.canvas.width = dimensions[0];
            workspace.canvas.height = dimensions[1];
            workspace.context = workspace.canvas.getContext('2d');
            this._workspace.push(workspace);
        }
        return workspace;
    }
}
module.exports = Video;

View file

@ -627,6 +627,10 @@ const parseBlock = function (sb2block, addBroadcastMsg, getVariableId, extension
} else if (fieldValue === 'this sprite') { } else if (fieldValue === 'this sprite') {
fieldValue = 2; fieldValue = 2;
} }
} else if (expectedArg.inputOp === 'videoSensing.menu.VIDEO_STATE') {
if (shadowObscured) {
fieldValue = 'on';
}
} else if (shadowObscured) { } else if (shadowObscured) {
// Filled drop-down menu. // Filled drop-down menu.
fieldValue = ''; fieldValue = '';

View file

@ -980,17 +980,17 @@ const specMap = {
// ] // ]
// }, // },
'setVideoState': { 'setVideoState': {
opcode: 'sensing_videotoggle', opcode: 'videoSensing.videoToggle',
argMap: [ argMap: [
{ {
type: 'input', type: 'input',
inputOp: 'sensing_videotogglemenu', inputOp: 'videoSensing.menu.VIDEO_STATE',
inputName: 'VIDEOTOGGLEMENU' inputName: 'VIDEO_STATE'
} }
] ]
}, },
'setVideoTransparency': { 'setVideoTransparency': {
opcode: 'sensing_setvideotransparency', opcode: 'videoSensing.setVideoTransparency',
argMap: [ argMap: [
{ {
type: 'input', type: 'input',