scratch-vm/src/extensions/scratch3_video_sensing/index.js
const Runtime = require('../../engine/runtime');
const ArgumentType = require('../../extension-support/argument-type');
const BlockType = require('../../extension-support/block-type');
const Clone = require('../../util/clone');
const Cast = require('../../util/cast');
const formatMessage = require('format-message');
const Video = require('../../io/video');
const VideoMotion = require('./library');
/**
* Icon svg to be displayed in the blocks category menu, encoded as a data URI.
* @type {string}
*/
// eslint-disable-next-line max-len
const menuIconURI = 'data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjBweCIgaGVpZ2h0PSIyMHB4IiB2aWV3Qm94PSIwIDAgMjAgMjAiIHZlcnNpb249IjEuMSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayI+CiAgICA8IS0tIEdlbmVyYXRvcjogU2tldGNoIDUyLjIgKDY3MTQ1KSAtIGh0dHA6Ly93d3cuYm9oZW1pYW5jb2RpbmcuY29tL3NrZXRjaCAtLT4KICAgIDx0aXRsZT5FeHRlbnNpb25zL1NvZnR3YXJlL1ZpZGVvLVNlbnNpbmctTWVudTwvdGl0bGU+CiAgICA8ZGVzYz5DcmVhdGVkIHdpdGggU2tldGNoLjwvZGVzYz4KICAgIDxnIGlkPSJFeHRlbnNpb25zL1NvZnR3YXJlL1ZpZGVvLVNlbnNpbmctTWVudSIgc3Ryb2tlPSJub25lIiBzdHJva2Utd2lkdGg9IjEiIGZpbGw9Im5vbmUiIGZpbGwtcnVsZT0iZXZlbm9kZCI+CiAgICAgICAgPGcgaWQ9InZpZGVvLW1vdGlvbiIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoMC4wMDAwMDAsIDUuMDAwMDAwKSIgZmlsbC1ydWxlPSJub256ZXJvIj4KICAgICAgICAgICAgPGNpcmNsZSBpZD0iT3ZhbC1Db3B5IiBmaWxsPSIjMEVCRDhDIiBvcGFjaXR5PSIwLjI1IiBjeD0iMTYiIGN5PSI4IiByPSIyIj48L2NpcmNsZT4KICAgICAgICAgICAgPGNpcmNsZSBpZD0iT3ZhbC1Db3B5IiBmaWxsPSIjMEVCRDhDIiBvcGFjaXR5PSIwLjUiIGN4PSIxNiIgY3k9IjYiIHI9IjIiPjwvY2lyY2xlPgogICAgICAgICAgICA8Y2lyY2xlIGlkPSJPdmFsLUNvcHkiIGZpbGw9IiMwRUJEOEMiIG9wYWNpdHk9IjAuNzUiIGN4PSIxNiIgY3k9IjQiIHI9IjIiPjwvY2lyY2xlPgogICAgICAgICAgICA8Y2lyY2xlIGlkPSJPdmFsIiBmaWxsPSIjMEVCRDhDIiBjeD0iMTYiIGN5PSIyIiByPSIyIj48L2NpcmNsZT4KICAgICAgICAgICAgPHBhdGggZD0iTTExLjMzNTk3MzksMi4yMDk3ODgyNSBMOC4yNSw0LjIwOTk1NjQ5IEw4LjI1LDMuMDUgQzguMjUsMi4wNDQ4ODIyNyA3LjQ2ODU5MDMxLDEuMjUgNi41LDEuMjUgTDIuMDUsMS4yNSBDMS4wMzgwNzExOSwxLjI1IDAuMjUsMi4wMzgwNzExOSAwLjI1LDMuMDUgTDAuMjUsNyBDMC4yNSw3Ljk2MzY5OTM3IDEuMDQyMjQ5MTksOC43NTU5NDg1NiAyLjA1LDguOCBMNi41LDguOCBDNy40NTA4MzAwOSw4LjggOC4yNSw3Ljk3MzI3MjUgOC4yNSw3IEw4LjI1LDUuODU4NDUyNDEgTDguNjI4NjIzOTQsNi4wODU2MjY3NyBMMTEuNDI2Nzc2Nyw3Ljc3MzIyMzMgQzExLjQzNjg5NDMsNy43ODMzNDA5MSAxMS40NzU3NjU1LDcuOCAxMS41LDcuOCBDMTEuNjMzNDkzMiw3LjggMTEuNzUsNy42OTEyNjAzNCAxMS43NSw3LjU1IEwxMS43NSwyLjQgQzExLjc1LDIuNDE4MzgyNjkgMTEuNzIxOTAyOSwyLjM1MjgyMjgyIDExLjY4NTYyNjgsMi4yNzg2MjM5NCBDMTEuNjEyOTUyOCwyLjE1NzUwMDY5IDExLjQ3MDc5NjgsMi4xMjkwNjk1IDExLjMzNTk3MzksMi4yMDk3ODgyNSBaIiBpZD0idmlkZW9fMzdfIiBzdHJva2Utb3BhY2l0eT0iMC4xNSIgc3Ryb2tlPSIjMDAwMDAwIiBzdHJva2Utd2lkdGg9IjAuNSIgZmlsbD0iIzRENEQ0RCI+PC9wYXRoPgogICAgICAgIDwvZz4KICAgIDwvZz4KPC9zdmc+';
/**
* Icon svg to be displayed at the left edge of each extension block, encoded as a data URI.
* @type {string}
*/
// eslint-disable-next-line max-len
const blockIconURI = 'data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iNDBweCIgaGVpZ2h0PSI0MHB4IiB2aWV3Qm94PSIwIDAgNDAgNDAiIHZlcnNpb249IjEuMSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayI+CiAgICA8IS0tIEdlbmVyYXRvcjogU2tldGNoIDUyLjIgKDY3MTQ1KSAtIGh0dHA6Ly93d3cuYm9oZW1pYW5jb2RpbmcuY29tL3NrZXRjaCAtLT4KICAgIDx0aXRsZT5FeHRlbnNpb25zL1NvZnR3YXJlL1ZpZGVvLVNlbnNpbmctQmxvY2s8L3RpdGxlPgogICAgPGRlc2M+Q3JlYXRlZCB3aXRoIFNrZXRjaC48L2Rlc2M+CiAgICA8ZyBpZD0iRXh0ZW5zaW9ucy9Tb2Z0d2FyZS9WaWRlby1TZW5zaW5nLUJsb2NrIiBzdHJva2U9Im5vbmUiIHN0cm9rZS13aWR0aD0iMSIgZmlsbD0ibm9uZSIgZmlsbC1ydWxlPSJldmVub2RkIiBzdHJva2Utb3BhY2l0eT0iMC4xNSI+CiAgICAgICAgPGcgaWQ9InZpZGVvLW1vdGlvbiIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoMC4wMDAwMDAsIDEwLjAwMDAwMCkiIGZpbGwtcnVsZT0ibm9uemVybyIgc3Ryb2tlPSIjMDAwMDAwIj4KICAgICAgICAgICAgPGNpcmNsZSBpZD0iT3ZhbC1Db3B5IiBmaWxsPSIjRkZGRkZGIiBvcGFjaXR5PSIwLjI1IiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS1saW5lam9pbj0icm91bmQiIGN4PSIzMiIgY3k9IjE2IiByPSI0LjUiPjwvY2lyY2xlPgogICAgICAgICAgICA8Y2lyY2xlIGlkPSJPdmFsLUNvcHkiIGZpbGw9IiNGRkZGRkYiIG9wYWNpdHk9IjAuNSIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIiBjeD0iMzIiIGN5PSIxMiIgcj0iNC41Ij48L2NpcmNsZT4KICAgICAgICAgICAgPGNpcmNsZSBpZD0iT3ZhbC1Db3B5IiBmaWxsPSIjRkZGRkZGIiBvcGFjaXR5PSIwLjc1IiBzdHJva2UtbGluZWNhcD0icm91bmQiIHN0cm9rZS1saW5lam9pbj0icm91bmQiIGN4PSIzMiIgY3k9IjgiIHI9IjQuNSI+PC9jaXJjbGU+CiAgICAgICAgICAgIDxjaXJjbGUgaWQ9Ik92YWwiIGZpbGw9IiNGRkZGRkYiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIgY3g9IjMyIiBjeT0iNCIgcj0iNC41Ij48L2NpcmNsZT4KICAgICAgICAgICAgPHBhdGggZD0iTTIyLjY3MTk0NzcsNC40MTk1NzY0OSBMMTYuNSw4LjQxOTkxMjk4IEwxNi41LDYuMSBDMTYuNSw0LjA4OTc2NDU0IDE0LjkzNzE4MDYsMi41IDEzLDIuNSBMNC4xLDIuNSBDMi4wNzYxNDIzNywyLjUgMC41LDQuMDc2MTQyMzcgMC41LDYuMSBMMC41LDE0IEMwLjUsMTUuOTI3Mzk4NyAyLjA4NDQ5ODM5LDE3LjUxMTg5NzEgNC4xLDE3LjYgTDEzLDE3LjYgQzE0LjkwMTY2MDIsMTcuNiAxNi41LDE1Ljk0NjU0NSAxNi41LDE0IEwxNi41LDExLjcxNjkwNDggTDIyLjc1NzI0NzksMTUuNDcxMjUzNSBMMjIuODUzNTUzNCwxNS41NDY0NDY2IEMyMi44NzM3ODg2LDE1LjU2NjY4MTggMjIuOTUxNTMxLDE1LjYgMjMsMTUuNiBDMjMuMjY2OTg2NSwxNS42IDIzLjUsMTUuMzgyNTIwNyAyMy41LDE1LjEgTDIzLjUsNC44IEMyMy41LDQuODM2NzY1MzggMjMuNDQzODA1OCw0LjcwNTY0NTYzIDIzLjM3MTI1MzUsNC41NTcyNDc4OCBDMjMuMjI1OTA1Niw0LjMxNTAwMTM5IDIyLjk0MTU5MzcsNC4yNTgxMzg5OSAyMi42NzE5NDc3LDQuNDE5NTc2NDkgWiIgaWQ9InZpZGVvXzM3XyIgZmlsbD0iIzRENEQ0RCI+PC9wYXRoPgogICAgICAgIDwvZz4KICAgIDwvZz4KPC9zdmc+';
/**
* Sensor attribute that the video sensor block should report.
* @readonly
* @enum {string}
*/
const SensingAttribute = {
/** The amount of motion. */
MOTION: 'motion',
/** The direction of the motion. */
DIRECTION: 'direction'
};
/**
* Subject the video sensor block should report for.
* @readonly
* @enum {string}
*/
const SensingSubject = {
/** The sensor traits of the whole stage. */
STAGE: 'Stage',
/** The sensor traits of the area overlapped by this sprite. */
SPRITE: 'this sprite'
};
/**
* States the video sensing activity can be set to.
* @readonly
* @enum {string}
*/
const VideoState = {
/** Video turned off. */
OFF: 'off',
/** Video turned on with default y axis mirroring. */
ON: 'on',
/** Video turned on without default y axis mirroring. */
ON_FLIPPED: 'on-flipped'
};
/**
* Class for the motion-related blocks in Scratch 3.0
* @param {Runtime} runtime - the runtime instantiating this block package.
* @constructor
*/
class Scratch3VideoSensingBlocks {
constructor (runtime) {
/**
* The runtime instantiating this block package.
* @type {Runtime}
*/
this.runtime = runtime;
/**
* The motion detection algorithm used to power the motion amount and
* direction values.
* @type {VideoMotion}
*/
this.detect = new VideoMotion();
/**
* The last millisecond epoch timestamp that the video stream was
* analyzed.
* @type {number}
*/
this._lastUpdate = null;
/**
* A flag to determine if this extension has been installed in a project.
* It is set to false the first time getInfo is run.
* @type {boolean}
*/
this.firstInstall = true;
if (this.runtime.ioDevices) {
// Configure the video device with values from globally stored locations.
this.runtime.on(Runtime.PROJECT_LOADED, this.updateVideoDisplay.bind(this));
// Clear target motion state values when the project starts.
this.runtime.on(Runtime.PROJECT_RUN_START, this.reset.bind(this));
// Kick off looping the analysis logic.
this._loop();
}
}
/**
* The number of milliseconds to wait after analyzing a frame before another
* frame is analyzed.
* @type {number}
*/
static get INTERVAL () {
return 33;
}
/**
* Dimensions the video stream is analyzed at after it's rendered to the
* sample canvas.
* @type {Array.<number>}
*/
static get DIMENSIONS () {
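// 480x360 matches the Scratch stage dimensions, so analyzed frame
// coordinates line up with stage coordinates.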
return [480, 360];
}
/**
* The key to load & store a target's motion-related state.
* @type {string}
*/
static get STATE_KEY () {
return 'Scratch.videoSensing';
}
/**
* The default motion-related state, to be used when a target has no existing motion state.
* @type {MotionState}
*/
static get DEFAULT_MOTION_STATE () {
return {
motionFrameNumber: 0,
motionAmount: 0,
motionDirection: 0
};
}
/**
* The transparency setting of the video preview stored in a value
* accessible by any object connected to the virtual machine.
* @type {number}
*/
get globalVideoTransparency () {
const stage = this.runtime.getTargetForStage();
if (stage) {
return stage.videoTransparency;
}
return 50;
}
set globalVideoTransparency (transparency) {
const stage = this.runtime.getTargetForStage();
if (stage) {
stage.videoTransparency = transparency;
}
}
/**
* The video state of the video preview stored in a value accessible by any
* object connected to the virtual machine.
* @type {string}
*/
get globalVideoState () {
const stage = this.runtime.getTargetForStage();
if (stage) {
return stage.videoState;
}
// Though the default value for the stage is normally 'on', we need to default
// to 'off' here to prevent the video device from briefly activating
// while waiting for stage targets to be installed that say it should be off
return VideoState.OFF;
}
set globalVideoState (state) {
const stage = this.runtime.getTargetForStage();
if (stage) {
stage.videoState = state;
}
}
/**
* Get the latest values for video transparency and state,
* and set the video device to use them.
*/
updateVideoDisplay () {
this.setVideoTransparency({
TRANSPARENCY: this.globalVideoTransparency
});
this.videoToggle({
VIDEO_STATE: this.globalVideoState
});
}
/**
* Reset the extension's motion detection data. This clears out, for example,
* old frames, so the first analyzed frame will not be compared against a
* frame from before reset was called.
*/
reset () {
this.detect.reset();
const targets = this.runtime.targets;
for (let i = 0; i < targets.length; i++) {
const state = targets[i].getCustomState(Scratch3VideoSensingBlocks.STATE_KEY);
if (state) {
state.motionAmount = 0;
state.motionDirection = 0;
}
}
}
/**
* Occasionally step a loop to sample the video, stamp it to the preview
* skin, and add a TypedArray copy of the canvas's pixel data.
* @private
*/
_loop () {
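// Schedule the next pass no sooner than the VM's step time or INTERVAL,
// whichever is longer, so sampling never outpaces either.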
const loopTime = Math.max(this.runtime.currentStepTime, Scratch3VideoSensingBlocks.INTERVAL);
this._loopInterval = setTimeout(this._loop.bind(this), loopTime);
// Add frame to detector
const time = Date.now();
if (this._lastUpdate === null) {
this._lastUpdate = time;
}
const offset = time - this._lastUpdate;
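// Only pull and analyze a new frame once at least INTERVAL milliseconds
// have passed since the last analyzed frame.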
if (offset > Scratch3VideoSensingBlocks.INTERVAL) {
const frame = this.runtime.ioDevices.video.getFrame({
format: Video.FORMAT_IMAGE_DATA,
dimensions: Scratch3VideoSensingBlocks.DIMENSIONS
});
if (frame) {
this._lastUpdate = time;
this.detect.addFrame(frame.data);
}
}
}
/**
* Stop the video sampling loop. Only used for testing.
*/
_stopLoop () {
clearTimeout(this._loopInterval);
}
/**
* Create data for a menu in scratch-blocks format, consisting of an array
* of objects with text and value properties. The text is a translated
* string, and the value is one-indexed.
* @param {object[]} info - An array of info objects each having a name
* property.
* @return {array} - An array of objects with text and value properties.
* @private
*/
_buildMenu (info) {
return info.map((entry, index) => {
const obj = {};
obj.text = entry.name;
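// Use the entry's explicit value when present; otherwise fall back to a
// one-indexed string, per the menu format described above.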
obj.value = entry.value || String(index + 1);
return obj;
});
}
/**
* @param {Target} target - collect motion state for this target.
* @returns {MotionState} the mutable motion state associated with that
* target. This will be created if necessary.
* @private
*/
_getMotionState (target) {
let motionState = target.getCustomState(Scratch3VideoSensingBlocks.STATE_KEY);
if (!motionState) {
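// Clone the default state so each target gets its own mutable copy
// instead of sharing the constant default object.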
motionState = Clone.simple(Scratch3VideoSensingBlocks.DEFAULT_MOTION_STATE);
target.setCustomState(Scratch3VideoSensingBlocks.STATE_KEY, motionState);
}
return motionState;
}
static get SensingAttribute () {
return SensingAttribute;
}
/**
* An array of choices of whether a reporter should return the frame's
* motion amount or direction.
* @type {object[]}
* @param {string} name - the translatable name to display in sensor
* attribute menu
* @param {string} value - the serializable value of the attribute
*/
get ATTRIBUTE_INFO () {
return [
{
name: formatMessage({
id: 'videoSensing.motion',
default: 'motion',
description: 'Attribute for the "video [ATTRIBUTE] on [SUBJECT]" block'
}),
value: SensingAttribute.MOTION
},
{
name: formatMessage({
id: 'videoSensing.direction',
default: 'direction',
description: 'Attribute for the "video [ATTRIBUTE] on [SUBJECT]" block'
}),
value: SensingAttribute.DIRECTION
}
];
}
static get SensingSubject () {
return SensingSubject;
}
/**
* An array of info about the subject choices.
* @type {object[]}
* @param {string} name - the translatable name to display in the subject menu
* @param {string} value - the serializable value of the subject
*/
get SUBJECT_INFO () {
return [
{
name: formatMessage({
id: 'videoSensing.sprite',
default: 'sprite',
description: 'Subject for the "video [ATTRIBUTE] on [SUBJECT]" block'
}),
value: SensingSubject.SPRITE
},
{
name: formatMessage({
id: 'videoSensing.stage',
default: 'stage',
description: 'Subject for the "video [ATTRIBUTE] on [SUBJECT]" block'
}),
value: SensingSubject.STAGE
}
];
}
/**
* States the video sensing activity can be set to.
* @readonly
* @enum {string}
*/
static get VideoState () {
return VideoState;
}
/**
* An array of info on video state options for the "turn video [STATE]" block.
* @type {object[]}
* @param {string} name - the translatable name to display in the video state menu
* @param {string} value - the serializable value stored in the block
*/
get VIDEO_STATE_INFO () {
return [
{
name: formatMessage({
id: 'videoSensing.off',
default: 'off',
description: 'Option for the "turn video [STATE]" block'
}),
value: VideoState.OFF
},
{
name: formatMessage({
id: 'videoSensing.on',
default: 'on',
description: 'Option for the "turn video [STATE]" block'
}),
value: VideoState.ON
},
{
name: formatMessage({
id: 'videoSensing.onFlipped',
default: 'on flipped',
description: 'Option for the "turn video [STATE]" block that causes the video to be flipped' +
' horizontally (reversed as in a mirror)'
}),
value: VideoState.ON_FLIPPED
}
];
}
/**
* @returns {object} metadata for this extension and its blocks.
*/
getInfo () {
// Set the video display properties to defaults the first time
// getInfo is run. This turns on the video device when it is
// first added to a project, and is overwritten by a PROJECT_LOADED
// event listener that later calls updateVideoDisplay
if (this.firstInstall) {
this.globalVideoState = VideoState.ON;
this.globalVideoTransparency = 50;
this.updateVideoDisplay();
this.firstInstall = false;
}
// Return extension definition
return {
id: 'videoSensing',
name: formatMessage({
id: 'videoSensing.categoryName',
default: 'Video Sensing',
description: 'Label for the video sensing extension category'
}),
blockIconURI: blockIconURI,
menuIconURI: menuIconURI,
blocks: [
{
// @todo this hat needs to be set itself to restart existing
// threads like Scratch 2's behaviour.
opcode: 'whenMotionGreaterThan',
text: formatMessage({
id: 'videoSensing.whenMotionGreaterThan',
default: 'when video motion > [REFERENCE]',
description: 'Event that triggers when the amount of motion is greater than [REFERENCE]'
}),
blockType: BlockType.HAT,
arguments: {
REFERENCE: {
type: ArgumentType.NUMBER,
defaultValue: 10
}
}
},
{
opcode: 'videoOn',
blockType: BlockType.REPORTER,
text: formatMessage({
id: 'videoSensing.videoOn',
default: 'video [ATTRIBUTE] on [SUBJECT]',
description: 'Reporter that returns the amount of [ATTRIBUTE] for the selected [SUBJECT]'
}),
arguments: {
ATTRIBUTE: {
type: ArgumentType.NUMBER,
menu: 'ATTRIBUTE',
defaultValue: SensingAttribute.MOTION
},
SUBJECT: {
type: ArgumentType.NUMBER,
menu: 'SUBJECT',
defaultValue: SensingSubject.SPRITE
}
}
},
{
opcode: 'videoToggle',
text: formatMessage({
id: 'videoSensing.videoToggle',
default: 'turn video [VIDEO_STATE]',
description: 'Controls display of the video preview layer'
}),
arguments: {
VIDEO_STATE: {
type: ArgumentType.NUMBER,
menu: 'VIDEO_STATE',
defaultValue: VideoState.ON
}
}
},
{
opcode: 'setVideoTransparency',
text: formatMessage({
id: 'videoSensing.setVideoTransparency',
default: 'set video transparency to [TRANSPARENCY]',
description: 'Controls transparency of the video preview layer'
}),
arguments: {
TRANSPARENCY: {
type: ArgumentType.NUMBER,
defaultValue: 50
}
}
}
],
menus: {
ATTRIBUTE: {
acceptReporters: true,
items: this._buildMenu(this.ATTRIBUTE_INFO)
},
SUBJECT: {
acceptReporters: true,
items: this._buildMenu(this.SUBJECT_INFO)
},
VIDEO_STATE: {
acceptReporters: true,
items: this._buildMenu(this.VIDEO_STATE_INFO)
}
}
};
}
/**
* Analyze a part of the frame that a target overlaps.
* @param {Target} target - a target to determine where to analyze
* @returns {MotionState} the motion state for the given target
*/
_analyzeLocalMotion (target) {
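// Look up the target's drawable (via the renderer's internal list) so the
// detector can restrict its analysis to the area the sprite covers.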
const drawable = this.runtime.renderer._allDrawables[target.drawableID];
const state = this._getMotionState(target);
this.detect.getLocalMotion(drawable, state);
return state;
}
/**
* A scratch reporter block handle that analyzes the last two frames and,
* depending on the arguments, returns the motion or direction for the
* whole stage or just the target sprite.
* @param {object} args - the block arguments
* @param {BlockUtility} util - the block utility
* @returns {number} the motion amount or direction of the stage or sprite
*/
videoOn (args, util) {
this.detect.analyzeFrame();
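// Default to the whole-stage motion values tracked on the detector itself.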
let state = this.detect;
if (args.SUBJECT === SensingSubject.SPRITE) {
state = this._analyzeLocalMotion(util.target);
}
if (args.ATTRIBUTE === SensingAttribute.MOTION) {
return state.motionAmount;
}
return state.motionDirection;
}
/**
* A scratch hat block edge handle that analyzes the last two frames in the
* area the target sprite overlaps and reports whether that area has more
* motion than the given reference value.
* @param {object} args - the block arguments
* @param {BlockUtility} util - the block utility
* @returns {boolean} true if the sprite overlaps more motion than the
* reference
*/
whenMotionGreaterThan (args, util) {
this.detect.analyzeFrame();
const state = this._analyzeLocalMotion(util.target);
return state.motionAmount > Number(args.REFERENCE);
}
/**
* A scratch command block handle that configures the video state from
* passed arguments.
* @param {object} args - the block arguments
* @param {VideoState} args.VIDEO_STATE - the video state to set the device to
*/
videoToggle (args) {
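// Treat any unrecognized menu value as OFF before applying the state.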
const state = Object.values(VideoState).includes(args.VIDEO_STATE) ? args.VIDEO_STATE : VideoState.OFF;
this.globalVideoState = state;
if (state === VideoState.OFF) {
this.runtime.ioDevices.video.disableVideo();
} else {
this.runtime.ioDevices.video.enableVideo();
// Mirror if state is ON. Do not mirror if state is ON_FLIPPED.
this.runtime.ioDevices.video.mirror = state === VideoState.ON;
}
}
/**
* A scratch command block handle that configures the video preview's
* transparency from passed arguments.
* @param {object} args - the block arguments
* @param {number} args.TRANSPARENCY - the transparency to set the video
* preview to
*/
setVideoTransparency (args) {
const transparency = Cast.toNumber(args.TRANSPARENCY);
this.globalVideoTransparency = transparency;
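// Forward the value to the video device, where setPreviewGhost uses it as
// the preview layer's ghost value.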
this.runtime.ioDevices.video.setPreviewGhost(transparency);
}
}
module.exports = Scratch3VideoSensingBlocks;
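// Usage sketch (illustrative only, not part of the original file): the class is
// constructed with a Runtime instance, similar to how scratch-vm's unit tests
// exercise extensions. The require paths below assume scratch-vm is available
// as an installed package.
//
//     const Runtime = require('scratch-vm/src/engine/runtime');
//     const VideoSensing = require('scratch-vm/src/extensions/scratch3_video_sensing');
//     const runtime = new Runtime();
//     const videoSensing = new VideoSensing(runtime);
//     videoSensing.videoToggle({VIDEO_STATE: 'off'}); // e.g. turn the video device off
//     videoSensing._stopLoop(); // stop the sampling timer when finished (test-only helper)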