Mirror of https://github.com/scratchfoundation/scratchjr.git (synced 2024-11-28 18:15:37 -05:00)
Merge pull request #121 from chrisgarrity/bug/iOS-sound
Rewrite iOS sound management to use native audio instead of WebAudio. Quirks are now consistent with Android, which was already using native audio.
Commit d4a6b2885d
10 changed files with 263 additions and 278 deletions
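
A rough sketch of the new call flow this commit sets up, pieced together from the hunks below. Only the iOS.registerSound / iOS.playSound / iOS.soundDone names come from the diff; the loadAndPlay wrapper and the file name are invented for illustration.

import iOS from '../iPad/iOS';

// Hypothetical glue code; only the iOS.* calls come from this commit.
function loadAndPlay(dir, name) {
    // The native io_registersound export returns "name,duration" on success
    // or the string "error" if AVAudioPlayer could not open the file.
    iOS.registerSound(dir, name, function (result) {
        if (result === 'error') {
            return;
        }
        var duration = parseFloat(result.split(',')[1]); // duration in seconds
        console.log(name + ' registered (' + duration + 's)');
        // Playback is fire-and-forget; when the clip ends, the native NSTimer
        // evaluates iOS.soundDone(name) in the web view so the JS side can
        // clear its "playing" flag.
        iOS.playSound(name);
    });
}

loadAndPlay('Documents', 'recording.wav'); // example arguments only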
editions/free/src/app.bundle.js.map (symbolic link)
@@ -0,0 +1 @@
../../../src/build/bundles/app.bundle.js.map
@@ -3,6 +3,8 @@
ViewController* HTML;
MFMailComposeViewController *emailDialog;
NSMutableDictionary *mediastrings;
NSMutableDictionary *sounds;
NSMutableDictionary *soundtimers;

// new primtives

@@ -12,6 +14,8 @@ NSMutableDictionary *mediastrings;
// new primtives
+ (void)init:(ViewController*)vc {
    mediastrings = [[NSMutableDictionary alloc] init];
    sounds = [[NSMutableDictionary alloc] init];
    soundtimers = [[NSMutableDictionary alloc] init];
    HTML =vc;
}

@@ -225,6 +229,73 @@ NSMutableDictionary *mediastrings;
    return @"1";
}

////////////////////////////
// Sound System
////////////////////////////

+ (NSString *)registerSound:(NSString*)dir :(NSString*)name {
    NSURL *url;
    if ([dir isEqual:@"Documents"]){
        url = [self getDocumentPath: name];
    }
    else {
        url = [self getResourcePath: [NSString stringWithFormat: @"%@%@", dir, name]];
    }

    NSError *error;
    AVAudioPlayer *snd = [[AVAudioPlayer alloc] initWithContentsOfURL: url error:&error];

    if (error == nil) {
        [sounds setObject:snd forKey:name];
        [snd prepareToPlay];
        return [NSString stringWithFormat: @"%@,%f", name, snd.duration];
    }
    return @"error";
}

+ (NSString *)playSound :(NSString*)name {
    // TODO: make scratchJr pay attention to the mute
    // // audio type: respect the "Mute" if there are audio sounds
    // // ignore the Mute if it is from recording / playback and Runtime.
    // NSString *audiotype = ([dir isEqual: @"Documents"] || [name isEqual:@"pop.mp3"]) ? AVAudio\
    // SessionCategoryPlayAndRecord : AVAudioSessionCategoryAmbient;
    // [[AVAudioSession sharedInstance] setCategory:audiotype error:nil];
    AVAudioPlayer *snd = sounds[name];
    if (snd == nil) {
        return [NSString stringWithFormat:@"%@ not found", name];
    }
    NSTimer *sndTimer = soundtimers[name];
    if (sndTimer.valid) {
        // this sound is already playing, invalidate so that new timer will overrule
        [sndTimer invalidate];
    }
    [snd setCurrentTime:0];
    [snd play];
    [soundtimers setObject:[NSTimer scheduledTimerWithTimeInterval:[snd duration]
                                                            target:self
                                                          selector:@selector(soundEnded:)
                                                          userInfo:@{@"soundName":name}
                                                           repeats:NO] forKey:name];
    return [NSString stringWithFormat:@"%@ played", name];
}

+ (void)soundEnded:(NSTimer*)timer {
    NSString *soundName = [[timer userInfo] objectForKey:@"soundName"];
    if (sounds[soundName] == nil) return;
    NSString *callback = [NSString stringWithFormat:@"iOS.soundDone('%@');", soundName];
    UIWebView *webview = [ViewController webview];
    [webview stringByEvaluatingJavaScriptFromString:callback];
}

+ (NSString *)stopSound :(NSString*)name {
    AVAudioPlayer *snd = sounds[name];
    if (snd == nil) {
        return [NSString stringWithFormat:@"%@ not found", name];
    }
    [snd stop];
    return [NSString stringWithFormat:@"%@ stopped", name];
}

////////////////////////////
// File system
////////////////////////////
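The methods above define the whole contract with the web layer: registerSound returns "<name>,<duration>" or "error", playSound/stopSound return short status strings, and soundEnded: evaluates iOS.soundDone('<name>') in the web view. Below is a hypothetical JavaScript stub of that callback, just to make the completion path explicit; the real handler is the soundDone method added to iOS.js further down.

// Hypothetical restatement of the completion path set up above: the page must
// expose a global iOS object with a soundDone function for the evaluated
// callback string to land somewhere. In the real code it forwards to
// ScratchAudio.soundDone, which clears the project sound's playing flag.
window.iOS = window.iOS || {};
window.iOS.soundDone = function (name) {
    console.log(name + ' finished; the native AVAudioPlayer and NSTimer are done');
};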
@@ -6,15 +6,15 @@

@interface Database : NSObject

+ (NSString*)open:(NSString *)body;
+ (NSString*)close:(NSString *)str;
+ (NSString *)open:(NSString *)body;
+ (NSString *)close:(NSString *)str;
+ (void)initTables;
+ (void)runMigrations;
+ (NSArray*)findDataIn:(NSString *)stmtstr with:(NSArray *)values;
+ (NSArray *)findDataIn:(NSString *)stmtstr with:(NSArray *)values;

// Exports
+ (NSString*) stmt: (NSString*) json;
+ (NSString*) query: (NSString*) json;
+ (NSString *)stmt:(NSString *)json;
+ (NSString *)query:(NSString *)json;
@end

@interface CameraMask : UIView

@@ -30,10 +30,10 @@
@property float scale;
@property NSString *usingCamera;

-(void) switchOrientation:(int)orientation;
-(id)initWithFrame:(CGRect)frame withScale:(float)scale;
-(void) setCameraTo:(NSString*)dir;
-(NSString*)getImageBase64:(NSData*)imgdata;
- (void)switchOrientation:(int)orientation;
- (id)initWithFrame:(CGRect)frame withScale:(float)scale;
- (void)setCameraTo:(NSString *)dir;
- (NSString *)getImageBase64:(NSData *)imgdata;

@end

@@ -51,113 +51,131 @@
@property AVCaptureStillImageOutput *stillImageOutput;

- (BOOL) setupSession;
+ (NSString*) cameraHasPermission;
- (void) closeSession;
- (NSUInteger) cameraCount;
- (BOOL) setCamera:(NSString *)mode;
- (BOOL)setupSession;
+ (NSString *)cameraHasPermission;
- (void)closeSession;
- (NSUInteger)cameraCount;
- (BOOL)setCamera:(NSString *)mode;
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections;
- (void) captureStillImage;
- (void) autoFocusAtPoint:(CGPoint)point;
- (void) continuousFocusAtPoint:(CGPoint)point;
- (void)captureStillImage;
- (void)autoFocusAtPoint:(CGPoint)point;
- (void)continuousFocusAtPoint:(CGPoint)point;

@end

// These delegate methods can be called on any arbitrary thread. If the delegate does something with the UI when called, make sure to send it to the main thread.
@protocol ViewFinderDelegate <NSObject>
@optional
- (void) viewFinderStillImageCaptured:(ViewFinder *)viewFinder;
- (void) viewFinderDeviceConfigurationChanged:(ViewFinder *)viewFinder;
- (void)viewFinderStillImageCaptured:(ViewFinder *)viewFinder;
- (void)viewFinderDeviceConfigurationChanged:(ViewFinder *)viewFinder;
- (void)deviceOrientationDidChange;

@end

@interface RecordSound : NSObject
+ (NSString *)getPermission;
+ (void) setPermission;
+ (void) killRecording;
+ (void)setPermission;
+ (void)killRecording;
// Exports
+ (NSString*) startRecord;
+ (NSString*) stopRecording;
+ (double) getVolume;
+ (NSString*) startPlay;
+ (NSString*) stopPlay;
+ (NSString*) recordclose:(NSString *)keep;
+ (NSString *)startRecord;
+ (NSString *)stopRecording;
+ (double)getVolume;
+ (NSString *)startPlay;
+ (NSString *)stopPlay;
+ (NSString *)recordclose:(NSString *)keep;
@end

@protocol JSExports <JSExport>
/* Functions exported to JavaScript */
-(NSString*) hideSplash :(NSString *)body;
-(void) askForPermission;
-(NSString*) database_stmt: (NSString*) json;
-(NSString*) database_query: (NSString*) json;
-(NSString*) io_getmd5: (NSString*) str;
-(NSString*) io_getsettings;
-(void) io_cleanassets:(NSString*) fileType;
-(NSString*) io_setfile:(NSString*)filename :(NSString*)base64ContentStr;
-(NSString*) io_getfile:(NSString*)filename;
-(NSString*) io_setmedia:(NSString*) base64ContentStr :(NSString*) extension;
-(NSString*) io_setmedianame:(NSString*) contents :(NSString*) key :(NSString*) ext;
-(NSString*) io_getmedia:(NSString*) filename;
-(NSString*) io_getmediadata:(NSString*)filename :(int) offset :(int) length;
-(NSString*) io_getmedialen:(NSString*)file :(NSString*)key;
-(NSString*) io_getmediadone:(NSString*)filename;
-(NSString*) io_remove:(NSString*)filename;
-(NSString*) recordsound_recordstart;
-(NSString*) recordsound_recordstop;
-(NSString*) recordsound_volume;
-(NSString*) recordsound_startplay;
-(NSString*) recordsound_stopplay;
-(NSString*) recordsound_recordclose:(NSString*) keep;
- (NSString *)hideSplash:(NSString *)body;
- (void) askForPermission;
- (NSString *)database_stmt:(NSString *) json;
- (NSString *)database_query:(NSString *) json;
- (NSString *)io_getmd5:(NSString *) str;
- (NSString *)io_getsettings;
- (void)io_cleanassets:(NSString *)fileType;
- (NSString *)io_setfile:(NSString *)filename :(NSString *)base64ContentStr;
- (NSString *)io_getfile:(NSString *)filename;
- (NSString *)io_setmedia:(NSString *)base64ContentStr :(NSString *)extension;
- (NSString *)io_setmedianame:(NSString *)contents :(NSString *)key :(NSString *)ext;
- (NSString *)io_getmedia:(NSString *)filename;
- (NSString *)io_getmediadata:(NSString *)filename :(int)offset :(int)length;
- (NSString *)io_getmedialen:(NSString *)file :(NSString *)key;
- (NSString *)io_getmediadone:(NSString *)filename;
- (NSString *)io_remove:(NSString *)filename;
- (NSString *)io_registersound:(NSString *)dir :(NSString *)name;
- (NSString *)io_playsound:(NSString *)name;
- (NSString *)io_stopsound:(NSString *)name;

-(NSString*) scratchjr_cameracheck;
-(bool) scratchjr_has_multiple_cameras;
-(NSString*) scratchjr_startfeed:(NSString*)str;
-(NSString*) scratchjr_stopfeed;
-(NSString*) scratchjr_choosecamera:(NSString *)body;
-(NSString*) scratchjr_captureimage:(NSString*)onCameraCaptureComplete;
- (NSString*) sendSjrUsingShareDialog:(NSString*) fileName :(NSString*) emailSubject :(NSString*) emailBody :(int) shareType :(NSString*) b64data;
-(NSString*) deviceName;
-(NSString*) analyticsEvent:(NSString*) category :(NSString*) action :(NSString*) label :(NSNumber*) value;
- (NSString *)recordsound_recordstart;
- (NSString *)recordsound_recordstop;
- (NSString *)recordsound_volume;
- (NSString *)recordsound_startplay;
- (NSString *)recordsound_stopplay;
- (NSString *)recordsound_recordclose:(NSString *)keep;

- (NSString *)scratchjr_cameracheck;
- (bool) scratchjr_has_multiple_cameras;
- (NSString *)scratchjr_startfeed:(NSString *)str;
- (NSString *)scratchjr_stopfeed;
- (NSString *)scratchjr_choosecamera:(NSString *)body;
- (NSString *)scratchjr_captureimage:(NSString *)onCameraCaptureComplete;
- (NSString *)sendSjrUsingShareDialog:(NSString *)fileName
                                     :(NSString *)emailSubject
                                     :(NSString *)emailBody
                                     :(int)shareType
                                     :(NSString *)b64data;
- (NSString *) deviceName;
- (NSString *) analyticsEvent:(NSString *)category :(NSString *)action :(NSString *)label :(NSNumber*)value;
@end

@interface ViewController : UIViewController <JSExports,UIWebViewDelegate,MFMailComposeViewControllerDelegate>
@property (nonatomic, readwrite, strong) JSContext *js;
+ (UIWebView*) webview;
+ (UIImageView*) splashScreen;
- (void) receiveProject:(NSString*) project;
+ (UIWebView *)webview;
+ (UIImageView *)splashScreen;
- (void)receiveProject:(NSString *)project;
- (void)registerDefaultsFromSettingsBundle;
- (void)reload;
@end

@interface ViewController (ViewFinderDelegate) <ViewFinderDelegate>
- (void) showShareEmail:(NSURL *) projectURL withName: (NSString*) name withSubject:(NSString*) subject withBody:(NSString*)body;
- (void) showShareAirdrop:(NSURL *) projectURL;
- (void)showShareEmail:(NSURL *)projectURL
              withName:(NSString *)name
           withSubject:(NSString *)subject
              withBody:(NSString *)body;
- (void)showShareAirdrop:(NSURL *)projectURL;
@end

@interface IO : NSObject

+ (void)init:(ViewController*)vc;
+ (NSString*)getpath;
+ (NSString*)removeFile:(NSString *)str;
+ (NSURL*)getDocumentPath:(NSString *)name;
+ (NSString*) encodeBase64:(NSData*)theData;
+ (NSString *)getpath;
+ (NSString *)removeFile:(NSString *)str;
+ (NSURL *)getDocumentPath:(NSString *)name;
+ (NSString *)encodeBase64:(NSData *)theData;

// Exports
+ (NSString*)getMD5:(NSString*)str;
+ (NSString*) getsettings;
+ (void) cleanassets:(NSString*)fileType;
+ (NSString*) setfile:(NSString*)filename :(NSString*)base64ContentStr;
+ (NSString*)getfile:(NSString *)filename;
+ (NSString*) setmedia:(NSString*) base64ContentStr :(NSString*) extension;
+ (NSString*) setmedianame:(NSString*) contents :(NSString*) key :(NSString*) ext;
+ (NSString*) getmedia:(NSString*) filename;
+ (NSString*) getmediadata:(NSString*)filename :(int) offset :(int) length;
+ (NSString*) getmedialen:(NSString*)file :(NSString*)key;
+ (NSString*) getmediadone:(NSString*)filename;
+ (NSString*) remove:(NSString*)filename;
+ (NSString*) sendSjrUsingShareDialog:(NSString*) fileName :(NSString*) emailSubject :(NSString*) emailBody :(int) shareType :(NSString*) b64data;
+ (NSString *)getMD5:(NSString *)str;
+ (NSString *)getsettings;
+ (void) cleanassets:(NSString *)fileType;
+ (NSString *)setfile:(NSString *)filename :(NSString *)base64ContentStr;
+ (NSString *)getfile:(NSString *)filename;
+ (NSString *)setmedia:(NSString *)base64ContentStr :(NSString *)extension;
+ (NSString *)setmedianame:(NSString *)contents :(NSString *)key :(NSString *)ext;
+ (NSString *)getmedia:(NSString *)filename;
+ (NSString *)getmediadata:(NSString *)filename :(int)offset :(int)length;
+ (NSString *)getmedialen:(NSString *)file :(NSString *)key;
+ (NSString *)getmediadone:(NSString *)filename;
+ (NSString *)remove:(NSString *)filename;
+ (NSString *)sendSjrUsingShareDialog:(NSString *)fileName
                                     :(NSString *)emailSubject
                                     :(NSString *)emailBody
                                     :(int)shareType
                                     :(NSString *)b64data;
+ (NSString *)registerSound:(NSString *)dir :(NSString *)name;
+ (NSString *)playSound:(NSString *)name;
+ (NSString *)stopSound:(NSString *)name;
@end

@interface ScratchJr : NSObject

@@ -166,13 +184,12 @@
+ (void)reportImageError;
+ (void)cameraInit;
+ (void)cameraClose;
+ (NSString *) hideSplash :(NSString *)body;
+ (NSString *)hideSplash :(NSString *)body;

// Exports
+(NSString*) cameracheck;
+(NSString*) startfeed:(NSString*)str;
+(NSString*) stopfeed;
+(NSString*) choosecamera:(NSString*) body;
+(NSString*) captureimage:(NSString*)onCameraCaptureComplete;
+ (NSString *)cameracheck;
+ (NSString *)startfeed:(NSString *)str;
+ (NSString *)stopfeed;
+ (NSString *)choosecamera:(NSString *)body;
+ (NSString *)captureimage:(NSString *)onCameraCaptureComplete;
@end
@@ -78,7 +78,6 @@ JSContext *js;
            [defaultsToRegister setObject:[prefSpecification objectForKey:@"DefaultValue"] forKey:key];
        }
    }
    // NSLog(@"defaultsToRegister %@", defaultsToRegister);
    [[NSUserDefaults standardUserDefaults] registerDefaults:defaultsToRegister];
}

@@ -256,6 +255,18 @@ JSContext *js;
    return [IO remove:filename];
}

-(NSString*) io_registersound:(NSString*)dir :(NSString*)name {
    return [IO registerSound:dir:name];
}

-(NSString*) io_playsound:(NSString*) name {
    return [IO playSound:name];
}

-(NSString*) io_stopsound:(NSString*) name {
    return [IO stopSound:name];
}

-(NSString*) recordsound_recordstart {
    return [RecordSound startRecord];
}
@@ -229,7 +229,7 @@ export default class ScratchJr {
        document.ontouchmove = function (e) {
            e.preventDefault();
        };
        window.ontouchstart = ScratchJr.triggerAudio;
        window.ontouchstart = ScratchJr.unfocus;
        if (isTablet) {
            window.ontouchend = undefined;
        } else {

@@ -237,20 +237,6 @@ export default class ScratchJr {
        }
    }

    static prepareAudio () {
        if (ScratchAudio.firstTime) {
            ScratchAudio.firstClick();
        }
        if (!ScratchAudio.firstTime) {
            window.ontouchstart = ScratchJr.unfocus;
        }
    }

    static triggerAudio (evt) {
        ScratchJr.prepareAudio();
        ScratchJr.unfocus(evt);
    }

    static unfocus (evt) {
        if (Palette.helpballoon) {
            Palette.helpballoon.parentNode.removeChild(Palette.helpballoon);

@@ -456,7 +442,6 @@ export default class ScratchJr {
    }

    static runStrips (e) {
        ScratchJr.prepareAudio();
        ScratchJr.stopStripsFromTop(e);
        ScratchJr.unfocus(e);
        ScratchJr.startGreenFlagThreads();
@@ -289,7 +289,7 @@ export default class Record {
    }

    static closeContinueSave () {
        iOS.recorddisappear('YES', Record.getUserSound);
        iOS.recorddisappear('YES', Record.registerProjectSound);
    }

    static closeContinueRemove () {

@@ -297,18 +297,8 @@ export default class Record {
        iOS.recorddisappear('NO', Record.tearDownRecorder);
    }

    static getUserSound () {
        isRecording = false;
        if (!isAndroid) {
            iOS.getmedia(recordedSound, Record.registerProjectSound);
        } else {
            // On Android, just pass URL
            Record.registerProjectSound(null);
        }
    }

    static registerProjectSound (data) {
        function loadingDone (snd) {
    static registerProjectSound () {
        function whenDone (snd) {
            if (snd != 'error') {
                var spr = ScratchJr.getSprite();
                var page = spr.div.parentNode.owner;

@@ -325,10 +315,10 @@ export default class Record {
            Palette.selectCategory(3);
        }
        if (!isAndroid) {
            ScratchAudio.loadFromData(recordedSound, data, loadingDone);
            ScratchAudio.loadFromLocal('Documents', recordedSound, whenDone);
        } else {
            // On Android, just pass URL
            ScratchAudio.loadFromLocal(recordedSound, loadingDone);
            ScratchAudio.loadFromLocal('', recordedSound, whenDone);
        }
    }

@@ -352,7 +342,6 @@ export default class Record {
            error = false;
        }
        // Refresh audio context
        ScratchAudio.firstTime = true;
        isRecording = false;
        recordedSound = null;
        // Hide the dialog
@@ -53,7 +53,6 @@ function indexFirstTime () {
        iOS.hidesplash(doit);
    }, 500);
    function doit () {
        ScratchAudio.sndFX('tap.wav');
        window.ontouchend = function () {
            indexLoadOptions();
        };

@@ -88,13 +87,6 @@ function indexLoadOptions () {
}

function indexGohome () {
    // On iOS, sounds are loaded async, but the code as written expects to play tap.wav when we enter home.html
    // (but since it isn't loaded yet, no sound is played).
    // On Android, sync sounds means both calls to tap.wav result in a sound play.
    // XXX: we should re-write the lobby loading to wait for the sounds to load, and not play a sound here.
    if (isiOS) {
        ScratchAudio.sndFX('tap.wav');
    }
    iOS.setfile('homescroll.sjr', 0, function () {
        doNext();
    });
@@ -2,6 +2,7 @@ import {isiOS, gn} from '../utils/lib';
import IO from './IO';
import Lobby from '../lobby/Lobby';
import Alert from '../editor/ui/Alert';
import ScratchAudio from '../utils/ScratchAudio';

//////////////////////////////////////////////////
// Tablet interface functions

@@ -180,6 +181,35 @@ export default class iOS {
        }
    }

    // Sound functions

    static registerSound (dir, name, fcn) {
        var result = tabletInterface.io_registersound(dir, name);
        if (fcn) {
            fcn(result);
        }
    }

    static playSound (name, fcn) {
        var result = tabletInterface.io_playsound(name);
        if (fcn) {
            fcn(result);
        }
    }

    static stopSound (name, fcn) {
        var result = tabletInterface.io_stopsound(name);
        if (fcn) {
            fcn(result);
        }
    }

    // Web Wiew delegate call backs

    static soundDone (name) {
        ScratchAudio.soundDone(name);
    }

    static sndrecord (fcn) {
        var result = tabletInterface.recordsound_recordstart();
        if (fcn) {
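The wrappers above are thin pass-throughs to the exported native selectors. How tabletInterface itself gets bound is not shown in this diff; the stub below is a hypothetical stand-in that mimics the native return-string contract, e.g. for poking at the web layer in a plain browser.

// Hypothetical browser-side stub of the native exports used above; all values
// are invented, only the return-string formats follow the diff.
window.tabletInterface = {
    io_registersound: function (dir, name) {
        return name + ',1.0';                           // pretend every sound lasts 1 second
    },
    io_playsound: function (name) {
        setTimeout(function () {
            if (window.iOS) {
                window.iOS.soundDone(name);             // simulate the native completion timer
            }
        }, 1000);
        return name + ' played';
    },
    io_stopsound: function (name) {
        return name + ' stopped';
    }
};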
@@ -7,50 +7,19 @@ import iOS from '../iPad/iOS';
////////////////////////////////////////////////////

let uiSounds = {};
let context;
let firstTime = true;
let defaultSounds = ['cut.wav', 'snap.wav', 'copy.wav', 'grab.wav', 'boing.wav', 'tap.wav',
    'keydown.wav', 'entertap.wav', 'exittap.wav', 'splash.wav'];
let projectSounds = {};
let path = '';

export default class ScratchAudio {
    static get uiSounds () {
        return uiSounds;
    }

    static get firstTime () {
        return firstTime;
    }

    static set firstTime (newFirstTime) {
        firstTime = newFirstTime;
    }

    static get projectSounds () {
        return projectSounds;
    }

    static get context () {
        return context;
    }

    static firstClick () { // trick to abilitate the Audio context in iOS 8+
        var res = true;
        if (uiSounds['keydown.wav']) {
            uiSounds['keydown.wav'].playWithVolume(0);
            res = false;
        }
        firstTime = res;
    }

    static firstOnTouchEnd () { // trick to abilitate the Audio context in iOS 9
        if (uiSounds['keydown.wav']) {
            uiSounds['keydown.wav'].playWithVolume(0);
        }
        window.removeEventListener('touchend', ScratchAudio.firstOnTouchEnd, false);
    }

    static sndFX (name) {
        ScratchAudio.sndFXWithVolume(name, 1.0);
    }

@@ -60,8 +29,7 @@ export default class ScratchAudio {
            if (!uiSounds[name]) {
                return;
            }
            uiSounds[name].playWithVolume(volume);
            firstTime = false;
            uiSounds[name].play();
        } else {
            AndroidInterface.audio_sndfxwithvolume(name, volume);
        }

@@ -72,49 +40,31 @@ export default class ScratchAudio {
            prefix = '';
        }
        if (!isAndroid) {
            context = new webkitAudioContext();
        } else {
            context = {
                decodeAudioData: function () {
                },
                play: function () {
                }
            };
            prefix = 'HTML5/';
        }
        uiSounds = {};

        for (var i = 0; i < defaultSounds.length; i++) {
            ScratchAudio.addSound(prefix + 'sounds/', defaultSounds[i], uiSounds);
        }
        ScratchAudio.addSound(path, prefix + 'pop.mp3', projectSounds);
        ScratchAudio.addSound(prefix, 'pop.mp3', projectSounds);
    }

    static addSound (url, snd, dict, fcn) {
        var name = snd;
        if (!isAndroid) {

            var bufferSound = function () {
                context.decodeAudioData(request.response, onDecode, onDecodeError);
            };
            var onDecodeError = function () {
            var whenDone = function (str) {
                if (str != 'error') {
                    var result = snd.split (',');
                    dict[snd] = new Sound(result[0], result[1]);
                } else {
                    name = 'error';
                }
                if (fcn) {
                    fcn('error');
                    fcn(name);
                }
            };
            var onDecode = function (buffer) {
                dict[snd] = new Sound(buffer);
                if (fcn) {
                    fcn(snd);
                }
            };
            var transferFailed = function (e) {
                e.preventDefault();
                e.stopPropagation();
            };
            var request = new XMLHttpRequest();
            request.open('GET', url + snd, true);
            request.responseType = 'arraybuffer';
            request.addEventListener('load', bufferSound, false);
            request.addEventListener('error', transferFailed, false);
            request.send(null);
            iOS.registerSound(url, snd, whenDone);
        } else {
            // In Android, this is handled outside of JavaScript, so just place a stub here.
            dict[snd] = new Sound(url + snd);

@@ -124,65 +74,29 @@ export default class ScratchAudio {
        }
    }

    static soundDone (name) {
        if (!projectSounds[name]) return;
        projectSounds[name].playing = false;
    }

    static loadProjectSound (md5, fcn) {
        if (!md5) {
            return;
        }
        if (md5.indexOf('/') > -1) {
            ScratchAudio.loadFromLocal(md5, fcn);
        } else {

            if (md5.indexOf('wav') > -1) {
                if (!isAndroid) {
                    iOS.getmedia(md5, nextStep);
                } else {
                    // On Android, all sounds play server-side
                    ScratchAudio.loadFromLocal(md5, fcn);
                }
            } else {
                ScratchAudio.loadFromLocal(md5, fcn);
            }
        }
        function nextStep (data) {
            ScratchAudio.loadFromData(md5, data, fcn);
        var dir = '';
        if (!isAndroid) {
            if (md5.indexOf('/') > -1) dir = 'HTML5/';
            else if (md5.indexOf('wav') > -1) dir = 'Documents';
        }
        ScratchAudio.loadFromLocal(dir, md5, fcn);
    }

    static loadFromLocal (md5, fcn) {
    static loadFromLocal (dir, md5, fcn) {
        if (projectSounds[md5] != undefined) {
            return;
        }
        ScratchAudio.addSound(path, md5, projectSounds, fcn);
    }

    static loadFromData (md5, data, fcn) {
        if (!data) {
            projectSounds[md5] = projectSounds['pop.mp3'];
        } else {
            var onDecode = function (buffer) {
                projectSounds[md5] = new Sound(buffer);
                if (fcn) {
                    fcn(md5);
                }
            };
            var onError = function () {
                // console.log ("error", md5, err);
                if (fcn) {
                    fcn('error');
                }
            };
            var byteString = atob(data); // take out the base 64 encoding
            var buffer = new ArrayBuffer(byteString.length);
            var bytearray = new Uint8Array(buffer);
            for (var i = 0; i < byteString.length; i++) {
                bytearray[i] = byteString.charCodeAt(i);
            }
            context.decodeAudioData(buffer, onDecode, onError);

        }
        ScratchAudio.addSound(dir, md5, projectSounds, fcn);
    }
}

window.ScratchAudio = ScratchAudio;

window.addEventListener('touchend', ScratchAudio.firstOnTouchEnd, false);
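A usage sketch for the reworked loader above (signatures from the diff; the file name is a made-up placeholder). On iOS, loadProjectSound now only picks a directory hint and defers to loadFromLocal, which registers the sound natively through addSound and iOS.registerSound; the callback receives the registered name or 'error'.

import ScratchAudio from '../utils/ScratchAudio';

// '83a9787d.wav' stands in for a project sound's md5-based file name.
ScratchAudio.loadProjectSound('83a9787d.wav', function (res) {
    if (res === 'error') {
        console.log('native registration failed');
    } else {
        console.log(res + ' is ready to play');
    }
});
// Because the name has no '/' and ends in "wav", the iOS path above becomes
// ScratchAudio.loadFromLocal('Documents', '83a9787d.wav', fcn).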
@@ -1,14 +1,15 @@
import {isAndroid} from './lib';
import ScratchAudio from './ScratchAudio';
import iOS from '../iPad/iOS';

export default class Sound {
    constructor (buffer) {
    constructor (name, time) {
        if (isAndroid) {
            this.url = buffer;
            this.url = name;
            this.soundPlayId = null;
        } else {
            this.buffer = buffer;
            this.source = null;
            this.name = name;
            this.time = time;
            this.playing = false;
        }
    }

@@ -19,37 +20,11 @@ export default class Sound {
            }
            this.soundPlayId = AndroidInterface.audio_play(this.url, 1.0);
        } else {
            if (this.source) {
            if (this.playing) {
                this.stop();
            }
            this.source = ScratchAudio.context.createBufferSource();
            this.source.buffer = this.buffer;
            this.source.connect(ScratchAudio.context.destination);
            this.source.noteOn(0);
        }
    }

    playWithVolume (n) {
        if (isAndroid) {
            if (this.soundPlayId) {
                this.stop();
            }

            if (n > 0) {
                // This method is not currently called with any value other than 0. If 0, don't play the sound.
                this.soundPlayId = AndroidInterface.audio_play(this.url, n);
            }
        } else {
            if (this.source) {
                this.stop();
            }
            this.gainNode = ScratchAudio.context.createGainNode();
            this.source = ScratchAudio.context.createBufferSource();
            this.source.buffer = this.buffer;
            this.source.connect(this.gainNode);
            this.gainNode.connect(ScratchAudio.context.destination);
            this.source.noteOn(0);
            this.gainNode.gain.value = n;
            iOS.playSound(this.name);
            this.playing = true;
        }
    }

@@ -57,7 +32,7 @@ export default class Sound {
        if (isAndroid) {
            return (this.soundPlayId == null) || !AndroidInterface.audio_isplaying(this.soundPlayId);
        } else {
            return (this.source == null) || (this.source.playbackState == 3);
            return (!this.playing);
        }
    }

@@ -65,7 +40,7 @@ export default class Sound {
        if (isAndroid) {
            this.soundPlayId = null;
        } else {
            this.source = null;
            this.playing = false;
        }
    }

@@ -76,8 +51,8 @@ export default class Sound {
        }
        this.soundPlayId = null;
        } else {
            this.source.noteOff(0);
            this.source = null;
            iOS.stopSound(this.name);
            this.playing = false;
        }
    }
}
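A short usage sketch of the slimmed-down Sound object on iOS (constructor arguments and the iOS.* forwarding are taken from the hunks above; the duration value and the import path are invented). It no longer holds a decoded WebAudio buffer, just the registered name, the duration reported by AVAudioPlayer, and a playing flag.

import Sound from './Sound';   // path assumed; the class is the default export shown above

var snd = new Sound('pop.mp3', 0.25);   // name plus duration from registerSound, no buffer
snd.playWithVolume(1.0);                // on iOS this forwards to iOS.playSound('pop.mp3')
// ...later the native completion timer fires iOS.soundDone('pop.mp3'),
// and ScratchAudio.soundDone clears the sound's playing flag.
snd.stop();                             // forwards to iOS.stopSound('pop.mp3') on iOS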