mirror of
https://github.com/codeninjasllc/discourse.git
synced 2024-11-27 09:36:19 -05:00
FEATURE: upload backups
This commit is contained in:
parent
23066edbe1
commit
68a935c36b
16 changed files with 1093 additions and 25 deletions
|
@ -0,0 +1,119 @@
|
|||
/*global Resumable:true */

/**
  A button component that wraps the resumable.js library to provide
  chunked, resumable file uploads.

  Example usage:

    {{resumable-upload
        target="/admin/backups/upload"
        success="successAction"
        error="errorAction"
        uploadText="UPLOAD"
    }}

  @class ResumableUploadComponent
  @extends Ember.Component
  @namespace Discourse
  @module Discourse
**/
Discourse.ResumableUploadComponent = Ember.Component.extend({
  tagName: "button",
  classNames: ["btn", "ru"],
  classNameBindings: ["isUploading"],

  // the underlying Resumable instance (created on init, torn down on destroy)
  resumable: null,

  // current upload state
  isUploading: false,
  progress: 0,

  // this component uses a manual `render`, so tell Ember when to re-render
  shouldRerender: Discourse.View.renderIfChanged("isUploading", "progress"),

  // button label: progress percentage while uploading, `uploadText` otherwise
  text: function() {
    return this.get("isUploading") ? this.get("progress") + " %" : this.get("uploadText");
  }.property("isUploading", "progress"),

  render: function(buffer) {
    var icon = this.get("isUploading") ? "times" : "upload";
    buffer.push("<i class='fa fa-" + icon + "'></i>");
    buffer.push("<span class='ru-label'>" + this.get("text") + "</span>");
    buffer.push("<span class='ru-progress' style='width:" + this.get("progress") + "%'></span>");
  },

  // clicking while an upload is in flight cancels it; otherwise let the
  // click through so resumable.js can open the file picker
  click: function() {
    if (!this.get("isUploading")) { return true; }
    this.resumable.cancel();
    var component = this;
    Em.run.later(function() { component._reset(); });
    return false;
  },

  _reset: function() {
    this.setProperties({ isUploading: false, progress: 0 });
  },

  _initialize: function() {
    this.resumable = new Resumable({
      target: this.get("target"),
      maxFiles: 1, // only 1 file at a time
      headers: { "X-CSRF-Token": $("meta[name='csrf-token']").attr("content") }
    });

    var component = this;

    this.resumable.on("fileAdded", function() {
      // automatically upload the selected file
      component.resumable.upload();
      // mark as uploading
      Em.run.later(function() {
        component.set("isUploading", true);
      });
    });

    this.resumable.on("fileProgress", function(file) {
      // update progress
      Em.run.later(function() {
        component.set("progress", parseInt(file.progress() * 100, 10));
      });
    });

    this.resumable.on("fileSuccess", function(file) {
      Em.run.later(function() {
        // mark as not uploading anymore
        component._reset();
        // fire an event to allow the parent route to reload its model
        component.sendAction("success", file.fileName);
      });
    });

    this.resumable.on("fileError", function(file, message) {
      Em.run.later(function() {
        // mark as not uploading anymore
        component._reset();
        // fire an event to allow the parent route to display the error message
        component.sendAction("error", file.fileName, message);
      });
    });
  }.on("init"),

  _assignBrowse: function() {
    var component = this;
    Em.run.schedule("afterRender", function() {
      component.resumable.assignBrowse(component.$());
    });
  }.on("didInsertElement"),

  _teardown: function() {
    if (this.resumable) {
      this.resumable.cancel();
      this.resumable = null;
    }
  }.on("willDestroyElement")

});
|
|
@ -1 +1,5 @@
|
|||
Discourse.AdminBackupsController = Ember.ObjectController.extend({});
|
||||
Discourse.AdminBackupsController = Ember.ObjectController.extend({
  // true when no backup/restore operation is currently running
  noOperationIsRunning: Em.computed.not("isOperationRunning"),
  // rollback requires: canRollback, restoreEnabled, and no running operation
  rollbackEnabled: Em.computed.and("canRollback", "restoreEnabled", "noOperationIsRunning"),
  // inverse used to disable the rollback buttons in the templates
  rollbackDisabled: Em.computed.not("rollbackEnabled"),
});
|
||||
|
|
|
@ -2,17 +2,11 @@ Discourse.AdminBackupsIndexController = Ember.ArrayController.extend({
|
|||
needs: ["adminBackups"],
|
||||
status: Em.computed.alias("controllers.adminBackups"),
|
||||
|
||||
rollbackDisabled: Em.computed.not("rollbackEnabled"),
|
||||
uploadText: function() { return I18n.t("admin.backups.upload.text"); }.property(),
|
||||
|
||||
rollbackEnabled: function() {
|
||||
return this.get("status.canRollback") && this.get("restoreEnabled");
|
||||
}.property("status.canRollback", "restoreEnabled"),
|
||||
readOnlyModeDisabled: Em.computed.alias("status.isOperationRunning"),
|
||||
|
||||
restoreDisabled: Em.computed.not("restoreEnabled"),
|
||||
|
||||
restoreEnabled: function() {
|
||||
return Discourse.SiteSettings.allow_restore && !this.get("status.isOperationRunning");
|
||||
}.property("status.isOperationRunning"),
|
||||
restoreDisabled: Em.computed.alias("status.restoreDisabled"),
|
||||
|
||||
restoreTitle: function() {
|
||||
if (!Discourse.SiteSettings.allow_restore) {
|
||||
|
@ -24,6 +18,8 @@ Discourse.AdminBackupsIndexController = Ember.ArrayController.extend({
|
|||
}
|
||||
}.property("status.isOperationRunning"),
|
||||
|
||||
destroyDisabled: Em.computed.alias("status.isOperationRunning"),
|
||||
|
||||
destroyTitle: function() {
|
||||
if (this.get("status.isOperationRunning")) {
|
||||
return I18n.t("admin.backups.operation_already_running");
|
||||
|
@ -64,7 +60,7 @@ Discourse.AdminBackupsIndexController = Ember.ArrayController.extend({
|
|||
} else {
|
||||
this._toggleReadOnlyMode(false);
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
|
|
17
app/assets/javascripts/admin/models/backup_status.js
Normal file
17
app/assets/javascripts/admin/models/backup_status.js
Normal file
|
@ -0,0 +1,17 @@
|
|||
/**
  Data model for representing the status of backup/restore

  @class BackupStatus
  @extends Discourse.Model
  @namespace Discourse
  @module Discourse
**/
Discourse.BackupStatus = Discourse.Model.extend({

  // restoring is possible only when the site setting allows it
  // and no backup/restore operation is currently running
  restoreEnabled: function() {
    return Discourse.SiteSettings.allow_restore && !this.get("isOperationRunning");
  }.property("isOperationRunning"),

  restoreDisabled: Em.computed.not("restoreEnabled")

});
|
|
@ -29,7 +29,7 @@ Discourse.AdminBackupsRoute = Discourse.Route.extend({
|
|||
return PreloadStore.getAndRemove("operations_status", function() {
|
||||
return Discourse.ajax("/admin/backups/status.json");
|
||||
}).then(function (status) {
|
||||
return Em.Object.create({
|
||||
return Discourse.BackupStatus.create({
|
||||
isOperationRunning: status.is_operation_running,
|
||||
canRollback: status.can_rollback,
|
||||
});
|
||||
|
@ -57,7 +57,7 @@ Discourse.AdminBackupsRoute = Discourse.Route.extend({
|
|||
Discourse.User.currentProp("hideReadOnlyAlert", true);
|
||||
Discourse.Backup.start().then(function() {
|
||||
self.controllerFor("adminBackupsLogs").clear();
|
||||
self.controllerFor("adminBackups").set("isOperationRunning", true);
|
||||
self.modelFor("adminBackups").set("isOperationRunning", true);
|
||||
self.transitionTo("admin.backups.logs");
|
||||
});
|
||||
}
|
||||
|
@ -104,7 +104,7 @@ Discourse.AdminBackupsRoute = Discourse.Route.extend({
|
|||
Discourse.User.currentProp("hideReadOnlyAlert", true);
|
||||
backup.restore().then(function() {
|
||||
self.controllerFor("adminBackupsLogs").clear();
|
||||
self.controllerFor("adminBackups").set("isOperationRunning", true);
|
||||
self.modelFor("adminBackups").set("isOperationRunning", true);
|
||||
self.transitionTo("admin.backups.logs");
|
||||
});
|
||||
}
|
||||
|
@ -148,6 +148,19 @@ Discourse.AdminBackupsRoute = Discourse.Route.extend({
|
|||
}
|
||||
);
|
||||
},
|
||||
|
||||
uploadSuccess: function(filename) {
|
||||
var self = this;
|
||||
bootbox.alert(I18n.t("admin.backups.upload.success", { filename: filename }), function() {
|
||||
Discourse.Backup.find().then(function (backups) {
|
||||
self.controllerFor("adminBackupsIndex").set("model", backups);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
uploadError: function(filename, message) {
|
||||
bootbox.alert(I18n.t("admin.backups.upload.error", { filename: filename, message: message }));
|
||||
},
|
||||
}
|
||||
|
||||
});
|
||||
|
|
|
@ -6,6 +6,9 @@
|
|||
</ul>
|
||||
</div>
|
||||
<div class="pull-right">
|
||||
{{#if canRollback}}
|
||||
<button {{action rollback}} class="btn btn-rollback" title="{{i18n admin.backups.operations.rollback.title}}" {{bind-attr disabled="rollbackDisabled"}}><i class="fa fa-ambulance fa-flip-horizontal"></i>{{i18n admin.backups.operations.rollback.text}}</button>
|
||||
{{/if}}
|
||||
{{#if isOperationRunning}}
|
||||
<button {{action cancelOperation}} class="btn btn-danger" title="{{i18n admin.backups.operations.cancel.title}}"><i class="fa fa-times"></i>{{i18n admin.backups.operations.cancel.text}}</button>
|
||||
{{else}}
|
||||
|
|
|
@ -3,10 +3,8 @@
|
|||
<th width="40%">{{i18n admin.backups.columns.filename}}</th>
|
||||
<th width="30%">{{i18n admin.backups.columns.size}}</th>
|
||||
<th>
|
||||
<button {{action toggleReadOnlyMode}} class="btn" {{bind-attr title=readOnlyModeTitle}}><i class="fa fa-eye"></i>{{readOnlyModeText}}</button>
|
||||
{{#if status.canRollback}}
|
||||
<button {{action rollback}} class="btn btn-rollback" title="{{i18n admin.backups.operations.rollback.title}}" {{bind-attr disabled=rollbackDisabled}}><i class="fa fa-ambulance fa-flip-horizontal"></i>{{i18n admin.backups.operations.rollback.text}}</button>
|
||||
{{/if}}
|
||||
{{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadText}}
|
||||
<button {{action toggleReadOnlyMode}} class="btn" {{bind-attr disabled="readOnlyModeDisabled" title="readOnlyModeTitle"}}><i class="fa fa-eye"></i>{{readOnlyModeText}}</button>
|
||||
</th>
|
||||
</tr>
|
||||
{{#each backup in model}}
|
||||
|
@ -15,7 +13,7 @@
|
|||
<td>{{humanSize backup.size}}</td>
|
||||
<td>
|
||||
<a {{bind-attr href="backup.link"}} class="btn download" title="{{i18n admin.backups.operations.download.title}}"><i class="fa fa-download"></i>{{i18n admin.backups.operations.download.text}}</a>
|
||||
<button {{action destroyBackup backup}} class="btn btn-danger" {{bind-attr disabled="status.isOperationRunning" title="destroyTitle"}}><i class="fa fa-trash-o"></i>{{i18n admin.backups.operations.destroy.text}}</button>
|
||||
<button {{action destroyBackup backup}} class="btn btn-danger" {{bind-attr disabled="destroyDisabled" title="destroyTitle"}}><i class="fa fa-trash-o"></i>{{i18n admin.backups.operations.destroy.text}}</button>
|
||||
<button {{action startRestore backup}} class="btn" {{bind-attr disabled="restoreDisabled" title="restoreTitle"}}><i class="fa fa-undo"></i>{{i18n admin.backups.operations.restore.text}}</button>
|
||||
</td>
|
||||
</tr>
|
||||
|
|
|
@ -1 +1,3 @@
|
|||
//= require_tree ./admin
|
||||
|
||||
//= require resumable.js
|
||||
|
|
|
@ -987,3 +987,21 @@ $rollback-darker: darken($rollback, 20%) !default;
|
|||
max-height: 500px;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
|
||||
button.ru {
|
||||
position: relative;
|
||||
min-width: 110px;
|
||||
}
|
||||
|
||||
.ru-progress {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
height: 100%;
|
||||
background: rgba(0, 175, 0, 0.3);
|
||||
}
|
||||
|
||||
.is-uploading:hover .ru-progress {
|
||||
background: rgba(200, 0, 0, 0.3);
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ require_dependency "backup_restore"
|
|||
|
||||
class Admin::BackupsController < Admin::AdminController
|
||||
|
||||
skip_before_filter :check_xhr, only: [:index, :show, :logs]
|
||||
skip_before_filter :check_xhr, only: [:index, :show, :logs, :check_chunk, :upload_chunk]
|
||||
|
||||
def index
|
||||
respond_to do |format|
|
||||
|
@ -83,4 +83,51 @@ class Admin::BackupsController < Admin::AdminController
|
|||
render nothing: true
|
||||
end
|
||||
|
||||
# GET endpoint polled by resumable.js before uploading a chunk, so that
# interrupted uploads can be resumed without re-sending existing chunks.
# Renders 200 when the chunk is already fully uploaded, 404 otherwise.
def check_chunk
  identifier = params.fetch(:resumableIdentifier)
  filename = params.fetch(:resumableFilename)
  chunk_number = params.fetch(:resumableChunkNumber)
  current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i

  # path to chunk file
  chunk = Backup.chunk_path(identifier, filename, chunk_number)
  # check whether the chunk has already been uploaded
  # (File.exist? — File.exists? is deprecated)
  has_chunk_been_uploaded = File.exist?(chunk) && File.size(chunk) == current_chunk_size
  # 200 = exists, 404 = not uploaded yet
  status = has_chunk_been_uploaded ? 200 : 404

  render nothing: true, status: status
end
|
||||
|
||||
# POST endpoint receiving one chunk of a backup upload from resumable.js.
# Stores the chunk on disk and, once the last chunk arrives, enqueues a
# background job to merge all chunks into the final backup file.
# Renders 415 when the filename is not a .tar.gz backup archive.
def upload_chunk
  filename = params.fetch(:resumableFilename)
  return render nothing: true, status: 415 unless filename.to_s.end_with?(".tar.gz")

  file = params.fetch(:file)
  identifier = params.fetch(:resumableIdentifier)
  chunk_number = params.fetch(:resumableChunkNumber).to_i
  chunk_size = params.fetch(:resumableChunkSize).to_i
  total_size = params.fetch(:resumableTotalSize).to_i
  current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i

  # path to chunk file
  chunk = Backup.chunk_path(identifier, filename, chunk_number)
  dir = File.dirname(chunk)

  # ensure directory exists (Dir.exist? — Dir.exists? is deprecated)
  FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
  # save chunk to the directory
  File.open(chunk, "wb") { |f| f.write(file.tempfile.read) }

  # size uploaded so far, assuming all previous chunks use the nominal size
  uploaded_file_size = chunk_number * chunk_size

  # when all chunks are uploaded
  if uploaded_file_size + current_chunk_size >= total_size
    # merge all the chunks in a background thread
    Jobs.enqueue(:backup_chunks_merger, filename: filename, identifier: identifier, chunks: chunk_number)
  end

  render nothing: true
end
|
||||
|
||||
end
|
||||
|
|
38
app/jobs/regular/backup_chunks_merger.rb
Normal file
38
app/jobs/regular/backup_chunks_merger.rb
Normal file
|
@ -0,0 +1,38 @@
|
|||
module Jobs

  # Merges the chunks of a backup uploaded via resumable.js into a single
  # backup archive, deleting each chunk (and finally the whole temporary
  # chunk directory) along the way.
  class BackupChunksMerger < Jobs::Base
    sidekiq_options retry: false

    # @param args [Hash] :filename   name of the final backup archive
    #                    :identifier unique upload identifier (tmp dir name)
    #                    :chunks     total number of chunks to merge
    def execute(args)
      filename = args[:filename]
      identifier = args[:identifier]
      chunks = args[:chunks].to_i

      raise Discourse::InvalidParameters.new(:filename) if filename.blank?
      raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
      raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0

      # destination of the merged backup
      # NOTE(review): interpolation reconstructed from garbled source — the
      # destination must be the final backup file named after `filename`
      backup = "#{Backup.base_directory}/#{filename}"
      # temporary directory holding the chunks (matches Backup.chunk_path layout)
      directory = File.join(Backup.base_directory, "tmp", identifier)

      # delete destination
      File.delete(backup) rescue nil

      # merge all the chunks (renamed block variable so it no longer
      # shadows the `backup` path local)
      File.open(backup, "a") do |backup_file|
        (1..chunks).each do |chunk_number|
          # path to chunk
          path = Backup.chunk_path(identifier, filename, chunk_number)
          # add chunk to backup
          backup_file << File.open(path).read
          # delete chunk
          File.delete(path) rescue nil
        end
      end

      # remove tmp directory
      FileUtils.rm_rf(directory) rescue nil
    end

  end

end
|
|
@ -34,4 +34,8 @@ class Backup
|
|||
@base_directory ||= File.join(Rails.root, "public", "backups", RailsMultisite::ConnectionManagement.current_db)
|
||||
end
|
||||
|
||||
# Path where a chunk of an in-progress backup upload is stored:
#   <base_directory>/tmp/<identifier>/<filename>.part<chunk_number>
# NOTE(review): "#{filename}" reconstructed from garbled source — it is the
# only interpolation consistent with the ".part<n>" suffix and the merger job.
def self.chunk_path(identifier, filename, chunk_number)
  File.join(Backup.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
end
|
||||
|
||||
end
|
||||
|
|
|
@ -1341,6 +1341,11 @@ en:
|
|||
columns:
|
||||
filename: "Filename"
|
||||
size: "Size"
|
||||
upload:
|
||||
text: "UPLOAD"
|
||||
uploading: "UPLOADING"
|
||||
success: "'{{filename}}' has successfully been uploaded."
|
||||
error: "There has been an error ({{message}}) while uploading '{{filename}}'"
|
||||
operations:
|
||||
is_running: "An operation is currently running..."
|
||||
failed: "The {{operation}} failed. Please check the logs."
|
||||
|
|
|
@ -121,6 +121,8 @@ Discourse::Application.routes.draw do
|
|||
get "cancel" => "backups#cancel"
|
||||
get "rollback" => "backups#rollback"
|
||||
put "readonly" => "backups#readonly"
|
||||
get "upload" => "backups#check_chunk"
|
||||
post "upload" => "backups#upload_chunk"
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -71,10 +71,6 @@ module BackupRestore
|
|||
ActiveRecord::Migrator.current_version
|
||||
end
|
||||
|
||||
def self.can_rollback?
|
||||
User.exec_sql("SELECT 1 FROM pg_namespace WHERE nspname = 'backup'").count > 0
|
||||
end
|
||||
|
||||
def self.move_tables_between_schemas(source, destination)
|
||||
User.exec_sql(move_tables_between_schemas_sql(source, destination))
|
||||
end
|
||||
|
|
806
vendor/assets/javascripts/resumable.js
vendored
Normal file
806
vendor/assets/javascripts/resumable.js
vendored
Normal file
|
@ -0,0 +1,806 @@
|
|||
/*
|
||||
* MIT Licensed
|
||||
* http://www.23developer.com/opensource
|
||||
* http://github.com/23/resumable.js
|
||||
* Steffen Tiedemann Christensen, steffen@23company.com
|
||||
*/
|
||||
|
||||
(function(){
|
||||
"use strict";
|
||||
|
||||
var Resumable = function(opts){
|
||||
if ( !(this instanceof Resumable) ) {
|
||||
return new Resumable(opts);
|
||||
}
|
||||
this.version = 1.0;
|
||||
// SUPPORTED BY BROWSER?
|
||||
// Check if these features are support by the browser:
|
||||
// - File object type
|
||||
// - Blob object type
|
||||
// - FileList object type
|
||||
// - slicing files
|
||||
this.support = (
|
||||
(typeof(File)!=='undefined')
|
||||
&&
|
||||
(typeof(Blob)!=='undefined')
|
||||
&&
|
||||
(typeof(FileList)!=='undefined')
|
||||
&&
|
||||
(!!Blob.prototype.webkitSlice||!!Blob.prototype.mozSlice||!!Blob.prototype.slice||false)
|
||||
);
|
||||
if(!this.support) return(false);
|
||||
|
||||
|
||||
// PROPERTIES
|
||||
var $ = this;
|
||||
$.files = [];
|
||||
$.defaults = {
|
||||
chunkSize:1*1024*1024,
|
||||
forceChunkSize:false,
|
||||
simultaneousUploads:3,
|
||||
fileParameterName:'file',
|
||||
throttleProgressCallbacks:0.5,
|
||||
query:{},
|
||||
headers:{},
|
||||
preprocess:null,
|
||||
method:'multipart',
|
||||
prioritizeFirstAndLastChunk:false,
|
||||
target:'/',
|
||||
testChunks:true,
|
||||
generateUniqueIdentifier:null,
|
||||
maxChunkRetries:undefined,
|
||||
chunkRetryInterval:undefined,
|
||||
permanentErrors:[404, 415, 500, 501],
|
||||
maxFiles:undefined,
|
||||
withCredentials:false,
|
||||
xhrTimeout:0,
|
||||
maxFilesErrorCallback:function (files, errorCount) {
|
||||
var maxFiles = $.getOpt('maxFiles');
|
||||
alert('Please upload ' + maxFiles + ' file' + (maxFiles === 1 ? '' : 's') + ' at a time.');
|
||||
},
|
||||
minFileSize:1,
|
||||
minFileSizeErrorCallback:function(file, errorCount) {
|
||||
alert(file.fileName||file.name +' is too small, please upload files larger than ' + $h.formatSize($.getOpt('minFileSize')) + '.');
|
||||
},
|
||||
maxFileSize:undefined,
|
||||
maxFileSizeErrorCallback:function(file, errorCount) {
|
||||
alert(file.fileName||file.name +' is too large, please upload files less than ' + $h.formatSize($.getOpt('maxFileSize')) + '.');
|
||||
},
|
||||
fileType: [],
|
||||
fileTypeErrorCallback: function(file, errorCount) {
|
||||
alert(file.fileName||file.name +' has type not allowed, please upload files of type ' + $.getOpt('fileType') + '.');
|
||||
}
|
||||
};
|
||||
$.opts = opts||{};
|
||||
$.getOpt = function(o) {
|
||||
var $opt = this;
|
||||
// Get multiple option if passed an array
|
||||
if(o instanceof Array) {
|
||||
var options = {};
|
||||
$h.each(o, function(option){
|
||||
options[option] = $opt.getOpt(option);
|
||||
});
|
||||
return options;
|
||||
}
|
||||
// Otherwise, just return a simple option
|
||||
if ($opt instanceof ResumableChunk) {
|
||||
if (typeof $opt.opts[o] !== 'undefined') { return $opt.opts[o]; }
|
||||
else { $opt = $opt.fileObj; }
|
||||
}
|
||||
if ($opt instanceof ResumableFile) {
|
||||
if (typeof $opt.opts[o] !== 'undefined') { return $opt.opts[o]; }
|
||||
else { $opt = $opt.resumableObj; }
|
||||
}
|
||||
if ($opt instanceof Resumable) {
|
||||
if (typeof $opt.opts[o] !== 'undefined') { return $opt.opts[o]; }
|
||||
else { return $opt.defaults[o]; }
|
||||
}
|
||||
};
|
||||
|
||||
// EVENTS
|
||||
// catchAll(event, ...)
|
||||
// fileSuccess(file), fileProgress(file), fileAdded(file, event), fileRetry(file), fileError(file, message),
|
||||
// complete(), progress(), error(message, file), pause()
|
||||
$.events = [];
|
||||
$.on = function(event,callback){
|
||||
$.events.push(event.toLowerCase(), callback);
|
||||
};
|
||||
$.fire = function(){
|
||||
// `arguments` is an object, not array, in FF, so:
|
||||
var args = [];
|
||||
for (var i=0; i<arguments.length; i++) args.push(arguments[i]);
|
||||
// Find event listeners, and support pseudo-event `catchAll`
|
||||
var event = args[0].toLowerCase();
|
||||
for (var i=0; i<=$.events.length; i+=2) {
|
||||
if($.events[i]==event) $.events[i+1].apply($,args.slice(1));
|
||||
if($.events[i]=='catchall') $.events[i+1].apply(null,args);
|
||||
}
|
||||
if(event=='fileerror') $.fire('error', args[2], args[1]);
|
||||
if(event=='fileprogress') $.fire('progress');
|
||||
};
|
||||
|
||||
|
||||
// INTERNAL HELPER METHODS (handy, but ultimately not part of uploading)
|
||||
var $h = {
|
||||
stopEvent: function(e){
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
},
|
||||
each: function(o,callback){
|
||||
if(typeof(o.length)!=='undefined') {
|
||||
for (var i=0; i<o.length; i++) {
|
||||
// Array or FileList
|
||||
if(callback(o[i])===false) return;
|
||||
}
|
||||
} else {
|
||||
for (i in o) {
|
||||
// Object
|
||||
if(callback(i,o[i])===false) return;
|
||||
}
|
||||
}
|
||||
},
|
||||
generateUniqueIdentifier:function(file){
|
||||
var custom = $.getOpt('generateUniqueIdentifier');
|
||||
if(typeof custom === 'function') {
|
||||
return custom(file);
|
||||
}
|
||||
var relativePath = file.webkitRelativePath||file.fileName||file.name; // Some confusion in different versions of Firefox
|
||||
var size = file.size;
|
||||
return(size + '-' + relativePath.replace(/[^0-9a-zA-Z_-]/img, ''));
|
||||
},
|
||||
contains:function(array,test) {
|
||||
var result = false;
|
||||
|
||||
$h.each(array, function(value) {
|
||||
if (value == test) {
|
||||
result = true;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
return result;
|
||||
},
|
||||
formatSize:function(size){
|
||||
if(size<1024) {
|
||||
return size + ' bytes';
|
||||
} else if(size<1024*1024) {
|
||||
return (size/1024.0).toFixed(0) + ' KB';
|
||||
} else if(size<1024*1024*1024) {
|
||||
return (size/1024.0/1024.0).toFixed(1) + ' MB';
|
||||
} else {
|
||||
return (size/1024.0/1024.0/1024.0).toFixed(1) + ' GB';
|
||||
}
|
||||
},
|
||||
getTarget:function(params){
|
||||
var target = $.getOpt('target');
|
||||
if(target.indexOf('?') < 0) {
|
||||
target += '?';
|
||||
} else {
|
||||
target += '&';
|
||||
}
|
||||
return target + params.join('&');
|
||||
}
|
||||
};
|
||||
|
||||
var onDrop = function(event){
|
||||
$h.stopEvent(event);
|
||||
appendFilesFromFileList(event.dataTransfer.files, event);
|
||||
};
|
||||
var onDragOver = function(e) {
|
||||
e.preventDefault();
|
||||
};
|
||||
|
||||
// INTERNAL METHODS (both handy and responsible for the heavy load)
|
||||
var appendFilesFromFileList = function(fileList, event){
|
||||
// check for uploading too many files
|
||||
var errorCount = 0;
|
||||
var o = $.getOpt(['maxFiles', 'minFileSize', 'maxFileSize', 'maxFilesErrorCallback', 'minFileSizeErrorCallback', 'maxFileSizeErrorCallback', 'fileType', 'fileTypeErrorCallback']);
|
||||
if (typeof(o.maxFiles)!=='undefined' && o.maxFiles<(fileList.length+$.files.length)) {
|
||||
// if single-file upload, file is already added, and trying to add 1 new file, simply replace the already-added file
|
||||
if (o.maxFiles===1 && $.files.length===1 && fileList.length===1) {
|
||||
$.removeFile($.files[0]);
|
||||
} else {
|
||||
o.maxFilesErrorCallback(fileList, errorCount++);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
var files = [];
|
||||
$h.each(fileList, function(file){
|
||||
var fileName = file.name.split('.');
|
||||
var fileType = fileName[fileName.length-1].toLowerCase();
|
||||
|
||||
if (o.fileType.length > 0 && !$h.contains(o.fileType, fileType)) {
|
||||
o.fileTypeErrorCallback(file, errorCount++);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof(o.minFileSize)!=='undefined' && file.size<o.minFileSize) {
|
||||
o.minFileSizeErrorCallback(file, errorCount++);
|
||||
return false;
|
||||
}
|
||||
if (typeof(o.maxFileSize)!=='undefined' && file.size>o.maxFileSize) {
|
||||
o.maxFileSizeErrorCallback(file, errorCount++);
|
||||
return false;
|
||||
}
|
||||
|
||||
// directories have size == 0
|
||||
if (!$.getFromUniqueIdentifier($h.generateUniqueIdentifier(file))) {(function(){
|
||||
var f = new ResumableFile($, file);
|
||||
window.setTimeout(function(){
|
||||
$.files.push(f);
|
||||
files.push(f);
|
||||
$.fire('fileAdded', f, event)
|
||||
},0);
|
||||
})()};
|
||||
});
|
||||
window.setTimeout(function(){
|
||||
$.fire('filesAdded', files)
|
||||
},0);
|
||||
};
|
||||
|
||||
// INTERNAL OBJECT TYPES
|
||||
function ResumableFile(resumableObj, file){
|
||||
var $ = this;
|
||||
$.opts = {};
|
||||
$.getOpt = resumableObj.getOpt;
|
||||
$._prevProgress = 0;
|
||||
$.resumableObj = resumableObj;
|
||||
$.file = file;
|
||||
$.fileName = file.fileName||file.name; // Some confusion in different versions of Firefox
|
||||
$.size = file.size;
|
||||
$.relativePath = file.webkitRelativePath || $.fileName;
|
||||
$.uniqueIdentifier = $h.generateUniqueIdentifier(file);
|
||||
$._pause = false;
|
||||
var _error = false;
|
||||
|
||||
// Callback when something happens within the chunk
|
||||
var chunkEvent = function(event, message){
|
||||
// event can be 'progress', 'success', 'error' or 'retry'
|
||||
switch(event){
|
||||
case 'progress':
|
||||
$.resumableObj.fire('fileProgress', $);
|
||||
break;
|
||||
case 'error':
|
||||
$.abort();
|
||||
_error = true;
|
||||
$.chunks = [];
|
||||
$.resumableObj.fire('fileError', $, message);
|
||||
break;
|
||||
case 'success':
|
||||
if(_error) return;
|
||||
$.resumableObj.fire('fileProgress', $); // it's at least progress
|
||||
if($.isComplete()) {
|
||||
$.resumableObj.fire('fileSuccess', $, message);
|
||||
}
|
||||
break;
|
||||
case 'retry':
|
||||
$.resumableObj.fire('fileRetry', $);
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
// Main code to set up a file object with chunks,
|
||||
// packaged to be able to handle retries if needed.
|
||||
$.chunks = [];
|
||||
$.abort = function(){
|
||||
// Stop current uploads
|
||||
var abortCount = 0;
|
||||
$h.each($.chunks, function(c){
|
||||
if(c.status()=='uploading') {
|
||||
c.abort();
|
||||
abortCount++;
|
||||
}
|
||||
});
|
||||
if(abortCount>0) $.resumableObj.fire('fileProgress', $);
|
||||
}
|
||||
$.cancel = function(){
|
||||
// Reset this file to be void
|
||||
var _chunks = $.chunks;
|
||||
$.chunks = [];
|
||||
// Stop current uploads
|
||||
$h.each(_chunks, function(c){
|
||||
if(c.status()=='uploading') {
|
||||
c.abort();
|
||||
$.resumableObj.uploadNextChunk();
|
||||
}
|
||||
});
|
||||
$.resumableObj.removeFile($);
|
||||
$.resumableObj.fire('fileProgress', $);
|
||||
};
|
||||
$.retry = function(){
|
||||
$.bootstrap();
|
||||
$.resumableObj.upload();
|
||||
};
|
||||
$.bootstrap = function(){
|
||||
$.abort();
|
||||
_error = false;
|
||||
// Rebuild stack of chunks from file
|
||||
$.chunks = [];
|
||||
$._prevProgress = 0;
|
||||
var round = $.getOpt('forceChunkSize') ? Math.ceil : Math.floor;
|
||||
var maxOffset = Math.max(round($.file.size/$.getOpt('chunkSize')),1);
|
||||
for (var offset=0; offset<maxOffset; offset++) {(function(offset){
|
||||
window.setTimeout(function(){
|
||||
$.chunks.push(new ResumableChunk($.resumableObj, $, offset, chunkEvent));
|
||||
$.resumableObj.fire('chunkingProgress',$,offset/maxOffset);
|
||||
},0);
|
||||
})(offset)}
|
||||
window.setTimeout(function(){
|
||||
$.resumableObj.fire('chunkingComplete',$);
|
||||
},0);
|
||||
};
|
||||
$.progress = function(){
|
||||
if(_error) return(1);
|
||||
// Sum up progress across everything
|
||||
var ret = 0;
|
||||
var error = false;
|
||||
$h.each($.chunks, function(c){
|
||||
if(c.status()=='error') error = true;
|
||||
ret += c.progress(true); // get chunk progress relative to entire file
|
||||
});
|
||||
ret = (error ? 1 : (ret>0.999 ? 1 : ret));
|
||||
ret = Math.max($._prevProgress, ret); // We don't want to lose percentages when an upload is paused
|
||||
$._prevProgress = ret;
|
||||
return(ret);
|
||||
};
|
||||
$.isUploading = function(){
|
||||
var uploading = false;
|
||||
$h.each($.chunks, function(chunk){
|
||||
if(chunk.status()=='uploading') {
|
||||
uploading = true;
|
||||
return(false);
|
||||
}
|
||||
});
|
||||
return(uploading);
|
||||
};
|
||||
$.isComplete = function(){
|
||||
var outstanding = false;
|
||||
$h.each($.chunks, function(chunk){
|
||||
var status = chunk.status();
|
||||
if(status=='pending' || status=='uploading' || chunk.preprocessState === 1) {
|
||||
outstanding = true;
|
||||
return(false);
|
||||
}
|
||||
});
|
||||
return(!outstanding);
|
||||
};
|
||||
$.pause = function(pause){
|
||||
if(typeof(pause)==='undefined'){
|
||||
$._pause = ($._pause ? false : true);
|
||||
}else{
|
||||
$._pause = pause;
|
||||
}
|
||||
};
|
||||
$.isPaused = function() {
|
||||
return $._pause;
|
||||
};
|
||||
|
||||
|
||||
// Bootstrap and return
|
||||
$.resumableObj.fire('chunkingStart', $);
|
||||
$.bootstrap();
|
||||
return(this);
|
||||
}
|
||||
|
||||
/**
 * One chunk of a ResumableFile.
 *
 * Owns the byte range [startByte, endByte) of the underlying file and the
 * XHR used to upload it. Responsible for:
 *   - test():  GET request to ask the server whether this chunk already exists
 *   - send():  POST request uploading the actual bytes (optionally preprocessed)
 *   - retry bookkeeping, abort, status/message/progress reporting
 *
 * @param resumableObj  the owning Resumable instance (supplies getOpt and the upload queue)
 * @param fileObj       the owning ResumableFile wrapper (supplies size, identifier, chunks, …)
 * @param offset        zero-based index of this chunk within the file
 * @param callback      function(event[, message]) — invoked with 'progress',
 *                      'success', 'error' or 'retry' as the chunk advances
 *
 * NOTE: `$h` (each/contains/getTarget) is the library's internal helper object,
 * defined earlier in this file.
 */
function ResumableChunk(resumableObj, fileObj, offset, callback){
  var $ = this; // local alias for this chunk; NOT jQuery
  $.opts = {};
  $.getOpt = resumableObj.getOpt;
  $.resumableObj = resumableObj;
  $.fileObj = fileObj;
  $.fileObjSize = fileObj.size;
  $.fileObjType = fileObj.file.type;
  $.offset = offset;
  $.callback = callback;
  // Timestamp of the last 'progress' callback, used for throttling below.
  $.lastProgressCallback = (new Date);
  $.tested = false;       // true once the test() GET has completed
  $.retries = 0;
  $.pendingRetry = false; // true while a setTimeout'd retry is queued
  $.preprocessState = 0; // 0 = unprocessed, 1 = processing, 2 = finished

  // Computed properties
  var chunkSize = $.getOpt('chunkSize');
  $.loaded = 0; // bytes uploaded so far, updated from XHR progress events
  $.startByte = $.offset*chunkSize;
  $.endByte = Math.min($.fileObjSize, ($.offset+1)*chunkSize);
  if ($.fileObjSize-$.endByte < chunkSize && !$.getOpt('forceChunkSize')) {
    // The last chunk will be bigger than the chunk size, but less than 2*chunkSize
    $.endByte = $.fileObjSize;
  }
  $.xhr = null;

  // test() makes a GET request without any data to see if the chunk has already been uploaded in a previous session
  $.test = function(){
    // Set up request and listen for event
    $.xhr = new XMLHttpRequest();

    // Both load and error mark the chunk as tested; on anything other than
    // 'success' we fall through to actually uploading the chunk.
    var testHandler = function(e){
      $.tested = true;
      var status = $.status();
      if(status=='success') {
        $.callback(status, $.message());
        $.resumableObj.uploadNextChunk();
      } else {
        $.send();
      }
    };
    $.xhr.addEventListener('load', testHandler, false);
    $.xhr.addEventListener('error', testHandler, false);

    // Add data from the query options
    var params = [];
    var customQuery = $.getOpt('query');
    if(typeof customQuery == 'function') customQuery = customQuery($.fileObj, $);
    $h.each(customQuery, function(k,v){
      params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='));
    });
    // Add extra data to identify chunk
    // NOTE(review): unlike send()'s query object below, this list omits
    // resumableTotalChunks — confirm against the server-side test handler.
    params.push(['resumableChunkNumber', encodeURIComponent($.offset+1)].join('='));
    params.push(['resumableChunkSize', encodeURIComponent($.getOpt('chunkSize'))].join('='));
    params.push(['resumableCurrentChunkSize', encodeURIComponent($.endByte - $.startByte)].join('='));
    params.push(['resumableTotalSize', encodeURIComponent($.fileObjSize)].join('='));
    params.push(['resumableType', encodeURIComponent($.fileObjType)].join('='));
    params.push(['resumableIdentifier', encodeURIComponent($.fileObj.uniqueIdentifier)].join('='));
    params.push(['resumableFilename', encodeURIComponent($.fileObj.fileName)].join('='));
    params.push(['resumableRelativePath', encodeURIComponent($.fileObj.relativePath)].join('='));
    // Append the relevant chunk and send it
    $.xhr.open('GET', $h.getTarget(params));
    $.xhr.timeout = $.getOpt('xhrTimeout');
    $.xhr.withCredentials = $.getOpt('withCredentials');
    // Add data from header options
    $h.each($.getOpt('headers'), function(k,v) {
      $.xhr.setRequestHeader(k, v);
    });
    $.xhr.send(null);
  };

  // Called by a user-supplied 'preprocess' function once it has finished
  // transforming the chunk; re-enters send() with preprocessState == 2.
  $.preprocessFinished = function(){
    $.preprocessState = 2;
    $.send();
  };

  // send() uploads the actual data in a POST call
  $.send = function(){
    // If a preprocess hook is configured, run it exactly once and bail out
    // until preprocessFinished() brings us back (state 2).
    var preprocess = $.getOpt('preprocess');
    if(typeof preprocess === 'function') {
      switch($.preprocessState) {
        case 0: preprocess($); $.preprocessState = 1; return;
        case 1: return;
        case 2: break;
      }
    }
    // With testChunks enabled, the first send() turns into a test() probe;
    // test()'s handler calls back into send() if the chunk is missing.
    if($.getOpt('testChunks') && !$.tested) {
      $.test();
      return;
    }

    // Set up request and listen for event
    $.xhr = new XMLHttpRequest();

    // Progress — throttled so the callback fires at most once per
    // throttleProgressCallbacks seconds.
    $.xhr.upload.addEventListener('progress', function(e){
      if( (new Date) - $.lastProgressCallback > $.getOpt('throttleProgressCallbacks') * 1000 ) {
        $.callback('progress');
        $.lastProgressCallback = (new Date);
      }
      $.loaded=e.loaded||0;
    }, false);
    $.loaded = 0;
    $.pendingRetry = false;
    $.callback('progress');

    // Done (either done, failed or retry)
    var doneHandler = function(e){
      var status = $.status();
      if(status=='success'||status=='error') {
        // Terminal outcome: report it and let the queue advance.
        $.callback(status, $.message());
        $.resumableObj.uploadNextChunk();
      } else {
        // Transient failure: abort, count the retry, and re-send either
        // after chunkRetryInterval ms or immediately.
        $.callback('retry', $.message());
        $.abort();
        $.retries++;
        var retryInterval = $.getOpt('chunkRetryInterval');
        if(retryInterval !== undefined) {
          $.pendingRetry = true;
          setTimeout($.send, retryInterval);
        } else {
          $.send();
        }
      }
    };
    $.xhr.addEventListener('load', doneHandler, false);
    $.xhr.addEventListener('error', doneHandler, false);

    // Set up the basic query data from Resumable
    var query = {
      resumableChunkNumber: $.offset+1,
      resumableChunkSize: $.getOpt('chunkSize'),
      resumableCurrentChunkSize: $.endByte - $.startByte,
      resumableTotalSize: $.fileObjSize,
      resumableType: $.fileObjType,
      resumableIdentifier: $.fileObj.uniqueIdentifier,
      resumableFilename: $.fileObj.fileName,
      resumableRelativePath: $.fileObj.relativePath,
      resumableTotalChunks: $.fileObj.chunks.length
    };
    // Mix in custom data
    var customQuery = $.getOpt('query');
    if(typeof customQuery == 'function') customQuery = customQuery($.fileObj, $);
    $h.each(customQuery, function(k,v){
      query[k] = v;
    });

    // Pick whichever Blob.slice variant this browser supports (older
    // Firefox/WebKit used vendor-prefixed names).
    var func = ($.fileObj.file.slice ? 'slice' : ($.fileObj.file.mozSlice ? 'mozSlice' : ($.fileObj.file.webkitSlice ? 'webkitSlice' : 'slice'))),
      bytes = $.fileObj.file[func]($.startByte,$.endByte),
      data = null,
      target = $.getOpt('target');

    if ($.getOpt('method') === 'octet') {
      // 'octet' method: raw bytes as the body, metadata in the query string.
      data = bytes;
      var params = [];
      $h.each(query, function(k,v){
        params.push([encodeURIComponent(k), encodeURIComponent(v)].join('='));
      });
      target = $h.getTarget(params);
    } else {
      // Default method: multipart/form-data with metadata fields plus the
      // chunk bytes under fileParameterName.
      data = new FormData();
      $h.each(query, function(k,v){
        data.append(k,v);
      });
      data.append($.getOpt('fileParameterName'), bytes);
    }

    $.xhr.open('POST', target);
    $.xhr.timeout = $.getOpt('xhrTimeout');
    $.xhr.withCredentials = $.getOpt('withCredentials');
    // Add data from header options
    $h.each($.getOpt('headers'), function(k,v) {
      $.xhr.setRequestHeader(k, v);
    });
    $.xhr.send(data);
  };

  // Abort the in-flight request (if any) and reset so status() reads 'pending'.
  $.abort = function(){
    // Abort and reset
    if($.xhr) $.xhr.abort();
    $.xhr = null;
  };

  $.status = function(){
    // Returns: 'pending', 'uploading', 'success', 'error'
    if($.pendingRetry) {
      // if pending retry then that's effectively the same as actively uploading,
      // there might just be a slight delay before the retry starts
      return('uploading')
    } else if(!$.xhr) {
      return('pending');
    } else if($.xhr.readyState<4) {
      // Status is really 'OPENED', 'HEADERS_RECEIVED' or 'LOADING' - meaning that stuff is happening
      return('uploading');
    } else {
      if($.xhr.status==200) {
        // HTTP 200, perfect
        return('success');
      } else if($h.contains($.getOpt('permanentErrors'), $.xhr.status) || $.retries >= $.getOpt('maxChunkRetries')) {
        // HTTP 415/500/501, permanent error
        return('error');
      } else {
        // this should never happen, but we'll reset and queue a retry
        // a likely case for this would be 503 service unavailable
        $.abort();
        return('pending');
      }
    }
  };

  // Last response body from the server, or '' before any request completes.
  $.message = function(){
    return($.xhr ? $.xhr.responseText : '');
  };

  // Progress of this chunk in [0, 1]. With relative=true the value is scaled
  // by the chunk's share of the whole file, so chunk progresses sum to file
  // progress.
  $.progress = function(relative){
    if(typeof(relative)==='undefined') relative = false;
    var factor = (relative ? ($.endByte-$.startByte)/$.fileObjSize : 1);
    if($.pendingRetry) return(0);
    var s = $.status();
    switch(s){
      case 'success':
      case 'error':
        return(1*factor);
      case 'pending':
        return(0*factor);
      default:
        return($.loaded/($.endByte-$.startByte)*factor);
    }
  };

  return(this);
}
|
||||
|
||||
// QUEUE
// Pick the next pending chunk (across all files) and start sending it.
// Returns true if a chunk was started, false otherwise; fires 'complete'
// once every file reports isComplete().
$.uploadNextChunk = function(){
  var found = false;

  // In some cases (such as videos) it's really handy to upload the first
  // and last chunk of a file quickly; this lets the server check the file's
  // metadata and determine if there's even a point in continuing.
  if ($.getOpt('prioritizeFirstAndLastChunk')) {
    $h.each($.files, function(file){
      if(file.chunks.length && file.chunks[0].status()=='pending' && file.chunks[0].preprocessState === 0) {
        file.chunks[0].send();
        found = true;
        return(false); // stop iterating
      }
      // BUGFIX: check the LAST chunk's preprocessState here (the original
      // checked file.chunks[0] again), otherwise the last chunk could be
      // sent while its preprocess hook is still running.
      if(file.chunks.length>1 && file.chunks[file.chunks.length-1].status()=='pending' && file.chunks[file.chunks.length-1].preprocessState === 0) {
        file.chunks[file.chunks.length-1].send();
        found = true;
        return(false); // stop iterating
      }
    });
    if(found) return(true);
  }

  // Now, simply look for the next, best thing to upload
  $h.each($.files, function(file){
    if(file.isPaused()===false){
      $h.each(file.chunks, function(chunk){
        if(chunk.status()=='pending' && chunk.preprocessState === 0) {
          chunk.send();
          found = true;
          return(false); // stop iterating chunks
        }
      });
    }
    if(found) return(false); // stop iterating files
  });
  if(found) return(true);

  // There are no more outstanding chunks to upload; check if everything is done
  var outstanding = false;
  $h.each($.files, function(file){
    if(!file.isComplete()) {
      outstanding = true;
      return(false); // stop iterating
    }
  });
  if(!outstanding) {
    // All chunks have been uploaded, complete
    $.fire('complete');
  }
  return(false);
};
|
||||
|
||||
|
||||
// PUBLIC METHODS FOR RESUMABLE.JS
// Wire up one or more DOM nodes so clicking them opens a file picker whose
// selection is appended to the upload list. A node that is already an
// <input type="file"> is used directly; any other node gets a hidden input
// appended to it and clicks forwarded.
$.assignBrowse = function(domNodes, isDirectory){
  if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];

  $h.each(domNodes, function(node) {
    var isFileInput = (node.tagName==='INPUT' && node.type==='file');
    var picker;
    if(isFileInput){
      picker = node;
    } else {
      picker = document.createElement('input');
      picker.setAttribute('type', 'file');
      picker.style.display = 'none';
      // Forward clicks on the visible node to the hidden input.
      node.addEventListener('click', function(){
        picker.click();
      }, false);
      node.appendChild(picker);
    }
    // Allow multiple selection unless maxFiles is explicitly set to 1.
    var maxFiles = $.getOpt('maxFiles');
    if (typeof(maxFiles)!=='undefined' && maxFiles==1){
      picker.removeAttribute('multiple');
    } else {
      picker.setAttribute('multiple', 'multiple');
    }
    // Toggle directory selection (WebKit-only attribute).
    if(isDirectory){
      picker.setAttribute('webkitdirectory', 'webkitdirectory');
    } else {
      picker.removeAttribute('webkitdirectory');
    }
    // When new files are added, simply append them to the overall list and
    // clear the input so re-picking the same file fires 'change' again.
    picker.addEventListener('change', function(e){
      appendFilesFromFileList(e.target.files);
      e.target.value = '';
    }, false);
  });
};
|
||||
// Register drag-and-drop upload handling on the given node or node list.
$.assignDrop = function(domNodes){
  if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];

  $h.each(domNodes, function(node) {
    node.addEventListener('dragover', onDragOver, false);
    node.addEventListener('drop', onDrop, false);
  });
};
|
||||
// Remove the drag-and-drop handlers previously added by assignDrop().
$.unAssignDrop = function(domNodes) {
  if (typeof(domNodes.length) == 'undefined') domNodes = [domNodes];

  $h.each(domNodes, function(node) {
    node.removeEventListener('dragover', onDragOver);
    node.removeEventListener('drop', onDrop);
  });
};
|
||||
// True while at least one file still has an upload in flight.
$.isUploading = function(){
  var busy = false;
  $h.each($.files, function(file){
    if (file.isUploading()) {
      busy = true;
      return(false); // stop iterating
    }
  });
  return(busy);
};
|
||||
// Start (or resume) uploading: fire 'uploadStart' and kick off up to
// `simultaneousUploads` chunks. No-op if an upload is already running.
$.upload = function(){
  // Make sure we don't start too many uploads at once
  if($.isUploading()) return;
  // Kick off the queue
  $.fire('uploadStart');
  var slots = $.getOpt('simultaneousUploads');
  for (var i=0; i<slots; i++) {
    $.uploadNextChunk();
  }
};
|
||||
// Abort all chunks currently being uploaded and fire the 'pause' event.
$.pause = function(){
  $h.each($.files, function(file){ file.abort(); });
  $.fire('pause');
};
|
||||
// Cancel every file and fire the 'cancel' event. Iterates backwards since
// cancelling a file removes it from $.files.
$.cancel = function(){
  var i = $.files.length;
  while(i--) {
    $.files[i].cancel();
  }
  $.fire('cancel');
};
|
||||
// Overall upload progress in [0, 1]: each file's progress weighted by its
// size. Returns 0 when there are no bytes to upload.
$.progress = function(){
  var doneBytes = 0;
  var allBytes = 0;
  $h.each($.files, function(file){
    doneBytes += file.progress()*file.size;
    allBytes += file.size;
  });
  return(allBytes>0 ? doneBytes/allBytes : 0);
};
|
||||
// Programmatically add a single file, going through the same path as files
// picked via browse or drop (validation, chunking, 'fileAdded' event).
$.addFile = function(file){
  appendFilesFromFileList([file]);
};
|
||||
// Remove a file (by identity) from the upload list. Iterates backwards so
// splicing does not skip entries.
$.removeFile = function(file){
  var i = $.files.length;
  while(i--) {
    if($.files[i] === file) $.files.splice(i, 1);
  }
};
|
||||
// Look up a file by its uniqueIdentifier; returns the ResumableFile, or
// false when no file matches.
$.getFromUniqueIdentifier = function(uniqueIdentifier){
  var match = false;
  $h.each($.files, function(f){
    if(f.uniqueIdentifier==uniqueIdentifier) match = f;
  });
  return(match);
};
|
||||
// Total size in bytes of all files in the upload list.
$.getSize = function(){
  var total = 0;
  $h.each($.files, function(file){
    total += file.size;
  });
  return(total);
};
|
||||
|
||||
return(this);
|
||||
};
|
||||
|
||||
|
||||
// Node.js-style export for Node and Component
// Environment detection order matters: CommonJS first, then AMD, then a
// plain browser global.
if (typeof module != 'undefined') {
  // CommonJS (Node/Component)
  module.exports = Resumable;
} else if (typeof define === "function" && define.amd) {
  // AMD/requirejs: Define the module
  define(function(){
    return Resumable;
  });
} else {
  // Browser: Expose to window
  window.Resumable = Resumable;
}
|
||||
|
||||
})();
|
Loading…
Reference in a new issue