remove old import/export code

This commit is contained in:
Régis Hanol 2014-02-12 20:29:35 -08:00
parent 90c00fcaba
commit 8344f0d8fd
18 changed files with 0 additions and 1852 deletions

View file

@@ -1,10 +0,0 @@
class Admin::ExportController < Admin::AdminController
  # Enqueues a background export job and returns its Sidekiq job id so the
  # client can track it. Refuses (with an i18n error message) when an export
  # or an import is already in progress.
  def create
    if Export.is_export_running? || Import.is_import_running?
      # Export is checked first, so it wins the tie-break in the message.
      operation = Export.is_export_running? ? 'export' : 'import'
      render json: failed_json.merge(message: I18n.t('operation_already_running', operation: operation))
    else
      job_id = Jobs.enqueue(:exporter, user_id: current_user.id)
      render json: success_json.merge(job_id: job_id)
    end
  end
end

View file

@@ -1,128 +0,0 @@
require_dependency 'export/json_encoder'
require_dependency 'export/export'
require_dependency 'import/import'
module Jobs
  # Sidekiq job that dumps every exportable model's table to per-table JSON
  # files, bundles them plus the site's uploads directory into a gzipped tar
  # archive, and (optionally) notifies the requesting user.
  class Exporter < Jobs::Base
    sidekiq_options retry: false

    # args:
    #   :format   - only :json is supported (default :json)
    #   :filename - base path for the archive; defaults to tmp/export-<timestamp>
    #   :user_id  - user to notify on success (optional)
    # Returns the path of the finished .tar.gz archive.
    def execute(args)
      raise Import::ImportInProgressError if Import::is_import_running?
      raise Export::ExportInProgressError if Export::is_export_running?
      @format = args[:format] || :json
      @output_base_filename = File.absolute_path( args[:filename] || File.join( Rails.root, 'tmp', "export-#{Time.now.strftime('%Y-%m-%d-%H%M%S')}" ) )
      # Strip a trailing .gz and/or .tar so we always work from the bare base name.
      @output_base_filename = @output_base_filename[0...-3] if @output_base_filename[-3..-1] == '.gz'
      @output_base_filename = @output_base_filename[0...-4] if @output_base_filename[-4..-1] == '.tar'
      @user = args[:user_id] ? User.where(id: args[:user_id].to_i).first : nil
      start_export
      @encoder.write_schema_info( source: 'discourse', version: Export.current_schema_version )
      ordered_models_for_export.each do |model|
        log " #{model.table_name}"
        column_info = model.columns
        column_names = model.column_names
        # Stream the whole table through the raw PG connection as an enumerator.
        results = model.connection.raw_connection.async_exec("select * from #{model.table_name}").to_enum
        @encoder.write_table(model.table_name, column_info) do |num_rows_written|
          log("#{num_rows_written} rows written") if num_rows_written > 0
          rows = []
          begin
            # Hand rows to the encoder in batches of batch_size.
            while rows.count < batch_size
              row = results.next
              rows << column_names.map{|col| row[col]}
            end
          rescue StopIteration
            # we are done
          end
          rows
        end
      end
      "#{@output_base_filename}.tar.gz"
    ensure
      finish_export
    end

    def ordered_models_for_export
      Export.models_included_in_export
    end

    # Index-style ordering columns for tables without an id primary key.
    # NOTE(review): nothing else in this class reads this hash — appears vestigial.
    def order_columns_for(model)
      @order_columns_for_hash ||= {
        'CategoryFeaturedTopic' => 'category_id, topic_id',
        'CategorySearchData' => 'category_id',
        'PostOneboxRender' => 'post_id, onebox_render_id',
        'PostReply' => 'post_id, reply_id',
        'PostSearchData' => 'post_id',
        'PostTiming' => 'topic_id, post_number, user_id',
        'SiteContent' => 'content_type',
        'TopicUser' => 'topic_id, user_id',
        'UserSearchData' => 'user_id',
        'UserStat' => 'user_id',
        'View' => 'parent_id, parent_type, ip_address, viewed_at'
      }
      @order_columns_for_hash[model.name]
    end

    # Rows fetched per encoder yield.
    def batch_size
      1000
    end

    # Builds the encoder, flips the redis "export running" flag, and puts the
    # site in maintenance mode for the duration of the dump.
    def start_export
      if @format == :json
        @encoder = Export::JsonEncoder.new
      else
        raise Export::FormatInvalidError
      end
      Export.set_export_started
      Discourse.enable_maintenance_mode
    end

    # Always clears the running flag and maintenance mode, even if the dump
    # failed before an encoder existed.
    def finish_export
      if @encoder
        @encoder.finish
        create_tar_file
        @encoder.remove_tmp_directory('export')
      end
    ensure
      Export.set_export_is_not_running
      Discourse.disable_maintenance_mode
      send_notification
    end

    # Archives the current multisite's uploads directory first, then appends
    # each JSON dump, then gzips the result. Shells out to tar/gzip.
    def create_tar_file
      filenames = @encoder.filenames
      tar_filename = "#{@output_base_filename}.tar"
      upload_directory = "uploads/" + RailsMultisite::ConnectionManagement.current_db
      FileUtils.cd(File.join(Rails.root, 'public')) do
        `tar cvf #{tar_filename} #{upload_directory}`
      end
      filenames.each do |filename|
        FileUtils.cd(File.dirname(filename)) do
          `tar --append --file=#{tar_filename} #{File.basename(filename)}`
        end
      end
      `gzip #{tar_filename}`
      true
    end

    def send_notification
      SystemMessage.new(@user).create('export_succeeded') if @user
      true
    end
  end
end

View file

@@ -1,295 +0,0 @@
require_dependency 'import/json_decoder'
require_dependency 'import/import'
require_dependency 'import/adapter/base'
require_dependency 'directory_helper'
# Load every adapter under lib/import/adapter except the base class (required
# explicitly above), so each adapter registers itself with Import on load.
# Note: f still carries its .rb extension here — presumably require_dependency
# tolerates that; TODO confirm.
(Dir.entries(File.join( Rails.root, 'lib', 'import', 'adapter' )) - ['.', '..', 'base.rb']).each do |f|
  require_dependency "import/adapter/#{f}"
end
module Jobs
  # Sidekiq job that restores a Discourse instance from an export archive:
  # moves the live tables into a "backup" schema, streams the JSON dump into
  # fresh empty tables, rebuilds indexes/primary keys/sequences, and restores
  # uploads. Rolls the original tables back if anything goes wrong.
  class Importer < Jobs::Base
    include DirectoryHelper
    sidekiq_options retry: false

    # Postgres schema that temporarily holds the pre-import tables.
    BACKUP_SCHEMA = 'backup'

    def initialize
      @index_definitions = {}   # table_name => [indexdef SQL strings]
      @format = :json
      @warnings = []
    end

    # args: :format (only :json), :filename (archive path, required),
    #       :user_id (record who to notify afterwards, optional).
    def execute(args)
      ordered_models_for_import.each { |model| model.primary_key } # a HACK to workaround cache problems
      raise Import::ImportDisabledError unless SiteSetting.allow_import?
      raise Import::ImportInProgressError if Import::is_import_running?
      raise Export::ExportInProgressError if Export::is_export_running?
      # Disable printing of NOTICE, DETAIL and other unimportant messages from postgresql
      User.exec_sql("SET client_min_messages TO WARNING")
      @format = args[:format] || :json
      @archive_filename = args[:filename]
      if args[:user_id]
        # After the import is done, we'll need to reload the user record and make sure it's the same person
        # before sending a notification
        user = User.where(id: args[:user_id].to_i).first
        @user_info = { user_id: user.id, email: user.email }
      else
        @user_info = nil
      end
      start_import
      backup_tables
      begin
        load_data
        create_indexes
        extract_uploads
      rescue
        log "Performing a ROLLBACK because something went wrong!"
        rollback
        raise
      end
    ensure
      finish_import
    end

    def ordered_models_for_import
      Export.models_included_in_export
    end

    # Validates format/filename, unpacks the archive, flips the global
    # "import running" flag and enters maintenance mode.
    def start_import
      if @format != :json
        raise Import::FormatInvalidError
      elsif @archive_filename.nil?
        raise Import::FilenameMissingError
      else
        extract_files
        @decoder = Import::JsonDecoder.new( Dir[File.join(tmp_directory('import'), '*.json')] )
        Import.set_import_started
        Discourse.enable_maintenance_mode
      end
      self
    end

    # Unpacks the gzipped tar archive into the import scratch directory.
    def extract_files
      FileUtils.cd( tmp_directory('import') ) do
        `tar xvzf #{@archive_filename}`
      end
    end

    # Moves every live table into BACKUP_SCHEMA and recreates an empty
    # structural copy in public — all in a single transaction.
    def backup_tables
      log " Backing up tables"
      ActiveRecord::Base.transaction do
        create_backup_schema
        ordered_models_for_import.each do |model|
          backup_and_setup_table( model )
        end
      end
      self
    end

    # Drops and recreates the backup schema so old backups never collide.
    def create_backup_schema
      User.exec_sql("DROP SCHEMA IF EXISTS #{BACKUP_SCHEMA} CASCADE")
      User.exec_sql("CREATE SCHEMA #{BACKUP_SCHEMA}")
      self
    end

    # Captures the table's index definitions (replayed later by
    # create_indexes), moves the table to BACKUP_SCHEMA, and clones an empty
    # copy (defaults/constraints/comments/storage, but not indexes) into public.
    def backup_and_setup_table( model )
      log " #{model.table_name}"
      @index_definitions[model.table_name] = model.exec_sql("SELECT indexdef FROM pg_indexes WHERE tablename = '#{model.table_name}' and schemaname = 'public';").map { |x| x['indexdef'] }
      model.exec_sql("ALTER TABLE #{model.table_name} SET SCHEMA #{BACKUP_SCHEMA}")
      model.exec_sql("CREATE TABLE #{model.table_name} (LIKE #{BACKUP_SCHEMA}.#{model.table_name} INCLUDING DEFAULTS INCLUDING CONSTRAINTS INCLUDING COMMENTS INCLUDING STORAGE);")
      self
    end

    # Drives the decoder: the schema header goes to set_schema_info, each
    # table's rows go to load_table.
    def load_data
      log " Importing data"
      @decoder.start(
        callbacks: {
          schema_info: method(:set_schema_info),
          table_data: method(:load_table)
        }
      )
      self
    end

    # Rows per progress-log interval during inserts.
    def batch_size
      1000
    end

    # Validates the export file's schema header. Returns true when the source
    # is discourse, the version is present and not newer than this install,
    # and the table count matches; raises a specific error otherwise.
    def set_schema_info(arg)
      if arg[:source] && arg[:source].downcase == 'discourse'
        if arg[:version] && arg[:version] <= Export.current_schema_version
          @export_schema_version = arg[:version]
          if arg[:table_count] == ordered_models_for_import.size
            true
          else
            raise Import::WrongTableCountError.new("Expected to find #{ordered_models_for_import.size} tables, but export file has #{arg[:table_count]} tables!")
          end
        elsif arg[:version].nil?
          raise ArgumentError.new("The schema version must be provided.")
        else
          raise Import::UnsupportedSchemaVersion.new("Export file is from a newer version of Discourse. Upgrade and run migrations to import this file.")
        end
      else
        raise Import::UnsupportedExportSource
      end
    end

    # Inserts row_data into table_name, after letting version adapters rename
    # or drop columns and rewrite rows. Unknown tables are skipped with a warning.
    def load_table(table_name, fields_arg, row_data, row_count)
      fields = fields_arg.dup
      model = Export::models_included_in_export.find { |m| m.table_name == table_name }
      if model
        @adapters ||= Import.adapters_for_version( @export_schema_version )
        log " #{table_name}: #{row_count} rows"
        # Let each registered adapter adjust column names first.
        if @adapters[table_name]
          @adapters[table_name].each do |adapter|
            fields = adapter.apply_to_column_names(table_name, fields)
          end
        end
        if fields.size > model.columns.size
          raise Import::WrongFieldCountError.new("Table #{table_name} is expected to have #{model.columns.size} fields, but got #{fields.size}! Maybe your Discourse server is older than the server that this export file comes from?")
        end
        # If there are fewer fields in the data than the model has, then insert only those fields and
        # hope that the table uses default values or allows null for the missing columns.
        # If the table doesn't have defaults or is not nullable, then a migration adapter should have been created
        # along with the migration.
        column_info = model.columns
        col_num = -1
        # Adapt each row, then transpose to columns so per-column fix-ups
        # (Postgres 't'/'f' booleans) can be applied, then transpose back.
        rows = row_data.map do |row|
          if @adapters[table_name]
            @adapters[table_name].each do |adapter|
              row = adapter.apply_to_row(table_name, row)
            end
          end
          row
        end.transpose.map do |col_values|
          col_num += 1
          case column_info[col_num].type
          when :boolean
            col_values.map { |v| v.nil? ? nil : (v == 'f' ? false : true) }
          else
            col_values
          end
        end.transpose
        parameter_markers = fields.map {|x| "?"}.join(',')
        sql_stmt = "INSERT INTO #{table_name} (#{fields.join(',')}) VALUES (#{parameter_markers})"
        in_tran = false
        begin
          # Wrap all inserts for the table in one transaction outside tests.
          unless Rails.env.test?
            User.exec_sql("BEGIN TRANSACTION")
            in_tran = true
          end
          i = 0
          rows.each do |row|
            if i % batch_size == 0 && i > 0
              log "#{i} rows done"
            end
            User.exec_sql(sql_stmt, *row)
            i += 1
          end
          User.exec_sql("COMMIT") if in_tran
        rescue
          User.exec_sql("ROLLBACK") if in_tran
          raise
        end
        true
      else
        add_warning "Export file contained an unrecognized table named: #{table_name}! It was ignored."
      end
    end

    # Replays the index definitions captured in backup_and_setup_table,
    # restores primary keys, and rebuilds id sequences starting past MAX(id).
    def create_indexes
      log " Creating indexes"
      ordered_models_for_import.each do |model|
        log " #{model.table_name}"
        @index_definitions[model.table_name].each do |indexdef|
          model.exec_sql( indexdef )
        end
        # The indexdef statements don't create the primary keys, so we need to find the primary key and do it ourselves.
        pkey_index_def = @index_definitions[model.table_name].find { |ixdef| ixdef =~ / ([\S]{1,}_pkey) / }
        if pkey_index_def && pkey_index_name = / ([\S]{1,}_pkey) /.match(pkey_index_def)[1]
          model.exec_sql( "ALTER TABLE ONLY #{model.table_name} ADD PRIMARY KEY USING INDEX #{pkey_index_name}" )
        end
        if model.columns.map(&:name).include?('id')
          max_id = model.exec_sql("SELECT MAX(id) AS max FROM #{model.table_name}")[0]['max'].to_i + 1
          seq_name = "#{model.table_name}_id_seq"
          model.exec_sql("CREATE SEQUENCE #{seq_name} START WITH #{max_id} INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
          model.exec_sql("ALTER SEQUENCE #{seq_name} OWNED BY #{model.table_name}.id")
          model.exec_sql("ALTER TABLE #{model.table_name} ALTER COLUMN id SET DEFAULT nextval('#{seq_name}')")
        end
      end
      self
    end

    # Unpacks uploads/ from the archive into public/, if the archive has any.
    def extract_uploads
      if `tar tf #{@archive_filename} | grep "uploads/"`.present?
        FileUtils.cd( File.join(Rails.root, 'public') ) do
          `tar -xz --keep-newer-files -f #{@archive_filename} uploads/`
        end
      end
    end

    # Drops the (partially) imported tables and moves the originals back from
    # the backup schema. Restore failures are logged, not raised.
    def rollback
      ordered_models_for_import.each do |model|
        log " #{model.table_name}"
        model.exec_sql("DROP TABLE IF EXISTS #{model.table_name}") rescue nil
        begin
          model.exec_sql("ALTER TABLE #{BACKUP_SCHEMA}.#{model.table_name} SET SCHEMA public")
        rescue => e
          log " Failed to restore. #{e.class.name}: #{e.message}"
        end
      end
    end

    # Clears the running flag, leaves maintenance mode, removes the scratch
    # directory, and dumps any accumulated warnings.
    def finish_import
      Import.set_import_is_not_running
      Discourse.disable_maintenance_mode
      remove_tmp_directory('import')
      if @warnings.size > 0
        log "WARNINGS:"
        @warnings.each do |message|
          log " #{message}"
        end
      end
      # send_notification
    end

    def send_notification
      # Doesn't work. "WARNING: Can't mass-assign protected attributes: created_at"
      # Still a problem with the activerecord schema_cache I think.
      # if @user_info && @user_info[:user_id]
      #   user = User.where(id: @user_info[:user_id]).first
      #   if user && user.email == @user_info[:email]
      #     SystemMessage.new(user).create('import_succeeded')
      #   end
      # end
      true
    end

    def add_warning(message)
      @warnings << message
    end
  end
end

View file

@@ -1,35 +0,0 @@
module Export
  # One error class per failure mode so callers can rescue selectively.
  class UnsupportedExportSource < RuntimeError; end
  class FormatInvalidError < RuntimeError; end
  class FilenameMissingError < RuntimeError; end
  class ExportInProgressError < RuntimeError; end

  class << self
    # Latest applied migration version, as a string.
    def current_schema_version
      ActiveRecord::Migrator.current_version.to_s
    end

    # Every ActiveRecord model worth dumping (memoized for the process).
    def models_included_in_export
      @models_included_in_export ||= begin
        Rails.application.eager_load! # So that all models get loaded now
        ActiveRecord::Base.descendants - [ActiveRecord::SchemaMigration]
      end
    end

    # Redis key acting as the global "export running" flag.
    def export_running_key
      'exporter_is_running'
    end

    def is_export_running?
      $redis.get(export_running_key) == '1'
    end

    def set_export_started
      $redis.set export_running_key, '1'
    end

    def set_export_is_not_running
      $redis.del export_running_key
    end
  end
end

View file

@@ -1,70 +0,0 @@
require_dependency 'directory_helper'
module Export
  class SchemaArgumentsError < RuntimeError; end

  # Serializes exported table data into per-table JSON files in the export
  # tmp directory, plus a schema.json describing sources, versions, fields
  # and row counts.
  class JsonEncoder
    attr_accessor :stream_creator
    include DirectoryHelper

    # stream_creator: optional lambda(filename) -> IO used to open output
    # streams (tests inject StringIO builders); defaults to opening real files.
    def initialize(stream_creator = nil)
      @stream_creator = stream_creator || lambda do |filename|
        File.new(filename, 'w+b' )
      end
      @schema_data = {
        schema: {}
      }
      @table_info = {}
    end

    # Writes +data+ as <name>.json and records the filename for later tar-ing.
    def write_json(name, data)
      filename = File.join( tmp_directory('export'), "#{name}.json")
      filenames << filename
      stream = stream_creator.call(filename)
      Oj.to_stream(stream, data, mode: :compat)
      stream.close
    end

    # Records source/version for schema.json; both keys are mandatory.
    def write_schema_info(args)
      raise SchemaArgumentsError unless args[:source].present? && args[:version].present?
      @schema_data[:schema][:source] = args[:source]
      @schema_data[:schema][:version] = args[:version]
    end

    # Repeatedly yields the number of rows collected so far; the caller's
    # block returns the next batch (nil or [] ends the loop). All collected
    # rows are then written as a single JSON document.
    def write_table(table_name, columns)
      rows = [] # was `rows ||= []` — plain assignment; it's a fresh local
      loop do   # was `while true` — idiomatic infinite loop
        current_rows = yield(rows.count)
        break unless current_rows && current_rows.size > 0
        rows.concat current_rows
      end
      # TODO still way too big a chunk, needs to be split up
      write_json(table_name, rows)
      @table_info[table_name] ||= {
        fields: columns.map(&:name),
        row_count: rows.size
      }
    end

    # Finalizes schema.json with the table count and per-table info.
    def finish
      @schema_data[:schema][:table_count] = @table_info.keys.count
      write_json("schema", @schema_data.merge(@table_info))
    end

    # Paths of every JSON file written so far.
    def filenames
      @filenames ||= []
    end
  end
end

View file

@@ -1,31 +0,0 @@
module Import
  module Adapter
    # Identity adapter. Subclasses override up_column_names / up_row to
    # migrate exported columns and rows across schema versions.
    class Base
      # Registers this adapter class with Import for the given version/tables.
      def self.register(opts={})
        version = opts[:version]
        tables = opts[:tables]
        Import.add_import_adapter self, version, tables
        @table_names = tables
      end

      # Public hook used by the importer for column-name migration.
      def apply_to_column_names(table_name, column_names)
        up_column_names(table_name, column_names)
      end

      # Public hook used by the importer for row migration.
      def apply_to_row(table_name, row)
        up_row(table_name, row)
      end

      # Implement the following methods in subclasses:

      # Default: pass column names through unchanged.
      def up_column_names(table_name, column_names)
        column_names
      end

      # Default: pass the row through unchanged.
      def up_row(table_name, row)
        row
      end
    end
  end
end

View file

@@ -1,28 +0,0 @@
module Import
  module Adapter
    # Replays migration 20130115012140 on exported topic_users data:
    # the notifications column became notification_level, and muted_at
    # (the column at index 7) was dropped.
    class MergeMuteOptionsOnTopicUsers < Base
      register version: '20130115012140', tables: [:topic_users]

      # rename_column :topic_users, :notifications, :notification_level
      # remove_column :topic_users, :muted_at
      def up_column_names(table_name, column_names)
        return column_names unless table_name.to_sym == :topic_users
        column_names
          .map { |col| col == 'notifications' ? 'notification_level' : col }
          .reject { |col| col == 'muted_at' }
      end

      # remove_column :topic_users, :muted_at — drop the value at index 7
      def up_row(table_name, row)
        return row unless table_name.to_sym == :topic_users
        row[0..6] + row[8..-1]
      end
    end
  end
end

View file

@@ -1,27 +0,0 @@
module Import
  module Adapter
    # Replays migration 20130116151829 on exported topics data:
    # the sub_tag column (at index 30) was dropped.
    class RemoveSubTagFromTopics < Base
      register version: '20130116151829', tables: [:topics]

      # remove_column :topics, :sub_tag
      def up_column_names(table_name, column_names)
        return column_names unless table_name.to_sym == :topics
        column_names.reject { |col| col == 'sub_tag' }
      end

      # remove_column :topics, :sub_tag — drop the value at index 30
      def up_row(table_name, row)
        return row unless table_name.to_sym == :topics
        row[0..29] + row[31..-1]
      end
    end
  end
end

View file

@@ -1,58 +0,0 @@
require_dependency 'import/adapter/base'
module Import
  # One error class per failure mode so callers can rescue selectively.
  class UnsupportedExportSource < RuntimeError; end
  class FormatInvalidError < RuntimeError; end
  class FilenameMissingError < RuntimeError; end
  class ImportInProgressError < RuntimeError; end
  class ImportDisabledError < RuntimeError; end
  class UnsupportedSchemaVersion < RuntimeError; end
  class WrongTableCountError < RuntimeError; end
  class WrongFieldCountError < RuntimeError; end

  class << self
    # Redis key acting as the global "import running" flag.
    def import_running_key
      'importer_is_running'
    end

    def is_import_running?
      $redis.get(import_running_key) == '1'
    end

    def set_import_started
      $redis.set import_running_key, '1'
    end

    def set_import_is_not_running
      $redis.del import_running_key
    end

    # Number of tables currently parked in the importer's backup schema.
    def backup_tables_count
      User.exec_sql("select count(*) as count from information_schema.tables where table_schema = '#{Jobs::Importer::BACKUP_SCHEMA}'")[0]['count'].to_i
    end

    # Wipes the adapter registry (used by specs).
    def clear_adapters
      @adapters = {}
      @adapter_instances = {}
    end

    # Registers one shared instance of +klass+ as the migration adapter for
    # +tables+ at schema +version+. Re-registering the same class is a no-op.
    def add_import_adapter(klass, version, tables)
      @adapters ||= {}
      @adapter_instances ||= {}
      return if @adapter_instances[klass]
      instance = @adapter_instances[klass] = klass.new
      tables.each do |table|
        (@adapters[table.to_s] ||= []) << [version, instance]
      end
    end

    # Maps table name -> adapter instances registered for versions strictly
    # newer than +version+, in registration order. Missing tables default to [].
    def adapters_for_version(version)
      result = Hash.new([])
      if defined?(@adapters)
        @adapters.each do |table_name, adapters|
          result[table_name] = adapters
            .reject { |pair| pair[0].to_i <= version.to_i }
            .map { |pair| pair[1] }
        end
      end
      result
    end
  end
end

View file

@@ -1,43 +0,0 @@
module Import
  # Reads the per-table JSON files produced by Export::JsonEncoder and feeds
  # them to caller-supplied callbacks. A loader lambda can be injected so no
  # real files need to exist (used by specs).
  class JsonDecoder
    # filenames: paths whose basenames (without extension) become table names.
    # loader: lambda(filename) -> parsed data; defaults to Oj file parsing.
    def initialize(filenames, loader = nil)
      @filemap = filenames.each_with_object({}) do |path, map|
        map[File.basename(path, '.*')] = path
      end
      @loader = loader || lambda { |filename| Oj.load_file(filename) }
    end

    # Parsed contents of schema.json.
    def load_schema
      @loader.call(@filemap['schema'])
    end

    # Yields (table_name, parsed_data) for every file except the schema.
    def each_table
      @filemap.each do |name, filename|
        yield name, @loader.call(filename) unless name == 'schema'
      end
    end

    # NOTE(review): dead code — the memoized begin/end block is empty, so this
    # always returns nil; kept for interface compatibility.
    def input_stream
      @input_stream ||= begin
      end
    end

    # Announces the schema header via callbacks[:schema_info] (table_count is
    # every schema key except "schema" itself), then streams each table to
    # callbacks[:table_data].
    def start( opts )
      schema = load_schema
      header = schema['schema']
      opts[:callbacks][:schema_info].call(
        source: header['source'],
        version: header['version'],
        table_count: schema.keys.size - 1
      )
      each_table do |name, data|
        info = schema[name]
        opts[:callbacks][:table_data].call( name, info['fields'], data, info['row_count'] )
      end
    end
  end
end

View file

@@ -1,37 +0,0 @@
require 'spec_helper'
require 'export/export'
# Specs for the Export module: schema version lookup, the model whitelist,
# and the redis-backed "export running" flag.
describe Export do
  describe '#current_schema_version' do
    it "should return the latest migration version" do
      Export.current_schema_version.should == User.exec_sql("select max(version) as max from schema_migrations")[0]["max"]
    end
  end

  describe "models_included_in_export" do
    it "should include the user model" do
      Export.models_included_in_export.map(&:name).should include('User')
    end

    it "should not include the message bus model" do
      Export.models_included_in_export.map(&:name).should_not include('MessageBus')
    end
  end

  describe "is_export_running?" do
    # The flag is the literal string '1' under Export.export_running_key.
    it "should return true when an export is in progress" do
      $redis.stubs(:get).with(Export.export_running_key).returns('1')
      Export.is_export_running?.should be_true
    end

    it "should return false when an export is not happening" do
      $redis.stubs(:get).with(Export.export_running_key).returns('0')
      Export.is_export_running?.should be_false
    end

    it "should return false when an export has never been run" do
      $redis.stubs(:get).with(Export.export_running_key).returns(nil)
      Export.is_export_running?.should be_false
    end
  end
end

View file

@@ -1,143 +0,0 @@
require 'spec_helper'
require 'export/json_encoder'
# Specs for Export::JsonEncoder. A StringIO-returning stream_creator is
# injected so every "file" the encoder writes can be inspected in memory.
describe Export::JsonEncoder do
  describe "exported data" do
    before do
      @streams = {}
      @encoder = Export::JsonEncoder.new(lambda{ |filename|
        @streams[File.basename(filename, ".*")] = StringIO.new
      })
    end

    # Parsed contents of the generated schema.json.
    let :schema do
      JSON.parse(@streams['schema'].string)
    end

    describe "write_schema_info" do
      it "should write a schema section when given valid arguments" do
        version = '20121216230719'
        @encoder.write_schema_info( source: 'discourse', version: version )
        @encoder.finish
        schema.should have_key('schema')
        schema['schema']['source'].should == 'discourse'
        schema['schema']['version'].should == version
      end

      it "should raise an exception when its arguments are invalid" do
        expect {
          @encoder.write_schema_info({})
        }.to raise_error(Export::SchemaArgumentsError)
      end
    end

    # Exercises the yield protocol of write_table: the block receives the
    # running row count and returns the next batch (nil/[] terminates).
    describe "write_table" do
      let(:table_name) { Topic.table_name }
      let(:columns) { Topic.columns }

      before do
        @encoder.write_schema_info( source: 'discourse', version: '111' )
      end

      it "should yield a row count of 0 to the caller on the first iteration" do
        yield_count = 0
        @encoder.write_table(table_name, columns) do |row_count|
          row_count.should == 0
          yield_count += 1
          break
        end
        yield_count.should == 1
      end

      it "should yield the number of rows I sent the first time on the second iteration" do
        yield_count = 0
        @encoder.write_table(table_name, columns) do |row_count|
          yield_count += 1
          if yield_count == 1
            [[1, 'Hello'], [2, 'Yeah'], [3, 'Great']]
          elsif yield_count == 2
            row_count.should == 3
            break
          end
        end
        yield_count.should == 2
      end

      it "should stop yielding when it gets an empty array" do
        yield_count = 0
        @encoder.write_table(table_name, columns) do |row_count|
          yield_count += 1
          break if yield_count > 1
          []
        end
        yield_count.should == 1
      end

      it "should stop yielding when it gets nil" do
        yield_count = 0
        @encoder.write_table(table_name, columns) do |row_count|
          yield_count += 1
          break if yield_count > 1
          nil
        end
        yield_count.should == 1
      end
    end

    # Verifies the table_count and per-table row_count bookkeeping that
    # finish writes into schema.json.
    describe "exported data" do
      before do
        @encoder.write_schema_info( source: 'discourse', version: '20121216230719' )
      end

      it "should have a table count of 0 when no tables were exported" do
        @encoder.finish
        schema['schema']['table_count'].should == 0
      end

      it "should have a table count of 1 when one table was exported" do
        @encoder.write_table(Topic.table_name, Topic.columns) { |row_count| [] }
        @encoder.finish
        schema['schema']['table_count'].should == 1
      end

      it "should have a table count of 3 when three tables were exported" do
        @encoder.write_table(Topic.table_name, Topic.columns) { |row_count| [] }
        @encoder.write_table(User.table_name, User.columns) { |row_count| [] }
        @encoder.write_table(Post.table_name, Post.columns) { |row_count| [] }
        @encoder.finish
        schema['schema']['table_count'].should == 3
      end

      it "should have a row count of 0 when no rows were exported" do
        @encoder.write_table(Notification.table_name, Notification.columns) { |row_count| [] }
        @encoder.finish
        schema[Notification.table_name]['row_count'].should == 0
      end

      it "should have a row count of 1 when one row was exported" do
        @encoder.write_table(Notification.table_name, Notification.columns) do |row_count|
          if row_count == 0
            [['1409', '5', '1227', '', 't', '2012-12-07 19:59:56.691592', '2012-12-07 19:59:56.691592', '303', '16', '420']]
          else
            []
          end
        end
        @encoder.finish
        schema[Notification.table_name]['row_count'].should == 1
      end

      it "should have a row count of 2 when two rows were exported" do
        @encoder.write_table(Notification.table_name, Notification.columns) do |row_count|
          if row_count == 0
            [['1409', '5', '1227', '', 't', '2012-12-07 19:59:56.691592', '2012-12-07 19:59:56.691592', '303', '16', '420'],
            ['1408', '4', '1188', '', 'f', '2012-12-07 18:40:30.460404', '2012-12-07 18:40:30.460404', '304', '1', '421']]
          else
            []
          end
        end
        @encoder.finish
        schema[Notification.table_name]['row_count'].should == 2
      end
    end
  end
end

View file

@@ -1,24 +0,0 @@
require 'spec_helper'
require 'import/adapter/base'
# Specs for the no-op base adapter: both hooks must pass data through untouched.
describe Import::Adapter::Base do
  describe 'the base implementation' do
    let(:adapter) { Import::Adapter::Base.new }

    describe 'apply_to_column_names' do
      it 'should return the column names passed in' do
        cols = ['first', 'second']
        adapter.apply_to_column_names('table_name', cols).should == cols
      end
    end

    describe 'apply_to_row' do
      it 'should return the row passed in' do
        row = [1,2,3,4]
        adapter.apply_to_row('table_name', row).should == row
      end
    end
  end
end

View file

@@ -1,66 +0,0 @@
require 'spec_helper'
require 'import/import'
# Bare adapter subclasses used as registration fixtures by the specs below.
class AdapterX < Import::Adapter::Base; end
class Adapter1 < Import::Adapter::Base; end
class Adapter2 < Import::Adapter::Base; end
class Adapter3 < Import::Adapter::Base; end
# Specs for the Import module: the redis "import running" flag and the
# version-adapter registry.
describe Import do
  describe "is_import_running?" do
    it "should return true when an import is in progress" do
      $redis.stubs(:get).with(Import.import_running_key).returns('1')
      Import.is_import_running?.should be_true
    end

    it "should return false when an import is not happening" do
      $redis.stubs(:get).with(Import.import_running_key).returns('0')
      Import.is_import_running?.should be_false
    end

    it "should return false when an import has never been run" do
      $redis.stubs(:get).with(Import.import_running_key).returns(nil)
      Import.is_import_running?.should be_false
    end
  end

  describe 'add_import_adapter' do
    it "should return true" do
      Import.clear_adapters
      Import.add_import_adapter(AdapterX, '20130110121212', ['users']).should be_true
    end
  end

  describe 'adapters_for_version' do
    it "should return an empty Hash when there are no adapters" do
      Import.clear_adapters
      Import.adapters_for_version('1').should == {}
    end

    context 'when there are some adapters' do
      before do
        Import.clear_adapters
        Import.add_import_adapter(Adapter1, '10', ['users'])
        Import.add_import_adapter(Adapter2, '20', ['users'])
        Import.add_import_adapter(Adapter3, '30', ['users'])
      end

      # Only adapters registered for versions strictly newer than the given
      # version should be returned.
      it "should return no adapters when the version is newer than all adapters" do
        Import.adapters_for_version('31')['users'].should have(0).adapters
      end

      it "should return adapters that are newer than the given version" do
        Import.adapters_for_version('12')['users'].should have(2).adapters
        Import.adapters_for_version('22')['users'].should have(1).adapters
      end

      it "should return the adapters in order" do
        adapters = Import.adapters_for_version('1')['users']
        adapters[0].should be_a(Adapter1)
        adapters[1].should be_a(Adapter2)
        adapters[2].should be_a(Adapter3)
      end
    end
  end
end

View file

@@ -1,76 +0,0 @@
require 'spec_helper'
require 'import/json_decoder'
# Specs for Import::JsonDecoder, driven entirely through an injected loader
# lambda so no real files are read.
describe Import::JsonDecoder do
  describe "start" do
    context "given valid arguments" do
      before do
        @version = '20121201205642'
        # NOTE(review): top-level keys are strings but the per-table sub-keys
        # (fields:, row_count:) are symbols — unlike real schema.json output.
        @schema = {
          "schema" => { 'source' => 'discourse', 'version' => @version},
          "categories" => {
            fields: Category.columns.map(&:name),
            row_count: 2
          },
          "notifications" => {
            fields: Notification.columns.map(&:name),
            row_count: 2
          }
        }
        @categories = [
          ["3", "entertainment", "AB9364", "155", nil, nil, nil, nil, "19", "2012-07-12 18:55:56.355932", "2012-07-12 18:55:56.355932", "1186", "17", "0", "0", "entertainment"],
          ["4", "question", "AB9364", "164", nil, nil, nil, nil, "1", "2012-07-12 18:55:56.355932", "2012-07-12 18:55:56.355932", "1186", "1", "0", "0", "question"]
        ]
        @notifications = [
          ["1416", "2", "1214", "{\"topic_title\":\"UI: Where did the 'Create a Topic' button go?\",\"display_username\":\"Lowell Heddings\"}", "t", "2012-12-09 18:05:09.862898", "2012-12-09 18:05:09.862898", "394", "2", nil],
          ["1415", "2", "1187", "{\"topic_title\":\"Jenkins Config.xml\",\"display_username\":\"Sam\"}", "t", "2012-12-08 10:11:17.599724", "2012-12-08 10:11:17.599724", "392", "3", nil]
        ]
        @decoder = Import::JsonDecoder.new(['xyz/schema.json', 'xyz/categories.json', 'xyz/notifications.json'], lambda{|filename|
          case filename
          when 'xyz/schema.json'
            @schema
          when 'xyz/categories.json'
            @categories
          when 'xyz/notifications.json'
            @notifications
          end
        })
        @valid_args = { callbacks: { schema_info: stub_everything, table_data: stub_everything } }
      end

      it "should call the schema_info callback before sending table data" do
        callback_sequence = sequence('callbacks')
        @valid_args[:callbacks][:schema_info].expects(:call).in_sequence(callback_sequence)
        @valid_args[:callbacks][:table_data].expects(:call).in_sequence(callback_sequence).at_least_once
        @decoder.start( @valid_args )
      end

      it "should call the schema_info callback with source and version parameters when export data is from discourse" do
        # NOTE(review): the callback receives symbol keys (source:, version:),
        # so arg["source"] is nil — and @schema["source"] is also nil (the
        # value lives under @schema["schema"]). Both comparisons pass
        # vacuously; this example doesn't really assert anything.
        @valid_args[:callbacks][:schema_info].expects(:call).with do |arg|
          arg["source"].should == @schema["source"]
          arg["version"].should == @schema["version"]
        end
        @decoder.start( @valid_args )
      end

      it "should call the table_data callback at least once for each table in the export file" do
        @valid_args[:callbacks][:table_data].expects(:call).with('categories',
          @schema['categories']['fields'],
          anything, anything
        ).at_least_once
        @valid_args[:callbacks][:table_data].expects(:call).with('notifications',
          @schema['notifications']['fields'], anything, anything).at_least_once
        @decoder.start( @valid_args )
      end
    end
  end
end

View file

@@ -1,57 +0,0 @@
require 'spec_helper'
# Controller specs for Admin::ExportController#create: happy path returns a
# job id; a running export or import blocks the request with an error body.
describe Admin::ExportController do
  it "is a subclass of AdminController" do
    (Admin::ExportController < Admin::AdminController).should be_true
  end

  context 'while logged in as an admin' do
    before do
      @user = log_in(:admin)
    end

    describe "create" do
      it "should start an export job" do
        Jobs::Exporter.any_instance.expects(:execute).returns(true)
        xhr :post, :create
      end

      it "should return a job id" do
        job_id = 'abc123'
        Jobs.stubs(:enqueue).returns( job_id )
        xhr :post, :create
        json = JSON.parse(response.body)
        json.should have_key('job_id')
        json['job_id'].should == job_id
      end

      # Shared behaviour for both "blocked" contexts below.
      shared_examples_for "when export should not be started" do
        it "should return an error" do
          xhr :post, :create
          json = JSON.parse(response.body)
          json['failed'].should_not be_nil
          json['message'].should_not be_nil
        end

        it "should not start an export job" do
          Jobs::Exporter.any_instance.expects(:start_export).never
          xhr :post, :create
        end
      end

      context "when an export is already running" do
        before do
          Export.stubs(:is_export_running?).returns( true )
        end
        it_should_behave_like "when export should not be started"
      end

      context "when an import is currently running" do
        before do
          Import.stubs(:is_import_running?).returns( true )
        end
        it_should_behave_like "when export should not be started"
      end
    end
  end
end

View file

@@ -1,186 +0,0 @@
require 'spec_helper'
require_dependency 'jobs/base'
describe Jobs::Exporter do
before do
Jobs::Exporter.any_instance.stubs(:log).returns(true)
Jobs::Exporter.any_instance.stubs(:create_tar_file).returns(true)
Export::JsonEncoder.any_instance.stubs(:tmp_directory).returns( File.join(Rails.root, 'tmp', 'exporter_spec') )
Discourse.stubs(:enable_maintenance_mode).returns(true)
Discourse.stubs(:disable_maintenance_mode).returns(true)
end
describe "execute" do
context 'when no export or import is running' do
before do
@streams = {}
Export::JsonEncoder.any_instance.stubs(:stream_creator).returns(lambda {|filename|
@streams[File.basename(filename, '.*')] = StringIO.new
})
Jobs::Exporter.any_instance.stubs(:ordered_models_for_export).returns([])
Export.stubs(:is_export_running?).returns(false)
Export.stubs(:is_import_running?).returns(false)
@exporter_args = {}
end
it "should indicate that an export is running" do
seq = sequence('call sequence')
Export.expects(:set_export_started).in_sequence(seq).at_least_once
Export.expects(:set_export_is_not_running).in_sequence(seq).at_least_once
Jobs::Exporter.new.execute( @exporter_args )
end
it "should put the site in maintenance mode when it starts" do
encoder = stub_everything
Export::JsonEncoder.stubs(:new).returns(encoder)
seq = sequence('export-sequence')
Discourse.expects(:enable_maintenance_mode).in_sequence(seq).at_least_once
encoder.expects(:write_schema_info).in_sequence(seq).at_least_once
Jobs::Exporter.new.execute( @exporter_args )
end
it "should take the site out of maintenance mode when it ends" do
encoder = stub_everything
Export::JsonEncoder.stubs(:new).returns(encoder)
seq = sequence('export-sequence')
encoder.expects(:write_schema_info).in_sequence(seq).at_least_once
Discourse.expects(:disable_maintenance_mode).in_sequence(seq).at_least_once
Jobs::Exporter.new.execute( @exporter_args )
end
describe "without specifying a format" do
it "should use json as the default" do
Export::JsonEncoder.expects(:new).returns( stub_everything )
Jobs::Exporter.new.execute( @exporter_args.reject { |key, val| key == :format } )
end
end
describe "specifying an invalid format" do
it "should raise an exception and not flag that an export has started" do
Jobs::Exporter.expects(:set_export_started).never
expect {
Jobs::Exporter.new.execute( @exporter_args.merge( format: :interpretive_dance ) )
}.to raise_error(Export::FormatInvalidError)
end
end
context "using json format" do
  before do
    @exporter_args = {format: :json}
  end

  it "should export metadata" do
    version = '201212121212'
    encoder = stub_everything
    # Inspect the args passed to write_schema_info inside the matcher block.
    encoder.expects(:write_schema_info).with do |arg|
      arg[:source].should == 'discourse'
      arg[:version].should == version
    end
    Export::JsonEncoder.stubs(:new).returns(encoder)
    Export.stubs(:current_schema_version).returns(version)
    Jobs::Exporter.new.execute( @exporter_args )
  end

  describe "exporting tables" do
    before do
      # Create some real database records
      @user1, @user2 = Fabricate(:user), Fabricate(:user)
      @topic1 = Fabricate(:topic, user: @user1)
      @topic2 = Fabricate(:topic, user: @user2)
      @topic3 = Fabricate(:topic, user: @user1)
      @post1 = Fabricate(:post, topic: @topic1, user: @user1)
      # FIX: this record was also assigned to @post1, silently discarding
      # the reference created on the previous line; it gets its own name.
      @post2 = Fabricate(:post, topic: @topic3, user: @user1)
      @reply1 = Fabricate(:basic_reply, user: @user2, topic: @topic3)
      @reply1.save_reply_relationships
      @reply2 = Fabricate(:basic_reply, user: @user1, topic: @topic1)
      @reply2.save_reply_relationships
      @reply3 = Fabricate(:basic_reply, user: @user1, topic: @topic3)
      @reply3.save_reply_relationships
    end

    it "should export all rows from the topics table in ascending id order" do
      Jobs::Exporter.any_instance.stubs(:ordered_models_for_export).returns([Topic])
      Jobs::Exporter.new.execute( @exporter_args )
      # @streams is set up in an outer before block (not visible here);
      # it captures the encoder's per-table output.
      topics = JSON.parse( @streams['topics'].string )
      topics.should have(3).rows
      topics.map{|row| row[0].to_i}.sort.should == [@topic1.id, @topic2.id, @topic3.id].sort
    end

    it "should export all rows from the post_replies table in ascending order by post_id, reply_id" do
      # because post_replies doesn't have an id column, so order by one of its indexes
      Jobs::Exporter.any_instance.stubs(:ordered_models_for_export).returns([PostReply])
      Jobs::Exporter.new.execute( @exporter_args )
      post_replies = JSON.parse( @streams['post_replies'].string )
      post_replies.map{|row| row[1].to_i}.sort.should == [@reply1.id, @reply2.id, @reply3.id].sort
    end

    it "should export column names for each table" do
      Jobs::Exporter.any_instance.stubs(:ordered_models_for_export).returns([Topic, TopicUser, PostReply])
      Jobs::Exporter.new.execute( @exporter_args )
      json = JSON.parse( @streams['schema'].string )
      json['topics'].should have_key('fields')
      json['topic_users'].should have_key('fields')
      json['post_replies'].should have_key('fields')
      json['topics']['fields'].should == Topic.columns.map(&:name)
      json['topic_users']['fields'].should == TopicUser.columns.map(&:name)
      json['post_replies']['fields'].should == PostReply.columns.map(&:name)
    end
  end
end
# Notification behavior after a successful export: only the requesting
# user (if any) should be notified.
context "when it finishes successfully" do
context "and no user was given" do
it "should not send a notification to anyone" do
expect {
Jobs::Exporter.new.execute( @exporter_args )
}.to_not change { Notification.count }
end
end
context "and a user was given" do
before do
@user = Fabricate(:user)
@admin = Fabricate(:admin)
end
it "should send a notification to the user who started the export" do
# Observers are disabled by default in specs; notifications are
# created via an observer, so enable them for this example.
ActiveRecord::Base.observers.enable :all
expect {
Jobs::Exporter.new.execute( @exporter_args.merge( user_id: @user.id ) )
}.to change { Notification.count }.by(1)
end
end
end
end
# Only one export may run at a time: a second attempt must fail fast
# without flagging a new export or doing any work.
context 'when an export is already running' do
before do
Export.expects(:is_export_running?).returns(true)
end
it "should not start an export and raise an exception" do
Export.expects(:set_export_started).never
Jobs::Exporter.any_instance.expects(:start_export).never
expect {
Jobs::Exporter.new.execute({})
}.to raise_error(Export::ExportInProgressError)
end
end
# Exports are also blocked while an import is in progress, since both
# operate on the same tables.
context 'when an import is running' do
before do
Import.expects(:is_import_running?).returns(true)
end
it "should not start an export and raise an exception" do
Export.expects(:set_export_started).never
Jobs::Exporter.any_instance.expects(:start_export).never
expect {
Jobs::Exporter.new.execute({})
}.to raise_error(Import::ImportInProgressError)
end
end
end
end

View file

@ -1,538 +0,0 @@
require 'spec_helper'
require_dependency 'jobs/base'
describe Jobs::Importer do
# Stub out the DDL-heavy steps (backup schema creation and table
# swapping) so examples can run without touching the real schema.
def stub_schema_changes
  [:create_backup_schema, :backup_and_setup_table].each do |schema_step|
    Jobs::Importer.any_instance.stubs(schema_step).returns(true)
  end
end
# Stub out the data-loading steps so examples can exercise orchestration
# (maintenance mode, rollback, notifications) without loading rows.
def stub_data_loading
  [:set_schema_info, :load_table, :create_indexes].each do |load_step|
    Jobs::Importer.any_instance.stubs(load_step).returns(true)
  end
end
# Shared setup: silence side effects (maintenance mode, logging, file
# extraction) and point temp files at a spec-local directory.
before do
Discourse.stubs(:enable_maintenance_mode).returns(true)
Discourse.stubs(:disable_maintenance_mode).returns(true)
Jobs::Importer.any_instance.stubs(:log).returns(true)
Jobs::Importer.any_instance.stubs(:extract_uploads).returns(true)
Jobs::Importer.any_instance.stubs(:extract_files).returns(true)
Jobs::Importer.any_instance.stubs(:tmp_directory).returns( File.join(Rails.root, 'tmp', 'importer_spec') )
# Default args used by most examples; individual examples merge/reject keys.
@importer_args = { filename: 'importer_spec.json.gz' }
end
context "SiteSetting to enable imports" do
  # The allow_import site setting gates the whole import feature.
  it "should exist" do
    allow_import_setting = SiteSetting.all_settings.find { |s| s[:setting] == :allow_import }
    allow_import_setting.should be_present
  end

  it "should default to false" do
    SiteSetting.allow_import?.should be_false
  end
end
# With allow_import off, execute must refuse to do anything at all.
context 'when import is disabled' do
before do
stub_schema_changes
stub_data_loading
Import::JsonDecoder.stubs(:new).returns( stub_everything )
SiteSetting.stubs(:allow_import).returns(false)
end
describe "execute" do
it "should raise an error" do
expect {
Jobs::Importer.new.execute( @importer_args )
}.to raise_error(Import::ImportDisabledError)
end
it "should not start an import" do
Import::JsonDecoder.expects(:new).never
Jobs::Importer.any_instance.expects(:backup_tables).never
Discourse.expects(:enable_maintenance_mode).never
# rescue nil: the raise itself is covered by the example above
Jobs::Importer.new.execute( @importer_args ) rescue nil
end
end
end
context 'when import is enabled' do
before do
SiteSetting.stubs(:allow_import).returns(true)
end
describe "execute" do
before do
stub_data_loading
end
# Shared negative expectations: callers set @invalid_args before
# including these examples.
shared_examples_for "when import should not be started" do
it "should not start an import" do
Import::JsonDecoder.expects(:new).never
Jobs::Importer.any_instance.expects(:backup_tables).never
Jobs::Importer.new.execute( @invalid_args ) rescue nil
end
it "should not put the site in maintenance mode" do
Discourse.expects(:enable_maintenance_mode).never
Jobs::Importer.new.execute( @invalid_args ) rescue nil
end
end
# Imports are mutually exclusive with any other running import or export.
context "when an import is already running" do
before do
Import::JsonDecoder.stubs(:new).returns( stub_everything )
Import.stubs(:is_import_running?).returns( true )
end
it "should raise an error" do
expect {
Jobs::Importer.new.execute( @importer_args )
}.to raise_error(Import::ImportInProgressError)
end
it_should_behave_like "when import should not be started"
end
context "when an export is running" do
before do
Export.stubs(:is_export_running?).returns( true )
end
it "should raise an error" do
expect {
Jobs::Importer.new.execute( @importer_args )
}.to raise_error(Export::ExportInProgressError)
end
it_should_behave_like "when import should not be started"
end
context "when no export or import are running" do
before do
Import.stubs(:is_import_running?).returns( false )
Export.stubs(:is_export_running?).returns( false )
end
# Argument validation: format defaults to json; unknown formats and a
# missing filename abort the import before any work starts.
it "without specifying a format should use json as the default format" do
stub_schema_changes
Import::JsonDecoder.expects(:new).returns( stub_everything )
Jobs::Importer.new.execute( @importer_args.reject { |key, val| key == :format } )
end
it "when specifying json as the format it should use json" do
stub_schema_changes
Import::JsonDecoder.expects(:new).returns( stub_everything )
Jobs::Importer.new.execute( @importer_args.merge(format: :json) )
end
context "when specifying an invalid format" do
before do
stub_schema_changes
@invalid_args = @importer_args.merge( format: :smoke_signals )
end
it "should raise an error" do
expect {
Jobs::Importer.new.execute( @invalid_args )
}.to raise_error(Import::FormatInvalidError)
end
it_should_behave_like "when import should not be started"
end
context "when filename is not given" do
before do
stub_schema_changes
@invalid_args = @importer_args.reject { |k,v| k == :filename }
end
it "should raise an error" do
expect {
Jobs::Importer.new.execute( @invalid_args )
}.to raise_error(Import::FilenameMissingError)
end
it_should_behave_like "when import should not be started"
end
# Pre-load behavior: backing up tables, run-state flags, and maintenance
# mode, with data loading itself stubbed out.
context "before loading data into tables" do
before do
Import::JsonDecoder.stubs(:new).returns( stub_everything )
stub_data_loading
end
shared_examples_for "a successful call to execute" do
it "should make a backup of the users table" do
Jobs::Importer.any_instance.stubs(:ordered_models_for_import).returns([User])
Jobs::Importer.new.execute(@importer_args)
User.exec_sql_row_count("SELECT table_name FROM information_schema.tables WHERE table_schema = 'backup' AND table_name = 'users'").should == 1
end
# The import swaps in a fresh users table; with load_table stubbed,
# nothing repopulates it, so existing rows must be gone afterwards.
it "should have a users table that's empty" do
@user1 = Fabricate(:user)
Jobs::Importer.any_instance.stubs(:ordered_models_for_import).returns([User])
Jobs::Importer.new.execute(@importer_args)
User.count.should == 0 # empty table (data loading is stubbed for this test)
end
it "should indicate that an import is running" do
seq = sequence('call sequence')
Import.expects(:set_import_started).in_sequence(seq).at_least_once
Import.expects(:set_import_is_not_running).in_sequence(seq).at_least_once
Jobs::Importer.new.execute(@importer_args)
end
it "should put the site in maintenance mode" do
seq = sequence('call sequence')
Import.is_import_running?.should be_false
Discourse.expects(:enable_maintenance_mode).in_sequence(seq).at_least_once
Jobs::Importer.any_instance.expects(:backup_tables).in_sequence(seq).at_least_once
Jobs::Importer.any_instance.expects(:load_data).in_sequence(seq).at_least_once
Jobs::Importer.new.execute( @importer_args )
end
it "should take the site out of maintenance mode when it's done" do
seq = sequence('call sequence')
Jobs::Importer.any_instance.expects(:backup_tables).in_sequence(seq).at_least_once
Jobs::Importer.any_instance.expects(:load_data).in_sequence(seq).at_least_once
Discourse.expects(:disable_maintenance_mode).in_sequence(seq).at_least_once
Jobs::Importer.new.execute( @importer_args )
end
end
context "the first time an import is run" do
it_should_behave_like "a successful call to execute"
end
# Running execute in a before block first checks the backup/restore
# cycle is repeatable (backup schema already exists on the second run).
context "the second time an import is run" do
before do
Jobs::Importer.new.execute(@importer_args)
end
it_should_behave_like "a successful call to execute"
end
end
#
# Import notifications don't work from the rake task. Why is activerecord inserting an "id" value of NULL?
#
# PG::Error: ERROR: null value in column "id" violates not-null constraint
# : INSERT INTO "topic_allowed_users" ("created_at", "id", "topic_id", "updated_at", "user_id") VALUES ($1, $2, $3, $4, $5) RETURNING "id"
#
# context "when it finishes successfully" do
# before do
# stub_schema_changes
# Import::JsonDecoder.stubs(:new).returns( stub_everything )
# end
# context "and no user was given" do
# it "should not send a notification to anyone" do
# expect {
# Jobs::Importer.new.execute( @importer_args )
# }.to_not change { Notification.count }
# end
# end
# context "and a user was given" do
# before do
# @user = Fabricate(:user)
# @admin = Fabricate(:admin)
# end
# it "should send a notification to the user who started the import" do
# expect {
# Jobs::Importer.new.execute( @importer_args.merge( user_id: @user.id ) )
# }.to change { Notification.count }.by(1)
# end
# end
# end
end
end
# set_schema_info validates export metadata (source, schema version,
# table count) before any rows are loaded.
describe "set_schema_info" do
context "when source is Discourse" do
before do
@current_version = '20121216230719'
Export.stubs(:current_schema_version).returns(@current_version)
@valid_args = { source: 'discourse', version: @current_version, table_count: Export.models_included_in_export.size }
end
it "succeeds when receiving the current schema version" do
Jobs::Importer.new.set_schema_info( @valid_args ).should be_true
end
# Older dumps are allowed: import adapters migrate them forward.
it "succeeds when receiving an older schema version" do
Jobs::Importer.new.set_schema_info( @valid_args.merge( version: "#{@current_version.to_i - 1}") ).should be_true
end
it "raises an error if version is not given" do
expect {
Jobs::Importer.new.set_schema_info( @valid_args.reject {|key, val| key == :version} )
}.to raise_error(ArgumentError)
end
# Dumps from a NEWER schema cannot be downgraded on import.
it "raises an error when receiving a newer schema version" do
expect {
Jobs::Importer.new.set_schema_info( @valid_args.merge( version: "#{@current_version.to_i + 1}") )
}.to raise_error(Import::UnsupportedSchemaVersion)
end
it "raises an error when it doesn't get the number of tables it expects" do
expect {
Jobs::Importer.new.set_schema_info( @valid_args.merge( table_count: 2 ) )
}.to raise_error(Import::WrongTableCountError)
end
end
it "raises an error when it receives an unsupported source" do
expect {
Jobs::Importer.new.set_schema_info( source: 'digg' )
}.to raise_error(Import::UnsupportedExportSource)
end
end
# load_table(table_name, fields, rows, row_count) inserts raw exported
# rows; these examples cover quoting, nulls, field-count mismatches,
# unknown tables, and version adapters.
describe "load_table" do
before do
stub_schema_changes
@valid_field_list = ["id", "notification_type", "user_id", "data", "read", "created_at", "updated_at", "topic_id", "post_number", "post_action_id"]
@valid_notifications_row_data = [
['1409', '5', '1227', '', 't', '2012-12-07 19:59:56.691592', '2012-12-07 19:59:56.691592', '303', '16', '420'],
['1408', '4', '1188', '', 'f', '2012-12-07 18:40:30.460404', '2012-12-07 18:40:30.460404', '304', '1', '421']
]
end
# NOTE(review): "scheam" is a typo in the description string; left as-is
# since example descriptions are runtime strings.
context "when export data is at the current scheam version" do
before do
# No adapters => data is treated as already current.
Import.stubs(:adapters_for_version).returns({})
end
context "with good data" do
it "should add rows to the notifcations table given valid row data" do
Jobs::Importer.new.load_table('notifications', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
Notification.count.should == @valid_notifications_row_data.length
end
# The next examples guard against SQL-escaping regressions.
it "should successfully load rows with double quote literals in the values" do
@valid_notifications_row_data[0][3] = "{\"topic_title\":\"Errors, errbit and you!\",\"display_username\":\"Coding Horror\"}"
Jobs::Importer.new.load_table('notifications', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
Notification.count.should == @valid_notifications_row_data.length
end
it "should successfully load rows with single quote literals in the values" do
@valid_notifications_row_data[0][3] = "{\"topic_title\":\"Bacon's Delicious, Am I Right\",\"display_username\":\"Celine Dion\"}"
Jobs::Importer.new.load_table('notifications', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
Notification.count.should == @valid_notifications_row_data.length
end
it "should succesfully load rows with null values" do
@valid_notifications_row_data[0][7] = nil
@valid_notifications_row_data[1][9] = nil
Jobs::Importer.new.load_table('notifications', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
Notification.count.should == @valid_notifications_row_data.length
end
# Question marks must not be interpreted as bind-variable placeholders.
it "should successfully load rows with question marks in the values" do
@valid_notifications_row_data[0][3] = "{\"topic_title\":\"Who took my sandwich?\",\"display_username\":\"Lunchless\"}"
Jobs::Importer.new.load_table('notifications', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
Notification.count.should == @valid_notifications_row_data.length
end
end
# Missing trailing columns are tolerated (older exports may lack
# newly added columns).
context "with fewer than the expected number of fields for a table" do
before do
@short_field_list = ["id", "notification_type", "user_id", "data", "read", "created_at", "updated_at", "topic_id", "post_number"]
@short_notifications_row_data = [
['1409', '5', '1227', '', 't', '2012-12-07 19:59:56.691592', '2012-12-07 19:59:56.691592', '303', '16'],
['1408', '4', '1188', '', 'f', '2012-12-07 18:40:30.460404', '2012-12-07 18:40:30.460404', '304', '1']
]
end
it "should not raise an error" do
expect {
Jobs::Importer.new.load_table('notifications', @short_field_list, @short_notifications_row_data, @short_notifications_row_data.size)
}.to_not raise_error
end
end
# Extra, unknown columns are an error: the data can't be mapped.
context "with more than the expected number of fields for a table" do
before do
@too_long_field_list = ["id", "notification_type", "user_id", "data", "read", "created_at", "updated_at", "topic_id", "post_number", "post_action_id", "extra_col"]
@too_long_notifications_row_data = [
['1409', '5', '1227', '', 't', '2012-12-07 19:59:56.691592', '2012-12-07 19:59:56.691592', '303', '16', '420', 'extra'],
['1408', '4', '1188', '', 'f', '2012-12-07 18:40:30.460404', '2012-12-07 18:40:30.460404', '304', '1', '421', 'extra']
]
end
it "should raise an error" do
expect {
Jobs::Importer.new.load_table('notifications', @too_long_field_list, @too_long_notifications_row_data, @too_long_notifications_row_data.size)
}.to raise_error(Import::WrongFieldCountError)
end
end
# Unknown tables are skipped with a warning rather than aborting the import.
context "with an unrecognized table name" do
it "should not raise an error" do
expect {
Jobs::Importer.new.load_table('pork_chops', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
}.to_not raise_error
end
it "should report a warning" do
Jobs::Importer.any_instance.expects(:add_warning).once
Jobs::Importer.new.load_table('pork_chops', @valid_field_list, @valid_notifications_row_data, @valid_notifications_row_data.size)
end
end
end
# Older dumps: per-table adapters must be applied to both the field
# list and every row.
context "when import adapters are needed" do
before do
@version = (Export.current_schema_version.to_i - 1).to_s
Export.stubs(:current_schema_version).returns( @version )
end
it "should apply the adapter" do
@adapter = mock('adapter', apply_to_column_names: @valid_field_list, apply_to_row: @valid_notifications_row_data[0])
Import.expects(:adapters_for_version).at_least_once.returns({'notifications' => [@adapter]})
Jobs::Importer.new.load_table('notifications', @valid_field_list, @valid_notifications_row_data[0,1], 1)
end
end
end
# After loading data into freshly created tables, the importer must
# recreate the original indexes and primary keys.
describe "create_indexes" do
before do
Import::JsonDecoder.stubs(:new).returns( stub_everything )
Jobs::Importer.any_instance.stubs(:set_schema_info).returns( true )
Jobs::Importer.any_instance.stubs(:load_table).returns( true )
end
it "should create the same indexes on the new tables" do
Jobs::Importer.any_instance.stubs(:ordered_models_for_import).returns([Topic])
# Compare the sorted index-name list before and after a full import cycle.
expect {
Jobs::Importer.new.execute( @importer_args )
}.to_not change{ Topic.exec_sql("SELECT indexname FROM pg_indexes WHERE tablename = 'topics' and schemaname = 'public';").map {|x| x['indexname']}.sort }
end
it "should create primary keys" do
Jobs::Importer.any_instance.stubs(:ordered_models_for_import).returns([User])
Jobs::Importer.new.execute( @importer_args )
User.connection.primary_key('users').should_not be_nil
end
end
describe "rollback" do
# Failures BEFORE the backup exists must not trigger rollback — there is
# nothing to roll back to yet.
it "should not get called if format parameter is invalid" do
stub_data_loading
Jobs::Importer.any_instance.stubs(:start_import).raises(Import::FormatInvalidError)
Jobs::Importer.any_instance.expects(:rollback).never
Jobs::Importer.new.execute( @importer_args ) rescue nil
end
context "when creating the backup schema fails" do
it "should not call rollback" do
stub_data_loading
Jobs::Importer.any_instance.stubs(:create_backup_schema).raises(RuntimeError)
Jobs::Importer.any_instance.expects(:rollback).never
Jobs::Importer.new.execute( @importer_args ) rescue nil
end
end
# Shared behavior for every failure mode that should trigger a rollback:
# the original data must survive and the site must leave maintenance mode.
shared_examples_for "a case when rollback is needed" do
  before do
    Jobs::Importer.any_instance.stubs(:ordered_models_for_import).returns([User])
    @user1, @user2 = Fabricate(:user), Fabricate(:user)
    # select_rows returns an array of rows (each row an array of values).
    @user_row1 = User.connection.select_rows("select * from users order by id DESC limit 1")
    # FIX: the old code did `@user_row1[0] = '11111'`, which replaced the
    # entire first row with a bare string rather than changing its id as
    # the comment intended.  Index into the row's first column instead
    # (id is the first column of the users table here).
    @user_row1[0][0] = '11111' # change the id
    @export_data = {
      schema: { source: 'discourse', version: '20121201205642'},
      users: {
        fields: User.columns.map(&:name),
        rows: [ *@user_row1 ]
      }
    }
    @testIO = StringIO.new(@export_data.to_json, 'r')
    Import::JsonDecoder.any_instance.stubs(:input_stream).returns(@testIO)
  end

  it "should call rollback" do
    Jobs::Importer.any_instance.expects(:rollback).once
    Jobs::Importer.new.execute( @importer_args ) rescue nil
  end

  it "should restore the data" do
    expect {
      Jobs::Importer.new.execute( @importer_args ) rescue nil
    }.to_not change { User.count }
    users = User.all
    users.should include(@user1)
    users.should include(@user2)
  end

  it "should take the site out of maintenance mode" do
    Discourse.expects(:disable_maintenance_mode).at_least_once
    Jobs::Importer.new.execute( @importer_args ) rescue nil
  end
end
# Each context below stubs one pipeline step to fail and asserts the
# shared rollback behavior (or, for in-transaction failures, that the
# database transaction handles it instead of an explicit rollback).
context "when backing up a table fails" do
it "should not call rollback" do # because the transaction will rollback automatically
stub_data_loading
Jobs::Importer.any_instance.stubs(:backup_and_setup_table).raises(ActiveRecord::StatementInvalid)
Jobs::Importer.any_instance.expects(:rollback).never
Jobs::Importer.new.execute( @importer_args ) rescue nil
end
end
context "when export source is invalid" do
before do
Jobs::Importer.any_instance.stubs(:set_schema_info).raises(Import::UnsupportedExportSource)
end
it_should_behave_like "a case when rollback is needed"
end
context "when schema version is not supported" do
before do
Jobs::Importer.any_instance.stubs(:set_schema_info).raises(Import::UnsupportedSchemaVersion)
end
it_should_behave_like "a case when rollback is needed"
end
context "when schema info in export file is invalid for some other reason" do
before do
Jobs::Importer.any_instance.stubs(:set_schema_info).raises(ArgumentError)
end
it_should_behave_like "a case when rollback is needed"
end
context "when loading a table fails" do
before do
Jobs::Importer.any_instance.stubs(:load_table).raises(ActiveRecord::StatementInvalid)
end
it_should_behave_like "a case when rollback is needed"
end
context "when creating indexes fails" do
before do
Jobs::Importer.any_instance.stubs(:create_indexes).raises(ActiveRecord::StatementInvalid)
end
it_should_behave_like "a case when rollback is needed"
end
context "when table count is wrong" do
before do
Jobs::Importer.any_instance.stubs(:set_schema_info).raises(Import::WrongTableCountError)
end
it_should_behave_like "a case when rollback is needed"
end
context "when field count for a table is wrong" do
before do
Jobs::Importer.any_instance.stubs(:load_table).raises(Import::WrongFieldCountError)
end
it_should_behave_like "a case when rollback is needed"
end
end
end
end