Map bounds weren't always being calculated

* Add a new "table_names" attribute to DataImport to keep track of the tables
  created by each import job
* Calculate map bounds for every imported table, not just one
* Calculate map bounds for tables created via SQL queries
* Don't calculate map bounds after appending data to a table
* Update specs
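
For illustration, the end-to-end flow introduced here looks roughly like the sketch below: the importer records every table it creates in the new comma-separated table_names column, and the post-import step splits that list and recalculates map bounds per table. This is a minimal stand-alone sketch; FakeMap, FakeTable and the sample table names are stand-ins invented for the example, not the real DataImport, Table and Map models shown in the hunks.

# Stand-in structs for illustration only; not the real CartoDB models.
FakeMap = Struct.new(:bounds) do
  def recalculate_bounds!
    # The real Map#recalculate_bounds! derives bounds from the table's geometries.
    self.bounds = :recalculated
  end
end
FakeTable = Struct.new(:name, :map)

# After the importer runs, it joins the created table names into one text value
# (mirroring @data_import.table_names = payloads.map(&:name).join(',')).
payloads    = [FakeTable.new('points', FakeMap.new), FakeTable.new('polygons', FakeMap.new)]
table_names = payloads.map(&:name).join(',')

# The post-import step then splits that list and recalculates bounds per table.
table_names.to_s.split(',').each do |name|
  table = payloads.find { |t| t.name == name } # stands in for Table.filter(...).first
  table.map.recalculate_bounds!
end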
2.0
David Arango 12 years ago
parent a0abc941d1
commit 39d8379c58

@@ -21,8 +21,6 @@ class DataImport < Sequel::Model
     #after_transition :uploading => :preparing, :preparing => :importing, :importing => :cleaning do
     #end
     after_transition any => :complete do
-      table = Table[table_id]
-      table.map.recalculate_bounds!
       self.success = true
       self.logger << "SUCCESS!\n"
       self.save
@@ -98,6 +96,7 @@ class DataImport < Sequel::Model
     elsif table_copy.present? || from_query.present?
       query = table_copy ? "SELECT * FROM #{table_copy}" : from_query
       new_table_name = import_from_query table_name, query
+      self.update :table_names => new_table_name
       migrate_existing new_table_name
     elsif %w(url file).include?(data_type)
@@ -125,6 +124,12 @@ class DataImport < Sequel::Model
       #true # FIXME: our exception handler returns true so that the after_create method doesnt rollback
     end
+    # Recalculate map bounds on every imported table
+    self.table_names.to_s.split(',').each do |table_name|
+      table = Table.filter(:user_id => current_user.id, :name => table_name).first
+      table.map.recalculate_bounds!
+    end
   end
   def before_save
@@ -191,6 +196,10 @@ class DataImport < Sequel::Model
   def append_to_existing
     imports, errors = import_to_cartodb data_type, data_source
+    # table_names is null, since we're just appending data to
+    # an existing table, not creating a new one
+    self.update :table_names => nil
     @table = Table.filter(:user_id => current_user.id, :id => table_id).first
     (imports || []).each do |import|
       migrate_existing import.name, table_name

@@ -0,0 +1,7 @@
+Sequel.migration do
+  change do
+    alter_table :data_imports do
+      add_column :table_names, :text
+    end
+  end
+end

@@ -245,7 +245,7 @@ module CartoDB
         out = loader.process!
-        out.each{ |d| payloads << d }
+        out.each { |d| payloads << d }
         @data_import.log_update("#{data[:ext]} successfully loaded")
       rescue => e
         @data_import.reload
@@ -261,6 +261,7 @@ module CartoDB
       # Flag the data import as failed
       if payloads.length > 0
         @data_import.tables_created_count = payloads.size
+        @data_import.table_names = payloads.map(&:name).join(',')
         @data_import.log_update("#{payloads.size} tables imported")
       else
         @data_import.failed

@@ -110,22 +110,6 @@ describe "Imports API" do
       import['state'].should be == 'complete'
     end
-    it 'allows users to import csv files with invalid encodings' do
-      f = upload_file('spec/support/data/invalid_byte_seq.csv', 'text/csv')
-      post v1_imports_url(:host => 'test.localhost.lan',
-                          :filename => 'invalid_byte_seq.csv',
-                          :table_name => 'invalid_byte_seq',
-                          :api_key => @user.get_map_key), f.read.force_encoding('UTF-8')
-      item_queue_id = JSON.parse(response.body)['item_queue_id']
-      get v1_import_url(:host => 'test.localhost.lan', :id => item_queue_id), :api_key => @user.get_map_key
-      response.code.should be == '200'
-      import = JSON.parse(response.body)
-      import['state'].should be == 'complete'
-    end
     it 'allows users to append data to an existing table' do
       @table = FactoryGirl.create(:table, :user_id => @user.id)
