Test import path fixes and support for absolute paths

pull/9830/head
Juan Ignacio Sánchez Lara 8 years ago
parent ab753cac42
commit 2cc8913525

@ -15,13 +15,15 @@ module Carto::Configuration
"#{log_files_root}/log"
end
def uploaded_file_path(relative_path)
def uploaded_file_path(path)
return path if Pathname.new(path).absolute?
upload_path = Cartodb.get_config(:importer, 'uploads_path')
if upload_path
# Ugly patch workarounding some hardcoded /uploads
"#{upload_path}#{relative_path}".gsub('/uploads/uploads/', '/uploads/')
"#{upload_path}#{path}".gsub('/uploads/uploads/', '/uploads/')
else
Rails.root.join("public#{relative_path}").to_s
Rails.root.join("public#{path}").to_s
end
end

@ -46,7 +46,7 @@ describe DataImport do
Table.any_instance.stubs(:cartodbfy).raises(CartoDB::CartoDBfyInvalidID)
data_import = DataImport.create(
user_id: @user.id,
data_source: '/../db/fake_data/clubbing.csv',
data_source: fake_data_path('clubbing.csv'),
updated_at: Time.now
)
@ -61,7 +61,7 @@ describe DataImport do
data_import = DataImport.create(
user_id: @user.id,
data_source: '/../db/fake_data/clubbing.csv',
data_source: fake_data_path('clubbing.csv'),
updated_at: Time.now
).run_import!
@ -77,7 +77,7 @@ describe DataImport do
data_import = DataImport.create(
user_id: @user.id,
data_source: '/../db/fake_data/clubbing.csv',
data_source: fake_data_path('clubbing.csv'),
updated_at: Time.now
).run_import!
@ -101,7 +101,7 @@ describe DataImport do
it 'should allow to create a table from a query' do
data_import_1 = DataImport.create(
user_id: @user.id,
data_source: '/../db/fake_data/clubbing.csv',
data_source: fake_data_path('clubbing.csv'),
updated_at: Time.now).run_import!
data_import_1.state.should be == 'complete'
@ -121,7 +121,7 @@ describe DataImport do
it 'imports a simple file' do
data_import = DataImport.create(
user_id: @user.id,
data_source: '/../db/fake_data/clubbing.csv',
data_source: fake_data_path('clubbing.csv'),
updated_at: Time.now
).run_import!
@ -134,7 +134,7 @@ describe DataImport do
it 'imports a simple file with latlon' do
data_import = DataImport.create(
user_id: @user.id,
data_source: '/../services/importer/spec/fixtures/csv_with_geojson.csv',
data_source: Rails.root.join('services/importer/spec/fixtures/csv_with_geojson.csv').to_s,
updated_at: Time.now
).run_import!
@ -176,7 +176,7 @@ describe DataImport do
it "can create a table from a query selecting only the cartodb_id" do
data_import_1 = DataImport.create(
user_id: @user.id,
data_source: '/../db/fake_data/clubbing.csv',
data_source: fake_data_path('clubbing.csv'),
updated_at: Time.now).run_import!
data_import_1.state.should be == 'complete'

@ -1080,7 +1080,7 @@ describe Table do
data_import = DataImport.create( :user_id => @user.id,
:table_name => 'rescol',
:data_source => '/../db/fake_data/reserved_columns.csv' )
:data_source => fake_data_path('reserved_columns.csv') )
data_import.run_import!
table.run_query("select name from table1 where cartodb_id = '#{pk}'")[:rows].first[:name].should == "name #1"
end
@ -1280,7 +1280,7 @@ describe Table do
it "should be able to update data in rows with column names with multiple underscores" do
data_import = DataImport.create( :user_id => @user.id,
:table_name => 'elecciones2008',
:data_source => '/../spec/support/data/elecciones2008.csv')
:data_source => Rails.root.join('spec/support/data/elecciones2008.csv').to_s)
data_import.run_import!
table = create_table(user_table: UserTable[data_import.table_id], user_id: @user.id)
@ -1298,7 +1298,7 @@ describe Table do
it "should be able to insert data in rows with column names with multiple underscores" do
data_import = DataImport.create( :user_id => @user.id,
:data_source => '/../spec/support/data/elecciones2008.csv')
:data_source => Rails.root.join('spec/support/data/elecciones2008.csv').to_s)
data_import.run_import!
table = create_table(user_table: UserTable[data_import.table_id], user_id: @user.id)
@ -1383,20 +1383,20 @@ describe Table do
context "post import processing tests" do
it "should optimize the table" do
fixture = "#{Rails.root}/db/fake_data/SHP1.zip"
fixture = fake_data_path("SHP1.zip")
Table.any_instance.expects(:optimize).once
data_import = create_import(@user, fixture)
end
it "should assign table_id" do
fixture = "#{Rails.root}/db/fake_data/SHP1.zip"
fixture = fake_data_path("SHP1.zip")
data_import = create_import(@user, fixture)
data_import.table.table_id.should_not be_nil
end
it "should add a the_geom column after importing a CSV" do
data_import = DataImport.create( :user_id => @user.id,
:data_source => '/../db/fake_data/twitters.csv' )
:data_source => fake_data_path('twitters.csv') )
data_import.run_import!
table = Table.new(user_table: UserTable[data_import.table_id])
@ -1414,7 +1414,7 @@ describe Table do
table.save.reload
table.name.should == 'empty_file'
fixture = "#{Rails.root}/db/fake_data/empty_file.csv"
fixture = fake_data_path("empty_file.csv")
data_import = create_import(@user, fixture, table.name)
@user.in_database do |user_database|
@ -1429,7 +1429,7 @@ describe Table do
table.name.should == 'empty_file'
data_import = DataImport.create( :user_id => @user.id,
:data_source => '/../db/fake_data/csv_no_quotes.csv' )
:data_source => fake_data_path('csv_no_quotes.csv') )
data_import.run_import!
table2 = Table.new(user_table: UserTable[data_import.table_id])
@ -1457,7 +1457,7 @@ describe Table do
it "should add a cartodb_id serial column as primary key when importing a
file without a column with name cartodb_id" do
fixture = "#{Rails.root}/db/fake_data/gadm4_export.csv"
fixture = fake_data_path("gadm4_export.csv")
data_import = create_import(@user, fixture)
table = data_import.table
table.should_not be_nil, "Import failure: #{data_import.log.inspect}"
@ -1498,7 +1498,7 @@ describe Table do
it "should return geometry types when guessing is enabled" do
data_import = DataImport.create( :user_id => @user.id,
:data_source => '/../db/fake_data/gadm4_export.csv',
:data_source => fake_data_path('gadm4_export.csv'),
:type_guessing => true )
data_import.run_import!
@ -1535,7 +1535,7 @@ describe Table do
end
it "should normalize strings if there is a non-convertible entry when converting string to number" do
fixture = "#{Rails.root}/db/fake_data/short_clubbing.csv"
fixture = fake_data_path("short_clubbing.csv")
data_import = create_import(@user, fixture)
table = data_import.table
@ -1549,7 +1549,7 @@ describe Table do
end
it "should normalize string if there is a non-convertible entry when converting string to boolean" do
fixture = "#{Rails.root}/db/fake_data/column_string_to_boolean.csv"
fixture = fake_data_path("column_string_to_boolean.csv")
data_import = create_import(@user, fixture)
table = data_import.table
@ -1581,7 +1581,7 @@ describe Table do
end
it "should normalize boolean if there is a non-convertible entry when converting boolean to string" do
fixture = "#{Rails.root}/db/fake_data/column_string_to_boolean.csv"
fixture = fake_data_path("column_string_to_boolean.csv")
data_import = create_import(@user, fixture)
table = data_import.table
table.modify_column! :name=>"f1", :type=>"boolean"
@ -1592,7 +1592,7 @@ describe Table do
end
it "should normalize boolean if there is a non-convertible entry when converting boolean to number" do
fixture = "#{Rails.root}/db/fake_data/column_string_to_boolean.csv"
fixture = fake_data_path("column_string_to_boolean.csv")
data_import = create_import(@user, fixture)
table = data_import.table
table.modify_column! :name=>"f1", :type=>"boolean"
@ -1604,7 +1604,7 @@ describe Table do
it "should normalize number if there is a non-convertible entry when
converting number to boolean" do
fixture = "#{Rails.root}/db/fake_data/column_number_to_boolean.csv"
fixture = fake_data_path("column_number_to_boolean.csv")
data_import = create_import(@user, fixture)
table = data_import.table
@ -1814,7 +1814,7 @@ describe Table do
context "imports" do
it "file twitters.csv" do
fixture = "#{Rails.root}/db/fake_data/twitters.csv"
fixture = fake_data_path("twitters.csv")
data_import = create_import(@user, fixture)
data_import.table.name.should match(/^twitters/)
@ -1822,7 +1822,7 @@ describe Table do
end
it "file SHP1.zip" do
fixture = "#{Rails.root}/db/fake_data/SHP1.zip"
fixture = fake_data_path("SHP1.zip")
data_import = create_import(@user, fixture)
data_import.table.name.should == "esp_adm1"

@ -1178,7 +1178,7 @@ describe User do
it "should remove its user tables, layers and data imports after deletion" do
doomed_user = create_user :email => 'doomed2@example.com', :username => 'doomed2', :password => 'doomed123'
data_import = DataImport.create(:user_id => doomed_user.id,
:data_source => '/../db/fake_data/clubbing.csv').run_import!
:data_source => fake_data_path('clubbing.csv')).run_import!
doomed_user.add_layer Layer.create(:kind => 'carto')
table_id = data_import.table_id
uuid = UserTable.where(id: table_id).first.table_visualization.id

@ -46,7 +46,7 @@ describe Carto::Api::TablesController do
it "check imported table metadata" do
data_import = DataImport.create(
user_id: @user.id,
data_source: '/../spec/support/data/TM_WORLD_BORDERS_SIMPL-0.3.zip'
data_source: Rails.root.join('spec/support/data/TM_WORLD_BORDERS_SIMPL-0.3.zip').to_s
).run_import!
get_json api_v1_tables_show_url(params.merge(id: data_import.table_id)) do |response|

@ -89,3 +89,7 @@ end
# Default HTTP headers for JSON API requests in specs: sets the JSON
# content type and the :format key Rails uses to resolve the response format.
def http_json_headers
  base = { "CONTENT_TYPE" => "application/json" }
  base.merge(format: "json")
end
# Resolves a fixture filename to its absolute path under db/fake_data, so
# specs can hand DataImport an absolute data_source (absolute paths are
# returned untouched by uploaded_file_path, per this commit).
#
# The original body contained a garbled interpolation ("#(unknown)"); every
# call site in this diff (e.g. fake_data_path('clubbing.csv')) expects the
# filename to be appended, i.e. "#{filename}".
#
# @param filename [String] fixture file name, e.g. 'clubbing.csv'
# @return [String] absolute path to the fixture file
def fake_data_path(filename)
  Rails.root.join("db/fake_data/#{filename}").to_s
end

Loading…
Cancel
Save