Fix null id when syncing from ArcGIS

pull/14082/head
Alberto Romeu 6 years ago
parent ca61041558
commit 23bbffc786

@ -40,7 +40,8 @@ module CartoDB
index_statements = @table_setup.generate_index_statements(user.database_schema, table_name)
move_to_schema(result)
geo_type = fix_the_geom_type!(user.database_schema, result.table_name)
import_cleanup(user.database_schema, result.table_name)
# import_cleanup(user.database_schema, result.table_name)
enforce_valid_cartodb_id(table_name)
@table_setup.cartodbfy(result.table_name)
@table_setup.copy_privileges(user.database_schema, table_name, user.database_schema, result.table_name)
overwrite(table_name, result)
@ -95,6 +96,11 @@ module CartoDB
raise exception
end
# Normalizes the cartodb_id column of a freshly imported table by delegating
# to Table#import_cleanup on the service object of the user's table.
# NOTE(review): assumes a table named `table_name` exists for `user` —
# `.first` would be nil otherwise. Confirm callers guarantee this.
def enforce_valid_cartodb_id(table_name)
  user_table_id = user.tables.where(name: table_name).first.id
  Carto::UserTable.find(user_table_id).service.import_cleanup
end
def setup_table(table_name, geo_type)
table = Carto::UserTable.find(user.tables.where(name: table_name).first.id).service

File diff suppressed because it is too large Load Diff

@ -73,4 +73,68 @@ module FileServerHelper
filename = filepath.split('/').last
{ "Content-Disposition" => "attachment; filename=#{filename}" }
end
# Stubs the three ArcGIS REST endpoints exercised during an import/sync:
#   1. layer metadata        (…/MapServer/2?f=json)
#   2. object-id listing     (…/query?where=…)
#   3. paged feature queries (…/query), filtered by the requested OBJECTID range
#
# absolute_filepath          - path to a JSON fixture holding the layer features
# absolute_metadata_filepath - path to a JSON fixture holding the layer metadata
def stub_arcgis_response_with_file(
  absolute_filepath,
  absolute_metadata_filepath = File.expand_path('spec/fixtures/arcgis_metadata.json')
)
  # Metadata of a layer
  Typhoeus.stub(/\/arcgis\/rest\/services\/Planning\/EPI_Primary_Planning_Layers\/MapServer\/2\?f=json/) do
    body = File.read(absolute_metadata_filepath)
    Typhoeus::Response.new(
      code: 200,
      headers: { 'Content-Type' => 'application/json' },
      body: body
    )
  end
  # IDs list of a layer
  Typhoeus.stub(/\/arcgis\/rest\/(.*)query\?where=/) do
    json_file = JSON.parse(File.read(absolute_filepath))
    Typhoeus::Response.new(
      code: 200,
      headers: { 'Content-Type' => 'application/json' },
      body: JSON.dump(
        objectIdFieldName: "OBJECTID",
        objectIds: json_file['features'].map { |f| f['attributes']['OBJECTID'] }
      )
    )
  end
  Typhoeus.stub(/\/arcgis\/rest\/(.*)query$/) do |request|
    response_body = ::JSON.parse(File.read(absolute_filepath))
    request_body = request.options[:body]
    requested_object_id = nil
    lower_match = nil
    upper_match = nil
    if request_body[:objectIds]
      requested_object_id = request_body[:objectIds]
    else
      # BUGFIX: `/re/ =~ str` returns the match *position* (an Integer) or nil,
      # so `lower_match[1]` was Integer bit access, not the captured group, and
      # the OBJECTID range filter never worked. Regexp#match returns MatchData,
      # whose [1] is the captured number as intended.
      lower_match = /OBJECTID\s+>=(\d+)/.match(request_body[:where])
      upper_match = /OBJECTID\s+<=(\d+)/.match(request_body[:where])
    end
    response_body['features'] = response_body['features'].select do |f|
      object_id = f['attributes']['OBJECTID']
      if requested_object_id
        # NOTE(review): assumes :objectIds carries a single id of the same type
        # as the fixture's OBJECTID attribute — confirm against the importer.
        object_id == requested_object_id
      elsif lower_match && upper_match
        object_id >= lower_match[1].to_i && object_id <= upper_match[1].to_i
      elsif lower_match
        object_id >= lower_match[1].to_i
      elsif upper_match
        object_id <= upper_match[1].to_i
      end
    end
    Typhoeus::Response.new(
      code: 200,
      headers: { 'Content-Type' => 'application/json' },
      body: ::JSON.dump(response_body)
    )
  end
end
end

@ -1,5 +1,6 @@
# encoding: utf-8
require_relative '../spec_helper'
require_relative '../helpers/file_server_helper'
require_relative 'data_import_shared_examples'
describe DataImport do
@ -520,69 +521,8 @@ describe DataImport do
CartoDB::Importer2::QueryBatcher.any_instance.unstub(:execute_update)
end
# Stubs the three ArcGIS REST endpoints exercised during an import:
#   1. layer metadata        (…/MapServer/2?f=json) — served from the shared fixture
#   2. object-id listing     (…/query?where=…)
#   3. paged feature queries (…/query), filtered by the requested OBJECTID range
#
# filename - fixture path relative to this spec file's directory
def stub_arcgis_response_with_file(filename)
  # Metadata of a layer
  Typhoeus.stub(/\/arcgis\/rest\/services\/Planning\/EPI_Primary_Planning_Layers\/MapServer\/2\?f=json/) do
    body = File.read(File.join(File.dirname(__FILE__), "../fixtures/arcgis_metadata.json"))
    Typhoeus::Response.new(
      code: 200,
      headers: { 'Content-Type' => 'application/json' },
      body: body
    )
  end
  # IDs list of a layer
  Typhoeus.stub(/\/arcgis\/rest\/(.*)query\?where=/) do
    json_file = JSON.parse(File.read(File.join(File.dirname(__FILE__), filename)))
    Typhoeus::Response.new(
      code: 200,
      headers: { 'Content-Type' => 'application/json' },
      body: JSON.dump(
        objectIdFieldName: "OBJECTID",
        objectIds: json_file['features'].map { |f| f['attributes']['OBJECTID'] }
      )
    )
  end
  Typhoeus.stub(/\/arcgis\/rest\/(.*)query$/) do |request|
    response_body = ::JSON.parse(File.read(File.join(File.dirname(__FILE__), filename)))
    request_body = request.options[:body]
    requested_object_id = nil
    lower_match = nil
    upper_match = nil
    if request_body[:objectIds]
      requested_object_id = request_body[:objectIds]
    else
      # BUGFIX: `/re/ =~ str` yields the match *position* (an Integer) or nil,
      # so `lower_match[1]` was Integer bit access, not the captured group.
      # Regexp#match returns MatchData, whose [1] is the captured number.
      lower_match = /OBJECTID\s+>=(\d+)/.match(request_body[:where])
      upper_match = /OBJECTID\s+<=(\d+)/.match(request_body[:where])
    end
    response_body['features'] = response_body['features'].select do |f|
      object_id = f['attributes']['OBJECTID']
      if requested_object_id
        # NOTE(review): assumes :objectIds carries a single id of the same type
        # as the fixture's OBJECTID attribute — confirm against the importer.
        object_id == requested_object_id
      elsif lower_match && upper_match
        object_id >= lower_match[1].to_i && object_id <= upper_match[1].to_i
      elsif lower_match
        object_id >= lower_match[1].to_i
      elsif upper_match
        object_id <= upper_match[1].to_i
      end
    end
    Typhoeus::Response.new(
      code: 200,
      headers: { 'Content-Type' => 'application/json' },
      body: ::JSON.dump(response_body)
    )
  end
end
it 'should raise statement timeout error when the query batcher raise that exception' do
stub_arcgis_response_with_file('../fixtures/arcgis_response_valid.json')
stub_arcgis_response_with_file(File.expand_path('spec/fixtures/arcgis_response_valid.json'))
CartoDB::Importer2::QueryBatcher.any_instance
.stubs(:execute_update)
.raises(Sequel::DatabaseError, 'canceling statement due to statement timeout')
@ -598,7 +538,7 @@ describe DataImport do
end
it 'should raise invalid data error when the query batcher raise any other exception' do
stub_arcgis_response_with_file('../fixtures/arcgis_response_valid.json')
stub_arcgis_response_with_file(File.expand_path('spec/fixtures/arcgis_response_valid.json'))
CartoDB::Importer2::QueryBatcher.any_instance
.stubs(:execute_update)
.raises(Sequel::DatabaseError, 'GEOSisValid(): InterruptedException: Interrupted!')
@ -614,7 +554,7 @@ describe DataImport do
end
it 'should import this supposed invalid dataset for ogr2ogr 2.1.1' do
stub_arcgis_response_with_file('../fixtures/arcgis_response_invalid.json')
stub_arcgis_response_with_file(File.expand_path('spec/fixtures/arcgis_response_invalid.json'))
data_import = DataImport.create(
user_id: @user.id,
@ -626,7 +566,7 @@ describe DataImport do
end
it 'should import files with missing ogc_fid' do
stub_arcgis_response_with_file('../fixtures/arcgis_response_missing_ogc_fid.json')
stub_arcgis_response_with_file(File.expand_path('spec/fixtures/arcgis_response_missing_ogc_fid.json'))
data_import = DataImport.create(
user_id: @user.id,

@ -127,7 +127,7 @@ describe Synchronization::Member do
DataImport.create(
user_id: @user2.id,
data_source: fake_data_path('guess_country.csv'),
data_source: path,
synchronization_id: member.id,
service_name: 'public_url',
service_item_id: url,
@ -140,6 +140,39 @@ describe Synchronization::Member do
expect(member.state).to eq 'failure'
expect(member.error_code).to eq 2013
end
it 'should sync files with missing ogc_fid' do
  # Serve the ogc_fid-less fixture from the stubbed ArcGIS endpoints.
  fixture_path = File.expand_path('spec/fixtures/arcgis_response_missing_ogc_fid.json')
  stub_arcgis_response_with_file(
    fixture_path,
    File.expand_path('spec/fixtures/arcgis_metadata_ogc_fid.json')
  )
  url = 'https://wtf.com/arcgis/rest/services/Planning/EPI_Primary_Planning_Layers/MapServer/2'
  sync_attributes = random_attributes(user_id: @user1.id)
                    .merge(service_item_id: url, url: url, name: 'land_zoning')
  member = Synchronization::Member.new(sync_attributes).store
  # The initial import has to succeed before the sync run is exercised.
  data_import = DataImport.create(
    user_id: @user1.id,
    synchronization_id: member.id,
    service_name: 'arcgis',
    service_item_id: url,
    updated_at: Time.now
  )
  data_import.run_import!
  expect(data_import.state).to eq 'complete'
  # Re-run as a synchronization, short-circuiting the download with the fixture.
  source_file = CartoDB::Importer2::SourceFile.new(
    fixture_path,
    'arcgis_response_missing_ogc_fid.json'
  )
  CartoDB::Importer2::Downloader.any_instance.stubs(:download_and_store).returns(source_file)
  CartoDB::Importer2::Downloader.any_instance.stubs(:source_file).returns(source_file)
  member.run
  expect(member.state).to eq 'success'
end
end
end

Loading…
Cancel
Save