Fixed bug where the SERIAL cartodb_id column was not created as a primary key

Set a trigger to protect cartodb_id from being updated
1.0
Fernando Blat 14 years ago
parent 779c385108
commit 94c2bda943
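
The change does two things: cartodb_id is now declared as a real primary key, and a BEFORE UPDATE trigger silently discards any attempt to change it, so an UPDATE succeeds but the key keeps its old value. A minimal sketch of the resulting behavior, assuming a Sequel connection db and a table my_table that already has the trigger installed (both names are illustrative, not part of this commit):

    # db is a Sequel connection to the user's database; my_table already has
    # the protect_data_trigger from this commit installed (hypothetical names).
    require "sequel"
    db = Sequel.connect(ENV["DATABASE_URL"])

    # The update itself succeeds...
    db.run("UPDATE my_table SET cartodb_id = 666 WHERE cartodb_id = 1")

    # ...but the trigger copied OLD.cartodb_id back into NEW.cartodb_id,
    # so the primary key value is unchanged.
    db["SELECT cartodb_id FROM my_table ORDER BY cartodb_id ASC LIMIT 1"].first
    # => {:cartodb_id=>1}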

@@ -54,12 +54,16 @@ class Table < Sequel::Model(:user_tables)
             column.gsub(/primary\s+key/i,"UNIQUE")
           end
         end
-        sanitized_force_schema.unshift("cartodb_id SERIAL") if import_from_file.blank?
+        # If import_from_file is blank the primary key is added now.
+        # If not, we add it after importing the CSV file, because the number of columns
+        # will not match
+        sanitized_force_schema.unshift("cartodb_id SERIAL PRIMARY KEY") if import_from_file.blank?
         user_database.run("CREATE TABLE #{self.name} (#{sanitized_force_schema.join(', ')})")
       end
     end
   end
   import_data! unless import_from_file.nil?
+  set_triggers
   end
   super
 end
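
With PRIMARY KEY on the unshifted column, the CREATE TABLE statement built above now declares cartodb_id as the table's primary key. Roughly what gets run for a table named my_table with a force_schema of "name varchar, population integer" (both values are made up for the example):

    # Illustrative expansion of the interpolated statement above;
    # "my_table" and the two user columns are example values only.
    user_database.run(
      "CREATE TABLE my_table (cartodb_id SERIAL PRIMARY KEY, name varchar, population integer)"
    )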
@@ -345,4 +349,25 @@ class Table < Sequel::Model(:user_tables)
     self.force_schema = result.join(', ')
   end
 
+  def set_triggers
+    owner.in_database(:as => :superuser) do |user_database|
+      user_database.run(<<-TRIGGER
+        -- Sets a trigger to protect the primary key cartodb_id from being updated
+        DROP TRIGGER IF EXISTS protect_data_trigger ON #{self.name};
+        CREATE OR REPLACE FUNCTION protect_data() RETURNS TRIGGER AS $protect_data_trigger$
+          BEGIN
+            NEW.cartodb_id := OLD.cartodb_id;
+            RETURN NEW;
+          END;
+        $protect_data_trigger$ LANGUAGE plpgsql;
+        CREATE TRIGGER protect_data_trigger
+          BEFORE UPDATE ON #{self.name}
+          FOR EACH ROW EXECUTE PROCEDURE protect_data();
+      TRIGGER
+      )
+    end
+  end
 end
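
To confirm from a console that set_triggers actually installed the trigger, the PostgreSQL catalog can be queried directly. A sketch, assuming a Sequel connection user_database to the owner's database and the antantaric_species table used in the specs below:

    # Hypothetical verification snippet: list non-internal triggers on the table.
    user_database[
      "SELECT tgname FROM pg_trigger WHERE tgrelid = 'antantaric_species'::regclass AND NOT tgisinternal"
    ].all
    # => [{:tgname=>"protect_data_trigger"}]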

@@ -249,6 +249,12 @@ feature "Tables JSON API" do
     table.reload
     row = table.to_json(:rows_per_page => 1, :page => 0)[:rows].first
     row[:description].should == "Description 123"
+
+    put_json "/api/json/tables/#{table.id}/rows/#{row[:cartodb_id]}", {:cartodb_id => "666"}
+    response.status.should == 200
+    table.reload
+    row = table.to_json(:rows_per_page => 1, :page => 0)[:rows].first
+    row[:cartodb_id].should == 1
   end
 
   scenario "Update the value from a cell with an invalid value" do

@@ -138,4 +138,21 @@ describe User do
     api_key.api_key.should == key
   end
 
+  it "should not be able to update primary key value" do
+    user = create_user
+    table = new_table
+    table.user_id = user.id
+    table.name = 'antantaric species'
+    table.import_from_file = Rack::Test::UploadedFile.new("#{Rails.root}/db/fake_data/import_csv_1.csv", "text/csv")
+    table.save
+
+    query_result = user.run_query("select * from antantaric_species order by cartodb_id asc limit 1")
+    query_result[:rows][0][:cartodb_id].should == 1
+
+    user.run_query("update antantaric_species set cartodb_id = 666 where cartodb_id = 1")
+
+    query_result = user.run_query("select * from antantaric_species order by cartodb_id asc limit 1")
+    query_result[:rows][0][:cartodb_id].should == 1
+  end
 end
