Merge branch 'master' of github.com:CartoDB/cartodb into do-subscriptions

pull/15855/head
juanrmn 4 years ago
commit ae12ff7905

@@ -35,6 +35,7 @@ Development
* Fix map backups when deleting tables via Ghost Tables [#15832](https://github.com/CartoDB/cartodb/pull/15832)
* Add DO subscriptions improvements [#15847](https://github.com/CartoDB/cartodb/pull/15847)
* Migrate ::OauthToken from Sequel to ActiveRecord [#15840](https://github.com/CartoDB/cartodb/pull/15840)
* Fix passing `::User` instead of `::Carto::User` [#15848](https://github.com/CartoDB/cartodb/pull/15848)
4.41.1 (2020-09-03)
-------------------

@@ -21,7 +21,7 @@ class OauthController < ApplicationController
if params[:x_auth_mode] == 'client_auth'
if user = authenticate(params[:x_auth_username], params[:x_auth_password])
@token = user.tokens.find_by(client_application: current_client_application, invalidated_at: nil)
@token = Carto::AccessToken.create(:user => user, :client_application => current_client_application) if @token.blank?
@token = Carto::AccessToken.create(user: user.carto_user, client_application_id: current_client_application.id) if @token.blank?
if @token
render :text => @token.to_query

@@ -84,6 +84,7 @@ class Carto::User < ActiveRecord::Base
alias_method :assets_dataset, :assets
alias_method :data_imports_dataset, :data_imports
alias_method :geocodings_dataset, :geocodings
# Returns the receiver itself. NOTE(review): presumably added so that
# both user classes answer `carto_user` uniformly (callers such as the
# OAuth controller invoke `user.carto_user` without knowing which class
# they hold) — confirm against ::User.
def carto_user
  self
end
before_create :set_database_host
before_create :generate_api_key

@@ -1,8 +1,15 @@
#!/usr/bin/env ruby
# bundle exec rails runner script/20200804_do_datasets_addfields_redis.rb
$users_metadata.keys('do:*:datasets').each do |k|
username = k.split(':')[1]
if ARGV.length != 1 then
puts "*** Please introduce the target username (or --all to update them all) ***"
exit
end
username = (ARGV[0] != '--all')? ARGV[0] : '*'
puts "Updating user: #{username}..."
$users_metadata.keys("do:#{username}:datasets").each do |k|
user = User.where(username: username).first
datasets = $users_metadata.hget(k, :bq)
@@ -10,23 +17,40 @@ $users_metadata.keys('do:*:datasets').each do |k|
datasets_enriched = datasets.map do |dataset|
# Do not process already enriched datasets:
if !(dataset['unsyncable_reason'].present?) then
doss = Carto::DoSyncServiceFactory.get_for_user(user)
sync_data = doss.sync(dataset['dataset_id'])
dataset = dataset.merge({
status: dataset['status'] || 'active',
available_in: ['bq'],
type: sync_data[:type],
estimated_size: sync_data[:estimated_size],
estimated_row_count: sync_data[:estimated_row_count],
estimated_columns_count: sync_data[:estimated_columns_count],
num_bytes: sync_data[:num_bytes],
sync_status: sync_data[:sync_status],
unsyncable_reason: sync_data[:unsyncable_reason],
sync_table: sync_data[:sync_table],
sync_table_id: sync_data[:sync_table_id],
synchronization_id: sync_data[:synchronization_id],
})
if !(dataset['unsynced_errors'].present?) then
begin
doss = Carto::DoSyncServiceFactory.get_for_user(user)
sync_data = doss.sync(dataset['dataset_id'])
# Initial quick&dirty hack.
# Since there is no public datasets yet,
# We don't want this to check the user quota (which is the last check in db-connector)
if !sync_data[:unsyncable_reason].nil? && (sync_data[:unsyncable_reason].include? "exceeds the quota available") then
sync_data[:unsyncable_reason] = nil
sync_data[:sync_status] = 'unsynced'
end
dataset = dataset.merge({
status: dataset['status'] || 'active',
created_at: dataset[:created_at] || (Time.parse(dataset['expires_at']) - 1.year).to_s,
available_in: ['bq'],
type: sync_data[:type],
estimated_size: sync_data[:estimated_size],
estimated_row_count: sync_data[:estimated_row_count],
estimated_columns_count: sync_data[:estimated_columns_count],
num_bytes: sync_data[:num_bytes],
sync_status: sync_data[:sync_status],
unsyncable_reason: sync_data[:unsyncable_reason],
unsynced_errors: [],
sync_table: sync_data[:sync_table],
sync_table_id: sync_data[:sync_table_id],
synchronization_id: sync_data[:synchronization_id],
})
rescue Google::Apis::ClientError => e
puts "Not found in BQ: #{dataset['dataset_id']}"
# leaving it as is:
dataset
end
end
puts "Update: #{dataset['dataset_id']} for #{username}"
dataset

Loading…
Cancel
Save