import json
import os
import sys
import time

import requests


class ImportHelper:
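    """Helper methods to import the geocoder API test dataset, look up the
    name of the table it creates, and drop that table after the tests."""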

    @classmethod
    def import_test_dataset(cls, username, api_key, host, schema):
        # Upload the CSV fixture through the Import API with type guessing
        # disabled and public privacy, then wait for the import to finish.
        requests.packages.urllib3.disable_warnings()
        url = "{0}://{1}.{2}/api/v1/imports/" \
            "?type_guessing=false&privacy=public&api_key={3}".format(
                schema, username, host, api_key)
        dataset = {
            'file': open('fixtures/geocoder_api_test_dataset.csv', 'rb')}
        response = requests.post(url, files=dataset)
        response.raise_for_status()
        response_json = json.loads(response.text)
        if not response_json['success']:
            print("Error importing the test dataset: {0}".format(response.text))
            sys.exit(1)
        # The import runs asynchronously: poll until the name of the
        # imported table becomes available.
        while True:
            table_name = ImportHelper.get_imported_table_name(
                username,
                host,
                api_key,
                response_json['item_queue_id'],
                schema
            )
            if table_name:
                return table_name
            else:
                time.sleep(5)

    @classmethod
    def get_imported_table_name(cls, username, host, api_key, import_id, schema):
        # Fetch the state of a given import from the Import API and return
        # the reported table name; the caller polls until it is non-empty.
        requests.packages.urllib3.disable_warnings()
        import_url = "{0}://{1}.{2}/api/v1/imports/{3}?api_key={4}".format(
            schema, username, host, import_id, api_key)
        import_data_response = requests.get(import_url)
        if import_data_response.status_code != 200:
            print("Error getting the table name from "
                  "the import data: {0}".format(import_data_response.text))
            sys.exit(1)
        import_data_json = json.loads(import_data_response.text)

        return import_data_json['table_name']

    @classmethod
    def clean_test_dataset(cls, username, api_key, table_name, host, schema):
        # Drop the imported table through the SQL API once the tests are done.
        requests.packages.urllib3.disable_warnings()
        url = "{0}://{1}.{2}/api/v2/sql?q=drop table {3}&api_key={4}".format(
            schema, username, host, table_name, api_key
        )
        response = requests.get(url)
        if response.status_code != 200:
            print("Error cleaning the test dataset: {0}".format(response.text))
            sys.exit(1)
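

# Illustrative usage sketch, assuming the typical import / test / clean-up
# flow suggested by the method signatures above. The credential and host
# values are placeholders, not real configuration.
#
#     table_name = ImportHelper.import_test_dataset(
#         'test_user', 'test_api_key', 'example.com', 'https')
#     try:
#         pass  # run the geocoder API tests against table_name here
#     finally:
#         ImportHelper.clean_test_dataset(
#             'test_user', 'test_api_key', table_name, 'example.com', 'https')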