pylint corrections

Andy Eschbacher 2016-06-06 09:26:52 -04:00
parent 1f73be2752
commit d41e28bc6f
7 changed files with 37 additions and 21 deletions

View File

@@ -1,2 +1,4 @@
-import random_seeds
-import clustering
+"""Import all modules"""
+import crankshaft.random_seeds
+import crankshaft.clustering
+import crankshaft.space_time_dynamics

View File

@@ -1 +1,2 @@
-from moran import *
+"""Import all functions from moran clustering"""
+from crankshaft.clustering.moran import *

View File

@@ -1 +1,2 @@
-from pysal_utils import *
+"""Import all functions for pysal_utils"""
+from crankshaft.pysal_utils.pysal_utils import *

View File

@ -1,5 +1,6 @@
"""
Utilities module for generic PySAL functionality, mainly centered on translating queries into numpy arrays or PySAL weights objects
Utilities module for generic PySAL functionality, mainly centered on
translating queries into numpy arrays or PySAL weights objects
"""
import numpy as np
@@ -78,7 +79,8 @@ def query_attr_where(params):
 {'subquery': ...,
 'time_cols': ['time1', 'time2', 'time3'],
 'etc': ...}
-Output: 'idx_replace."time1" IS NOT NULL AND idx_replace."time2" IS NOT NULL AND idx_replace."time3" IS NOT NULL'
+Output: 'idx_replace."time1" IS NOT NULL AND idx_replace."time2" IS NOT
+NULL AND idx_replace."time3" IS NOT NULL'
 """
 attr_string = []
 template = "idx_replace.\"%s\" IS NOT NULL"
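The reflowed docstring documents the full input-to-output mapping of query_attr_where, but the hunk only shows the first two lines of the body. The following is a minimal sketch of that documented behavior, assuming the remaining (unshown) lines simply fill the template for each entry of time_cols and join the pieces with ' AND ':

    # Minimal sketch of query_attr_where's documented behavior; the ' AND ' join
    # is an assumption, since the rest of the body is outside this hunk.
    def query_attr_where(params):
        attr_string = []
        template = "idx_replace.\"%s\" IS NOT NULL"
        for col in params['time_cols']:
            attr_string.append(template % col)
        return ' AND '.join(attr_string)

    query_attr_where({'subquery': '...', 'time_cols': ['time1', 'time2', 'time3']})
    # -> 'idx_replace."time1" IS NOT NULL AND idx_replace."time2" IS NOT NULL
    #    AND idx_replace."time3" IS NOT NULL'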

View File

@@ -1,3 +1,4 @@
+"""Random seed generator used for non-deterministic functions in crankshaft"""
 import random
 import numpy

View File

@@ -1 +1,2 @@
-from markov import *
+"""Import all functions from clustering libraries."""
+from crankshaft.space_time_dynamics.markov import *

View File

@@ -8,29 +8,37 @@ import pysal as ps
 import plpy
 import crankshaft.pysal_utils as pu
-def spatial_markov_trend(subquery, time_cols, num_classes = 7,
-w_type = 'knn', num_ngbrs = 5, permutations = 0,
-geom_col = 'the_geom', id_col = 'cartodb_id'):
+def spatial_markov_trend(subquery, time_cols, num_classes=7,
+w_type='knn', num_ngbrs=5, permutations=0,
+geom_col='the_geom', id_col='cartodb_id'):
 """
 Predict the trends of a unit based on:
 1. history of its transitions to different classes (e.g., 1st quantile -> 2nd quantile)
 2. average class of its neighbors
 Inputs:
-@param subquery string: e.g., SELECT the_geom, cartodb_id, interesting_time_column FROM table_name
+@param subquery string: e.g., SELECT the_geom, cartodb_id,
+interesting_time_column FROM table_name
 @param time_cols list of strings: list of strings of column names
-@param num_classes (optional): number of classes to break distribution of values into. Currently uses quantile bins.
+@param num_classes (optional): number of classes to break distribution
+of values into. Currently uses quantile bins.
 @param w_type string (optional): weight type ('knn' or 'queen')
 @param num_ngbrs int (optional): number of neighbors (if knn type)
-@param permutations int (optional): number of permutations for test stats
-@param geom_col string (optional): name of column which contains the geometries
-@param id_col string (optional): name of column which has the ids of the table
+@param permutations int (optional): number of permutations for test
+stats
+@param geom_col string (optional): name of column which contains the
+geometries
+@param id_col string (optional): name of column which has the ids of
+the table
 Outputs:
-@param trend_up float: probablity that a geom will move to a higher class
-@param trend_down float: probablity that a geom will move to a lower class
+@param trend_up float: probablity that a geom will move to a higher
+class
+@param trend_down float: probablity that a geom will move to a lower
+class
 @param trend float: (trend_up - trend_down) / trend_static
-@param volatility float: a measure of the volatility based on probability stddev(prob array)
+@param volatility float: a measure of the volatility based on
+probability stddev(prob array)
 """
 if len(time_cols) < 2:
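For orientation, the cleaned-up signature above is called roughly as follows. This is a hedged usage sketch rather than code from the commit: it assumes a PL/Python session where the crankshaft package is importable (the module imports plpy), and the table and column names are invented placeholders.

    # Hedged usage sketch, not part of the commit; table and column names are
    # placeholders, and the call only works inside PL/Python since markov.py imports plpy.
    from crankshaft.space_time_dynamics import spatial_markov_trend

    rows = spatial_markov_trend(
        "SELECT the_geom, cartodb_id, y2010, y2011, y2012 FROM observations",
        ['y2010', 'y2011', 'y2012'],          # time_cols
        num_classes=7,                        # quantile bins per time period
        w_type='knn', num_ngbrs=5,            # 5-nearest-neighbor weights
        permutations=0,
        geom_col='the_geom', id_col='cartodb_id')
    # rows zips, per unit, the trend_up/trend_down/trend/volatility values
    # documented in the docstring above.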
@@ -49,7 +57,7 @@ def spatial_markov_trend(subquery, time_cols, num_classes = 7,
 if len(query_result) == 0:
 return zip([None], [None], [None], [None], [None])
 except plpy.SPIError, err:
-plpy.debug('Query failed with exception %s: %s' % (err, query))
+plpy.debug('Query failed with exception %s: %s' % (err, pu.construct_neighbor_query(w_type, qvals)))
 plpy.error('Query failed, check the input parameters')
 return zip([None], [None], [None], [None], [None])
@@ -72,8 +80,8 @@ def spatial_markov_trend(subquery, time_cols, num_classes = 7,
 ## get lag classes
 lag_classes = ps.Quantiles(
-ps.lag_spatial(weights, t_data[:, -1]),
-k=num_classes).yb
+ps.lag_spatial(weights, t_data[:, -1]),
+k=num_classes).yb
 ## look up probablity distribution for each unit according to class and lag class
 prob_dist = get_prob_dist(sp_markov_result.P,
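The lag-class step touched in this final hunk is plain PySAL 1.x. As a self-contained illustration under that assumption, the same two calls can be run on toy lattice data; the weights and value vector below are invented stand-ins for the function's weights and t_data[:, -1].

    # Standalone sketch of the lag-class computation above, using the legacy
    # PySAL 1.x API; the 3x3 lattice and the value vector are toy stand-ins.
    import numpy as np
    import pysal as ps

    weights = ps.lat2W(3, 3)                 # rook-contiguity weights on a 3x3 grid
    weights.transform = 'r'                  # row-standardize: lag = mean of neighbors
    values = np.arange(9, dtype=float)       # stand-in for t_data[:, -1]
    lag = ps.lag_spatial(weights, values)    # spatial lag of the last time period
    lag_classes = ps.Quantiles(lag, k=4).yb  # quantile class index per unit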