Mirror of https://github.com/CartoDB/crankshaft.git (synced 2024-11-01 10:20:48 +08:00)

Commit: Run 2to3
Parent: b3c5fd756c
Commit hash: c321caaefd
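The commit title indicates the diff below was generated mechanically with the stock 2to3 tool, i.e. roughly the equivalent of running `2to3 -w` over the Python package. As a hedged sketch only (not part of the commit, and the source path is an assumption about the repository layout), the same conversion can be driven from Python:

# Hedged sketch, not part of the commit: drive the stock 2to3 fixers from
# Python. The path below is an assumption about the repository layout.
from lib2to3.main import main as run_2to3

# '-w' writes the fixes back to the files; '-n' skips creating .bak backups.
run_2to3('lib2to3.fixes', ['-w', '-n', 'src/py/crankshaft/crankshaft'])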
@@ -15,7 +15,6 @@ nosetests test/
 - Installed through it
 - Tested, when they have a test suite.
 - Fixed in the `requirements.txt`
-* At present we use Python version 2.7.3
 
 ---
 

@@ -4,4 +4,4 @@ import crankshaft.clustering
 import crankshaft.space_time_dynamics
 import crankshaft.segmentation
 import crankshaft.regression
-import analysis_data_provider
+from . import analysis_data_provider
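The `from . import ...` rewrites that recur throughout this diff exist because Python 3 dropped implicit relative imports: inside a package, `import analysis_data_provider` no longer resolves to a sibling module. A standalone, hedged illustration (the package and module names below are made up for the demo, not taken from crankshaft):

# Hedged sketch, not part of the commit: Python 3 requires explicit relative
# imports between sibling modules of a package. This script builds a throwaway
# package in a temp directory to show the "from . import ..." form working.
import importlib
import os
import sys
import tempfile

pkg_root = tempfile.mkdtemp()
os.mkdir(os.path.join(pkg_root, 'demo_pkg'))
with open(os.path.join(pkg_root, 'demo_pkg', '__init__.py'), 'w') as f:
    f.write('from . import provider\n')       # explicit relative import (Py3-safe)
with open(os.path.join(pkg_root, 'demo_pkg', 'provider.py'), 'w') as f:
    f.write('NAME = "analysis data provider"\n')

sys.path.insert(0, pkg_root)
demo_pkg = importlib.import_module('demo_pkg')
print(demo_pkg.provider.NAME)                 # -> analysis data provider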
@@ -1,6 +1,6 @@
 """class for fetching data"""
 import plpy
-import pysal_utils as pu
+from . import pysal_utils as pu
 
 NULL_VALUE_ERROR = ('No usable data passed to analysis. Check your input rows '
                     'for null values and fill in appropriately.')

@@ -1,4 +1,4 @@
 """Import all functions from for clustering"""
-from moran import *
-from kmeans import *
-from getis import *
+from .moran import *
+from .kmeans import *
+from .getis import *

@@ -47,4 +47,4 @@ class Getis(object):
         getis = ps.esda.getisord.G_Local(attr_vals, weight,
                                          star=True, permutations=permutations)
 
-        return zip(getis.z_sim, getis.p_sim, getis.p_z_sim, weight.id_order)
+        return list(zip(getis.z_sim, getis.p_sim, getis.p_z_sim, weight.id_order))
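Most of the changes in this commit wrap `zip()` in `list()`. In Python 2, `zip()` returned a list; in Python 3 it returns a lazy, single-use iterator, so 2to3 adds `list()` wherever the result may be indexed, measured, or iterated more than once (here, the rows handed back to the PL/Python layer). A standalone illustration:

# Hedged sketch, not part of the commit: Python 3's zip() is a lazy,
# single-use iterator, which is why 2to3 wraps returned values in list().
z = zip([1, 2, 3], ['a', 'b', 'c'])
print(list(z))        # [(1, 'a'), (2, 'b'), (3, 'c')]
print(list(z))        # [] -- the iterator is already exhausted

rows = list(zip([1, 2, 3], ['a', 'b', 'c']))   # Python 2-style materialized list
print(len(rows), rows[0])                      # 3 (1, 'a')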
@@ -28,8 +28,8 @@ class Kmeans(object):
         ids = result[0]['ids']
 
         km = KMeans(n_clusters=no_clusters, n_init=no_init)
-        labels = km.fit_predict(zip(xs, ys))
-        return zip(ids, labels)
+        labels = km.fit_predict(list(zip(xs, ys)))
+        return list(zip(ids, labels))
 
     def nonspatial(self, subquery, colnames, no_clusters=5,
                    standardize=True, id_col='cartodb_id'):

@@ -75,18 +75,18 @@ class Kmeans(object):
         kmeans = KMeans(n_clusters=no_clusters,
                         random_state=0).fit(cluster_columns)
 
-        centers = [json.dumps(dict(zip(colnames, c)))
+        centers = [json.dumps(dict(list(zip(colnames, c))))
                    for c in kmeans.cluster_centers_[kmeans.labels_]]
 
         silhouettes = metrics.silhouette_samples(cluster_columns,
                                                  kmeans.labels_,
                                                  metric='sqeuclidean')
 
-        return zip(kmeans.labels_,
+        return list(zip(kmeans.labels_,
                    centers,
                    silhouettes,
                    [kmeans.inertia_] * kmeans.labels_.shape[0],
-                   data[0]['rowid'])
+                   data[0]['rowid']))
 
 
 # -- Preprocessing steps
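One side effect of the mechanical conversion is visible in the hunk above: `dict(zip(colnames, c))` already works under Python 3, because `dict()` accepts any iterable of key/value pairs, so the extra `list()` that 2to3 inserts there is redundant, though harmless. For example:

# Hedged sketch, not part of the commit: dict() consumes any iterable of
# 2-item pairs, so the list() added by 2to3 inside dict(...) changes nothing.
colnames = ['pctpov', 'pctrural']
center = [0.25, 0.60]
print(dict(zip(colnames, center)))         # {'pctpov': 0.25, 'pctrural': 0.6}
print(dict(list(zip(colnames, center))))   # identical result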
@@ -99,7 +99,7 @@ def _extract_columns(data):
     # number of columns minus rowid column
     n_cols = len(data[0]) - 1
     return np.array([data[0]['arr_col{0}'.format(i+1)]
-                     for i in xrange(n_cols)],
+                     for i in range(n_cols)],
                     dtype=float).T
 
 
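`xrange()` was removed in Python 3, and `range()` is itself lazy there, so 2to3 rewrites `xrange(n)` to `range(n)` with no behavioural change for simple loops and comprehensions. A standalone illustration mirroring the column-name pattern above:

# Hedged sketch, not part of the commit: range() replaces xrange() in Python 3
# and is already a lazy sequence, so the comprehension behaves the same.
n_cols = 4
cols = ['arr_col{0}'.format(i + 1) for i in range(n_cols)]
print(cols)   # ['arr_col1', 'arr_col2', 'arr_col3', 'arr_col4']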
@@ -75,7 +75,7 @@ class Moran(object):
         moran_global = ps.esda.moran.Moran(attr_vals, weight,
                                            permutations=permutations)
 
-        return zip([moran_global.I], [moran_global.EI])
+        return list(zip([moran_global.I], [moran_global.EI]))
 
     def local_stat(self, subquery, attr,
                    w_type, num_ngbrs, permutations, geom_col, id_col):

@@ -139,7 +139,7 @@ class Moran(object):
         lag = ps.weights.spatial_lag.lag_spatial(weight, lisa.y)
         lag_std = ps.weights.spatial_lag.lag_spatial(weight, lisa.z)
 
-        return zip(
+        return list(zip(
             quads,
             lisa.p_sim,
             lag,

@@ -148,7 +148,7 @@ class Moran(object):
             lisa.z,
             lisa.Is,
             weight.id_order
-        )
+        ))
 
     def global_rate_stat(self, subquery, numerator, denominator,
                          w_type, num_ngbrs, permutations, geom_col, id_col):

@@ -194,7 +194,7 @@ class Moran(object):
         lisa_rate = ps.esda.moran.Moran_Rate(numer, denom, weight,
                                              permutations=permutations)
 
-        return zip([lisa_rate.I], [lisa_rate.EI])
+        return list(zip([lisa_rate.I], [lisa_rate.EI]))
 
     def local_rate_stat(self, subquery, numerator, denominator,
                         w_type, num_ngbrs, permutations, geom_col, id_col):

@@ -262,7 +262,7 @@ class Moran(object):
         lag = ps.weights.spatial_lag.lag_spatial(weight, lisa.y)
         lag_std = ps.weights.spatial_lag.lag_spatial(weight, lisa.z)
 
-        return zip(
+        return list(zip(
             quads,
             lisa.p_sim,
             lag,

@@ -271,7 +271,7 @@ class Moran(object):
             lisa.z,
             lisa.Is,
             weight.id_order
-        )
+        ))
 
     def local_bivariate_stat(self, subquery, attr1, attr2,
                              permutations, geom_col, id_col,

@@ -303,7 +303,7 @@ class Moran(object):
         # find clustering of significance
         lisa_sig = quad_position(lisa.q)
 
-        return zip(lisa.Is, lisa_sig, lisa.p_sim, weight.id_order)
+        return list(zip(lisa.Is, lisa_sig, lisa.p_sim, weight.id_order))
 
 
 # Low level functions ----------------------------------------

@@ -27,7 +27,7 @@ def get_weight(query_res, w_type='knn', num_ngbrs=5):
     """
 
     neighbors = {x['id']: x['neighbors'] for x in query_res}
-    print 'len of neighbors: %d' % len(neighbors)
+    print('len of neighbors: %d' % len(neighbors))
 
     built_weight = ps.W(neighbors)
     built_weight.transform = 'r'
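The remaining debug lines use the Python 2 `print` statement, which is a syntax error under Python 3; 2to3's print fixer turns each one into a call to the `print()` function while leaving the %-formatting untouched. Standalone illustration:

# Hedged sketch, not part of the commit: the print statement becomes the
# print() function in Python 3; %-style string formatting is unchanged.
neighbors = {'id1': ['id2', 'id3'], 'id2': ['id1']}
print('len of neighbors: %d' % len(neighbors))   # len of neighbors: 2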
@@ -1,4 +1,4 @@
-import glm
-import family
-import utils
-import iwls
+from . import glm
+from . import family
+from . import utils
+from . import iwls

@@ -1,8 +1,9 @@
 from __future__ import print_function
 
 import numpy as np
 from scipy import stats
-from utils import cache_readonly
+from .utils import cache_readonly
+from functools import reduce
 
 class Results(object):
     """

@@ -7,8 +7,8 @@ The one parameter exponential family distributions used by GLM.
 
 import numpy as np
 from scipy import special
-import links as L
-import varfuncs as V
+from . import links as L
+from . import varfuncs as V
 FLOAT_EPS = np.finfo(float).eps
 
 

@@ -3,10 +3,10 @@ import numpy as np
 import numpy.linalg as la
 from pysal.spreg.utils import RegressionPropsY, spdot
 import pysal.spreg.user_output as USER
-from utils import cache_readonly
-from base import LikelihoodModelResults
-import family
-from iwls import iwls
+from .utils import cache_readonly
+from .base import LikelihoodModelResults
+from . import family
+from .iwls import iwls
 
 __all__ = ['GLM']
 

@@ -3,7 +3,7 @@ import numpy.linalg as la
 from scipy import sparse as sp
 from scipy.sparse import linalg as spla
 from pysal.spreg.utils import spdot, spmultiply
-from family import Binomial, Poisson
+from .family import Binomial, Poisson
 
 def _compute_betas(y, x):
     """
@@ -1,5 +1,5 @@
 from __future__ import absolute_import, print_function
 
 import numpy as np
 import warnings
 
@@ -17,7 +17,7 @@ try:
     from scipy.lib._version import NumpyVersion
 except ImportError:
     import re
-    string_types = basestring
+    string_types = str
 
     class NumpyVersion():
         """Parse and compare numpy version strings.
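`basestring` exists only in Python 2, where it is the common ancestor of `str` and `unicode`; in Python 3 every text string is `str`, so 2to3 replaces the compatibility alias accordingly. Standalone illustration:

# Hedged sketch, not part of the commit: Python 3 has no basestring; str is
# the single text type, so the compatibility alias simply becomes str.
string_types = str
print(isinstance('1.13.0', string_types))    # True
print(isinstance(b'1.13.0', string_types))   # False -- bytes are not text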
@@ -1 +1 @@
-from base import *
+from .base import *

@@ -1,4 +1,4 @@
-import gwr
-import sel_bw
-import diagnostics
-import kernels
+from . import gwr
+from . import sel_bw
+from . import diagnostics
+from . import kernels

@@ -7,8 +7,8 @@ __author__ = "Taylor Oshan Tayoshan@gmail.com"
 import numpy as np
 import numpy.linalg as la
 from scipy.stats import t
-from kernels import *
-from diagnostics import get_AIC, get_AICc, get_BIC
+from .kernels import *
+from .diagnostics import get_AIC, get_AICc, get_BIC
 import pysal.spreg.user_output as USER
 from crankshaft.regression.glm.family import Gaussian, Binomial, Poisson
 from crankshaft.regression.glm.glm import GLM, GLMResults

@@ -117,4 +117,4 @@ class _Kernel(object):
         elif self.function =='exponential':
             return np.exp(-zs)
         else:
-            print('Unsupported kernel function', self.function)
+            print(('Unsupported kernel function', self.function))
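The extra parentheses 2to3 adds in the `_Kernel` hunk above are deliberate: under Python 2 that line was a `print` statement printing a tuple, and wrapping the arguments preserves exactly that output under Python 3, rather than printing two space-separated values. Standalone illustration:

# Hedged sketch, not part of the commit: 2to3 assumes the Python 2 line printed
# a tuple, so it wraps the arguments to keep the same output in Python 3.
function = 'triangular'
print(('Unsupported kernel function', function))
# ('Unsupported kernel function', 'triangular')

# Without the extra parentheses, Python 3's print() would instead emit:
print('Unsupported kernel function', function)
# Unsupported kernel function triangular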
@@ -146,7 +146,7 @@ def flexible_bw(init, y, X, n, k, family, tol, max_iter, rss_score,
                 gwr_func, bw_func, sel_func):
     if init:
         bw = sel_func(bw_func(y, X))
-        print bw
+        print(bw)
         optim_model = gwr_func(y, X, bw)
         err = optim_model.resid_response.reshape((-1,1))
         est = optim_model.params

@@ -198,7 +198,7 @@ def flexible_bw(init, y, X, n, k, family, tol, max_iter, rss_score,
             new_rss = np.sum((y - predy)**2)
             score = np.abs((new_rss - rss)/new_rss)
             rss = new_rss
-            print score
+            print(score)
         scores.append(score)
         delta = score
         BWs.append(bws)

@@ -8,12 +8,12 @@
 
 __author__ = "Taylor Oshan Tayoshan@gmail.com"
 
-from kernels import *
-from search import golden_section, equal_interval, flexible_bw
-from gwr import GWR
+from .kernels import *
+from .search import golden_section, equal_interval, flexible_bw
+from .gwr import GWR
 from crankshaft.regression.glm.family import Gaussian, Poisson, Binomial
 import pysal.spreg.user_output as USER
-from diagnostics import get_AICc, get_AIC, get_BIC, get_CV
+from .diagnostics import get_AICc, get_AIC, get_BIC, get_CV
 from scipy.spatial.distance import pdist, squareform
 from pysal.common import KDTree
 import numpy as np

@@ -197,7 +197,7 @@ class Sel_BW(object):
 
         if self.fb:
             self._fbw()
-            print self.bw[1]
+            print(self.bw[1])
             self.XB = self.bw[4]
             self.err = self.bw[5]
         else:

@@ -14,7 +14,7 @@ import pysal
 class TestGWRGaussian(unittest.TestCase):
     def setUp(self):
         data = pysal.open(pysal.examples.get_path('GData_utm.csv'))
-        self.coords = zip(data.by_col('X'), data.by_col('Y'))
+        self.coords = list(zip(data.by_col('X'), data.by_col('Y')))
         self.y = np.array(data.by_col('PctBach')).reshape((-1,1))
         rural = np.array(data.by_col('PctRural')).reshape((-1,1))
         pov = np.array(data.by_col('PctPov')).reshape((-1,1))

@@ -56,10 +56,10 @@ class TestGWRGaussian(unittest.TestCase):
         BIC = get_BIC(rslt)
         CV = get_CV(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 894.0)
-        self.assertAlmostEquals(np.floor(AIC), 890.0)
-        self.assertAlmostEquals(np.floor(BIC), 944.0)
-        self.assertAlmostEquals(np.round(CV,2), 18.25)
+        self.assertAlmostEqual(np.floor(AICc), 894.0)
+        self.assertAlmostEqual(np.floor(AIC), 890.0)
+        self.assertAlmostEqual(np.floor(BIC), 944.0)
+        self.assertAlmostEqual(np.round(CV,2), 18.25)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)
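`assertAlmostEquals` is a long-deprecated alias of `assertAlmostEqual`, and 2to3's asserts fixer normalizes the test suite to the canonical name, which behaves identically. Standalone illustration:

# Hedged sketch, not part of the commit: assertAlmostEquals is merely a
# deprecated alias of assertAlmostEqual, so the renamed assertions behave
# exactly as before.
import unittest


class AlmostEqualExample(unittest.TestCase):
    def test_rounding(self):
        # places=7 compares round(a - b, 7) against zero.
        self.assertAlmostEqual(0.1 + 0.2, 0.3, places=7)


if __name__ == '__main__':
    unittest.main()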
@@ -107,10 +107,10 @@ class TestGWRGaussian(unittest.TestCase):
         BIC = get_BIC(rslt)
         CV = get_CV(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 896.0)
-        self.assertAlmostEquals(np.floor(AIC), 892.0)
-        self.assertAlmostEquals(np.floor(BIC), 941.0)
-        self.assertAlmostEquals(np.around(CV, 2), 19.19)
+        self.assertAlmostEqual(np.floor(AICc), 896.0)
+        self.assertAlmostEqual(np.floor(AIC), 892.0)
+        self.assertAlmostEqual(np.floor(BIC), 941.0)
+        self.assertAlmostEqual(np.around(CV, 2), 19.19)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)

@@ -159,10 +159,10 @@ class TestGWRGaussian(unittest.TestCase):
         BIC = get_BIC(rslt)
         CV = get_CV(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 895.0)
-        self.assertAlmostEquals(np.floor(AIC), 890.0)
-        self.assertAlmostEquals(np.floor(BIC), 943.0)
-        self.assertAlmostEquals(np.around(CV, 2), 18.21)
+        self.assertAlmostEqual(np.floor(AICc), 895.0)
+        self.assertAlmostEqual(np.floor(AIC), 890.0)
+        self.assertAlmostEqual(np.floor(BIC), 943.0)
+        self.assertAlmostEqual(np.around(CV, 2), 18.21)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)

@@ -211,10 +211,10 @@ class TestGWRGaussian(unittest.TestCase):
         BIC = get_BIC(rslt)
         CV = get_CV(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 896)
-        self.assertAlmostEquals(np.floor(AIC), 894.0)
-        self.assertAlmostEquals(np.floor(BIC), 922.0)
-        self.assertAlmostEquals(np.around(CV, 2), 17.91)
+        self.assertAlmostEqual(np.floor(AICc), 896)
+        self.assertAlmostEqual(np.floor(AIC), 894.0)
+        self.assertAlmostEqual(np.floor(BIC), 922.0)
+        self.assertAlmostEqual(np.around(CV, 2), 17.91)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)
@@ -314,7 +314,7 @@ class TestGWRGaussian(unittest.TestCase):
 class TestGWRPoisson(unittest.TestCase):
     def setUp(self):
         data = pysal.open(pysal.examples.get_path('Tokyomortality.csv'), mode='Ur')
-        self.coords = zip(data.by_col('X_CENTROID'), data.by_col('Y_CENTROID'))
+        self.coords = list(zip(data.by_col('X_CENTROID'), data.by_col('Y_CENTROID')))
         self.y = np.array(data.by_col('db2564')).reshape((-1,1))
         self.off = np.array(data.by_col('eb2564')).reshape((-1,1))
         OCC = np.array(data.by_col('OCC_TEC')).reshape((-1,1))

@@ -355,9 +355,9 @@ class TestGWRPoisson(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 13294.0)
-        self.assertAlmostEquals(np.floor(AIC), 13247.0)
-        self.assertAlmostEquals(np.floor(BIC), 13485.0)
+        self.assertAlmostEqual(np.floor(AICc), 13294.0)
+        self.assertAlmostEqual(np.floor(AIC), 13247.0)
+        self.assertAlmostEqual(np.floor(BIC), 13485.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-05)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-03)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-03)

@@ -404,9 +404,9 @@ class TestGWRPoisson(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 13285)
-        self.assertAlmostEquals(np.floor(AIC), 13259.0)
-        self.assertAlmostEquals(np.floor(BIC), 13442.0)
+        self.assertAlmostEqual(np.floor(AICc), 13285)
+        self.assertAlmostEqual(np.floor(AIC), 13259.0)
+        self.assertAlmostEqual(np.floor(BIC), 13442.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-02)

@@ -452,9 +452,9 @@ class TestGWRPoisson(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 367.0)
-        self.assertAlmostEquals(np.floor(AIC), 361.0)
-        self.assertAlmostEquals(np.floor(BIC), 451.0)
+        self.assertAlmostEqual(np.floor(AICc), 367.0)
+        self.assertAlmostEqual(np.floor(AIC), 361.0)
+        self.assertAlmostEqual(np.floor(BIC), 451.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-02,
                                    atol=1e-02)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02, atol=1e-02)

@@ -511,9 +511,9 @@ class TestGWRPoisson(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 11283.0)
-        self.assertAlmostEquals(np.floor(AIC), 11211.0)
-        self.assertAlmostEquals(np.floor(BIC), 11497.0)
+        self.assertAlmostEqual(np.floor(AICc), 11283.0)
+        self.assertAlmostEqual(np.floor(AIC), 11211.0)
+        self.assertAlmostEqual(np.floor(BIC), 11497.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-03)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-02)

@@ -559,9 +559,9 @@ class TestGWRPoisson(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 21070.0)
-        self.assertAlmostEquals(np.floor(AIC), 21069.0)
-        self.assertAlmostEquals(np.floor(BIC), 21111.0)
+        self.assertAlmostEqual(np.floor(AICc), 21070.0)
+        self.assertAlmostEqual(np.floor(AIC), 21069.0)
+        self.assertAlmostEqual(np.floor(BIC), 21111.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-02)
@@ -583,7 +583,7 @@ class TestGWRPoisson(unittest.TestCase):
 class TestGWRBinomial(unittest.TestCase):
     def setUp(self):
         data = pysal.open(pysal.examples.get_path('landslides.csv'))
-        self.coords = zip(data.by_col('X'), data.by_col('Y'))
+        self.coords = list(zip(data.by_col('X'), data.by_col('Y')))
         self.y = np.array(data.by_col('Landslid')).reshape((-1,1))
         ELEV = np.array(data.by_col('Elev')).reshape((-1,1))
         SLOPE = np.array(data.by_col('Slope')).reshape((-1,1))

@@ -630,9 +630,9 @@ class TestGWRBinomial(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 275.0)
-        self.assertAlmostEquals(np.floor(AIC), 271.0)
-        self.assertAlmostEquals(np.floor(BIC), 349.0)
+        self.assertAlmostEqual(np.floor(AICc), 275.0)
+        self.assertAlmostEqual(np.floor(AIC), 271.0)
+        self.assertAlmostEqual(np.floor(BIC), 349.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)

@@ -693,9 +693,9 @@ class TestGWRBinomial(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 277.0)
-        self.assertAlmostEquals(np.floor(AIC), 271.0)
-        self.assertAlmostEquals(np.floor(BIC), 358.0)
+        self.assertAlmostEqual(np.floor(AICc), 277.0)
+        self.assertAlmostEqual(np.floor(AIC), 271.0)
+        self.assertAlmostEqual(np.floor(BIC), 358.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)

@@ -756,9 +756,9 @@ class TestGWRBinomial(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 276.0)
-        self.assertAlmostEquals(np.floor(AIC), 272.0)
-        self.assertAlmostEquals(np.floor(BIC), 341.0)
+        self.assertAlmostEqual(np.floor(AICc), 276.0)
+        self.assertAlmostEqual(np.floor(AIC), 272.0)
+        self.assertAlmostEqual(np.floor(BIC), 341.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)

@@ -819,9 +819,9 @@ class TestGWRBinomial(unittest.TestCase):
         AIC = get_AIC(rslt)
         BIC = get_BIC(rslt)
 
-        self.assertAlmostEquals(np.floor(AICc), 276.0)
-        self.assertAlmostEquals(np.floor(AIC), 273.0)
-        self.assertAlmostEquals(np.floor(BIC), 331.0)
+        self.assertAlmostEqual(np.floor(AICc), 276.0)
+        self.assertAlmostEqual(np.floor(AIC), 273.0)
+        self.assertAlmostEqual(np.floor(BIC), 331.0)
         np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
         np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
         np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)
|
@ -12,7 +12,7 @@ class TestKernels(unittest.TestCase):
|
||||
y = np.arange(5,0, -1)
|
||||
np.random.shuffle(x)
|
||||
np.random.shuffle(y)
|
||||
self.coords = np.array(zip(x, y))
|
||||
self.coords = np.array(list(zip(x, y)))
|
||||
self.fix_gauss_kern = np.array([
|
||||
[ 1. , 0.38889556, 0.48567179, 0.48567179, 0.89483932],
|
||||
[ 0.38889556, 1. , 0.89483932, 0.64118039, 0.48567179],
|
||||
|
@ -13,7 +13,7 @@ import pysal
|
||||
class TestSelBW(unittest.TestCase):
|
||||
def setUp(self):
|
||||
data = pysal.open(pysal.examples.get_path('GData_utm.csv'))
|
||||
self.coords = zip(data.by_col('X'), data.by_col('Y'))
|
||||
self.coords = list(zip(data.by_col('X'), data.by_col('Y')))
|
||||
self.y = np.array(data.by_col('PctBach')).reshape((-1,1))
|
||||
rural = np.array(data.by_col('PctRural')).reshape((-1,1))
|
||||
pov = np.array(data.by_col('PctPov')).reshape((-1,1))
|
||||
|
@ -2,8 +2,8 @@
|
||||
Geographically weighted regression
|
||||
"""
|
||||
import numpy as np
|
||||
from gwr.base.gwr import GWR as PySAL_GWR
|
||||
from gwr.base.sel_bw import Sel_BW
|
||||
from .gwr.base.gwr import GWR as PySAL_GWR
|
||||
from .gwr.base.sel_bw import Sel_BW
|
||||
import json
|
||||
from crankshaft.analysis_data_provider import AnalysisDataProvider
|
||||
import plpy
|
||||
@@ -48,7 +48,7 @@ class GWR:
         # x, y are centroids of input geometries
         x = np.array(query_result[0]['x'], dtype=np.float)
         y = np.array(query_result[0]['y'], dtype=np.float)
-        coords = zip(x, y)
+        coords = list(zip(x, y))
 
         # extract dependent variable
         Y = np.array(query_result[0]['dep_var'], dtype=np.float).reshape((-1, 1))

@@ -88,7 +88,7 @@ class GWR:
         bw = np.repeat(float(bw), n)
 
         # create lists of json objs for model outputs
-        for idx in xrange(n):
+        for idx in range(n):
             coeffs.append(json.dumps({var: model.params[idx, k]
                                       for k, var in enumerate(ind_vars)}))
             stand_errs.append(json.dumps({var: model.bse[idx, k]

@@ -99,8 +99,8 @@ class GWR:
                 json.dumps({var: filtered_t[idx, k]
                             for k, var in enumerate(ind_vars)}))
 
-        return zip(coeffs, stand_errs, t_vals, filtered_t_vals,
-                   predicted, residuals, r_squared, bw, rowid)
+        return list(zip(coeffs, stand_errs, t_vals, filtered_t_vals,
+                   predicted, residuals, r_squared, bw, rowid))
 
     def gwr_predict(self, subquery, dep_var, ind_vars,
                     bw=None, fixed=False, kernel='bisquare',

@@ -133,7 +133,7 @@ class GWR:
 
         x = np.array(query_result[0]['x'], dtype=np.float)
         y = np.array(query_result[0]['y'], dtype=np.float)
-        coords = np.array(zip(x, y), dtype=np.float)
+        coords = np.array(list(zip(x, y)), dtype=np.float)
 
         # extract dependent variable
         Y = np.array(query_result[0]['dep_var']).reshape((-1, 1))

@@ -190,7 +190,7 @@ class GWR:
         predicted = model.predy.flatten()
 
         m = len(model.predy)
-        for idx in xrange(m):
+        for idx in range(m):
             coeffs.append(json.dumps({var: model.params[idx, k]
                                       for k, var in enumerate(ind_vars)}))
             stand_errs.append(json.dumps({var: model.bse[idx, k]

@@ -198,5 +198,5 @@ class GWR:
             t_vals.append(json.dumps({var: model.tvalues[idx, k]
                                       for k, var in enumerate(ind_vars)}))
 
-        return zip(coeffs, stand_errs, t_vals,
-                   r_squared, predicted, rowid[test])
+        return list(zip(coeffs, stand_errs, t_vals,
+                   r_squared, predicted, rowid[test]))
@@ -1,2 +1,2 @@
 """Import all functions from for segmentation"""
-from segmentation import *
+from .segmentation import *

@@ -47,7 +47,7 @@ class Segmentation(object):
                                                   model_parameters, 0.2)
         prediction = model.predict(target_features)
         accuracy_array = [accuracy] * prediction.shape[0]
-        return zip(target_ids, prediction, accuracy_array)
+        return list(zip(target_ids, prediction, accuracy_array))
 
     def create_and_predict_segment(self, query, variable, feature_columns,
                                    target_query, model_params,

@@ -79,7 +79,7 @@ class Segmentation(object):
         '''
         rowid = [{'ids': [2.9, 4.9, 4, 5, 6]}]
         '''
-        return zip(rowid[0]['ids'], result, accuracy_array)
+        return list(zip(rowid[0]['ids'], result, accuracy_array))
 
     def predict_segment(self, model, feature_columns, target_query,
                         feature_means):

@@ -1,2 +1,2 @@
 """Import all functions from clustering libraries."""
-from markov import *
+from .markov import *
@@ -91,7 +91,7 @@ class Markov(object):
         trend_up, trend_down, trend, volatility = get_prob_stats(prob_dist, sp_markov_result.classes[:, -1])
 
         # output the results
-        return zip(trend, trend_up, trend_down, volatility, weights.id_order)
+        return list(zip(trend, trend_up, trend_down, volatility, weights.id_order))
 
 
 

@@ -26,7 +26,7 @@ setup(
         'Intended Audience :: Mapping comunity',
         'Topic :: Maps :: Mapping Tools',
         'License :: OSI Approved :: MIT License',
-        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python',
     ],
 
     keywords='maps mapping tools spatial analysis geostatistics',
@@ -72,7 +72,7 @@ class MoranTest(unittest.TestCase):
         result = moran.local_stat('subquery', 'value',
                                   'knn', 5, 99, 'the_geom', 'cartodb_id')
         result = [(row[0], row[6]) for row in result]
-        zipped_values = zip(result, self.moran_data)
+        zipped_values = list(zip(result, self.moran_data))
 
         for ([res_quad, res_val], [exp_val, exp_quad]) in zipped_values:
             self.assertAlmostEqual(res_val, exp_val)

@@ -91,7 +91,7 @@ class MoranTest(unittest.TestCase):
                                        'knn', 5, 99, 'the_geom', 'cartodb_id')
         result = [(row[0], row[6]) for row in result]
 
-        zipped_values = zip(result, self.moran_data)
+        zipped_values = list(zip(result, self.moran_data))
 
         for ([res_quad, res_val], [exp_val, exp_quad]) in zipped_values:
             self.assertAlmostEqual(res_val, exp_val)

@@ -86,7 +86,7 @@ class GWRTest(unittest.TestCase):
 
         # unpack response
         coeffs, stand_errs, t_vals, t_vals_filtered, predicteds, \
-            residuals, r_squareds, bws, rowids = zip(*gwr_resp)
+            residuals, r_squareds, bws, rowids = list(zip(*gwr_resp))
 
         # prepare for comparision
         coeff_known_pctpov = self.knowns['est_pctpov']
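In the GWR test above the mechanical `list()` wrapper is again harmless but unnecessary: the result of `zip(*gwr_resp)` is unpacked immediately into tuples, which drains the iterator exactly once either way. Standalone illustration:

# Hedged sketch, not part of the commit: unpacking zip(*rows) works the same
# with or without list(), because tuple unpacking consumes the iterator once.
gwr_resp = [(0.1, 'a'), (0.2, 'b'), (0.3, 'c')]
coeffs, labels = zip(*gwr_resp)            # fine in Python 3 without list()
print(coeffs)   # (0.1, 0.2, 0.3)
print(labels)   # ('a', 'b', 'c')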
@@ -98,13 +98,13 @@ class GWRTest(unittest.TestCase):
         # test pctpov coefficient estimates
         for idx, val in enumerate(coeff_known_pctpov):
             resp_idx = rowids.index(ids[idx])
-            self.assertAlmostEquals(val,
+            self.assertAlmostEqual(val,
                                     json.loads(coeffs[resp_idx])['pctpov'],
                                     places=4)
         # test pctrural tvals
         for idx, val in enumerate(tval_known_pctblack):
             resp_idx = rowids.index(ids[idx])
-            self.assertAlmostEquals(val,
+            self.assertAlmostEqual(val,
                                     json.loads(t_vals[resp_idx])['pctrural'],
                                     places=4)
 

@@ -119,7 +119,7 @@ class GWRTest(unittest.TestCase):
 
         # unpack response
         coeffs, stand_errs, t_vals, \
-            r_squareds, predicteds, rowid = zip(*gwr_resp)
+            r_squareds, predicteds, rowid = list(zip(*gwr_resp))
         threshold = 0.01
 
         for i, idx in enumerate(self.idx_ids_of_unknowns):

@@ -118,7 +118,7 @@ class SegmentationTest(unittest.TestCase):
                                                    model_parameters,
                                                    id_col='cartodb_id')
         results = [(row[1], row[2]) for row in result]
-        zipped_values = zip(results, self.result_seg)
+        zipped_values = list(zip(results, self.result_seg))
         pre_res = [r[0] for r in self.true_result]
         acc_res = [r[1] for r in self.result_seg]
 

@@ -98,7 +98,7 @@ class SpaceTimeTests(unittest.TestCase):
 
         self.assertTrue(result is not None)
         result = [(row[0], row[1], row[2], row[3], row[4]) for row in result]
-        print result[0]
+        print(result[0])
         expected = self.markov_data
         for ([res_trend, res_up, res_down, res_vol, res_id],
              [exp_trend, exp_up, exp_down, exp_vol, exp_id]