diff --git a/.travis.yml b/.travis.yml
index 59293df0..b0327361 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,13 +3,26 @@ sudo: false
 branches:
   only:
     - master
-python:
-  - "3.5"
-  - "3.6"
-env:
-  - PYSAL_PLUS=false
-  - PYSAL_PLUS=true
+python:
+  - 3.5
+  - 3.6
+env:
+  - PYSAL_PYPI=true PYSAL_PLUS=true
+  - PYSAL_PYPI=true PYSAL_PLUS=false
+  - PYSAL_PYPI=false PYSAL_PLUS=true
+  - PYSAL_PYPI=false PYSAL_PLUS=false
+
+matrix:
+  allow_failures:
+    - python: 3.5
+      env: PYSAL_PYPI=false PYSAL_PLUS=false
+    - python: 3.5
+      env: PYSAL_PYPI=false PYSAL_PLUS=true
+    - python: 3.6
+      env: PYSAL_PYPI=false PYSAL_PLUS=false
+    - python: 3.6
+      env: PYSAL_PYPI=false PYSAL_PLUS=true
 before_install:
   - wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
@@ -23,9 +36,12 @@ before_install:
 install:
   - conda install --yes pip nose
   - which pip
+  - if "$PYSAL_PYPI"; then
+      echo 'testing pypi libpysal' && pip install libpysal;
+    else echo 'testing git libpysal'; git clone https://github.com/pysal/libpysal.git; cd libpysal; pip install .; cd ../;
+    fi;
   - conda install --yes --file requirements.txt;
-  - pip install libpysal
-  - if [[ PYSAL_PLUS ]]; then conda install --yes numba; fi
+  - if "$PYSAL_PLUS"; then conda install --yes numba; fi
 script:
   - pwd
@@ -38,8 +54,8 @@ notifications:
         recipients:
             - levi.john.wolf+travis@gmail.com
            - sjsrey+travis@gmail.com
-        on_change: always
-        on_failure: always
+        on_success: change
+        on_failure: change
 after_success:
   - coveralls
diff --git a/esda/gamma.py b/esda/gamma.py
index f6f38b19..b897bafd 100644
--- a/esda/gamma.py
+++ b/esda/gamma.py
@@ -75,9 +75,9 @@ class Gamma(object):
     use same example as for join counts to show similarity
-    >>> import libpysal.api as lps, numpy as np
+    >>> import libpysal, numpy as np
     >>> from esda.gamma import Gamma
-    >>> w = lps.lat2W(4,4)
+    >>> w = libpysal.weights.lat2W(4,4)
     >>> y=np.ones(16)
     >>> y[0:8]=0
     >>> np.random.seed(12345)
diff --git a/esda/geary.py b/esda/geary.py
index 0d69338d..6518178f 100644
--- a/esda/geary.py
+++ b/esda/geary.py
@@ -80,10 +80,10 @@ class Geary(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> from esda.geary import Geary
-    >>> w = lps.open(lps.get_path("book.gal")).read()
-    >>> f = lps.open(lps.get_path("book.txt"))
+    >>> w = libpysal.io.open(libpysal.examples.get_path("book.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("book.txt"))
     >>> y = np.array(f.by_col['y'])
     >>> c = Geary(y,w,permutations=0)
     >>> round(c.C,7)
diff --git a/esda/getisord.py b/esda/getisord.py
index e0ce0af6..5a312f95 100644
--- a/esda/getisord.py
+++ b/esda/getisord.py
@@ -4,7 +4,7 @@ __author__ = "Sergio J. Rey <srey@asu.edu>, Myunghwa Hwang <mhwang4@gmail.com>"
 __all__ = ['G', 'G_Local']
-from libpysal.common import np, stats, math
+from libpysal.common import np, stats
 from libpysal.weights.spatial_lag import lag_spatial as slag
 from .tabular import _univariate_handler
@@ -67,7 +67,7 @@ class G(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> import numpy
     >>> numpy.random.seed(10)
@@ -75,7 +75,7 @@ class G(object):
     >>> points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
     Creating a weights object from points
-    >>> w = lps.DistanceBand(points,threshold=15)
+    >>> w = libpysal.weights.DistanceBand(points,threshold=15)
     >>> w.transform = "B"
     Preparing a variable
@@ -106,7 +106,7 @@ def __init__(self, y, w, permutations=PERMUTATIONS):
         y = y.reshape(len(y), 1)  # Ensure that y is an n by 1 vector, otherwise y*y.T == y*y
         self.den_sum = (y * y.T).sum() - (y * y).sum()
         self.G = self.__calc(self.y)
-        self.z_norm = (self.G - self.EG) / math.sqrt(self.VG)
+        self.z_norm = (self.G - self.EG) / np.sqrt(self.VG)
         self.p_norm = 1.0 - stats.norm.cdf(np.abs(self.z_norm))
         if permutations:
@@ -278,7 +278,7 @@ class G_Local(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> import numpy
     >>> numpy.random.seed(10)
@@ -288,7 +288,7 @@ class G_Local(object):
     Creating a weights object from points
-    >>> w = lps.DistanceBand(points,threshold=15)
+    >>> w = libpysal.weights.DistanceBand(points,threshold=15)
     Prepareing a variable
diff --git a/esda/join_counts.py b/esda/join_counts.py
index 0d36e065..330afab0 100644
--- a/esda/join_counts.py
+++ b/esda/join_counts.py
@@ -79,8 +79,8 @@ class Join_Counts(object):
     Replicate example from anselin and rey
     >>> import numpy as np
-    >>> import libpysal.api as lps
-    >>> w = lps.lat2W(4, 4)
+    >>> import libpysal
+    >>> w = libpysal.weights.lat2W(4, 4)
     >>> y = np.ones(16)
     >>> y[0:8] = 0
     >>> np.random.seed(12345)
diff --git a/esda/moran.py b/esda/moran.py
index 32c67b54..5be1ee27 100644
--- a/esda/moran.py
+++ b/esda/moran.py
@@ -98,9 +98,9 @@ class Moran(object):
     Examples
     --------
-    >>> import libpysal.api as lps
-    >>> w = lps.open(lps.get_path("stl.gal")).read()
-    >>> f = lps.open(lps.get_path("stl_hom.txt"))
+    >>> import libpysal
+    >>> w = libpysal.io.open(libpysal.examples.get_path("stl.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("stl_hom.txt"))
     >>> y = np.array(f.by_col['HR8893'])
     >>> from esda.moran import Moran
     >>> mi = Moran(y, w)
@@ -112,8 +112,8 @@ class Moran(object):
     0.00027147862770937614
     SIDS example replicating OpenGeoda
-    >>> w = lps.open(lps.get_path("sids2.gal")).read()
-    >>> f = lps.open(lps.get_path("sids2.dbf"))
+    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
     >>> SIDR = np.array(f.by_col("SIDR74"))
     >>> mi = Moran(SIDR, w)
     >>> round(mi.I, 3)
@@ -327,7 +327,7 @@ class Moran_BV(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> import numpy as np
     Set random number generator seed so we can replicate the example
@@ -337,13 +337,13 @@ class Moran_BV(object):
     Open the sudden infant death dbf file and read in rates for 74 and 79
     converting each to a numpy array
-    >>> f = lps.open(lps.get_path("sids2.dbf"))
+    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
     >>> SIDR74 = np.array(f.by_col['SIDR74'])
     >>> SIDR79 = np.array(f.by_col['SIDR79'])
     Read a GAL file and construct our spatial weights object
-    >>> w = lps.open(lps.get_path("sids2.gal")).read()
+    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
     Create an instance of Moran_BV
     >>> from esda.moran import Moran_BV
@@ -481,8 +481,8 @@ def Moran_BV_matrix(variables, w, permutations=0, varnames=None):
     open dbf
-    >>> import libpysal.api as lps
-    >>> f = lps.open(lps.get_path("sids2.dbf"))
+    >>> import libpysal
+    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
     pull of selected variables from dbf and create numpy arrays for each
@@ -491,7 +491,7 @@ def Moran_BV_matrix(variables, w, permutations=0, varnames=None):
     create a contiguity matrix from an external gal file
-    >>> w = lps.open(lps.get_path("sids2.gal")).read()
+    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
     create an instance of Moran_BV_matrix
@@ -612,9 +612,9 @@ class Moran_Rate(Moran):
     Examples
     --------
-    >>> import libpysal.api as lps
-    >>> w = lps.open(lps.get_path("sids2.gal")).read()
-    >>> f = lps.open(lps.get_path("sids2.dbf"))
+    >>> import libpysal
+    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
     >>> e = np.array(f.by_col('SID79'))
     >>> b = np.array(f.by_col('BIR79'))
     >>> from esda.moran import Moran_Rate
@@ -788,11 +788,11 @@ class Moran_Local(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> import numpy as np
     >>> np.random.seed(10)
-    >>> w = lps.open(lps.get_path("desmith.gal")).read()
-    >>> f = lps.open(lps.get_path("desmith.txt"))
+    >>> w = libpysal.io.open(libpysal.examples.get_path("desmith.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("desmith.txt"))
     >>> y = np.array(f.by_col['z'])
     >>> from esda.moran import Moran_Local
     >>> lm = Moran_Local(y, w, transformation = "r", permutations = 99)
@@ -1020,11 +1020,11 @@ class Moran_Local_BV(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> import numpy as np
     >>> np.random.seed(10)
-    >>> w = lps.open(lps.get_path("sids2.gal")).read()
-    >>> f = lps.open(lps.get_path("sids2.dbf"))
+    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
     >>> x = np.array(f.by_col['SIDR79'])
     >>> y = np.array(f.by_col['SIDR74'])
     >>> from esda.moran import Moran_Local_BV
@@ -1265,11 +1265,11 @@ class Moran_Local_Rate(Moran_Local):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     >>> import numpy as np
     >>> np.random.seed(10)
-    >>> w = lps.open(lps.get_path("sids2.gal")).read()
-    >>> f = lps.open(lps.get_path("sids2.dbf"))
+    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
     >>> e = np.array(f.by_col('SID79'))
     >>> b = np.array(f.by_col('BIR79'))
     >>> from esda.moran import Moran_Local_Rate
diff --git a/esda/smoothing.py b/esda/smoothing.py
index 0ba914bf..e0777cb2 100644
--- a/esda/smoothing.py
+++ b/esda/smoothing.py
@@ -645,8 +645,8 @@ class Excess_Risk(_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     The 11th and 14th columns in stl_hom.csv includes the number of homocides
     and population. Creating two arrays from these columns.
@@ -700,8 +700,8 @@ class Empirical_Bayes(_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     The 11th and 14th columns in stl_hom.csv includes the number of homocides
     and population. Creating two arrays from these columns.
@@ -844,8 +844,8 @@ class Spatial_Empirical_Bayes(_Spatial_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     The 11th and 14th columns in stl_hom.csv includes the number of homocides
     and population. Creating two arrays from these columns.
@@ -854,7 +854,7 @@ class Spatial_Empirical_Bayes(_Spatial_Smoother):
     Creating a spatial weights instance by reading in stl.gal file.
-    >>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
+    >>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()
     Ensuring that the elements in the spatial weights instance are ordered
     by the given sequential numbers from 1 to the number of observations in stl_hom.csv
@@ -924,8 +924,8 @@ class Spatial_Rate(_Spatial_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     The 11th and 14th columns in stl_hom.csv includes the number of homocides
     and population. Creating two arrays from these columns.
@@ -934,7 +934,7 @@ class Spatial_Rate(_Spatial_Smoother):
     Creating a spatial weights instance by reading in stl.gal file.
-    >>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
+    >>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()
     Ensuring that the elements in the spatial weights instance are ordered
     by the given sequential numbers from 1 to the number of observations in stl_hom.csv
@@ -1229,8 +1229,8 @@ class Disk_Smoother(_Spatial_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     The 11th and 14th columns in stl_hom.csv includes the number of homocides
     and population. Creating two arrays from these columns.
@@ -1239,7 +1239,7 @@ class Disk_Smoother(_Spatial_Smoother):
     Creating a spatial weights instance by reading in stl.gal file.
-    >>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
+    >>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()
     Ensuring that the elements in the spatial weights instance are ordered
     by the given sequential numbers from 1 to the number of observations in stl_hom.csv
@@ -1308,8 +1308,8 @@ class Spatial_Median_Rate(_Spatial_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     The 11th and 14th columns in stl_hom.csv includes the number of homocides
     and population. Creating two arrays from these columns.
@@ -1318,7 +1318,7 @@ class Spatial_Median_Rate(_Spatial_Smoother):
     Creating a spatial weights instance by reading in stl.gal file.
-    >>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
+    >>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()
     Ensuring that the elements in the spatial weights instance are ordered
     by the given sequential numbers from 1 to the number of observations in stl_hom.csv
@@ -1437,8 +1437,8 @@ class Spatial_Filtering(_Smoother):
     Reading data in stl_hom.csv into stl to extract values for event and
     population-at-risk variables
-    >>> import libpysal.api as lps
-    >>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
+    >>> import libpysal
+    >>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
     Reading the stl data in the WKT format so that we can easily extract
     polygon centroids
@@ -1613,12 +1613,12 @@ class Headbanging_Triples(object):
     importing k-nearest neighbor weights creator
-    >>> import libpysal.api as lps
+    >>> import libpysal
     Reading data in stl_hom.csv into stl_db to extract values for event and
     population-at-risk variables
-    >>> stl_db = lps.open(lps.get_path('stl_hom.csv'),'r')
+    >>> stl_db = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'),'r')
     Reading the stl data in the WKT format so that we can easily extract
     polygon centroids
@@ -1633,7 +1633,7 @@ class Headbanging_Triples(object):
     Using the centroids, we create a 5-nearst neighbor weights
-    >>> w = lps.knnW_from_array(d,k=5)
+    >>> w = libpysal.weights.KNN(d,k=5)
     Ensuring that the elements in the spatial weights instance are ordered
     by the order of stl_db's IDs
@@ -1655,15 +1655,15 @@ class Headbanging_Triples(object):
     Opening sids2.shp file
-    >>> import libpysal.api as lps
-    >>> sids = lps.open(lps.get_path('sids2.shp'),'r')
+    >>> import libpysal
+    >>> sids = libpysal.io.open(libpysal.examples.get_path('sids2.shp'),'r')
     Extracting the centroids of polygons in the sids data
     >>> sids_d = np.array([i.centroid for i in sids])
     Creating a 5-nearest neighbors weights from the sids centroids
-    >>> sids_w = lps.knnW_from_array(sids_d,k=5)
+    >>> sids_w = libpysal.weights.KNN(sids_d,k=5)
     Ensuring that the members in sids_w are ordered by the order of sids_d's ID
@@ -1796,11 +1796,11 @@ class Headbanging_Median_Rate(object):
     Examples
     --------
-    >>> import libpysal.api as lps
+    >>> import libpysal
     opening the sids2 shapefile
-    >>> sids = lps.open(lps.get_path('sids2.shp'), 'r')
+    >>> sids = libpysal.io.open(libpysal.examples.get_path('sids2.shp'), 'r')
     extracting the centroids of polygons in the sids2 data
     >>> sids_d = np.array([i.centroid for i in sids])
@@ -1808,7 +1808,7 @@ class Headbanging_Median_Rate(object):
     creating a 5-nearest neighbors weights from the centroids
-    >>> sids_w = lps.knnW_from_array(sids_d,k=5)
+    >>> sids_w = libpysal.weights.KNN(sids_d,k=5)
     ensuring that the members in sids_w are ordered
@@ -1823,7 +1823,7 @@ class Headbanging_Median_Rate(object):
     reading in the sids2 data table
-    >>> sids_db = lps.open(lps.get_path('sids2.dbf'), 'r')
+    >>> sids_db = libpysal.io.open(libpysal.examples.get_path('sids2.dbf'), 'r')
     extracting the 10th and 9th columns in the sids2.dbf and using data values
     as event and population-at-risk variables
diff --git a/esda/tests/test_geary.py b/esda/tests/test_geary.py
index 67ea93bf..cfd7bdd1 100644
--- a/esda/tests/test_geary.py
+++ b/esda/tests/test_geary.py
@@ -1,7 +1,7 @@
 """Geary Unittest."""
 import unittest
-from libpysal import open as popen
+from libpysal.io import open as popen
 from libpysal import examples
 from libpysal.common import pandas
diff --git a/esda/tests/test_mixture_smoothing.py b/esda/tests/test_mixture_smoothing.py
index 5d4c2543..e9806315 100644
--- a/esda/tests/test_mixture_smoothing.py
+++ b/esda/tests/test_mixture_smoothing.py
@@ -1,6 +1,6 @@
 import unittest
 import numpy as np
-import libpysal as pysal
+import libpysal
 from .. import mixture_smoothing as m_s
diff --git a/esda/tests/test_moran.py b/esda/tests/test_moran.py
index 055c9106..992ec71d 100644
--- a/esda/tests/test_moran.py
+++ b/esda/tests/test_moran.py
@@ -1,5 +1,5 @@
 import unittest
-import libpysal as pysal
+import libpysal
 from libpysal.common import pandas, RTOL, ATOL
 from .. import moran
 import numpy as np
@@ -9,8 +9,8 @@ class Moran_Tester(unittest.TestCase):
     def setUp(self):
-        self.w = pysal.open(pysal.examples.get_path("stl.gal")).read()
-        f = pysal.open(pysal.examples.get_path("stl_hom.txt"))
+        self.w = libpysal.io.open(libpysal.examples.get_path("stl.gal")).read()
+        f = libpysal.io.open(libpysal.examples.get_path("stl_hom.txt"))
         self.y = np.array(f.by_col['HR8893'])
     def test_moran(self):
@@ -19,8 +19,8 @@ def test_moran(self):
         self.assertAlmostEqual(mi.p_norm, 0.00013573931385468807)
     def test_sids(self):
-        w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
-        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+        w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+        f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
         SIDR = np.array(f.by_col("SIDR74"))
         mi = moran.Moran(SIDR, w, two_tailed=False)
         np.testing.assert_allclose(mi.I, 0.24772519320480135, atol=ATOL, rtol=RTOL)
@@ -28,7 +28,7 @@ def test_sids(self):
     def test_variance(self):
         y = np.arange(1, 10)
-        w = pysal.weights.util.lat2W(3, 3)
+        w = libpysal.weights.util.lat2W(3, 3)
         mi = moran.Moran(y, w, transformation='B')
         np.testing.assert_allclose(mi.VI_rand, 0.059687500000000004, atol=ATOL, rtol=RTOL)
         np.testing.assert_allclose(mi.VI_norm, 0.053125000000000006, atol=ATOL, rtol=RTOL)
@@ -44,19 +44,20 @@ def test_z_consistency(self):
     @unittest.skipIf(PANDAS_EXTINCT, 'missing pandas')
     def test_by_col(self):
         from libpysal.io import geotable as pdio
-        df = pdio.read_files(pysal.examples.get_path('sids2.dbf'))
-        w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
+        np.random.seed(11213)
+        df = pdio.read_files(libpysal.examples.get_path('sids2.dbf'))
+        w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
         mi = moran.Moran.by_col(df, ['SIDR74'], w=w, two_tailed=False)
-        sidr = np.unique(mi.SIDR74_moran.values)
-        pval = np.unique(mi.SIDR74_p_sim.values)
+        sidr = np.unique(mi.SIDR74_moran.values).item()
+        pval = np.unique(mi.SIDR74_p_sim.values).item()
         np.testing.assert_allclose(sidr, 0.24772519320480135, atol=ATOL, rtol=RTOL)
         self.assertAlmostEqual(pval, 0.001)
 class Moran_Rate_Tester(unittest.TestCase):
     def setUp(self):
-        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
-        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+        self.w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+        f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
         self.e = np.array(f.by_col['SID79'])
         self.b = np.array(f.by_col['BIR79'])
@@ -68,23 +69,24 @@ def test_moran_rate(self):
     @unittest.skipIf(PANDAS_EXTINCT, 'missing pandas')
     def test_by_col(self):
         from libpysal.io import geotable as pdio
-        df = pdio.read_files(pysal.examples.get_path('sids2.dbf'))
+        np.random.seed(11213)
+        df = pdio.read_files(libpysal.examples.get_path('sids2.dbf'))
         mi = moran.Moran_Rate.by_col(df, ['SID79'], ['BIR79'], w=self.w, two_tailed=False)
-        sidr = np.unique(mi["SID79-BIR79_moran_rate"].values)
-        pval = np.unique(mi["SID79-BIR79_p_sim"].values)
+        sidr = np.unique(mi["SID79-BIR79_moran_rate"].values).item()
+        pval = np.unique(mi["SID79-BIR79_p_sim"].values).item()
         np.testing.assert_allclose(sidr, 0.16622343552567395, rtol=RTOL, atol=ATOL)
-        self.assertAlmostEqual(pval, 0.009)
+        self.assertAlmostEqual(pval, 0.008)
 class Moran_BV_matrix_Tester(unittest.TestCase):
     def setUp(self):
-        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+        f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
         varnames = ['SIDR74', 'SIDR79', 'NWR74', 'NWR79']
         self.names = varnames
         vars = [np.array(f.by_col[var]) for var in varnames]
         self.vars = vars
-        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
+        self.w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
     def test_Moran_BV_matrix(self):
         res = moran.Moran_BV_matrix(self.vars, self.w, varnames=self.names)
@@ -94,8 +96,8 @@ def test_Moran_BV_matrix(self):
 class Moran_Local_Tester(unittest.TestCase):
     def setUp(self):
         np.random.seed(10)
-        self.w = pysal.open(pysal.examples.get_path("desmith.gal")).read()
-        f = pysal.open(pysal.examples.get_path("desmith.txt"))
+        self.w = libpysal.io.open(libpysal.examples.get_path("desmith.gal")).read()
+        f = libpysal.io.open(libpysal.examples.get_path("desmith.txt"))
         self.y = np.array(f.by_col['z'])
     def test_Moran_Local(self):
@@ -117,8 +119,8 @@ def test_by_col(self):
 class Moran_Local_BV_Tester(unittest.TestCase):
     def setUp(self):
         np.random.seed(10)
-        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
-        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+        self.w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+        f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
         self.x = np.array(f.by_col['SIDR79'])
         self.y = np.array(f.by_col['SIDR74'])
     def test_Moran_Local_BV(self):
@@ -132,7 +134,7 @@ def test_Moran_Local_BV(self):
     @unittest.skipIf(PANDAS_EXTINCT, 'missing pandas')
     def test_by_col(self):
         from libpysal.io import geotable as pdio
-        df = pdio.read_files(pysal.examples.get_path('sids2.dbf'))
+        df = pdio.read_files(libpysal.examples.get_path('sids2.dbf'))
         np.random.seed(12345)
         moran.Moran_Local_BV.by_col(df, ['SIDR74', 'SIDR79'], w=self.w, inplace=True, outvals=['z_sim', 'p_z_sim'],
@@ -148,8 +150,8 @@ def test_by_col(self):
 class Moran_Local_Rate_Tester(unittest.TestCase):
     def setUp(self):
         np.random.seed(10)
-        self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
-        f = pysal.open(pysal.examples.get_path("sids2.dbf"))
+        self.w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
+        f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
         self.e = np.array(f.by_col['SID79'])
         self.b = np.array(f.by_col['BIR79'])
@@ -162,7 +164,7 @@ def test_moran_rate(self):
     @unittest.skipIf(PANDAS_EXTINCT, 'missing pandas')
    def test_by_col(self):
         from libpysal.io import geotable as pdio
-        df = pdio.read_files(pysal.examples.get_path('sids2.dbf'))
+        df = pdio.read_files(libpysal.examples.get_path('sids2.dbf'))
         lm = moran.Moran_Local_Rate.by_col(df, ['SID79'], ['BIR79'], w=self.w, outvals=['p_z_sim', 'z_sim'], transformation='r', permutations=99)
diff --git a/esda/tests/test_smoothing.py b/esda/tests/test_smoothing.py
index 53c54977..ae7a34da 100644
--- a/esda/tests/test_smoothing.py
+++ b/esda/tests/test_smoothing.py
@@ -1,5 +1,5 @@
 import unittest
-import libpysal as pysal
+import libpysal
 from libpysal.weights.Distance import KNN, Kernel
 from .. import smoothing as sm
 import numpy as np
@@ -62,8 +62,8 @@ def test_indirect_age_standardization(self):
 class TestSRate(unittest.TestCase):
     def setUp(self):
-        sids = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')
-        self.w = pysal.open(pysal.examples.get_path('sids2.gal'), 'r').read()
+        sids = libpysal.io.open(libpysal.examples.get_path('sids2.dbf'), 'r')
+        self.w = libpysal.io.open(libpysal.examples.get_path('sids2.gal'), 'r').read()
         self.b, self.e = np.array(sids[:, 8]), np.array(sids[:, 9])
         self.er = [0.453433, 0.000000, 0.775871, 0.973810, 3.133190]
         self.eb = [0.0016973, 0.0017054, 0.0017731, 0.0020129, 0.0035349]
@@ -76,17 +76,17 @@ def setUp(self):
             3.69333797e-05, 5.40245456e-05, 2.99806055e-05, 3.73034109e-05, 3.47270722e-05]).reshape(-1,1)
-        self.stl = pysal.open(pysal.examples.get_path('stl_hom.csv'), 'r')
+        self.stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')
         self.stl_e, self.stl_b = np.array(self.stl[:, 10]), np.array(self.stl[:, 13])
-        self.stl_w = pysal.open(pysal.examples.get_path('stl.gal'), 'r').read()
+        self.stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()
         if not self.stl_w.id_order_set:
             self.stl_w.id_order = list(range(1, len(self.stl) + 1))
         if not PANDAS_EXTINCT:
-            self.df = pysal.open(pysal.examples.get_path('sids2.dbf')).to_df()
+            self.df = libpysal.io.open(libpysal.examples.get_path('sids2.dbf')).to_df()
             self.ename = 'SID74'
             self.bname = 'BIR74'
-            self.stl_df = pysal.open(pysal.examples.get_path('stl_hom.csv')).to_df()
+            self.stl_df = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv')).to_df()
             self.stl_ename = 'HC7984'
             self.stl_bname = 'PO7984'
@@ -259,13 +259,13 @@ def test_Smoother_multicol(self):
 class TestHB(unittest.TestCase):
     def setUp(self):
-        sids = pysal.open(pysal.examples.get_path('sids2.shp'), 'r')
+        sids = libpysal.io.open(libpysal.examples.get_path('sids2.shp'), 'r')
         self.sids = sids
         self.d = np.array([i.centroid for i in sids])
         self.w = KNN.from_array(self.d, k=5)
         if not self.w.id_order_set:
             self.w.id_order = self.w.id_order
-        sids_db = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')
+        sids_db = libpysal.io.open(libpysal.examples.get_path('sids2.dbf'), 'r')
         self.b, self.e = np.array(sids_db[:, 8]), np.array(sids_db[:, 9])
         self.sids_hb_rr5 = np.array([0.00075586, 0., 0.0008285, 0.0018315, 0.00498891])
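Note on the API migration shown above: every docstring and test now goes through the explicit libpysal submodules instead of the flat libpysal.api namespace (file readers under libpysal.io, bundled datasets under libpysal.examples, weights builders under libpysal.weights, with KNN replacing the old knnW_from_array helper). Below is a minimal doctest-style sketch of the new call pattern, using only calls that appear in this diff; the variable names are illustrative and not part of the change set.

    >>> import libpysal
    >>> import numpy as np
    >>> # readers and example dataset paths now live in explicit submodules
    >>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
    >>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
    >>> y = np.array(f.by_col["SIDR74"])
    >>> # weights constructors moved from the flat lps.* names to libpysal.weights.*
    >>> w_lat = libpysal.weights.lat2W(4, 4)
    >>> points = [(10, 10), (20, 10), (40, 10), (15, 20), (30, 20), (30, 30)]
    >>> w_band = libpysal.weights.DistanceBand(points, threshold=15)
    >>> w_knn = libpysal.weights.KNN(np.array(points), k=3)

The Travis change follows the same split: PYSAL_PYPI toggles between the released libpysal on PyPI and a fresh git checkout, PYSAL_PLUS additionally installs numba, and the PYSAL_PYPI=false combinations are listed under allow_failures.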