Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 26 additions & 10 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,26 @@ sudo: false
branches:
only:
- master
python:
- "3.5"
- "3.6"

env:
- PYSAL_PLUS=false
- PYSAL_PLUS=true
python:
- 3.5
- 3.6
env:
- PYSAL_PYPI=true PYSAL_PLUS=true
- PYSAL_PYPI=true PYSAL_PLUS=false
- PYSAL_PYPI=false PYSAL_PLUS=true
- PYSAL_PYPI=false PYSAL_PLUS=false

matrix:
allow_failures:
- python: 3.5
env: PYSAL_PYPI=false PYSAL_PLUS=false
- python: 3.5
env: PYSAL_PYPI=false PYSAL_PLUS=true
- python: 3.6
env: PYSAL_PYPI=false PYSAL_PLUS=false
- python: 3.6
env: PYSAL_PYPI=false PYSAL_PLUS=true

before_install:
- wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
Expand All @@ -23,9 +36,12 @@ before_install:
install:
- conda install --yes pip nose
- which pip
- if "$PYSAL_PYPI"; then
echo 'testing pypi libpysal' && pip install libpysal;
else echo 'testing git libpysal'; git clone https://github.com/pysal/libpysal.git; cd libpysal; pip install .; cd ../;
fi;
- conda install --yes --file requirements.txt;
- pip install libpysal
- if [[ PYSAL_PLUS ]]; then conda install --yes numba; fi
- if "$PYSAL_PLUS"; then conda install --yes numba; fi

script:
- pwd
Expand All @@ -38,8 +54,8 @@ notifications:
recipients:
- [email protected]
- [email protected]
on_change: always
on_failure: always
on_success: change
on_failure: change

after_success:
- coveralls
2 changes: 1 addition & 1 deletion esda/gamma.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ class Gamma(object):

use same example as for join counts to show similarity

>>> import libpysal.api as lps, numpy as np
>>> import libpysal, numpy as np
>>> from esda.gamma import Gamma
>>> w = libpysal.weights.lat2W(4,4)
>>> y=np.ones(16)
Expand Down
6 changes: 3 additions & 3 deletions esda/geary.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,10 +80,10 @@ class Geary(object):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> from esda.geary import Geary
>>> w = lps.open(lps.get_path("book.gal")).read()
>>> f = lps.open(lps.get_path("book.txt"))
>>> w = libpysal.io.open(libpysal.examples.get_path("book.gal")).read()
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

s/liblibpysal/libpysal/g

>>> f = libpysal.io.open(libpysal.examples.get_path("book.txt"))
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

s/liblibpysal/libpysal/g

>>> y = np.array(f.by_col['y'])
>>> c = Geary(y,w,permutations=0)
>>> round(c.C,7)
Expand Down
4 changes: 2 additions & 2 deletions esda/getisord.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ class G(object):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> import numpy
>>> numpy.random.seed(10)

Expand Down Expand Up @@ -278,7 +278,7 @@ class G_Local(object):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> import numpy
>>> numpy.random.seed(10)

Expand Down
2 changes: 1 addition & 1 deletion esda/join_counts.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ class Join_Counts(object):
Replicate example from anselin and rey

>>> import numpy as np
>>> import libpysal.api as lps
>>> import libpysal
>>> w = libpysal.weights.lat2W(4, 4)
>>> y = np.ones(16)
>>> y[0:8] = 0
Expand Down
46 changes: 23 additions & 23 deletions esda/moran.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,9 +98,9 @@ class Moran(object):

Examples
--------
>>> import libpysal.api as lps
>>> w = lps.open(lps.get_path("stl.gal")).read()
>>> f = lps.open(lps.get_path("stl_hom.txt"))
>>> import libpysal
>>> w = libpysal.io.open(libpysal.examples.get_path("stl.gal")).read()
>>> f = libpysal.io.open(libpysal.examples.get_path("stl_hom.txt"))
>>> y = np.array(f.by_col['HR8893'])
>>> from esda.moran import Moran
>>> mi = Moran(y, w)
Expand All @@ -112,8 +112,8 @@ class Moran(object):
0.00027147862770937614

SIDS example replicating OpenGeoda
>>> w = lps.open(lps.get_path("sids2.gal")).read()
>>> f = lps.open(lps.get_path("sids2.dbf"))
>>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
>>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
>>> SIDR = np.array(f.by_col("SIDR74"))
>>> mi = Moran(SIDR, w)
>>> round(mi.I, 3)
Expand Down Expand Up @@ -327,7 +327,7 @@ class Moran_BV(object):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> import numpy as np

Set random number generator seed so we can replicate the example
Expand All @@ -337,13 +337,13 @@ class Moran_BV(object):
Open the sudden infant death dbf file and read in rates for 74 and 79
converting each to a numpy array

>>> f = lps.open(lps.get_path("sids2.dbf"))
>>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
>>> SIDR74 = np.array(f.by_col['SIDR74'])
>>> SIDR79 = np.array(f.by_col['SIDR79'])

Read a GAL file and construct our spatial weights object

>>> w = lps.open(lps.get_path("sids2.gal")).read()
>>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()

Create an instance of Moran_BV
>>> from esda.moran import Moran_BV
Expand Down Expand Up @@ -481,8 +481,8 @@ def Moran_BV_matrix(variables, w, permutations=0, varnames=None):

open dbf

>>> import libpysal.api as lps
>>> f = lps.open(lps.get_path("sids2.dbf"))
>>> import libpysal
>>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))

pull of selected variables from dbf and create numpy arrays for each

Expand All @@ -491,7 +491,7 @@ def Moran_BV_matrix(variables, w, permutations=0, varnames=None):

create a contiguity matrix from an external gal file

>>> w = lps.open(lps.get_path("sids2.gal")).read()
>>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()

create an instance of Moran_BV_matrix

Expand Down Expand Up @@ -612,9 +612,9 @@ class Moran_Rate(Moran):

Examples
--------
>>> import libpysal.api as lps
>>> w = lps.open(lps.get_path("sids2.gal")).read()
>>> f = lps.open(lps.get_path("sids2.dbf"))
>>> import libpysal
>>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
>>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
>>> e = np.array(f.by_col('SID79'))
>>> b = np.array(f.by_col('BIR79'))
>>> from esda.moran import Moran_Rate
Expand Down Expand Up @@ -788,11 +788,11 @@ class Moran_Local(object):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> import numpy as np
>>> np.random.seed(10)
>>> w = lps.open(lps.get_path("desmith.gal")).read()
>>> f = lps.open(lps.get_path("desmith.txt"))
>>> w = libpysal.io.open(libpysal.examples.get_path("desmith.gal")).read()
>>> f = libpysal.io.open(libpysal.examples.get_path("desmith.txt"))
>>> y = np.array(f.by_col['z'])
>>> from esda.moran import Moran_Local
>>> lm = Moran_Local(y, w, transformation = "r", permutations = 99)
Expand Down Expand Up @@ -1020,11 +1020,11 @@ class Moran_Local_BV(object):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> import numpy as np
>>> np.random.seed(10)
>>> w = lps.open(lps.get_path("sids2.gal")).read()
>>> f = lps.open(lps.get_path("sids2.dbf"))
>>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
>>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
>>> x = np.array(f.by_col['SIDR79'])
>>> y = np.array(f.by_col['SIDR74'])
>>> from esda.moran import Moran_Local_BV
Expand Down Expand Up @@ -1265,11 +1265,11 @@ class Moran_Local_Rate(Moran_Local):

Examples
--------
>>> import libpysal.api as lps
>>> import libpysal
>>> import numpy as np
>>> np.random.seed(10)
>>> w = lps.open(lps.get_path("sids2.gal")).read()
>>> f = lps.open(lps.get_path("sids2.dbf"))
>>> w = libpysal.io.open(libpysal.examples.get_path("sids2.gal")).read()
>>> f = libpysal.io.open(libpysal.examples.get_path("sids2.dbf"))
>>> e = np.array(f.by_col('SID79'))
>>> b = np.array(f.by_col('BIR79'))
>>> from esda.moran import Moran_Local_Rate
Expand Down
50 changes: 25 additions & 25 deletions esda/smoothing.py
Original file line number Diff line number Diff line change
Expand Up @@ -645,8 +645,8 @@ class Excess_Risk(_Smoother):

Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables
>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
Creating two arrays from these columns.
Expand Down Expand Up @@ -700,8 +700,8 @@ class Empirical_Bayes(_Smoother):
Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables

>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
Creating two arrays from these columns.
Expand Down Expand Up @@ -844,8 +844,8 @@ class Spatial_Empirical_Bayes(_Spatial_Smoother):
Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables

>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
Creating two arrays from these columns.
Expand All @@ -854,7 +854,7 @@ class Spatial_Empirical_Bayes(_Spatial_Smoother):

Creating a spatial weights instance by reading in stl.gal file.

>>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
>>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()

Ensuring that the elements in the spatial weights instance are ordered
by the given sequential numbers from 1 to the number of observations in stl_hom.csv
Expand Down Expand Up @@ -924,8 +924,8 @@ class Spatial_Rate(_Spatial_Smoother):
Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables

>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
Creating two arrays from these columns.
Expand All @@ -934,7 +934,7 @@ class Spatial_Rate(_Spatial_Smoother):

Creating a spatial weights instance by reading in stl.gal file.

>>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
>>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()

Ensuring that the elements in the spatial weights instance are ordered
by the given sequential numbers from 1 to the number of observations in stl_hom.csv
Expand Down Expand Up @@ -1229,8 +1229,8 @@ class Disk_Smoother(_Spatial_Smoother):
Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables

>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
Creating two arrays from these columns.
Expand All @@ -1239,7 +1239,7 @@ class Disk_Smoother(_Spatial_Smoother):

Creating a spatial weights instance by reading in stl.gal file.

>>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
>>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()

Ensuring that the elements in the spatial weights instance are ordered
by the given sequential numbers from 1 to the number of observations in stl_hom.csv
Expand Down Expand Up @@ -1308,8 +1308,8 @@ class Spatial_Median_Rate(_Spatial_Smoother):
Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables

>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

The 11th and 14th columns in stl_hom.csv include the number of homicides and population.
Creating two arrays from these columns.
Expand All @@ -1318,7 +1318,7 @@ class Spatial_Median_Rate(_Spatial_Smoother):

Creating a spatial weights instance by reading in stl.gal file.

>>> stl_w = lps.open(lps.get_path('stl.gal'), 'r').read()
>>> stl_w = libpysal.io.open(libpysal.examples.get_path('stl.gal'), 'r').read()

Ensuring that the elements in the spatial weights instance are ordered
by the given sequential numbers from 1 to the number of observations in stl_hom.csv
Expand Down Expand Up @@ -1437,8 +1437,8 @@ class Spatial_Filtering(_Smoother):
Reading data in stl_hom.csv into stl to extract values
for event and population-at-risk variables

>>> import libpysal.api as lps
>>> stl = lps.open(lps.get_path('stl_hom.csv'), 'r')
>>> import libpysal
>>> stl = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'), 'r')

Reading the stl data in the WKT format so that
we can easily extract polygon centroids
Expand Down Expand Up @@ -1613,12 +1613,12 @@ class Headbanging_Triples(object):

importing k-nearest neighbor weights creator

>>> import libpysal.api as lps
>>> import libpysal

Reading data in stl_hom.csv into stl_db to extract values
for event and population-at-risk variables

>>> stl_db = lps.open(lps.get_path('stl_hom.csv'),'r')
>>> stl_db = libpysal.io.open(libpysal.examples.get_path('stl_hom.csv'),'r')

Reading the stl data in the WKT format so that
we can easily extract polygon centroids
Expand Down Expand Up @@ -1655,8 +1655,8 @@ class Headbanging_Triples(object):

Opening sids2.shp file

>>> import libpysal.api as lps
>>> sids = lps.open(lps.get_path('sids2.shp'),'r')
>>> import libpysal
>>> sids = libpysal.io.open(libpysal.examples.get_path('sids2.shp'),'r')

Extracting the centroids of polygons in the sids data

Expand Down Expand Up @@ -1796,11 +1796,11 @@ class Headbanging_Median_Rate(object):
Examples
--------

>>> import libpysal.api as lps
>>> import libpysal

opening the sids2 shapefile

>>> sids = lps.open(lps.get_path('sids2.shp'), 'r')
>>> sids = libpysal.io.open(libpysal.examples.get_path('sids2.shp'), 'r')

extracting the centroids of polygons in the sids2 data

Expand All @@ -1823,7 +1823,7 @@ class Headbanging_Median_Rate(object):

reading in the sids2 data table

>>> sids_db = lps.open(lps.get_path('sids2.dbf'), 'r')
>>> sids_db = libpysal.io.open(libpysal.examples.get_path('sids2.dbf'), 'r')

extracting the 10th and 9th columns in the sids2.dbf and
using data values as event and population-at-risk variables
Expand Down
Loading