Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
files: "spint\/"
files: "spint\/|notebooks\/"
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.15.2"
Expand Down
41 changes: 16 additions & 25 deletions notebooks/4d_distance.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,11 @@
"outputs": [],
"source": [
"import numpy as np\n",
"from scipy.spatial import distance\n",
"import scipy.spatial as spatial\n",
"from pysal.weights import Distance as Distance\n",
"from pysal.weights import W\n",
"from pysal.weights.util import isKDTree\n",
"from pysal.weights import Distance as Distance"
"from scipy.spatial import KDTree, distance"
]
},
{
Expand Down Expand Up @@ -66,6 +66,7 @@
" distance.euclidean(np.array((30, 46, 23, 80)), np.array((30, 46, 23, 80)))\n",
"\n",
"\n",
"\"\"\"\n",
"def distpython():\n",
" dist(np.array((67, 46, 92, 67)), np.array((44, 97, 25, 50)))\n",
" dist(np.array((67, 46, 92, 67)), np.array((84, 37, 66, 53)))\n",
Expand All @@ -83,11 +84,11 @@
" dist(np.array((30, 46, 23, 80)), np.array((44, 97, 25, 50)))\n",
" dist(np.array((30, 46, 23, 80)), np.array((30, 46, 23, 80)))\n",
"\n",
"\n",
"def scpkdtree():\n",
" data = zip(x.ravel(), y.ravel(), w.ravel(), z.ravel())\n",
" data = zip(x.ravel(), y.ravel(), w.ravel(), z.ravel(), strict=True)\n",
" tree = spatial.KDTree(data)\n",
" W = pysal.weights.DistanceBand(tree, threshold=9999, alpha=-1.5, binary=False)"
" W = pysal.weights.DistanceBand(tree, threshold=9999, alpha=-1.5, binary=False)\n",
"\"\"\""
]
},
{
Expand Down Expand Up @@ -122,9 +123,9 @@
"w = np.random.randint(1, 1000, 3000)\n",
"z = np.random.randint(1, 1000, 3000)\n",
"\n",
"data = zip(x.ravel(), y.ravel(), w.ravel(), z.ravel())\n",
"data = zip(x.ravel(), y.ravel(), w.ravel(), z.ravel(), strict=True)\n",
"tree = spatial.KDTree(data)\n",
"W = pysal.weights.DistanceBand(tree, threshold=9999, alpha=-1.5, binary=False)"
"# W = pysal.weights.DistanceBand(tree, threshold=9999, alpha=-1.5, binary=False)"
]
},
{
Expand Down Expand Up @@ -193,8 +194,8 @@
" data = data.astype(float)\n",
" self.data = data\n",
" self.kd = KDTree(self.data)\n",
" except:\n",
" raise ValueError(\"Could not make array from data\")\n",
" except Exception:\n",
" raise ValueError(\"Could not make array from data\") from None\n",
"\n",
" self.p = p\n",
" self.threshold = threshold\n",
Expand All @@ -214,14 +215,11 @@
" self.dmat = np.array(distance_matrix(self.data, self.data))\n",
"\n",
" def _distance_to_W(self, ids=None):\n",
" if ids:\n",
" ids = np.array(ids)\n",
" else:\n",
" ids = np.arange(self.dmat.shape[0])\n",
" neighbors = dict([(i, []) for i in ids])\n",
" weights = dict([(i, []) for i in ids])\n",
" ids = np.array(ids) if ids else np.arange(self.dmat.shape[0])\n",
" neighbors = {i: [] for i in ids}\n",
" weights = {i: [] for i in ids}\n",
" if self.binary:\n",
" for key, weight in self.dmat.items():\n",
" for key, _ in self.dmat.items():\n",
" i, j = key\n",
" if i != j:\n",
" if j not in neighbors[i]:\n",
Expand All @@ -232,7 +230,7 @@
" neighbors[j].append(i)\n",
"\n",
" else:\n",
" weighted = np.array(map(lambda x: pow(x, -1.5), self.dmat))\n",
" weighted = np.array(map(lambda x: pow(x, -1.5), self.dmat)) # noqa: C417 Unnecessary `map()`\n",
" print(weighted.shape)\n",
" rows, cols = self.dmat.shape\n",
" for i in range(rows):\n",
Expand Down Expand Up @@ -264,7 +262,7 @@
"w = np.random.randint(1, 1000, 500)\n",
"z = np.random.randint(1, 1000, 500)\n",
"\n",
"data = zip(x.ravel(), y.ravel(), w.ravel(), z.ravel())\n",
"data = zip(x.ravel(), y.ravel(), w.ravel(), z.ravel(), strict=True)\n",
"tree = spatial.KDTree(data)"
]
},
Expand Down Expand Up @@ -426,13 +424,6 @@
"source": [
"np.allclose(W_new.full()[0], W_old.full()[0])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down
57 changes: 29 additions & 28 deletions notebooks/Example_NYCBikes_AllFeatures.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -11,28 +11,20 @@
},
"outputs": [],
"source": [
"from pysal.contrib.spint.gravity import (\n",
" BaseGravity,\n",
" Gravity,\n",
" Production,\n",
" Attraction,\n",
" Doubly,\n",
")\n",
"from pysal.contrib.spint.dispersion import phi_disp\n",
"from pysal.contrib.spint.vec_SA import VecMoran\n",
"import pysal as ps\n",
"import pandas as pd\n",
"import geopandas as gp\n",
"import numpy as np\n",
"import seaborn as sb\n",
"import matplotlib.pylab as plt\n",
"import numpy as np\n",
"import pandas as pd\n",
"import pysal as ps\n",
"\n",
"%pylab inline\n",
"from descartes import PolygonPatch\n",
"import matplotlib as mpl\n",
"from mpl_toolkits.basemap import Basemap\n",
"import pyproj as pj\n",
"from shapely.geometry import Polygon, Point"
"from spint.dispersion import phi_disp\n",
"from spint.gravity import (\n",
" Attraction,\n",
" Doubly,\n",
" Gravity,\n",
" Production,\n",
")\n",
"from spint.vec_SA import VecMoran"
]
},
{
Expand Down Expand Up @@ -87,7 +79,8 @@
" ]\n",
")\n",
"\n",
"# Destination variables: square footage of buildings, housing units, total station capacity\n",
"# Destination variables:\n",
"# square footage of buildings, housing units, total station capacity\n",
"d_vars = np.hstack(\n",
" [\n",
" bikes[\"d_sq_foot\"].values.reshape((-1, 1)),\n",
Expand Down Expand Up @@ -209,9 +202,11 @@
},
"outputs": [],
"source": [
"# Next, we can test the models for violations of the equidispersion assumption of Poisson models\n",
"# Next, we can test the models for violations of the\n",
"# equidispersion assumption of Poisson models\n",
"\n",
"# test the hypotehsis of equidispersion (var[mu] = mu) against that of QuasiPoisson (var[mu] = phi * mu)\n",
"# test the hypothesis of equidispersion\n",
"# (var[mu] = mu) against that of QuasiPoisson (var[mu] = phi * mu)\n",
"# Results = [phi, tvalue, pvalue]\n",
"print(phi_disp(grav))\n",
"print(phi_disp(prod))\n",
Expand All @@ -235,7 +230,8 @@
},
"outputs": [],
"source": [
"# As a result we can compare our standard errors and tvalues for a Poisson model to a QuasiPoisson\n",
"# As a result we can compare our standard errors\n",
"# and tvalues for a Poisson model to a QuasiPoisson\n",
"\n",
"print(\"Production-constrained Poisson model standard errors and tvalues\")\n",
"print(prod.params[-4:])\n",
Expand Down Expand Up @@ -343,7 +339,8 @@
},
"outputs": [],
"source": [
"# Plot local \"cost\" values: darker blue is stronger distance decay; grey is no data\n",
"# Plot local \"cost\" values:\n",
"# darker blue is stronger distance decay; grey is no data\n",
"\n",
"fig = plt.figure(figsize=(12, 12))\n",
"ax = fig.add_subplot(111)\n",
Expand All @@ -366,7 +363,8 @@
},
"outputs": [],
"source": [
"# Plot local estimates for destination capacity: darker red is larger effect; grey is no data\n",
"# Plot local estimates for destination capacity:\n",
"# darker red is larger effect; grey is no data\n",
"\n",
"fig = plt.figure(figsize=(12, 12))\n",
"ax = fig.add_subplot(111)\n",
Expand All @@ -390,7 +388,8 @@
},
"outputs": [],
"source": [
"# Plot local estimates for # of housing units: darker red is larger effect; grey is no data\n",
"# Plot local estimates for # of housing units:\n",
"# darker red is larger effect; grey is no data\n",
"\n",
"fig = plt.figure(figsize=(12, 12))\n",
"ax = fig.add_subplot(111)\n",
Expand All @@ -414,7 +413,8 @@
},
"outputs": [],
"source": [
"# Plot local estimates for destination building sq footage: darker red is larger effect; grey is no data\n",
"# Plot local estimates for destination building sq footage:\n",
"# darker red is larger effect; grey is no data\n",
"\n",
"fig = plt.figure(figsize=(12, 12))\n",
"ax = fig.add_subplot(111)\n",
Expand Down Expand Up @@ -459,7 +459,8 @@
},
"outputs": [],
"source": [
"# Prep OD data as vectors and then compute origin or destination focused distance-based weights\n",
"# Prep OD data as vectors and then compute\n",
"# origin or destination focused distance-based weights\n",
"\n",
"ids = bikes[\"index\"].reshape((-1, 1))\n",
"origin_x = bikes[\"SX\"].reshape((-1, 1))\n",
Expand Down
26 changes: 3 additions & 23 deletions notebooks/NYC_Bike_Example.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -11,28 +11,11 @@
},
"outputs": [],
"source": [
"import os\n",
"\n",
"os.chdir(\"../\")\n",
"from gravity import Gravity, Production, Attraction, Doubly\n",
"\n",
"os.chdir(\"/Users/toshan/dev/pysal/pysal/contrib/spint\")\n",
"import entropy as grav\n",
"import numpy as np\n",
"import scipy.stats as stats\n",
"import pandas as pd\n",
"import seaborn as sns\n",
"import geopandas as gp\n",
"\n",
"os.chdir(\"/Users/toshan/Dropbox/Data/NYC_BIKES\")\n",
"import matplotlib.pylab as plt\n",
"\n",
"%pylab inline\n",
"from descartes import PolygonPatch\n",
"import matplotlib as mpl\n",
"from mpl_toolkits.basemap import Basemap\n",
"import pyproj as pj\n",
"from shapely.geometry import Polygon, Point"
"import numpy as np\n",
"import pandas as pd\n",
"from gravity import Doubly, Gravity, Production"
]
},
{
Expand Down Expand Up @@ -89,9 +72,6 @@
},
"outputs": [],
"source": [
"os.chdir(\"/Users/toshan/dev/pysal/pysal/contrib/spint\")\n",
"from gravity import Gravity, Production, Attraction, Doubly\n",
"\n",
"model = Gravity(flows, o_vars, d_vars, cost, \"exp\")\n",
"print(model.params)\n",
"print(model.deviance)"
Expand Down
Loading
Loading