MAINT: update required versions, adapt CI, lint for new target Python (#517)

* update CI and reqs

* lint and remove compat

* rm pygeos

* update oldest to match pyproject.toml

* remove test for error that no longer can be raised

* bump min geopandas (due to shapely 2)

* fix zip in COINS

* fix zip in faceartifacts

* try to fix tests

* more approx

* test user guide on 3.12

* user guide deps
martinfleis authored Nov 22, 2023
1 parent 13c78f2 commit b38dfac
Showing 19 changed files with 99 additions and 184 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/tests.yaml
@@ -18,15 +18,15 @@ jobs:
matrix:
os: [ubuntu-latest]
environment-file:
- ci/envs/38-minimal.yaml
- ci/envs/39-latest.yaml
- ci/envs/310-oldest.yaml
- ci/envs/310-latest.yaml
- ci/envs/311-latest.yaml
- ci/envs/311-dev.yaml
- ci/envs/312-latest.yaml
- ci/envs/312-dev.yaml
include:
- environment-file: ci/envs/311-latest.yaml
- environment-file: ci/envs/312-latest.yaml
os: macos-latest
- environment-file: ci/envs/311-latest.yaml
- environment-file: ci/envs/312-latest.yaml
os: windows-latest
defaults:
run:
@@ -48,7 +48,7 @@ jobs:
pytest -v --color yes --cov momepy --cov-append --cov-report term-missing --cov-report xml .
- name: Test user guide
if: contains(matrix.environment-file, '310-latest.yaml') && contains(matrix.os, 'ubuntu')
if: contains(matrix.environment-file, '312-latest.yaml') && contains(matrix.os, 'ubuntu')
run: |
ci/envs/test_user_guide.sh
8 changes: 1 addition & 7 deletions ci/envs/310-latest.yaml
@@ -16,10 +16,4 @@ dependencies:
# testing
- codecov
- pytest
- pytest-cov
# user guide testing
- dask
- inequality
- jupyter
- matplotlib
- osmnx
- pytest-cov
12 changes: 6 additions & 6 deletions ci/envs/38-minimal.yaml → ci/envs/310-oldest.yaml
@@ -2,17 +2,17 @@ name: test
channels:
- conda-forge
dependencies:
- python=3.8
- geopandas=0.8
- python=3.10
- geopandas=0.12
- inequality
- libpysal=4.6.0
- mapclassify
- networkx=2.3
- numpy=1.21
- networkx=2.7
- numpy=1.22
- packaging
- pandas>=0.23.0,!=1.5.0,<2
- pandas>=1.4.0,!=1.5.0,<2
- shapely=2.0
- tqdm=4.27.0
- tqdm=4.63.0
# testing
- codecov
- pytest
2 changes: 1 addition & 1 deletion ci/envs/311-dev.yaml → ci/envs/312-dev.yaml
@@ -2,7 +2,7 @@ name: test
channels:
- conda-forge
dependencies:
- python=3.11
- python=3.12
- dask
- geopandas
- inequality
10 changes: 8 additions & 2 deletions ci/envs/39-latest.yaml → ci/envs/312-latest.yaml
@@ -2,7 +2,7 @@ name: test
channels:
- conda-forge
dependencies:
- python=3.9
- python=3.12
- geopandas
- inequality
- libpysal>=4.6.0
@@ -12,10 +12,16 @@ dependencies:
- packaging
- pandas!=1.5.0
- shapely>=2
- pygeos
- esda
- tqdm
# testing
- codecov
- pytest
- pytest-cov
# user guide testing
- dask
- inequality
- jupyter
- matplotlib
- osmnx

4 changes: 2 additions & 2 deletions momepy/coins.py
@@ -170,7 +170,7 @@ def _get_links(self):

p2.append(item)

self.result = list(zip(range(len(p1)), p1, p2))
self.result = list(zip(range(len(p1)), p1, p2, strict=True))

for a in self.result:
n = a[0]
@@ -371,7 +371,7 @@ def _list_to_tuple(line):
def _list_to_pairs(in_list):
"""Split a line at every point."""
tmp_list = [list(point) for point in in_list]
return [list(pair) for pair in zip(tmp_list, tmp_list[1:])]
return [list(pair) for pair in zip(tmp_list[:-1], tmp_list[1:], strict=True)]


def _compute_angle(point1, point2):
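Both COINS hunks above follow the commit's move to the Python >= 3.10 `zip(..., strict=True)` idiom: with `strict=True` a length mismatch raises `ValueError` instead of silently truncating, which is also why the pairwise zip now slices both operands to equal length. A standalone sketch of that behaviour (plain Python, not momepy code):

```python
pts = [[0, 0], [1, 0], [1, 1]]

# Old pattern: zip(pts, pts[1:]) silently drops the unmatched last element.
pairs_old = list(zip(pts, pts[1:]))  # [([0, 0], [1, 0]), ([1, 0], [1, 1])]

# zip(pts, pts[1:], strict=True) would raise ValueError (3 vs 2 items),
# so both sides are sliced to the same length first.
pairs_new = list(zip(pts[:-1], pts[1:], strict=True))

assert pairs_old == pairs_new
```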
6 changes: 3 additions & 3 deletions momepy/dimension.py
@@ -546,7 +546,7 @@ def __init__(
end_markers = []

lengths = shapely.length(lines)
for ix, (line, length) in enumerate(zip(lines, lengths)):
for ix, (line, length) in enumerate(zip(lines, lengths, strict=True)):
pts = shapely.line_interpolate_point(
line, np.linspace(0, length, num=int((length) // distance))
)
@@ -560,7 +560,7 @@ def __init__(
ids += [ix] * 2

ticks = []
for num, (pt, end) in enumerate(zip(list_points, end_markers), 1):
for num, (pt, end) in enumerate(zip(list_points, end_markers, strict=True), 1):
if end:
ticks.append([pt, pt])
ticks.append([pt, pt])
@@ -587,7 +587,7 @@ def __init__(

min_distances = []
min_inds = []
for dis, ind in zip(dist_per_res, inp_per_res):
for dis, ind in zip(dist_per_res, inp_per_res, strict=True):
min_distances.append(np.min(dis))
min_inds.append(ind[np.argmin(dis)])

8 changes: 6 additions & 2 deletions momepy/distribution.py
@@ -70,7 +70,9 @@ def _dist(a, b):

bboxes = shapely.minimum_rotated_rectangle(gdf.geometry)
for geom, bbox in tqdm(
zip(gdf.geometry, bboxes), total=gdf.shape[0], disable=not verbose
zip(gdf.geometry, bboxes, strict=True),
total=gdf.shape[0],
disable=not verbose,
):
if geom.geom_type in ["Polygon", "MultiPolygon", "LinearRing"]:
bbox = list(bbox.exterior.coords)
@@ -793,7 +795,9 @@ def __init__(
print("Spatial weights ready...") if verbose else None

self.sw = spatial_weights
patches = dict(zip(gdf[unique_id], spatial_weights.component_labels))
patches = dict(
zip(gdf[unique_id], spatial_weights.component_labels, strict=True)
)

for uid in tqdm(
self.id,
2 changes: 1 addition & 1 deletion momepy/diversity.py
@@ -685,7 +685,7 @@ def p(n, sum_n):
counts.update(data.value_counts())
else:
sample_bins = mc.UserDefined(data, bins)
counts = dict(zip(bins, sample_bins.counts))
counts = dict(zip(bins, sample_bins.counts, strict=True))

return -sum(p(n, sum(counts.values())) for n in counts.values() if n != 0)

60 changes: 18 additions & 42 deletions momepy/elements.py
@@ -9,7 +9,6 @@
import numpy as np
import pandas as pd
import shapely
from packaging.version import Version
from scipy.spatial import Voronoi
from shapely.geometry.base import BaseGeometry
from shapely.ops import polygonize
@@ -25,8 +24,6 @@
"get_network_ratio",
]

GPD_10 = Version(gpd.__version__) >= Version("0.10")


def buffered_limit(gdf, buffer=100):
"""
@@ -241,7 +238,7 @@ def __init__(
n_chunks,
)
else:
if isinstance(limit, (gpd.GeoSeries, gpd.GeoDataFrame)):
if isinstance(limit, gpd.GeoSeries | gpd.GeoDataFrame):
limit = limit.unary_union

bounds = shapely.bounds(limit)
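The `isinstance` rewrite in the hunk above relies on `isinstance` accepting PEP 604 unions (`X | Y`) from Python 3.10 onward, which the new minimum Python permits. A standalone illustration, assuming only that geopandas is installed:

```python
import geopandas as gpd

limit = gpd.GeoSeries()

# Pre-3.10 spelling: a tuple of types.
assert isinstance(limit, (gpd.GeoSeries, gpd.GeoDataFrame))

# Python >= 3.10: a PEP 604 union works directly in isinstance checks.
assert isinstance(limit, gpd.GeoSeries | gpd.GeoDataFrame)
```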
@@ -275,10 +272,7 @@ def _morphological_tessellation(
objects.loc[mask, objects.geometry.name] = objects[mask].buffer(
-shrink, cap_style=2, join_style=2
)
if GPD_10:
objects = objects.reset_index(drop=True).explode(ignore_index=True)
else:
objects = objects.reset_index(drop=True).explode().reset_index(drop=True)
objects = objects.reset_index(drop=True).explode(ignore_index=True)
objects = objects.set_index(unique_id)

print("Generating input point array...") if verbose else None
@@ -331,7 +325,7 @@ def _dense_point_array(self, geoms, distance, index):
else:
lines = geoms
lengths = shapely.length(lines)
for ix, line, length in zip(index, lines, lengths):
for ix, line, length in zip(index, lines, lengths, strict=True):
if length > distance: # some polygons might have collapsed
pts = shapely.line_interpolate_point(
line,
@@ -621,45 +615,28 @@ def __init__(self, tessellation, edges, buildings, id_name, unique_id):
gpd.GeoDataFrame(geometry=edges.buffer(0.001)),
how="difference",
)
cut = cut.explode(ignore_index=True) if GPD_10 else cut.explode()

cut = cut.explode(ignore_index=True)
weights = libpysal.weights.Queen.from_dataframe(cut, silence_warnings=True)
cut["component"] = weights.component_labels
buildings_c = buildings.copy()
buildings_c.geometry = buildings_c.representative_point() # make points
if GPD_10:
centroids_temp_id = gpd.sjoin(
buildings_c,
cut[[cut.geometry.name, "component"]],
how="left",
predicate="within",
)
else:
centroids_temp_id = gpd.sjoin(
buildings_c,
cut[[cut.geometry.name, "component"]],
how="left",
op="within",
)
centroids_temp_id = gpd.sjoin(
buildings_c,
cut[[cut.geometry.name, "component"]],
how="left",
predicate="within",
)

cells_copy = tessellation[[unique_id, tessellation.geometry.name]].merge(
centroids_temp_id[[unique_id, "component"]], on=unique_id, how="left"
)
if GPD_10:
blocks = cells_copy.dissolve(by="component").explode(ignore_index=True)
else:
blocks = (
cells_copy.dissolve(by="component").explode().reset_index(drop=True)
)
blocks = cells_copy.dissolve(by="component").explode(ignore_index=True)
blocks[id_name] = range(len(blocks))
blocks = blocks[[id_name, blocks.geometry.name]]

if GPD_10:
centroids_w_bl_id2 = gpd.sjoin(
buildings_c, blocks, how="left", predicate="within"
)
else:
centroids_w_bl_id2 = gpd.sjoin(buildings_c, blocks, how="left", op="within")
centroids_w_bl_id2 = gpd.sjoin(
buildings_c, blocks, how="left", predicate="within"
)

self.buildings_id = centroids_w_bl_id2[id_name]
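The hunk above drops the `GPD_10` compatibility branches because the pinned minimum (geopandas 0.12, per the updated oldest environment) already provides `explode(ignore_index=True)` and the `predicate=` keyword in `sjoin`. A minimal standalone sketch of those two calls, with a toy GeoDataFrame as the only assumption:

```python
import geopandas as gpd
from shapely.geometry import MultiPoint, Point

gdf = gpd.GeoDataFrame({"component": [0]}, geometry=[MultiPoint([(0, 0), (1, 1)])])

# geopandas >= 0.10: ignore_index replaces the explode().reset_index(drop=True) dance
exploded = gdf.explode(ignore_index=True)

# geopandas >= 0.10: sjoin takes predicate= instead of the deprecated op=
pts = gpd.GeoDataFrame(geometry=[Point(0, 0)])
joined = gpd.sjoin(pts, gdf[["geometry", "component"]], how="left", predicate="within")
```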

@@ -823,7 +800,7 @@ def get_node_id(
edges = edges.set_index(edge_id)
centroids = objects.centroid
for eid, centroid in tqdm(
zip(objects[edge_id], centroids),
zip(objects[edge_id], centroids, strict=True),
total=objects.shape[0],
disable=not verbose,
):
@@ -844,7 +821,9 @@

elif edge_keys is not None and edge_values is not None:
for edge_i, edge_r, geom in tqdm(
zip(objects[edge_keys], objects[edge_values], objects.geometry),
zip(
objects[edge_keys], objects[edge_values], objects.geometry, strict=True
),
total=objects.shape[0],
disable=not verbose,
):
@@ -903,9 +882,6 @@ def get_network_ratio(df, edges, initial_buffer=500):
4 [26] [1]
"""

if not GPD_10:
raise ImportError("`get_network_ratio` requires geopandas 0.10 or newer.")

(df_ix, edg_ix), dist = edges.sindex.nearest(
df.geometry, max_distance=initial_buffer, return_distance=True
)