Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

2025 python / packages compatibility #26

Merged
merged 5 commits into from
Jan 27, 2025
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 18 additions & 1 deletion .github/workflows/recursive_logit_integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,21 @@ jobs:
- name: Test with pytest
run: |
pip install -e .
pytest --ignore=src/ --ignore=manual_verification/
pytest -v --color=yes --ignore=src/ --ignore=manual_verification/

test2024:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.10

- name: Install dependencies
run: |
python -m pip install uv
uv pip install -r requirements_strict_2024.txt flake8 pytest -e .

- name: Test with pytest
run: |
pytest -v --color=yes --ignore=src/ --ignore=manual_verification/
24 changes: 5 additions & 19 deletions readme.rst
Original file line number Diff line number Diff line change
Expand Up @@ -10,30 +10,16 @@ quick start overview.

Installation
------------
The current most direct method of installation is to clone the repository and install using pip::

pip install -r requirements.txt

Note that (currently) on windows the 2004 update has broken some interactions with OpenBLAS. To
avoid any problems it is recommended to use conda, which will use MKL BLAS and avoid any issues::

conda install --file (cat requirements.txt)
(This was all written back in 2020, and python packaging workflow tools have come a long way since then;
more modern workflow tools like pixi, rye, pdm etc. could be considered.)

Note that there are no direct problems related to this in the code but
pip will install numpy 1.19.4 by default, which will
immediately crash to warn about this. Reverting to numpy 1.19.3 "fixes" this in that the warning
isn't triggered, but other code which does rely on the broken functionality may silently fail.

..
Currently one can install from the repository directly using pip::
The current most direct method of installation is to clone the repository and install using pip::

pip install git+https://github.com/m-richards/RecursiveLogit.git
python -m virtualenv venv
python -m pip install -r requirements_strict_2024.txt


It is recommended to use some kind of virtual environment to avoid conflicting package versions.
There is also a :code:`requirements_strict.txt` which explicitly specifies package versions. This
will likely contain less up to date versions, but should always work regardless of future
api changes of the dependencies.

Example Usage
-------------
Expand Down
Binary file added requirements_strict_2024.txt
Binary file not shown.
4 changes: 2 additions & 2 deletions src/recursiveRouteChoice/data_loading.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def load_csv_to_sparse(fname, dtype=None, delim=None, square_matrix=True, shape=
Parameters
----------
fname : str
fname : str | Path
dtype :
delim : str, optional
square_matrix : bool
Expand Down Expand Up @@ -261,7 +261,7 @@ def load_tntp_node_formulation(net_fpath, columns_to_extract=None, sparse_format
data = net2[columns_to_extract[i]].values
data_mat = sparse.coo_matrix((data, (rows, cols)))
if sparse_format is False:
data_mat = data_mat.A
data_mat = data_mat.toarray()
data_list.append(data_mat)
data_list_headers.append(columns_to_extract[i])

Expand Down
3 changes: 2 additions & 1 deletion tests/docs/test_sioux_falls_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
"""
from pathlib import Path

import numpy as np

Expand All @@ -15,7 +16,7 @@
# DATA
import os
print("sys path is", os.getcwd(), os.listdir(os.getcwd()))
network_file = os.path.join("tests", "docs", "SiouxFalls_net.tntp")
network_file = Path(__file__).parent.parent.joinpath("docs", "SiouxFalls_net.tntp")
node_max = 24 # from network file

data_list, data_list_names = load_tntp_node_formulation(
Expand Down
24 changes: 14 additions & 10 deletions tests/test_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
existing code.
"""
from pathlib import Path

from recursiveRouteChoice.recursive_route_choice import ALLOW_POSITIVE_VALUE_FUNCTIONS
# import pytest -- run with pytest although it's not actually imported

Expand All @@ -19,6 +21,8 @@
import os
from os.path import join

DATA_DIR = Path(__file__).parent.parent / "Datasets"

hand_net_dists = np.array(
[[4, 3.5, 4.5, 3, 3, 0, 0, 0],
[3.5, 3, 4, 0, 2.5, 3, 3, 0],
Expand Down Expand Up @@ -68,15 +72,15 @@ def _first_example_common_data_checks(travel_times_mat, incidence_mat, obs_mat):
@staticmethod
def load_example_tiny_manually():
subfolder = "ExampleTiny" # big data from classical v2
folder = join("Datasets", subfolder)
folder = DATA_DIR / subfolder
INCIDENCE = "incidence.txt"
TRAVEL_TIME = 'travelTime.txt'
OBSERVATIONS = "observations.txt"
# TURN_ANGLE = "turnAngle.txt"
file_incidence = os.path.join(folder, INCIDENCE)
file_travel_time = os.path.join(folder, TRAVEL_TIME)
file_incidence = folder / INCIDENCE
file_travel_time = folder / TRAVEL_TIME
# file_turn_angle = os.path.join(folder, TURN_ANGLE)
file_obs = os.path.join(folder, OBSERVATIONS)
file_obs = folder / OBSERVATIONS

travel_times_mat = load_csv_to_sparse(file_travel_time).todok()
incidence_mat = load_csv_to_sparse(file_incidence, dtype='int').todok()
Expand Down Expand Up @@ -107,7 +111,7 @@ def test_example_manual_loading_dense(self):

def test_example_tiny_smart_loading(self):
subfolder = "ExampleTiny" # big data from classical v2
folder = join("Datasets", subfolder)
folder = join(DATA_DIR, subfolder)
obs_mat, attrs = load_standard_path_format_csv(folder, delim=" ", angles_included=False)
incidence_mat, travel_times_mat = attrs
# left, right, _, u_turn = AngleProcessor.get_turn_categorical_matrices()
Expand Down Expand Up @@ -172,7 +176,7 @@ def test_example_tiny_modified_sparse(self):
# wrong
# Now is a bad example as @42f564e9 results in this test case being illegal valued
subfolder = "ExampleTinyModifiedObs" # big data from classical v2
folder = join("Datasets", subfolder)
folder = join(DATA_DIR, subfolder)

obs_mat, attrs = load_standard_path_format_csv(folder, delim=" ", angles_included=True)
incidence_mat, travel_times_mat, angle_cts_mat = attrs
Expand All @@ -187,7 +191,7 @@ def test_example_tiny_modified_sparse(self):

def test_example_tiny_modified_awkward_array(self):
subfolder = "ExampleTinyModifiedObs" # big data from classical v2
folder = join("Datasets", subfolder)
folder = join(DATA_DIR, subfolder)

obs_mat, attrs = load_standard_path_format_csv(folder, delim=" ", angles_included=True)
import awkward1 as ak
Expand All @@ -207,7 +211,7 @@ def test_example_tiny_modified_awkward_array_in_expected_format(self):
"""Test's awkward array input obs format when it is actually zero indexed and ragged
data, not square. See that output is consistent in this case"""
subfolder = "ExampleTinyModifiedObs" # big data from classical v2
folder = join("Datasets", subfolder)
folder = join(DATA_DIR, subfolder)

obs_mat, attrs = load_standard_path_format_csv(folder, delim=" ", angles_included=True)
import awkward1 as ak
Expand All @@ -228,7 +232,7 @@ def test_example_tiny_modified_awkward_array_in_expected_format(self):

def test_example_tiny_modified_dense(self):
subfolder = "ExampleTinyModifiedObs" # big data from classical v2
folder = join("Datasets", subfolder)
folder = join(DATA_DIR, subfolder)

obs_mat, attrs = load_standard_path_format_csv(folder, delim=" ", angles_included=True)
import awkward1 as ak
Expand Down Expand Up @@ -345,7 +349,7 @@ class TestOptimAlgs(object):

def test_compare_optim_methods(self):
subfolder = "ExampleTinyModifiedObs" # big data from classical v2
folder = join("Datasets", subfolder)
folder = join(DATA_DIR, subfolder)

obs_mat, attrs = load_standard_path_format_csv(folder, delim=" ", angles_included=True)
import awkward1 as ak
Expand Down
Loading