From e5b815b7ecd41ef5740d2403465fa82a9f34af53 Mon Sep 17 00:00:00 2001
From: Sybrand Strauss
Date: Thu, 1 Sep 2022 13:34:35 -0700
Subject: [PATCH] Simplified zone geometry (#2252)

- Add generalised "shape" object (to stick zones and things into)
- Importing simplified zones
- Added files/config for developing api inside dev container
- Updated some readmes
- Ran poetry update - and added some timeouts where lint was complaining.
---
 .devcontainer/devcontainer.json | 19 +++
 .vscode/settings.json | 5 +-
 Dockerfile.vscode | 86 +++++++++++
 README.md | 10 ++
 api/Makefile | 3 +
 api/README.md | 6 +
 api/alembic.ini | 7 +-
 .../versions/17b1c787f420_advisory_areas.py | 56 +++++++
 .../versions/c04f22e31997_import_zones.py | 88 +++++++++++
 api/app/.env.example | 1 +
 api/app/db/models/__init__.py | 1 +
 api/app/db/models/advisory.py | 44 ++++++
 api/app/health.py | 2 +-
 api/app/rocketchat_notifications.py | 3 +-
 api/app/utils/esri.py | 75 ++++++++++
 api/poetry.lock | 138 ++++++------------
 sfms/README.md | 12 ++
 17 files changed, 455 insertions(+), 101 deletions(-)
 create mode 100644 .devcontainer/devcontainer.json
 create mode 100644 Dockerfile.vscode
 create mode 100644 api/alembic/versions/17b1c787f420_advisory_areas.py
 create mode 100644 api/alembic/versions/c04f22e31997_import_zones.py
 create mode 100644 api/app/db/models/advisory.py
 create mode 100644 api/app/utils/esri.py

diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 000000000..4788f0498
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,19 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
+// https://github.com/microsoft/vscode-dev-containers/tree/v0.245.0/containers/docker-existing-dockerfile
+{
+	"name": "Existing Dockerfile",
+	// Sets the run context to one level up instead of the .devcontainer folder.
+	"context": "..",
+	// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
+	"dockerFile": "../Dockerfile.vscode",
+	// Use 'forwardPorts' to make a list of ports inside the container available locally.
+	// "forwardPorts": [],
+	// Uncomment the next line to run commands after the container is created - for example installing curl.
+	// "postCreateCommand": "apt-get update && apt-get install -y curl",
+	// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
+	// "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ],
+	// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
+	// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
+	// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
+	"remoteUser": "vscode"
+}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index e4ab6aa40..d53fa4acd 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -55,5 +55,8 @@
   "python.testing.pytestArgs": [
     "api"
   ],
-  "typescript.preferences.importModuleSpecifier": "non-relative"
+  "typescript.preferences.importModuleSpecifier": "non-relative",
+  "cSpell.words": [
+    "Albers"
+  ]
 }
\ No newline at end of file
diff --git a/Dockerfile.vscode b/Dockerfile.vscode
new file mode 100644
index 000000000..c8d987dc4
--- /dev/null
+++ b/Dockerfile.vscode
@@ -0,0 +1,86 @@
+# NOTE:
+# This Dockerfile is for local development only!
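+# It trades image size for convenience: everything needed to hack on the api
+# (gdal, R, a JDK, poetry) is installed into one fat image.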
+
+# debian would match more closely what we have in production, and would probably be ideal,
+# but it's also a pain to work with because debian packages are so old.
FROM ubuntu:22.04
+
+ARG USERNAME=vscode
+ARG USER_UID=1000
+ARG USER_GID=$USER_UID
+
+# Tell apt/debconf not to wait for interactive input (e.g. when installing r-base).
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies needed by python developer packages
+# One should really run all these installs and the update in one go, for a consistent install,
+# but ease of development trumps consistency in this instance: it's easier to have many
+# fast-running steps that can fail individually, than one big monster install that takes forever
+# and fails.
+# NOTE: Once we no longer need pyodbc, please remove the apt-get update and install commands below.
+RUN apt-get -y update
+RUN apt-get -y install unixodbc-dev
+# Install the distro version of gdal (3.4.* on ubuntu 22.04)
+RUN apt-get -y install libgdal-dev
+
+# Install R
+RUN apt-get update --fix-missing && apt-get -y install r-base
+
+# Install cffdrs
+RUN R -e "install.packages('cffdrs')"
+
+# Install some other dependencies
+RUN apt-get -y install git build-essential python3 python3-dev python3-pip curl vim
+
+# Install JDK
+RUN apt-get -y install openjdk-11-jdk
+
+# We could install poetry manually, but it's easier to use apt.
+RUN apt-get -y install python3-poetry
+# Poetry expects "python", but by default, on ubuntu, you need to specify "python3", so
+# we work around that with the python-is-python3 package.
+RUN apt-get -y install python-is-python3
+
+# I prefer zsh to bash
+RUN apt-get -y install zsh
+
+# from: https://code.visualstudio.com/remote/advancedcontainers/add-nonroot-user
+RUN groupadd --gid $USER_GID $USERNAME \
+    && useradd --uid $USER_UID --gid $USER_GID -m $USERNAME
+
+# RUN mkdir /vscode
+# RUN chown vscode /vscode
+USER $USERNAME
+ENV PATH="/home/${USERNAME}/.local/bin:${PATH}"
+
+WORKDIR /home/$USERNAME
+
+# Update pip
+RUN python3 -m pip install --upgrade pip
+RUN python3 -m pip install cachecontrol
+
+# I like oh-my-zsh:
+RUN sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
+# BUT - for some reason git+zsh == slowness, so tell git not to slow down zsh:
+# git config --add oh-my-zsh.hide-dirty 1
+
+# Copy poetry files.
+# COPY pyproject.toml poetry.lock ./
+
+# COPY --chown=worker:worker poetry.lock pyproject.toml ./
+
+# RUN poetry install
+
+# # We can't have this inside pyproject.toml because the gdal version differs from platform to platform.
+# # To figure out what version of pygdal you need, run gdal-config
+# RUN poetry run python -m pip install pygdal==3.4.1.10
+
+# COPY ./app /app/app
+# RUN mkdir /app/libs
+# COPY ./libs /app/libs
+
+EXPOSE 8080 3000
+
+# ENV CLASSPATH=/app/libs/REDapp_Lib.jar:/app/libs/WTime.jar:/app/libs/hss-java.jar:${CLASSPATH}
+# CMD PYTHONPATH=. poetry run alembic upgrade head && poetry run uvicorn app.main:app --host 0.0.0.0 --reload --port 8080
+
diff --git a/README.md b/README.md
index 1558aec17..2aa20f5f3 100644
--- a/README.md
+++ b/README.md
@@ -23,6 +23,14 @@ Wildfire Predictive Services to support decision making in prevention, preparedn
 4. Open [http://localhost:8080](http://localhost:8080) to view the front end served up from a static folder by the python api.
 5. Open [http://localhost:3000](http://localhost:3000) to view the front end served up in developer mode by node.
 
+#### Developing the application in a dev container, using vscode:
+
+- Open up the project: `Remote-Containers: Open Folder in Container`, select docker-compose.vscode.yml
+- Sometimes VSCode doesn't pick up that you've changed the docker container: run `Remote-Containers: Rebuild Container`
+- Install extensions into the container, as needed.
+- You can point the API at a database running on the host via `host.docker.internal`
+- You can start up other services outside of vscode, e.g.: `docker compose up db` and `docker compose up redis`
+
 #### Running the api alone
 
 Refer to [api/README.md](api/README.md).
@@ -41,6 +49,8 @@ A glossary of terms relating to Wildfire that are relevant to Predictive Service
 
 ## Architecture
 
+*If you're not seeing an architecture diagram below, you need the Mermaid plugin.*
+
 ```mermaid
 graph LR
 
diff --git a/api/Makefile b/api/Makefile
index cdb235eb3..77ca7917a 100644
--- a/api/Makefile
+++ b/api/Makefile
@@ -203,5 +203,8 @@ docker-run-hourly-actuals:
 database-upgrade:
 	PYTHONPATH=. $(POETRY_RUN) alembic upgrade head
 
+database-downgrade:
+	PYTHONPATH=. $(POETRY_RUN) alembic downgrade -1
+
 docker-database-upgrade:
 	docker compose exec -e PYTHONPATH=. api alembic upgrade head
diff --git a/api/README.md b/api/README.md
index 3257792a8..3fb5a7c88 100644
--- a/api/README.md
+++ b/api/README.md
@@ -328,6 +328,12 @@
 PYTHONPATH=. alembic revision --autogenerate -m "Comment relevant to change"
 ```
 
 You may have to modify the generated code to import geoalchemy2
 
+You may want a data import/modification step, where you're not actually changing the database schema, but need to load or modify data. You can create an "empty" migration, and insert data as needed:
+
+```bash
+PYTHONPATH=. alembic revision -m "Comment relevant to change"
+```
+
 Then apply:
 
 ```bash
diff --git a/api/alembic.ini b/api/alembic.ini
index f43afc1e8..6744693f8 100644
--- a/api/alembic.ini
+++ b/api/alembic.ini
@@ -48,7 +48,7 @@ script_location = alembic
 
 # Logging configuration
 [loggers]
-keys = root,sqlalchemy,alembic
+keys = root,sqlalchemy,alembic,app
 
 [handlers]
 keys = console
@@ -71,6 +71,11 @@ level = INFO
 handlers =
 qualname = alembic
 
+[logger_app]
+level = INFO
+handlers = console
+qualname = app
+
 [handler_console]
 class = StreamHandler
 args = (sys.stderr,)
diff --git a/api/alembic/versions/17b1c787f420_advisory_areas.py b/api/alembic/versions/17b1c787f420_advisory_areas.py
new file mode 100644
index 000000000..5494e815e
--- /dev/null
+++ b/api/alembic/versions/17b1c787f420_advisory_areas.py
@@ -0,0 +1,56 @@
+"""advisory areas
+
+Revision ID: 17b1c787f420
+Revises: 62d35d76e1bf
+Create Date: 2022-08-31 22:46:45.138215
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import geoalchemy2
+
+
+# revision identifiers, used by Alembic.
+revision = '17b1c787f420'
+down_revision = '62d35d76e1bf'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic! ###
+    op.create_table('advisory_shape_types',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('name', sa.Enum('fire_centre', 'fire_zone', name='shapetypeenum'), nullable=False),
+                    sa.PrimaryKeyConstraint('id'),
+                    comment='Identify kind of advisory area (e.g. 
Zone, Fire etc.)'
+                    )
+    op.create_index(op.f('ix_advisory_shape_types_name'), 'advisory_shape_types', ['name'], unique=True)
+    op.create_table('advisory_shapes',
+                    sa.Column('id', sa.Integer(), nullable=False),
+                    sa.Column('source_identifier', sa.String(), nullable=False),
+                    sa.Column('shape_type', sa.Integer(), nullable=False),
+                    sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='MULTIPOLYGON',
+                              spatial_index=False, from_text='ST_GeomFromEWKT', name='geometry'), nullable=False),
+                    sa.ForeignKeyConstraint(['shape_type'], ['advisory_shape_types.id'], ),
+                    sa.PrimaryKeyConstraint('id'),
+                    sa.UniqueConstraint('source_identifier', 'shape_type'),
+                    comment='Record identifying some area of interest with respect to advisories'
+                    )
+    op.create_index('idx_advisory_shapes_geom', 'advisory_shapes', ['geom'], unique=False, postgresql_using='gist')
+    op.create_index(op.f('ix_advisory_shapes_shape_type'), 'advisory_shapes', ['shape_type'], unique=False)
+    op.create_index(op.f('ix_advisory_shapes_source_identifier'),
+                    'advisory_shapes', ['source_identifier'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic! ###
+    op.drop_index(op.f('ix_advisory_shapes_source_identifier'), table_name='advisory_shapes')
+    op.drop_index(op.f('ix_advisory_shapes_shape_type'), table_name='advisory_shapes')
+    op.drop_index('idx_advisory_shapes_geom', table_name='advisory_shapes', postgresql_using='gist')
+    op.drop_table('advisory_shapes')
+    op.drop_index(op.f('ix_advisory_shape_types_name'), table_name='advisory_shape_types')
+    op.drop_table('advisory_shape_types')
+    sa.Enum(name='shapetypeenum').drop(op.get_bind())
+    # ### end Alembic commands ###
diff --git a/api/alembic/versions/c04f22e31997_import_zones.py b/api/alembic/versions/c04f22e31997_import_zones.py
new file mode 100644
index 000000000..3329d6d4f
--- /dev/null
+++ b/api/alembic/versions/c04f22e31997_import_zones.py
@@ -0,0 +1,88 @@
+"""Import zones
+
+Revision ID: c04f22e31997
+Revises: 17b1c787f420
+Create Date: 2022-08-31 22:56:52.264112
+
+"""
+from typing import Final
+import tempfile
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.orm.session import Session
+import geoalchemy2
+from shapely.geometry import MultiPolygon, Polygon
+from shapely import wkb
+from app.utils import esri
+
+
+# revision identifiers, used by Alembic.
+revision = 'c04f22e31997'
+down_revision = '17b1c787f420'
+branch_labels = None
+depends_on = None
+
+
+shape_type_table = sa.Table('advisory_shape_types', sa.MetaData(),
+                            sa.Column('id', sa.Integer),
+                            sa.Column('name', sa.String))
+
+shape_table = sa.Table('advisory_shapes', sa.MetaData(),
+                       sa.Column('id', sa.Integer),
+                       sa.Column('source_identifier', sa.String),
+                       sa.Column('shape_type', sa.Integer),
+                       sa.Column('geom', geoalchemy2.Geometry))
+
+
+def upgrade():
+    session = Session(bind=op.get_bind())
+    statement = shape_type_table.insert().values(name='fire_zone').returning(shape_type_table.c.id)
+    result = session.execute(statement).fetchone()
+    shape_type_id = result.id
+
+    # We fetch a list of object ids; fetching the entire layer in one go would most likely
+    # crash the server we're talking to.
+    zone_url: Final = "https://maps.gov.bc.ca/arcserver/rest/services/whse/bcgw_pub_whse_legal_admin_boundaries/MapServer/8"
+    zone_ids = esri.fetch_object_list(zone_url)
+    for object_id in zone_ids:
+        # Fetch each object in turn.
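+        # (esri.fetch_object defaults to out_sr='3005', so each geometry arrives
+        # already projected to BC Albers, matching the srid=3005 used on insert below.)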
+
+        obj = esri.fetch_object(object_id, zone_url)
+        for feature in obj.get('features', []):
+            attributes = feature.get('attributes', {})
+            # Each zone is uniquely identified by a fire zone id.
+            mof_fire_zone_id = attributes.get('MOF_FIRE_ZONE_ID')
+            fire_zone_id = str(int(mof_fire_zone_id))
+            geometry = feature.get('geometry', {})
+            # Rings???
+            # That's right:
+            # https://developers.arcgis.com/documentation/common-data-types/geometry-objects.htm
+            # "A polygon (specified as esriGeometryPolygon) contains an array of rings or curveRings
+            # and a spatialReference."
+            rings = geometry.get('rings', [[]])
+            polygons = []
+            for ring in rings:
+                # Simplify each polygon to a 1000 meter tolerance, preserving topology.
+                polygons.append(Polygon(ring).simplify(1000, preserve_topology=True))
+            geom = MultiPolygon(polygons)
+            # Insert.
+            statement = shape_table.insert().values(
+                source_identifier=fire_zone_id,
+                shape_type=shape_type_id,
+                geom=wkb.dumps(geom, hex=True, srid=3005))
+            session.execute(statement)
+
+
+def downgrade():
+    session = Session(bind=op.get_bind())
+    # Look up the 'fire_zone' shape type
+    statement = shape_type_table.select().where(shape_type_table.c.name == 'fire_zone')
+    result = session.execute(statement).fetchone()
+    shape_type_id = result.id
+
+    # Delete all shapes of that type
+    statement = shape_table.delete().where(shape_table.c.shape_type == shape_type_id)
+    session.execute(statement)
+
+    # Delete the 'fire_zone' shape type
+    statement = shape_type_table.delete().where(shape_type_table.c.name == 'fire_zone')
+    session.execute(statement)
diff --git a/api/app/.env.example b/api/app/.env.example
index fba6a2129..633e1f273 100644
--- a/api/app/.env.example
+++ b/api/app/.env.example
@@ -11,6 +11,7 @@ BC_FIRE_WEATHER_USER=user
 BC_FIRE_WEATHER_SECRET=password
 BC_FIRE_WEATHER_FILTER_ID=0
 KEYCLOAK_PUBLIC_KEY=thisispublickey
+# POSTGRES_WRITE_HOST=host.docker.internal
 POSTGRES_WRITE_HOST=db
 POSTGRES_READ_HOST=db
 POSTGRES_READ_USER=wpsread
diff --git a/api/app/db/models/__init__.py b/api/app/db/models/__init__.py
index a7829fbc2..8cd12902e 100644
--- a/api/app/db/models/__init__.py
+++ b/api/app/db/models/__init__.py
@@ -9,3 +9,4 @@
                                         PredictionModelGridSubset, ModelRunGridSubsetPrediction, WeatherStationModelPrediction)
 from app.db.models.hfi_calc import (FireCentre, FuelType, PlanningArea, PlanningWeatherStation)
+from app.db.models.advisory import (Shape, ShapeType)
diff --git a/api/app/db/models/advisory.py b/api/app/db/models/advisory.py
new file mode 100644
index 000000000..39d627f72
--- /dev/null
+++ b/api/app/db/models/advisory.py
@@ -0,0 +1,44 @@
+import enum
+from sqlalchemy import (Integer, String, Column, Index, ForeignKey, Enum, UniqueConstraint)
+from geoalchemy2 import Geometry
+from app.db.database import Base
+
+
+class ShapeTypeEnum(enum.Enum):
+    """ Define different shape types. e.g. "Zone", "Fire Centre" - later we may add
+    "Incident"/"Fire", "Custom" etc. """
+    fire_centre = 1
+    fire_zone = 2
+
+
+class ShapeType(Base):
+    """ Identify some kind of area type, e.g. "Zone", or "Fire" """
+    __tablename__ = 'advisory_shape_types'
+    __table_args__ = (
+        {'comment': 'Identify kind of advisory area (e.g. Zone, Fire etc.)'}
+    )
+
+    id = Column(Integer, primary_key=True)
+    name = Column(Enum(ShapeTypeEnum), nullable=False, unique=True, index=True)
+
+
+class Shape(Base):
+    """ Identify some area of interest with respect to advisories. 
""" + __tablename__ = 'advisory_shapes' + __table_args__ = ( + # we may have to re-visit this constraint - but for the time being, the idea is + # that for any given type of area, it has to be unique for the kind of thing that + # it is. e.g. a zone has some id. + UniqueConstraint('source_identifier', 'shape_type'), + {'comment': 'Record identifying some area of interest with respect to advisories'} + ) + + id = Column(Integer, primary_key=True) + # An area is uniquely identified, e.g. a zone has a number, so does a fire. + source_identifier = Column(String, nullable=False, index=True) + shape_type = Column(Integer, ForeignKey('advisory_shape_types.id'), nullable=False, index=True) + geom = Column(Geometry('MULTIPOLYGON', spatial_index=False), nullable=False) + + +# Explict creation of index due to issue with alembic + geoalchemy. +Index('idx_advisory_areas_geom', Shape.geom, postgresql_using='gist') diff --git a/api/app/health.py b/api/app/health.py index d1832c3c1..1b58a4df5 100644 --- a/api/app/health.py +++ b/api/app/health.py @@ -22,7 +22,7 @@ def patroni_cluster_health_check(): header = { 'Authorization': 'Bearer ' + config.get('STATUS_CHECKER_SECRET') } - resp = requests.get(url, headers=header) + resp = requests.get(url, headers=header, timeout=10) resp_json = resp.json() # NOTE: In Openshift parlance "replica" refers to how many of one pod we have, in Patroni, a "Replica" # refers to a read only copy of of the Leader. diff --git a/api/app/rocketchat_notifications.py b/api/app/rocketchat_notifications.py index c3fae792d..8a49a02cc 100644 --- a/api/app/rocketchat_notifications.py +++ b/api/app/rocketchat_notifications.py @@ -37,7 +37,8 @@ def send_rocketchat_notification(text: str, exc_info: Exception) -> dict: json={ 'channel': config.get('ROCKET_CHANNEL'), 'text': full_message - } + }, + timeout=10 ) result = response.json() except Exception as exception: # pylint: disable=broad-except diff --git a/api/app/utils/esri.py b/api/app/utils/esri.py new file mode 100644 index 000000000..24c96b20a --- /dev/null +++ b/api/app/utils/esri.py @@ -0,0 +1,75 @@ +""" +Functions for talking to ESRI ARC servers. +""" +import urllib.parse +import urllib.request +import json +import logging + +logger = logging.getLogger(__name__) + + +def fetch_object_list(url: str): + """ + Fetch object list from a feature layer. + + url: layer url to fetch + (e.g. https://maps.gov.bc.ca/arcserver/rest/services/whse/bcgw_pub_whse_legal_admin_boundaries/MapServer/2) + """ + logger.info('fetching object list for %s...', url) + + # 1=1 ??? + # There's no direct way of asking for all records, so the hack is to pass a condition that + # is true for all records. + params = { + 'where': '1=1', + 'geometryType': 'esriGeometryEnvelope', + 'spatialRel': 'esriSpatialRelIntersects', + 'returnGeometry': 'false', + 'returnIdsOnly': 'true', + 'f': 'json' + } + + encode_params = urllib.parse.urlencode(params).encode("utf-8") + logger.info('%s/query?%s', url, encode_params.decode()) + with urllib.request.urlopen(f'{url}/query?', encode_params) as response: + json_data = json.loads(response.read()) + return json_data['objectIds'] + + +def fetch_object(object_id: int, url: str, out_sr: str = '3005', response_format: str = 'json') -> dict: + """ + Fetch a single object from a feature layer. By default the output is + json in BC Albers (EPSG:3005) + We have to fetch objects one by one, because they + can get pretty big. Big enough, that if you ask for more than one at a time, you're likely to + encounter 500 errors. 
+
+    object_id: object id to fetch (e.g. 1)
+    url: layer url to fetch
+    (e.g. https://maps.gov.bc.ca/arcserver/rest/services/whse/bcgw_pub_whse_legal_admin_boundaries/MapServer/2)
+    out_sr: Spatial reference, e.g. '4326' (WGS84 EPSG:4326) or '3005' (BC Albers EPSG:3005)
+    response_format: output format, e.g. 'geoJSON', 'json'
+
+    For more information see:
+    https://developers.arcgis.com/rest/services-reference/enterprise/query-feature-service-layer-.htm
+    """
+    logger.info('fetching object %s', object_id)
+
+    # Note: If you drop outSR, and set f to geoJSON, you get a GeoJSON geometry in WGS84.
+    params = {
+        'where': f'objectid={object_id}',
+        'geometryType': 'esriGeometryEnvelope',
+        'spatialRel': 'esriSpatialRelIntersects',
+        'outSR': out_sr,
+        'outFields': '*',
+        'returnGeometry': 'true',
+        'returnIdsOnly': 'false',
+        'f': response_format
+    }
+
+    encode_params = urllib.parse.urlencode(params).encode("utf-8")
+    logger.info('%s/query?%s', url, encode_params.decode())
+    with urllib.request.urlopen(f'{url}/query?', encode_params) as response:
+        json_data = json.loads(response.read())
+        return json_data
diff --git a/api/poetry.lock b/api/poetry.lock
index 4c6bbdf09..fb55e9b63 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -1,20 +1,20 @@
 [[package]]
 name = "aiobotocore"
-version = "2.3.4"
+version = "2.4.0"
 description = "Async client for aws services using botocore and aiohttp"
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 
 [package.dependencies]
 aiohttp = ">=3.3.1"
 aioitertools = ">=0.5.1"
-botocore = ">=1.24.21,<1.24.22"
+botocore = ">=1.27.59,<1.27.60"
 wrapt = ">=1.10.10"
 
 [package.extras]
-awscli = ["awscli (>=1.22.76,<1.22.77)"]
-boto3 = ["boto3 (>=1.21.21,<1.21.22)"]
+awscli = ["awscli (>=1.25.60,<1.25.61)"]
+boto3 = ["boto3 (>=1.24.59,<1.24.60)"]
 
 [[package]]
 name = "aiodns"
@@ -164,16 +164,16 @@ tests = ["pytest"]
 
 [[package]]
 name = "astroid"
-version = "2.11.7"
+version = "2.12.5"
 description = "An abstract syntax tree for Python with inference support."
 category = "main"
 optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.2"
 
 [package.dependencies]
 lazy-object-proxy = ">=1.4.0"
 typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
-wrapt = ">=1.11,<2"
+wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""}
 
 [[package]]
 name = "asttokens"
@@ -283,11 +283,11 @@ dev = ["build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "pip-tools (=
 
 [[package]]
 name = "botocore"
-version = "1.24.21"
+version = "1.27.59"
 description = "Low-level, data-driven core of boto 3."
category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -295,7 +295,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.5)"] +crt = ["awscrt (==0.14.0)"] [[package]] name = "certifi" @@ -490,7 +490,7 @@ testing = ["pre-commit"] [[package]] name = "executing" -version = "0.10.0" +version = "1.0.0" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false @@ -498,7 +498,7 @@ python-versions = "*" [[package]] name = "fastapi" -version = "0.80.0" +version = "0.81.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false @@ -510,9 +510,9 @@ starlette = "0.19.1" [package.extras] all = ["requests (>=2.24.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<3.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] -dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)", "pre-commit (>=2.17.0,<3.0.0)"] +dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)", "pre-commit (>=2.17.0,<3.0.0)"] doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "typer (>=0.4.1,<0.5.0)", "pyyaml (>=5.3.1,<7.0.0)"] -test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "mypy (==0.910)", "flake8 (>=3.8.3,<4.0.0)", "black (==22.3.0)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "flask (>=1.1.2,<3.0.0)", "anyio[trio] (>=3.2.1,<4.0.0)", "types-ujson (==4.2.1)", "types-orjson (==3.6.2)", "types-dataclasses (==0.6.5)"] +test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "mypy (==0.910)", "flake8 (>=3.8.3,<6.0.0)", "black (==22.3.0)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "flask (>=1.1.2,<3.0.0)", "anyio[trio] (>=3.2.1,<4.0.0)", "types-ujson (==4.2.1)", "types-orjson (==3.6.2)", "types-dataclasses (==0.6.5)"] [[package]] name = "fastjsonschema" @@ -550,7 +550,7 @@ test = ["pytest (>=3)", "pytest-cov", "boto3 (>=1.2.4)", "mock"] [[package]] name = "fonttools" -version = "4.37.0" +version = "4.37.1" description = "Tools to manipulate font files" category = "dev" optional = false @@ -580,7 +580,7 @@ python-versions = ">=3.7" [[package]] name = "geoalchemy2" -version = "0.12.3" +version = "0.12.5" description = "Using SQLAlchemy with Spatial 
Databases" category = "main" optional = false @@ -615,7 +615,7 @@ python-versions = "*" [[package]] name = "greenlet" -version = "1.1.2" +version = "1.1.3" description = "Lightweight in-process concurrent programming" category = "main" optional = false @@ -689,7 +689,7 @@ python-versions = "*" [[package]] name = "ipykernel" -version = "6.15.1" +version = "6.15.2" description = "IPython Kernel for Jupyter" category = "dev" optional = false @@ -846,7 +846,7 @@ docs = ["rst.linker (>=1.9)", "jaraco.packaging (>=3.2)", "sphinx"] [[package]] name = "jsonschema" -version = "4.14.0" +version = "4.15.0" description = "An implementation of JSON Schema validation for Python" category = "dev" optional = false @@ -880,7 +880,7 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "7.3.4" +version = "7.3.5" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -892,7 +892,7 @@ jupyter-core = ">=4.9.2" nest-asyncio = ">=1.5.4" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" -tornado = ">=6.0" +tornado = ">=6.2" traitlets = "*" [package.extras] @@ -980,7 +980,7 @@ source = ["Cython (>=0.29.7)"] [[package]] name = "mako" -version = "1.2.1" +version = "1.2.2" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false @@ -1212,7 +1212,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandas" -version = "1.4.3" +version = "1.4.4" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = false @@ -1223,6 +1223,7 @@ numpy = [ {version = ">=1.18.5", markers = "platform_machine != \"aarch64\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, {version = ">=1.19.2", markers = "platform_machine == \"aarch64\" and python_version < \"3.10\""}, {version = ">=1.20.0", markers = "platform_machine == \"arm64\" and python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, ] python-dateutil = ">=2.8.1" pytz = ">=2020.1" @@ -1476,14 +1477,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.2" +version = "1.10.1" description = "Data validation and settings management using python type hints" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.1.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -1531,14 +1532,14 @@ tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] [[package]] name = "pylint" -version = "2.14.5" +version = "2.15.0" description = "python code static checker" category = "main" optional = false python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.11.6,<=2.12.0-dev0" +astroid = ">=2.12.4,<=2.14.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = ">=0.2" isort = ">=4.2.5,<6" @@ -1647,7 +1648,7 @@ dev = ["pre-commit", "tox", "pytest-asyncio"] [[package]] name = "pytest-testmon" -version = "1.3.4" +version = "1.3.5" description = "selects tests affected by changed files and methods" category = "dev" optional = false @@ -1787,7 +1788,7 @@ py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.3.1" +version = "5.3.2" description = "Jupyter Qt console" category = "dev" optional = false @@ -1928,7 +1929,7 @@ benchmark = ["memory-profiler 
(>=0.57.0)", "pandas (>=1.0.5)", "matplotlib (>=3. [[package]] name = "scipy" -version = "1.9.0" +version = "1.9.1" description = "SciPy: Scientific Library for Python" category = "main" optional = false @@ -2000,11 +2001,11 @@ scikit-learn = "*" [[package]] name = "sniffio" -version = "1.2.0" +version = "1.3.0" description = "Sniff out which async library your code is running under" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "soupsieve" @@ -2048,7 +2049,7 @@ sqlcipher = ["sqlcipher3-binary"] [[package]] name = "stack-data" -version = "0.4.0" +version = "0.5.0" description = "Extract data from python stack frames and tracebacks for informative displays" category = "dev" optional = false @@ -2289,8 +2290,8 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" -python-versions = ">=3.8.10,<3.10" -content-hash = "88cd4debd422dc3a8af29756c9dfc837cf9f5f0b445fa250d9adbffef4235f55" +python-versions = ">=3.8.10,<3.11" +content-hash = "ea8f7bb41d381a07d2efd61f32cc7d7f523baa68690bca1d8008db19ff07450d" [metadata.files] aiobotocore = [] @@ -2460,10 +2461,7 @@ beautifulsoup4 = [ {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, ] bleach = [] -botocore = [ - {file = "botocore-1.24.21-py3-none-any.whl", hash = "sha256:92daca8775e738a9db9b465d533019285f09d541e903233261299fd87c2f842c"}, - {file = "botocore-1.24.21.tar.gz", hash = "sha256:7e976cfd0a61601e74624ef8f5246b40a01f2cce73a011ef29cf80a6e371d0fa"}, -] +botocore = [] certifi = [] cffi = [] charset-normalizer = [] @@ -2534,58 +2532,7 @@ geopandas = [] glob2 = [ {file = "glob2-0.7.tar.gz", hash = "sha256:85c3dbd07c8aa26d63d7aacee34fa86e9a91a3873bc30bf62ec46e531f92ab8c"}, ] -greenlet = [ - {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, - {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, - {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, - {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, - {file = 
"greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, - {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, - {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, - {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, - {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, - {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, - {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, - {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, - {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, - {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, - {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, - 
{file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, - {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, - {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, - {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, - {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, - {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, - {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, -] +greenlet = [] h11 = [ {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"}, {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"}, @@ -3178,10 +3125,7 @@ six = [ sklearn = [ {file = "sklearn-0.0.tar.gz", hash = "sha256:e23001573aa194b834122d2b9562459bf5ae494a2d59ca6b8aa22c85a44c0e31"}, ] -sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, -] +sniffio = [] soupsieve = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, diff --git a/sfms/README.md b/sfms/README.md index 50b4d307c..f0983c7b4 100644 --- a/sfms/README.md +++ b/sfms/README.md @@ -44,4 +44,16 @@ pyenv shell 2.7.18 pyenv which python poetry env use [use the output of the previous command] poetry install +``` + + +### pyenv + 2.7.18 on M1 + +On M1, `pyenv install 2.7.18` fails, and will never pass 
(they're not going to bother fixing it, and they shouldn't! it's deprecated).
+
+You CAN however try running a universal binary in x86_64 mode, and get it to work that way!
+
+```bash
+arch -x86_64 /bin/bash
+pyenv install -v 2.7.18
```
\ No newline at end of file