From f3cdf14e546b06c464eeed3007fa15a4ce0fabdf Mon Sep 17 00:00:00 2001 From: Zane Selvans Date: Tue, 19 Nov 2024 19:38:50 -0600 Subject: [PATCH] Clean up some nightly build infrastructure cruft (#3962) * Bump docker container to mamba v2.0.3 * Warn against using nightly build outputs directly for Zenodo. * Remove obsolete refs to GCE_INSTANCE in nightly build workflow. * Update RTD backend link to new interface. * Update nightly build docs to reflect use of Google Batch. * Bump a couple of non-python dependency versions. * Simplify nightly/stable/workflow_dispatch logic in nightly build script. * Make VCE RARE row count asset check non-blocking for fast ETL testing * Add test distribution of parquet and other outputs. * Use BUILD_ID as test distribution path to ensure uniqueness. * Discontinue parquet distribution. Remove test distribution files. * Remove AWS CLI commands and use gcloud storage instead. * Add AWS credentials from envvars * Create ~/.aws directory before attempting to write credentials * Remove dangling && from now separate commands in build script. * Remove AWS S3 access test. * Don't && the removal of existing paths, in case it isn't there. * Fix source path for AWS S3 distribution. * Remove all testing shortcuts and revert to FULL ETL. * Remove unnecessary copy_to_dist_path function * Use more specific verstion tag matching pattern. * Use more specific version tag matching pattern. * Remove unnecessary conditional in stable deployment * Use more generous timeouts/retries in Zenodo data release script * Relock dependencies. * Switch to new Slack GitHub Action syntax. * Switch to using postgres 17 and fast ETL to run a quick test deployment. * Use postgres 16 since 17 isn't yet available in our Docker image sources. * Update comment about postgres version. * Use Ubuntu 24.04 micromamba image. * Go back to doing full ETL after Postgres 16 test. * Re-lock dependencies * Remove jq, use envvar for PG_VERSION, test fast ETL. * Add a little workflow to test pattern matching. * Fix typo in regex-test workflow. * Use a more restrictive tag matching pattern. * Use a more specific tag pattern to trigger data releases. * Revert to a simple version tag pattern v20* * revert to running full ETL. * Relock dependencies --- .github/ISSUE_TEMPLATE/versioned_release.md | 2 +- .github/workflows/build-deploy-pudl.yml | 24 +- devtools/zenodo/zenodo_data_release.py | 29 +- docker/Dockerfile | 19 +- docker/gcp_pudl_etl.sh | 237 +++++++------ docs/dev/nightly_data_builds.rst | 211 ++++-------- environments/conda-linux-64.lock.yml | 25 +- environments/conda-lock.yml | 360 +++++++++----------- environments/conda-osx-64.lock.yml | 27 +- environments/conda-osx-arm64.lock.yml | 27 +- pyproject.toml | 4 +- 11 files changed, 419 insertions(+), 546 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/versioned_release.md b/.github/ISSUE_TEMPLATE/versioned_release.md index 54e34be01a..5736ac5dc1 100644 --- a/.github/ISSUE_TEMPLATE/versioned_release.md +++ b/.github/ISSUE_TEMPLATE/versioned_release.md @@ -24,7 +24,7 @@ assignees: "" - [ ] Verify [`catalystcoop.pudl` PyPI (software) release](https://pypi.org/project/catalystcoop.pudl/) - [ ] Verify that [PUDL repo archive on Zenodo](https://zenodo.org/doi/10.5281/zenodo.3404014) has been updated w/ new version - [ ] Wait 6-10 hours for a successful build to complete -- [ ] Activate new version on the [RTD admin panel](https://readthedocs.org/projects/catalystcoop-pudl/versions/) and verify that it builds successfully. 
+- [ ] Activate new version on the [RTD admin panel](https://app.readthedocs.org/projects/catalystcoop-pudl/) and verify that it builds successfully. - [ ] Verify that `stable` and the version tag point at same git ref - [ ] Verify that [`stable` docs on RTD](https://catalystcoop-pudl.readthedocs.io/en/stable/) have been updated - [ ] Verify `gs://pudl.catalyst.coop/vYYYY.M.x` has the new expected data. diff --git a/.github/workflows/build-deploy-pudl.yml b/.github/workflows/build-deploy-pudl.yml index 0aac42b1dd..6c74eae2b2 100644 --- a/.github/workflows/build-deploy-pudl.yml +++ b/.github/workflows/build-deploy-pudl.yml @@ -1,3 +1,4 @@ +--- name: build-deploy-pudl on: workflow_dispatch: @@ -11,8 +12,6 @@ on: env: GCP_BILLING_PROJECT: ${{ secrets.GCP_BILLING_PROJECT }} - GCE_INSTANCE: pudl-deployment-tag # This is changed to pudl-deployment-dev if running on a schedule - GCE_INSTANCE_ZONE: ${{ secrets.GCE_INSTANCE_ZONE }} GCS_OUTPUT_BUCKET: gs://builds.catalyst.coop BATCH_JOB_JSON: batch_job.json @@ -24,12 +23,6 @@ jobs: contents: write id-token: write steps: - - name: Use pudl-deployment-dev vm if running on a schedule - if: ${{ (github.event_name == 'schedule') }} - run: | - echo "This action was triggered by a schedule." - echo "GCE_INSTANCE=pudl-deployment-dev" >> $GITHUB_ENV - - name: Checkout Repository uses: actions/checkout@v4 with: @@ -56,7 +49,6 @@ jobs: - name: Show freshly set envvars if: ${{ env.SKIP_BUILD != 'true' }} run: | - echo "GCE_INSTANCE: $GCE_INSTANCE" echo "NIGHTLY_TAG: $NIGHTLY_TAG" echo "BUILD_ID: $BUILD_ID" echo "BATCH_JOB_ID: $BATCH_JOB_ID" @@ -140,8 +132,6 @@ jobs: --container-env BUILD_ID=${{ env.BUILD_ID }} \ --container-env BUILD_REF=${{ github.ref_name }} \ --container-env FLY_ACCESS_TOKEN=${{ secrets.FLY_ACCESS_TOKEN }} \ - --container-env GCE_INSTANCE=${{ env.GCE_INSTANCE }} \ - --container-env GCE_INSTANCE_ZONE=${{ env.GCE_INSTANCE_ZONE }} \ --container-env GCP_BILLING_PROJECT=${{ secrets.GCP_BILLING_PROJECT }} \ --container-env GITHUB_ACTION_TRIGGER=${{ github.event_name }} \ --container-env NIGHTLY_TAG=${{ env.NIGHTLY_TAG }} \ @@ -160,13 +150,13 @@ jobs: if: ${{ env.SKIP_BUILD != 'true' }} run: gcloud batch jobs submit run-etl-${{ env.BATCH_JOB_ID }} --config ${{ env.BATCH_JOB_JSON }} --location us-west1 - - name: Post to a pudl-deployments channel + - name: Post to pudl-deployments channel if: always() id: slack uses: slackapi/slack-github-action@v2 with: - channel-id: "C03FHB9N0PQ" - slack-message: "`${{ env.BUILD_ID }}` build-deploy-pudl status: ${{ (env.SKIP_BUILD == 'true') && 'skipped' || job.status }}\n${{ env.GCS_OUTPUT_BUCKET }}/${{ env.BUILD_ID }}" - env: - channel-id: "C03FHB9N0PQ" - SLACK_BOT_TOKEN: ${{ secrets.PUDL_DEPLOY_SLACK_TOKEN }} + method: chat.postMessage + token: ${{ secrets.PUDL_DEPLOY_SLACK_TOKEN }} + payload: | + text: "`${{ env.BUILD_ID }}` build-deploy-pudl status: ${{ (env.SKIP_BUILD == 'true') && 'skipped' || job.status }}\n${{ env.GCS_OUTPUT_BUCKET }}/${{ env.BUILD_ID }}" + channel: "C03FHB9N0PQ" diff --git a/devtools/zenodo/zenodo_data_release.py b/devtools/zenodo/zenodo_data_release.py index 57861d767a..0cbd761d9c 100755 --- a/devtools/zenodo/zenodo_data_release.py +++ b/devtools/zenodo/zenodo_data_release.py @@ -87,24 +87,26 @@ def __init__(self, env: str): logger.info(f"Using Zenodo token: {token[:4]}...{token[-4:]}") - def retry_request(self, *, method, url, max_tries=5, timeout=5, **kwargs): + def retry_request(self, *, method, url, max_tries=6, timeout=2, **kwargs): """Wrap requests.request in retry logic. 
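        With the defaults used here (timeout=2, max_tries=6), each successive attempt
        uses an exponentially longer timeout of 2, 4, 8, 16, and 32 seconds, sleeps for
        the same interval after a failure, and the final attempt (whose errors are not
        caught) waits up to 64 seconds.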
Passes method, url, and **kwargs to requests.request. """ - base_timeout = 2 for try_num in range(1, max_tries): try: return requests.request( - method=method, url=url, timeout=timeout, **kwargs + method=method, url=url, timeout=timeout**try_num, **kwargs ) except requests.RequestException as e: - timeout = base_timeout**try_num - logger.warning(f"Attempt #{try_num} Got {e}, retrying in {timeout} s") - time.sleep(timeout) + logger.warning( + f"Attempt #{try_num} Got {e}, retrying in {timeout**try_num} s" + ) + time.sleep(timeout**try_num) # don't catch errors on the last try. - return requests.request(method=method, url=url, timeout=timeout, **kwargs) + return requests.request( + method=method, url=url, timeout=timeout**max_tries, **kwargs + ) def get_deposition(self, deposition_id: int) -> _LegacyDeposition: """LEGACY API: Get JSON describing a deposition. @@ -115,7 +117,6 @@ def get_deposition(self, deposition_id: int) -> _LegacyDeposition: method="GET", url=f"{self.base_url}/deposit/depositions/{deposition_id}", headers=self.auth_headers, - timeout=5, ) logger.debug( f"License from JSON for {deposition_id} is " @@ -132,7 +133,6 @@ def get_record(self, record_id: int) -> _NewRecord: method="GET", url=f"{self.base_url}/records/{record_id}", headers=self.auth_headers, - timeout=5, ) return _NewRecord(**response.json()) @@ -146,7 +146,6 @@ def new_record_version(self, record_id: int) -> _NewRecord: method="POST", url=f"{self.base_url}/records/{record_id}/versions", headers=self.auth_headers, - timeout=5, ) return _NewRecord(**response.json()) @@ -162,7 +161,7 @@ def update_deposition_metadata( data = {"metadata": metadata.model_dump()} logger.debug(f"Setting metadata for {deposition_id} to {data}") response = self.retry_request( - method="PUT", url=url, json=data, headers=self.auth_headers, timeout=5 + method="PUT", url=url, json=data, headers=self.auth_headers ) return _LegacyDeposition(**response.json()) @@ -175,7 +174,6 @@ def delete_deposition_file(self, deposition_id: int, file_id) -> requests.Respon method="DELETE", url=f"{self.base_url}/deposit/depositions/{deposition_id}/files/{file_id}", headers=self.auth_headers, - timeout=5, ) def create_bucket_file( @@ -196,7 +194,6 @@ def create_bucket_file( url=url, headers=self.auth_headers, data=file_content, - timeout=5, ) return response @@ -206,7 +203,6 @@ def publish_deposition(self, deposition_id: int) -> _LegacyDeposition: method="POST", url=f"{self.base_url}/deposit/depositions/{deposition_id}/actions/publish", headers=self.auth_headers, - timeout=5, ) return _LegacyDeposition(**response.json()) @@ -375,7 +371,10 @@ def get_html_url(self): required=True, help="Path to a directory whose contents will be uploaded to Zenodo. " "Subdirectories are ignored. Can get files from GCS as well - just prefix " - "with gs://.", + "with gs://. NOTE: nightly build outputs are NOT suitable for creating a Zenodo " + "data release, as they include hundreds of individual Parquet files, which we " + "archive on Zenodo as a single zipfile. Check what files should actually be " + "distributed. E.g. 
it may be *.log *.zip *.json ", ) @click.option( "--publish/--no-publish", diff --git a/docker/Dockerfile b/docker/Dockerfile index 0009a70167..156006cd32 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM mambaorg/micromamba:2.0.2 +FROM mambaorg/micromamba:2.0.3-ubuntu24.04 ENV CONTAINER_HOME=/home/$MAMBA_USER ENV PGDATA=${CONTAINER_HOME}/pgdata @@ -8,10 +8,9 @@ USER root SHELL [ "/bin/bash", "-exo", "pipefail", "-c" ] # Install some linux packages -# awscli requires unzip, less, groff and mandoc # hadolint ignore=DL3008 RUN apt-get update && \ - apt-get install --no-install-recommends -y git jq unzip less groff mandoc postgresql && \ + apt-get install --no-install-recommends -y git postgresql && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* @@ -23,10 +22,13 @@ RUN printf '[GoogleCompute]\nservice_account = default' > /etc/boto.cfg # hadolint ignore=DL3059 RUN usermod -aG postgres "$MAMBA_USER" +# We use an enviroment variable to set the Postgres version because it is also used in +# the nightly build script and this makes it easier to ensure they are all the same. +# Remember to bump the Postgres version. Postgres 17 was released in September, 2024. +ENV PG_VERSION=16 # Create new cluster for Dagster usage that's owned by $MAMBA_USER. -# When the PG major version changes we'll have to update this from 15 to 16 # hadolint ignore=DL3059 -RUN pg_createcluster 15 dagster -u "$MAMBA_USER" -- -A trust +RUN pg_createcluster ${PG_VERSION} dagster -u "$MAMBA_USER" -- -A trust # Switch back to being non-root user and get into the home directory USER $MAMBA_USER @@ -62,13 +64,6 @@ COPY --chown=${MAMBA_USER}:${MAMBA_USER} . ${PUDL_REPO} ENV LD_LIBRARY_PATH=${CONDA_PREFIX}/lib RUN ${CONDA_RUN} pip install --no-cache-dir --no-deps --editable ${PUDL_REPO} -# Install awscli2 -# Change back to root because the install script needs access to /usr/local/aws-cli -# curl commands run within conda environment because curl is installed by conda. -USER root -RUN ${CONDA_RUN} bash -c 'curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && unzip awscliv2.zip && ./aws/install' -USER $MAMBA_USER - # Install flyctl # hadolint ignore=DL3059 RUN ${CONDA_RUN} bash -c 'curl -L https://fly.io/install.sh | sh' diff --git a/docker/gcp_pudl_etl.sh b/docker/gcp_pudl_etl.sh index 7a9b79e9e1..192e7cc540 100644 --- a/docker/gcp_pudl_etl.sh +++ b/docker/gcp_pudl_etl.sh @@ -2,8 +2,6 @@ # This script runs the entire ETL and validation tests in a docker container on a Google Compute Engine instance. # This script won't work locally because it needs adequate GCP permissions. -LOGFILE="${PUDL_OUTPUT}/${BUILD_ID}-pudl-etl.log" - function send_slack_msg() { echo "sending Slack message" curl -X POST -H "Content-type: application/json" -H "Authorization: Bearer ${SLACK_TOKEN}" https://slack.com/api/chat.postMessage --data "{\"channel\": \"C03FHB9N0PQ\", \"text\": \"$1\"}" @@ -30,9 +28,7 @@ function initialize_postgres() { # 3. tell it to actually fail when we mess up, instead of continuing blithely # 4. create a *dagster* user, whose creds correspond with those in docker/dagster.yaml # 5. 
make a database for dagster, which is owned by the dagster user - # - # When the PG major version changes we'll have to update this from 15 to 16 - pg_ctlcluster 15 dagster start && \ + pg_ctlcluster "$PG_VERSION" dagster start && \ createdb -h127.0.0.1 -p5433 && \ psql -v "ON_ERROR_STOP=1" -h127.0.0.1 -p5433 && \ psql -c "CREATE USER dagster WITH SUPERUSER PASSWORD 'dagster_password'" -h127.0.0.1 -p5433 && \ @@ -75,72 +71,42 @@ function save_outputs_to_gcs() { rm -f "$PUDL_OUTPUT/success" } -function upload_to_dist_path() { - GCS_PATH="gs://pudl.catalyst.coop/$1/" - AWS_PATH="s3://pudl.catalyst.coop/$1/" - +function remove_dist_path() { + DIST_PATH=$1 # Only attempt to update outputs if we have an argument # This avoids accidentally blowing away the whole bucket if it's not set. - if [[ -n "$1" ]]; then + if [[ -n "$DIST_PATH" ]]; then + GCS_PATH="gs://pudl.catalyst.coop/$DIST_PATH/" + AWS_PATH="s3://pudl.catalyst.coop/$DIST_PATH/" # If the old outputs don't exist, these will exit with status 1, so we - # don't && them with the rest of the commands. + # don't && them like with many of the other commands. echo "Removing old outputs from $GCS_PATH." - gcloud storage --quiet --billing-project="$GCP_BILLING_PROJECT" rm -r "$GCS_PATH" + gcloud storage rm --quiet --recursive --billing-project="$GCP_BILLING_PROJECT" "$GCS_PATH" echo "Removing old outputs from $AWS_PATH." - aws s3 rm --quiet --recursive "$AWS_PATH" - - echo "Copying outputs to $GCS_PATH:" && \ - gcloud storage --quiet --billing-project="$GCP_BILLING_PROJECT" cp -r "$PUDL_OUTPUT/*" "$GCS_PATH" && \ - echo "Copying outputs to $AWS_PATH" && \ - aws s3 cp --quiet --recursive "$PUDL_OUTPUT/" "$AWS_PATH" + gcloud storage rm --quiet --recursive "$AWS_PATH" else echo "No distribution path provided. Not updating outputs." exit 1 fi } -function distribute_parquet() { - PARQUET_BUCKET="gs://parquet.catalyst.coop" - # Only attempt to update outputs if we have a real value of BUILD_REF - # This avoids accidentally blowing away the whole bucket if it's not set. - echo "Copying outputs to parquet distribution bucket" - if [[ -n "$BUILD_REF" ]]; then - if [[ "$GITHUB_ACTION_TRIGGER" == "schedule" ]]; then - # If running nightly builds, copy outputs to the "nightly" bucket path - DIST_PATH="nightly" - else - # Otherwise we want to copy them to a directory named after the tag/ref - DIST_PATH="$BUILD_REF" - fi - echo "Copying outputs to $PARQUET_BUCKET/$DIST_PATH" && \ - gcloud storage --quiet --billing-project="$GCP_BILLING_PROJECT" cp -r "$PUDL_OUTPUT/parquet/*" "$PARQUET_BUCKET/$DIST_PATH" - - # If running a tagged release, ALSO update the stable distribution bucket path: - if [[ "$GITHUB_ACTION_TRIGGER" == "push" && "$BUILD_REF" == v20* ]]; then - echo "Copying outputs to $PARQUET_BUCKET/stable" && \ - gcloud storage --quiet --billing-project="$GCP_BILLING_PROJECT" cp -r "$PUDL_OUTPUT/parquet/*" "$PARQUET_BUCKET/stable" - fi - fi -} - -function copy_outputs_to_distribution_bucket() { - # Only attempt to update outputs if we have a real value of BUILD_REF +function upload_to_dist_path() { + DIST_PATH=$1 + # Only attempt to update outputs if we have an argument # This avoids accidentally blowing away the whole bucket if it's not set. 
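    # For reference, later in this script the nightly build calls
    # `upload_to_dist_path "nightly"`, a tagged release calls it with "$BUILD_REF" and
    # then "stable", and a workflow_dispatch test run calls it with "$BUILD_ID". Each
    # call first clears any existing objects under gs://pudl.catalyst.coop/<path>/ and
    # s3://pudl.catalyst.coop/<path>/ via remove_dist_path, and then copies the
    # contents of $PUDL_OUTPUT there.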
- echo "Copying outputs to distribution buckets" - if [[ -n "$BUILD_REF" ]]; then - if [[ "$GITHUB_ACTION_TRIGGER" == "schedule" ]]; then - # If running nightly builds, copy outputs to the "nightly" bucket path - DIST_PATH="nightly" - else - # Otherwise we want to copy them to a directory named after the tag/ref - DIST_PATH="$BUILD_REF" - fi - upload_to_dist_path "$DIST_PATH" - - # If running a tagged release, ALSO update the stable distribution bucket path: - if [[ "$GITHUB_ACTION_TRIGGER" == "push" && "$BUILD_REF" == v20* ]]; then - upload_to_dist_path "stable" - fi + if [[ -n "$DIST_PATH" ]]; then + GCS_PATH="gs://pudl.catalyst.coop/$DIST_PATH/" + AWS_PATH="s3://pudl.catalyst.coop/$DIST_PATH/" + # Do not && this command with the others, as it will exit with status 1 if the + # old outputs don't exist. + remove_dist_path "$DIST_PATH" + echo "Copying outputs to $GCS_PATH:" && \ + gcloud storage cp --quiet --recursive --billing-project="$GCP_BILLING_PROJECT" "$PUDL_OUTPUT/*" "$GCS_PATH" && \ + echo "Copying outputs to $AWS_PATH" && \ + gcloud storage cp --quiet --recursive "$PUDL_OUTPUT/*" "$AWS_PATH" + else + echo "No distribution path provided. Not updating outputs." + exit 1 fi } @@ -149,8 +115,11 @@ function zenodo_data_release() { if [[ "$1" == "production" ]]; then ~/pudl/devtools/zenodo/zenodo_data_release.py --no-publish --env "$1" --source-dir "$PUDL_OUTPUT" - else + elif [[ "$1" == "sandbox" ]]; then ~/pudl/devtools/zenodo/zenodo_data_release.py --publish --env "$1" --source-dir "$PUDL_OUTPUT" + else + echo "Invalid Zenodo environment" + exit 1 fi } @@ -177,11 +146,8 @@ function notify_slack() { message+="DISTRIBUTION_BUCKET_SUCCESS: $DISTRIBUTION_BUCKET_SUCCESS\n" message+="GCS_TEMPORARY_HOLD_SUCCESS: $GCS_TEMPORARY_HOLD_SUCCESS \n" message+="ZENODO_SUCCESS: $ZENODO_SUCCESS\n\n" - message+="*Query* logs on .\n\n" - - message+="*Download* logs at \n\n" - + message+="*Download* logs at \n\n" message+="Get *full outputs* at ." send_slack_msg "$message" @@ -230,21 +196,46 @@ function clean_up_outputs_for_distribution() { ######################################################################################## # MAIN SCRIPT ######################################################################################## +LOGFILE="${PUDL_OUTPUT}/${BUILD_ID}.log" + # Initialize our success variables so they all definitely have a value to check ETL_SUCCESS=0 SAVE_OUTPUTS_SUCCESS=0 UPDATE_NIGHTLY_SUCCESS=0 UPDATE_STABLE_SUCCESS=0 DATASETTE_SUCCESS=0 -DISTRIBUTE_PARQUET_SUCCESS=0 CLEAN_UP_OUTPUTS_SUCCESS=0 DISTRIBUTION_BUCKET_SUCCESS=0 ZENODO_SUCCESS=0 GCS_TEMPORARY_HOLD_SUCCESS=0 +# Set the build type based on the action trigger and tag +if [[ "$GITHUB_ACTION_TRIGGER" == "push" && "$BUILD_REF" =~ ^v20.*$ ]]; then + BUILD_TYPE="stable" +elif [[ "$GITHUB_ACTION_TRIGGER" == "schedule" ]]; then + BUILD_TYPE="nightly" +elif [[ "$GITHUB_ACTION_TRIGGER" == "workflow_dispatch" ]]; then + BUILD_TYPE="workflow_dispatch" +else + echo "Unknown build type, exiting!" 
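    # The combinations handled above are: a "schedule" trigger (nightly build), a
    # "push" trigger with a tag matching v20* (stable release), and a manual
    # "workflow_dispatch" run. Any other trigger/ref combination falls through to
    # this branch and aborts the build.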
+ echo "GITHUB_ACTION_TRIGGER: $GITHUB_ACTION_TRIGGER" + echo "BUILD_REF: $BUILD_REF" + exit 1 +fi + # Set these variables *only* if they are not already set by the container or workflow: : "${PUDL_GCS_OUTPUT:=gs://builds.catalyst.coop/$BUILD_ID}" -: "${PUDL_SETTINGS_YML:=home/mambauser/pudl/src/pudl/package_data/settings/etl_full.yml}" +: "${PUDL_SETTINGS_YML:=/home/mambauser/pudl/src/pudl/package_data/settings/etl_full.yml}" + +# Save credentials for working with AWS S3 +# set +x / set -x is used to avoid printing the AWS credentials in the logs +echo "Setting AWS credentials" +mkdir -p ~/.aws +echo "[default]" > ~/.aws/credentials +set +x +echo "aws_access_key_id = ${AWS_ACCESS_KEY_ID}" >> ~/.aws/credentials +echo "aws_secret_access_key = ${AWS_SECRET_ACCESS_KEY}" >> ~/.aws/credentials +set -x # Run ETL. Copy outputs to GCS and shutdown VM if ETL succeeds or fails # 2>&1 redirects stderr to stdout. @@ -252,59 +243,86 @@ run_pudl_etl 2>&1 | tee "$LOGFILE" ETL_SUCCESS=${PIPESTATUS[0]} # This needs to happen regardless of the ETL outcome: -pg_ctlcluster 15 dagster stop 2>&1 | tee -a "$LOGFILE" +pg_ctlcluster "$PG_VERSION" dagster stop 2>&1 | tee -a "$LOGFILE" save_outputs_to_gcs 2>&1 | tee -a "$LOGFILE" SAVE_OUTPUTS_SUCCESS=${PIPESTATUS[0]} -# if pipeline is successful, distribute + publish datasette -if [[ $ETL_SUCCESS == 0 ]]; then - if [[ "$GITHUB_ACTION_TRIGGER" == "schedule" ]]; then - merge_tag_into_branch "$NIGHTLY_TAG" nightly 2>&1 | tee -a "$LOGFILE" - UPDATE_NIGHTLY_SUCCESS=${PIPESTATUS[0]} - fi - # If running a tagged release, merge the tag into the stable branch - if [[ "$GITHUB_ACTION_TRIGGER" == "push" && "$BUILD_REF" == v20* ]]; then - merge_tag_into_branch "$BUILD_REF" stable 2>&1 | tee -a "$LOGFILE" - UPDATE_STABLE_SUCCESS=${PIPESTATUS[0]} - fi +if [[ $ETL_SUCCESS != 0 ]]; then + notify_slack "failure" + exit 1 +fi - # Deploy the updated data to datasette if we're on main - if [[ "$BUILD_REF" == "main" ]]; then - python ~/pudl/devtools/datasette/publish.py --production 2>&1 | tee -a "$LOGFILE" - DATASETTE_SUCCESS=${PIPESTATUS[0]} - fi +if [[ "$BUILD_TYPE" == "nightly" ]]; then + merge_tag_into_branch "$NIGHTLY_TAG" nightly 2>&1 | tee -a "$LOGFILE" + UPDATE_NIGHTLY_SUCCESS=${PIPESTATUS[0]} + # Update our datasette deployment + python ~/pudl/devtools/datasette/publish.py --production 2>&1 | tee -a "$LOGFILE" + DATASETTE_SUCCESS=${PIPESTATUS[0]} + # Remove files we don't want to distribute and zip SQLite and Parquet outputs + clean_up_outputs_for_distribution 2>&1 | tee -a "$LOGFILE" + CLEAN_UP_OUTPUTS_SUCCESS=${PIPESTATUS[0]} + # Copy cleaned up outputs to the S3 and GCS distribution buckets + upload_to_dist_path "nightly" | tee -a "$LOGFILE" + DISTRIBUTION_BUCKET_SUCCESS=${PIPESTATUS[0]} + # Remove individual parquet outputs and distribute just the zipped parquet + # archives on Zenodo, due to their number of files limit + rm -f "$PUDL_OUTPUT"/*.parquet + # push a data release to Zenodo sandbox + zenodo_data_release "$ZENODO_TARGET_ENV" 2>&1 | tee -a "$LOGFILE" + ZENODO_SUCCESS=${PIPESTATUS[0]} + +elif [[ "$BUILD_TYPE" == "stable" ]]; then + merge_tag_into_branch "$BUILD_REF" stable 2>&1 | tee -a "$LOGFILE" + UPDATE_STABLE_SUCCESS=${PIPESTATUS[0]} + # Remove files we don't want to distribute and zip SQLite and Parquet outputs + clean_up_outputs_for_distribution 2>&1 | tee -a "$LOGFILE" + CLEAN_UP_OUTPUTS_SUCCESS=${PIPESTATUS[0]} + # Copy cleaned up outputs to the S3 and GCS distribution buckets + upload_to_dist_path "$BUILD_REF" | tee -a "$LOGFILE" && \ + 
upload_to_dist_path "stable" | tee -a "$LOGFILE" + DISTRIBUTION_BUCKET_SUCCESS=${PIPESTATUS[0]} + # Remove individual parquet outputs and distribute just the zipped parquet + # archives on Zenodo, due to their number of files limit + rm -f "$PUDL_OUTPUT"/*.parquet + # push a data release to Zenodo production + zenodo_data_release "$ZENODO_TARGET_ENV" 2>&1 | tee -a "$LOGFILE" + ZENODO_SUCCESS=${PIPESTATUS[0]} + # This is a versioned release. Ensure that outputs can't be accidentally deleted. + # We can only do this on the GCS bucket, not S3 + gcloud storage --billing-project="$GCP_BILLING_PROJECT" objects update "gs://pudl.catalyst.coop/$BUILD_REF/*" --temporary-hold 2>&1 | tee -a "$LOGFILE" + GCS_TEMPORARY_HOLD_SUCCESS=${PIPESTATUS[0]} + +elif [[ "$BUILD_TYPE" == "workflow_dispatch" ]]; then + # Remove files we don't want to distribute and zip SQLite and Parquet outputs + clean_up_outputs_for_distribution 2>&1 | tee -a "$LOGFILE" + CLEAN_UP_OUTPUTS_SUCCESS=${PIPESTATUS[0]} + # Upload to GCS / S3 just to test that it works. + upload_to_dist_path "$BUILD_ID" | tee -a "$LOGFILE" + DISTRIBUTION_BUCKET_SUCCESS=${PIPESTATUS[0]} + Remove the uploaded files: + # Remove those uploads since they were just for testing. + remove_dist_path "$BUILD_ID" | tee -a "$LOGFILE" + # Remove individual parquet outputs and distribute just the zipped parquet + # archives on Zenodo, due to their number of files limit + rm -f "$PUDL_OUTPUT"/*.parquet + # push a data release to Zenodo sandbox + zenodo_data_release "$ZENODO_TARGET_ENV" 2>&1 | tee -a "$LOGFILE" + ZENODO_SUCCESS=${PIPESTATUS[0]} - # TODO: this behavior should be controlled by on/off switch here and this logic - # should be moved to the triggering github action. Having it here feels fragmented. - # Distribute outputs if branch is main or the build was triggered by tag push - if [[ "$GITHUB_ACTION_TRIGGER" == "push" || "$BUILD_REF" == "main" ]]; then - # Distribute Parquet outputs to a private bucket - distribute_parquet 2>&1 | tee -a "$LOGFILE" - DISTRIBUTE_PARQUET_SUCCESS=${PIPESTATUS[0]} - # Remove some cruft from the builds that we don't want to distribute - clean_up_outputs_for_distribution 2>&1 | tee -a "$LOGFILE" - CLEAN_UP_OUTPUTS_SUCCESS=${PIPESTATUS[0]} - # Copy cleaned up outputs to the S3 and GCS distribution buckets - copy_outputs_to_distribution_bucket | tee -a "$LOGFILE" - DISTRIBUTION_BUCKET_SUCCESS=${PIPESTATUS[0]} - # Remove individual parquet outputs and distribute just the zipped parquet - # archives on Zenodo, due to their number of files limit - rm -f "$PUDL_OUTPUT"/*.parquet && \ - # Push a data release to Zenodo for long term accessiblity - zenodo_data_release "$ZENODO_TARGET_ENV" 2>&1 | tee -a "$LOGFILE" - ZENODO_SUCCESS=${PIPESTATUS[0]} - fi - # If running a tagged release, ensure that outputs can't be accidentally deleted - # It's not clear that an object lock can be applied in S3 with the AWS CLI - if [[ "$GITHUB_ACTION_TRIGGER" == "push" && "$BUILD_REF" == v20* ]]; then - gcloud storage --billing-project="$GCP_BILLING_PROJECT" objects update "gs://pudl.catalyst.coop/$BUILD_REF/*" --temporary-hold 2>&1 | tee -a "$LOGFILE" - GCS_TEMPORARY_HOLD_SUCCESS=${PIPESTATUS[0]} - fi +else + echo "Unknown build type, exiting!" 
+ echo "BUILD_TYPE: $BUILD_TYPE" + echo "GITHUB_ACTION_TRIGGER: $GITHUB_ACTION_TRIGGER" + echo "BUILD_REF: $BUILD_REF" + notify_slack "failure" + exit 1 fi # This way we also save the logs from latter steps in the script gcloud storage --quiet cp "$LOGFILE" "$PUDL_GCS_OUTPUT" +# Remove the AWS credentials file just in case the disk image sticks around +rm -f ~/.aws/credentials # Notify slack about entire pipeline's success or failure; if [[ $ETL_SUCCESS == 0 && \ @@ -312,7 +330,6 @@ if [[ $ETL_SUCCESS == 0 && \ $UPDATE_NIGHTLY_SUCCESS == 0 && \ $UPDATE_STABLE_SUCCESS == 0 && \ $DATASETTE_SUCCESS == 0 && \ - $DISTRIBUTE_PARQUET_SUCCESS == 0 && \ $CLEAN_UP_OUTPUTS_SUCCESS == 0 && \ $DISTRIBUTION_BUCKET_SUCCESS == 0 && \ $GCS_TEMPORARY_HOLD_SUCCESS == 0 && \ diff --git a/docs/dev/nightly_data_builds.rst b/docs/dev/nightly_data_builds.rst index 34d302912e..475a820c95 100644 --- a/docs/dev/nightly_data_builds.rst +++ b/docs/dev/nightly_data_builds.rst @@ -56,64 +56,6 @@ occurred: process. If the "transient" problem persists, bring it up with the person managing the builds. -Debugging a Broken Build ------------------------- - -If a build has failed, usually the VM will have shut down. You'll have to figure out -which VM it was running on and then restart it before you can do anything else. - -To find the VM name, go into the `Github Action listing -`__ -and click on your run. The ``GCE_INSTANCE_NAME`` that gets printed in "Print -action vars" is what you're after. - -Then you can go to the `Google Compute Engine -`__ -page and restart it. - -Once that's started, you should be able to SSH to the VM using a command like: - -.. code:: - - gcloud compute ssh pudl-deployment-tag --zone=us-west1-a - -You may run into some permissions issues here, in which case you probably need the -``Service Account User`` role on your gcloud user. - -Now you want to get some logs about what's failing. - -First, try ``docker ps`` - this should show two images, one ``pudl``-ey one and -one that's the Google ``stackdriver-agent`` which handles monitoring:: - - CONTAINER ID IMAGE NAMES - d678f709d1f5 catalystcoop/pudl-etl... klt-pudl-deployment-tag-luui - aa3163671da4 gcr.io/stackdriver-ag... stackdriver-logging-agent - -If the image is running, great! You can get logs via ``docker logs -`` (use ``docker logs -f`` if the process is still -going and you want to stream logs from the container.) - -You can also attach a shell to the container and poke around with ``docker exec --it bash``. This is really helpful if something has failed and you -want to try to fix the code & re-run, without having to re-run everything -before the failed task. - -.. Warning:: - - If you use ``docker attach`` as recommended by the login message, and then - hit Ctrl-C, you will interrupt the running build! - -Sometimes you'll see two containers running, but neither of them are PUDL. -That's because the VM first spins up a "loader" container that downloads the -PUDL image, then exits the loader and starts the PUDL image. - -If you don't see two containers running, then there's probably some issue with -the PUDL container startup itself. To find logs about that, run ``sudo -journalctl -u konlet-startup | tail -n 1000 | less``. You should be able to see -any errors that occurred during container startup, and also the container ID, -which you can then boot into via ``docker run -it bash``. 
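With builds now running on ephemeral Google Batch VMs (see below), there is no
long-lived VM to SSH into when something breaks. The starting point for debugging is
instead the Batch job itself and the build's directory in ``gs://builds.catalyst.coop``.
As a rough sketch -- the ``run-etl-<BATCH_JOB_ID>`` job name and ``us-west1`` location
are assumptions based on ``build-deploy-pudl.yml``, and the build ID is reported in the
``pudl-deployments`` Slack message -- you might do something like:

.. code::

    # Check on the Batch job itself (job name and location mirror the values
    # used in the build-deploy-pudl workflow):
    gcloud batch jobs describe run-etl-<BATCH_JOB_ID> --location us-west1

    # List the outputs the build wrote to GCS, then pull down the logs:
    gcloud storage ls gs://builds.catalyst.coop/<BUILD_ID>/
    gcloud storage cp "gs://builds.catalyst.coop/<BUILD_ID>/*.log" ./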
-
-
 The GitHub Action
 -----------------
 The ``build-deploy-pudl`` GitHub action contains the main coordination logic for
@@ -123,35 +65,24 @@ on code releases, and PUDL's code and data are tested every night. The action is
 modeled after an `example from the setup-gcloud GitHub action repository
 `__.
 The ``gcloud`` command in ``build-deploy-pudl`` requires certain Google Cloud
-Platform (GCP) permissions to start and update the GCE instance. The
-``gcloud`` command authenticates using a service account key for the
-``deploy-pudl-github-action`` service account stored in PUDL's GitHub secrets
-as ``DEPLOY_PUDL_SA_KEY``. The ``deploy-pudl-github-action`` service account has
-the `Compute Instance Admin (v1) IAM `__
-role on the GCE instances to update the container and start the instance.
+Platform (GCP) permissions to start and update the GCE instance. We use Workload
+Identity Federation to authenticate the GitHub Action with GCP in the GitHub Action
+workflow.

 Google Compute Engine
 ---------------------
 The PUDL image is deployed on a `Container Optimized GCE
 `__
-instance, a type of virtual machine (VM) built to run containers. The
-``pudl-deployment-dev`` and ``pudl-deployment-tag`` instances in the
-``catalyst-cooperative-pudl`` GCP project handle deployments from the ``main`` branch
-and tags or manually initiated ``workflow_dispatch`` runs respectively. There are two
-VMs so a scheduled and a tag build can run at the same time.
-
-.. note::
+instance, a type of virtual machine (VM) built to run containers.

-    If a tag build starts before the previous tag build has finished, the previous build
-    will be interrupted.
+We use ephemeral VMs created with `Google Batch `__
+to run the nightly builds. Once the build has finished -- successfully or not -- the VM
+is shut down. The build VMs use the ``e2-highmem-8`` machine type (8 CPUs and 64GB of
+RAM) to accommodate the PUDL ETL's memory-intensive steps. Currently, these VMs do not
+have swap space enabled, so if they run out of memory, the build will immediately
+terminate.

-The build VMs use the e2-highmem-8 machine type (64 GB of RAM and 8 CPUs) to accommodate
-the PUDL ETL's memory-intensive steps. Currently, these VMs do not have swap space
-enabled, so if they run out of memory, the build will immediately terminate.
-
-Each GCE VM has a service account that gives the VM permissions to GCP resources.
-The two PUDL deployment VMs share the ``deploy-pudl-vm-service-account``. This
-service account has permissions to:
+The ``deploy-pudl-vm-service-account`` service account has permissions to:

 1. Write logs to Cloud Logging.
 2. Start and stop the VM so the container can shut the instance off when the ETL
@@ -159,12 +90,16 @@ service account has permissions to:
 3. Bill the ``catalyst-cooperative-pudl`` project for egress fees from accessing
    the ``zenodo-cache.catalyst.coop`` bucket. Note: The ``catalyst-cooperative-pudl``
    won't be charged anything because the data stays within Google's network.
-4. Write logs and outputs to the ``gs://builds.catalyst.coop``,
+4. Write logs and build outputs to the ``gs://builds.catalyst.coop``,
    ``gs://pudl.catalyst.coop`` and ``s3://pudl.catalyst.coop`` buckets.
-   The egress and storage fees of the s3 bucket are covered by
+   Egress and storage costs for the S3 bucket are covered by
   `Amazon Web Services's Open Data Sponsorship Program
   `__.

+Build outputs and logs are saved to the ``gs://builds.catalyst.coop`` bucket so you can
+access them later.
Build logs and outputs are retained for 30 days and then deleted
+automatically.
+
 Docker
 ------
 The Docker image the VMs pull installs PUDL into a mamba environment. The VMs
@@ -173,8 +108,9 @@ are configured to run the ``docker/gcp_pudl_etl.sh`` script. This script:

 1. Notifies the ``pudl-deployments`` Slack channel that a deployment has started.
    Note: if the container is manually stopped, slack will not be notified.
 2. Runs the ETL and full test suite.
-3. Copies the outputs and logs to a directory in the ``pudl-etl-logs`` bucket. The
-   directory is named using the git SHA of the commit that launched the build.
+3. Copies the outputs and logs to a directory in the ``gs://builds.catalyst.coop``
+   bucket. The directory is named using the git SHA of the commit that launched the
+   build.
 4. Copies the outputs to the ``gs://pudl.catalyst.coop`` and
    ``s3://pudl.catalyst.coop`` buckets if the ETL and test suite run successfully.
 5. Notifies the ``pudl-deployments`` Slack channel with the final build status.

@@ -184,8 +120,8 @@ permissions.

 How to access the nightly build outputs from AWS
 ------------------------------------------------
-To access the nightly build outputs you can download
-the data directly from the ``s3://pudl.catalyst.coop`` bucket. To do this, you'll
+You can download the outputs from a successful nightly build directly from the
+``s3://pudl.catalyst.coop`` bucket. To do this, you'll
 need to `follow the instructions `__
 for installing the AWS CLI tool.

@@ -206,10 +142,9 @@ You should see a list of directories with version names:

     PRE v2023.12.01/
     ...

-The ``--no-sign-request`` flag allows you to make requsts to the
-public bucket without having to load AWS credentials. If you don't
-include this flag when interacting with the ``s3://pudl.catalyst.coop``
-bucket, ``aws`` will give you an authentication error.
+The ``--no-sign-request`` flag allows you to make requests to the public bucket without
+having to load AWS credentials. If you don't include this flag when interacting with the
+``s3://pudl.catalyst.coop`` bucket, ``aws`` will give you an authentication error.

 .. warning::

@@ -222,23 +157,10 @@ which behaves very much like the Unix ``cp`` command:

 .. code::

-    aws s3 cp s3://pudl.catalyst.coop/nightly/pudl.sqlite.gz ./ --no-sign-request
-
-.. note::
-
-    To reduce network transfer times, we ``gzip`` the SQLite database files, which can
-    be quite large when uncompressed. To decompress them locally, at the command line
-    on Linux, MacOS, or Windows you can use the ``gunzip`` command.
+    aws s3 cp s3://pudl.catalyst.coop/nightly/pudl.sqlite.zip ./ --no-sign-request

-    .. code-block:: console
-
-        $ gunzip *.sqlite.gz
-
-    On Windows you can also use a 3rd party tool like
-    `7zip `__.
-
-If you wanted to download all of the build outputs (more than 10GB!) you could use ``cp
---recursive`` flag on the whole directory:
+If you wanted to download all of the build outputs (more than 10GB!) you can use a
+recursive copy:

 .. code::

@@ -254,20 +176,11 @@ For more details on how to use ``aws`` in general see the

 How to access the nightly build outputs and logs (for the Catalyst team only)
 -----------------------------------------------------------------------------
-Sometimes it is helpful to download the logs and data outputs of
-nightly builds when debugging failures. To do this you'll need to
-set up the Google Cloud software Development Kit (SDK).
-
-Install the `gcloud utilities `__ on your
-computer. There are several ways to do this.
We recommend using ``conda`` or its faster -sibling ``mamba``. If you're not using ``conda`` environments, there are other -ways to install the Google Cloud SDK explained in the link above. - -.. code:: - - mamba install -c conda-forge google-cloud-sdk +Sometimes it is helpful to download the logs and data outputs of nightly builds when +debugging failures. To do this you'll need to set up the Google Cloud software +Development Kit (SDK). It is installed as part of the ``pudl-dev`` conda environment. -Log into the account you used to create your new project above by running: +To authenticate with Google Cloud Platform (GCP) you'll need to run the following: .. code:: @@ -307,35 +220,33 @@ like this: .. code:: - gcloud storage ls --long --readable-sizes gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ - - 804.57 MiB 2024-01-03T11:19:15Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/censusdp1tract.sqlite - 5.01 GiB 2024-01-03T11:20:02Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/core_epacems__hourly_emissions.parquet - 759.32 MiB 2024-01-03T11:19:17Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc1_dbf.sqlite - 813.52 MiB 2024-01-03T11:19:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc1_xbrl.sqlite - 1.65 MiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc1_xbrl_datapackage.json - 6.94 MiB 2024-01-03T11:18:19Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc1_xbrl_taxonomy_metadata.json - 282.71 MiB 2024-01-03T11:19:02Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc2_dbf.sqlite - 89.55 MiB 2024-01-03T11:18:40Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc2_xbrl.sqlite - 1.88 MiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc2_xbrl_datapackage.json - 6.78 MiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc2_xbrl_taxonomy_metadata.json - 8.25 MiB 2024-01-03T11:18:20Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc60_dbf.sqlite - 20.02 MiB 2024-01-03T11:18:22Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc60_xbrl.sqlite - 731.31 KiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc60_xbrl_datapackage.json - 1.77 MiB 2024-01-03T11:18:19Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc60_xbrl_taxonomy_metadata.json - 153.72 MiB 2024-01-03T11:18:54Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc6_dbf.sqlite - 62.01 MiB 2024-01-03T11:18:28Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc6_xbrl.sqlite - 1.02 MiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc6_xbrl_datapackage.json - 2.74 MiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc6_xbrl_taxonomy_metadata.json - 905.31 MiB 2024-01-03T11:19:17Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc714_xbrl.sqlite - 58.41 KiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc714_xbrl_datapackage.json - 187.86 KiB 2024-01-03T11:18:18Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/ferc714_xbrl_taxonomy_metadata.json - 4.05 MiB 2024-01-03T11:18:19Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/metadata.yml - 4 MiB 2024-01-03T12:09:34Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/pudl-etl.log - 13.1 GiB 2024-01-03T11:21:41Z gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/pudl.sqlite - 0 B 2024-01-03T11:18:18Z 
gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/success
-      gs://builds.catalyst.coop/2024-01-03-0605-e9a91be-dev/core_epacems__hourly_emissions/
-    TOTAL: 25 objects, 23557650395 bytes (21.94 GiB)
+    gcloud storage ls --long --readable-sizes gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main
+
+      6.60MiB  2024-11-15T13:28:20Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/2024-11-15-0603-60f488239-main-pudl-etl.log
+    804.57MiB  2024-11-15T12:40:35Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/censusdp1tract.sqlite
+    759.32MiB  2024-11-15T12:41:01Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc1_dbf.sqlite
+      1.19GiB  2024-11-15T12:41:12Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc1_xbrl.sqlite
+      2.16MiB  2024-11-15T12:39:23Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc1_xbrl_datapackage.json
+      6.95MiB  2024-11-15T12:39:23Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc1_xbrl_taxonomy_metadata.json
+    282.71MiB  2024-11-15T12:40:40Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc2_dbf.sqlite
+    127.39MiB  2024-11-15T12:39:59Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc2_xbrl.sqlite
+      2.46MiB  2024-11-15T12:40:54Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc2_xbrl_datapackage.json
+      6.82MiB  2024-11-15T12:40:48Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc2_xbrl_taxonomy_metadata.json
+      8.25MiB  2024-11-15T12:39:22Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc60_dbf.sqlite
+     27.89MiB  2024-11-15T12:39:24Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc60_xbrl.sqlite
+    942.19kiB  2024-11-15T12:39:22Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc60_xbrl_datapackage.json
+      1.77MiB  2024-11-15T12:39:22Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc60_xbrl_taxonomy_metadata.json
+    153.72MiB  2024-11-15T12:41:03Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc6_dbf.sqlite
+     90.51MiB  2024-11-15T12:41:09Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc6_xbrl.sqlite
+      1.32MiB  2024-11-15T12:40:47Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc6_xbrl_datapackage.json
+      2.74MiB  2024-11-15T12:39:22Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc6_xbrl_taxonomy_metadata.json
+      1.38GiB  2024-11-15T12:41:06Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc714_xbrl.sqlite
+     83.39kiB  2024-11-15T12:40:46Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc714_xbrl_datapackage.json
+    187.86kiB  2024-11-15T12:40:46Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/ferc714_xbrl_taxonomy_metadata.json
+     15.06GiB  2024-11-15T12:42:17Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/pudl.sqlite
+           0B  2024-11-15T12:39:22Z  gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/success
+                                     gs://builds.catalyst.coop/2024-11-15-0603-60f488239-main/parquet/
+  TOTAL: 23 objects, 21331056422 bytes (19.87GiB)

 If you want to copy these files down directly to your computer, you can use
 the ``gcloud storage cp`` command, which behaves very much like the Unix ``cp`` command:
@@ -344,8 +255,8 @@ the ``gcloud storage cp`` command, which behaves very much like the Unix ``cp``

 .. code::

    gcloud storage cp gs://builds.catalyst.coop//pudl.sqlite ./

-If you wanted to download all of the build outputs (more than 10GB!)
you could use ``cp --r`` on the whole directory: +If you need to download all of the build outputs (~20GB!) you can do a recursive copy of +the whole directory hierarchy (note that this will incurr egress charges): .. code:: diff --git a/environments/conda-linux-64.lock.yml b/environments/conda-linux-64.lock.yml index e784d93fa7..c851c61783 100644 --- a/environments/conda-linux-64.lock.yml +++ b/environments/conda-linux-64.lock.yml @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 6cc1175217c86c9eda2619a9f1debb83afde9256846ae4b2355ba02488c7883b +# input_hash: c1ed864e3eed9f6346940754a63fe1f5957a0e2d1cfd95d12a856a1ebdbef548 channels: - conda-forge @@ -10,7 +10,7 @@ dependencies: - addfips=0.4.2=pyhd8ed1ab_0 - aiofiles=24.1.0=pyhd8ed1ab_0 - aiohappyeyeballs=2.4.3=pyhd8ed1ab_0 - - aiohttp=3.11.2=py312h178313f_0 + - aiohttp=3.11.4=py312h178313f_1 - aiosignal=1.3.1=pyhd8ed1ab_0 - alabaster=1.0.0=pyhd8ed1ab_0 - alembic=1.14.0=pyhd8ed1ab_0 @@ -57,8 +57,8 @@ dependencies: - bleach=6.2.0=pyhd8ed1ab_0 - blinker=1.9.0=pyhff2d567_0 - blosc=1.21.6=hef167b5_0 - - boto3=1.35.63=pyhd8ed1ab_0 - - botocore=1.35.63=pyge310_1234567_0 + - boto3=1.35.64=pyhd8ed1ab_0 + - botocore=1.35.64=pyge310_1234567_0 - bottleneck=1.4.2=py312hc0a28a1_0 - branca=0.7.2=pyhd8ed1ab_0 - brotli=1.1.0=hb9d3cd8_2 @@ -103,9 +103,9 @@ dependencies: - dagster-pipes=1.9.2=pyhd8ed1ab_0 - dagster-postgres=0.25.2=pyhd8ed1ab_0 - dagster-webserver=1.9.2=pyhd8ed1ab_0 - - dask-core=2024.11.2=pyhd8ed1ab_0 + - dask-core=2024.11.2=pyhff2d567_1 - dask-expr=1.1.19=pyhd8ed1ab_0 - - databricks-sdk=0.37.0=pyhd8ed1ab_0 + - databricks-sdk=0.38.0=pyhd8ed1ab_0 - datasette=0.65=pyhd8ed1ab_0 - dbus=1.13.6=h5008d03_3 - debugpy=1.8.8=py312h2ec8cdc_0 @@ -201,7 +201,6 @@ dependencies: - idna=3.10=pyhd8ed1ab_0 - imagesize=1.4.1=pyhd8ed1ab_0 - importlib-metadata=8.5.0=pyha770c72_0 - - importlib_metadata=8.5.0=hd8ed1ab_0 - importlib_resources=6.4.5=pyhd8ed1ab_0 - iniconfig=2.0.0=pyhd8ed1ab_0 - ipykernel=6.29.5=pyh3099207_0 @@ -330,16 +329,16 @@ dependencies: - mergedeep=1.3.4=pyhd8ed1ab_0 - minizip=4.0.7=h401b404_0 - mistune=3.0.2=pyhd8ed1ab_0 - - mlflow=2.17.2=h7900ff3_0 - - mlflow-skinny=2.17.2=py312h7900ff3_0 - - mlflow-ui=2.17.2=py312h7900ff3_0 + - mlflow=2.18.0=h7900ff3_0 + - mlflow-skinny=2.18.0=py312h7900ff3_0 + - mlflow-ui=2.18.0=py312h7900ff3_0 - more-itertools=10.5.0=pyhd8ed1ab_0 - msgpack-python=1.1.0=py312h68727a3_0 - multidict=6.1.0=py312h178313f_1 - multimethod=1.9.1=pyhd8ed1ab_0 - munkres=1.1.4=pyh9f0ad1d_0 - mypy_extensions=1.0.0=pyha770c72_0 - - narwhals=1.14.0=pyhff2d567_0 + - narwhals=1.14.1=pyhff2d567_0 - nbclassic=1.1.0=pyhd8ed1ab_0 - nbclient=0.10.0=pyhd8ed1ab_0 - nbconvert=7.16.4=hd8ed1ab_1 @@ -503,11 +502,11 @@ dependencies: - sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_0 - splink=4.0.5=pyhd8ed1ab_0 - sqlalchemy=2.0.36=py312h66e93f0_0 - - sqlglot=25.30.0=pyhff2d567_1 + - sqlglot=25.31.4=pyhff2d567_0 - sqlite=3.47.0=h9eae976_1 - sqlparse=0.5.2=pyhff2d567_0 - stack_data=0.6.2=pyhd8ed1ab_0 - - starlette=0.41.2=pyha770c72_0 + - starlette=0.41.3=pyh7900ff3_0 - stevedore=5.3.0=pyhd8ed1ab_0 - stringcase=1.2.0=pyhd8ed1ab_1 - structlog=24.4.0=pyhd8ed1ab_0 diff --git a/environments/conda-lock.yml b/environments/conda-lock.yml index 35fb384de7..b96856ea96 100644 --- a/environments/conda-lock.yml +++ b/environments/conda-lock.yml @@ -15,9 +15,9 @@ version: 1 metadata: content_hash: - linux-64: 6cc1175217c86c9eda2619a9f1debb83afde9256846ae4b2355ba02488c7883b - osx-64: 
bcf2452442dc2658e1840c236a2756bc5b5ff91c1c9e4f14e2af898c7efbf1fb - osx-arm64: d195cdc6e756c1121586b7add937cff32983380eb304e5c37cfb159de9b5c6cd + linux-64: c1ed864e3eed9f6346940754a63fe1f5957a0e2d1cfd95d12a856a1ebdbef548 + osx-64: 3b411f767af9c3cf3d53a12e2b181931d0a9d5e8a26eec9844c36f4f2eeafe34 + osx-arm64: 0b79779e9803db47b1e57d14193b913280cf82b1fe3cc43a71d098160a71abe8 channels: - url: conda-forge used_env_vars: [] @@ -164,7 +164,7 @@ package: category: main optional: false - name: aiohttp - version: 3.11.2 + version: 3.11.4 manager: conda platform: linux-64 dependencies: @@ -179,14 +179,14 @@ package: python: ">=3.12,<3.13.0a0" python_abi: 3.12.* yarl: ">=1.17.0,<2.0" - url: https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.2-py312h178313f_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.4-py312h178313f_1.conda hash: - md5: e2f92c2c85d3a0d376947847942ed36c - sha256: 020315ba01fcd1b53fcb81280a00b8de7051ecf5c4503fc3b3281df0cbca05ed + md5: cd87a5581629dbe1997740e65ccd4722 + sha256: c421da03a70723966ded071eb9a7dcd3d0efae1de706cf7056522ef803bd0357 category: main optional: false - name: aiohttp - version: 3.11.2 + version: 3.11.4 manager: conda platform: osx-64 dependencies: @@ -200,14 +200,14 @@ package: python: ">=3.12,<3.13.0a0" python_abi: 3.12.* yarl: ">=1.17.0,<2.0" - url: https://conda.anaconda.org/conda-forge/osx-64/aiohttp-3.11.2-py312h3520af0_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/aiohttp-3.11.4-py312h3520af0_1.conda hash: - md5: 59275e907c94aea5764ef7a5bb94287e - sha256: db3e98f45d3a2b0c0aa38327a7e266b51994ce556d791bbf908b73f2405f2381 + md5: 8cb26acc42cb098f03fd7567582fcd3a + sha256: f928168b03b4c629c8925e4329b5d484e3e5c1d24ca99e9f07bcce666c0f4139 category: main optional: false - name: aiohttp - version: 3.11.2 + version: 3.11.4 manager: conda platform: osx-arm64 dependencies: @@ -221,10 +221,10 @@ package: python: ">=3.12,<3.13.0a0" python_abi: 3.12.* yarl: ">=1.17.0,<2.0" - url: https://conda.anaconda.org/conda-forge/osx-arm64/aiohttp-3.11.2-py312h998013c_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aiohttp-3.11.4-py312h998013c_1.conda hash: - md5: 1c0150beac996afe9d8ee8297d324352 - sha256: 5e619945d37829cde16c5add63abb042ba953f0dc92b94abb990000a6ba3e191 + md5: 31c809c1790430b11963ac29486256bf + sha256: a9ac49857e20ac046b7fcbcb3f9df2d33cdc934480a512f3cfac5b55d295410b category: main optional: false - name: aiosignal @@ -2286,52 +2286,52 @@ package: category: main optional: false - name: boto3 - version: 1.35.63 + version: 1.35.64 manager: conda platform: linux-64 dependencies: - botocore: ">=1.35.63,<1.36.0" + botocore: ">=1.35.64,<1.36.0" jmespath: ">=0.7.1,<2.0.0" python: ">=3.8" s3transfer: ">=0.10.0,<0.11.0" - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.63-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.64-pyhd8ed1ab_0.conda hash: - md5: 41ac72c902b3ddfa0764328413a06c7d - sha256: 1bfed989428f04e4552d3e87b6c53109a7ee5d3cb37b0c3d66c8c9e8d33a9390 + md5: 9a1fcfa6ff6736efbe88dd67b8277015 + sha256: 0eb1ad4094212933c93b87e959705493f3b5d12e2079dbd91b893b3e00158214 category: main optional: false - name: boto3 - version: 1.35.63 + version: 1.35.64 manager: conda platform: osx-64 dependencies: python: ">=3.8" jmespath: ">=0.7.1,<2.0.0" s3transfer: ">=0.10.0,<0.11.0" - botocore: ">=1.35.63,<1.36.0" - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.63-pyhd8ed1ab_0.conda + botocore: ">=1.35.64,<1.36.0" + url: 
https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.64-pyhd8ed1ab_0.conda hash: - md5: 41ac72c902b3ddfa0764328413a06c7d - sha256: 1bfed989428f04e4552d3e87b6c53109a7ee5d3cb37b0c3d66c8c9e8d33a9390 + md5: 9a1fcfa6ff6736efbe88dd67b8277015 + sha256: 0eb1ad4094212933c93b87e959705493f3b5d12e2079dbd91b893b3e00158214 category: main optional: false - name: boto3 - version: 1.35.63 + version: 1.35.64 manager: conda platform: osx-arm64 dependencies: python: ">=3.8" jmespath: ">=0.7.1,<2.0.0" s3transfer: ">=0.10.0,<0.11.0" - botocore: ">=1.35.63,<1.36.0" - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.63-pyhd8ed1ab_0.conda + botocore: ">=1.35.64,<1.36.0" + url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.64-pyhd8ed1ab_0.conda hash: - md5: 41ac72c902b3ddfa0764328413a06c7d - sha256: 1bfed989428f04e4552d3e87b6c53109a7ee5d3cb37b0c3d66c8c9e8d33a9390 + md5: 9a1fcfa6ff6736efbe88dd67b8277015 + sha256: 0eb1ad4094212933c93b87e959705493f3b5d12e2079dbd91b893b3e00158214 category: main optional: false - name: botocore - version: 1.35.63 + version: 1.35.64 manager: conda platform: linux-64 dependencies: @@ -2339,14 +2339,14 @@ package: python: ">=3.10" python-dateutil: ">=2.1,<3.0.0" urllib3: ">=1.25.4,!=2.2.0,<3" - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.63-pyge310_1234567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.64-pyge310_1234567_0.conda hash: - md5: f6ca7743a8eb6a2dd26d82395348eff9 - sha256: c484095c92c86c10209d70c54f07e4311531e1ad2e1fa2a9fdc193b0611b720e + md5: baf44a99dc095d9ace5a02a51c10858b + sha256: 58069de1f339e29f41f69343c38d093e8db833644aee2b4021558bfafc614392 category: main optional: false - name: botocore - version: 1.35.63 + version: 1.35.64 manager: conda platform: osx-64 dependencies: @@ -2354,14 +2354,14 @@ package: python-dateutil: ">=2.1,<3.0.0" jmespath: ">=0.7.1,<2.0.0" urllib3: ">=1.25.4,!=2.2.0,<3" - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.63-pyge310_1234567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.64-pyge310_1234567_0.conda hash: - md5: f6ca7743a8eb6a2dd26d82395348eff9 - sha256: c484095c92c86c10209d70c54f07e4311531e1ad2e1fa2a9fdc193b0611b720e + md5: baf44a99dc095d9ace5a02a51c10858b + sha256: 58069de1f339e29f41f69343c38d093e8db833644aee2b4021558bfafc614392 category: main optional: false - name: botocore - version: 1.35.63 + version: 1.35.64 manager: conda platform: osx-arm64 dependencies: @@ -2369,10 +2369,10 @@ package: python-dateutil: ">=2.1,<3.0.0" jmespath: ">=0.7.1,<2.0.0" urllib3: ">=1.25.4,!=2.2.0,<3" - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.63-pyge310_1234567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.64-pyge310_1234567_0.conda hash: - md5: f6ca7743a8eb6a2dd26d82395348eff9 - sha256: c484095c92c86c10209d70c54f07e4311531e1ad2e1fa2a9fdc193b0611b720e + md5: baf44a99dc095d9ace5a02a51c10858b + sha256: 58069de1f339e29f41f69343c38d093e8db833644aee2b4021558bfafc614392 category: main optional: false - name: bottleneck @@ -4400,16 +4400,16 @@ package: click: ">=8.1" cloudpickle: ">=3.0.0" fsspec: ">=2021.09.0" - importlib_metadata: ">=4.13.0" + importlib-metadata: ">=4.13.0" packaging: ">=20.0" partd: ">=1.4.0" python: ">=3.10" pyyaml: ">=5.3.1" toolz: ">=0.10.0" - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhff2d567_1.conda hash: - md5: 86269596fa40b5b59b1eb8187f04ca1c 
- sha256: f7991985162a9ef8b506192cddfe95a5bee45a42b70c00bea39693dcb340f38d + md5: ae2be36dab764e655a22f240837cef75 + sha256: b5e120fbcab57343aedbb312c22df8faa1a8444fb16b4d66879efbd7fd560d53 category: main optional: false - name: dask-core @@ -4422,14 +4422,14 @@ package: pyyaml: ">=5.3.1" toolz: ">=0.10.0" click: ">=8.1" - importlib_metadata: ">=4.13.0" + importlib-metadata: ">=4.13.0" fsspec: ">=2021.09.0" cloudpickle: ">=3.0.0" partd: ">=1.4.0" - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhff2d567_1.conda hash: - md5: 86269596fa40b5b59b1eb8187f04ca1c - sha256: f7991985162a9ef8b506192cddfe95a5bee45a42b70c00bea39693dcb340f38d + md5: ae2be36dab764e655a22f240837cef75 + sha256: b5e120fbcab57343aedbb312c22df8faa1a8444fb16b4d66879efbd7fd560d53 category: main optional: false - name: dask-core @@ -4442,14 +4442,14 @@ package: pyyaml: ">=5.3.1" toolz: ">=0.10.0" click: ">=8.1" - importlib_metadata: ">=4.13.0" + importlib-metadata: ">=4.13.0" fsspec: ">=2021.09.0" cloudpickle: ">=3.0.0" partd: ">=1.4.0" - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhff2d567_1.conda hash: - md5: 86269596fa40b5b59b1eb8187f04ca1c - sha256: f7991985162a9ef8b506192cddfe95a5bee45a42b70c00bea39693dcb340f38d + md5: ae2be36dab764e655a22f240837cef75 + sha256: b5e120fbcab57343aedbb312c22df8faa1a8444fb16b4d66879efbd7fd560d53 category: main optional: false - name: dask-expr @@ -4498,45 +4498,45 @@ package: category: main optional: false - name: databricks-sdk - version: 0.37.0 + version: 0.38.0 manager: conda platform: linux-64 dependencies: google-auth: ">=2.0,<3" python: ">=3.7" requests: ">=2.28.1,<3" - url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.37.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.38.0-pyhd8ed1ab_0.conda hash: - md5: 6e5eab6d8b300923c12cde2d7a58394e - sha256: 3df15bd09e686248ceae79229646a6c8a294826faecb236d54fedfffb1996b39 + md5: 1a76130b86eceedd30f5f192afa629b8 + sha256: f8d1155a2104d614fa88d3c3ac0aa8f54937b5fa7ee0266c464e67c2243e8f9f category: main optional: false - name: databricks-sdk - version: 0.37.0 + version: 0.38.0 manager: conda platform: osx-64 dependencies: python: ">=3.7" requests: ">=2.28.1,<3" google-auth: ">=2.0,<3" - url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.37.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.38.0-pyhd8ed1ab_0.conda hash: - md5: 6e5eab6d8b300923c12cde2d7a58394e - sha256: 3df15bd09e686248ceae79229646a6c8a294826faecb236d54fedfffb1996b39 + md5: 1a76130b86eceedd30f5f192afa629b8 + sha256: f8d1155a2104d614fa88d3c3ac0aa8f54937b5fa7ee0266c464e67c2243e8f9f category: main optional: false - name: databricks-sdk - version: 0.37.0 + version: 0.38.0 manager: conda platform: osx-arm64 dependencies: python: ">=3.7" requests: ">=2.28.1,<3" google-auth: ">=2.0,<3" - url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.37.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.38.0-pyhd8ed1ab_0.conda hash: - md5: 6e5eab6d8b300923c12cde2d7a58394e - sha256: 3df15bd09e686248ceae79229646a6c8a294826faecb236d54fedfffb1996b39 + md5: 1a76130b86eceedd30f5f192afa629b8 + sha256: f8d1155a2104d614fa88d3c3ac0aa8f54937b5fa7ee0266c464e67c2243e8f9f category: main optional: false - name: 
datasette @@ -8291,8 +8291,8 @@ package: platform: osx-64 dependencies: certifi: "" - idna: "" sniffio: "" + idna: "" anyio: "" python: ">=3.8" httpcore: 1.* @@ -8308,8 +8308,8 @@ package: platform: osx-arm64 dependencies: certifi: "" - idna: "" sniffio: "" + idna: "" anyio: "" python: ">=3.8" httpcore: 1.* @@ -8708,42 +8708,6 @@ package: sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c category: main optional: false - - name: importlib_metadata - version: 8.5.0 - manager: conda - platform: linux-64 - dependencies: - importlib-metadata: ">=8.5.0,<8.5.1.0a0" - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - hash: - md5: 2a92e152208121afadf85a5e1f3a5f4d - sha256: 313b8a05211bacd6b15ab2621cb73d7f41ea5c6cae98db53367d47833f03fef1 - category: main - optional: false - - name: importlib_metadata - version: 8.5.0 - manager: conda - platform: osx-64 - dependencies: - importlib-metadata: ">=8.5.0,<8.5.1.0a0" - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - hash: - md5: 2a92e152208121afadf85a5e1f3a5f4d - sha256: 313b8a05211bacd6b15ab2621cb73d7f41ea5c6cae98db53367d47833f03fef1 - category: main - optional: false - - name: importlib_metadata - version: 8.5.0 - manager: conda - platform: osx-arm64 - dependencies: - importlib-metadata: ">=8.5.0,<8.5.1.0a0" - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda - hash: - md5: 2a92e152208121afadf85a5e1f3a5f4d - sha256: 313b8a05211bacd6b15ab2621cb73d7f41ea5c6cae98db53367d47833f03fef1 - category: main - optional: false - name: importlib_resources version: 6.4.5 manager: conda @@ -10328,8 +10292,8 @@ package: jupyter-lsp: ">=2.0.0" async-lru: ">=1.0.0" notebook-shim: ">=0.2" - setuptools: ">=40.1.0" httpx: ">=0.25.0" + setuptools: ">=40.1.0" jupyterlab_server: ">=2.27.1,<3" ipykernel: ">=6.5.0" url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.1-pyhff2d567_0.conda @@ -10355,8 +10319,8 @@ package: jupyter-lsp: ">=2.0.0" async-lru: ">=1.0.0" notebook-shim: ">=0.2" - setuptools: ">=40.1.0" httpx: ">=0.25.0" + setuptools: ">=40.1.0" jupyterlab_server: ">=2.27.1,<3" ipykernel: ">=6.5.0" url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.1-pyhff2d567_0.conda @@ -11465,27 +11429,27 @@ package: category: main optional: false - name: libcxx - version: 19.1.3 + version: 19.1.4 manager: conda platform: osx-64 dependencies: __osx: ">=10.13" - url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.3-hf95d169_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/libcxx-19.1.4-hf95d169_0.conda hash: - md5: 86801fc56d4641e3ef7a63f5d996b960 - sha256: 466f259bb13a8058fef28843977c090d21ad337b71a842ccc0407bccf8d27011 + md5: 5f23923c08151687ff2fc3002b0a7234 + sha256: 48c6d0ab9dd0c66693f79f4a032cd9ebb64fb88329dfa747aeac5299f9b3f33b category: main optional: false - name: libcxx - version: 19.1.3 + version: 19.1.4 manager: conda platform: osx-arm64 dependencies: __osx: ">=11.0" - url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.3-ha82da77_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.4-ha82da77_0.conda hash: - md5: bf691071fba4734984231617783225bc - sha256: 6d062760c6439e75b9a44d800d89aff60fe3441998d87506c62dc94c50412ef4 + md5: a2d3d484d95889fccdd09498d8f6bf9a + sha256: 342896ebc1d6acbf022ca6df006a936b9a472579e91e3c502cb1f52f218b78e9 category: main optional: false - name: libdeflate @@ -14344,7 +14308,7 @@ package: category: main 
optional: false - name: mlflow - version: 2.17.2 + version: 2.18.0 manager: conda platform: linux-64 dependencies: @@ -14356,24 +14320,24 @@ package: jinja2: <4,>=2.11 markdown: <4,>=3.3 matplotlib-base: <4 - mlflow-ui: 2.17.2 + mlflow-ui: 2.18.0 numpy: <3 pandas: <3 prometheus_flask_exporter: <1 - pyarrow: <18,>=4.0.0 + pyarrow: <19,>=4.0.0 python_abi: 3.12.* querystring_parser: <2 scikit-learn: <2 scipy: <2 sqlalchemy: ">=1.4.0,<3" - url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-2.17.2-h7900ff3_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-2.18.0-h7900ff3_0.conda hash: - md5: e0703d0a12f4d5530ae30313d3cc8ca3 - sha256: 5446b915828e642e4bc61796b2a14a52fb5f16b56dccb19bad04850adfaa99d3 + md5: 34ee68e9926d77fc3235e459b34991bc + sha256: 62fbf0d4f1cb32c0a734cd6c2d771323b9b243b7651781a7267857d9f7ba7ef4 category: main optional: false - name: mlflow - version: 2.17.2 + version: 2.18.0 manager: conda platform: osx-64 dependencies: @@ -14385,24 +14349,24 @@ package: jinja2: <4,>=2.11 markdown: <4,>=3.3 matplotlib-base: <4 - mlflow-ui: 2.17.2 + mlflow-ui: 2.18.0 numpy: <3 pandas: <3 prometheus_flask_exporter: <1 - pyarrow: <18,>=4.0.0 + pyarrow: <19,>=4.0.0 python_abi: 3.12.* querystring_parser: <2 scikit-learn: <2 scipy: <2 sqlalchemy: ">=1.4.0,<3" - url: https://conda.anaconda.org/conda-forge/osx-64/mlflow-2.17.2-hb401068_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/mlflow-2.18.0-hb401068_0.conda hash: - md5: 7c8f9d65992f318e3ad9d761e350127e - sha256: 34535e4268a7d03cd23ef62d72cc9869c72d7e02cea3bb59f07e17a886540cb8 + md5: 2f88ee852e22c0405db37bd4547c406f + sha256: 26e8e76598b5605b853708ebffa44346954df49fe9807e06b47963fda9b1681b category: main optional: false - name: mlflow - version: 2.17.2 + version: 2.18.0 manager: conda platform: osx-arm64 dependencies: @@ -14414,25 +14378,25 @@ package: jinja2: <4,>=2.11 markdown: <4,>=3.3 matplotlib-base: <4 - mlflow-ui: 2.17.2 + mlflow-ui: 2.18.0 numpy: <3 pandas: <3 prometheus_flask_exporter: <1 - pyarrow: <18,>=4.0.0 + pyarrow: <19,>=4.0.0 python: ">=3.12,<3.13.0a0" python_abi: 3.12.* querystring_parser: <2 scikit-learn: <2 scipy: <2 sqlalchemy: ">=1.4.0,<3" - url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-2.17.2-py312h81bd7bf_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-2.18.0-py312h81bd7bf_0.conda hash: - md5: 180655f1d98275c156a407c8bc82be50 - sha256: e0e9fffa6bc1b03838e9b95b5a83309b89978901cc8180d551e29e0dc9434784 + md5: 1b15f30789e0379014ea615de00690ea + sha256: 6ddbefb88a978b8225e5df1ec9ec1e269d6efec74eef5a8a9ce07ee79bfeacbf category: main optional: false - name: mlflow-skinny - version: 2.17.2 + version: 2.18.0 manager: conda platform: linux-64 dependencies: @@ -14453,14 +14417,14 @@ package: pyyaml: ">=5.1,<7" requests: ">=2.17.3,<3" sqlparse: ">=0.4.0,<1" - url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-skinny-2.17.2-py312h7900ff3_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-skinny-2.18.0-py312h7900ff3_0.conda hash: - md5: 0dbaa33d7e0d91e0b8b8573973f5ffd8 - sha256: df1e25db2591f50facf46e7097fb80e0e55283ec0dcc9a62345e4c69a8f2cb77 + md5: 19b36b1c50ea845bd1c759a1b777e1b3 + sha256: d4eee9878d2c6c2539227325241c41fb967a3d8751d338f517092d415723d6d8 category: main optional: false - name: mlflow-skinny - version: 2.17.2 + version: 2.18.0 manager: conda platform: osx-64 dependencies: @@ -14481,14 +14445,14 @@ package: pyyaml: ">=5.1,<7" requests: ">=2.17.3,<3" sqlparse: ">=0.4.0,<1" - url: 
https://conda.anaconda.org/conda-forge/osx-64/mlflow-skinny-2.17.2-py312hb401068_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/mlflow-skinny-2.18.0-py312hb401068_0.conda hash: - md5: 38ce677d23c3f9b4033365a5f02c0572 - sha256: 4bbb7c7ccb68a8e51152296b32001160f321b17da1635509dd7818f8ba67212d + md5: fba04a313c30c6e06d28ac5a90039963 + sha256: 7323aaa4a61c2521c1affc400e5d5cb1250e2721b82a9e25f2a2d4e25bfcd214 category: main optional: false - name: mlflow-skinny - version: 2.17.2 + version: 2.18.0 manager: conda platform: osx-arm64 dependencies: @@ -14509,61 +14473,61 @@ package: pyyaml: ">=5.1,<7" requests: ">=2.17.3,<3" sqlparse: ">=0.4.0,<1" - url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-skinny-2.17.2-py312h81bd7bf_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-skinny-2.18.0-py312h81bd7bf_0.conda hash: - md5: 7b67fde6b12bfd05913a0efc77e1cce5 - sha256: 9fcb30fe9414aa79dce6f8983175e2314de07e9cc6a4faf1ea5a50394785acea + md5: 6a8c4c7bc0038c4345440396cd604fda + sha256: 95558261545b392aeebf6ba67d780d1dbcf4c76c1059a1e5ea75f8cddab899fc category: main optional: false - name: mlflow-ui - version: 2.17.2 + version: 2.18.0 manager: conda platform: linux-64 dependencies: flask: <4 gunicorn: <23 - mlflow-skinny: 2.17.2 + mlflow-skinny: 2.18.0 python: ">=3.12,<3.13.0a0" python_abi: 3.12.* querystring_parser: <2 - url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-ui-2.17.2-py312h7900ff3_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-ui-2.18.0-py312h7900ff3_0.conda hash: - md5: 74c90b9f9c86c1d154f336c8bd713a5d - sha256: 6c43d0799996c02dc455ba84d9f6ac6bda0b5116202b98e3ebb4de3a3f24a735 + md5: d8f4aadcb96e11fa4e10552a09a39cef + sha256: df6408620b5fed80cdff38e5aeee93b4a72b1c27a34e6e9718bb9c6200b44b1a category: main optional: false - name: mlflow-ui - version: 2.17.2 + version: 2.18.0 manager: conda platform: osx-64 dependencies: flask: <4 gunicorn: <23 - mlflow-skinny: 2.17.2 + mlflow-skinny: 2.18.0 python: ">=3.12,<3.13.0a0" python_abi: 3.12.* querystring_parser: <2 - url: https://conda.anaconda.org/conda-forge/osx-64/mlflow-ui-2.17.2-py312hb401068_0.conda + url: https://conda.anaconda.org/conda-forge/osx-64/mlflow-ui-2.18.0-py312hb401068_0.conda hash: - md5: 24802598b409a0be749a0a9dca959969 - sha256: 06bce095b15f1c1b556ec2d5c53cfbf7fcfda4fefa73dcf39978ff1f6ac2655d + md5: 857fd014ab9fb0fbdaf4c7e3d3a8587b + sha256: c5c80bb877d7c26be73f3039a5b0f5e9af9683f2851d4bc47a607df9c82f3c33 category: main optional: false - name: mlflow-ui - version: 2.17.2 + version: 2.18.0 manager: conda platform: osx-arm64 dependencies: flask: <4 gunicorn: <23 - mlflow-skinny: 2.17.2 + mlflow-skinny: 2.18.0 python: ">=3.12,<3.13.0a0" python_abi: 3.12.* querystring_parser: <2 - url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-ui-2.17.2-py312h81bd7bf_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-ui-2.18.0-py312h81bd7bf_0.conda hash: - md5: 16ec3014dc8b5a4982e9f319c05d985d - sha256: 1d85dcf19f0defcdd859b19c7d42b30f42644ff0d5c80130e0943fb778b7596e + md5: bfa52f1ec6cc55159b40f59c033fc045 + sha256: f7c7f94829a38c5104c9c4fc7b49e69c3c4de7ebc43db17054929e819bda29f2 category: main optional: false - name: more-itertools @@ -14800,39 +14764,39 @@ package: category: main optional: false - name: narwhals - version: 1.14.0 + version: 1.14.1 manager: conda platform: linux-64 dependencies: python: ">=3.9" - url: https://conda.anaconda.org/conda-forge/noarch/narwhals-1.14.0-pyhff2d567_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/narwhals-1.14.1-pyhff2d567_0.conda hash: - md5: dc78d38bb510dc63524b17cdd1ef3e99 - sha256: 4f1f539340b51541ff3c1e3018f8a06fac9c8c4f2d077ceb3ede71fe2e3e6ee4 + md5: 6aeb2311753a549bb28ceb732d803a9c + sha256: d0396c1f29adc6654851120c957fe8b7dbbe1154f543d4a276ebe779a90f6fc8 category: main optional: false - name: narwhals - version: 1.14.0 + version: 1.14.1 manager: conda platform: osx-64 dependencies: python: ">=3.9" - url: https://conda.anaconda.org/conda-forge/noarch/narwhals-1.14.0-pyhff2d567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/narwhals-1.14.1-pyhff2d567_0.conda hash: - md5: dc78d38bb510dc63524b17cdd1ef3e99 - sha256: 4f1f539340b51541ff3c1e3018f8a06fac9c8c4f2d077ceb3ede71fe2e3e6ee4 + md5: 6aeb2311753a549bb28ceb732d803a9c + sha256: d0396c1f29adc6654851120c957fe8b7dbbe1154f543d4a276ebe779a90f6fc8 category: main optional: false - name: narwhals - version: 1.14.0 + version: 1.14.1 manager: conda platform: osx-arm64 dependencies: python: ">=3.9" - url: https://conda.anaconda.org/conda-forge/noarch/narwhals-1.14.0-pyhff2d567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/narwhals-1.14.1-pyhff2d567_0.conda hash: - md5: dc78d38bb510dc63524b17cdd1ef3e99 - sha256: 4f1f539340b51541ff3c1e3018f8a06fac9c8c4f2d077ceb3ede71fe2e3e6ee4 + md5: 6aeb2311753a549bb28ceb732d803a9c + sha256: d0396c1f29adc6654851120c957fe8b7dbbe1154f543d4a276ebe779a90f6fc8 category: main optional: false - name: nbclassic @@ -21896,39 +21860,39 @@ package: category: main optional: false - name: sqlglot - version: 25.30.0 + version: 25.31.4 manager: conda platform: linux-64 dependencies: python: ">=3.9" - url: https://conda.anaconda.org/conda-forge/noarch/sqlglot-25.30.0-pyhff2d567_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/sqlglot-25.31.4-pyhff2d567_0.conda hash: - md5: 9bc69c2216cb02da285fefcb394e0c7b - sha256: 6d0f8ccf7efa34340a54db27bf2dbf075dbc667049ff5894484a1baa30aa274f + md5: 5901114fc3886a25df43417540d0c76f + sha256: 3135f25540028d3418796061f29f50e54c5bb91c21b94aa750ef76117bdbf89f category: main optional: false - name: sqlglot - version: 25.30.0 + version: 25.31.4 manager: conda platform: osx-64 dependencies: python: ">=3.9" - url: https://conda.anaconda.org/conda-forge/noarch/sqlglot-25.30.0-pyhff2d567_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/sqlglot-25.31.4-pyhff2d567_0.conda hash: - md5: 9bc69c2216cb02da285fefcb394e0c7b - sha256: 6d0f8ccf7efa34340a54db27bf2dbf075dbc667049ff5894484a1baa30aa274f + md5: 5901114fc3886a25df43417540d0c76f + sha256: 3135f25540028d3418796061f29f50e54c5bb91c21b94aa750ef76117bdbf89f category: main optional: false - name: sqlglot - version: 25.30.0 + version: 25.31.4 manager: conda platform: osx-arm64 dependencies: python: ">=3.9" - url: https://conda.anaconda.org/conda-forge/noarch/sqlglot-25.30.0-pyhff2d567_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/sqlglot-25.31.4-pyhff2d567_0.conda hash: - md5: 9bc69c2216cb02da285fefcb394e0c7b - sha256: 6d0f8ccf7efa34340a54db27bf2dbf075dbc667049ff5894484a1baa30aa274f + md5: 5901114fc3886a25df43417540d0c76f + sha256: 3135f25540028d3418796061f29f50e54c5bb91c21b94aa750ef76117bdbf89f category: main optional: false - name: sqlite @@ -22062,45 +22026,45 @@ package: category: main optional: false - name: starlette - version: 0.41.2 + version: 0.41.3 manager: conda platform: linux-64 dependencies: anyio: ">=3.4.0,<5" - python: ">=3.8" + python: ">=3.9" typing_extensions: ">=3.10.0" - url: 
https://conda.anaconda.org/conda-forge/noarch/starlette-0.41.2-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/starlette-0.41.3-pyh7900ff3_0.conda hash: - md5: 287492bb6e159da4357a10a2bd05c13c - sha256: 02206e5369944e0fd29e4f5c8e9b51dd926a74a46b621a73323669ad404f1081 + md5: 0889c5a3e95d8c382cff7556757aedb0 + sha256: 33986032cb0515f7e9f6647d07006b7dc49b3f373b73d5a1826e6979c661b27a category: dev optional: true - name: starlette - version: 0.41.2 + version: 0.41.3 manager: conda platform: osx-64 dependencies: - python: ">=3.8" + python: ">=3.9" typing_extensions: ">=3.10.0" anyio: ">=3.4.0,<5" - url: https://conda.anaconda.org/conda-forge/noarch/starlette-0.41.2-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/starlette-0.41.3-pyh7900ff3_0.conda hash: - md5: 287492bb6e159da4357a10a2bd05c13c - sha256: 02206e5369944e0fd29e4f5c8e9b51dd926a74a46b621a73323669ad404f1081 + md5: 0889c5a3e95d8c382cff7556757aedb0 + sha256: 33986032cb0515f7e9f6647d07006b7dc49b3f373b73d5a1826e6979c661b27a category: dev optional: true - name: starlette - version: 0.41.2 + version: 0.41.3 manager: conda platform: osx-arm64 dependencies: - python: ">=3.8" + python: ">=3.9" typing_extensions: ">=3.10.0" anyio: ">=3.4.0,<5" - url: https://conda.anaconda.org/conda-forge/noarch/starlette-0.41.2-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/starlette-0.41.3-pyh7900ff3_0.conda hash: - md5: 287492bb6e159da4357a10a2bd05c13c - sha256: 02206e5369944e0fd29e4f5c8e9b51dd926a74a46b621a73323669ad404f1081 + md5: 0889c5a3e95d8c382cff7556757aedb0 + sha256: 33986032cb0515f7e9f6647d07006b7dc49b3f373b73d5a1826e6979c661b27a category: dev optional: true - name: stevedore diff --git a/environments/conda-osx-64.lock.yml b/environments/conda-osx-64.lock.yml index 2ba8ee1a95..8e8a3014d0 100644 --- a/environments/conda-osx-64.lock.yml +++ b/environments/conda-osx-64.lock.yml @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: osx-64 -# input_hash: bcf2452442dc2658e1840c236a2756bc5b5ff91c1c9e4f14e2af898c7efbf1fb +# input_hash: 3b411f767af9c3cf3d53a12e2b181931d0a9d5e8a26eec9844c36f4f2eeafe34 channels: - conda-forge @@ -8,7 +8,7 @@ dependencies: - addfips=0.4.2=pyhd8ed1ab_0 - aiofiles=24.1.0=pyhd8ed1ab_0 - aiohappyeyeballs=2.4.3=pyhd8ed1ab_0 - - aiohttp=3.11.2=py312h3520af0_0 + - aiohttp=3.11.4=py312h3520af0_1 - aiosignal=1.3.1=pyhd8ed1ab_0 - alabaster=1.0.0=pyhd8ed1ab_0 - alembic=1.14.0=pyhd8ed1ab_0 @@ -56,8 +56,8 @@ dependencies: - bleach=6.2.0=pyhd8ed1ab_0 - blinker=1.9.0=pyhff2d567_0 - blosc=1.21.6=h7d75f6d_0 - - boto3=1.35.63=pyhd8ed1ab_0 - - botocore=1.35.63=pyge310_1234567_0 + - boto3=1.35.64=pyhd8ed1ab_0 + - botocore=1.35.64=pyge310_1234567_0 - bottleneck=1.4.2=py312h59f7578_0 - branca=0.7.2=pyhd8ed1ab_0 - brotli=1.1.0=h00291cd_2 @@ -102,9 +102,9 @@ dependencies: - dagster-pipes=1.9.2=pyhd8ed1ab_0 - dagster-postgres=0.25.2=pyhd8ed1ab_0 - dagster-webserver=1.9.2=pyhd8ed1ab_0 - - dask-core=2024.11.2=pyhd8ed1ab_0 + - dask-core=2024.11.2=pyhff2d567_1 - dask-expr=1.1.19=pyhd8ed1ab_0 - - databricks-sdk=0.37.0=pyhd8ed1ab_0 + - databricks-sdk=0.38.0=pyhd8ed1ab_0 - datasette=0.65=pyhd8ed1ab_0 - debugpy=1.8.8=py312haafddd8_0 - decorator=5.1.1=pyhd8ed1ab_0 @@ -198,7 +198,6 @@ dependencies: - idna=3.10=pyhd8ed1ab_0 - imagesize=1.4.1=pyhd8ed1ab_0 - importlib-metadata=8.5.0=pyha770c72_0 - - importlib_metadata=8.5.0=hd8ed1ab_0 - importlib_resources=6.4.5=pyhd8ed1ab_0 - iniconfig=2.0.0=pyhd8ed1ab_0 - ipykernel=6.29.5=pyh57ce528_0 @@ -256,7 +255,7 @@ dependencies: - libcblas=3.9.0=25_osx64_openblas - libcrc32c=1.1.2=he49afe7_0 - libcurl=8.10.1=h58e7537_0 - - libcxx=19.1.3=hf95d169_0 + - libcxx=19.1.4=hf95d169_0 - libdeflate=1.22=h00291cd_0 - libedit=3.1.20191231=h0678c8f_2 - libev=4.33=h10d778d_2 @@ -319,16 +318,16 @@ dependencies: - mergedeep=1.3.4=pyhd8ed1ab_0 - minizip=4.0.7=h62b0c8d_0 - mistune=3.0.2=pyhd8ed1ab_0 - - mlflow=2.17.2=hb401068_0 - - mlflow-skinny=2.17.2=py312hb401068_0 - - mlflow-ui=2.17.2=py312hb401068_0 + - mlflow=2.18.0=hb401068_0 + - mlflow-skinny=2.18.0=py312hb401068_0 + - mlflow-ui=2.18.0=py312hb401068_0 - more-itertools=10.5.0=pyhd8ed1ab_0 - msgpack-python=1.1.0=py312hc5c4d5f_0 - multidict=6.1.0=py312h6f3313d_1 - multimethod=1.9.1=pyhd8ed1ab_0 - munkres=1.1.4=pyh9f0ad1d_0 - mypy_extensions=1.0.0=pyha770c72_0 - - narwhals=1.14.0=pyhff2d567_0 + - narwhals=1.14.1=pyhff2d567_0 - nbclassic=1.1.0=pyhd8ed1ab_0 - nbclient=0.10.0=pyhd8ed1ab_0 - nbconvert=7.16.4=hd8ed1ab_1 @@ -491,11 +490,11 @@ dependencies: - sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_0 - splink=4.0.5=pyhd8ed1ab_0 - sqlalchemy=2.0.36=py312h3d0f464_0 - - sqlglot=25.30.0=pyhff2d567_1 + - sqlglot=25.31.4=pyhff2d567_0 - sqlite=3.47.0=h6285a30_1 - sqlparse=0.5.2=pyhff2d567_0 - stack_data=0.6.2=pyhd8ed1ab_0 - - starlette=0.41.2=pyha770c72_0 + - starlette=0.41.3=pyh7900ff3_0 - stevedore=5.3.0=pyhd8ed1ab_0 - stringcase=1.2.0=pyhd8ed1ab_1 - structlog=24.4.0=pyhd8ed1ab_0 diff --git a/environments/conda-osx-arm64.lock.yml b/environments/conda-osx-arm64.lock.yml index 37b8315f52..01e8296598 100644 --- a/environments/conda-osx-arm64.lock.yml +++ b/environments/conda-osx-arm64.lock.yml @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: osx-arm64 -# input_hash: d195cdc6e756c1121586b7add937cff32983380eb304e5c37cfb159de9b5c6cd +# input_hash: 0b79779e9803db47b1e57d14193b913280cf82b1fe3cc43a71d098160a71abe8 channels: - conda-forge @@ -8,7 +8,7 @@ dependencies: - addfips=0.4.2=pyhd8ed1ab_0 - aiofiles=24.1.0=pyhd8ed1ab_0 - aiohappyeyeballs=2.4.3=pyhd8ed1ab_0 - - aiohttp=3.11.2=py312h998013c_0 + - aiohttp=3.11.4=py312h998013c_1 - aiosignal=1.3.1=pyhd8ed1ab_0 - alabaster=1.0.0=pyhd8ed1ab_0 - alembic=1.14.0=pyhd8ed1ab_0 @@ -56,8 +56,8 @@ dependencies: - bleach=6.2.0=pyhd8ed1ab_0 - blinker=1.9.0=pyhff2d567_0 - blosc=1.21.6=h5499902_0 - - boto3=1.35.63=pyhd8ed1ab_0 - - botocore=1.35.63=pyge310_1234567_0 + - boto3=1.35.64=pyhd8ed1ab_0 + - botocore=1.35.64=pyge310_1234567_0 - bottleneck=1.4.2=py312h147345f_0 - branca=0.7.2=pyhd8ed1ab_0 - brotli=1.1.0=hd74edd7_2 @@ -102,9 +102,9 @@ dependencies: - dagster-pipes=1.9.2=pyhd8ed1ab_0 - dagster-postgres=0.25.2=pyhd8ed1ab_0 - dagster-webserver=1.9.2=pyhd8ed1ab_0 - - dask-core=2024.11.2=pyhd8ed1ab_0 + - dask-core=2024.11.2=pyhff2d567_1 - dask-expr=1.1.19=pyhd8ed1ab_0 - - databricks-sdk=0.37.0=pyhd8ed1ab_0 + - databricks-sdk=0.38.0=pyhd8ed1ab_0 - datasette=0.65=pyhd8ed1ab_0 - debugpy=1.8.8=py312hd8f9ff3_0 - decorator=5.1.1=pyhd8ed1ab_0 @@ -198,7 +198,6 @@ dependencies: - idna=3.10=pyhd8ed1ab_0 - imagesize=1.4.1=pyhd8ed1ab_0 - importlib-metadata=8.5.0=pyha770c72_0 - - importlib_metadata=8.5.0=hd8ed1ab_0 - importlib_resources=6.4.5=pyhd8ed1ab_0 - iniconfig=2.0.0=pyhd8ed1ab_0 - ipykernel=6.29.5=pyh57ce528_0 @@ -256,7 +255,7 @@ dependencies: - libcblas=3.9.0=25_osxarm64_openblas - libcrc32c=1.1.2=hbdafb3b_0 - libcurl=8.10.1=h13a7ad3_0 - - libcxx=19.1.3=ha82da77_0 + - libcxx=19.1.4=ha82da77_0 - libdeflate=1.22=hd74edd7_0 - libedit=3.1.20191231=hc8eb9b7_2 - libev=4.33=h93a5062_2 @@ -319,16 +318,16 @@ dependencies: - mergedeep=1.3.4=pyhd8ed1ab_0 - minizip=4.0.7=h27ee973_0 - mistune=3.0.2=pyhd8ed1ab_0 - - mlflow=2.17.2=py312h81bd7bf_0 - - mlflow-skinny=2.17.2=py312h81bd7bf_0 - - mlflow-ui=2.17.2=py312h81bd7bf_0 + - mlflow=2.18.0=py312h81bd7bf_0 + - mlflow-skinny=2.18.0=py312h81bd7bf_0 + - mlflow-ui=2.18.0=py312h81bd7bf_0 - more-itertools=10.5.0=pyhd8ed1ab_0 - msgpack-python=1.1.0=py312h6142ec9_0 - multidict=6.1.0=py312hdb8e49c_1 - multimethod=1.9.1=pyhd8ed1ab_0 - munkres=1.1.4=pyh9f0ad1d_0 - mypy_extensions=1.0.0=pyha770c72_0 - - narwhals=1.14.0=pyhff2d567_0 + - narwhals=1.14.1=pyhff2d567_0 - nbclassic=1.1.0=pyhd8ed1ab_0 - nbclient=0.10.0=pyhd8ed1ab_0 - nbconvert=7.16.4=hd8ed1ab_1 @@ -491,11 +490,11 @@ dependencies: - sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_0 - splink=4.0.5=pyhd8ed1ab_0 - sqlalchemy=2.0.36=py312h0bf5046_0 - - sqlglot=25.30.0=pyhff2d567_1 + - sqlglot=25.31.4=pyhff2d567_0 - sqlite=3.47.0=hcd14bea_1 - sqlparse=0.5.2=pyhff2d567_0 - stack_data=0.6.2=pyhd8ed1ab_0 - - starlette=0.41.2=pyha770c72_0 + - starlette=0.41.3=pyh7900ff3_0 - stevedore=5.3.0=pyhd8ed1ab_0 - stringcase=1.2.0=pyhd8ed1ab_1 - structlog=24.4.0=pyhd8ed1ab_0 diff --git a/pyproject.toml b/pyproject.toml index d974f4307c..b714fcb199 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -338,13 +338,13 @@ name = "pudl-dev" [tool.conda-lock.dependencies] curl = ">=8.4.0" -google-cloud-sdk = ">=474" +google-cloud-sdk = ">=500" nodejs = ">=20" pandoc = ">=2" pip = ">=24" prettier = ">=3.0" python = ">=3.12,<3.13" -sqlite = ">=3.45" +sqlite = ">=3.47" zip = ">=3.0" [tool.coverage.run]