Merge branch 'main' into rmq-out-small-step-try
unkcpz authored Jan 10, 2025
2 parents 0cee2f2 + 5e8bbe1 commit eb34d37
Showing 11 changed files with 1,878 additions and 944 deletions.
19 changes: 10 additions & 9 deletions .github/actions/install-aiida-core/action.yml
@@ -7,16 +7,18 @@ inputs:
default: '3.9' # Lowest supported version
required: false
extras:
description: aiida-core extras (including brackets)
default: ''
description: list of optional dependencies
# NOTE: The default 'pre-commit' extra recursively contains
# other extras needed to run the tests.
default: pre-commit
required: false
# NOTE: Hard-learned lesson: we cannot use type=boolean here, apparently :-(
# https://stackoverflow.com/a/76294014
# NOTE2: When installing from lockfile, aiida-core and its dependencies
# are installed in a virtual environment located in .venv directory.
# Subsequent job steps must either activate the environment or use `uv run`
from-lock:
description: Install aiida-core dependencies from a uv lock file
description: Install aiida-core dependencies from uv lock file
default: 'true'
required: false

@@ -29,19 +31,18 @@ runs:
python-version: ${{ inputs.python-version }}

- name: Set up uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v5
with:
version: 0.5.6
version: 0.5.x
python-version: ${{ inputs.python-version }}

- name: Install dependencies from uv lock
if: ${{ inputs.from-lock == 'true' }}
# NOTE: We're asserting that the lockfile is up to date
# NOTE2: 'pre-commit' extra recursively contains other extras
# needed to run the tests.
run: uv sync --locked --extra pre-commit
run: uv sync --locked ${{ inputs.extras && format('--extra {0}', inputs.extras) || '' }}
shell: bash

- name: Install aiida-core
if: ${{ inputs.from-lock != 'true' }}
run: uv pip install --system -e .${{ inputs.extras }}
run: uv pip install -e .${{ inputs.extras }}
shell: bash
24 changes: 18 additions & 6 deletions .github/workflows/ci-code.yml
@@ -27,6 +27,10 @@ jobs:
fail-fast: false
matrix:
python-version: ['3.9', '3.12']
database-backend: [psql]
include:
- python-version: '3.9'
database-backend: sqlite

services:
postgres:
@@ -64,16 +68,22 @@
python-version: ${{ matrix.python-version }}

- name: Setup environment
# Note: The virtual environment in .venv was created by uv in previous step
run: source .venv/bin/activate && .github/workflows/setup.sh
run: .github/workflows/setup.sh

- name: Run test suite
env:
AIIDA_TEST_PROFILE: test_aiida
AIIDA_WARN_v3: 1
# Python 3.12 has a performance regression when running with code coverage
# NOTE1: Python 3.12 has a performance regression when running with code coverage
# so run code coverage only for python 3.9.
run: uv run pytest -n auto --db-backend psql -m 'not nightly' tests/ ${{ matrix.python-version == '3.9' && '--cov aiida' || '' }}
# NOTE2: Unset CI envvar to workaround a pymatgen issue for Python 3.9
# https://github.com/materialsproject/pymatgen/issues/4243
# TODO: Remove a workaround for VIRTUAL_ENV once the setup-uv action is updated
# https://github.com/astral-sh/setup-uv/issues/219
run: |
${{ matrix.python-version == '3.9' && 'unset CI' || '' }}
${{ matrix.python-version == '3.9' && 'VIRTUAL_ENV=$PWD/.venv' || '' }}
pytest -n auto --db-backend ${{ matrix.database-backend }} -m 'not nightly' tests/ ${{ matrix.python-version == '3.9' && '--cov aiida' || '' }}
- name: Upload coverage report
if: matrix.python-version == 3.9 && github.repository == 'aiidateam/aiida-core'
@@ -84,7 +94,6 @@ jobs:
file: ./coverage.xml
fail_ci_if_error: false # don't fail job, if coverage upload fails


tests-presto:

runs-on: ubuntu-latest
@@ -122,7 +131,10 @@
uses: ./.github/actions/install-aiida-core
with:
python-version: '3.12'
from-lock: 'false'
from-lock: 'true'
# NOTE: The `verdi devel check-undesired-imports` fails if
# the 'tui' extra is installed.
extras: ''

- name: Run verdi tests
run: |
20 changes: 15 additions & 5 deletions .github/workflows/test-install.yml
@@ -11,6 +11,9 @@ on:
schedule:
- cron: 30 02 * * * # nightly build

env:
FORCE_COLOR: 1

# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
# only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
@@ -32,18 +35,18 @@ jobs:
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: '3.9'
python-version: '3.11'

- name: Set up uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v5
with:
version: 0.5.6
version: 0.5.x

- name: Install utils/ dependencies
run: uv pip install --system -r utils/requirements.txt

- name: Validate uv lockfile
run: uv lock --locked
run: uv lock --check

- name: Validate conda environment file
run: python ./utils/dependency_management.py validate-environment-yml
@@ -208,4 +211,11 @@ jobs:
env:
AIIDA_TEST_PROFILE: test_aiida
AIIDA_WARN_v3: 1
run: pytest -n auto --db-backend psql tests -m 'not nightly' tests/
# Unset CI envvar to workaround a pymatgen issue for Python 3.9
# https://github.com/materialsproject/pymatgen/issues/4243
# TODO: Remove a workaround for VIRTUAL_ENV once the setup-uv action is updated
# https://github.com/astral-sh/setup-uv/issues/219
run: |
${{ matrix.python-version == '3.9' && 'unset CI' || '' }}
${{ matrix.python-version == '3.9' && 'VIRTUAL_ENV=$PWD/.venv' || '' }}
pytest -n auto --db-backend psql -m 'not nightly' tests/
11 changes: 6 additions & 5 deletions .pre-commit-config.yaml
@@ -6,7 +6,7 @@ ci:

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v5.0.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -26,7 +26,7 @@ repos:
exclude: *exclude_pre_commit_hooks

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.28.6
rev: 0.30.0
hooks:
- id: check-github-workflows

@@ -37,14 +37,14 @@ repos:
args: [--line-length=120, --fail-on-change]

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.0
rev: v0.8.6
hooks:
- id: ruff-format
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]

- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
rev: v2.14.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -191,12 +191,13 @@ repos:
- id: check-uv-lock
name: Check uv lockfile up to date
# NOTE: This will not automatically update the lockfile
entry: uv lock --locked
entry: uv lock --check
language: system
pass_filenames: false
files: >-
(?x)^(
pyproject.toml|
uv.lock|
)$
- id: generate-conda-environment
2 changes: 1 addition & 1 deletion docs/source/reference/command_line.rst
@@ -451,7 +451,7 @@ Below is a list with all available subcommands.
--broker-host HOSTNAME Hostname for the message broker. [default: 127.0.0.1]
--broker-port INTEGER Port for the message broker. [default: 5672]
--broker-virtual-host TEXT Name of the virtual host for the message broker without
leading forward slash.
leading forward slash. [default: ""]
--repository DIRECTORY Absolute path to the file repository.
--test-profile Designate the profile to be used for running the test
suite only.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -388,7 +388,7 @@ minversion = '7.0'
testpaths = [
'tests'
]
timeout = 30
timeout = 240
xfail_strict = true

[tool.ruff]
41 changes: 41 additions & 0 deletions src/aiida/cmdline/commands/cmd_data/cmd_remote.py
@@ -9,6 +9,7 @@
"""`verdi data core.remote` command."""

import stat
from pathlib import Path

import click

@@ -87,3 +88,43 @@ def remote_show(datum):
"""Show information for a RemoteData object."""
echo.echo(f'- Remote computer name: {datum.computer.label}')
echo.echo(f'- Remote folder full path: {datum.get_remote_path()}')


@remote.command('size')
@arguments.NODE()
@click.option(
'-m',
'--method',
type=click.STRING,
default='du',
help='The method that should be used to evaluate the size (either ``du`` or ``stat``.)',
)
@click.option(
'-p',
'--path',
type=click.Path(),
default=None,
help='Relative path of the object of the ``RemoteData`` node for which the size should be evaluated.',
)
@click.option(
'-b',
'--bytes',
'return_bytes',
type=bool,
is_flag=True,
default=False,
help='Return the size in bytes or human-readable format?',
)
def remote_size(node, method, path, return_bytes):
"""Obtain the total size of a file or directory at a given path that is stored via a ``RemoteData`` object."""
try:
# `method` might change, if `du` fails, so assigning to new variable here
total_size, used_method = node.get_size_on_disk(relpath=path, method=method, return_bytes=return_bytes)
remote_path = Path(node.get_remote_path())
full_path = remote_path / path if path is not None else remote_path
echo.echo_success(
f'Estimated total size of path `{full_path}` on the Computer '
f'<{node.computer.label}> obtained via `{used_method}`: {total_size}'
)
except (OSError, FileNotFoundError, NotImplementedError) as exc:
echo.echo_critical(str(exc))
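
The new subcommand is a thin wrapper around ``RemoteData.get_size_on_disk``, which it calls with the ``relpath``, ``method`` and ``return_bytes`` keywords shown above. Below is a minimal sketch (not part of this commit) of calling that API directly; it assumes a configured AiiDA profile and an already stored ``RemoteData`` node, and the PK ``1234`` and the relative path are placeholders.

from aiida import load_profile, orm

load_profile()

node = orm.load_node(1234)  # placeholder PK of an existing RemoteData node
# Same call the `verdi data core.remote size` command makes; per the comment in the
# command, `used_method` can differ from the requested method if `du` fails.
total_size, used_method = node.get_size_on_disk(relpath='some/subdir', method='du', return_bytes=False)
print(f'{total_size} (obtained via {used_method})')

The equivalent command-line invocation would be along the lines of ``verdi data core.remote size 1234 --path some/subdir``, again with a placeholder node identifier.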
31 changes: 31 additions & 0 deletions src/aiida/common/utils.py
@@ -572,3 +572,34 @@ def __init__(self, dtobj, precision):

        self.dtobj = dtobj
        self.precision = precision


def format_directory_size(size_in_bytes: int) -> str:
    """Converts a size in bytes to a human-readable string with the appropriate prefix.

    :param size_in_bytes: Size in bytes.
    :raises ValueError: If the size is negative.
    :return: Human-readable size string with a prefix (e.g., "1.23 KB", "5.67 MB").

    The function converts a given size in bytes to a more readable format by
    adding the appropriate unit suffix (e.g., KB, MB, GB). It uses the binary
    system (base-1024) for unit conversions.

    Example:
        >>> format_directory_size(123456789)
        '117.74 MB'
    """
    if size_in_bytes < 0:
        raise ValueError('Size cannot be negative.')

    # Define size prefixes
    prefixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
    factor = 1024  # 1 KB = 1024 B
    index = 0

    while size_in_bytes >= factor and index < len(prefixes) - 1:
        size_in_bytes /= factor
        index += 1

    # Format the size to two decimal places
    return f'{size_in_bytes:.2f} {prefixes[index]}'
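
As a quick illustration of the base-1024 conversion described in the docstring, the following snippet (not part of this commit) exercises the helper with a few values; the expected outputs in the comments follow directly from the function above.

from aiida.common.utils import format_directory_size

print(format_directory_size(0))          # 0.00 B
print(format_directory_size(2048))       # 2.00 KB
print(format_directory_size(123456789))  # 117.74 MB (the docstring example)

A negative input raises ``ValueError``, as enforced by the first check in the function.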