Merge branch 'main' into uv-update

khsrali authored Jan 16, 2025
2 parents af8c2d3 + c88fc05 commit cc5221e
Showing 14 changed files with 231 additions and 209 deletions.
2 changes: 1 addition & 1 deletion .github/actions/install-aiida-core/action.yml
@@ -31,7 +31,7 @@ runs:
python-version: ${{ inputs.python-version }}

- name: Set up uv
-uses: astral-sh/setup-uv@v5
+uses: astral-sh/setup-uv@v5.2.0
with:
version: 0.5.x
python-version: ${{ inputs.python-version }}
@@ -3,7 +3,7 @@ description: Bash run in Docker image through Singularity
default_calc_job_plugin: core.arithmetic.add
computer: localhost
filepath_executable: /bin/sh
-image_name: docker://alpine:3
-engine_command: singularity exec --bind $PWD:$PWD {image_name}
+image_name: alpine:3
+engine_command: docker run --user 1001:100 -v $PWD:$PWD -w $PWD -i {image_name}
prepend_text: ' '
append_text: ' '
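For context, the engine_command value is a template: {image_name} is substituted before the code's executable is appended. A minimal sketch of that expansion in plain Python (the assembly shown here is illustrative, not the exact aiida-core internals):

# Illustrative expansion of the containerized-code command template above.
engine_command = 'docker run --user 1001:100 -v $PWD:$PWD -w $PWD -i {image_name}'
image_name = 'alpine:3'
filepath_executable = '/bin/sh'

# Substitute the image and append the executable to get the launch command.
full_command = f'{engine_command.format(image_name=image_name)} {filepath_executable}'
print(full_command)
# -> docker run --user 1001:100 -v $PWD:$PWD -w $PWD -i alpine:3 /bin/sh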
3 changes: 0 additions & 3 deletions .github/workflows/ci-code.yml
@@ -76,10 +76,7 @@ jobs:
AIIDA_WARN_v3: 1
# NOTE1: Python 3.12 has a performance regression when running with code coverage
# so run code coverage only for python 3.9.
-# TODO: Remove a workaround for VIRTUAL_ENV once the setup-uv action is updated
-# https://github.com/astral-sh/setup-uv/issues/219
run: |
-${{ matrix.python-version == '3.9' && 'VIRTUAL_ENV=$PWD/.venv' || '' }}
pytest -n auto --db-backend ${{ matrix.database-backend }} -m 'not nightly' tests/ ${{ matrix.python-version == '3.9' && '--cov aiida' || '' }}
- name: Upload coverage report
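The ${{ ... && ... || ... }} pattern in the remaining run block emulates a conditional expression via short-circuit evaluation, the same trick that works in Python. A quick sketch (values illustrative):

# `cond && a || b` yields `a` when cond is truthy, else `b` -- mirroring
# `${{ matrix.python-version == '3.9' && '--cov aiida' || '' }}`.
for python_version, expected in [('3.9', '--cov aiida'), ('3.12', '')]:
    cov_flag = (python_version == '3.9') and '--cov aiida' or ''
    assert cov_flag == expected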
@@ -12,5 +12,3 @@ verdi -p test_aiida run ${SYSTEM_TESTS}/test_daemon.py
verdi -p test_aiida run ${SYSTEM_TESTS}/test_containerized_code.py
bash ${SYSTEM_TESTS}/test_polish_workchains.sh
verdi daemon stop
-
-AIIDA_TEST_PROFILE=test_aiida pytest --db-backend psql -m nightly tests/
21 changes: 13 additions & 8 deletions .github/workflows/nightly.yml
@@ -27,7 +27,7 @@ jobs:
nightly-tests:

if: github.repository == 'aiidateam/aiida-core' # Prevent running the builds on forks as well
-runs-on: ubuntu-22.04
+runs-on: ubuntu-24.04

services:
postgres:
@@ -55,9 +55,6 @@

steps:
- uses: actions/checkout@v4
-- uses: eWaterCycle/setup-singularity@v7 # for containerized code test
-with:
-singularity-version: 3.8.7

- name: Install system dependencies
run: sudo apt update && sudo apt install postgresql
@@ -72,15 +69,23 @@
- name: Setup environment
run: .github/workflows/setup.sh

-- name: Run tests
-id: tests
-run: .github/workflows/tests_nightly.sh
+- name: Run pytest nightly tests
+id: pytest-tests
+env:
+AIIDA_TEST_PROFILE: test_aiida
+AIIDA_WARN_v3: 1
+run: |
+pytest --db-backend psql -m nightly tests/
+- name: Run daemon nightly tests
+id: daemon-tests
+run: .github/workflows/daemon_tests.sh

- name: Slack notification
# Always run this step (otherwise it would be skipped if any of the previous steps fail) but only if the
# `install` or `tests` steps failed, and the `SLACK_WEBHOOK` is available. The latter is not the case for
# pull requests that come from forks. This is a limitation of secrets on GHA
-if: always() && (steps.install.outcome == 'failure' || steps.tests.outcome == 'failure') && env.SLACK_WEBHOOK != null
+if: always() && (steps.install.outcome == 'failure' || steps.pytest-tests.outcome == 'failure' || steps.daemon-tests.outcome == 'failure') && env.SLACK_WEBHOOK != null
uses: rtCamp/action-slack-notify@v2
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
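The updated if: condition fans out over the two new step ids. A hedged Python model of that gate (function and values are illustrative; GitHub Actions evaluates the expression natively):

# Notify only when a monitored step failed AND the webhook secret exists
# (it is absent on pull requests from forks).
def should_notify(install, pytest_tests, daemon_tests, webhook):
    any_failure = 'failure' in (install, pytest_tests, daemon_tests)
    return any_failure and webhook is not None

assert should_notify('success', 'failure', 'success', 'https://hooks.example') is True
assert should_notify('success', 'failure', 'success', None) is False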
2 changes: 1 addition & 1 deletion .github/workflows/setup.sh
@@ -19,7 +19,7 @@ verdi computer configure core.local localhost --config "${CONFIG}/localhost-conf
verdi computer test localhost
verdi code create core.code.installed --non-interactive --config "${CONFIG}/doubler.yaml"
verdi code create core.code.installed --non-interactive --config "${CONFIG}/add.yaml"
-verdi code create core.code.containerized --non-interactive --config "${CONFIG}/add-singularity.yaml"
+verdi code create core.code.containerized --non-interactive --config "${CONFIG}/add-containerized.yaml"

# set up slurm-ssh computer
verdi computer setup --non-interactive --config "${CONFIG}/slurm-ssh.yaml"
5 changes: 1 addition & 4 deletions .github/workflows/test-install.yml
@@ -38,7 +38,7 @@ jobs:
python-version: '3.11'

- name: Set up uv
-uses: astral-sh/setup-uv@v5
+uses: astral-sh/setup-uv@v5.2.0
with:
version: 0.5.x

@@ -211,8 +211,5 @@ jobs:
env:
AIIDA_TEST_PROFILE: test_aiida
AIIDA_WARN_v3: 1
-# TODO: Remove a workaround for VIRTUAL_ENV once the setup-uv action is updated
-# https://github.com/astral-sh/setup-uv/issues/219
run: |
-${{ matrix.python-version == '3.9' && 'VIRTUAL_ENV=$PWD/.venv' || '' }}
pytest -n auto --db-backend psql -m 'not nightly' tests/
4 changes: 2 additions & 2 deletions .readthedocs.yml
@@ -17,8 +17,8 @@ build:
# https://docs.readthedocs.io/en/stable/build-customization.html#install-dependencies-with-uv
pre_create_environment:
- asdf plugin add uv
-- asdf install uv 0.5.17
-- asdf global uv 0.5.17
+- asdf install uv 0.5.20
+- asdf global uv 0.5.20
create_environment:
- uv venv
install:
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -510,3 +510,6 @@ passenv =
AIIDA_TEST_WORKERS
commands = molecule {posargs:test}
"""

[tool.uv]
required-version = ">=0.5.20"
9 changes: 7 additions & 2 deletions src/aiida/tools/pytest_fixtures/daemon.py
@@ -2,13 +2,15 @@

from __future__ import annotations

+import logging
import pathlib
import typing as t

import pytest

if t.TYPE_CHECKING:
from aiida.engine import Process, ProcessBuilder
+from aiida.engine.daemon.client import DaemonClient
from aiida.orm import ProcessNode


@@ -47,7 +49,7 @@ def test(daemon_client):


@pytest.fixture
-def started_daemon_client(daemon_client):
+def started_daemon_client(daemon_client: 'DaemonClient'):
"""Ensure that the daemon is running for the test profile and return the associated client.
Usage::
@@ -60,11 +62,14 @@ def test(started_daemon_client):
daemon_client.start_daemon()
assert daemon_client.is_daemon_running

+logger = logging.getLogger('tests.daemon:started_daemon_client')
+logger.debug(f'Daemon log file is located at: {daemon_client.daemon_log_file}')
+
yield daemon_client


@pytest.fixture
-def stopped_daemon_client(daemon_client):
+def stopped_daemon_client(daemon_client: 'DaemonClient'):
"""Ensure that the daemon is not running for the test profile and return the associated client.
Usage::
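The Usage:: examples in the docstrings are collapsed in this view; a minimal usage sketch for the started fixture (assuming the aiida-core pytest fixtures are registered in the test session, e.g. as a pytest plugin):

def test_daemon_is_up(started_daemon_client):
    # The fixture starts the daemon for the test profile and yields its client.
    assert started_daemon_client.is_daemon_running
    # The DEBUG log added in this diff also reports this location.
    print(started_daemon_client.daemon_log_file)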
1 change: 1 addition & 0 deletions src/aiida/tools/pytest_fixtures/orm.py
@@ -183,6 +183,7 @@ def factory(label: str | None = None, configure: bool = True) -> 'Computer':
computer.configure(
key_filename=str(ssh_key),
key_policy='AutoAddPolicy',
+safe_interval=1.0,
)

return computer
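A usage sketch for the factory above (fixture and attribute names as they appear elsewhere in this diff; the label is illustrative):

def test_ssh_computer_is_configured(aiida_computer_ssh):
    # The factory now configures the SSH transport with AutoAddPolicy and a
    # 1-second safe_interval between connection attempts.
    computer = aiida_computer_ssh(label='localhost-ssh')
    assert computer.is_configured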
1 change: 0 additions & 1 deletion tests/engine/test_memory_leaks.py
@@ -65,7 +65,6 @@ def test_leak_local_calcjob(aiida_code_installed):


@pytest.mark.skipif(sys.version_info >= (3, 12), reason='Garbage collecting hangs on Python 3.12')
-@pytest.mark.nightly
@pytest.mark.usefixtures('aiida_profile', 'check_memory_leaks')
def test_leak_ssh_calcjob(aiida_computer_ssh):
"""Test whether running a CalcJob over SSH leaks memory.
29 changes: 17 additions & 12 deletions tests/tools/archive/orm/test_links.py
@@ -8,14 +8,16 @@
###########################################################################
"""orm links tests for the export and import routines"""

+import pytest
+
from aiida import orm
from aiida.common.links import LinkType
from aiida.orm.entities import EntityTypes
from aiida.tools.archive import ArchiveFormatSqlZip, create_archive, import_archive
from tests.tools.archive.utils import get_all_node_links


-def test_links_to_unknown_nodes(tmp_path, aiida_profile):
+def test_links_to_unknown_nodes(tmp_path, aiida_profile_clean):
"""Test importing of nodes, that have links to unknown nodes."""
# store a node
node = orm.Data()
@@ -46,7 +48,7 @@ def test_links_to_unknown_nodes(tmp_path, aiida_profile):
with ArchiveFormatSqlZip().open(filename, 'r') as archive:
assert archive.querybuilder().append(entity_type='link').count() == 1

-aiida_profile.reset_storage()
+aiida_profile_clean.reset_storage()

# since the query builder only looks for links between known nodes,
# this should not import the erroneous link
@@ -262,7 +264,8 @@ def test_complex_workflow_graph_links(aiida_profile_clean, tmp_path, aiida_local
assert set(export_set) == set(import_set)


-def test_complex_workflow_graph_export_sets(aiida_profile, tmp_path, aiida_localhost_factory):
+@pytest.mark.nightly
+def test_complex_workflow_graph_export_sets(aiida_profile_clean, tmp_path, aiida_localhost_factory):
"""Test ex-/import of individual nodes in complex graph"""
for export_conf in range(0, 9):
_, (export_node, export_target) = construct_complex_graph(aiida_localhost_factory, export_conf)
@@ -272,7 +275,7 @@ def test_complex_workflow_graph_export_sets(aiida_profile, tmp_path, aiida_local
create_archive([export_node], filename=export_file, overwrite=True)
export_node_str = str(export_node)

-aiida_profile.reset_storage()
+aiida_profile_clean.reset_storage()

import_archive(export_file)

@@ -296,7 +299,8 @@ def test_complex_workflow_graph_export_sets(aiida_profile, tmp_path, aiida_local
)


-def test_high_level_workflow_links(aiida_profile, tmp_path, aiida_localhost_factory):
+@pytest.mark.nightly
+def test_high_level_workflow_links(aiida_profile_clean, tmp_path, aiida_localhost_factory):
"""This test checks that all the needed links are correctly exported and imported.
INPUT_CALC, INPUT_WORK, CALL_CALC, CALL_WORK, CREATE, and RETURN
links connecting Data nodes and high-level Calculation and Workflow nodes:
@@ -318,7 +322,7 @@ def test_high_level_workflow_links(aiida_profile, tmp_path, aiida_localhost_fact

for calcs in high_level_calc_nodes:
for works in high_level_work_nodes:
-aiida_profile.reset_storage()
+aiida_profile_clean.reset_storage()

graph_nodes, _ = construct_complex_graph(aiida_localhost_factory, calc_nodes=calcs, work_nodes=works)

@@ -350,7 +354,7 @@ def test_high_level_workflow_links(aiida_profile, tmp_path, aiida_localhost_fact
export_file = tmp_path.joinpath('export.aiida')
create_archive(graph_nodes, filename=export_file, overwrite=True)

-aiida_profile.reset_storage()
+aiida_profile_clean.reset_storage()

import_archive(export_file)
import_links = get_all_node_links()
@@ -450,7 +454,8 @@ def link_flags_export_helper(name, all_nodes, tmp_path, nodes_to_export, flags,
return ret


-def test_link_flags(aiida_profile, tmp_path, aiida_localhost_factory):
+@pytest.mark.nightly
+def test_link_flags(aiida_profile_clean, tmp_path, aiida_localhost_factory):
"""Verify all link follow flags are working as intended.
Graph (from ``construct_complex_graph()``)::
@@ -584,10 +589,10 @@ def test_link_flags(aiida_profile, tmp_path, aiida_localhost_factory):
),
)

-link_flags_import_helper(input_links_forward, aiida_profile.reset_storage)
-link_flags_import_helper(create_return_links_backward, aiida_profile.reset_storage)
-link_flags_import_helper(call_links_backward_calc1, aiida_profile.reset_storage)
-link_flags_import_helper(call_links_backward_work2, aiida_profile.reset_storage)
+link_flags_import_helper(input_links_forward, aiida_profile_clean.reset_storage)
+link_flags_import_helper(create_return_links_backward, aiida_profile_clean.reset_storage)
+link_flags_import_helper(call_links_backward_calc1, aiida_profile_clean.reset_storage)
+link_flags_import_helper(call_links_backward_work2, aiida_profile_clean.reset_storage)


def test_double_return_links_for_workflows(tmp_path, aiida_profile_clean):