Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
20e908c
test larger cpu runner
mikemhenry Feb 28, 2025
778e735
update omsf runner version
mikemhenry Feb 28, 2025
bee8367
nvidia-smi throws an error if there is no GPU
mikemhenry Feb 28, 2025
973201d
Try a large
mikemhenry Mar 4, 2025
98cec71
just run slow tests
mikemhenry Mar 5, 2025
0fad31f
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Mar 5, 2025
6f6b35c
try xlarge (should fail but that's okay)
mikemhenry Mar 10, 2025
d551ae4
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Mar 12, 2025
5c392d2
lets see if the intel flavor is any faster
mikemhenry Mar 12, 2025
25ac23e
see if this changes the runtime at all
mikemhenry Mar 13, 2025
ababe63
go back to working runner instance type
mikemhenry Mar 13, 2025
17cf308
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Mar 24, 2025
df35dbc
bump size of disk
mikemhenry Mar 24, 2025
ab13563
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Mar 25, 2025
bf280de
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Apr 2, 2025
1b4732c
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Apr 21, 2025
53b89f7
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry May 2, 2025
c133c52
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry May 27, 2025
fd952ae
check time on a t3 med
mikemhenry May 27, 2025
66d008d
try xlarge
mikemhenry May 28, 2025
04fbd29
try t3a.xlarge
mikemhenry May 28, 2025
10c637d
now t3a.2xl
mikemhenry May 28, 2025
7a44864
now t3.2xl
mikemhenry May 28, 2025
2bc498f
use platforms that actually will work
mikemhenry May 29, 2025
fc67eab
need empty set not dict
mikemhenry May 29, 2025
df5c841
Merge remote-tracking branch 'origin' into feat/test_larger_cpu_runner
mikemhenry May 29, 2025
9bdfaec
pin buggy mypy
mikemhenry May 29, 2025
8d17990
fix mypy pin
mikemhenry May 29, 2025
12895e5
see if large works
mikemhenry May 29, 2025
38a0448
check t3.2xlarge
mikemhenry May 29, 2025
69ee3da
make sure we load the marks correctly
mikemhenry May 30, 2025
7d68588
I think we are using the wrong instance family
mikemhenry May 30, 2025
cfcb51f
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Jun 11, 2025
faaeb06
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Jun 11, 2025
caec4c0
don't run integration tests on the CPU
mikemhenry Jun 11, 2025
7e4e3b4
Merge remote-tracking branch 'refs/remotes/origin/feat/test_larger_cp…
mikemhenry Jun 11, 2025
8a8b718
Merge branch 'main' into feat/test_larger_cpu_runner
mikemhenry Jun 12, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 3 additions & 6 deletions .github/workflows/cpu-long-tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@ jobs:
uses: omsf/start-aws-gha-runner@v1.0.0
with:
aws_image_id: ami-0b7f661c228e6a4bb
aws_instance_type: t3a.small # TODO try t3a.medium next
aws_instance_type: c7i.xlarge
aws_home_dir: /home/ubuntu
aws_root_device_size: 125
env:
GH_PAT: ${{ secrets.GH_PAT }}

Expand All @@ -46,9 +47,6 @@ jobs:
- name: Print Docker details
run: "docker version || true"

- name: Check for nvidia-smi
run: "nvidia-smi"

- name: "Setup Micromamba"
uses: mamba-org/setup-micromamba@v2
with:
Expand Down Expand Up @@ -88,12 +86,11 @@ jobs:

- name: "Run tests"
env:
# Set the OFE_SLOW_TESTS to True if running a Cron job
OFE_SLOW_TESTS: "true"
DUECREDIT_ENABLE: 'yes'
OFE_INTEGRATION_TESTS: FALSE
run: |
pytest -n logical -vv --durations=10
pytest -n logical -vv --durations=10 openfecli/tests/ openfe/tests/

stop-aws-runner:
runs-on: ubuntu-latest
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/mypy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
- name: "Install steps"
run: |
python -m pip install --no-deps git+https://github.com/OpenFreeEnergy/gufe@main
python -m pip install mypy
python -m pip install "mypy<1.16.0"
python -m pip install types-setuptools
python -m pip install --no-deps -e .

Expand Down
30 changes: 29 additions & 1 deletion openfe/tests/protocols/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@ def afe_solv_transformation_json() -> str:
"""
d = resources.files('openfe.tests.data.openmm_afe')
fname = "AHFEProtocol_json_results.gz"

with gzip.open((d / fname).as_posix(), 'r') as f: # type: ignore
return f.read().decode() # type: ignore

Expand All @@ -240,3 +240,31 @@ def md_json() -> str:

with gzip.open((d / fname).as_posix(), 'r') as f: # type: ignore
return f.read().decode() # type: ignore

@pytest.fixture
def get_available_openmm_platforms() -> set[str]:
    """
    OpenMM Platforms that are available and functional on system
    """
    import openmm
    from openmm import Platform

    # Names of every Platform compiled into this OpenMM build.
    built_platforms = {
        Platform.getPlatform(idx).getName()
        for idx in range(Platform.getNumPlatforms())
    }

    # A platform can be compiled in but still unusable at runtime (e.g. CUDA
    # with no GPU present), so probe each one by building a minimal
    # one-particle Context and keeping only the names that succeed.
    usable: set[str] = set()
    for name in built_platforms:
        probe_system = openmm.System()
        probe_system.addParticle(1.0)
        probe_integrator = openmm.VerletIntegrator(0.001)
        try:
            probe_context = openmm.Context(
                probe_system, probe_integrator, Platform.getPlatformByName(name)
            )
            usable.add(name)
            del probe_context
        except openmm.OpenMMException:
            # Platform exists but cannot actually run here; skip it.
            continue
        finally:
            # Free the probe objects regardless of whether the Context worked.
            del probe_system, probe_integrator

    return usable
4 changes: 2 additions & 2 deletions openfe/tests/protocols/openmm_ahfe/test_ahfe_slow.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@
@pytest.mark.parametrize('platform', ['CPU', 'CUDA'])
def test_openmm_run_engine(
platform,
available_platforms,
get_available_openmm_platforms,
benzene_modifications,
tmpdir
):
if platform not in available_platforms:
if platform not in get_available_openmm_platforms:
pytest.skip(f"OpenMM Platform: {platform} not available")

# Run a really short calculation to check everything is going well
Expand Down
4 changes: 2 additions & 2 deletions openfe/tests/protocols/openmm_rfe/test_hybrid_top_slow.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@
def test_openmm_run_engine(
benzene_vacuum_system,
platform,
available_platforms,
get_available_openmm_platforms,
benzene_modifications,
tmpdir
):
if platform not in available_platforms:
if platform not in get_available_openmm_platforms:
pytest.skip(f"OpenMM Platform: {platform} not available")
# this test actually runs MD
# these settings are a small self to self sim, that has enough eq that
Expand Down
Loading