Dry-Run Testing¶
The Problem¶
The rendering engine requires an OpenGL context and compiled C extensions. Running it in CI or headless environments is impractical. But we still want to test that every tool:
- Can be imported without errors
- Accepts its expected arguments
- Returns a reasonable result
The Solution: cmd=None¶
Every tool checks whether cmd is None and, if so, returns a description instead of calling the rendering engine:
def run(cmd, atom1: str, atom2: str) -> str:
    """Report the distance between *atom1* and *atom2*.

    When ``cmd`` is None (dry-run mode) no rendering engine is touched;
    a description of the would-be call is returned instead.
    """
    if cmd is None:
        # Dry-run: echo the request so tests can verify wiring without OpenGL.
        return f"[dry-run] distance {atom1} — {atom2}"
    measured = cmd.distance("dist_0", atom1, atom2)
    return f"Distance: {measured:.2f} Å"
Automatic Test Discovery¶
tests/conftest.py discovers all tools and creates parametrized tests:
@pytest.fixture(scope="session")
def all_tools():
    """Run tool discovery a single time and share the result for the session."""
    discovered = discover_tools()
    return discovered
The mock_cmd Fixture¶
For tests that need rendering behavior without the real library:
@pytest.fixture
def mock_cmd():
    """A MagicMock standing in for the rendering engine's ``cmd`` object.

    Methods return canned, deterministic values so assertions can be exact.
    """
    stub = MagicMock()
    canned = {
        "count_atoms": 10,
        "distance": 3.5,
        "angle": 109.5,
        "get_dihedral": -60.0,
        "get_names": [],
        "get_object_list": ["test_obj"],
    }
    for method, value in canned.items():
        getattr(stub, method).return_value = value
    # get_model returns an object exposing an empty ``atom`` list.
    stub.get_model.return_value = MagicMock(atom=[])
    return stub
Test Categories¶
1. Dry-Run Tests (No Rendering Engine)¶
def test_distance_dry_run():
    """The distance tool must emit the dry-run marker when cmd is None."""
    from tools.measurements.distance import run

    output = run(None, "A/45/CA", "B/120/CA")
    assert "dry-run" in output
2. Mock Tests (Mocked Rendering)¶
def test_distance_mock(mock_cmd):
    """With a mocked cmd the tool formats the canned 3.5 Å distance."""
    from tools.measurements.distance import run

    output = run(mock_cmd, "A/45/CA", "B/120/CA")
    mock_cmd.distance.assert_called_once()
    assert "3.50" in output
3. Integration Tests (Full Rendering)¶
@pytest.mark.integration
def test_distance_real():
    """Exercise the tool against the real rendering engine.

    Skipped automatically (via conftest) if the rendering engine is not
    installed. Bug fix: the original did ``import codemol`` but then used a
    bare ``cmd`` name, which was never defined and raised NameError — the
    engine's command object must be imported explicitly.
    """
    from codemol import cmd  # the engine's command API object — NOTE(review): confirm export name

    cmd.fetch("1ake")
    from tools.measurements.distance import run
    result = run(cmd, "A/45/CA", "A/120/CA")
    assert "Å" in result
Integration tests are auto-skipped via conftest.py:
def pytest_collection_modifyitems(config, items):
    """Auto-skip integration tests if rendering engine not available."""
    try:
        import codemol  # probe only: presence means the engine is installed
        return  # engine available — leave all collected items untouched
    except ImportError:
        pass
    marker = pytest.mark.skip(reason="rendering engine not available")
    for entry in items:
        if "integration" in entry.keywords:
            entry.add_marker(marker)
Auto-Reset Fixtures¶
Tests that use shared state get automatic cleanup:
@pytest.fixture(autouse=True)
def reset_measurement_registry():
    """Start and finish every test with an empty measurement registry."""
    from codemol.measurement_registry import clear as wipe

    wipe()  # pre-test: drop anything a previous test left behind
    yield
    wipe()  # post-test: leave no residue for the next test
@pytest.fixture(autouse=True)
def reset_selection_overrides():
    """Drop selection alias overrides on both sides of each test."""
    from codemol.selection_aliases import clear_overrides as reset

    reset()  # pre-test cleanup
    yield
    reset()  # post-test cleanup