Skip to content

Commit 1e1a99f

Browse files
committed
rename protocoldag_cache to protocoldag-results_cache
1 parent 85711c3 commit 1e1a99f

File tree

2 files changed

+11
-8
lines changed

2 files changed

+11
-8
lines changed

gufe/protocols/protocoldag.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -468,10 +468,10 @@ def execute_DAG(
468468

469469
all_cached_results: list[ProtocolUnitResult] = [] # store all unitresults found in the cache
470470
if cache_basedir is not None:
471-
dag_unitresults_dir = cache_basedir / f"{str(protocoldag.key)}_cache"
472-
dag_unitresults_dir.mkdir(exist_ok=True, parents=True)
471+
dag_unitresults_cache = cache_basedir / f"{str(protocoldag.key)}-results_cache"
472+
dag_unitresults_cache.mkdir(exist_ok=True, parents=True)
473473

474-
for file in dag_unitresults_dir.rglob("*.json"):
474+
for file in dag_unitresults_cache.rglob("*.json"):
475475
try:
476476
unit_result = ProtocolUnitResult.from_json(file)
477477
except JSONDecodeError as e:
@@ -531,7 +531,7 @@ def execute_DAG(
531531

532532
# Serialize results if requested
533533
if cache_basedir is not None:
534-
result.to_json(dag_unitresults_dir / f"{str(unit.key)}_unitresults.json")
534+
result.to_json(dag_unitresults_cache / f"{str(unit.key)}_unitresults.json")
535535
break
536536
attempt += 1
537537

@@ -543,7 +543,7 @@ def execute_DAG(
543543
shutil.rmtree(shared_path)
544544

545545
if not keep_cache and cache_basedir is not None:
546-
shutil.rmtree(dag_unitresults_dir)
546+
shutil.rmtree(dag_unitresults_cache)
547547

548548
return ProtocolDAGResult(
549549
name=protocoldag.name,

gufe/tests/test_protocoldag.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ def test_execute_dag(tmp_path, keep_shared, keep_scratch, keep_cache, writefile_
116116
shared_file = os.path.join(shared, f"shared_{str(pu.key)}_attempt_0", f"unit_{id}_shared.txt")
117117
scratch_file = os.path.join(scratch, f"scratch_{str(pu.key)}_attempt_0", f"unit_{id}_scratch.txt")
118118
unit_result_file = os.path.join(
119-
cache_basedir, f"{str(writefile_dag.key)}_cache", f"{str(pu.key)}_unitresults.json"
119+
cache_basedir, f"{str(writefile_dag.key)}-results_cache", f"{str(pu.key)}_unitresults.json"
120120
)
121121

122122
if capture_stderr_stdout:
@@ -206,13 +206,16 @@ def test_execute_DAG_cached_unitresults(tmp_path):
206206
)
207207

208208
for pu in dep_dag.protocol_units:
209-
assert os.path.exists(os.path.join(unit_results_dir, f"{dep_dag.key}_cache", f"{str(pu.key)}_unitresults.json"))
209+
assert os.path.exists(
210+
os.path.join(unit_results_dir, f"{dep_dag.key}-results_cache", f"{str(pu.key)}_unitresults.json")
211+
)
210212

211213
# choose a terminal result so that only one node is rerun
212214
pu_to_corrupt = dependent_units[0]
213215

214216
with open(
215-
os.path.join(unit_results_dir, f"{dep_dag.key}_cache", f"{str(pu_to_corrupt.key)}_unitresults.json"), "a"
217+
os.path.join(unit_results_dir, f"{dep_dag.key}-results_cache", f"{str(pu_to_corrupt.key)}_unitresults.json"),
218+
"a",
216219
) as f:
217220
f.write("string that will break JSON.")
218221

0 commit comments

Comments (0)