Commit 4539ed9

remove upload for cache, show url
1 parent b0348be commit 4539ed9

4 files changed: +58 -53 lines

tests/test_web/test_simulation_cache.py

Lines changed: 2 additions & 4 deletions
@@ -218,9 +218,7 @@ def _test_job_run_cache(monkeypatch, basic_simulation):
     assert counters["download"] == 0


-@pytest.mark.parametrize("structure_key", ["polyslab"])
-@pytest.mark.parametrize("monitor_key", ["mode"])
-def _test_autograd_cache(monkeypatch, structure_key, monitor_key):
+def _test_autograd_cache(monkeypatch):
     counters = _patch_run_pipeline(monkeypatch)
     cache = resolve_simulation_cache(use_cache=True)
     cache.clear()
@@ -349,4 +347,4 @@ def test_cache_end_to_end(monkeypatch, tmp_path, tmp_path_factory, basic_simulat
     _test_cache_eviction_by_size(monkeypatch, tmp_path_factory, basic_simulation)
     _test_run_cache_hit_async(monkeypatch, basic_simulation, tmp_path)
     _test_job_run_cache(monkeypatch, basic_simulation)
-    _test_autograd_cache(monkeypatch, basic_simulation)
+    _test_autograd_cache(monkeypatch)

tidy3d/web/api/container.py

Lines changed: 13 additions & 3 deletions
@@ -566,6 +566,7 @@ class BatchData(Tidy3dBaseModel, Mapping):
     verbose: bool = pd.Field(
         True, title="Verbose", description="Whether to print info messages and progressbars."
     )
+
     cached_tasks: Optional[dict[TaskName, bool]] = pd.Field(
         None,
         title="Cached Tasks",
@@ -578,6 +579,13 @@ class BatchData(Tidy3dBaseModel, Mapping):
         description="Whether to use local cache for retrieving Simulation results.",
     )

+    is_downloaded: Optional[bool] = pd.Field(
+        False,
+        title="Is Downloaded",
+        description="Whether the simulation data was downloaded before.",
+    )
+
+
     def load_sim_data(self, task_name: str) -> WorkflowDataType:
         """Load a simulation data object from file by task name."""
         task_data_path = self.task_paths[task_name]
@@ -592,7 +600,7 @@ def load_sim_data(self, task_name: str) -> WorkflowDataType:
             verbose=False,
             from_cache=from_cache,
             use_cache=self.use_cache,
-            replace_existing=False,
+            replace_existing=not (from_cache or self.is_downloaded),
         )

     def __getitem__(self, task_name: TaskName) -> WorkflowDataType:
@@ -1207,21 +1215,23 @@ def load(self, path_dir: str = DEFAULT_DATA_DIR, replace_existing: bool = False)
             task_ids[task_name] = self.jobs[task_name].task_id

         loaded = {task_name: job.load_if_cached for task_name, job in self.jobs.items()}
+
+        self.download(path_dir=path_dir, replace_existing=replace_existing)
+
         data = BatchData(
             task_paths=task_paths,
             task_ids=task_ids,
             verbose=self.verbose,
             cached_tasks=loaded,
             use_cache=self.use_cache,
+            is_downloaded=True,
         )

         for task_name, job in self.jobs.items():
             if isinstance(job.simulation, ModeSolver):
                 job_data = data[task_name]
                 job.simulation._patch_data(data=job_data)

-        self.download(path_dir=path_dir, replace_existing=replace_existing)
-
         return data

     def delete(self) -> None:
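
For orientation, the net effect of the container.py change is a single boolean rule: load_sim_data should force a fresh download only when the result was neither restored from the local cache nor already downloaded by Batch.load. A minimal standalone sketch of that rule follows (plain Python; _LoadFlags and should_replace_existing are illustrative names, not tidy3d API):

from dataclasses import dataclass


@dataclass
class _LoadFlags:
    """Stand-in for the cache-related flags carried on BatchData."""

    from_cache: bool     # result was restored from the local simulation cache
    is_downloaded: bool  # Batch.load() already downloaded the result files


def should_replace_existing(flags: _LoadFlags) -> bool:
    """Mirror the new replace_existing=not (from_cache or self.is_downloaded) decision."""
    return not (flags.from_cache or flags.is_downloaded)


# Only a result that is neither cached nor pre-downloaded triggers a re-download.
assert should_replace_existing(_LoadFlags(from_cache=False, is_downloaded=False))
assert not should_replace_existing(_LoadFlags(from_cache=True, is_downloaded=False))
assert not should_replace_existing(_LoadFlags(from_cache=False, is_downloaded=True))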

tidy3d/web/api/webapi.py

Lines changed: 35 additions & 12 deletions
@@ -139,16 +139,26 @@ def restore_simulation_if_cached(
     path: str,
     use_cache: Optional[bool] = None,
     reduce_simulation: Literal["auto", True, False] = "auto",
+    verbose: bool = True,
 ) -> bool:
     simulation_cache = resolve_simulation_cache(use_cache)
     copied_from_cache = False
     if simulation_cache is not None:
         sim_for_cache = simulation
         if isinstance(simulation, (ModeSolver, ModeSimulation)):
             sim_for_cache = get_reduced_simulation(simulation, reduce_simulation)
-        entry = simulation_cache.try_fetch(simulation=sim_for_cache)
+        entry = simulation_cache.try_fetch(simulation=sim_for_cache, verbose=verbose)
         if entry is not None:
             copied_from_cache = _copy_simulation_data_from_cache_entry(entry, path)
+            cached_task_id = entry.metadata.get("task_id")
+            cached_workflow_type = entry.metadata.get("workflow_type")
+            if cached_task_id is not None and cached_workflow_type is not None and verbose:
+                console = get_logging_console() if verbose else None
+                url, _ = _get_task_urls(
+                    cached_workflow_type,
+                    simulation,
+                    cached_task_id)
+                console.log(f"Loaded simulation from local cache.\nView cached task using web UI at [link={url}]'{url}'[/link].")
     return copied_from_cache


@@ -278,7 +288,7 @@ def run(
         Monitor progress of each of the running tasks.
     """
     copied_from_cache = restore_simulation_if_cached(
-        simulation=simulation, path=path, use_cache=use_cache, reduce_simulation=reduce_simulation
+        simulation=simulation, path=path, use_cache=use_cache, reduce_simulation=reduce_simulation, verbose=verbose
     )

     if not copied_from_cache:
@@ -320,6 +330,26 @@ def run(
         simulation._patch_data(data=data)
     return data

+def _get_task_urls(
+    task_type: str,
+    simulation: WorkflowType,
+    resource_id: str,
+    folder_id: Optional[str] = None,
+    group_id: Optional[str] = None,
+) -> tuple[str, Optional[str]]:
+    """Log task and folder links to the web UI."""
+    print("task_type:", task_type)
+    if (task_type in ["RF", "COMPONENT_MODELER", "TERMINAL_COMPONENT_MODELER"]) and isinstance(simulation, TerminalComponentModeler):
+        url = _get_url_rf(group_id or resource_id)
+    else:
+        url = _get_url(resource_id)
+
+    if folder_id is not None:
+        folder_url = _get_folder_url(folder_id)
+    else:
+        folder_url = None
+    return url, folder_url
+

 @wait_for_connection
 def upload(
@@ -441,16 +471,9 @@ def upload(
                 f"Cost of {solver_name} simulations is subject to change in the future."
             )
         if task_type in GUI_SUPPORTED_TASK_TYPES:
-            if (task_type == "RF") and (isinstance(simulation, TerminalComponentModeler)):
-                url = _get_url_rf(group_id or resource_id)
-                folder_url = _get_folder_url(task.folder_id)
-                console.log(f"View task using web UI at [link={url}]'{url}'[/link].")
-                console.log(f"Task folder: [link={folder_url}]'{task.folder_name}'[/link].")
-            else:
-                url = _get_url(resource_id)
-                folder_url = _get_folder_url(task.folder_id)
-                console.log(f"View task using web UI at [link={url}]'{url}'[/link].")
-                console.log(f"Task folder: [link={folder_url}]'{task.folder_name}'[/link].")
+            url, folder_url = _get_task_urls(task_type, simulation, resource_id, task.folder_id, group_id)
+            console.log(f"View task using web UI at [link={url}]'{url}'[/link].")
+            console.log(f"Task folder: [link={folder_url}]'{task.folder_name}'[/link].")

         remote_sim_file = SIM_FILE_HDF5_GZ
         if task_type == "MODE_SOLVER":
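
Both the upload path and the new cache-hit path now build their web UI links through _get_task_urls. The sketch below reproduces that branching in isolation; get_task_urls, RF_TASK_TYPES, and the stub URL builders returning example.invalid placeholders are illustrative only (the real _get_url, _get_url_rf, and _get_folder_url helpers and their link formats live in webapi.py), and the TerminalComponentModeler isinstance check is simplified to a boolean flag:

from typing import Optional


# Placeholder URL builders; the real _get_url / _get_url_rf / _get_folder_url formats differ.
def _get_url(task_id: str) -> str:
    return f"https://example.invalid/task/{task_id}"


def _get_url_rf(group_id: str) -> str:
    return f"https://example.invalid/rf/{group_id}"


def _get_folder_url(folder_id: str) -> str:
    return f"https://example.invalid/folder/{folder_id}"


RF_TASK_TYPES = {"RF", "COMPONENT_MODELER", "TERMINAL_COMPONENT_MODELER"}


def get_task_urls(
    task_type: str,
    is_terminal_component_modeler: bool,
    resource_id: str,
    folder_id: Optional[str] = None,
    group_id: Optional[str] = None,
) -> tuple[str, Optional[str]]:
    """Return (task_url, folder_url), preferring the RF view for terminal component modelers."""
    if task_type in RF_TASK_TYPES and is_terminal_component_modeler:
        url = _get_url_rf(group_id or resource_id)
    else:
        url = _get_url(resource_id)
    folder_url = _get_folder_url(folder_id) if folder_id is not None else None
    return url, folder_url


# Example: a plain FDTD task with a known folder id.
task_url, folder_url = get_task_urls("FDTD", False, "task-123", folder_id="folder-7")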

tidy3d/web/cache.py

Lines changed: 8 additions & 34 deletions
@@ -301,36 +301,20 @@ def _fetch(self, key: str) -> Optional[CacheEntry]:
             self._touch(entry)
             return entry

-    def fetch_by_task(self, task_id: str) -> Optional[CacheEntry]:
-        """Retrieve an entry by task id."""
-        with self._lock:
-            for entry in self._iter_entries():
-                metadata = entry.metadata
-                task_ids = metadata.get("task_ids", [])
-                if task_id in task_ids and entry.exists():
-                    if not entry.verify():
-                        self._remove_entry(entry)
-                        return None
-                    self._touch(entry)
-                    return entry
-            return None
-
     def __len__(self) -> int:
         """Return number of valid cache entries."""
         with self._lock:
             return sum(1 for _ in self._iter_entries())

     def _store(
-        self, key: str, task_id: Optional[str], source_path: Path, metadata: dict[str, Any]
+        self, key: str, source_path: Path, metadata: dict[str, Any]
     ) -> Optional[CacheEntry]:
         """Store a new cache entry from ``source_path``.

         Parameters
         ----------
         key : str
             Cache key computed from simulation hash and runtime context.
-        task_id : str, optional
-            Server task id associated with this artifact.
         source_path : Path
             Location of the artifact to cache.
         metadata : dict[str, Any]
@@ -358,11 +342,6 @@ def _store(
         metadata["last_used"] = now_iso
         metadata["checksum"] = checksum
         metadata["file_size"] = file_size
-        if task_id:
-            task_ids = list(metadata.get("task_ids", []))
-            if task_id not in task_ids:
-                task_ids.append(task_id)
-            metadata["task_ids"] = task_ids

         _write_metadata(tmp_meta, metadata)
         try:
@@ -508,7 +487,7 @@ def try_fetch(
             return None
         if verbose:
             log.info(
-                "Simulation cache hit for workflow '%s'; using local results.", workflow_type
+                f"Simulation cache hit for workflow '{workflow_type}'; using local results."
            )

        return entry
@@ -543,16 +522,13 @@ def store_result(
         metadata = build_entry_metadata(
             simulation_hash=simulation_hash,
             workflow_type=workflow_type,
-            runtime_context={
-                "task_id": task_id,
-            },
+            task_id=task_id,
             version=version,
-            extras={"path": str(Path(path))},
+            path=Path(path),
         )

         self._store(
             key=cache_key,
-            task_id=task_id,  # keeps a reverse link for legacy fetch_by_task
             source_path=Path(path),
             metadata=metadata,
         )
@@ -672,19 +648,17 @@ def build_entry_metadata(
     *,
     simulation_hash: str,
     workflow_type: str,
-    runtime_context: dict[str, Any],
+    task_id: str,
     version: str,
-    extras: Optional[dict[str, Any]] = None,
+    path: Path,
 ) -> dict[str, Any]:
     """Create metadata dictionary for a cache entry."""

     metadata: dict[str, Any] = {
         "simulation_hash": simulation_hash,
         "workflow_type": workflow_type,
-        "runtime_context": _canonicalize(runtime_context),
         "versions": _canonicalize(version),
-        "task_ids": [],
+        "task_id": task_id,
+        "path": str(path),
     }
-    if extras:
-        metadata.update(_canonicalize(extras))
     return metadata
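
With fetch_by_task and the task_ids list gone, a cache entry's metadata is a flat dictionary holding one task_id and the artifact path, which is what restore_simulation_if_cached reads back to log the task URL on a hit. A standalone sketch of that layout, using illustrative values and omitting the _canonicalize step applied to the version in the real build_entry_metadata:

from pathlib import Path
from typing import Any


def build_entry_metadata_sketch(
    *,
    simulation_hash: str,
    workflow_type: str,
    task_id: str,
    version: str,
    path: Path,
) -> dict[str, Any]:
    """Flat metadata layout matching the new build_entry_metadata signature."""
    return {
        "simulation_hash": simulation_hash,
        "workflow_type": workflow_type,
        "versions": version,  # the real code canonicalizes this value
        "task_id": task_id,
        "path": str(path),
    }


# On a cache hit, the fields can be read back directly to build the web UI link.
meta = build_entry_metadata_sketch(
    simulation_hash="abc123",
    workflow_type="FDTD",
    task_id="task-0001",
    version="2.8.0",
    path=Path("simulation_data.hdf5"),
)
assert meta.get("task_id") == "task-0001"
assert meta.get("workflow_type") == "FDTD"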
