diff --git a/.gitignore b/.gitignore
index 24c7553..6f72347 100644
--- a/.gitignore
+++ b/.gitignore
@@ -110,6 +110,7 @@ celerybeat.pid
 .env
 .venv
 env/
+venv/
 venv*/
 ENV/
 env.bak/
diff --git a/changelog/165.feature.rst b/changelog/165.feature.rst
new file mode 100644
index 0000000..e69586b
--- /dev/null
+++ b/changelog/165.feature.rst
@@ -0,0 +1,10 @@
+Add an internal ephemeris data cache. This avoids hidden internal Fido searches and repeated downloads of the same data, improving performance when the same time ranges are queried repeatedly, and it also gives the user an option to preload the cache with data.
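+
+For example, repeated queries over the same time interval now reuse the cache (times are illustrative)::
+
+    from astropy.time import Time
+    from stixpy.coordinates.transforms import get_hpc_info
+
+    t = Time(["2023-01-01T00:00:00", "2023-01-01T00:01:00"])
+    get_hpc_info(t)  # first call downloads ANC data via Fido and fills the cache
+    get_hpc_info(t)  # second call is served from the ephemeris cache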
diff --git a/stixpy/coordinates/tests/test_transforms.py b/stixpy/coordinates/tests/test_transforms.py
index e5bba7a..0b41e03 100644
--- a/stixpy/coordinates/tests/test_transforms.py
+++ b/stixpy/coordinates/tests/test_transforms.py
@@ -84,19 +84,32 @@ def test_stx_to_hpc_obstime_end():
     assert np.all(stix_coord.obstime_end.isclose(stix_coord_rt.obstime_end))
 
 
+def test_stx_to_hpc_obstime_end_x2():
+    # regression test: the second call used to crash
+    test_stx_to_hpc_obstime_end()
+    test_stx_to_hpc_obstime_end()
+
+
 @pytest.mark.remote_data
 def test_get_aux_data():
     with pytest.raises(ValueError, match="No STIX pointing data found for time range"):
         _get_ephemeris_data(Time("2015-06-06"))  # Before the mission started
-    aux_data = _get_ephemeris_data(Time("2022-08-28T16:02:00"))
-    assert len(aux_data) == 1341
+    t1 = Time("2022-08-28T16:02:00")
+    aux_data = _get_ephemeris_data(t1)
+    assert len(aux_data) > 0
+    assert aux_data["time"].min() <= t1 <= aux_data["time_end"].max()
 
-    aux_data = _get_ephemeris_data(Time("2022-08-28T16:02:00"), end_time=Time("2022-08-28T16:04:00"))
-    assert len(aux_data) == 1341
+    t2 = Time("2022-08-28T16:04:00")
+    aux_data = _get_ephemeris_data(t1, end_time=t2)
+    assert len(aux_data) > 0
+    assert t1 <= aux_data["time"].min() < t2 <= aux_data["time_end"].max()
 
-    aux_data = _get_ephemeris_data(Time("2022-08-28T23:58:00"), end_time=Time("2022-08-29T00:02:00"))
-    assert len(aux_data) == 2691
+    t1 = Time("2022-08-28T23:58:00")
+    t2 = Time("2022-08-29T00:02:00")
+    aux_data = _get_ephemeris_data(t1, end_time=t2)
+    assert len(aux_data) > 0
+    assert aux_data["time"].min() <= t1 < t2 <= aux_data["time_end"].max()
 
 
 @pytest.mark.remote_data
@@ -135,9 +148,9 @@ def test_get_hpc_info_shapes():
     roll3, solo_heeq3, stix_pointing3 = get_hpc_info(t[5])
 
     assert_quantity_allclose(roll1[5], roll2)
-    assert_quantity_allclose(solo_heeq1[5, :], solo_heeq2[0, :])
-    assert_quantity_allclose(stix_pointing1[5, :], stix_pointing2[0, :])
+    assert_quantity_allclose(solo_heeq1[5, :], solo_heeq2)
+    assert_quantity_allclose(stix_pointing1[5, :], stix_pointing2)
 
     assert_quantity_allclose(roll3, roll2[0])
-    assert_quantity_allclose(solo_heeq3, solo_heeq2[0, :])
-    assert_quantity_allclose(stix_pointing3, stix_pointing2[0, :])
+    assert_quantity_allclose(solo_heeq3, solo_heeq2)
+    assert_quantity_allclose(stix_pointing3, stix_pointing2)
diff --git a/stixpy/coordinates/transforms.py b/stixpy/coordinates/transforms.py
index ef58f53..9536821 100644
--- a/stixpy/coordinates/transforms.py
+++ b/stixpy/coordinates/transforms.py
@@ -1,20 +1,21 @@
 import warnings
-from functools import lru_cache
 
 import astropy.coordinates as coord
 import astropy.units as u
 import numpy as np
 from astropy.coordinates import frame_transform_graph
 from astropy.coordinates.matrix_utilities import matrix_transpose, rotation_matrix
-from astropy.io import fits
-from astropy.table import QTable, vstack
+from astropy.table import vstack
 from astropy.time import Time
 from sunpy.coordinates import HeliographicStonyhurst, Helioprojective
 from sunpy.net import Fido
 from sunpy.net import attrs as a
 
 from stixpy.coordinates.frames import STIXImaging
+from stixpy.product.product_factory import Product
+from stixpy.product.sources.anc import Ephemeris
 from stixpy.utils.logging import get_logger
+from stixpy.utils.table_lru import TableLRUCache
 
 STIX_X_SHIFT = 26.1 * u.arcsec  # fall back to this when non sas solution available
 STIX_Y_SHIFT = 58.2 * u.arcsec  # fall back to this when non sas solution available
@@ -23,7 +24,31 @@
 
 logger = get_logger(__name__)
 
-__all__ = ["get_hpc_info", "stixim_to_hpc", "hpc_to_stixim"]
+__all__ = ["get_hpc_info", "stixim_to_hpc", "hpc_to_stixim", "STIX_EPHEMERIS_CACHE", "load_ephemeris_fits_to_cache"]
+
+# Create a global cache for STIX ephemeris data
+STIX_EPHEMERIS_CACHE = TableLRUCache("STIX_EPHEMERIS_CACHE", maxsize=300000, default_bin_duration=64 * u.s)
+
+
+def load_ephemeris_fits_to_cache(anc_file):
+    """
+    Load ephemeris data from a FITS file into the ephemeris cache.
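+
+    Examples
+    --------
+    A sketch of preloading the cache from already downloaded ANC ephemeris
+    files (``anc_files`` is a hypothetical list of local file paths)::
+
+        >>> for anc_file in anc_files:  # doctest: +SKIP
+        ...     load_ephemeris_fits_to_cache(anc_file)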
+    """
+    logger.info(f"Loading STIX ephemeris data from {anc_file} into cache")
+    try:
+        anc = Product(anc_file, data_only=True)
+        if isinstance(anc, Ephemeris):
+            STIX_EPHEMERIS_CACHE.put(anc.data)
+    except (OSError, ValueError) as e:
+        logger.error(f"Error loading STIX ephemeris data from {anc_file}: {e}")
 
 
 def _get_rotation_matrix_and_position(obstime, obstime_end=None):
@@ -57,7 +74,7 @@
 def get_hpc_info(times, end_time=None):
     r"""
-    Get STIX pointing and SO location from L2 aspect files.
+    Get STIX pointing and SO location from ANC aspect files.
 
     Parameters
     ----------
@@ -68,11 +85,16 @@ def get_hpc_info(times, end_time=None):
     Returns
     -------
     """
-    aux = _get_ephemeris_data(times.min(), end_time or times.max())
+    start_time = times.min()
+    end_time = end_time or times.max()
+
+    aux = _get_ephemeris_data(start_time, end_time)
 
     indices = np.argwhere((aux["time"] >= times.min()) & (aux["time"] <= times.max()))
     if end_time is not None:
         indices = np.argwhere((aux["time"] >= times.min()) & (aux["time"] <= end_time))
+    indices = indices.flatten()
 
     if end_time is not None and times.size == 1 and indices.size >= 2:
@@ -120,8 +146,8 @@ def get_hpc_info(times, end_time=None):
             << aux["solo_loc_heeq_zxy"].unit
         )
 
-        sas_x = np.interp(x, xp, aux["y_srf"])
-        sas_y = np.interp(x, xp, aux["z_srf"])
+        sas_x = np.interp(x, xp, aux["y_srf"].value) << aux["y_srf"].unit
+        sas_y = np.interp(x, xp, aux["z_srf"].value) << aux["z_srf"].unit
         if x.size == 1:
             good_sas = [True] if np.interp(x, xp, aux["sas_ok"]).astype(bool) else []
         else:
@@ -154,10 +180,9 @@ def get_hpc_info(times, end_time=None):
 
     return roll, solo_heeq, stix_pointing
 
-@lru_cache
 def _get_ephemeris_data(start_time, end_time=None):
     r"""
-    Search, download and read L2 pointing data.
+    Search the cache for, or else download and read, ANC pointing data.
 
     Parameters
     ----------
@@ -170,39 +195,44 @@ def _get_ephemeris_data(start_time, end_time=None):
     """
     if end_time is None:
         end_time = start_time
-    # Find, download, read aux file with pointing, sas and position information
-    logger.debug(f"Searching for AUX data: {start_time} - {end_time}")
-    query = Fido.search(
-        a.Time(start_time, end_time),
-        a.Instrument.stix,
-        a.Level.anc,
-        a.stix.DataType.asp,
-        a.stix.DataProduct.asp_ephemeris,
-    )
-    if len(query["stix"]) == 0:
-        raise ValueError(f"No STIX pointing data found for time range {start_time} to {end_time}.")
-
-    logger.debug(f"Downloading {len(query['stix'])} AUX files")
-    aux_files = Fido.fetch(query["stix"])
-    if len(aux_files.errors) > 0:
-        raise ValueError("There were errors downloading the data.")
-    # Read and extract data
-    logger.debug("Loading and extracting AUX data")
-
-    aux_data = []
-    for aux_file in aux_files:
-        hdu = fits.getheader(aux_file, ext=0)
-        aux = QTable.read(aux_file, hdu=2)
-        date_beg = Time(hdu.get("DATE-BEG"))
-        aux["time"] = (
-            date_beg + aux["time"] - 32 * u.s
-        )  # Shift AUX data by half a time bin (starting time vs. bin centre)
-        aux_data.append(aux)
-
-    aux = vstack(aux_data)
-    aux.sort(keys=["time"])
-
-    return aux
+
+    logger.debug(f"Getting STIX ephemeris data for {start_time} to {end_time}")
+    from_cache = STIX_EPHEMERIS_CACHE.get(start_time, end_time)
+    if from_cache is None:
+        # Find, download, read aux file with pointing, sas and position information
+        logger.info(f"Fido searching for ANC data: {start_time} - {end_time}")
+        query = Fido.search(
+            a.Time(start_time, end_time),
+            a.Instrument.stix,
+            a.Level.anc,
+            a.stix.DataType.asp,
+            a.stix.DataProduct.asp_ephemeris,
+        )
+        if len(query["stix"]) == 0:
+            raise ValueError(f"No STIX pointing data found for time range {start_time} to {end_time}.")
+        else:
+            query["stix"].filter_for_latest_version()
+        logger.debug(f"Downloading {len(query['stix'])} AUX files")
+        aux_files = Fido.fetch(query["stix"])
+        if len(aux_files.errors) > 0:
+            raise ValueError("There were errors downloading the data.")
+        # Read and extract data
+        logger.debug("Loading and extracting ANC data")
+
+        anc_data = []
+        for aux_file in aux_files:
+            anc = Product(aux_file, data_only=True)
+            anc_data.append(anc.data)
+
+        ephemeris = vstack(anc_data)
+        ephemeris.sort(keys=["time"])
+
+        STIX_EPHEMERIS_CACHE.put(ephemeris)
+
+        return ephemeris
+    else:
+        logger.debug(f"Using cached ANC data: {start_time} - {end_time}")
+        return from_cache
 
 
 @frame_transform_graph.transform(coord.FunctionTransform, STIXImaging, Helioprojective)
diff --git a/stixpy/product/product.py b/stixpy/product/product.py
index ce9deeb..f22fe5f 100644
--- a/stixpy/product/product.py
+++ b/stixpy/product/product.py
@@ -23,7 +23,7 @@ def __init_subclass__(cls, **kwargs):
 
 
 class GenericProduct(BaseProduct):
-    def __init__(self, *, meta, control, data, idb_versions=None, energies=None):
+    def __init__(self, *, meta, control, data, idb_versions=None, energies=None, **kwargs):
         """
         Generic product composed of meta, control, data and optionally idb, and energy information
diff --git a/stixpy/product/product_factory.py b/stixpy/product/product_factory.py
index 413f616..ba8a789 100644
--- a/stixpy/product/product_factory.py
+++ b/stixpy/product/product_factory.py
@@ -116,7 +116,16 @@ def _read_file(self, fname, **kwargs):
             raise FileError(f"File '{fname}' is not a STIX fits file.")
 
         data = {"meta": hdul[0].header}
-        for name in ["CONTROL", "DATA", "IDB_VERSIONS", "ENERGIES"]:
+
+        # determine which extensions to load
+        if kwargs.get("data_only", False):
+            # read just the CONTROL and DATA extensions if requested
+            extensions = ["CONTROL", "DATA"]
+        else:
+            # normally read all extensions
+            extensions = ["CONTROL", "DATA", "IDB_VERSIONS", "ENERGIES"]
+
+        for name in extensions:
             try:
                 data[name.lower()] = read_qtable(fname, hdu=name)
             except KeyError as e:
diff --git a/stixpy/product/sources/__init__.py b/stixpy/product/sources/__init__.py
index 6814efe..f8ed7e4 100644
--- a/stixpy/product/sources/__init__.py
+++ b/stixpy/product/sources/__init__.py
@@ -1,3 +1,4 @@
+from stixpy.product.sources.anc import *
 from stixpy.product.sources.housekeeping import *
 from stixpy.product.sources.quicklook import *
 from stixpy.product.sources.science import *
diff --git a/stixpy/product/sources/anc.py b/stixpy/product/sources/anc.py
new file mode 100644
index 0000000..8d2abc3
--- /dev/null
+++ b/stixpy/product/sources/anc.py
@@ -0,0 +1,63 @@
+import astropy.units as u
+from astropy.time import Time
+from astropy.units import Quantity
+from sunpy.time import TimeRange
+
+from stixpy.product.product import L1Product
+
+__all__ = ["ANCProduct", "Ephemeris"]
+
+
+class ANCProduct(L1Product):
+    """
+    Basic ANC product.
+    """
+
+    @property
+    def time(self) -> Time:
+        return self.data["time"]
+
+    @property
+    def exposure_time(self) -> Quantity[u.s]:
+        return self.data["timedel"].to(u.s)
+
+    @property
+    def time_range(self) -> TimeRange:
+        """
+        A `sunpy.time.TimeRange` for the data.
+        """
+        return TimeRange(self.time[0] - self.exposure_time[0] / 2, self.time[-1] + self.exposure_time[-1] / 2)
+
+
+class Ephemeris(ANCProduct):
+    """
+    Ephemeris data in daily files, normally at 64 s time resolution.
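+
+    Examples
+    --------
+    A sketch, assuming a locally downloaded ANC ephemeris file
+    (the file name is hypothetical)::
+
+        >>> from stixpy.product.product_factory import Product  # doctest: +SKIP
+        >>> eph = Product("solo_ANC_stix-asp-ephemeris_20230101_V02.fits")  # doctest: +SKIP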
+    """
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        # TODO remove this when we have a better solution
+        # Shift ANC data by half a time bin (starting time vs. bin centre)
+        self.data["time_end"] = self.data["time"] + 32 * u.s
+        self.data["time"] = self.data["time"] - 32 * u.s
+        self.data["timedel"] = 64 * u.s
+        self.data["time_utc"] = [Time(t, format="isot", scale="utc") for t in self.data["time_utc"]]
+
+    @classmethod
+    def is_datasource_for(cls, *, meta, **kwargs):
+        """Determines if meta data matches ANC ephemeris data."""
+        service_subservice_ssid = tuple(meta[name] for name in ["STYPE", "SSTYPE", "SSID"])
+        level = meta["level"]
+        if service_subservice_ssid == (0, 0, 1) and level == "ANC":
+            return True
+
+    def __repr__(self):
+        return f"{self.__class__.__name__}\n    {self.time_range}"
diff --git a/stixpy/utils/table_lru.py b/stixpy/utils/table_lru.py
new file mode 100644
index 0000000..c910c10
--- /dev/null
+++ b/stixpy/utils/table_lru.py
@@ -0,0 +1,147 @@
+from datetime import datetime
+
+import astropy.units as u
+import numpy as np
+from astropy.table import QTable, unique, vstack
+
+from stixpy.utils.logging import get_logger
+
+logger = get_logger(__name__)
+
+__all__ = ["TableLRUCache"]
+
+
+class TableLRUCache:
+    """
+    An LRU cache based on `~astropy.table.QTable`.
+    Search and uniqueness operate on the 'time' column.
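+
+    Examples
+    --------
+    A minimal usage sketch (times and values are made up)::
+
+        >>> import astropy.units as u
+        >>> from astropy.table import QTable
+        >>> from astropy.time import Time
+        >>> cache = TableLRUCache("demo", maxsize=1000, default_bin_duration=1 * u.s)
+        >>> data = QTable({"time": Time("2023-01-01") + [0, 1, 2] * u.s, "value": [1, 2, 3]})
+        >>> cache.put(data)
+        >>> subset = cache.get(data["time"][0], data["time"][1])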
+    """
+
+    def __init__(self, name: str, *, maxsize=300000, default_bin_duration=64 * u.s):
+        """Create and initialize an LRU cache table.
+
+        Parameters
+        ----------
+        name : str
+            name of the cache; used for logging only
+        maxsize : int, optional
+            maximum number of rows in the cache table, by default 300000 (roughly 6 months of data at 64 s time resolution)
+        default_bin_duration : `~astropy.units.Quantity`, optional
+            default time duration of a data entry, by default ``64 * u.s``;
+            used to decide whether search results might contain data gaps
+        """
+        self.name = name
+        self.maxsize = maxsize
+        self.cache = QTable()
+        self.default_bin_duration = default_bin_duration
+
+    def clear(self):
+        """
+        Clear the cache.
+        """
+        self.cache = QTable()
+        logger.info(f"{self.name}: Cleared LRU cache.")
+
+    def put(self, anc_data):
+        """
+        Add data to the cache.
+        """
+        # create a primary index on time if the cache has data but no index yet
+        if len(self.cache) > 0 and len(self.cache.indices) == 0:
+            self.cache.add_index("time")
+
+        anc_data["__lcu"] = datetime.now().timestamp()
+        logger.info(f"{self.name}: Adding {len(anc_data)} rows to STIX ephemeris cache.")
+        self.cache = vstack([self.cache, anc_data])
+        self.cache = unique(self.cache, keys=["time"], keep="last")
+        self._prune()
+
+    def __len__(self):
+        """
+        Return the number of rows in the cache.
+        """
+        return len(self.cache)
+
+    def _prune(self):
+        """
+        Remove the least recently used data from the cache.
+
+        The new size will be 80% of maxsize to avoid frequent pruning.
+        """
+        if len(self.cache) > self.maxsize:
+            logger.info(f"{self.name}: Pruning LRU cache.")
+
+            sorted_by_lcu_time_idx = self.cache.argsort(keys=["__lcu", "time"])
+            # remove the oldest data
+            self.cache = self.cache[sorted_by_lcu_time_idx[-int(self.maxsize * 0.8) :]]
+
+    def get(self, start_time, end_time=None):
+        r"""
+        Get data from the cache.
+
+        Parameters
+        ----------
+        start_time : `astropy.time.Time`
+            Time or start of a time interval.
+        end_time : `astropy.time.Time`, optional
+            End of the time interval; defaults to ``start_time``.
+
+        Returns
+        -------
+        data : `astropy.table.Table` or `None`
+            A deep copy of the cached data for the given time range.
+            `None` if no data is found in the cache, or if the cached data
+            might have time gaps (based on the default bin duration).
+        """
+        if len(self.cache) == 0:
+            return None
+
+        if end_time is None:
+            end_time = start_time
+
+        # TODO: should we use the time_end column for a better search
+        # indices = np.argwhere(
+        #     ((self.cache["time"] >= start_time) | (self.cache["time_end"] >= start_time))
+        #     & ((self.cache["time_end"] <= end_time) | (self.cache["time"] <= end_time))
+        # )
+        indices = np.argwhere((self.cache["time"] >= start_time) & (self.cache["time"] <= end_time))
+        indices = indices.flatten()
+
+        if len(indices) == 0:
+            return None
+
+        self.cache["__lcu"][indices] = datetime.now().timestamp()
+
+        if len(indices) == 1 and end_time == start_time:
+            # single time row copy
+            logger.info(f"{self.name}: Using cached data for time point {start_time} to {end_time}")
+            ret_val = self.cache[indices].copy(copy_data=True)
+            ret_val["__lcu"] = 1  # mark as a cache hit
+            return ret_val
+
+        # multiple times: simple heuristic to check that all data within the time range is present
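+        # e.g. for a 10 s range at 1 s bins: frames = ceil(10 / 1 * 0.98) = 10,
+        # so fewer than 10 cached rows are treated as a gap and trigger a miss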
+        duration = abs((end_time - start_time).to_value(u.s))
+        frames = np.ceil(duration / self.default_bin_duration.to_value(u.s) * 0.98)  # allow for minor error
+        if len(indices) < frames:
+            logger.info(f"{self.name}: not enough data in cache for time range {start_time} to {end_time}")
+            return None
+
+        # return a copy of the cached data
+        logger.info(f"{self.name}: Using cached data for time range {start_time} to {end_time}")
+        ret_val = self.cache[indices].copy(copy_data=True)
+        ret_val["__lcu"] = 1  # mark as a cache hit
+        return ret_val
diff --git a/stixpy/utils/tests/test_table_lru_cache.py b/stixpy/utils/tests/test_table_lru_cache.py
new file mode 100644
index 0000000..d1bcc89
--- /dev/null
+++ b/stixpy/utils/tests/test_table_lru_cache.py
@@ -0,0 +1,226 @@
+from datetime import datetime, timedelta
+
+import astropy.units as u
+import numpy as np
+import pytest
+from astropy.table import QTable
+from astropy.time import Time
+from sunpy.net import Fido
+from sunpy.net import attrs as a
+
+from stixpy.coordinates.transforms import STIX_EPHEMERIS_CACHE, get_hpc_info, load_ephemeris_fits_to_cache
+from stixpy.utils.table_lru import TableLRUCache
+
+
+@pytest.fixture
+def cache():
+    """Fixture to create a fresh TableLRUCache instance."""
+    return TableLRUCache("testcache", maxsize=10, default_bin_duration=1 * u.s)
+
+
+@pytest.fixture
+def mock_data_table():
+    """Fixture to create mock ephemeris data."""
+    now = Time(datetime.now())
+    times = now + np.arange(10) * timedelta(seconds=1)
+    data = QTable({"time": times, "value": np.arange(10)})
+    return data
+
+
+def test_initialization_default():
+    """Test default initialization of TableLRUCache."""
+    cache = TableLRUCache("testcache")
+    assert cache.maxsize == 300000
+    assert cache.default_bin_duration == 64 * u.s
+    assert len(cache.cache) == 0
+
+
+def test_initialization_custom_maxsize():
+    """Test initialization with a custom maxsize."""
+    cache = TableLRUCache("testcache", maxsize=100)
+    assert cache.maxsize == 100
+
+
+def test_clear_cache(cache, mock_data_table):
+    """Test clearing the cache."""
+    cache.put(mock_data_table)
+    assert len(cache.cache) == len(mock_data_table)
+    cache.clear()
+    assert len(cache.cache) == 0
+
+
+def test_put_data(cache, mock_data_table):
+    """Test adding data to the cache."""
+    cache.put(mock_data_table)
+    assert len(cache.cache) == len(mock_data_table)
+    assert "__lcu" in cache.cache.colnames
+
+
+def test_get_data(cache, mock_data_table):
+    """Test retrieving data from the cache."""
+    cache.put(mock_data_table)
+    start_time = mock_data_table["time"][2]
+    end_time = mock_data_table["time"][5]
+    result = cache.get(start_time, end_time)
+    assert len(result) == 4
+    assert all(result["time"] >= start_time)
+    assert all(result["time"] <= end_time)
+    assert all(result["__lcu"] == 1)  # code for HIT
+
+
+def test_get_data_no_match(cache, mock_data_table):
+    """Test retrieving data when no matching time range exists."""
+    cache.put(mock_data_table)
+    start_time = Time(datetime.now() + timedelta(days=1))
+    result = cache.get(start_time)
+    assert result is None
+
+
+def test_get_data_single_time(cache, mock_data_table):
+    """Test retrieving data for a single time point."""
+    cache.put(mock_data_table)
+    start_time = mock_data_table["time"][3]
+    result = cache.get(start_time)
+    assert len(result) == 1
+    assert result["time"][0] == start_time
+    assert result["__lcu"][0] == 1  # code for HIT
+
+
+def test_prune_logic(cache, mock_data_table):
+    """Test internal pruning logic.
+
+    Pruning should be called when the cache is full and new data is added.
+    """
+    cache.maxsize = len(mock_data_table) - 3
+    cache.put(mock_data_table)
+    assert len(cache.cache) <= cache.maxsize
+
+
+def test_put_new_data_overrides_same_old_data(cache):
+    now = Time(datetime.now())
+    times = now + np.arange(5) * timedelta(seconds=1)
+    data_first = QTable({"time": times, "value": np.full((5,), 1)})
+    data_last = QTable({"time": times, "value": np.full((5,), 2)})
+
+    cache.put(data_first)
+    assert len(cache.cache) == len(data_first)
+
+    found = cache.get(times[0])
+    assert len(found) == 1
+    assert found["value"][0] == 1
+
+    # Add new data with the same time but different value
+    # This should override the old data
+    cache.put(data_last)
+
+    found = cache.get(times[0])
+    assert len(found) == 1
+    assert found["value"][0] == 2
+
+
+def test_get_range_in_block(cache):
+    """Test retrieving data for a range with time gaps.
+
+    This test checks that the cache handles time gaps correctly:
+    it should return all data in the range, or None if there are any gaps in the range.
+    """
+    now = Time(datetime.now())
+    times = now + np.arange(10) * u.s
+    times[5:] += 2 * u.s  # introduce a gap in the data
+    data = QTable({"time": times, "value": np.full((10,), 1)})
+    cache.put(data)
+    start_time = data["time"][1]
+    end_time = data["time"][3]
+    result = cache.get(start_time, end_time)
+    assert len(result) == 3
+    assert all(result["time"] >= start_time)
+    assert all(result["time"] <= end_time)
+
+    # Test with a range that includes a gap
+    end_time = data["time"][6]
+    result = cache.get(start_time, end_time)
+    assert result is None
+
+
+def test_lru_data_stays(cache):
+    """Test that the LRU data stays in the cache."""
+    now = Time(datetime.now())
+    times = now + np.arange(10) * timedelta(seconds=1)
+    data = QTable({"time": times, "value": np.arange(10)})
+    cache.put(data)
+    assert len(cache.cache) == len(data)
+
+    # Simulate accessing some data to update the LRU
+    assert cache.get(times[2])["value"] == 2
+    assert cache.get(times[5])["value"] == 5
+
+    # reduce the cache size to trigger pruning
+    cache.maxsize = 3
+    cache._prune()
+
+    # Check that the accessed data is still in the cache
+    assert len(cache.cache) > 0
+    assert len(cache.cache) <= 3
+    assert cache.get(times[2]) is not None
+    assert cache.get(times[5]) is not None
+
+
+def test_global_ephemeris_cache():
+    """Test that the global ephemeris cache is initialized."""
+    assert STIX_EPHEMERIS_CACHE is not None
+    assert isinstance(STIX_EPHEMERIS_CACHE, TableLRUCache)
+
+
+@pytest.mark.remote_data
+def test_get_hpc_info_fills_cache():
+    STIX_EPHEMERIS_CACHE.clear()
+    assert len(STIX_EPHEMERIS_CACHE.cache) == 0
+    res = get_hpc_info(Time("2023-01-01T00:00:00"), Time("2023-01-01T00:00:10"))
+    assert res is not None
+    assert len(STIX_EPHEMERIS_CACHE.cache) > 0
+
+
+@pytest.mark.remote_data
+def test_get_hpc_info_cache_hit():
+    """Test that the cache is used when available."""
+    STIX_EPHEMERIS_CACHE.clear()
+    assert len(STIX_EPHEMERIS_CACHE.cache) == 0
+    res1 = get_hpc_info(Time("2023-01-01T12:00:00"), Time("2023-01-01T12:00:10"))
+    assert res1 is not None
+    assert len(STIX_EPHEMERIS_CACHE.cache) > 0
+
+    # Call again with the same time range to check if cache is used
+    res2 = STIX_EPHEMERIS_CACHE.get(Time("2023-01-01T12:00:00"), Time("2023-01-01T12:30:00"))
+    assert res2 is not None
+    assert res2["__lcu"][0] == 1  # code for HIT
+
+
+@pytest.mark.remote_data
+def test_load_anc_file():
+    """Test loading an ANC file."""
+    start_time = Time("2023-01-01T12:00:00")
+    end_time = Time("2023-01-01T12:30:00")
+    query = Fido.search(
+        a.Time(start_time, end_time),
+        a.Instrument.stix,
+        a.Level.anc,
+        a.stix.DataType.asp,
+        a.stix.DataProduct.asp_ephemeris,
+    )
+    if len(query["stix"]) == 0:
+        raise ValueError(f"No STIX pointing data found for time range {start_time} to {end_time}.")
+    else:
+        query["stix"].filter_for_latest_version()
+    aux_files = Fido.fetch(query["stix"])
+
+    assert len(aux_files) > 0
+
+    STIX_EPHEMERIS_CACHE.clear()
+    for file in aux_files:
+        load_ephemeris_fits_to_cache(file)
+
+    assert len(STIX_EPHEMERIS_CACHE.cache) > 0
+
+    # Call again with the same time range to check if cache is used
+    res2 = STIX_EPHEMERIS_CACHE.get(Time("2023-01-01T12:00:00"), Time("2023-01-01T12:30:00"))
+    assert res2 is not None
+    assert res2["__lcu"][0] == 1  # code for HIT