Skip to content

Commit

Permalink
rework levels, change to lazy formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
snbianco committed Oct 16, 2024
1 parent 46a3ae2 commit 5efd55b
Show file tree
Hide file tree
Showing 9 changed files with 70 additions and 69 deletions.
2 changes: 1 addition & 1 deletion astrocut/asdf_cutouts.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def _get_cloud_http(s3_uri: Union[str, S3Path], key: str = None, secret: str = N
resp = requests.head(url, timeout=10)
is_anon = False if resp.status_code == 403 else True
if not is_anon:
log.info(f'Attempting to access private S3 bucket: {s3_path.bucket}')
log.debug('Attempting to access private S3 bucket: %s', s3_path.bucket)

# create file system and get URL of file
fs = s3fs.S3FileSystem(anon=is_anon, key=key, secret=secret, token=token)
Expand Down
23 changes: 11 additions & 12 deletions astrocut/cube_cut.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ def _parse_table_info(self, table_data, verbose=False):
if data_ind == len(table_data):
raise wcs.NoWcsKeywordsFoundError("No FFI rows contain valid WCS keywords.")

log.info("Using WCS from row {} out of {}".format(data_ind, len(table_data)))
log.debug("Using WCS from row %s out of %s", data_ind, len(table_data))

# Turning the table row into a new header object
wcs_header = fits.header.Header()
Expand Down Expand Up @@ -462,9 +462,9 @@ def _get_cutout(self, transposed_cube, threads: Union[int, Literal["auto"]] = 1,
uncert_cutout = np.pad(uncert_cutout, padding, 'constant', constant_values=np.nan)
aperture = np.pad(aperture, padding[1:], 'constant', constant_values=0)

log.info("Image cutout cube shape: {}".format(img_cutout.shape))
log.debug("Image cutout cube shape: %s", img_cutout.shape)
if self.product == "SPOC":
log.info("Uncertainty cutout cube shape: {}".format(uncert_cutout.shape))
log.debug("Uncertainty cutout cube shape: %s", uncert_cutout.shape)

return img_cutout, uncert_cutout, aperture

Expand Down Expand Up @@ -874,8 +874,7 @@ def cube_cut(
else:
self.center_coord = SkyCoord(coordinates, unit='deg')

log.info("Cutout center coordinate: {},{}".format(self.center_coord.ra.deg,
self.center_coord.dec.deg))
log.debug("Cutout center coordinate: %s, %s", self.center_coord.ra.deg, self.center_coord.dec.deg)

# Making size into an array [ny, nx]
if np.isscalar(cutout_size):
Expand All @@ -893,8 +892,8 @@ def cube_cut(

# Get cutout limits
self._get_cutout_limits(cutout_size)
log.info("xmin,xmax: {}".format(self.cutout_lims[1]))
log.info("ymin,ymax: {}".format(self.cutout_lims[0]))
log.debug("xmin,xmax: %s", self.cutout_lims[1])
log.debug("ymin,ymax: %s", self.cutout_lims[0])

# Make the cutout
img_cutout, uncert_cutout, aperture = self._get_cutout(getattr(cube[1], cube_data_prop), threads=threads,
Expand All @@ -903,8 +902,8 @@ def cube_cut(
# Get cutout wcs info
cutout_wcs_full = self._get_full_cutout_wcs(cube[2].header)
max_dist, sigma = self._fit_cutout_wcs(cutout_wcs_full, img_cutout.shape[1:])
log.info("Maximum distance between approximate and true location: {}".format(max_dist))
log.info("Error in approximate WCS (sigma): {}".format(sigma))
log.debug("Maximum distance between approximate and true location: %s", max_dist)
log.debug("Error in approximate WCS (sigma): %f", sigma)

cutout_wcs_dict = self._get_cutout_wcs_dict()

Expand All @@ -926,7 +925,7 @@ def cube_cut(
target_pixel_file = os.path.join(output_path, target_pixel_file)


log.info("Target pixel file: {}".format(target_pixel_file))
log.debug("Target pixel file: %s", target_pixel_file)

# Make sure the output directory exists
if not os.path.exists(output_path):
Expand All @@ -935,7 +934,7 @@ def cube_cut(
# Write the TPF
tpf_object.writeto(target_pixel_file, overwrite=True, checksum=True)

log.info("Write time: {:.2} sec".format(time()-write_time))
log.info("Total time: {:.2} sec".format(time()-start_time))
log.debug("Write time: %.2f sec", (time() - write_time))
log.debug("Total time: %.2f sec", (time() - start_time))

return target_pixel_file
2 changes: 1 addition & 1 deletion astrocut/cutout_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ def _moving_target_focus(path, size, cutout_fles, verbose=False):
tck_tuple, u = splprep([path["position"].ra, path["position"].dec], u=path["time"].jd, s=0)

for fle in cutout_fles:
log.info(f'Processing file: {fle}')
log.debug('Processing file: %s', fle)

# Get the stuff we need from the cutout file
hdu = fits.open(fle)
Expand Down
37 changes: 19 additions & 18 deletions astrocut/cutouts.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,13 +80,13 @@ def _hducut(img_hdu, center_coord, cutout_size, correct_wcs=False, verbose=False

img_data = img_hdu.data

log.info("Original image shape: {}".format(img_data.shape))
log.debug("Original image shape: %s", img_data.shape)

# Get cutout limits
cutout_lims = get_cutout_limits(img_wcs, center_coord, cutout_size)

log.info("xmin,xmax: {}".format(cutout_lims[0]))
log.info("ymin,ymax: {}".format(cutout_lims[1]))
log.debug("xmin,xmax: %s", cutout_lims[0])
log.debug("ymin,ymax: %s", cutout_lims[1])

    # These limits are not guaranteed to be within the image footprint
xmin, xmax = cutout_lims[0]
Expand Down Expand Up @@ -119,7 +119,7 @@ def _hducut(img_hdu, center_coord, cutout_size, correct_wcs=False, verbose=False
if padding.any(): # only do if we need to pad
img_cutout = np.pad(img_cutout, padding, 'constant', constant_values=np.nan)

log.info("Image cutout shape: {}".format(img_cutout.shape))
log.debug("Image cutout shape: %s", img_cutout.shape)

# Getting the cutout wcs
cutout_wcs = get_cutout_wcs(img_wcs, cutout_lims)
Expand Down Expand Up @@ -259,18 +259,19 @@ def fits_cut(input_files, coordinates, cutout_size, correct_wcs=False, extension
# Turning the cutout size into a 2 member array
cutout_size = parse_size_input(cutout_size)

log.info(f"Number of input files: {len(input_files)}")
log.info(f"Cutting out {extension} extension(s)")
log.info(f"Center coordinate: {coordinates.to_string()} deg")
log.info(f"Cutout size: {cutout_size}")
log.debug("Number of input files: %d", len(input_files))
if extension:
log.debug("Cutting out %s extension(s)", extension)
log.debug("Center coordinate: %s deg", coordinates)
log.debug("Cutout size: %s", cutout_size)

# Making the cutouts
cutout_hdu_dict = {}
num_empty = 0
num_cutouts = 0
fsspec_kwargs = None
for in_fle in input_files:
log.info("\nCutting out {}".format(in_fle))
log.debug("\nCutting out %s", in_fle)

if "s3://" in in_fle:
fsspec_kwargs = {"anon": True}
Expand Down Expand Up @@ -318,7 +319,7 @@ def fits_cut(input_files, coordinates, cutout_size, correct_wcs=False, extension
cutout_path = None
if single_outfile:

log.info("Returning cutout as single FITS")
log.debug("Returning cutout as single FITS")

if not memory_only:
cutout_path = "{}_{:7f}_{:7f}_{}-x-{}_astrocut.fits".format(cutout_prefix,
Expand All @@ -327,7 +328,7 @@ def fits_cut(input_files, coordinates, cutout_size, correct_wcs=False, extension
str(cutout_size[0]).replace(' ', ''),
str(cutout_size[1]).replace(' ', ''))
cutout_path = os.path.join(output_dir, cutout_path)
log.info("Cutout fits file: {}".format(cutout_path))
log.debug("Cutout fits file: %s", cutout_path)

cutout_hdus = [x for fle in cutout_hdu_dict for x in cutout_hdu_dict[fle]]
cutout_fits = get_fits(cutout_hdus, coordinates, cutout_path)
Expand All @@ -339,13 +340,13 @@ def fits_cut(input_files, coordinates, cutout_size, correct_wcs=False, extension

else: # one output file per input file

log.info("Returning cutouts as individual FITS")
log.debug("Returning cutouts as individual FITS")

if memory_only:
all_hdus = []
else:
all_paths = []
log.info("Cutout fits files:")
log.debug("Cutout fits files:")

for fle in input_files:
cutout_list = cutout_hdu_dict[fle]
Expand All @@ -366,9 +367,9 @@ def fits_cut(input_files, coordinates, cutout_size, correct_wcs=False, extension
all_hdus.append(cutout_fits)
else:
all_paths.append(cutout_path)
log.info(cutout_path)
log.debug(cutout_path)

log.info("Total time: {:.2} sec".format(time()-start_time))
log.debug("Total time: %.2f sec", time() - start_time)

if memory_only:
return all_hdus
Expand Down Expand Up @@ -526,7 +527,7 @@ def img_cut(input_files, coordinates, cutout_size, stretch='asinh', minmax_perce
# Making the cutouts
cutout_hdu_dict = {}
for in_fle in input_files:
log.info("\n{}".format(in_fle))
log.debug("\n%s", in_fle)


warnings.filterwarnings("ignore", category=wcs.FITSFixedWarning)
Expand Down Expand Up @@ -611,7 +612,7 @@ def img_cut(input_files, coordinates, cutout_size, stretch='asinh', minmax_perce

Image.fromarray(cutout).save(file_path)

log.info("Cutout fits file(s): {}".format(cutout_path))
log.info("Total time: {:.2} sec".format(time()-start_time))
log.debug("Cutout fits file(s): %s", cutout_path)
log.debug("Total time: %.2f sec", time() - start_time)

return cutout_path
13 changes: 7 additions & 6 deletions astrocut/footprint_cutouts.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,18 +270,18 @@ def cube_cut_from_footprint(coordinates: Union[str, SkyCoord], cutout_size,
# Convert to SkyCoord
if not isinstance(coordinates, SkyCoord):
coordinates = SkyCoord(coordinates, unit='deg')
log.info(f'Coordinates: {coordinates}')
log.debug('Coordinates: %s', coordinates)

# Parse cutout size
cutout_size = parse_size_input(cutout_size)
log.info(f'Cutout size: {cutout_size}')
log.debug('Cutout size: %s', cutout_size)

# Get FFI footprints from the cloud
# s3_uri = 's3://tesscut-ops-footprints/tess_ffi_footprint_cache.json' if product == 'SPOC' \
# else 's3://tesscut-ops-footprints/tica_ffi_footprint_cache.json'
# all_ffis = _get_s3_ffis(s3_uri=s3_uri, as_table=True, load_polys=True)
all_ffis = get_caom_ffis(product)
log.info(f'Found {len(all_ffis)} footprint files.')
log.debug('Found %d footprint files.', len(all_ffis))

# Filter FFIs by provided sectors
if sequence:
Expand All @@ -295,15 +295,15 @@ def cube_cut_from_footprint(coordinates: Union[str, SkyCoord], cutout_size,
', '.join(str(s) for s in sequence))


log.info(f'Filtered to {len(all_ffis)} footprints for sequences: {", ".join(str(s) for s in sequence)}')
log.debug('Filtered to %d footprints for sequences: %s', len(all_ffis), ", ".join(str(s) for s in sequence))

# Get sector names and cube files that contain the cutout
cone_results = ra_dec_crossmatch(all_ffis, coordinates, cutout_size, TESS_ARCSEC_PER_PX)
if not cone_results:
raise InvalidQueryError('The given coordinates were not found within the specified sequence(s).')
seq_list = _create_sequence_list(cone_results, product)
cube_files_mapping = _get_cube_files_from_sequence_obs(seq_list)
log.info(f'Found {len(cube_files_mapping)} matching cube files.')
log.debug('Found %d matching cube files.', len(cube_files_mapping))
base_file_path = "s3://stpubdata/tess/public/mast/" if product == 'SPOC' \
else "s3://stpubdata/tess/public/mast/tica/"

Expand All @@ -313,6 +313,7 @@ def cube_cut_from_footprint(coordinates: Union[str, SkyCoord], cutout_size,

# Executor function to generate cutouts from a cube file
def process_file(file):
log.debug('Creating cutout from %s', file['cube'])
try:
factory = CutoutFactory()
file_path = os.path.join(base_file_path, file['cube'])
Expand All @@ -332,7 +333,7 @@ def process_file(file):
return None

# Generate cutout from each cube file
log.info('Generating cutouts...')
log.debug('Generating cutouts...')
cutout_files = [process_file(file) for file in cube_files_mapping]

return cutout_files
36 changes: 18 additions & 18 deletions astrocut/make_cube.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ def _write_block(self, cube_hdu, start_row=0, end_row=None, fill_info_table=Fals
nulval = ""
self.info_table[kwd][i] = ffi_data[1].header.get(kwd, nulval)

log.info(f"Completed file {i} in {time()-st:.3} sec.")
log.debug("Completed file %d in %.3f sec.", i, time() - st)

# Fill block and flush to disk
cube_hdu[1].data[start_row:end_row, :, :, :] = sub_cube
Expand Down Expand Up @@ -319,8 +319,8 @@ def make_cube(self, file_list, cube_file="img-cube.fits", sector=None, max_memor

self._configure_cube(file_list, sector=sector)

log.info("Using {} to initialize the image header table.".format(os.path.basename(self.template_file)))
log.info(f"Cube will be made in {self.num_blocks} blocks of {self.block_size} rows each.")
log.debug("Using %s to initialize the image header table.", os.path.basename(self.template_file))
log.debug("Cube will be made in %d blocks of %d rows each.", self.num_blocks, self.block_size)

    # Set up the table to hold the individual image headers
self._build_info_table()
Expand All @@ -345,11 +345,11 @@ def make_cube(self, file_list, cube_file="img-cube.fits", sector=None, max_memor
fill_info_table = True if (i == 0) else False
self._write_block(cube_hdu, start_row, end_row, fill_info_table, verbose)

log.info(f"Completed block {i+1} of {self.num_blocks}")
log.debug("Completed block %d of %d", i + 1, self.num_blocks)

# Add the info table to the cube file
self._write_info_table()
log.info(f"Total time elapsed: {(time() - startTime)/60:.2f} min")
log.debug("Total time elapsed: %.2f min", (time() - startTime) / 60)

return self.cube_file

Expand Down Expand Up @@ -625,7 +625,7 @@ def _write_block(self, cube_hdu, start_row=0, end_row=None, fill_info_table=Fals
else:
raise

log.info(f"Completed file {i} in {time()-st:.3} sec.")
log.debug("Completed file %d in %.3f sec.", i, time() - st)

# Fill block and flush to disk
if not self.update:
Expand Down Expand Up @@ -739,7 +739,7 @@ def _update_cube(self, file_list, cube_file, sector=None, max_memory=50, verbose
assert os.path.exists(cube_file), err_msg
self.cube_file = cube_file

log.info(f'Updating cube file: {cube_file}')
log.debug('Updating cube file: %s', cube_file)

# Ensure that none of the files in file_list are in the cube already, to avoid duplicates
in_cube = list(fits.getdata(self.cube_file, 2)['FFI_FILE'])
Expand All @@ -758,7 +758,7 @@ def _update_cube(self, file_list, cube_file, sector=None, max_memory=50, verbose
noffis_err_msg = 'No new FFIs found for the given sector.'
assert len(filtered_file_list) > 0, noffis_err_msg

log.info(f'{len(filtered_file_list)} new FFIs found!')
log.debug('%d new FFIs found!', len(filtered_file_list))

# Creating an empty cube that will be appended to the existing cube
og_cube = fits.getdata(cube_file, 1)
Expand All @@ -770,7 +770,7 @@ def _update_cube(self, file_list, cube_file, sector=None, max_memory=50, verbose
sector = (sector, "Observing sector")
self._configure_cube(filtered_file_list, sector=sector)

log.info(f"FFIs will be appended in {self.num_blocks} blocks of {self.block_size} rows each.")
log.debug("FFIs will be appended in %d blocks of %d rows each.", self.num_blocks, self.block_size)

# Starting a new info table from scratch with new rows
self._build_info_table()
Expand Down Expand Up @@ -799,24 +799,24 @@ def _update_cube(self, file_list, cube_file, sector=None, max_memory=50, verbose
# the info table also gets updated here
fill_info_table = True
self._write_block(cube_hdu, start_row, end_row, fill_info_table, verbose)
log.info(f"Completed block {i+1} of {self.num_blocks}")
log.debug("Completed block %d of %d", i + 1, self.num_blocks)

# Append the new cube to the existing cube
new_cube = np.concatenate((og_cube, self.cube_append), axis=2)

# Add it to the HDU list
with fits.open(self.cube_file, mode='update') as hdul:
log.info(f'Original cube of size: {str(og_cube.shape)}')
log.info(f'will now be replaced with cube of size: {str(new_cube.shape)}')
log.info(f'for file ``{cube_file}``')
log.debug('Original cube of size: %s', og_cube.shape)
log.debug('will now be replaced with cube of size: %s', new_cube.shape)
log.debug('for file ``%s``', cube_file)
hdul[1].data = new_cube

# Appending new info table to original
self._update_info_table()

# Writing the info table to EXT2 of the FITS file
self._write_info_table()
log.info(f"Total time elapsed: {(time() - startTime)/60:.2f} min")
log.debug("Total time elapsed: %.2f min", (time() - startTime) / 60)

return self.cube_file

Expand Down Expand Up @@ -866,8 +866,8 @@ def make_cube(self, file_list, cube_file="img-cube.fits", sector=None, max_memor
# Set up the basic cube parameters
sector = (sector, "Observing sector")
self._configure_cube(file_list, sector=sector)
log.info("Using {} to initialize the image header table.".format(os.path.basename(self.template_file)))
log.info(f"Cube will be made in {self.num_blocks} blocks of {self.block_size} rows each.")
log.debug("Using %s to initialize the image header table.", os.path.basename(self.template_file))
log.debug("Cube will be made in %d blocks of %d rows each.", self.num_blocks, self.block_size)

# Set up the table to hold the individual image headers
self._build_info_table()
Expand All @@ -891,11 +891,11 @@ def make_cube(self, file_list, cube_file="img-cube.fits", sector=None, max_memor

fill_info_table = True if (i == 0) else False
self._write_block(cube_hdu, start_row, end_row, fill_info_table, verbose)
log.info(f"Completed block {i+1} of {self.num_blocks}")
log.debug("Completed block %d of %d", i + 1, self.num_blocks)

# Add the info table to the cube file
self._write_info_table()
log.info(f"Total time elapsed: {(time() - startTime)/60:.2f} min")
log.debug("Total time elapsed: %.2f min", (time() - startTime) / 60)

return self.cube_file

Expand Down
2 changes: 1 addition & 1 deletion astrocut/tests/test_cube_cut.py
Original file line number Diff line number Diff line change
Expand Up @@ -612,7 +612,7 @@ def test_inputs(cube_file, ffi_type, tmp_path, caplog):
captured = caplog.text
assert "Image cutout cube shape: (100, 3, 5)" in captured
assert "Using WCS from row 50 out of 100" in captured
assert "Cutout center coordinate: 256.88,6.38" in captured
assert "Cutout center coordinate: 256.88, 6.38" in captured
assert "5x3" in cutout_file

cutout_size = [5, 3]*u.arcmin
Expand Down
Loading

0 comments on commit 5efd55b

Please sign in to comment.