HLA-1110: aperture keyword from poller to header #1683

Merged
Changes from 13 commits
6 changes: 3 additions & 3 deletions drizzlepac/hapsequencer.py
@@ -765,10 +765,10 @@ def run_align_to_gaia(tot_obj, log_level=logutil.logging.INFO, diagnostic_mode=F
for exp_obj in tot_obj.edp_list:
if gaia_obj is None:
prod_list = exp_obj.info.split("_")
prod_list[4] = "metawcs"
prod_list[5] = "metawcs"
gaia_obj = product.FilterProduct(prod_list[0], prod_list[1], prod_list[2],
prod_list[3], prod_list[4], "all",
prod_list[5][0:3], log_level)
prod_list[3], prod_list[4], prod_list[5], "all",
prod_list[6][0:3], log_level)
gaia_obj.configobj_pars = tot_obj.configobj_pars
gaia_obj.add_member(exp_obj)

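For context on this hunk: the exposure "info" string is underscore-joined and now carries the aperture between the detector and the filename, which is why the "metawcs" substitution moves from index 4 to index 5 and FilterProduct gains an extra positional argument. A minimal sketch of the index shift, with made-up field values (the real string is built by the product classes in drizzlepac/haputils/product.py):

# Illustrative only: hypothetical info strings showing the index shift.
old_info = "11150_01_wfpc2_pc_u6ea0101m_f606w_drz"            # ...detector, filename, filters, filetype
new_info = "11150_01_wfpc2_pc_pc1-fix_u6ea0101m_f606w_drz"    # ...detector, aperture, filename, filters, filetype

old_list = old_info.split("_")
new_list = new_info.split("_")

assert old_list[4] == "u6ea0101m"   # the filename used to be element 4
assert new_list[4] == "pc1-fix"     # element 4 is now the aperture
assert new_list[5] == "u6ea0101m"   # so "metawcs" overwrites element 5 instead
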
94 changes: 64 additions & 30 deletions drizzlepac/haputils/poller_utils.py
100755 → 100644
@@ -64,7 +64,7 @@
# -----------------------------------------------------------------------------
# Single Visit Processing Functions
#
def interpret_obset_input(results, log_level):
def interpret_obset_input(results: str, log_level):
"""

Parameters
@@ -127,7 +127,6 @@
"""
# set logging level to user-specified level
log.setLevel(log_level)

log.debug("Interpret the poller file for the observation set.")
obset_table = build_poller_table(results, log_level)
# Add INSTRUMENT column
@@ -146,7 +145,7 @@
# Now create the output product objects
log.debug("Parse the observation set tree and create the exposure, filter, and total detection objects.")
obset_dict, tdp_list = parse_obset_tree(obset_tree, log_level)

# This little bit of code adds an attribute to single exposure objects that is True
# if a given filter only contains one input (e.g. n_exp = 1)
for tot_obj in tdp_list:
@@ -198,7 +197,7 @@
def create_row_info(row):
"""Build info string for a row from the obset table"""
info_list = [str(row['proposal_id']), "{}".format(row['obset_id']), row['instrument'],
row['detector'], row['filename'][:row['filename'].find('_')], row['filters']]
row['detector'], row['aperture'], row['filename'][:row['filename'].find('_')], row['filters']]
return ' '.join(map(str.upper, info_list)), row['filename']


@@ -482,7 +481,7 @@
# mvm prod_info = 'skycell_p1234_x01y01 wfc3 uvis f200lp all 2009 1 drz'
#
prod_list = prod_info.split(" ")
multi_scale = prod_list[2].upper() in ['IR', 'PC']
multi_scale = prod_list[2].upper() in ['IR']

pscale = 'fine' if not multi_scale else 'coarse'
prod_info += " {:s}".format(pscale)

@@ -672,15 +671,15 @@

# Determine if this image is a Grism/Prism or a nominal direct exposure
is_grism = False
if prod_list[5].lower().find('g') != -1 or prod_list[5].lower().find('pr') != -1:
if prod_list[6].lower().find('g') != -1 or prod_list[6].lower().find('pr') != -1:
is_grism = True
filt_indx -= 1
grism_sep_obj = GrismExposureProduct(prod_list[0], prod_list[1], prod_list[2],
prod_list[3], filename[1], prod_list[5],
prod_list[6], log_level)
prod_list[3], prod_list[4], filename[1], prod_list[6],
prod_list[7], log_level)
else:
sep_obj = ExposureProduct(prod_list[0], prod_list[1], prod_list[2], prod_list[3],
filename[1], prod_list[5], prod_list[6], log_level)
sep_obj = ExposureProduct(prod_list[0], prod_list[1], prod_list[2], prod_list[3], prod_list[4],
filename[1], prod_list[6], prod_list[7], log_level)
# Now that we have defined the ExposureProduct for this input exposure,
# do not include it any total or filter product.
if not is_member:
@@ -700,21 +699,19 @@

# Create a filter product object for this instrument/detector
filt_obj = FilterProduct(prod_list[0], prod_list[1], prod_list[2], prod_list[3],
prod_list[4], prod_list[5], prod_list[6], log_level)
prod_list[4], prod_list[5], prod_list[6], prod_list[7], log_level)
# Append exposure object to the list of exposure objects for this specific filter product object
filt_obj.add_member(sep_obj)
# Populate filter product dictionary with input filename
obset_products[fprod]['files'].append(filename[1])

# Set up the total detection product dictionary and create a total detection product object
# Initialize `info` key for total detection product
if not obset_products[totprod]['info']:
obset_products[totprod]['info'] = prod_info

# Create a total detection product object for this instrument/detector
tdp_obj = TotalProduct(prod_list[0], prod_list[1], prod_list[2], prod_list[3],
prod_list[4], prod_list[6], log_level)

prod_list[4], prod_list[5], prod_list[7], log_level)
if not is_grism:
# Append exposure object to the list of exposure objects for this specific total detection product
tdp_obj.add_member(sep_obj)
@@ -888,9 +885,12 @@
# ------------------------------------------------------------------------------


def build_poller_table(input, log_level, all_mvm_exposures=[], poller_type='svm',
def build_poller_table(input: str, log_level, all_mvm_exposures=[], poller_type='svm',
include_small=True, only_cte=False):
"""Create a poller file from dataset names.
"""Create a poller file from dataset names for either SMV or MVM processing. Information is either gathered
from the poller file or by using the filename to open the file and pulling information from the header keywords.
The code treats WFPC2 differently, by uses both approaches. For WFPC2, We use simple poller files with a second column
that includes the aperture. The code gathers the rest of the relevant informaiton from the header keywords.

Parameters
-----------
@@ -931,10 +931,27 @@
poller_dtype = POLLER_DTYPE

datasets = []

# limit column string types to minimum length formats e.g. str8, str11, etc.
obs_converters = {'col4': [ascii.convert_numpy(np.str_)]}

if isinstance(input, str):
input_table = ascii.read(input, format='no_header', converters=obs_converters)
if len(input_table.columns) == len(poller_colnames):
if len(input_table.columns) == 1:
input_table.columns[0].name = 'filename'
input_table['aperture']= 'empty_aperture'
poller_dtype+=('str',)
is_poller_file = False # gets important keywords from file headers instead of poller file

# unique logic to collect WFPC2 aperture data from poller file
elif len(input_table.columns) == 2:
input_table.columns[0].name = 'filename'
input_table.columns[1].name = 'aperture'

# add dtype for aperture column
poller_dtype+=('str',)
is_poller_file = False


elif len(input_table.columns) == len(poller_colnames):
# We were provided a poller file
# Now assign column names to table
for i, colname in enumerate(poller_colnames):
@@ -1001,28 +1018,38 @@
# an exception and exit.
for table_line in input_table:
if os.path.exists(table_line['filename']):
log.info("Input image {} found in current working directory.".format(table_line['filename']))
log.info(f"Input image {table_line['filename']} found in current working directory.")
elif os.path.exists(table_line['pathname']):
log.info("Input image {} not found in current working directory. However, it was found in the path specified in the poller file.".format(table_line['filename']))
log.info(f"Input image {table_line['filename']} not found in current working directory. However, it was found in the path specified in the poller file.")

shutil.copy(table_line['pathname'], os.getcwd())
log.info("Input image {} copied to current working directory.".format(table_line['pathname']))
log.info(f"Input image {table_line['pathname']} copied to current working directory.")

else:
log.error("Input image {} not found in current working directory.".format(table_line['filename']))
log.error("Archived input image {} not found.".format(table_line['pathname']))
err_msg = "Input image {} missing from current working directory and from the path specified in the poller file. Exiting... ".format(table_line['filename'])
log.error(f"Input image {table_line['filename']} not found in current working directory.")
log.error(f"Archived input image {table_line['pathname']} not found.")
err_msg = f"Input image {table_line['filename']} missing from current working directory and from the path specified in the poller file. Exiting... "

log.error(err_msg)
raise Exception(err_msg)
elif len(input_table.columns) == 1:
input_table.columns[0].name = 'filename'
is_poller_file = False


elif (poller_type == 'mvm') & (len(input_table.columns) != len(poller_colnames)):
log.error(f"MVMs should use full poller files with {len(poller_colnames)} columns.")
err_msg = f"Full poller files should have {len(poller_colnames)} columns. Exiting... "
log.error(err_msg)
raise Exception(err_msg)


# input is string with unexpected number of columns
else:
log.error(f'Poller file has an unexpected number of columns, code expects either 1, 2, or {len(poller_colnames)} but received: {len(input_table.columns)}')
raise ValueError


# Since a poller file was the input, it is assumed all the input
# data is in the local directory so just collect the filenames.
# datasets = input_table[input_table.colnames[0]].tolist()
filenames = list(input_table.columns[0])

# If input is a list of filenames
elif isinstance(input, list):
filenames = input
input_table= None


else:
id = '[poller_utils.build_poller_table] '
@@ -1066,7 +1093,15 @@
for cname in poller_colnames:
cols[cname] = []
cols['filename'] = usable_datasets

if input_table:
if 'aperture' in input_table.colnames:
cols['aperture'] = input_table['aperture'].tolist()
else:
cols['aperture'] = ['empty_aperture'] * len(usable_datasets)
poller_dtype+=('str',)
else:
raise ValueError("Input table is empty. Exiting...")


# If MVM processing and a poller file is the input, this implies there is
# only one skycell of interest for all the listed filenames in the poller
# file. Establish the WCS, but no need for discovery of overlapping skycells
@@ -1120,7 +1155,6 @@
poller_names = [colname for colname in cols]
poller_table = Table(data=poller_data, names=poller_names,
dtype=poller_dtype)

# The input was a poller file, so just keep the viable data rows for output
else:
good_rows = []
@@ -1184,7 +1218,7 @@
sys.exit(0)

poller_table = new_poller_table

return poller_table


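A hedged sketch of the new input handling in build_poller_table, using a hypothetical two-column WFPC2 poller file (filenames and aperture values are made up); a plain single-column list of filenames is instead padded with the 'empty_aperture' placeholder:

from astropy.io import ascii

# Hypothetical two-column WFPC2 poller file: filename plus aperture.
poller_text = """\
u6ea0101m_c0m.fits PC1-FIX
u6ea0102m_c0m.fits PC1-FIX
"""
tab = ascii.read(poller_text, format='no_header')
tab.columns[0].name = 'filename'
tab.columns[1].name = 'aperture'
print(list(tab['aperture']))        # ['PC1-FIX', 'PC1-FIX']

# For single-column input the aperture column would instead be filled with a default:
# tab['aperture'] = 'empty_aperture'
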
9 changes: 8 additions & 1 deletion drizzlepac/haputils/processing_utils.py
@@ -169,7 +169,14 @@
# Re-format ACS filter specification
if phdu['instrume'] == 'ACS':
phdu['filter'] = get_acs_filters(hdu, delimiter=';')


# WFPC2: update the aperture keyword if one was provided in the poller file
if (total_obj_list[0].aperture != 'empty_aperture') & (phdu['instrume'] =='WFPC2'):
log.info(f"Updating aperture header keyword from {phdu['aperture']} to {total_obj_list[0].aperture}")
phdu['aperture'] = total_obj_list[0].aperture

else:
log.debug("Not updating aperture keyword.")

# Ensure PHOT* keywords are always in SCI extension
for pkw in PHOT_KEYWORDS:
if pkw in phdu:
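Roughly, the effect of this hunk on a WFPC2 product header, sketched with astropy.io.fits against a hypothetical drizzled product (in the pipeline the aperture value comes from total_obj_list[0].aperture, as in the hunk above):

from astropy.io import fits

# Hypothetical filename and aperture; the placeholder check mirrors the hunk above.
poller_aperture = "PC1-FIX"

with fits.open("hst_11150_01_wfpc2_pc_f606w_u6ea01_drz.fits", mode="update") as hdulist:
    phdu = hdulist[0].header
    if poller_aperture != "empty_aperture" and phdu["INSTRUME"] == "WFPC2":
        phdu["APERTURE"] = poller_aperture   # override the default with the poller-supplied value
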
32 changes: 19 additions & 13 deletions drizzlepac/haputils/product.py
@@ -64,7 +64,7 @@ class HAPProduct:
"""

def __init__(
self, prop_id, obset_id, instrument, detector, filename, filetype, log_level
self, prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
):
# set logging level to user-specified level
log.setLevel(log_level)
@@ -79,6 +79,7 @@ def __init__(
self.obset_id = obset_id
self.instrument = instrument
self.detector = detector
self.aperture = aperture
self.filetype = filetype
self.rules_file = None
self.basename = (
@@ -536,13 +537,13 @@ class TotalProduct(HAPProduct):
"""

def __init__(
self, prop_id, obset_id, instrument, detector, filename, filetype, log_level
self, prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
):
super().__init__(
prop_id, obset_id, instrument, detector, filename, filetype, log_level
prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
)
self.info = "_".join(
[prop_id, obset_id, instrument, detector, filename, filetype]
[prop_id, obset_id, instrument, detector, aperture, filename, filetype]
)
self.exposure_name = filename[0:6]

@@ -702,17 +703,18 @@ def __init__(
obset_id,
instrument,
detector,
aperture,
filename,
filters,
filetype,
log_level,
):
super().__init__(
prop_id, obset_id, instrument, detector, filename, filetype, log_level
prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
)

self.info = "_".join(
[prop_id, obset_id, instrument, detector, filename, filters, filetype]
[prop_id, obset_id, instrument, detector, aperture, filename, filters, filetype]
)
if filename[0:7].lower() != "metawcs":
self.exposure_name = filename[0:6]
@@ -849,17 +851,18 @@ def __init__(
obset_id,
instrument,
detector,
aperture,
filename,
filters,
filetype,
log_level,
):
super().__init__(
prop_id, obset_id, instrument, detector, filename, filetype, log_level
prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
)

self.info = "_".join(
[prop_id, obset_id, instrument, detector, filename, filters, filetype]
[prop_id, obset_id, instrument, detector, aperture, filename, filters, filetype]
)
self.filters = filters
self.full_filename = self.copy_exposure(filename)
@@ -1014,17 +1017,18 @@ def __init__(
obset_id,
instrument,
detector,
aperture,
filename,
filters,
filetype,
log_level,
):
super().__init__(
prop_id, obset_id, instrument, detector, filename, filetype, log_level
prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
)

self.info = "_".join(
[prop_id, obset_id, instrument, detector, filename, filters, filetype]
[prop_id, obset_id, instrument, detector, aperture, filename, filters, filetype]
)
self.filters = filters
self.full_filename = self.copy_exposure(filename)
@@ -1110,13 +1114,14 @@ def __init__(
obset_id,
instrument,
detector,
aperture,
filename,
layer,
filetype,
log_level,
):
super().__init__(
prop_id, obset_id, instrument, detector, filename, filetype, log_level
prop_id, obset_id, instrument, detector, aperture, filename, filetype, log_level
)

filter_str = layer[0]
@@ -1345,13 +1350,14 @@ def __init__(
obset_id,
instrument,
detector,
aperture,
skycell_name,
layer,
filetype,
log_level,
):
super().__init__(
prop_id, obset_id, instrument, detector, skycell_name, filetype, log_level
prop_id, obset_id, instrument, detector, aperture, skycell_name, filetype, log_level
)
# May need to exclude 'filter' component from layer_str
filter_str = layer[0]
@@ -1369,7 +1375,7 @@ def __init__(
layer_scale = layer[1]

self.info = "_".join(
["hst", skycell_name, instrument, detector, filter_str, layer_str]
["hst", skycell_name, instrument, detector, aperture, filter_str, layer_str]
)
self.exposure_name = skycell_name
self.cell_id = skycell_name.strip("skycell-")
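To show how the new argument threads through the class hierarchy, here is a toy mimic (not the real classes, which also copy files and build output filenames): every subclass now forwards aperture to HAPProduct.__init__, so downstream code such as the header update in processing_utils.py can simply read product.aperture.

import logging

class HAPProductSketch:
    # Toy stand-in for drizzlepac.haputils.product.HAPProduct
    def __init__(self, prop_id, obset_id, instrument, detector, aperture,
                 filename, filetype, log_level):
        self.aperture = aperture
        self.filetype = filetype

class TotalProductSketch(HAPProductSketch):
    # Toy stand-in for TotalProduct: forwards aperture and folds it into `info`
    def __init__(self, prop_id, obset_id, instrument, detector, aperture,
                 filename, filetype, log_level):
        super().__init__(prop_id, obset_id, instrument, detector, aperture,
                         filename, filetype, log_level)
        self.info = "_".join([prop_id, obset_id, instrument, detector,
                              aperture, filename, filetype])

tot = TotalProductSketch("11150", "01", "wfpc2", "pc", "pc1-fix",
                         "u6ea0101m_c0m.fits", "drz", logging.INFO)
print(tot.aperture)   # pc1-fix
print(tot.info)       # 11150_01_wfpc2_pc_pc1-fix_u6ea0101m_c0m.fits_drz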