Commit 643c4fa1 authored by BO ZHANG 🏀

update example_interface.py

parent ab1ba559
Pipeline #258 passed in 21 seconds
@@ -161,6 +161,9 @@ raw 0 ``dm.l0_log(detector=detector)``
 ``Slit-Less Spectra 1D``
 ^^^^^^^^^^^^^^^^^^^^^^^^
+
+Note that for the 1D pipeline, ``dm.target_detectors`` is a list containing only one element.
+
 =========================== ===== =============================================================== ==================
 Data source                 Level File name / access code                                         Notes
 =========================== ===== =============================================================== ==================
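A tiny sketch of what the added note implies for per-detector code (only attributes already used in this documentation are assumed; the helper name is hypothetical):

```python
def first_and_only_detector(dm) -> int:
    # For the 1D pipeline, dm.target_detectors holds exactly one detector number,
    # so per-detector loops effectively run once.
    assert len(dm.target_detectors) == 1
    return dm.target_detectors[0]
```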
@@ -283,7 +286,7 @@ Source code
 .. literalinclude:: csst_common/example_interface.py
     :caption: ``example_interface.py``
-    :emphasize-lines: 7-11,36-41,85,87-88,91-92,95-96,99-101,111-116,148,155-165,167-173,178-179,182-183,186-187,190-191
+    :emphasize-lines: 7-11,25-29,75-86,101-112,169-171
     :linenos:
     :language: python
@@ -298,7 +301,14 @@ Rendered ``docstring``
 ``csst_common.params``
 ----------------------
-to be updated
+See https://csst-tb.bao.ac.cn/code/csst-l1/csst_common/-/blob/main/csst_common/data/csst_params.yml.
+
+To use the parameters:
+
+.. code-block:: python
+
+    from csst_common.params import CSST_PARAMS as CP
 
 Module Identifier
 -----------------
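For context, a minimal sketch of how the ``CSST_PARAMS`` import added above might be used. ``CSST_PARAMS`` is assumed here to behave like a dict parsed from ``csst_params.yml``; the key shown below is purely illustrative, not taken from the actual file:

```python
from csst_common.params import CSST_PARAMS as CP

# Assuming CSST_PARAMS is a dict-like object loaded from csst_params.yml,
# list its top-level sections and read one (hypothetical) entry.
print(list(CP.keys()))
print(CP["mbi"])  # hypothetical key, shown only to illustrate the access pattern
```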
@@ -21,18 +21,6 @@ def process_data(data: np.ndarray) -> np.ndarray:
     return np.fliplr(np.flipud(data))
 
 
-def check_results(dm: CsstMsDataManager, logger: logging.Logger) -> bool:
-    """ Check whether processed data are generated. """
-    existence = [
-        os.path.exists(dm.l1_detector(detector=detector, post="L1_processed.fits"))
-        for detector in dm.target_detectors
-    ]
-    if all(existence):
-        return True
-    else:
-        return False
-
-
 # process a single image (NOT RECOMMENDED!)
 def process_single_image(
         filepath_input: str,
@@ -74,26 +62,28 @@ def process_single_image(
     fr = FileRecorder()
 
     # process data
-    try:
-        # this will NOT be written into the log file
-        logger.debug("Reading the image {}".format(filepath_input))
-        # start processing
-        data = read_image(filepath_input)
-        data_processed = process_data(data)
-        np.save(filepath_output, data_processed)
-        # record file!
-        fr.add_record(filepath=filepath_output, db=True, comment="the processed image")
-        # this will be written into the log file
-        logger.info("Processed image saved to {}".format(filepath_output))
-        return CsstStatus.PERFECT, fr
-    except DeprecationWarning:
-        # this will be written into the log file
-        logger.warning("Suffered DeprecationWarning!")
-        return CsstStatus.WARNING, fr
-    except IOError:
-        # this will be written into the log file
-        logger.error("Suffered IOError!")
-        return CsstStatus.ERROR, fr
+    logger.info("Start processing image {}".format(filepath_input))
+    # start processing
+    data = read_image(filepath_input)
+    data_processed = process_data(data)
+    np.save(filepath_output, data_processed)
+    # record file!
+    fr.add_record(filepath=filepath_output, db=True, comment="the processed image")
+    # this will be written into the log file
+    logger.info("Finish processing, result saved to {}".format(filepath_output))
+
+    # check result existence
+    if os.path.exists(filepath_output):
+        # file exists, check precision
+        if fits.getheader(filepath_output)["TOL"] < 1e-5:
+            return CsstStatus.PERFECT, fr
+        else:
+            return CsstStatus.WARNING, fr
+    else:
+        # file doesn't exist, do your fallback solution
+        fits.HDUList(fits.PrimaryHDU()).writeto(filepath_output)
+        assert os.path.exists(filepath_output)
+        return CsstStatus.WARNING, fr
 
 
 # process multiple images in an exposure (RECOMMENDED, at least for MBI or SLS)
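Before moving on to the multi-image interface, a sketch of how the revised ``process_single_image`` above might be called. The rest of its signature is elided in this diff, so only the two arguments visible here are used, and the file paths are hypothetical:

```python
# Assuming the definitions from example_interface.py are in scope (or importable).
# Hypothetical paths; any other (elided) arguments of process_single_image are omitted.
status, fr = process_single_image(
    filepath_input="example_L0.fits",
    filepath_output="example_L1_processed.fits",
)
# The revised function returns CsstStatus.PERFECT or CsstStatus.WARNING
# together with the FileRecorder listing the produced files.
print(status, fr)
```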
@@ -102,17 +92,29 @@ def one_job(dm: CsstMsDataManager, detector: int):
     """ Process a single image, defined for parallel processing. """
     filepath_input = dm.l0_detector(detector=detector)
     filepath_output = dm.l1_detector(detector=detector, post="L1_processed.fits")
+    # data processing
     data = read_image(filepath_input)
     data_processed = process_data(data)
     np.save(filepath_output, data_processed)
-    return
+
+    # check result existence
+    if os.path.exists(filepath_output):
+        # file exists, check precision
+        if fits.getheader(filepath_output)["TOL"] < 1e-5:
+            return CsstStatus.PERFECT
+        else:
+            return CsstStatus.WARNING
+    else:
+        # file doesn't exist, do your fallback solution
+        fits.HDUList(fits.PrimaryHDU()).writeto(filepath_output)
+        assert os.path.exists(filepath_output)
+        return CsstStatus.WARNING
 
 
 # process in serial / parallel
 def process_multiple_images(
         dm: CsstMsDataManager,
-        logger: Union[None, logging.Logger] = None,
-        n_jobs: int = -1,
 ) -> tuple[CsstStatus, FileRecorder]:
     """
     Flip all images.
@@ -123,10 +125,6 @@ def process_multiple_images(
     ----------
     dm : CsstMsDataManager
         The data manager of the specified exposure.
-    logger : {None, logging.Logger}
-        The logger. If None, use the default logger.
-    n_jobs : int
-        The number of processes.
 
     Returns
     -------
@@ -137,55 +135,37 @@ def process_multiple_images(
     --------
     >>> dm = CsstMsDataManager.quickstart(
     >>> ver_sim="C5.2", dir_l1="", datatype="sls", exposure_id=100)
-    >>> logger = get_logger()
-    >>> process_multiple_images(dm, logger)
+    >>> process_multiple_images(dm)
     """
-    # set default logger
-    if logger is None:
-        logger = get_logger()
-
     # get an empty file recorder
     fr = FileRecorder()
 
     # process data
-    try:
-        # start processing (dm.target_detectors is a list of detector numbers that should be processed)
-
-        # [1/2] single-thread mode
-        for detector in dm.target_detectors:
-            # this will NOT be written into the log file
-            logger.debug("Processing for detector {}".format(detector))
-            filepath_input = dm.l0_detector(detector=detector)
-            filepath_output = dm.l1_detector(detector=detector, post="L1_processed.fits")
-            data = read_image(filepath_input)
-            data_processed = process_data(data)
-            np.save(filepath_output, data_processed)
-            # record file!
-            fr.add_record(filepath=filepath_output, db=True, comment="processed file for Detector {}".format(detector))
-
-        # [2/2] multi-processing mode
-        joblib.Parallel(n_jobs=n_jobs, backend="multiprocessing")(
-            joblib.delayed(one_job)(dm, detector) for detector in dm.target_detectors
-        )
-        for detector in dm.target_detectors:
-            filepath_output = dm.l1_detector(detector=detector, post="L1_processed.fits")
-            fr.add_record(filepath=filepath_output, db=True, comment="processed file for Detector {}".format(detector))
-
-        # check results
-        if check_results(dm=dm, logger=logger):
-            # this will be written into the log file
-            logger.info("All processed files are generated!")
-            return CsstStatus.PERFECT, fr
-        else:
-            # not all images are properly processed
-            logger.warning("Not all processed files are generated!")
-            return CsstStatus.ERROR, fr
-    except DeprecationWarning:
-        # this will be written into the log file
-        logger.warning("Suffered DeprecationWarning!")
-        return CsstStatus.WARNING, fr
-    except IOError:
-        # this will be written into the log file
-        logger.error("Suffered IOError!")
-        return CsstStatus.ERROR, fr
+    # start processing (dm.target_detectors is a list of detector numbers that should be processed)
+
+    # [1/2] single-thread mode
+    for detector in dm.target_detectors:
+        # this will NOT be written into the log file
+        dm.logger_mod.info("Start data processing for detector {}".format(detector))
+        filepath_input = dm.l0_detector(detector=detector)
+        filepath_output = dm.l1_detector(detector=detector, post="L1_processed.fits")
+        data = read_image(filepath_input)
+        data_processed = process_data(data)
+        np.save(filepath_output, data_processed)
+        # record file!
+        fr.add_record(filepath=filepath_output, db=True, comment="processed file for Detector {}".format(detector))
+
+    # [2/2] multi-processing mode
+    dm.logger_mod.info("Starting data processing with multiprocessing ...")
+    status_list = joblib.Parallel(n_jobs=dm.n_jobs, backend=dm.backend)(
+        joblib.delayed(one_job)(dm, detector) for detector in dm.target_detectors
+    )
+    dm.logger_mod.info("Finished processing ...")
+    for detector in dm.target_detectors:
+        filepath_output = dm.l1_detector(detector=detector, post="L1_processed.fits")
+        fr.add_record(filepath=filepath_output, db=True, comment="processed file for Detector {}".format(detector))
+
+    # check results
+    assert fr.is_good()
+    return CsstStatus.PERFECT if all([_ == CsstStatus.PERFECT for _ in status_list]) else CsstStatus.WARNING, fr
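Putting the pieces together, the revised interface might be exercised as in the docstring example. A sketch (the ``quickstart`` arguments are copied from that example and are placeholders, not a tested configuration; the import path is assumed):

```python
from csst_common.data_manager import CsstMsDataManager  # import path assumed

# Build a data manager for one exposure (arguments copied from the docstring example).
dm = CsstMsDataManager.quickstart(
    ver_sim="C5.2", dir_l1="", datatype="sls", exposure_id=100)

# Run the exposure-level processing; it returns a CsstStatus and a FileRecorder.
# process_multiple_images is assumed to be in scope (see example_interface.py above).
status, fr = process_multiple_images(dm)
print(status)  # CsstStatus.PERFECT if every detector finished cleanly, else CsstStatus.WARNING
print(fr)      # the FileRecorder listing the processed files
```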