Skip to content
GitLab
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
csst-pipeline
csst_proto
Commits
643c4fa1
Commit
643c4fa1
authored
Feb 08, 2023
by
BO ZHANG
🏀
Browse files
update example_interface.py
parent
ab1ba559
Pipeline
#258
passed with stages
in 21 seconds
Changes
2
Pipelines
1
Hide whitespace changes
Inline
Side-by-side
doc/source/ch08_csst_common.rst
View file @
643c4fa1
...
...
@@ -161,6 +161,9 @@ raw 0 ``dm.l0_log(detector=detector)``
``Slit-Less Spectra 1D``
^^^^^^^^^^^^^^^^^^^^^^^^
Note that for 1D pipeline, ``dm.target_detectors`` is a list containing only one element.
=========================== ===== =============================================================== ==================
Data source Level File name / access code Notes
=========================== ===== =============================================================== ==================
...
...
@@ -283,7 +286,7 @@ Source code
.. literalinclude:: csst_common/example_interface.py
:caption: ``example_interface.py``
:emphasize-lines: 7-11,
36-41,85,87-88,91-92,95-96,99-101,111-116,148,155-165,167-173,178-179,182-183,186-187,190-19
1
:emphasize-lines: 7-11,
25-29,75-86,101-112,169-17
1
:linenos:
:language: python
...
...
@@ -298,7 +301,14 @@ Rendered ``docstring``
``csst_common.params``
----------------------
to be updated
See https://csst-tb.bao.ac.cn/code/csst-l1/csst_common/-/blob/main/csst_common/data/csst_params.yml.
To use the parameters, use
.. code-block:: python
from csst_common.params import CSST_PARAMS as CP
Module Identifier
-----------------
...
...
doc/source/csst_common/example_interface.py
View file @
643c4fa1
...
...
def process_data(data: np.ndarray) -> np.ndarray:
    """
    Flip the input image along both axes.

    Parameters
    ----------
    data : np.ndarray
        The input image (at least 2-D).

    Returns
    -------
    np.ndarray
        The image rotated by 180 degrees, i.e. flipped up-down and left-right.
    """
    # rot90 with k=2 flips both axis 0 and axis 1, identical to
    # np.fliplr(np.flipud(data)) for any array with ndim >= 2
    return np.rot90(data, 2)
def check_results(dm: CsstMsDataManager, logger: logging.Logger) -> bool:
    """
    Check whether all processed (L1) data files are generated.

    Parameters
    ----------
    dm : CsstMsDataManager
        The data manager of the specified exposure; provides
        ``target_detectors`` and the ``l1_detector`` path builder.
    logger : logging.Logger
        The logger. Currently unused; kept for interface compatibility.

    Returns
    -------
    bool
        True if an L1 file exists for every target detector, False otherwise.
    """
    # `all(...)` already yields the boolean result; no need for an
    # explicit if/else returning True/False.
    return all(
        os.path.exists(dm.l1_detector(detector=detector, post="L1_processed.fits"))
        for detector in dm.target_detectors
    )
# process a single image (NOT RECOMMENDED!)
def
process_single_image
(
filepath_input
:
str
,
...
...
@@ -74,26 +62,28 @@ def process_single_image(
fr
=
FileRecorder
()
# process data
try
:
# this will NOT be written into the log file
logger
.
debug
(
"Reading the image {}"
.
format
(
filepath_input
))
# start processing
data
=
read_image
(
filepath_input
)
data_processed
=
process_data
(
data
)
np
.
save
(
filepath_output
,
data_processed
)
# record file!
fr
.
add_record
(
filepath
=
filepath_output
,
db
=
True
,
comment
=
"the processed image"
)
# this will be written into the log file
logger
.
info
(
"Processed image saved to {}"
.
format
(
filepath_output
))
return
CsstStatus
.
PERFECT
,
fr
except
DeprecationWarning
:
# this will be written into the log file
logger
.
warning
(
"Suffered DeprecationWarning!"
)
logger
.
info
(
"Start processing image {}"
.
format
(
filepath_input
))
# start processing
data
=
read_image
(
filepath_input
)
data_processed
=
process_data
(
data
)
np
.
save
(
filepath_output
,
data_processed
)
# record file!
fr
.
add_record
(
filepath
=
filepath_output
,
db
=
True
,
comment
=
"the processed image"
)
# this will be written into the log file
logger
.
info
(
"Finish processing, result saved to {}"
.
format
(
filepath_output
))
# check result existence
if
os
.
path
.
exists
(
filepath_output
):
# file exists, check precision
if
fits
.
getheader
(
filepath_output
)[
"TOL"
]
<
1e-5
:
return
CsstStatus
.
PERFECT
,
fr
else
:
return
CsstStatus
.
WARNING
,
fr
else
:
# file doesn't exist, do your fallback solution
fits
.
HDUList
(
fits
.
PrimaryHDU
()).
writeto
(
filepath_output
)
assert
os
.
path
.
exists
(
filepath_output
)
return
CsstStatus
.
WARNING
,
fr
except
IOError
:
# this will be written into the log file
logger
.
error
(
"Suffered IOError!"
)
return
CsstStatus
.
ERROR
,
fr
# process multiple images in an exposure (RECOMMENDED, at least for MBI or SLS)
...
...
def one_job(dm: CsstMsDataManager, detector: int):
    """
    Process a single image, defined for parallel processing.

    Parameters
    ----------
    dm : CsstMsDataManager
        The data manager of the specified exposure; provides the L0/L1
        file path builders.
    detector : int
        The detector number to process.

    Returns
    -------
    CsstStatus
        PERFECT if the output file exists and passes the precision check,
        WARNING otherwise (including the fallback case where an empty
        FITS file is written).
    """
    filepath_input = dm.l0_detector(detector=detector)
    filepath_output = dm.l1_detector(detector=detector, post="L1_processed.fits")
    # data processing
    data = read_image(filepath_input)
    data_processed = process_data(data)
    np.save(filepath_output, data_processed)
    # check result existence
    # NOTE: the bare `return` that preceded this check in the old version
    # made everything below unreachable; it is removed here so the status
    # is actually computed and returned.
    if os.path.exists(filepath_output):
        # file exists, check precision
        # NOTE(review): "TOL" is assumed to be a tolerance keyword in the
        # FITS header of the output file — confirm against the writer.
        if fits.getheader(filepath_output)["TOL"] < 1e-5:
            return CsstStatus.PERFECT
        else:
            return CsstStatus.WARNING
    else:
        # file doesn't exist, do your fallback solution
        fits.HDUList(fits.PrimaryHDU()).writeto(filepath_output)
        assert os.path.exists(filepath_output)
        return CsstStatus.WARNING
# process in serial / parallel
def
process_multiple_images
(
dm
:
CsstMsDataManager
,
logger
:
Union
[
None
,
logging
.
Logger
]
=
None
,
n_jobs
:
int
=
-
1
,
)
->
tuple
[
CsstStatus
,
FileRecorder
]:
"""
Flip all images.
...
...
@@ -123,10 +125,6 @@ def process_multiple_images(
----------
dm : CsstMsDataManager
The data manager of the specified exposure.
logger : {None, logging.Logger}
The logger. If None, use the default logger.
n_jobs : int
The number of processes.
Returns
-------
...
...
@@ -137,55 +135,37 @@ def process_multiple_images(
--------
>>> dm = CsstMsDataManager.quickstart(
>>> ver_sim="C5.2", dir_l1="", datatype="sls", exposure_id=100)
>>> logger = get_logger()
>>> process_multiple_images(dm, logger)
>>> process_multiple_images(dm)
"""
# set default logger
if
logger
is
None
:
logger
=
get_logger
()
# get an empty file recorder
fr
=
FileRecorder
()
# process data
try
:
# start processing (dm.target_detectors is a list of detector number that should be processed)
# [1/2] single-thread mode
for
detector
in
dm
.
target_detectors
:
# this will NOT be written into the log file
logger
.
debug
(
"Processing for detector {}"
.
format
(
detector
))
filepath_input
=
dm
.
l0_detector
(
detector
=
detector
)
filepath_output
=
dm
.
l1_detector
(
detector
=
detector
,
post
=
"L1_processed.fits"
)
data
=
read_image
(
filepath_input
)
data_processed
=
process_data
(
data
)
np
.
save
(
filepath_output
,
data_processed
)
# record file!
fr
.
add_record
(
filepath
=
filepath_output
,
db
=
True
,
comment
=
"processed file for Detector {}"
.
format
(
detector
))
# [2/2] multi-processing mode
joblib
.
Parallel
(
n_jobs
=
n_jobs
,
backend
=
"multiprocessing"
)(
joblib
.
delayed
(
one_job
)(
dm
,
detector
)
for
detector
in
dm
.
target_detectors
)
for
detector
in
dm
.
target_detectors
:
filepath_output
=
dm
.
l1_detector
(
detector
=
detector
,
post
=
"L1_processed.fits"
)
fr
.
add_record
(
filepath
=
filepath_output
,
db
=
True
,
comment
=
"processed file for Detector {}"
.
format
(
detector
))
# check results
if
check_results
(
dm
=
dm
,
logger
=
logger
):
# this will be written into the log file
logger
.
info
(
"All processed files are generated!"
)
return
CsstStatus
.
PERFECT
,
fr
else
:
# not all images are properly processed
logger
.
warning
(
"Not all processed files are generated!"
)
return
CsstStatus
.
ERROR
,
fr
except
DeprecationWarning
:
# this will be written into the log file
logger
.
warning
(
"Suffered DeprecationWarning!"
)
return
CsstStatus
.
WARNING
,
fr
except
IOError
:
# this will be written into the log file
logger
.
error
(
"Suffered IOError!"
)
return
CsstStatus
.
ERROR
,
fr
# start processing (dm.target_detectors is a list of detector number that should be processed)
# [1/2] single-thread mode
for
detector
in
dm
.
target_detectors
:
# this will NOT be written into the log file
dm
.
logger_mod
.
info
(
"Start data processing for detector {}"
.
format
(
detector
))
filepath_input
=
dm
.
l0_detector
(
detector
=
detector
)
filepath_output
=
dm
.
l1_detector
(
detector
=
detector
,
post
=
"L1_processed.fits"
)
data
=
read_image
(
filepath_input
)
data_processed
=
process_data
(
data
)
np
.
save
(
filepath_output
,
data_processed
)
# record file!
fr
.
add_record
(
filepath
=
filepath_output
,
db
=
True
,
comment
=
"processed file for Detector {}"
.
format
(
detector
))
# [2/2] multi-processing mode
dm
.
logger_mod
.
info
(
"Starting data processing with multiprocessing ..."
)
status_list
=
joblib
.
Parallel
(
n_jobs
=
dm
.
n_jobs
,
backend
=
dm
.
backend
)(
joblib
.
delayed
(
one_job
)(
dm
,
detector
)
for
detector
in
dm
.
target_detectors
)
dm
.
logger_mod
.
info
(
"Finished processing ..."
)
for
detector
in
dm
.
target_detectors
:
filepath_output
=
dm
.
l1_detector
(
detector
=
detector
,
post
=
"L1_processed.fits"
)
fr
.
add_record
(
filepath
=
filepath_output
,
db
=
True
,
comment
=
"processed file for Detector {}"
.
format
(
detector
))
# check results
assert
fr
.
is_good
()
return
CsstStatus
.
PERFECT
if
all
([
_
==
CsstStatus
.
PERFECT
for
_
in
status_list
])
else
CsstStatus
.
WARNING
,
fr
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment