Commit b026df97 authored by BO ZHANG

major updates

parent cad9fd06
Pipeline #2166 failed with stage in 0 seconds
+unit_test_data_root
.idea/*
*.png
*.DS_Store*
@@ -12,6 +12,6 @@ uninstall:
	pip uninstall $(PKG) -y

test:
-	coverage run -m pytest . --import-mode=importlib --cov-report=html --cov-report=term-missing
+	coverage run -m pytest . --cov=$(PKG) --import-mode=importlib --cov-report=html --cov-report=term-missing
	coverage report -m
	rm -rf .coverage .pytest_cache
@@ -44,42 +44,3 @@ from csst_common.params import CSST_PARAMS as CP
print(CP)
print(CP["mbi"]["detector2filter"])
```
how to use data_manager
```python
from csst_common.data_manager import CsstMbiDataManager
dm = CsstMbiDataManager(...)
# access L0 directory
dm.dir_l0
# access L1 directory
dm.dir_l1
# access dir_pcref
dm.dir_pcref
# access path_aux
dm.path_aux
# access ver_sim
dm.ver_sim
# access target detectors
dm.target_detectors
# access available detectors
dm.available_detectors
# define an L1 file (detector-specified)
dm.l1_detector(detector=6)
# define an L1 file (non-detector-specified)
dm.l1_file("flipped_image.fits")
```
a shortcut for test dataset
```python
from csst_common.data_manager import CsstMbiDataManager
CsstMbiDataManager.quickstart(ver_sim="C5.2", dir_l1=".", exposure_id=100)
```
## algorithm description
The `csst_common` package provides some common modules for the CSST pipeline.
- `csst_common.params`
- `csst_common.data_manager`
@@ -9,10 +9,10 @@ Modified-History:
    2022-09-13, Bo Zhang, fixed a bug
"""
import os

from .status import CsstResult, CsstStatus
from .pipeline import Pipeline
from .ccds import CCDS
from .dfs import DFS
from .decorator import parameterized_module_decorator

-__version__ = "0.0.1"
+__version__ = "0.0.2"
PACKAGE_PATH = os.path.dirname(__file__)
import json
import os
import re
import astropy.io.fits as pyfits
from ccds import client
# chipid: [01, 02, 03, 04, 05, 10, 21, 26, 27, 28, 29, 30]
# filter: [GI, GV, GU, GU, GV, GI, GI, GV, GU, GU, GV, GI]
# clabel: [GI-1, GV-1, GU-1, GU-2, GV-2, GI-2, GI-3, GV-3, GU-3, GU-4, GV-4, GI-4]
def get_version():
observatory = client.get_default_observatory()
operational_context = client.get_default_context(observatory)
ver = re.split(r"[_.]", operational_context)[1]
return ver
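
# Note: the parsing above assumes an operational context of the form
# "csst_0001.pmap" (an assumed example; the actual format comes from the
# CCDS server), in which case re.split(r"[_.]", ...)[1] returns "0001".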
def resave_sensitivity(dir_save, chipid, flt):
ver = get_version()
h = pyfits.open(
dir_save + "CSST_MSC_MS_SENSITIVITY_" + chipid + "_" + ver + ".fits"
)
for extname, order in zip(
["L0ST", "LP1ST", "LM1ST", "LP2ST", "LM2ST"],
["0st", "1st", "-1st", "2st", "-2st"],
):
hdu0 = pyfits.PrimaryHDU()
hdu1 = pyfits.BinTableHDU(h[extname].data)
hdul = pyfits.HDUList([hdu0, hdu1])
hdul.writeto(
dir_save + "CSST_MSC_MS_SENSITIVITY_" + flt + "_" + order + ".fits",
overwrite=True,
checksum=True,
)
def readjson(dir_save, fjsoname):
with open(dir_save + fjsoname) as f:
d = json.load(f)
return d
##################################################################################
##################################################################################
# keys = ['BEAMA', 'MMAG_EXTRACT_A', 'MMAG_MARK_A', 'DYDX_ORDER_A', 'DYDX_A_0', 'DYDX_A_1', 'XOFF_A', 'YOFF_A',
# 'DISP_ORDER_A', 'DLDP_A_0', 'DLDP_A_1', 'BEAMB', 'MMAG_EXTRACT_B', 'MMAG_MARK_B', 'DYDX_ORDER_B', 'DYDX_B_0',
# 'XOFF_B', 'YOFF_B', 'DISP_ORDER_B', 'DLDP_B_0', 'DLDP_B_1', 'BEAMC', 'MMAG_EXTRACT_C', 'MMAG_MARK_C',
# 'DYDX_ORDER_C', 'DYDX_C_0', 'DYDX_C_1', 'XOFF_C', 'YOFF_C', 'DISP_ORDER_C', 'DLDP_C_0', 'DLDP_C_1', 'BEAMD',
# 'MMAG_EXTRACT_D', 'MMAG_MARK_D', 'DYDX_ORDER_D', 'DYDX_D_0', 'DYDX_D_1', 'XOFF_D', 'YOFF_D', 'DISP_ORDER_D',
# 'DLDP_D_0', 'DLDP_D_1', 'BEAME', 'MMAG_EXTRACT_E', 'MMAG_MARK_E', 'DYDX_ORDER_E', 'DYDX_E_0', 'DYDX_E_1',
# 'XOFF_E', 'YOFF_E', 'DISP_ORDER_E', 'DLDP_E_0', 'DLDP_E_1']
GL = [
"GI2",
"GV4",
"GU2",
"GU4",
"GV2",
"GI4",
"GI6",
"GV8",
"GU6",
"GU8",
"GV6",
"GI8",
]
GR = [
"GI1",
"GV3",
"GU1",
"GU3",
"GV1",
"GI3",
"GI5",
"GV7",
"GU5",
"GU7",
"GV5",
"GI7",
]
SEN = ["GI", "GV", "GU", "GU", "GV", "GI", "GI", "GV", "GU", "GU", "GV", "GI"]
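
# Reading aid: index i below runs over the chipid list in the header comment,
# e.g. chip "01" uses left-grating conf GL[0] = "GI2", right-grating conf
# GR[0] = "GI1", and sensitivity filter SEN[0] = "GI".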
def fwriteKEY(fsx, i):
fsx.write("INSTRUMENT CSSTSLS" + "\n")
fsx.write("CAMERA " + SEN[i] + "\n")
if SEN[i] == "GI":
fsx.write("WAVELENGTH 6200 10000" + "\n")
elif SEN[i] == "GV":
fsx.write("WAVELENGTH 4000 6200" + "\n")
elif SEN[i] == "GU":
fsx.write("WAVELENGTH 2550 4000" + "\n")
fsx.write("\n" + "SCIENCE_EXT SCI ; Science extension" + "\n")
fsx.write("DQ_EXT DQ ; DQ extension" + "\n")
fsx.write("ERRORS_EXT ERR ; Error extension" + "\n")
fsx.write("FFNAME csstFlat.fits" + "\n")
fsx.write("DQMASK 246 ; 4096 and 512 taken out" + "\n")
fsx.write("\n" + "RDNOISE 5.0" + "\n")
fsx.write("EXPTIME EXPTIME" + "\n")
fsx.write("POBJSIZE 1.0" + "\n")
fsx.write("#SMFACTOR 1.0" + "\n\n")
def fwriteBEAM(
dir_save,
fsx,
i,
GRATINGLR,
BEAMX,
MMAG_EXTRACT_X,
MMAG_MARK_X,
DYDX_ORDER_X,
DYDX_X_0,
DYDX_X_1,
XOFF_X,
YOFF_X,
DISP_ORDER_X,
DLDP_X_0,
DLDP_X_1,
):
ver = get_version()
[
d01_GI21,
d02_GV43,
d03_GU21,
d04_GU43,
d05_GV21,
d10_GI43,
d21_GI65,
d26_GV87,
d27_GU65,
d28_GU87,
d29_GV65,
d30_GI87,
] = [
readjson(dir_save, "CSST_MSC_MS_EXTRACT1D_" + chipid + "_" + ver + ".json")
for chipid in [
"01",
"02",
"03",
"04",
"05",
"10",
"21",
"26",
"27",
"28",
"29",
"30",
]
]
d = [
d01_GI21,
d02_GV43,
d03_GU21,
d04_GU43,
d05_GV21,
d10_GI43,
d21_GI65,
d26_GV87,
d27_GU65,
d28_GU87,
d29_GV65,
d30_GI87,
]
fsx.write(BEAMX), [
fsx.write(" " + str(d[i][GRATINGLR][BEAMX][j]))
for j in range(len(d[i][GRATINGLR][BEAMX]))
], fsx.write("\n")
fsx.write(MMAG_EXTRACT_X + " " + str(d[i][GRATINGLR][MMAG_EXTRACT_X]) + "\n")
fsx.write(MMAG_MARK_X + " " + str(d[i][GRATINGLR][MMAG_MARK_X]) + "\n")
fsx.write("# " + "\n")
fsx.write("# Trace description " + "\n")
fsx.write("# " + "\n")
fsx.write(DYDX_ORDER_X + " " + str(d[i][GRATINGLR][DYDX_ORDER_X]) + "\n")
fsx.write(DYDX_X_0), [
fsx.write(" " + str(d[i][GRATINGLR][DYDX_X_0][j]))
for j in range(len(d[i][GRATINGLR][DYDX_X_0]))
], fsx.write("\n")
if BEAMX == "BEAMB":
pass
else:
fsx.write(DYDX_X_1), [
fsx.write(" " + str(d[i][GRATINGLR][DYDX_X_1][j]))
for j in range(len(d[i][GRATINGLR][DYDX_X_1]))
], fsx.write("\n")
fsx.write("# " + "\n")
fsx.write("# X and Y Offsets " + "\n")
fsx.write("# " + "\n")
fsx.write(XOFF_X + " " + str(d[i][GRATINGLR][XOFF_X]) + "\n")
fsx.write(YOFF_X + " " + str(d[i][GRATINGLR][YOFF_X]) + "\n")
fsx.write("# " + "\n")
fsx.write("# Dispersion solution " + "\n")
fsx.write("# " + "\n")
fsx.write(DISP_ORDER_X + " " + str(d[i][GRATINGLR][DISP_ORDER_X]) + "\n")
fsx.write(DLDP_X_0), [
fsx.write(" " + str(d[i][GRATINGLR][DLDP_X_0][j]))
for j in range(len(d[i][GRATINGLR][DLDP_X_0]))
], fsx.write("\n")
fsx.write(DLDP_X_1), [
fsx.write(" " + str(d[i][GRATINGLR][DLDP_X_1][j]))
for j in range(len(d[i][GRATINGLR][DLDP_X_1]))
], fsx.write("\n")
fsx.write("# " + "\n")
if BEAMX == "BEAMA":
ordername = "1st"
fsx.write(
"SENSITIVITY_A CSST_MSC_MS_SENSITIVITY_" + SEN[i] + "_1st.fits" + "\n"
)
elif BEAMX == "BEAMB":
ordername = "0st"
fsx.write(
"SENSITIVITY_B CSST_MSC_MS_SENSITIVITY_" + SEN[i] + "_0st.fits" + "\n"
)
elif BEAMX == "BEAMC":
ordername = "-1st"
fsx.write(
"SENSITIVITY_C CSST_MSC_MS_SENSITIVITY_" + SEN[i] + "_-1st.fits" + "\n"
)
elif BEAMX == "BEAMD":
ordername = "2st"
fsx.write(
"SENSITIVITY_D CSST_MSC_MS_SENSITIVITY_" + SEN[i] + "_2st.fits" + "\n"
)
elif BEAMX == "BEAME":
ordername = "-2st"
fsx.write(
"SENSITIVITY_E CSST_MSC_MS_SENSITIVITY_" + SEN[i] + "_-2st.fits" + "\n"
)
fsx.write("# " + "\n" + "\n")
def fsave_conf(dir_save, GLR, GRATINGLR, i):
c = dir_save + "CSST_MSC_MS_" + GLR[i] + ".conf"
os.system("> " + c)
fs = open(c, "a")
fwriteKEY(fs, i)
fs.write("# 1 order (BEAM A) *******************" + "\n")
fwriteBEAM(
dir_save,
fs,
i,
GRATINGLR,
"BEAMA",
"MMAG_EXTRACT_A",
"MMAG_MARK_A",
"DYDX_ORDER_A",
"DYDX_A_0",
"DYDX_A_1",
"XOFF_A",
"YOFF_A",
"DISP_ORDER_A",
"DLDP_A_0",
"DLDP_A_1",
)
fs.write("\n# 0 order (BEAM B) *******************" + "\n")
fwriteBEAM(
dir_save,
fs,
i,
GRATINGLR,
"BEAMB",
"MMAG_EXTRACT_B",
"MMAG_MARK_B",
"DYDX_ORDER_B",
"DYDX_B_0",
"DYDX_B_1",
"XOFF_B",
"YOFF_B",
"DISP_ORDER_B",
"DLDP_B_0",
"DLDP_B_1",
)
fs.write("\n# -1 order (BEAM C) *******************" + "\n")
fwriteBEAM(
dir_save,
fs,
i,
GRATINGLR,
"BEAMC",
"MMAG_EXTRACT_C",
"MMAG_MARK_C",
"DYDX_ORDER_C",
"DYDX_C_0",
"DYDX_C_1",
"XOFF_C",
"YOFF_C",
"DISP_ORDER_C",
"DLDP_C_0",
"DLDP_C_1",
)
fs.write("\n# 2 order (BEAM D) *******************" + "\n")
fwriteBEAM(
dir_save,
fs,
i,
GRATINGLR,
"BEAMD",
"MMAG_EXTRACT_D",
"MMAG_MARK_D",
"DYDX_ORDER_D",
"DYDX_D_0",
"DYDX_D_1",
"XOFF_D",
"YOFF_D",
"DISP_ORDER_D",
"DLDP_D_0",
"DLDP_D_1",
)
fs.write("\n# -2 order (BEAM E) *******************" + "\n")
fwriteBEAM(
dir_save,
fs,
i,
GRATINGLR,
"BEAME",
"MMAG_EXTRACT_E",
"MMAG_MARK_E",
"DYDX_ORDER_E",
"DYDX_E_0",
"DYDX_E_1",
"XOFF_E",
"YOFF_E",
"DISP_ORDER_E",
"DLDP_E_0",
"DLDP_E_1",
)
fs.close()
def get_slsconf(dir_save=".", **kwargs):
"""save SLS conf files to `dir_save`"""
# resave the sensitivity.fits
for chipid, flt in zip(["01", "02", "03"], ["GI", "GV", "GU"]):
resave_sensitivity(dir_save, chipid, flt)
# save CSST_MSC_MS_*.conf
for i in range(0, 12):
fsave_conf(dir_save, GL, "GRATINGL", i)
fsave_conf(dir_save, GR, "GRATINGR", i)
# TODO: assert all files are saved correctly, then return
# TODO: return a dict containing filepath mapping
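
The two TODOs above call for verifying the outputs and returning a filepath mapping. A minimal sketch of what that could look like, assuming the naming scheme used by `fsave_conf` (`CSST_MSC_MS_<label>.conf`); the helper name `collect_conf_paths` is hypothetical:

```python
import os


def collect_conf_paths(dir_save="."):
    """Hypothetical helper: map grating label -> conf file path, asserting existence."""
    mapping = {}
    for label in GL + GR:
        fp = os.path.join(dir_save, f"CSST_MSC_MS_{label}.conf")
        # fail early if fsave_conf did not produce the file
        assert os.path.exists(fp), f"File not found: {fp}"
        mapping[label] = fp
    return mapping
```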
from astropy.wcs import WCS
def transform_coordinate(
ra: float = 180.000,
dec: float = 0.0,
original_epoch: float = 2016.0,
target_epoch: float = 2020.0,
) -> tuple[float, float]:
"""Transform a coordinate from `original_epoch` to `target_epoch`."""
pass
def transform_wcs(
wcs: WCS, original_epoch: float = 2016.0, target_epoch: float = 2020.0
) -> WCS:
"""Transform a wcs from `original_epoch` to `target_epoch`."""
pass
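
Both functions above are placeholders in this commit. A sketch of how the coordinate version could be implemented with `astropy` proper-motion propagation; the added `pm_ra_cosdec`/`pm_dec` arguments are assumptions (without proper motions an epoch transform is the identity):

```python
import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time


def transform_coordinate_sketch(
    ra: float = 180.0,
    dec: float = 0.0,
    pm_ra_cosdec: float = 0.0,  # mas/yr, assumed extra input
    pm_dec: float = 0.0,        # mas/yr, assumed extra input
    original_epoch: float = 2016.0,
    target_epoch: float = 2020.0,
) -> tuple[float, float]:
    """Propagate (ra, dec) from original_epoch to target_epoch."""
    c = SkyCoord(
        ra=ra * u.deg,
        dec=dec * u.deg,
        pm_ra_cosdec=pm_ra_cosdec * u.mas / u.yr,
        pm_dec=pm_dec * u.mas / u.yr,
        obstime=Time(original_epoch, format="jyear"),
    )
    # apply_space_motion propagates the position using the proper motions
    c2 = c.apply_space_motion(new_obstime=Time(target_epoch, format="jyear"))
    return c2.ra.deg, c2.dec.deg
```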
---
pml:
CSST_DFS_API_MODE: "cluster"
CSST_DFS_GATEWAY: "172.24.27.2:30880"
CSST_DFS_APP_ID: "test"
CSST_DFS_APP_TOKEN: "test"
kmust:
CSST_DFS_API_MODE: "cluster"
CSST_DFS_GATEWAY: "222.197.214.168:30880"
CSST_DFS_APP_ID: "1"
CSST_DFS_APP_TOKEN: "1"
tcc:
CSST_DFS_API_MODE: "cluster"
CSST_DFS_GATEWAY: "10.0.0.8:30880"
CSST_DFS_APP_ID: "test"
CSST_DFS_APP_TOKEN: "test"
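
For reference, a minimal sketch of how such a node configuration could be consumed, assuming it lives in the `node_conf.yml` file referenced (and now commented out) in `params.py` below; exporting the values as environment variables is an assumption:

```python
import os

import yaml

# Load the per-node DFS settings shown above.
with open("node_conf.yml") as f:
    conf = yaml.safe_load(f)

# Pick a node profile (e.g. "pml") and export it for the DFS client.
for key, value in conf["pml"].items():
    os.environ[key] = value
```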
@@ -32,9 +32,8 @@ from csst_dfs_api.sls.level2spectra import Level2SpectraApi as SlsLevel2DataApi
from .logger import get_logger
from .params import CSST_PARAMS as CP
-from .params import DFS_CONF
from .time import now
-from .crds import CRDS
+from .ccds import CCDS
from .dfs import DFS
@@ -89,11 +88,11 @@ class CsstMsDataManager:
    dfs_root : str
        The DFS root path.
    telescope : str
-        The telescope name. Defaults to ``CSST`` for C5.2 simulation.
+        The telescope pipeline_id. Defaults to ``CSST`` for C5.2 simulation.
    instrument : str
-        The instrument name. Defaults to ``MSC`` for C5.2 simulation.
+        The instrument pipeline_id. Defaults to ``MSC`` for C5.2 simulation.
    project : str
-        The survey name. Defaults to ``MS`` for C5.2 simulation.
+        The survey pipeline_id. Defaults to ``MS`` for C5.2 simulation.
    obs_type : str
        The image type signature for science images. Defaults to ``SCI`` for C5.2 simulation.
    exp_start : int
@@ -107,11 +106,11 @@ class CsstMsDataManager:
    version : str
        The L0 version.
    ext : str
-        The extension name, i.e., ``.fits``.
+        The extension pipeline_id, i.e., ``.fits``.
    log_ppl : str
-        The pipeline log file name.
+        The pipeline log file pipeline_id.
    log_mod : str
-        The module log file name.
+        The module log file pipeline_id.
    clear_dir : bool
        If True, clear ``dm.dir_l1`` directory.
    verbose : bool
@@ -148,7 +147,8 @@ class CsstMsDataManager:
    """
    # TODO: update docstring

-    def __init__(self,
+    def __init__(
+        self,
        ver_sim: str = "C6.2",
        datatype: str = "mbi",
        available_detectors: Union[None, list] = None,
@@ -177,7 +177,6 @@ class CsstMsDataManager:
        device: str = "CPU",
        stamps: str = "",
    ):
        # set DFS log dir
        os.environ["CSST_DFS_LOGS_DIR"] = "."
@@ -205,7 +204,9 @@ class CsstMsDataManager:
            print("Valid detectors are: ", self.valid_detectors)
        # available_detectors
        self.available_detectors = (
            available_detectors if available_detectors is not None else list()
        )
        if verbose:
            print("Available detectors are:", self.available_detectors)
        # set all available detectors by default
@@ -218,7 +219,7 @@ class CsstMsDataManager:
        self.exp_start = exp_start
        self.exp_stop = exp_stop

-        # file name components
+        # file pipeline_id components
        self.telescope = telescope
        self.instrument = instrument
        self.project = project
@@ -254,28 +255,46 @@ class CsstMsDataManager:
            self.clear_dir(self.dir_l1)
        # L1 whitelist
        self.l1_whitelist = [
            self.l1_file(name="mrs.csv"),
        ]
        for detector in self.target_detectors:
            if self.datatype == "mbi":
                self.l1_whitelist.append(
                    self.l1_detector(detector=detector, post="img_L1.fits")
                )
                self.l1_whitelist.append(
                    self.l1_detector(detector=detector, post="wht_L1.fits")
                )
                self.l1_whitelist.append(
                    self.l1_detector(detector=detector, post="flg_L1.fits")
                )
                self.l1_whitelist.append(
                    self.l1_detector(detector=detector, post="cat.fits")
                )
                self.l1_whitelist.append(
                    self.l1_detector(detector=detector, post="psf.fits")
                )
            elif self.datatype == "sls":
                self.l1_whitelist.append(
                    self.l1_detector(detector=detector, post="L1_1.fits")
                )
        # pipeline logger
        if log_ppl == "":
            self.logger_ppl = get_logger(name="CSST L1 Pipeline Logger", filename="")
        else:
            self.logger_ppl = get_logger(
                name="CSST L1 Pipeline Logger", filename=os.path.join(dir_l1, log_ppl)
            )
            self.l1_whitelist.append(os.path.join(dir_l1, log_ppl))
        # module logger
        if log_mod == "":
            self.logger_mod = get_logger(name="CSST L1 Module Logger", filename="")
        else:
            self.logger_mod = get_logger(
                name="CSST L1 Module Logger", filename=os.path.join(dir_l1, log_mod)
            )
            self.l1_whitelist.append(os.path.join(dir_l1, log_mod))

        self.custom_bias = None
@@ -336,14 +355,24 @@ class CsstMsDataManager:
    def target_detectors(self, detectors: Union[None, list, int] = None):
        assert detectors is None or type(detectors) in [list, int]
        if detectors is None:
            self._target_detectors = list(
                set(self.available_detectors) & set(self.valid_detectors)
            )
        elif isinstance(detectors, list):
            self._target_detectors = list(
                set(self.available_detectors)
                & set(self.valid_detectors)
                & set(detectors)
            )
        elif isinstance(detectors, int):
            self._target_detectors = list(
                set(self.available_detectors) & set(self.valid_detectors) & {detectors}
            )

    def set_detectors(self, detectors=None):
        raise DeprecationWarning(
            "This method is deprecated, please directly use dm.target_detectors = detectors!"
        )
    @staticmethod
    def from_dir(
@@ -358,9 +387,9 @@ class CsstMsDataManager:
        n_jobs=18,
        backend="multiprocessing",
        device="CPU",
        **kwargs,
    ):
        """initialize the multi-band imaging data manager"""
        assert ver_sim in CP["sim"]["versions"]
@@ -408,7 +437,9 @@ class CsstMsDataManager:
        mogd.pop("detector")
        # available detectors
        available_detectors = [
            int(re.fullmatch(pattern, fp)["detector"]) for fp in fps_img
        ]
        available_detectors.sort()

        return CsstMsDataManager(
@@ -425,12 +456,12 @@ class CsstMsDataManager:
            backend=backend,
            device=device,
            **mogd,
            **kwargs,
        )
    @staticmethod
    def glob_image(dir_l0, pattern="CSST_MSC_*_SCIE_*.fits"):
        """glob files in L0 data directory"""
        fps = glob.glob(os.path.join(dir_l0, pattern))
        fps = [os.path.basename(fp) for fp in fps]
        fps.sort()
@@ -438,11 +469,11 @@ class CsstMsDataManager:
        return fps

    def l0_id(self, detector=6):
        """Level0 ID, consistent with DFS."""
        return f"{self.obs_id}{detector:02d}"

    def l0_cat(self, detector=6):
        """the L0 cat file path"""
        assert detector in self.available_detectors
        fn = f"{self.instrument}_{self.obs_id}_chip_{detector:02d}_filt_{self.detector2filter[detector]}.cat"
        fp = os.path.join(self.dir_l0, fn)
@@ -451,7 +482,7 @@ class CsstMsDataManager:
        return fp

    def l0_log(self, detector=6):
        """L0 log file path"""
        assert detector in self.available_detectors
        fn = f"{self.instrument}_{self.obs_id}_chip_{detector:02d}_filt_{self.detector2filter[detector]}.log"
        fp = os.path.join(self.dir_l0, fn)
@@ -460,35 +491,43 @@ class CsstMsDataManager:
        return fp
    def l0_detector(self, detector=6):
        """L0 detector-specific image file path"""
        assert detector in self.available_detectors
        if self.ver_sim in ["C5.2", "C6.1"]:
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{detector:02d}_L{self.level}_{self.version}.{self.ext}"
            )
        else:
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{detector:02d}_L{self.level}_V{self.version}.{self.ext}"
            )
        fp = os.path.join(self.dir_l0, fn)
        if not os.path.exists(fp):
            raise FileNotFoundError(f"File not found: {fp}")
        return fp

    def l0_crs(self, detector=6):
        """L0 cosmic ray file path"""
        assert detector in self.available_detectors
        if self.ver_sim == "C5.2":
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_CRS_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{detector:02d}_L{self.level}_{self.version}.{self.ext}"
            )
        else:
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_CRS_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{detector:02d}_L{self.level}_V{self.version}.{self.ext}"
            )
        fp = os.path.join(self.dir_l0, fn)
        if not os.path.exists(fp):
            raise FileNotFoundError(f"File not found: {fp}")
        return fp
    def l1_detector(self, detector=6, post="IMG.fits"):
        """generate L1 file path

        Parameters
        ----------
@@ -506,11 +545,15 @@ class CsstMsDataManager:
        assert detector in self.available_detectors
        if post == ".fits":
            # no additional suffix
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{detector:02d}_L1_V{self.version}{post}"
            )
        else:
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{detector:02d}_L1_V{self.version}_{post}"
            )
        return os.path.join(self.dir_l1, fn)
    def get_refs(self, detector=6):
@@ -523,17 +566,16 @@ class CsstMsDataManager:
        Returns
        -------
        dict
-            {'bias': '/crdsroot/references/msc/csst_msc_ms_bias_11_000001.fits',
-             'dark': '/crdsroot/references/msc/csst_msc_ms_dark_11_000001.fits',
-             'ledflat': '/crdsroot/references/msc/csst_msc_ms_ledflat_11_000001.fits',
-             'shutter': '/crdsroot/references/msc/csst_msc_ms_shutter_11_000001.fits'}
+            {'bias': '/ccds_root/references/msc/csst_msc_ms_bias_11_000001.fits',
+             'dark': '/ccds_root/references/msc/csst_msc_ms_dark_11_000001.fits',
+             'ledflat': '/ccds_root/references/msc/csst_msc_ms_ledflat_11_000001.fits',
+             'shutter': '/ccds_root/references/msc/csst_msc_ms_shutter_11_000001.fits'}
        """
        if self.crds.is_available:
            try:
                print("CRDS available, use refs from CRDS ...")
                return self.crds.retry(
                    self.crds.get_refs, 3, file_path=self.l0_detector(detector)
                )
            except BaseException as e:
                print("CRDS reference access failed! ", e)
@@ -548,18 +590,18 @@ class CsstMsDataManager:
            fp = os.path.join(
                self.path_aux,
                "C6.2_ref_crds",
                "csst_msc_ms_{}_{:02d}_{:06d}.fits".format(ref_type, detector, 1),
            )
            assert os.path.exists(fp), f"File not found: [{fp}]"
            refs[ref_type] = fp
        return refs

    def get_bias(self, detector=6):
        """get bias data"""
        fp = os.path.join(
            self.path_aux,
            "C6.2_ref_crds",
            "csst_msc_ms_{}_{:02d}_{:06d}.fits".format("bias", detector, 1),
        )
        if not os.path.exists(fp):
            raise FileNotFoundError(fp)
@@ -567,11 +609,11 @@ class CsstMsDataManager:
        return fp

    def get_dark(self, detector=6):
        """get dark data"""
        fp = os.path.join(
            self.path_aux,
            "C6.2_ref_crds",
            "csst_msc_ms_{}_{:02d}_{:06d}.fits".format("dark", detector, 1),
        )
        if not os.path.exists(fp):
            raise FileNotFoundError(fp)
@@ -579,11 +621,11 @@ class CsstMsDataManager:
        return fp

    def get_flat(self, detector=6):
        """get flat data"""
        fp = os.path.join(
            self.path_aux,
            "C6.2_ref_crds",
            "csst_msc_ms_{}_{:02d}_{:06d}.fits".format("ledflat", detector, 1),
        )
        if not os.path.exists(fp):
            raise FileNotFoundError(fp)
@@ -591,11 +633,11 @@ class CsstMsDataManager:
        return fp

    def get_shutter(self, detector=6):
        """get shutter data"""
        fp = os.path.join(
            self.path_aux,
            "C6.2_ref_crds",
            "csst_msc_ms_{}_{:02d}_{:06d}.fits".format("shutter", detector, 1),
        )
        if not os.path.exists(fp):
            raise FileNotFoundError(fp)
@@ -606,14 +648,14 @@ class CsstMsDataManager:
    #     return os.path.join(self.path_aux, "axeconf")  # "/home/csstpipeline/L1Pipeline/aux/axeconf"

    def l1_file(self, name="", comment=""):
        """L1 file path

        Parameters
        ----------
        name : str
-            file name
+            file pipeline_id
        comment : str
-            use the function name plz
+            use the function pipeline_id plz

        Returns
        -------
@@ -627,7 +669,7 @@ class CsstMsDataManager:
        return fp
    def get_sls_info(self):
        """Get the target SLS image header info and return."""
        # if self.use_dfs:
        #     raise NotImplementedError()
        # else:
@@ -636,17 +678,23 @@ class CsstMsDataManager:
        return header

    def get_mbi_info(self):
        """Get all MBI image header info and return as a table."""
        # if self.use_dfs:
        #     raise NotImplementedError()
        # else:
        info = Table.read(
            "/nfsdata/share/csst_simulation_data/Cycle-5-SimuData/slitlessSpectroscopy/t_mbi_l1.fits"
        )
        return info
    @staticmethod
    def quickstart(
        ver_sim="C6.2",
        datatype="mbi",
        dir_l1=".",
        exposure_id=100,
        n_jobs=18,
        backend="multiprocessing",
    ):
        """
        Quick dataset generator for tests on dandelion or PML
@@ -677,16 +725,24 @@ class CsstMsDataManager:
            raise ValueError("Please use ver_sim = 'C6.2'! and exposure_id = 100")
        assert datatype in ["mbi", "sls"]

-        # auto identify node name
+        # auto identify node pipeline_id
        hostname = os.uname()[1]
        assert hostname in [
            "dandelion",
            "tulip",
        ]

        dir_l0 = "/nfsdata/share/pipeline-unittest/MSC_C6.2_UNITTEST/MSC_0000100"
        path_aux = "/nfsdata/share/pipeline/aux"

        return CsstMsDataManager.from_dir(
            ver_sim=ver_sim,
            datatype=datatype,
            dir_l0=dir_l0,
            dir_l1=dir_l1,
            path_aux=path_aux,
            n_jobs=n_jobs,
            backend=backend,
        )
    def __repr__(self):
@@ -703,11 +759,11 @@ class CsstMsDataManager:
        return lines

    def remove_files(self, fmt="*.fits"):
        """Remove L1 files conforming the format."""
        os.system(f"rm -rf {os.path.join(self.dir_l1, fmt)}")

    def remove_dir(self, dir_name):
        """Remove L1 (sub-)directory."""
        os.system(f"rm -rf {os.path.join(self.dir_l1, dir_name)}")
    @staticmethod
@@ -721,25 +777,25 @@ class CsstMsDataManager:
    @staticmethod
    def dfs_is_available():
        """Test if DFS works."""
        try:
            tbl = CatalogApi().catalog_query(
                catalog_name="gaia3",
                ra=180,
                dec=0,
                radius=0.1,
                columns=("ra", "dec"),
                min_mag=0,
                max_mag=30,
                obstime=-1,
                limit=-1,
            )
            return len(tbl) > 0
        except:
            return False
    def get_coord(self):
        """Get pointing coordinate."""
        header = fits.getheader(self.l0_detector(detector=self.target_detectors[0]))
        if self.ver_sim == "C5.2":
            pointing_ra = header["RA_OBJ"]
@@ -750,7 +806,7 @@ class CsstMsDataManager:
        return pointing_ra, pointing_dec

    def dfs_rc_auto(self):
        """Download RC for the current exposure."""
        assert self.dfs_is_available()
        assert len(self.target_detectors) >= 1
        pointing_ra, pointing_dec = self.get_coord()
@@ -758,25 +814,25 @@ class CsstMsDataManager:
            ra=pointing_ra,
            dec=pointing_dec,
            columns=(
                "ref_epoch",
                "ra",
                "ra_error",
                "dec",
                "dec_error",
                "parallax",
                "parallax_error",
                "pmra",
                "pmra_error",
                "pmdec",
                "pmdec_error",
                "phot_g_mean_mag",
                "source_id",
            ),
            radius=2,
            min_mag=0,
            max_mag=30,
            obstime=-1,
            limit=-1,
        )
        return refcat
@@ -786,26 +842,26 @@ class CsstMsDataManager:
        dec=0,
        radius=2,
        columns=(
            "ref_epoch",
            "ra",
            "ra_error",
            "dec",
            "dec_error",
            "parallax",
            "parallax_error",
            "pmra",
            "pmra_error",
            "pmdec",
            "pmdec_error",
            "phot_g_mean_mag",
            "source_id",
        ),
        min_mag=0,
        max_mag=30,
        obstime=-1,
        limit=-1,
    ):
""" Query Reference Catalog (RC) from DFS. """Query Reference Catalog (RC) from DFS.
Ref. Ref.
https://gea.esac.esa.int/archive/documentation/GDR3/Gaia_archive/chap_datamodel/ https://gea.esac.esa.int/archive/documentation/GDR3/Gaia_archive/chap_datamodel/
sec_dm_main_source_catalogue/ssec_dm_gaia_source.html sec_dm_main_source_catalogue/ssec_dm_gaia_source.html
...@@ -819,13 +875,13 @@ class CsstMsDataManager: ...@@ -819,13 +875,13 @@ class CsstMsDataManager:
min_mag=min_mag, min_mag=min_mag,
max_mag=max_mag, max_mag=max_mag,
obstime=obstime, obstime=obstime,
limit=limit limit=limit,
) )
if cat["code"] == 0: if cat["code"] == 0:
self.logger_ppl.info( self.logger_ppl.info(
f"Results from DFS CATAPI:\n" f"Results from DFS CATAPI:\n"
f" - code = {cat['code']}\n" f" - code = {cat['code']}\n"
f" - message = {cat['message']}\n" f" - msg = {cat['msg']}\n"
f" - totalCount = {cat['totalCount']}\n" f" - totalCount = {cat['totalCount']}\n"
f" - columns = {cat['columns']}" f" - columns = {cat['columns']}"
) )
...@@ -834,18 +890,18 @@ class CsstMsDataManager: ...@@ -834,18 +890,18 @@ class CsstMsDataManager:
self.logger_ppl.info( self.logger_ppl.info(
f"Results from DFS CATAPI:\n" f"Results from DFS CATAPI:\n"
f" - code = {cat['code']}\n" f" - code = {cat['code']}\n"
f" - message = {cat['message']}" f" - msg = {cat['msg']}"
) )
raise ValueError("Bad catalog query result!") raise ValueError("Bad catalog query result!")
    def dfs_l1_push(self):
        """Push MBI/SLS L1 data to DFS."""
        # l1api = get_l1api()
        # l1api.write()
        return

    def dfs_l2_push(self):
        """Push SLS spectra to DFS."""
        pass
    @staticmethod
@@ -857,7 +913,7 @@ class CsstMsDataManager:
        use_dfs=True,
        dfs_node="pml",
        clear_l1=False,
        dfs_root="/share/dfs",
    ):
        pass
@@ -866,7 +922,7 @@ class CsstMsDataManager:
        # query for L0 data
        print(f"Query obs_id={obs_id} ...", end="")
        recs = Level0DataApi().find(obs_id=obs_id)
-        print("Message: \n", recs)
+        print("MessageWriter: \n", recs)
        print(f"{recs['totalCount']} records obtained!")
        assert recs["code"] == 0
        assert recs["totalCount"] > 0
@@ -894,9 +950,9 @@ class CsstMsDataManager:
        n_jobs=18,
        backend="multiprocessing",
        device="CPU",
        **kwargs,
    ):
        """Initialize CsstMsDataManager from DFS."""
        # (clear and) make directories
        if os.path.exists(dir_l0):
            os.system(f"rm -rf {dir_l0}/*")
@@ -936,14 +992,14 @@ class CsstMsDataManager:
            n_jobs=n_jobs,
            backend=backend,
            device=device,
            **kwargs,
        )
        assert dm.obs_id == obs_id
        return dm
    def dfs_l0_query(self, obs_id: str = "100000100"):
        """Query L0 data from DFS."""
        result = self.dfs_L0DataApi.find(obs_id=str(obs_id))
        print(f"{result['totalCount']} records returned from DFS.")
        if not result["code"] == 0:
@@ -953,24 +1009,36 @@ class CsstMsDataManager:
        # Check if all 30 detectors are available
        for detector in CP["all"]["detectors"]:
            for obs_type in ["sci", "cosmic_ray"]:
                if (
                    np.sum(
                        (tbl["detector_no"] == f"{detector:02d}")
                        & (tbl["obs_type"] == obs_type)
                    )
                    == 0
                ):
                    self.logger_ppl.warning(
                        f"Record not found for detector {detector:02d} and obs_type {obs_type}"
                    )
        return tbl
    def dfs_l0_check_all(self):
        """Check all C5.2 L0 data is available in DFS."""
        is_good = True
        for obs_id in range(100000020, 100000155):
            tbl = self.dfs_l0_query(obs_id=f"{obs_id}")
            if len(tbl) == 60:
                self.logger_ppl.info(
                    f"DFS returns {len(tbl)} records for obs_id={obs_id}"
                )
            else:
                is_good = False
                self.logger_ppl.warning(
                    f"DFS returns {len(tbl)} records for obs_id={obs_id}"
                )
        return is_good

    def dfs_l1_query(self, obs_id, detector):
        """Query L1 data from DFS."""
        pass

    def l1_cleanup(self):
@@ -1025,20 +1093,28 @@ class CsstMsFile(dict):
    def fpo(self, post=".fits"):
        if post.startswith("."):
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_{self.exp_stop}_"
                f"{self.obs_id}_{self.detector}_L1_V{self.version}{post}"
            )
        else:
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_{self.exp_stop}_"
                f"{self.obs_id}_{self.detector}_L1_V{self.version}_{post}"
            )
        return os.path.join(self.dir_out, fn)

    def fno(self, post=".fits"):
        if post.startswith("."):
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{self.detector}_L1_V{self.version}{post}"
            )
        else:
            fn = (
                f"{self.telescope}_{self.instrument}_{self.project}_{self.obs_type}_{self.exp_start}_"
                f"{self.exp_stop}_{self.obs_id}_{self.detector}_L1_V{self.version}_{post}"
            )
        return fn
    def fpi(self):
@@ -1059,7 +1135,7 @@ class CsstMsFile(dict):
            filepath=os.path.join(dfs_root, rec["data"].file_path),
            ver_sim=ver_sim,
            dir_out=dir_out,
            header=header,
        )

    def move_to(self, dir_in="./input", dir_out="./output"):
@@ -1067,7 +1143,7 @@ class CsstMsFile(dict):
            filepath=os.path.join(dir_in, os.path.basename(self.filepath)),
            ver_sim=self.ver_sim,
            dir_out=dir_out,
            **self.kwargs,
        )

    def to_dict(self):
@@ -1075,11 +1151,13 @@ class CsstMsFile(dict):
            filepath=self.filepath,
            ver_sim=self.ver_sim,
            dir_out=self.dir_out,
            kwargs=self.kwargs,
        )

    def __repr__(self):
        return f'CsstMsFile(filepath="{self.filename}", ver_sim="{self.ver_sim}", dir_out="{self.dir_out}")'

# file = CsstMsFile(
#     "L1/MSC/SCI/62173/10160000108/CSST_MSC_MS_SCI_20290206174352_20290206174622_10160000108_21_L1_V01.fits")
# file = CsstMsFile.from_l1id(id=17796, dfs_root="/share/dfs")
"""
Identifier: KSC-SJ4-csst_common/__init__.py
Name: __init__.py
Description: csst_common package
Author: Bo Zhang
Created: 2022-09-13
Modified-History:
2022-09-13, Bo Zhang, created
2022-09-13, Bo Zhang, fixed a bug
2023-12-39, Bo Zhang, deprecated
"""
import os
from collections import namedtuple
from astropy import table
FileRecord = namedtuple("FileRecord", ["filepath", "db", "comment", "existence"])
class FileRecorder(list):
"""
FileRecord Recorder, inherited from the built-in ``list``.
This recorder is used to record files generated by functional modules.
In principle, a CSST pipeline module should return a status (CsstStatus)
and a file recorder (``FileRecorder``) after it finishes data processing.
Parameters
----------
*args : any
The arguments passed to ``list()``.
**kwargs : any
The keyword arguments passed to ``list()``.
Methods
-------
add_record(filepath: str = "", db: bool = False, comment: str = "")
Add a file record, each record is a ``collections.namedtuple``.
Users should provide a file path ``filepath``, whether this file should be
written to database ``db``, a comment ``comment``.
An existence boolean will be attached to check if the file exists.
to_table()
Convert to ``astropy.table.Table``.
pprint(*args, **kwargs)
Use ``astropy.table.Table.pprint`` to print file records in table format.
pprint_all(*args, **kwargs)
Use ``astropy.table.Table.pprint_all`` to print file records in table format.
Examples
--------
>>> fr = FileRecorder()
>>> for i in range(3):
>>> fr.add_record("test{:03d}.txt".format(i), db=True, comment="Test file {:d}".format(i))
>>> fr.pprint_all()
<FileRecorder length=3>
filepath db comment existence
----------- ---- ----------- ---------
test000.txt True Test file 0 False
test001.txt True Test file 1 False
test002.txt True Test file 2 False
>>> fr.pprint_all()
<FileRecorder length=3>
filepath db comment existence
----------- ---- ----------- ---------
test000.txt True Test file 0 False
test001.txt True Test file 1 False
test002.txt True Test file 2 False
"""
def __init__(self, *args, **kwargs):
super(FileRecorder, self).__init__(*args, **kwargs)
@staticmethod
def FileRecord(filepath: str = "", db: bool = False, comment: str = ""):
return FileRecord(
filepath=filepath,
db=db,
comment=comment,
existence=os.path.exists(filepath),
)
def add_record(self, filepath: str = "", db: bool = False, comment: str = ""):
existence = os.path.exists(filepath)
assert isinstance(filepath, str)
assert isinstance(db, bool)
assert isinstance(comment, str)
super().append(
FileRecord(filepath=filepath, db=db, comment=comment, existence=existence)
)
def to_table(self):
return table.Table([_._asdict() for _ in self])
def pprint(self, *args, **kwargs):
print("<FileRecorder length={}>".format(len(self)))
return self.to_table().pprint(*args, **kwargs)
def pprint_all(self, *args, **kwargs):
print("<FileRecorder length={}>".format(len(self)))
return self.to_table().pprint_all(*args, **kwargs)
def __repr__(self):
t = self.to_table()
lines, outs = t.formatter._pformat_table(
t,
max_lines=-1,
max_width=-1,
show_name=True,
show_unit=None,
show_dtype=False,
align="<",
)
if outs["show_length"]:
lines.append(f"Length = {len(self)} rows")
return "\n".join(lines)
@property
def summary(self):
if len(self) == 0:
return "0 alleged, 0 missing, 0 to db"
else:
return (
f"{len(self)} alleged, "
f"{len(self) - sum(self.to_table()['existence'])} missing, "
f"{sum(self.to_table()['existence'])} to db"
)
def is_good(self):
"""Check if all alleged files exist."""
return all(self.to_table()["existence"])
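
A short usage sketch of the recorder above (the file name is illustrative):

```python
fr = FileRecorder()
fr.add_record("output/image_L1.fits", db=True, comment="calibrated image")
print(fr.summary)    # "1 alleged, 1 missing, 0 to db" until the file exists
print(fr.is_good())  # False while the file is missing
```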
from astropy.io import fits
def check_file(file_path="test.fits") -> bool:
pass
def _check_file_fits() -> bool:
"""Validate checksum for .fits files."""
return True
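
`check_file` and `_check_file_fits` are stubs in this commit. A sketch of the checksum validation they gesture at, using `astropy.io.fits` verification; the dispatch-by-extension logic and the `_sketch` names are assumptions:

```python
import os

from astropy.io import fits


def check_file_sketch(file_path: str = "test.fits") -> bool:
    """Return True if file_path passes a basic validity check."""
    if file_path.endswith(".fits"):
        return _check_file_fits_sketch(file_path)
    # Assumed fallback for non-FITS files: existence only.
    return os.path.exists(file_path)


def _check_file_fits_sketch(file_path: str) -> bool:
    """Validate a FITS file; checksum=True verifies CHECKSUM/DATASUM cards."""
    try:
        with fits.open(file_path, checksum=True) as hdul:
            hdul.verify("exception")
        return True
    except Exception:
        return False
```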
@@ -23,9 +23,9 @@ def get_logger(name: str = "CSST pipeline", filename: Optional[str] = None):
    Parameters
    ----------
    name : str
-        The logger name.
+        The logger pipeline_id.
    filename : str
-        The log file name.
+        The log file pipeline_id.

    Returns
    -------
@@ -50,7 +50,7 @@ def get_logger(name: str = "CSST pipeline", filename: Optional[str] = None):
    # logging formatter
    formatter = logging.Formatter(
-        "%(asctime)s - %(levelname)s - %(module)s.py:%(lineno)d - %(message)s"
+        "%(asctime)s - %(levelname)s - %(module)s.py:%(lineno)d - %(msg)s"
    )
    # stream handler
@@ -9,11 +9,14 @@ Modified-History:
    2022-09-13, Bo Zhang, added CSST_PARAMS
    2022-10-28, Bo Zhang, added DFS_CONF
"""
+import os.path

import yaml

-from . import PACKAGE_PATH
+PACKAGE_PATH = os.path.dirname(__file__)

with open(PACKAGE_PATH + "/data/csst_params.yml") as f:
    CSST_PARAMS = yaml.safe_load(f)
-with open(PACKAGE_PATH + "/data/node_conf.yml") as f:
-    DFS_CONF = yaml.safe_load(f)
+# with open(PACKAGE_PATH + "/data/node_conf.yml") as f:
+#     DFS_CONF = yaml.safe_load(f)
...@@ -2,10 +2,11 @@ import json ...@@ -2,10 +2,11 @@ import json
import os import os
import subprocess import subprocess
import warnings import warnings
from typing import Any
from astropy import time from astropy import time
from .crds import CRDS from .ccds import CCDS
from .dfs import DFS from .dfs import DFS
from .logger import get_logger from .logger import get_logger
...@@ -13,83 +14,83 @@ from .logger import get_logger ...@@ -13,83 +14,83 @@ from .logger import get_logger
class Pipeline: class Pipeline:
def __init__( def __init__(
self, self,
dir_in="/pipeline/input", dir_input: str = "/pipeline/input",
dir_out="/pipeline/output", dir_output: str = "/pipeline/output",
dir_aux="/pipeline/aux", dir_aux: str = "/pipeline/aux",
dfs_root="/dfsroot", dfs_root: str = "/dfs_root",
crds_root="/crdsroot", ccds_root: str = "/ccds_root",
crds_cache="/pipeline/crds_cache", ccds_cache: str = "/pipeline/ccds_cache",
clean=True, filter_warnings: bool = False,
n_jobs_cpu=18, dfs: bool = True,
n_jobs_gpu=9, ccds: bool = False,
device="CPU", **kwargs: Any,
filter_warnings=False,
dfs=True,
crds=False,
): ):
# get pipeline information from environ # get pipeline information from env vars
self.name = os.getenv("PIPELINE", "-") self.pipeline_id = os.getenv("PIPELINE_ID", "-")
self.build = os.getenv("BUILD", "-") self.build = os.getenv("BUILD", "-")
self.created = os.getenv("CREATED", "-")
# set directory information # set directory information
self.dir_in = dir_in self.dir_input = dir_input
self.dir_out = dir_out self.dir_output = dir_output
self.dir_aux = dir_aux self.dir_aux = dir_aux
self.dfs_root = dfs_root self.dfs_root = dfs_root
self.crds_root = crds_root self.ccds_root = ccds_root
self.crds_cache = crds_cache self.ccds_cache = ccds_cache
self.clean = clean
# set resource information # additional parameters
self.n_jobs_cpu = n_jobs_cpu self.kwargs = kwargs
self.n_jobs_gpu = n_jobs_gpu
self.device = device.upper()
# set logger # set logger
self.pipeline_logger = get_logger(name="pipeline", filename=os.path.join(self.dir_out, "pipeline.log")) self.pipeline_logger = get_logger(
self.module_logger = get_logger(name="module", filename=os.path.join(self.dir_out, "module.log")) name="pipeline",
filename=os.path.join(
self.dir_output,
"pipeline.log",
),
)
self.module_logger = get_logger(
name="module",
filename=os.path.join(
self.dir_output,
"module.log",
),
)
# change working directory # change working directory
print(f"Change directory to {self.dir_out}") print(f"Change directory to {self.dir_output}")
os.chdir(self.dir_out) os.chdir(self.dir_output)
# clean input/output directory
if self.clean:
self.clean_directory(self.dir_in)
self.clean_directory(self.dir_out)
# Frequently used files # Frequently used files
self.message = Message(os.path.join(self.dir_out, "msg.txt")) self.msg = MessageWriter(
self.time_stamp = TimeStamp(os.path.join(self.dir_out, "time_stamp.txt")) os.path.join(self.dir_output, "message.txt"),
self.exit_code = ExitCode(os.path.join(self.dir_out, "exit_code")) )
self.error_trace = ErrorTrace(os.path.join(self.dir_out, "error_trace")) self.tsr = TimeStampRecorder(
os.path.join(self.dir_output, "time_stamp.txt"),
)
# self.exit_code = ExitCode(os.path.join(self.dir_output, "exit_code"))
# self.error_trace = ErrorTrace(os.path.join(self.dir_output, "error_trace"))
if dfs: if dfs:
self.dfs = DFS(n_try=5) self.dfs = DFS(n_try=5)
else: else:
self.dfs = None self.dfs = None
if crds: if ccds:
self.crds = CRDS() self.ccds = CCDS()
else: else:
self.crds = None self.ccds = None
if filter_warnings: if filter_warnings:
self.filter_warnings() self.filter_warnings()
self.ban_multithreading() def inspect(self):
print(f"PIPELINE_ID={self.pipeline_id}")
print(f"BUILD={self.build}")
print(f"CREATED={self.created}")
def ban_multithreading(self): def clean_output(self):
os.environ["OMP_NUM_THREADS"] = "1" """Clean output directory."""
os.environ["OPENBLAS_NUM_THREADS"] = "1" self.clean_directory(self.dir_output)
os.environ["MKL_NUM_THREADS"] = "1"
os.environ["VECLIB_MAXIMUM_THREADS"] = "1"
os.environ["NUMEXPR_NUM_THREADS"] = "1"
def set_test_env(self):
os.environ["CSST_DFS_API_MODE"] = "cluster"
os.environ["CSST_DFS_GATEWAY"] = "172.24.27.2:30880"
os.environ["CSST_DFS_APP_ID"] = "test"
os.environ["CSST_DFS_APP_TOKEN"] = "test"
os.environ["CRDS_SERVER_URL"] = "http://172.24.27.2:29000"
@staticmethod @staticmethod
def clean_directory(d): def clean_directory(d):
...@@ -112,19 +113,23 @@ class Pipeline: ...@@ -112,19 +113,23 @@ class Pipeline:
warnings.resetwarnings() warnings.resetwarnings()
class ErrorTrace: # class ErrorTrace:
def __init__(self, file_path=""): # """Write error trace to file."""
self.file_path = file_path #
# def __init__(self, file_path=""):
# self.file_path = file_path
#
# def __repr__(self):
# return f"< ErrorTrace [{self.file_path}] >"
#
# def write(self, s: str):
# with open(self.file_path, "w+") as f:
# f.write(s)
def __repr__(self):
return f"< ErrorTrace [{self.file_path}] >"
def write(self, s: str): class MessageWriter:
with open(self.file_path, "w+") as f: """Write JSON format messages to file."""
f.write(s)
class Message:
def __init__(self, file_path=""): def __init__(self, file_path=""):
self.file_path = file_path self.file_path = file_path
...@@ -147,34 +152,47 @@ class Message: ...@@ -147,34 +152,47 @@ class Message:
return d return d
class ExitCode: # DEPRECATED
def __init__(self, file_path=""): # class ExitCode:
self.file_path = file_path # def __init__(self, file_path=""):
# self.file_path = file_path
def __repr__(self): #
return f"< ExitCode [{self.file_path}] >" # def __repr__(self):
# return f"< ExitCode [{self.file_path}] >"
def truncate(self): #
with open(self.file_path, 'w') as file: # def truncate(self):
file.truncate(0) # with open(self.file_path, "w") as file:
# file.truncate(0)
def write(self, code=0): #
with open(self.file_path, "w+") as f: # def write(self, code=0):
f.write(str(code)) # with open(self.file_path, "w+") as f:
print(f"Exit with code {code} (written to '{self.file_path}')") # f.write(str(code))
# print(f"Exit with code {code} (written to '{self.file_path}')")
class TimeStamp:
def __init__(self, file_path=""): class TimeStampRecorder:
def __init__(self, file_path: str = "tsr.txt"):
"""
TimeStampRecorder Class.
Initialize a TimeStampRecorder object and connect it to `file_path`.
Parameters
----------
file_path : str
Time stamp file path.
"""
self.file_path = file_path self.file_path = file_path
def __repr__(self): def __repr__(self):
return f"< TimeStamp [{self.file_path}] >" return f"< TimeStampRecorder [{self.file_path}] >"
def truncate(self): def empty(self):
with open(self.file_path, 'w') as file: """Clean time stamp file."""
with open(self.file_path, "w") as file:
file.truncate(0) file.truncate(0)
def punch_in(self): def touch(self):
"""Write a time stamp."""
with open(self.file_path, "a+") as f: with open(self.file_path, "a+") as f:
f.write(f"{time.Time.now().isot}+00:00\n") f.write(f"{time.Time.now().isot}+00:00\n")
from ._module_docstr import ModuleHeader from ._module_docstr import ModuleHeader
from ._io import remove_dir, remove_files
from .tempfile import randfile from .tempfile import randfile
import glob
import os
import shutil
def remove_files(fmt="*post.fits"):
""" Remove files matching the specified format. """
for fp in glob.glob(fmt):
os.remove(fp)
def remove_dir(path=""):
""" Remove the specified directory. """
if os.path.exists(path):
shutil.rmtree(path)
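# Usage sketch (file and directory names are illustrative):
#     remove_files(fmt="*post.fits")  # delete intermediate post-processing FITS files
#     remove_dir(path="tmp_workdir")  # remove a scratch directory, if present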
...@@ -15,7 +15,7 @@ import shutil ...@@ -15,7 +15,7 @@ import shutil
import numpy as np import numpy as np
from csst_common import PACKAGE_PATH PACKAGE_PATH = os.path.dirname(os.path.dirname(__file__))
with open(PACKAGE_PATH + "/data/module_header.txt", "r") as f: with open(PACKAGE_PATH + "/data/module_header.txt", "r") as f:
...@@ -50,7 +50,7 @@ class ModuleHeader: ...@@ -50,7 +50,7 @@ class ModuleHeader:
identifier: str identifier: str
the identifier, e.g., SJ4 the identifier, e.g., SJ4
author: str author: str
author name author name
description: str description: str
description of the module description of the module
ignore_init: bool ignore_init: bool
......
...@@ -24,4 +24,3 @@ def randfile(digits=20, ext=".fits"): ...@@ -24,4 +24,3 @@ def randfile(digits=20, ext=".fits"):
# Use the secrets module to generate a random string of the specified length # Use the secrets module to generate a random string of the specified length
random_string = ''.join(secrets.choice(characters) for _ in range(digits)) + ext random_string = ''.join(secrets.choice(characters) for _ in range(digits)) + ext
return random_string return random_string
import functools
import time
import traceback
from collections import namedtuple
from astropy.time import Time
from csst_common.data_manager import CsstMsDataManager
from csst_common.file_recorder import FileRecorder
from csst_common.status import CsstStatus
__all__ = ["ModuleResult", "l1ppl_module", "module_wrapper"]
# module should return ModuleResult as result
ModuleResult = namedtuple("ModuleResult", ["module", "timecost", "status", "fr", "output"])
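# Contract sketch (inferred from the decorators below): a wrapped module returns
#     return CsstStatus.PERFECT, fr, extra_output
# i.e., a status, a FileRecorder, and optional extra outputs; the decorator
# measures the time cost and repacks everything into a full ModuleResult.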
def l1ppl_module(func):
@functools.wraps(func)
def call_l1ppl_module(dm: CsstMsDataManager, *args, **kwargs):
dm.logger_ppl.info(f"=====================================================")
t_start = time.time()
dm.logger_ppl.info(f"Starting Module: **{func.__name__}**")
# dm.logger_ppl.info(f"Additional arguments: {args} {kwargs}")
try:
# if the module finishes
status, fr, *output = func(dm, *args, **kwargs)
except Exception as e:
# if the module raises error
exc_info = traceback.format_exc() # traceback info
dm.logger_ppl.error(f"Error occurs! \n{exc_info}")
status = CsstStatus.ERROR # default status if exceptions occur
fr = FileRecorder() # default FileRecorder if exceptions occur
output = [exc_info, ] # default output if exceptions occur
finally:
t_stop = time.time()
t_cost = t_stop - t_start
if status in [CsstStatus.PERFECT, CsstStatus.WARNING, CsstStatus.ERROR]:
# perfect / warning / error
dm.logger_ppl.info(f"Module finished with status: {status} - time cost: {t_cost:.1f} sec")
else:
# invalid status
dm.logger_ppl.error(f"Invalid status: {status}")
# record exception traceback info
dm.logger_ppl.info(
f"ModuleResult: \n"
f" - name: {func.__name__}\n"
f" - status: {status}\n"
f" - additional output: {output}\n"
f" - fr: [{fr.summary}]\n{fr}\n"
)
# write time stamp
dm.write_stamp()
return ModuleResult(func.__name__, t_cost, status, fr, output)
return call_l1ppl_module
def module_wrapper(func):
@functools.wraps(func)
def call_module(logger, tstamp=None, *args, **kwargs):
logger.info(f"=====================================================")
t_start = time.time()
logger.info(f"Starting Module: **{func.__name__}**")
# logger.info(f"Additional arguments: {args} {kwargs}")
try:
# if the module finishes
status, fr, *output = func(*args, **kwargs)
except Exception as e:
# if the module raises error
exc_info = traceback.format_exc() # traceback info
logger.error(f"Error occurs! \n{exc_info}")
status = CsstStatus.ERROR # default status if exceptions occur
fr = FileRecorder() # default FileRecorder if exceptions occur
output = [exc_info, ] # default output if exceptions occur
finally:
t_stop = time.time()
t_cost = t_stop - t_start
if status in [CsstStatus.PERFECT, CsstStatus.WARNING, CsstStatus.ERROR]:
# perfect / warning / error
logger.info(f"Module finished with status: {status} - time cost: {t_cost:.1f} sec")
else:
# invalid status
logger.error(f"Invalid status: {status}")
# record exception traceback info
logger.info(
f"ModuleResult: \n"
f" - name: {func.__name__}\n"
f" - status: {status}\n"
f" - additional output: {output}\n"
f" - fr: [{fr.summary}]\n{fr}\n"
)
# write time stamp
if tstamp is not None:
with open(tstamp, "a+") as f:
f.write(f"{Time.now().isot}+08:00\n")
return ModuleResult(func.__name__, t_cost, status, fr, output)
return call_module
if __name__ == "__main__":
@l1ppl_module
def call_add(dm, a, b):
    if isinstance(a, float) and isinstance(b, float):
        return CsstStatus.PERFECT, FileRecorder(), a + b
    else:
        return CsstStatus.ERROR, FileRecorder(), (a, b)
# dm = CsstMsDataManager()
# print(call_add(dm, 1., 2.))
# print(call_add(dm, 1., None))
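# Expected behavior (illustrative): with a real CsstMsDataManager,
# call_add(dm, 1., 2.) logs start/finish via dm.logger_ppl and returns
# ModuleResult("call_add", <timecost>, CsstStatus.PERFECT, <FileRecorder>, [3.0]).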
""" # """
Identifier: KSC-SJ4-tests/test_data_manager.py # Identifier: KSC-SJ4-tests/test_data_manager.py
Name: test_data_manager.py # Name: test_data_manager.py
Description: data manager unit test # Description: data manager unit test
Author: Bo Zhang # Author: Bo Zhang
Created: 2022-09-13 # Created: 2022-09-13
Modified-History: # Modified-History:
2022-09-13, Bo Zhang, created # 2022-09-13, Bo Zhang, created
2022-09-29, Bo Zhang, added test for CsstMbiDataManager # 2022-09-29, Bo Zhang, added test for CsstMbiDataManager
2022-10-28, Bo Zhang, deleted unit test for CsstMsDataManager # 2022-10-28, Bo Zhang, deleted unit test for CsstMsDataManager
""" # """
import os # import os
import unittest # import unittest
#
from csst_common.data_manager import CsstMsDataManager # from csst_common.data_manager import CsstMsDataManager
from csst_common.params import CSST_PARAMS as CP # from csst_common.params import CSST_PARAMS as CP
#
dir_unittest = "/nfsdata/share/pipeline-unittest/csst_common" # dir_unittest = "/nfsdata/share/pipeline-unittest/csst_common"
#
#
class TestCsstMsDataManager(unittest.TestCase): # class TestCsstMsDataManager(unittest.TestCase):
def setUp(self) -> None: # def setUp(self) -> None:
self.dm_mbi = CsstMsDataManager.quickstart( # self.dm_mbi = CsstMsDataManager.quickstart(
ver_sim="C6.2", datatype="mbi", dir_l1=dir_unittest, exposure_id=100) # ver_sim="C6.2", datatype="mbi", dir_l1=dir_unittest, exposure_id=100)
self.dm_sls = CsstMsDataManager.quickstart( # self.dm_sls = CsstMsDataManager.quickstart(
ver_sim="C6.2", datatype="sls", dir_l1=dir_unittest, exposure_id=100) # ver_sim="C6.2", datatype="sls", dir_l1=dir_unittest, exposure_id=100)
self.dm_mbi.target_detectors = None # self.dm_mbi.target_detectors = None
self.dm_sls.target_detectors = None # self.dm_sls.target_detectors = None
#
def test_mbi_data_existence(self): # def test_mbi_data_existence(self):
self.assertTrue(self.dm_mbi.target_detectors == CP["mbi"]["detectors"]) # self.assertTrue(self.dm_mbi.target_detectors == CP["mbi"]["detectors"])
self.assertTrue(os.path.exists(self.dm_mbi.l0_detector(6))) # self.assertTrue(os.path.exists(self.dm_mbi.l0_detector(6)))
self.assertTrue(os.path.exists(self.dm_mbi.l0_log(6))) # self.assertTrue(os.path.exists(self.dm_mbi.l0_log(6)))
self.assertTrue(os.path.exists(self.dm_mbi.l0_cat(6))) # self.assertTrue(os.path.exists(self.dm_mbi.l0_cat(6)))
self.assertTrue(os.path.exists(self.dm_mbi.l0_crs(6))) # self.assertTrue(os.path.exists(self.dm_mbi.l0_crs(6)))
self.assertTrue(isinstance(self.dm_mbi.l1_detector(6, post="img.fits"), str)) # self.assertTrue(isinstance(self.dm_mbi.l1_detector(6, post="img.fits"), str))
self.assertTrue(isinstance(self.dm_mbi.l1_file(name="some_file.ext", comment="a demo file"), str)) # self.assertTrue(isinstance(self.dm_mbi.l1_file(name="some_file.ext", comment="a demo file"), str))
#
def test_sls_data_existence(self): # def test_sls_data_existence(self):
self.assertTrue(self.dm_sls.target_detectors == CP["sls"]["detectors"]) # self.assertTrue(self.dm_sls.target_detectors == CP["sls"]["detectors"])
self.assertTrue(os.path.exists(self.dm_sls.l0_detector(1))) # self.assertTrue(os.path.exists(self.dm_sls.l0_detector(1)))
self.assertTrue(os.path.exists(self.dm_sls.l0_log(1))) # self.assertTrue(os.path.exists(self.dm_sls.l0_log(1)))
self.assertTrue(os.path.exists(self.dm_sls.l0_cat(1))) # self.assertTrue(os.path.exists(self.dm_sls.l0_cat(1)))
self.assertTrue(os.path.exists(self.dm_sls.l0_crs(1))) # self.assertTrue(os.path.exists(self.dm_sls.l0_crs(1)))
self.assertTrue(isinstance(self.dm_sls.l1_detector(1, post="flt.fits"), str)) # self.assertTrue(isinstance(self.dm_sls.l1_detector(1, post="flt.fits"), str))
self.assertTrue(isinstance(self.dm_sls.l1_file(name="some_file.ext", comment="a demo file"), str)) # self.assertTrue(isinstance(self.dm_sls.l1_file(name="some_file.ext", comment="a demo file"), str))
#
# DFS is not always available # # DFS is not always available
# def test_dfs_is_available(self): # # def test_dfs_is_available(self):
# self.assertTrue(self.dm_mbi.dfs_is_available()) # # self.assertTrue(self.dm_mbi.dfs_is_available())