Commit 1353f622 authored by Wei Shoulin's avatar Wei Shoulin

rm reqs

parent 7191c49c
......@@ -17,22 +17,15 @@ drop table if exists t_level0_header;
drop table if exists t_level0_prc;
drop table if exists t_cal2level0;
drop table if exists t_level1_data;
drop table if exists t_cal_header;
drop table if exists t_level1_header;
drop table if exists t_cal_merge;
drop table if exists t_level1_prc;
/*----------------msc------------------------------*/
drop table if exists msc_level1_data;
drop table if exists msc_level1_header;
drop table if exists t_level1_ref;
drop table if exists msc_level1_prc;
drop table if exists msc_level1_ref;
/*----------------msc------------------------------*/
drop table if exists msc_level2_data;
......@@ -41,44 +34,17 @@ drop table if exists msc_level2_header;
drop table if exists msc_level2_catalog;
/*----------------ifs------------------------------*/
drop table if exists ifs_level1_data;
drop table if exists ifs_level1_header;
drop table if exists ifs_level1_prc;
drop table if exists ifs_level1_ref;
/*----------------mci------------------------------*/
drop table if exists mci_level1_data;
drop table if exists mci_level1_header;
drop table if exists mci_level1_prc;
drop table if exists mci_level1_ref;
/*----------------cpic------------------------------*/
drop table if exists cpic_level1_data;
drop table if exists cpic_level1_header;
drop table if exists cpic_level1_prc;
drop table if exists cpic_level1_ref;
/*----------------sls------------------------------*/
drop table if exists sls_level1_data;
drop table if exists sls_level1_header;
drop table if exists sls_level1_prc;
drop table if exists sls_level1_ref;
drop table if exists sls_level2_spectra;
/*===========================facility===================================*/
/*==============================================================*/
/* Table: t_detector */
/*==============================================================*/
create table t_detector
(
no varchar(10) not null,
......@@ -155,13 +121,17 @@ create table t_observation
create_time datetime,
import_status tinyint(1)
);
/*==============================================================*/
/* Table: t_level0_data */
/*==============================================================*/
create table t_level0_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
obs_id varchar(10) not null,
detector_no varchar(10) not null,
filter varchar(16),
module_id varchar(16),
obs_type varchar(16),
obs_time datetime,
exp_time float,
......@@ -208,44 +178,13 @@ create table t_level0_prc
result_file_path varchar(256)
);
create table t_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table t_cal_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table t_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
/*========================msc======================================*/
create table msc_level1_data
create table t_level1_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
data_type varchar(64) not null,
filter varchar(16),
module_id varchar(16),
cor_sci_id int(20),
prc_params varchar(1024),
filename varchar(128),
......@@ -255,26 +194,30 @@ create table msc_level1_data
qc1_status tinyint(1),
qc1_time datetime,
create_time datetime,
pipeline_id varchar(60)
pipeline_id varchar(60),
last_query_time datetime
);
create table msc_level1_ref (
create table t_level1_ref (
id integer PRIMARY KEY autoincrement,
level1_id int(20) not null,
ref_type varchar(64) not null,
cal_id int(20) not null,
primary key (level1_id, ref_type)
cal_id varchar(128) not null
);
create table msc_level1_header
create table t_level1_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
ra_cen float,
dec_cen float,
obj_hpix int(20) default 0,
cen_hpix int(20) default 0,
primary key (id)
);
create table msc_level1_prc
create table t_level1_prc
(
id integer PRIMARY KEY autoincrement,
level1_id int(20) not null,
......@@ -285,7 +228,7 @@ create table msc_level1_prc
prc_time datetime,
result_file_path varchar(256)
);
/*========================msc======================================*/
create table msc_level2_data (
id integer PRIMARY KEY autoincrement,
level0_id VARCHAR(20) null,
......@@ -331,138 +274,11 @@ create table msc_level2co_header (
constraint PK_MSC_LEVEL2CO_HEADER primary key (id)
);
/*===========================ifs===================================*/
create table ifs_level1_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
data_type varchar(64) not null,
cor_sci_id int(20),
prc_params varchar(1024),
filename varchar(128),
file_path varchar(256),
prc_status tinyint(1),
prc_time datetime,
qc1_status tinyint(1),
qc1_time datetime,
create_time datetime,
pipeline_id varchar(60)
);
create table ifs_level1_ref (
level1_id int(20) not null,
ref_type varchar(64) not null,
cal_id int(20) not null,
primary key (level1_id, ref_type)
);
create table ifs_level1_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table ifs_level1_prc
(
id integer PRIMARY KEY autoincrement,
level1_id int(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
/*===========================mci===================================*/
create table mci_level1_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
data_type varchar(64) not null,
cor_sci_id int(20),
prc_params varchar(1024),
filename varchar(128),
file_path varchar(256),
prc_status tinyint(1),
prc_time datetime,
qc1_status tinyint(1),
qc1_time datetime,
create_time datetime,
pipeline_id varchar(60)
);
create table mci_level1_ref (
level1_id int(20) not null,
ref_type varchar(64) not null,
cal_id int(20) not null,
primary key (level1_id, ref_type)
);
create table mci_level1_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table mci_level1_prc
(
id integer PRIMARY KEY autoincrement,
level1_id int(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
/*===========================sls===================================*/
create table sls_level1_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
data_type varchar(64) not null,
prc_params varchar(1024),
filename varchar(128),
file_path varchar(256),
prc_status tinyint(1),
prc_time datetime,
qc1_status tinyint(1),
qc1_time datetime,
create_time datetime,
pipeline_id varchar(60)
);
create table sls_level1_ref (
level1_id int(20) not null,
ref_type varchar(64) not null,
cal_id int(20) not null,
primary key (level1_id, ref_type)
);
create table sls_level1_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table sls_level1_prc
(
id integer PRIMARY KEY autoincrement,
level1_id int(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
create table sls_level2_spectra
(
......@@ -489,47 +305,3 @@ create table sls_level2_spectra_header
primary key (id)
);
/*===========================cpic===================================*/
create table cpic_level1_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
data_type varchar(64) not null,
prc_params varchar(1024),
filename varchar(128),
file_path varchar(256),
prc_status tinyint(1),
prc_time datetime,
qc1_status tinyint(1),
qc1_time datetime,
create_time datetime,
pipeline_id varchar(60)
);
create table cpic_level1_ref (
level1_id int(20) not null,
ref_type varchar(64) not null,
cal_id int(20) not null,
primary key (level1_id, ref_type)
);
create table cpic_level1_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table cpic_level1_prc
(
id integer PRIMARY KEY autoincrement,
level1_id int(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
\ No newline at end of file
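The DDL above consolidates the per-module msc/ifs/mci/cpic level1 tables into shared t_level1_data / t_level1_ref / t_level1_header / t_level1_prc tables keyed by module_id. A minimal sketch of the new layout in use, assuming an SQLite store (the "integer PRIMARY KEY autoincrement" column syntax above is SQLite's); all identifiers and values below are illustrative:

# Sketch: write one Level-1 product plus its calibration refs
# into the unified tables, assuming an SQLite database file.
import sqlite3

conn = sqlite3.connect("csst_dfs.db")  # hypothetical path
cur = conn.cursor()
cur.execute(
    "insert into t_level1_data "
    "(level0_id, data_type, module_id, prc_status, create_time) "
    "values (?,?,?,?, datetime('now'))",
    ("100000100109", "sci", "MSC", -1),
)
level1_id = cur.lastrowid
# one t_level1_ref row per reference type; cal_id is varchar(128) in the new schema
for ref_type, cal_id in {"bias": "CAL001", "flat": "CAL002"}.items():
    cur.execute(
        "insert into t_level1_ref (level1_id, ref_type, cal_id) values (?,?,?)",
        (level1_id, ref_type, cal_id),
    )
conn.commit()
conn.close()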
from .calmerge import CalMergeApi
from .level0 import Level0DataApi
from .level0prc import Level0PrcApi
from .level1 import Level1DataApi
from .level1prc import Level1PrcApi
\ No newline at end of file
import os, sys
import argparse
import logging
from astropy.io import fits
import datetime
import shutil
from csst_dfs_api_local.common.db import DBClient
log = logging.getLogger('csst-dfs-api-local')
def ingest():
parser = argparse.ArgumentParser(prog=f"{sys.argv[0]}", description="ingest the local files")
parser.add_argument('-i','--infile', dest="infile", help="a file or a directory")
parser.add_argument('-m', '--copyfiles', dest="copyfiles", action='store_true', default=False, help="copy files after import")
args = parser.parse_args(sys.argv[1:])
import_root_dir = args.infile
if import_root_dir is None or (not os.path.isfile(import_root_dir) and not os.path.isdir(import_root_dir)):
parser.print_help()
sys.exit(0)
db = DBClient()
if os.path.isfile(import_root_dir):
log.info(f"prepare import {import_root_dir}")
ingest_one(import_root_dir, db, args.copyfiles)
if os.path.isdir(import_root_dir):
for (path, _, file_names) in os.walk(import_root_dir):
for filename in file_names:
if filename.find(".fits") > 0:
file_full_path = os.path.join(path, filename)
log.info(f"prepare import {file_full_path}")
ingest_one(file_full_path, db, args.copyfiles)
db.close()
def ingest_one(file_path, db, copyfiles):
dest_root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
hdul = fits.open(file_path)
header = hdul[0].header
obs_id = header["OBSID"]
exp_start_time = f"{header['DATE-OBS']} {header['TIME-OBS']}"
exp_time = header['EXPTIME']
module_id = header["INSTRUME"]
obs_type = header["FILETYPE"]
qc0_status = -1
prc_status = -1
time_now = datetime.datetime.now()
create_time = time_now.strftime('%Y-%m-%d %H:%M:%S')
facility_status_id = 0
module_status_id = 0
existed = db.exists("select * from t_observation where obs_id=?", (obs_id,))
if not existed:
db.execute("insert into t_observation \
(obs_id,obs_time,exp_time,module_id,obs_type,facility_status_id, module_status_id, qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?)",
(obs_id,exp_start_time,exp_time,module_id,obs_type,facility_status_id,module_status_id,qc0_status, prc_status,create_time))
db.end()
#level0
detector = header["DETECTOR"]
filename = header["FILENAME"]
existed = db.exists(
"select * from cpic_level0_data where filename=?",
(filename,)
)
if existed:
log.warning('%s has already been imported' %(file_path, ))
db.end()
return
detector_status_id = 0
file_full_path = file_path
if copyfiles:
file_dir = f"{dest_root_dir}/{module_id}/{obs_type.upper()}/{header['EXPSTART']}/{obs_id}/MS"
if not os.path.exists(file_dir):
os.makedirs(file_dir)
file_full_path = f"{file_dir}/{filename}.fits"
level0_id = f"{obs_id}{detector}"
c = db.execute("insert into cpic_level0_data \
(level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id, obs_id, detector, obs_type, exp_start_time, exp_time, detector_status_id, filename, file_full_path, qc0_status, prc_status,create_time))
db.end()
level0_id_id = db.last_row_id()
#level0-header
ra_obj = header["RA_OBJ"]
dec_obj = header["DEC_OBJ"]
db.execute("delete from cpic_level0_header where id=?",(level0_id_id,))
db.execute("insert into cpic_level0_header \
(id, ra_obj, dec_obj, create_time) \
values (?,?,?,?)",
(level0_id_id, ra_obj, dec_obj, create_time))
if copyfiles:
#copy files
shutil.copyfile(file_path, file_full_path)
db.end()
print(f"{file_path} imported")
if __name__ == "__main__":
ingest()
\ No newline at end of file
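For reference, a hedged invocation sketch of the ingest entry point above; the module path is an assumption, and the arguments mirror the argparse definition (-i file or directory, -m/--copyfiles to copy imported files under CSST_LOCAL_FILE_ROOT, default /opt/temp/csst):

# hypothetical driver; the import path is assumed, not confirmed by this commit
import sys
from csst_dfs_api_local.cpic.ingest import ingest

sys.argv = ["ingest", "-i", "/data/cpic/obs", "--copyfiles"]
ingest()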
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.cpic import Level1Record
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level1DataApi(object):
def __init__(self, sub_system = "cpic"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level1 records from database
parameter kwargs:
level0_id: [str]
data_type: [str]
create_time : (start, end),
qc1_status : [int],
prc_status : [int],
filename: [str]
limit: limits the number of returned records, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
data_type = get_parameter(kwargs, "data_type")
create_time_start = get_parameter(kwargs, "create_time", [None, None])[0]
create_time_end = get_parameter(kwargs, "create_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
filename = get_parameter(kwargs, "filename")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from cpic_level1_data where 1=1"
sql_data = f"select * from cpic_level1_data where 1=1"
sql_condition = ""
if level0_id:
sql_condition = f"{sql_condition} and level0_id='{level0_id}'"
if data_type:
sql_condition = f"{sql_condition} and data_type='{data_type}'"
if create_time_start:
sql_condition = f"{sql_condition} and create_time >='{create_time_start}'"
if create_time_end:
sql_condition = f"{sql_condition} and create_time <='{create_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if filename:
sql_condition = f" and filename='{filename}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, recs = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1Record, recs)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
'''
parameter kwargs:
id = [int]
return csst_dfs_common.models.Result
'''
try:
id = get_parameter(kwargs, "id", -1)
r = self.db.select_one(
"select * from cpic_level1_data where id=?", (id,))
if r:
return Result.ok_data(data=Level1Record().from_dict(r))
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
fits_id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from cpic_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('%s not found' %(fits_id, ))
return Result.error(message ='%s not found' %(fits_id, ))
self.db.execute(
'update cpic_level1_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of QC1
parameter kwargs:
id : [int],
status : [int]
'''
fits_id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from cpic_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('%s not found' %(fits_id, ))
return Result.error(message ='%s not found' %(fits_id, ))
self.db.execute(
'update cpic_level1_data set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level1 record into database
parameter kwargs:
level0_id : [str]
data_type : [str]
prc_params : [str]
filename : [str]
file_path : [str]
prc_status : [int]
prc_time : [str]
pipeline_id : [str]
refs: [dict]
return csst_dfs_common.models.Result
'''
try:
rec = Level1Record(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
data_type = get_parameter(kwargs, "data_type"),
prc_params = get_parameter(kwargs, "prc_params"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time", format_datetime(datetime.now())),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
refs = get_parameter(kwargs, "refs", {})
)
existed = self.db.exists(
"select * from cpic_level1_data where filename=?",
(rec.filename,)
)
if existed:
log.error(f'{rec.filename} already exists')
return Result.error(message=f'{rec.filename} already exists')
self.db.execute(
'INSERT INTO cpic_level1_data (level0_id,data_type,prc_params,filename,file_path,qc1_status,prc_status,prc_time,create_time,pipeline_id) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.data_type, rec.prc_params, rec.filename, rec.file_path, -1, rec.prc_status, rec.prc_time, format_time_ms(time.time()),rec.pipeline_id,)
)
self.db.end()
rec.id = self.db.last_row_id()
if rec.refs.items():
sql_refs = "insert into cpic_level1_ref (level1_id,ref_type,cal_id) values "
values = ["(%s,'%s',%s)"%(rec.id,k,v) for k,v in rec.refs.items()]
_ = self.db.execute(sql_refs + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
\ No newline at end of file
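A usage sketch for the kwargs-based Level1DataApi above: write() inserts the record plus one cpic_level1_ref row per entry in refs, and find() filters by the documented keywords. The import path and all values are illustrative assumptions:

from csst_dfs_api_local.cpic.level1 import Level1DataApi  # assumed path

api = Level1DataApi()
res = api.write(
    level0_id="100000100109",
    data_type="sci",
    filename="CSST_CPIC_L1_100000100109.fits",
    file_path="/data/L1/CSST_CPIC_L1_100000100109.fits",
    prc_status=3,
    pipeline_id="P1",
    refs={"bias": 1, "flat": 2},  # ref_type -> cal_id
)
if res.success:
    found = api.find(level0_id="100000100109", limit=10)
    print(found.data)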
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.cpic import Level1PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level1PrcApi(object):
def __init__(self, sub_system = "cpic"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level1 procedure records from database
parameter kwargs:
level1_id: [int]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level1_id = get_parameter(kwargs, "level1_id", 0)
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from cpic_level1_prc"
sql_condition = f"where level1_id={level1_id}"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from cpic_level1_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update cpic_level1_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level1 procedure record into database
parameter kwargs:
level1_id : [int]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level1PrcRecord(
id = 0,
level1_id = get_parameter(kwargs, "level1_id", 0),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO cpic_level1_prc (level1_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level1_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
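Similarly, a short sketch of Level1PrcApi above: write() records one pipeline-module run against a Level-1 product, and update_proc_status() flips its status. Import path and values are illustrative assumptions:

from csst_dfs_api_local.cpic.level1prc import Level1PrcApi  # assumed path

api = Level1PrcApi()
res = api.write(
    level1_id=42,
    pipeline_id="P1",
    prc_module="QC1",
    params_file_path="/pipeline/params/qc1.cfg",
    prc_status=-1,
    prc_time="2021-06-04 11:12:13",
    result_file_path="/pipeline/results/qc1.out",
)
if res.success:
    api.update_proc_status(id=res.data.id, status=3)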
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.facility import CalMergeRecord
from csst_dfs_commons.models.common import from_dict_list
from .level0 import Level0DataApi
log = logging.getLogger('csst')
class CalMergeApi(object):
def __init__(self):
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
self.level0Api = Level0DataApi()
def get_latest_by_l0(self, **kwargs):
''' retrieve calibration merge records from database by level0 data
:param kwargs: Parameter dictionary, key items support:
level0_id: [str],
ref_type: [str]
:returns: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
ref_type = get_parameter(kwargs, "ref_type")
level0_data = self.level0Api.get_by_level0_id(level0_id)
if level0_data.data is None:
return Result.error(message = "level0 data [%s]not found"%(level0_id))
sql_data = f"select * from t_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
sql_data = f"select * from t_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
return Result.error(message = "not found")
except Exception as e:
return Result.error(message=str(e))
def find(self, **kwargs):
''' retrieve calibration merge records from database
parameter kwargs:
detector_no: [str]
ref_type: [str]
obs_time: (start,end)
qc1_status : [int]
prc_status : [int]
file_name: [str]
limit: limits the number of returned records, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
detector_no = get_parameter(kwargs, "detector_no")
ref_type = get_parameter(kwargs, "ref_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from t_cal_merge where 1=1"
sql_data = f"select * from t_cal_merge where 1=1"
sql_condition = ""
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if ref_type:
sql_condition = f"{sql_condition} and ref_type='{ref_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename={file_name}"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
log.info(sql_count)
log.info(sql_data)
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(CalMergeRecord, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
cal_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
if id == 0 and cal_id == "":
return Result.error(message="at least define id or cal_id")
if id != 0:
return self.get_by_id(id)
if cal_id != "":
return self.get_by_cal_id(cal_id)
def get_by_id(self, iid: int):
try:
r = self.db.select_one(
"select * from t_cal_merge where id=?", (iid,))
if r:
sql_get_level0_id = f"select level0_id from t_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{iid} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_cal_id(self, cal_id: str):
try:
r = self.db.select_one(
"select * from t_cal_merge where cal_id=?", (cal_id,))
if r:
sql_get_level0_id = f"select level0_id from t_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of QC1
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update t_cal_merge set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from t_cal_merge where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update t_cal_merge set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a calibration merge record into database
parameter kwargs:
cal_id : [str]
detector_no : [str]
ref_type : [str]
obs_time : [str]
exp_time : [float]
prc_status : [int]
prc_time : [str]
filename : [str]
file_path : [str]
level0_ids : [list]
return csst_dfs_common.models.Result
'''
rec = CalMergeRecord(
id = 0,
cal_id = get_parameter(kwargs, "cal_id"),
detector_no = get_parameter(kwargs, "detector_no"),
ref_type = get_parameter(kwargs, "ref_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
level0_ids = get_parameter(kwargs, "level0_ids", [])
)
try:
self.db.execute(
'INSERT INTO t_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.cal_id, rec.detector_no, rec.ref_type, rec.obs_time, rec.exp_time, rec.filename, rec.file_path,rec.prc_status,rec.prc_time,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
sql_level0_ids = "insert into t_cal2level0 (merge_id,level0_id) values "
values = ["(%s,%s)"%(rec.id,i) for i in rec.level0_ids]
_ = self.db.execute(sql_level0_ids + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
\ No newline at end of file
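get_latest_by_l0 above resolves, for a given Level-0 frame, the calibration merge of the requested ref_type from the same detector that is nearest in time: it first takes the earliest merge observed at or after the frame's obs_time, then falls back to the latest one before it. A usage sketch (import path and values are assumptions):

from csst_dfs_api_local.facility.calmerge import CalMergeApi  # assumed path

api = CalMergeApi()
res = api.get_latest_by_l0(level0_id="100000100109", ref_type="bias")
if res.success:
    print(res.data.cal_id, res.data.obs_time)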
......@@ -46,15 +46,72 @@ def ingest_one(file_path, db, copyfiles):
hdul = fits.open(file_path)
header = hdul[0].header
header1 = hdul[1].header
obs_id = header["OBSID"]
exp_start_time = f"{header['DATE-OBS']}"
exp_time = header['EXPTIME']
module_id = header["INSTRUME"]
obs_type = header["OBSTYPE"]
# parse the FITS header
obs_id = get_header_value("OBSID", header, "")
obs_id_str = "%09d" % (obs_id) if isinstance(obs_id, int) else obs_id
module_id = get_header_value("INSTRUME", header, "")
exp_start_time = f"{get_header_value('DATE-OBS', header, '')} {get_header_value('TIME-OBS', header, '')}"
exp_time = get_header_value("EXPTIME", header, 0)
module_id = get_header_value("INSTRUME", header, "")
obs_type = get_header_value("FILETYPE", header, "")
detector = str(get_header_value("DETECTOR", header, ""))
filename = get_header_value("FILENAME", header, "")
mjd_int = int(get_header_value("EXPSTART", header, 0))
if module_id.upper() == "HSTDM":
img_header = hdul[0].header
else:
img_header = hdul[1].header
filter = get_header_value("FILTER", img_header, "")
#level0 header
object_name = get_header_value("OBJECT", header, "-")
ra_obj = get_header_value("RA_OBJ", header, 0)
dec_obj = get_header_value("DEC_OBJ", header, 0)
crpix1 = get_header_value("CRPIX1", img_header, 0)
crpix2 = get_header_value("CRPIX2", img_header, 0)
crval1 = get_header_value("CRVAL1", img_header, 0)
crval2 = get_header_value("CRVAL2", img_header, 0)
ctype1 = get_header_value("CTYPE1", img_header, "")
ctype2 = get_header_value("CTYPE2", img_header, "")
cd1_1 = get_header_value("CD1_1", img_header, 0)
cd1_2 = get_header_value("CD1_2", img_header, 0)
cd2_1 = get_header_value("CD2_1", img_header, 0)
cd2_2 = get_header_value("CD2_2", img_header, 0)
version = get_header_value("IMG_VER", img_header, "-")
# -----------------------------------------------------------------
# module-specific header handling
if len(detector) > 2 and module_id.upper() == "MSC":
detector = detector[-2:]
if module_id.upper() == "IFS" or module_id.upper() == "MCI" :
obs_type = get_header_value("OBSTYPE", header, "")
exp_start_time = get_header_value('DATE-OBS', header, '')
ra_obj = get_header_value("OBJ_RA", header, 0)
dec_obj = get_header_value("OBJ_DEC", header, 0)
detector = str(get_header_value("CAMERA", img_header, ""))
filter = get_header_value("DETNAM", img_header, "")
if module_id.upper() == "MCI" :
obs_type = get_header_value("OBSTYPE", header, "")
exp_start_time = get_header_value('DATE-OBS', header, '')
ra_obj = get_header_value("OBJ_RA", header, 0)
dec_obj = get_header_value("OBJ_DEC", header, 0)
detector = str(get_header_value("CAMERA", img_header, ""))
if module_id.upper() == "CPIC":
exp_start_time = get_header_value('DATE-OBS', header, '')
obs_type = get_header_value("OBSTYPE", header, "")
detector = get_header_value("CCDLABEL", img_header, "")
if len(detector) > 2:
detector = detector[0:2]
object_name = get_header_value("TARGET", header, "-")
if module_id.upper() == "HSTDM":
if len(detector) == 1:
detector = '0' + detector
# -----------------------------------------------------------------
qc0_status = -1
prc_status = -1
......@@ -64,63 +121,66 @@ def ingest_one(file_path, db, copyfiles):
facility_status_id = 0
module_status_id = 0
existed = db.exists("select * from t_observation where obs_id=?", (obs_id,))
existed = db.exists("select * from t_observation where obs_id=?", (obs_id_str,))
if not existed:
db.execute("insert into t_observation \
(obs_id,obs_time,exp_time,module_id,obs_type,facility_status_id, module_status_id, qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?)",
(obs_id,exp_start_time,exp_time,module_id,obs_type,facility_status_id,module_status_id,qc0_status, prc_status,create_time))
(obs_id_str,exp_start_time,exp_time,module_id,obs_type,facility_status_id,module_status_id,qc0_status, prc_status,create_time))
db.end()
#level0
detector = get_header_value("DETNAM", header1, "-")
filename = get_header_value("FILENAME", header, os.path.basename(file_path))
version = get_header_value("IMG_VER", header1, "-")
existed = db.exists(
"select * from ifs_level0_data where filename=?",
# file path handling
# MSC: component name (MS)
file_dir_in_db = f"L0/{module_id}/{obs_type.upper()}/{mjd_int}/{obs_id_str}/MS"
if module_id.upper() == "IFS":
file_dir_in_db = f"L0/{module_id}/{obs_type.upper()}/{mjd_int}"
if module_id.upper() == "MCI":
file_dir_in_db = f"L0/{module_id}/{obs_type.upper()}/{mjd_int}/{obs_id_str}"
if module_id.upper() == "CPIC":
file_dir_in_db = f"L0/{module_id}/{obs_type.upper()}/{mjd_int}/{detector}"
if module_id.upper() == "HSTDM":
file_dir_in_db = f"L0/{module_id}/{obs_type.upper()}/{mjd_int}"
level0Ids = db.select_one(
"select id from t_level0_data where filename=?",
(filename,)
)
if existed:
log.warning('%s has already been imported' %(file_path, ))
db.end()
return
detector_status_id = 0
if level0Ids is None:
file_full_path = file_path
if copyfiles:
file_dir = f"{dest_root_dir}/{module_id}/{obs_type.upper()}/{header['EXPSTART']}/{obs_id}/MS"
file_dir = f"{dest_root_dir}/{file_dir_in_db}"
if not os.path.exists(file_dir):
os.makedirs(file_dir)
file_full_path = f"{file_dir}/{filename}.fits"
file_full_path = f"{file_dir}/{filename}"
shutil.copyfile(file_path, file_full_path)
level0_id = f"{obs_id}{detector}"
c = db.execute("insert into ifs_level0_data \
(level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id, obs_id, detector, obs_type, exp_start_time, exp_time, detector_status_id, filename, file_full_path, qc0_status, prc_status,create_time))
db.end()
c = db.execute("insert into t_level0_data \
(level0_id, obs_id, detector_no, filter, obs_type, module_id, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id, obs_id, detector, filter, obs_type, module_id, exp_start_time, exp_time, detector_status_id, filename, file_full_path, qc0_status, prc_status,create_time))
level0_id_id = db.last_row_id()
#level0-header
ra_obj = header["OBJ_RA"]
dec_obj = header["OBJ_DEC"]
db.execute("delete from ifs_level0_header where id=?",(level0_id_id,))
db.execute("insert into ifs_level0_header \
(id, ra_obj, dec_obj, object_name, version) \
values (?,?,?,?,?)",
(level0_id_id, ra_obj, dec_obj, object_name, version))
if copyfiles:
#copy files
shutil.copyfile(file_path, file_full_path)
else:
level0_id_id = level0Ids[0]
log.warning('%s has already been imported' %(file_path, ))
#level0-header
db.execute("delete from t_level0_header where id=?",(level0_id_id,))
db.execute("insert into t_level0_header \
(id, ra_obj, dec_obj, crpix1, crpix2, crval1, crval2, \
ctype1, ctype2, cd1_1, cd1_2, cd2_1, cd2_2, object_name, version, create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id_id, ra_obj, dec_obj, crpix1, crpix2, crval1, crval2, ctype1, ctype2, cd1_1, cd1_2, cd2_1, cd2_2, object_name, version, create_time))
db.end()
rec = Level0Record(
id = level0_id_id,
level0_id = level0_id,
filter = filter,
module_id = module_id,
obs_id = obs_id,
detector_no = detector,
obs_type = obs_type,
......
......@@ -8,15 +8,24 @@ from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.ifs import Level1Record
from csst_dfs_commons.models.common import from_dict_list
from csst_dfs_commons.utils.fits import get_header_value, get_healpix_id, get_healpix_ids
log = logging.getLogger('csst')
class Level1DataApi(object):
def __init__(self, sub_system = "ifs"):
self.sub_system = sub_system
def __init__(self):
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def make_sql_healpix(self, ra, dec, radius, ra_column, dec_column, hpix_column):
arcDec = (PI / 180) * dec
whereSql = f"abs((180./pi()) * ACOS(SIN(pi() * {dec_column}/180) * SIN({arcDec}) + COS(pi() * {dec_column}/180) * COS({arcDec}) * COS((pi()/180) * ({ra_column} - {ra})))) < {radius}"
healpix_ids = get_healpix_ids(ra, dec, radius, 128)
whereZoneSql = "%s in (%s)" % \
(hpix_column, ','.join([str(i) for i in healpix_ids]))
return f"{whereZoneSql} and {whereSql}"
def find(self, **kwargs):
''' retrieve level1 records from database
......@@ -41,8 +50,8 @@ class Level1DataApi(object):
filename = get_parameter(kwargs, "filename")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from ifs_level1_data where 1=1"
sql_data = f"select * from ifs_level1_data where 1=1"
sql_count = "select count(*) from t_level1_data d left join t_level1_header h on d.id=h.id where 1=1"
sql_data = f"select d.* from t_level1_data d left join t_level1_header h on d.id=h.id where 1=1"
sql_condition = ""
if level0_id:
......@@ -66,6 +75,18 @@ class Level1DataApi(object):
ra_cen = get_parameter(kwargs, "ra_cen")
dec_cen = get_parameter(kwargs, "dec_cen")
radius_cen = get_parameter(kwargs, "radius_cen")
if ra_cen and dec_cen and radius_cen:
sql_condition = f"{sql_condition} and {self.make_sql_healpix(float(ra_cen), float(dec_cen), float(radius_cen), 'h.ra_cen', 'h.dec_cen', 'h.cen_hpix')}"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, recs = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1Record, recs)).append("totalCount", totalCount['c'])
......@@ -84,7 +105,7 @@ class Level1DataApi(object):
try:
fits_id = get_parameter(kwargs, "id", -1)
r = self.db.select_one(
"select * from ifs_level1_data where id=?", (fits_id,))
"select * from t_level1_data where id=?", (fits_id,))
if r:
return Result.ok_data(data=Level1Record().from_dict(r))
......@@ -107,14 +128,14 @@ class Level1DataApi(object):
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from ifs_level1_data where id=?",
"select * from t_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('%s not found' %(fits_id, ))
return Result.error(message ='%s not found' %(fits_id, ))
self.db.execute(
'update ifs_level1_data set prc_status=?, prc_time=? where id=?',
'update t_level1_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
......@@ -135,19 +156,18 @@ class Level1DataApi(object):
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from ifs_level1_data where id=?",
"select * from t_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('%s not found' %(fits_id, ))
return Result.error(message ='%s not found' %(fits_id, ))
self.db.execute(
'update ifs_level1_data set qc1_status=?, qc1_time=? where id=?',
'update t_level1_data set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
......@@ -184,7 +204,7 @@ class Level1DataApi(object):
refs = get_parameter(kwargs, "refs", {})
)
existed = self.db.exists(
"select * from ifs_level1_data where filename=?",
"select * from t_level1_data where filename=?",
(rec.filename,)
)
if existed:
......@@ -193,7 +213,7 @@ class Level1DataApi(object):
now_str = format_time_ms(time.time())
self.db.execute(
'INSERT INTO ifs_level1_data (level0_id,data_type,cor_sci_id,prc_params,filename,file_path,qc1_status,prc_status,prc_time, create_time,pipeline_id) \
'INSERT INTO t_level1_data (level0_id,data_type,cor_sci_id,prc_params,filename,file_path,qc1_status,prc_status,prc_time, create_time,pipeline_id) \
VALUES(?,?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.data_type, rec.cor_sci_id, rec.prc_params, rec.filename, rec.file_path, -1, rec.prc_status,rec.prc_time, now_str, rec.pipeline_id,)
)
......@@ -201,7 +221,7 @@ class Level1DataApi(object):
rec.id = self.db.last_row_id()
if rec.refs.items():
sql_refs = "insert into ifs_level1_ref (level1_id,ref_type,cal_id) values "
sql_refs = "insert into t_level1_ref (level1_id,ref_type,cal_id) values "
values = ["(%s,'%s',%s)"%(rec.id,k,v) for k,v in rec.refs.items()]
_ = self.db.execute(sql_refs + ",".join(values))
self.db.end()
......
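With the t_level1_header join and the cone-search condition above, find() can filter Level-1 records by sky position. A sketch, assuming the ra_cen / dec_cen / radius_cen keywords (degrees) shown in the corrected condition handling above; import path and values are illustrative:

from csst_dfs_api_local.ifs.level1 import Level1DataApi  # assumed path

api = Level1DataApi()
res = api.find(
    data_type="sci",
    ra_cen=192.8595,   # degrees
    dec_cen=27.1283,   # degrees
    radius_cen=0.5,    # cone radius, degrees
    limit=20,
)
print(res.data)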
......@@ -33,7 +33,7 @@ class Level1PrcApi(object):
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from ifs_level1_prc"
sql_data = f"select * from t_level1_prc"
sql_condition = f"where level1_id={level1_id}"
if pipeline_id:
......@@ -65,14 +65,14 @@ class Level1PrcApi(object):
try:
existed = self.db.exists(
"select * from ifs_level1_prc where id=?",
"select * from t_level1_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update ifs_level1_prc set prc_status=?, prc_time=? where id=?',
'update t_level1_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
......@@ -108,7 +108,7 @@ class Level1PrcApi(object):
)
try:
self.db.execute(
'INSERT INTO ifs_level1_prc (level1_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
'INSERT INTO t_level1_prc (level1_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level1_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
......
from .calmerge import CalMergeApi
from .level0 import Level0DataApi
from .level0prc import Level0PrcApi
from .level1 import Level1DataApi
from .level1prc import Level1PrcApi
\ No newline at end of file
from .calmerge import CalMergeApi
from .level0 import Level0DataApi
from .level0prc import Level0PrcApi
from .level1 import Level1DataApi
from .level1prc import Level1PrcApi
\ No newline at end of file
import os, sys
import argparse
import logging
from astropy.io import fits
import datetime
import shutil
from csst_dfs_api_local.common.db import DBClient
from csst_dfs_commons.utils.fits import get_header_value
from csst_dfs_commons.models.mci import Level0Record
log = logging.getLogger('csst-dfs-api-local')
def ingest():
parser = argparse.ArgumentParser(prog=f"{sys.argv[0]}", description="ingest the local files")
parser.add_argument('-i','--infile', dest="infile", help="a file or a directory")
parser.add_argument('-m', '--copyfiles', dest="copyfiles", action='store_true', default=False, help="whether to copy files after import")
args = parser.parse_args(sys.argv[1:])
import_root_dir = args.infile
if import_root_dir is None or (not os.path.isfile(import_root_dir) and not os.path.isdir(import_root_dir)):
parser.print_help()
sys.exit(0)
db = DBClient()
if os.path.isfile(import_root_dir):
log.info(f"prepare import {import_root_dir}")
ingest_one(import_root_dir, db, args.copyfiles)
if os.path.isdir(import_root_dir):
for (path, _, file_names) in os.walk(import_root_dir):
for filename in file_names:
if filename.find(".fits") > 0:
file_full_path = os.path.join(path, filename)
log.info(f"prepare import {file_full_path}")
try:
ingest_one(file_full_path, db, args.copyfiles)
except Exception as e:
print(f"{file_full_path} import error!!!")
log.error(e)
db.close()
def ingest_one(file_path, db, copyfiles):
dest_root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
hdul = fits.open(file_path)
header = hdul[0].header
header1 = hdul[1].header
obs_id = header["OBSID"]
exp_start_time = f"{header['DATE-OBS']}"
exp_time = header['EXPTIME']
module_id = header["INSTRUME"]
obs_type = header["OBSTYPE"]
object_name = get_header_value("OBJECT", header, "-")
qc0_status = -1
prc_status = -1
time_now = datetime.datetime.now()
create_time = time_now.strftime('%Y-%m-%d %H:%M:%S')
facility_status_id = 0
module_status_id = 0
existed = db.exists("select * from t_observation where obs_id=?", (obs_id,))
if not existed:
db.execute("insert into t_observation \
(obs_id,obs_time,exp_time,module_id,obs_type,facility_status_id, module_status_id, qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?)",
(obs_id,exp_start_time,exp_time,module_id,obs_type,facility_status_id,module_status_id,qc0_status, prc_status,create_time))
db.end()
#level0
detector = get_header_value("DETNAM", header1, "-")
filename = get_header_value("FILENAME", header, os.path.basename(file_path))
version = get_header_value("IMG_VER", header1, "-")
existed = db.exists(
"select * from mci_level0_data where filename=?",
(filename,)
)
if existed:
log.warning('%s has already been imported' %(file_path, ))
db.end()
return
detector_status_id = 0
file_full_path = file_path
if copyfiles:
file_dir = f"{dest_root_dir}/{module_id}/{obs_type.upper()}/{header['EXPSTART']}/{obs_id}/MS"
if not os.path.exists(file_dir):
os.makedirs(file_dir)
file_full_path = f"{file_dir}/{filename}.fits"
level0_id = f"{obs_id}{detector}"
c = db.execute("insert into mci_level0_data \
(level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id, obs_id, detector, obs_type, exp_start_time, exp_time, detector_status_id, filename, file_full_path, qc0_status, prc_status,create_time))
db.end()
level0_id_id = db.last_row_id()
#level0-header
ra_obj = header["OBJ_RA"]
dec_obj = header["OBJ_DEC"]
db.execute("delete from mci_level0_header where id=?",(level0_id_id,))
db.execute("insert into mci_level0_header \
(id, ra_obj, dec_obj, object_name, version) \
values (?,?,?,?,?)",
(level0_id_id, ra_obj, dec_obj, object_name, version))
if copyfiles:
#copy files
shutil.copyfile(file_path, file_full_path)
db.end()
rec = Level0Record(
id = level0_id_id,
level0_id = level0_id,
obs_id = obs_id,
detector_no = detector,
obs_type = obs_type,
obs_time = exp_start_time,
exp_time = exp_time,
detector_status_id = detector_status_id,
filename = filename,
file_path = file_full_path
)
print(f"{file_path} imported")
return rec
if __name__ == "__main__":
ingest()
\ No newline at end of file
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.mci import Level1Record
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level1DataApi(object):
def __init__(self, sub_system = "mci"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level1 records from database
parameter kwargs:
level0_id: [str]
data_type: [str]
create_time : (start, end),
qc1_status : [int],
prc_status : [int],
filename: [str]
limit: limits the number of returned records, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
data_type = get_parameter(kwargs, "data_type")
create_time_start = get_parameter(kwargs, "create_time", [None, None])[0]
create_time_end = get_parameter(kwargs, "create_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
filename = get_parameter(kwargs, "filename")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from mci_level1_data where 1=1"
sql_data = f"select * from mci_level1_data where 1=1"
sql_condition = ""
if level0_id:
sql_condition = f"{sql_condition} and level0_id='{level0_id}'"
if data_type:
sql_condition = f"{sql_condition} and data_type='{data_type}'"
if create_time_start:
sql_condition = f"{sql_condition} and create_time >='{create_time_start}'"
if create_time_end:
sql_condition = f"{sql_condition} and create_time <='{create_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if filename:
sql_condition = f" and filename='{filename}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, recs = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1Record, recs)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
'''
parameter kwargs:
id = [int]
return csst_dfs_common.models.Result
'''
try:
fits_id = get_parameter(kwargs, "id", -1)
r = self.db.select_one(
"select * from mci_level1_data where id=?", (fits_id,))
if r:
return Result.ok_data(data=Level1Record().from_dict(r))
else:
return Result.error(message=f"id:{fits_id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
fits_id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from mci_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('%s not found' %(fits_id, ))
return Result.error(message ='%s not found' %(fits_id, ))
self.db.execute(
'update mci_level1_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of QC1
parameter kwargs:
id : [int],
status : [int]
'''
fits_id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from mci_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('%s not found' %(fits_id, ))
return Result.error(message ='%s not found' %(fits_id, ))
self.db.execute(
'update mci_level1_data set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level1 record into database
parameter kwargs:
level0_id : [str]
data_type : [str]
cor_sci_id : [int]
prc_params : [str]
filename : [str]
file_path : [str]
prc_status : [int]
prc_time : [str]
pipeline_id : [str]
refs : [dict]
return csst_dfs_common.models.Result
'''
try:
rec = Level1Record(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
data_type = get_parameter(kwargs, "data_type"),
cor_sci_id = get_parameter(kwargs, "cor_sci_id"),
prc_params = get_parameter(kwargs, "prc_params"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time", format_datetime(datetime.now())),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
refs = get_parameter(kwargs, "refs", {})
)
existed = self.db.exists(
"select * from mci_level1_data where filename=?",
(rec.filename,)
)
if existed:
log.error(f'{rec.filename} already exists')
return Result.error(message=f'{rec.filename} already exists')
now_str = format_time_ms(time.time())
self.db.execute(
'INSERT INTO mci_level1_data (level0_id,data_type,cor_sci_id,prc_params,filename,file_path,qc1_status,prc_status,prc_time, create_time,pipeline_id) \
VALUES(?,?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.data_type, rec.cor_sci_id, rec.prc_params, rec.filename, rec.file_path, -1, rec.prc_status,rec.prc_time, now_str, rec.pipeline_id,)
)
self.db.end()
rec.id = self.db.last_row_id()
if rec.refs.items():
sql_refs = "insert into mci_level1_ref (level1_id,ref_type,cal_id) values "
values = ["(%s,'%s',%s)"%(rec.id,k,v) for k,v in rec.refs.items()]
_ = self.db.execute(sql_refs + ",".join(values))
self.db.end()
rec.create_time = now_str
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
\ No newline at end of file
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.mci import Level1PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level1PrcApi(object):
def __init__(self):
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level1 procedure records from database
parameter kwargs:
level1_id: [int]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level1_id = get_parameter(kwargs, "level1_id", 0)
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from mci_level1_prc"
sql_condition = f"where level1_id={level1_id}"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from mci_level1_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update mci_level1_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level1 procedure record into database
parameter kwargs:
level1_id : [int]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level1PrcRecord(
id = 0,
level1_id = get_parameter(kwargs, "level1_id", 0),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO mci_level1_prc (level1_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level1_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
from .calmerge import CalMergeApi
from .level0 import Level0DataApi
from .level0prc import Level0PrcApi
from .level1 import Level1DataApi
from .level1prc import Level1PrcApi
from .level2 import Level2DataApi
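# These re-exports let callers construct the sub-system APIs straight from the
# package; a hedged sketch (the package import path is an assumption):
#
#   from csst_dfs_api_local.msc import Level1DataApi, Level1PrcApi
#   level1_api = Level1DataApi()
#   prc_api = Level1PrcApi()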
import os, sys
import argparse
import logging
from astropy.io import fits
import datetime
import shutil
from csst_dfs_api_local.common.db import DBClient
from csst_dfs_commons.utils.fits import get_header_value
log = logging.getLogger('csst-dfs-api-local')
def ingest():
parser = argparse.ArgumentParser(prog=f"{sys.argv[0]}", description="ingest the local files")
parser.add_argument('-i','--infile', dest="infile", help="a file or a directory")
parser.add_argument('-m', '--copyfiles', dest="copyfiles", action='store_true', default=False, help="whether copy files after import")
args = parser.parse_args(sys.argv[1:])
import_root_dir = args.infile
if import_root_dir is None or (not os.path.isfile(import_root_dir) and not os.path.isdir(import_root_dir)):
parser.print_help()
sys.exit(0)
db = DBClient()
if os.path.isfile(import_root_dir):
log.info(f"prepare import {import_root_dir}")
ingest_one(import_root_dir, db, args.copyfiles)
if os.path.isdir(import_root_dir):
for (path, _, file_names) in os.walk(import_root_dir):
for filename in file_names:
if filename.find(".fits") > 0:
file_full_path = os.path.join(path, filename)
log.info(f"prepare import {file_full_path}")
ingest_one(file_full_path, db, args.copyfiles)
db.close()
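# ingest_one: parse one FITS file, register its observation if new, insert the
# level0 record, and store the WCS keywords from HDU 1 as the level0 header
# row; with copyfiles=True the file is also copied under CSST_LOCAL_FILE_ROOT.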
def ingest_one(file_path, db, copyfiles):
dest_root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
hdul = fits.open(file_path)
fits_header = hdul[0].header
img_header = hdul[1].header
obs_id = fits_header["OBSID"]
exp_start_time = f"{fits_header['DATE-OBS']} {fits_header['TIME-OBS']}"
exp_time = fits_header['EXPTIME']
module_id = fits_header["INSTRUME"]
obs_type = fits_header["FILETYPE"]
qc0_status = -1
prc_status = -1
time_now = datetime.datetime.now()
create_time = time_now.strftime('%Y-%m-%d %H:%M:%S')
facility_status_id = 0
module_status_id = 0
existed = db.exists("select * from t_observation where obs_id=?", (obs_id,))
if not existed:
db.execute("insert into t_observation \
(obs_id,obs_time,exp_time,module_id,obs_type,facility_status_id, module_status_id, qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?)",
(obs_id,exp_start_time,exp_time,module_id,obs_type,facility_status_id,module_status_id,qc0_status, prc_status,create_time))
db.end()
#level0
detector = get_header_value("DETECTOR", fits_header, "")
if len(detector) > 2:
detector = detector[-2:]
filename = get_header_value("FILENAME", fits_header, os.path.basename(file_path))
existed = db.exists(
"select * from msc_level0_data where filename=?",
(filename,)
)
if existed:
log.warning('%s has already been imported' %(file_path, ))
db.end()
return
detector_status_id = 0
file_full_path = file_path
if copyfiles:
file_dir = f"{dest_root_dir}/{module_id}/{obs_type.upper()}/{fits_header['EXPSTART']}/{obs_id}/MS"
if not os.path.exists(file_dir):
os.makedirs(file_dir)
file_full_path = f"{file_dir}/{filename}.fits"
level0_id = f"{obs_id}{detector}"
c = db.execute("insert into msc_level0_data \
(level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id, obs_id, detector, obs_type, exp_start_time, exp_time, detector_status_id, filename, file_full_path, qc0_status, prc_status,create_time))
db.end()
level0_id_id = db.last_row_id()
#level0-header
ra_obj = get_header_value("RA_OBJ", fits_header, 0)
dec_obj = get_header_value("DEC_OBJ", fits_header, 0)
crpix1 = get_header_value("CRPIX1", img_header, 0)
crpix2 = get_header_value("CRPIX2", img_header, 0)
crval1 = get_header_value("CRVAL1", img_header, 0)
crval2 = get_header_value("CRVAL2", img_header, 0)
ctype1 = get_header_value("CTYPE1", img_header, "")
ctype2 = get_header_value("CTYPE2", img_header, "")
cd1_1 = get_header_value("CD1_1", img_header, 0)
cd1_2 = get_header_value("CD1_2", img_header, 0)
cd2_1 = get_header_value("CD2_1", img_header, 0)
cd2_2 = get_header_value("CD2_2", img_header, 0)
db.execute("delete from msc_level0_header where id=?",(level0_id_id,))
db.execute("insert into msc_level0_header \
(id, ra_obj, dec_obj, crpix1, crpix2, crval1, crval2, \
ctype1, ctype2, cd1_1, cd1_2, cd2_1, cd2_2, create_time) \
values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
(level0_id_id, ra_obj, dec_obj, crpix1, crpix2, crval1, crval2, ctype1, ctype2, cd1_1, cd1_2, cd2_1, cd2_2, create_time))
if copyfiles:
#copy files
shutil.copyfile(file_path, file_full_path)
db.end()
print(f"{file_path} imported")
if __name__ == "__main__":
ingest()
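# Example invocations, assuming this module is run with `python -m` (the
# module path below is an assumption, not from the source):
#
#   python -m csst_dfs_api_local.cli.ingest -i /data/obs/some_exposure.fits
#   python -m csst_dfs_api_local.cli.ingest -i /data/obs/ --copyfiles
#
# The first form ingests a single FITS file; the second walks the directory
# tree, ingesting every *.fits file and, with --copyfiles, copying each file
# under CSST_LOCAL_FILE_ROOT.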
import os
import logging
import time
from datetime import datetime
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.msc import Level1Record
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level1DataApi(object):
def __init__(self, sub_system = "msc"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level1 records from database
parameter kwargs:
level0_id: [str]
data_type: [str]
create_time : (start, end),
qc1_status : [int],
prc_status : [int],
filename: [str]
limit: maximum number of records to return; default 0 means no limit
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
data_type = get_parameter(kwargs, "data_type")
create_time_start = get_parameter(kwargs, "create_time", [None, None])[0]
create_time_end = get_parameter(kwargs, "create_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
filename = get_parameter(kwargs, "filename")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from msc_level1_data where 1=1"
sql_data = f"select * from msc_level1_data where 1=1"
sql_condition = ""
if level0_id:
sql_condition = f"{sql_condition} and level0_id='{level0_id}'"
if data_type:
sql_condition = f"{sql_condition} and data_type='{data_type}'"
if create_time_start:
sql_condition = f"{sql_condition} and create_time >='{create_time_start}'"
if create_time_end:
sql_condition = f"{sql_condition} and create_time <='{create_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if filename:
sql_condition = f" and filename='{filename}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, recs = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1Record, recs)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
'''
parameter kwargs:
id = [int]
return csst_dfs_common.models.Result
'''
try:
id = get_parameter(kwargs, "id", -1)
r = self.db.select_one(
"select * from msc_level1_data where id=?", (id,))
if r:
return Result.ok_data(data=Level1Record().from_dict(r))
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
fits_id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from msc_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('level1 record %s not found' % (fits_id,))
return Result.error(message='level1 record %s not found' % (fits_id,))
self.db.execute(
'update msc_level1_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of QC1
parameter kwargs:
id : [int],
status : [int]
'''
fits_id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from msc_level1_data where id=?",
(fits_id,)
)
if not existed:
log.warning('level1 record %s not found' % (fits_id,))
return Result.error(message='level1 record %s not found' % (fits_id,))
self.db.execute(
'update msc_level1_data set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), fits_id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level1 record into database
parameter kwargs:
level0_id : [str]
data_type : [str]
cor_sci_id : [int]
prc_params : [str]
filename : [str]
file_path : [str]
prc_status : [int]
prc_time : [str]
pipeline_id : [str]
refs: [dict]
return csst_dfs_common.models.Result
'''
try:
rec = Level1Record(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
data_type = get_parameter(kwargs, "data_type"),
cor_sci_id = get_parameter(kwargs, "cor_sci_id"),
prc_params = get_parameter(kwargs, "prc_params"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time", format_datetime(datetime.now())),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
refs = get_parameter(kwargs, "refs", {})
)
existed = self.db.exists(
"select * from msc_level1_data where filename=?",
(rec.filename,)
)
if existed:
log.error(f'{rec.filename} already exists')
return Result.error(message=f'{rec.filename} already exists')
self.db.execute(
'INSERT INTO msc_level1_data (level0_id,data_type,cor_sci_id,prc_params,filename,file_path,qc1_status,prc_status,prc_time,create_time,pipeline_id) \
VALUES(?,?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.data_type, rec.cor_sci_id, rec.prc_params, rec.filename, rec.file_path, -1, rec.prc_status, rec.prc_time, format_time_ms(time.time()),rec.pipeline_id,)
)
self.db.end()
rec.id = self.db.last_row_id()
if rec.refs.items():
sql_refs = "insert into msc_level1_ref (level1_id,ref_type,cal_id) values "
values = ["(%s,'%s',%s)"%(rec.id,k,v) for k,v in rec.refs.items()]
_ = self.db.execute(sql_refs + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
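# A hedged usage sketch for write()/find(); the values below are illustrative,
# not from the source. Each entry in `refs` becomes one msc_level1_ref row.
if __name__ == "__main__":
    api = Level1DataApi()
    res = api.write(level0_id="000001101", data_type="sci",
                    filename="L1_demo.fits",
                    file_path="/opt/temp/csst/L1_demo.fits",
                    prc_status=3, pipeline_id="P1",
                    refs={"flat": 1, "bias": 2})
    print(res)
    print(api.find(level0_id="000001101", limit=10))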
import os
import logging
import time
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.msc import Level1PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level1PrcApi(object):
def __init__(self, sub_system = "msc"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level1 procedure records from database
parameter kwargs:
level1_id: [int]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level1_id = get_parameter(kwargs, "level1_id", 0)
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from msc_level1_prc"
sql_condition = f"where level1_id={level1_id}"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level1PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from msc_level1_prc where id=?",
(id,)
)
if not existed:
log.warning('level1 prc record %s not found' % (id,))
return Result.error(message='level1 prc record %s not found' % (id,))
self.db.execute(
'update msc_level1_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level1 procedure record into database
parameter kwargs:
level1_id : [int]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level1PrcRecord(
id = 0,
level1_id = get_parameter(kwargs, "level1_id", 0),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO msc_level1_prc (level1_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level1_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
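# Sketch of the status-update flow (assumes a msc_level1_prc row with id=1
# already exists; the id and status values are illustrative).
if __name__ == "__main__":
    api = Level1PrcApi()
    res = api.update_proc_status(id=1, status=4)
    if not res.success:
        log.error(res.message)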
@@ -154,7 +154,6 @@ class Level2DataApi(object):
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
@@ -172,7 +171,6 @@ class Level2DataApi(object):
prc_status : [int]
prc_time : [str]
return csst_dfs_common.models.Result
'''
try: