Commit 7191c49c authored by Wei Shoulin
un level0

parent 434f1e57
......@@ -11,18 +11,20 @@ drop table if exists t_facility_status;
drop table if exists t_guiding;
/*----------------msc------------------------------*/
drop table if exists msc_level0_data;
drop table if exists t_level0_data;
drop table if exists t_level0_header;
drop table if exists t_level0_prc;
drop table if exists msc_level0_header;
drop table if exists t_cal2level0;
drop table if exists msc_level0_prc;
drop table if exists t_cal_header;
drop table if exists msc_cal2level0;
drop table if exists t_cal_merge;
drop table if exists msc_cal_header;
/*----------------msc------------------------------*/
drop table if exists msc_cal_merge;
drop table if exists msc_level1_data;
......@@ -39,18 +41,6 @@ drop table if exists msc_level2_header;
drop table if exists msc_level2_catalog;
/*----------------ifs------------------------------*/
drop table if exists ifs_level0_data;
drop table if exists ifs_level0_header;
drop table if exists ifs_level0_prc;
drop table if exists ifs_cal2level0;
drop table if exists ifs_cal_header;
drop table if exists ifs_cal_merge;
drop table if exists ifs_level1_data;
drop table if exists ifs_level1_header;
......@@ -60,18 +50,6 @@ drop table if exists ifs_level1_prc;
drop table if exists ifs_level1_ref;
/*----------------mci------------------------------*/
drop table if exists mci_level0_data;
drop table if exists mci_level0_header;
drop table if exists mci_level0_prc;
drop table if exists mci_cal2level0;
drop table if exists mci_cal_header;
drop table if exists mci_cal_merge;
drop table if exists mci_level1_data;
drop table if exists mci_level1_header;
......@@ -79,18 +57,16 @@ drop table if exists mci_level1_header;
drop table if exists mci_level1_prc;
drop table if exists mci_level1_ref;
/*----------------sls------------------------------*/
drop table if exists sls_level0_data;
/*----------------cpic------------------------------*/
drop table if exists sls_level0_header;
drop table if exists cpic_level1_data;
drop table if exists sls_level0_prc;
drop table if exists cpic_level1_header;
drop table if exists sls_cal2level0;
drop table if exists cpic_level1_prc;
drop table if exists sls_cal_header;
drop table if exists sls_cal_merge;
drop table if exists cpic_level1_ref;
/*----------------sls------------------------------*/
drop table if exists sls_level1_data;
......@@ -180,8 +156,7 @@ create table t_observation
import_status tinyint(1)
);
/*===========================msc===================================*/
create table msc_level0_data
create table t_level0_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
......@@ -200,7 +175,7 @@ create table msc_level0_data
create_time datetime
);
create table msc_level0_header
create table t_level0_header
(
id int(20) not null,
ra_obj float,
......@@ -215,11 +190,13 @@ create table msc_level0_header
cd1_2 float,
cd2_1 float,
cd2_2 float,
object_name varchar(64),
version varchar(64),
create_time datetime,
primary key (id)
);
create table msc_level0_prc
create table t_level0_prc
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
......@@ -230,6 +207,40 @@ create table msc_level0_prc
prc_time datetime,
result_file_path varchar(256)
);
create table t_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table t_cal_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table t_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
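/* Illustrative only: a merged calibration product in t_cal_merge is linked to the level0
   exposures it was built from through t_cal2level0 (merge_id -> t_cal_merge.id,
   level0_id -> t_level0_data.level0_id). A lookup of the source exposures of one merge
   record could look like the following (the cal_id value is hypothetical):
     select d.*
     from t_cal_merge m
     join t_cal2level0 c on c.merge_id = m.id
     join t_level0_data d on d.level0_id = c.level0_id
     where m.cal_id = 'CAL001';
*/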
/*========================msc======================================*/
create table msc_level1_data
(
id integer PRIMARY KEY autoincrement,
......@@ -246,28 +257,15 @@ create table msc_level1_data
create_time datetime,
pipeline_id varchar(60)
);
create table msc_level1_ref (
level1_id int(20) not null,
ref_type varchar(64) not null,
cal_id int(20) not null,
primary key (level1_id, ref_type)
);
create table msc_level1_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table msc_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table msc_cal_header
create table msc_level1_header
(
id int(20) not null,
ra_obj float,
......@@ -276,22 +274,6 @@ create table msc_cal_header
primary key (id)
);
create table msc_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table msc_level1_prc
(
id integer PRIMARY KEY autoincrement,
......@@ -349,79 +331,6 @@ create table msc_level2co_header (
constraint PK_MSC_LEVEL2CO_HEADER primary key (id)
);
/*===========================ifs===================================*/
create table ifs_level0_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
obs_id varchar(10) not null,
detector_no varchar(10) not null,
obs_type varchar(16),
obs_time datetime,
exp_time float,
detector_status_id int(20),
filename varchar(128),
file_path varchar(256),
qc0_status tinyint(1),
qc0_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table ifs_level0_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
object_name varchar(64) not null,
version varchar(64) not null,
primary key (id)
);
create table ifs_level0_prc
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
create table ifs_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table ifs_cal_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table ifs_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table ifs_level1_data
(
id integer PRIMARY KEY autoincrement,
......@@ -466,78 +375,6 @@ create table ifs_level1_prc
result_file_path varchar(256)
);
/*===========================mci===================================*/
create table mci_level0_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
obs_id varchar(10) not null,
detector_no varchar(10) not null,
obs_type varchar(16),
obs_time datetime,
exp_time float,
detector_status_id int(20),
filename varchar(128),
file_path varchar(256),
qc0_status tinyint(1),
qc0_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table mci_level0_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
object_name varchar(64) not null,
version varchar(64) not null,
primary key (id)
);
create table mci_level0_prc
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
create table mci_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table mci_cal_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table mci_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table mci_level1_data
(
......@@ -583,78 +420,6 @@ create table mci_level1_prc
result_file_path varchar(256)
);
/*===========================sls===================================*/
create table sls_level0_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
obs_id varchar(10) not null,
detector_no varchar(10) not null,
obs_type varchar(16),
obs_time datetime,
exp_time float,
detector_status_id int(20),
filename varchar(128),
file_path varchar(256),
qc0_status tinyint(1),
qc0_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table sls_level0_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table sls_level0_prc
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
create table sls_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table sls_cal_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table sls_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table sls_level1_data
(
id integer PRIMARY KEY autoincrement,
......@@ -724,77 +489,6 @@ create table sls_level2_spectra_header
primary key (id)
);
/*===========================cpic===================================*/
create table cpic_level0_data
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
obs_id varchar(10) not null,
detector_no varchar(10) not null,
obs_type varchar(16),
obs_time datetime,
exp_time float,
detector_status_id int(20),
filename varchar(128),
file_path varchar(256),
qc0_status tinyint(1),
qc0_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table cpic_level0_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table cpic_level0_prc
(
id integer PRIMARY KEY autoincrement,
level0_id varchar(20) not null,
pipeline_id varchar(64) not null,
prc_module varchar(32) not null,
params_file_path varchar(256),
prc_status int(2),
prc_time datetime,
result_file_path varchar(256)
);
create table cpic_cal2level0
(
merge_id int(20) not null,
level0_id varchar(20) not null,
primary key (merge_id, level0_id)
);
create table cpic_cal_header
(
id int(20) not null,
ra_obj float,
dec_obj float,
create_time datetime,
primary key (id)
);
create table cpic_cal_merge
(
id integer PRIMARY KEY autoincrement,
cal_id varchar(20) not null,
detector_no varchar(10) not null,
ref_type varchar(16),
obs_time datetime,
exp_time float,
filename varchar(128),
file_path varchar(256),
qc1_status tinyint(1),
qc1_time datetime,
prc_status tinyint(1),
prc_time datetime,
create_time datetime
);
create table cpic_level1_data
(
......
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.cpic import CalMergeRecord
from csst_dfs_commons.models.common import from_dict_list
from .level0 import Level0DataApi
log = logging.getLogger('csst')
class CalMergeApi(object):
def __init__(self, sub_system = "cpic"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
self.level0Api = Level0DataApi()
def get_latest_by_l0(self, **kwargs):
''' retrieve calibration merge records from database by level0 data
:param kwargs: Parameter dictionary, key items support:
level0_id: [str],
ref_type: [str]
:returns: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
ref_type = get_parameter(kwargs, "ref_type")
level0_data = self.level0Api.get_by_level0_id(level0_id)
if level0_data.data is None:
return Result.error(message = "level0 data [%s]not found"%(level0_id))
sql_data = f"select * from cpic_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
sql_data = f"select * from cpic_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
return Result.error(message = "not found")
except Exception as e:
return Result.error(message=str(e))
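# Usage sketch (values hypothetical): get_latest_by_l0 looks for the earliest merged
# calibration taken at or after the exposure's obs_time and, failing that, falls back
# to the most recent one taken before it.
#   api = CalMergeApi()
#   result = api.get_latest_by_l0(level0_id="0000012301", ref_type="bias")
#   if result.success:
#       print(result.data.file_path)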
def find(self, **kwargs):
''' retrieve calibration merge records from database
parameter kwargs:
detector_no: [str]
ref_type: [str]
obs_time: (start,end)
qc1_status : [int]
prc_status : [int]
file_name: [str]
limit: maximum number of records returned, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
detector_no = get_parameter(kwargs, "detector_no")
ref_type = get_parameter(kwargs, "ref_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from cpic_cal_merge where 1=1"
sql_data = f"select * from cpic_cal_merge where 1=1"
sql_condition = ""
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if ref_type:
sql_condition = f"{sql_condition} and ref_type='{ref_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename={file_name}"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
log.info(sql_count)
log.info(sql_data)
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(CalMergeRecord, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
cal_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
if id == 0 and cal_id == "":
return Result.error(message="at least define id or cal_id")
if id != 0:
return self.get_by_id(id)
if cal_id != "":
return self.get_by_cal_id(cal_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from cpic_cal_merge where id=?", (id,))
if r:
sql_get_level0_id = f"select level0_id from cpic_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_cal_id(self, cal_id: str):
try:
r = self.db.select_one(
"select * from cpic_cal_merge where cal_id=?", (cal_id,))
if r:
sql_get_level0_id = f"select level0_id from cpic_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{cal_id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update cpic_cal_merge set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from cpic_cal_merge where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update cpic_cal_merge set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a calibration merge record into database
parameter kwargs:
cal_id : [str]
detector_no : [str]
ref_type : [str]
obs_time : [str]
exp_time : [float]
prc_status : [int]
prc_time : [str]
filename : [str]
file_path : [str]
level0_ids : [list]
return csst_dfs_common.models.Result
'''
rec = CalMergeRecord(
id = 0,
cal_id = get_parameter(kwargs, "cal_id"),
detector_no = get_parameter(kwargs, "detector_no"),
ref_type = get_parameter(kwargs, "ref_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
level0_ids = get_parameter(kwargs, "level0_ids", [])
)
try:
self.db.execute(
'INSERT INTO cpic_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.cal_id, rec.detector_no, rec.ref_type, rec.obs_time, rec.exp_time, rec.filename, rec.file_path,rec.prc_status,rec.prc_time,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
sql_level0_ids = "insert into cpic_cal2level0 (merge_id,level0_id) values "
values = ["(%s,%s)"%(rec.id,i) for i in rec.level0_ids]
_ = self.db.execute(sql_level0_ids + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
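# Usage sketch (all values hypothetical): write() inserts the merge record into
# cpic_cal_merge and then links every entry of level0_ids to it through cpic_cal2level0.
#   api = CalMergeApi()
#   result = api.write(cal_id="CAL001", detector_no="01", ref_type="bias",
#                      obs_time="2024-01-01 00:00:00", exp_time=150.0,
#                      filename="cal_bias_01.fits", file_path="/data/cal_bias_01.fits",
#                      level0_ids=["000001201"])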
\ No newline at end of file
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.cpic import Level0Record
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0DataApi(object):
def __init__(self, sub_system = "cpic"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 records from database
parameter kwargs:
obs_id: [str]
detector_no: [str]
obs_type: [str]
obs_time : (start, end),
qc0_status : [int],
prc_status : [int],
file_name: [str]
limit: maximum number of records returned, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
obs_id = get_parameter(kwargs, "obs_id")
detector_no = get_parameter(kwargs, "detector_no")
obs_type = get_parameter(kwargs, "obs_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc0_status = get_parameter(kwargs, "qc0_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from cpic_level0_data where 1=1"
sql_data = f"select * from cpic_level0_data where 1=1"
sql_condition = ""
if obs_id:
sql_condition = f"{sql_condition} and obs_id='{obs_id}'"
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if obs_type:
sql_condition = f"{sql_condition} and obs_type='{obs_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc0_status:
sql_condition = f"{sql_condition} and qc0_status={qc0_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename='{file_name}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0Record, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
level0_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
if id == 0 and level0_id == "":
return Result.error(message="at least define id or level0_id")
if id != 0:
return self.get_by_id(id)
if level0_id != "":
return self.get_by_level0_id(level0_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from cpic_level0_data where id=?", (id,))
if r:
return Result.ok_data(data=Level0Record().from_dict(r))
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_level0_id(self, level0_id: str):
try:
r = self.db.select_one(
"select * from cpic_level0_data where level0_id=?", (level0_id,))
if r:
return Result.ok_data(data=Level0Record().from_dict(r))
else:
return Result.error(message=f"level0_id:{level0_id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
level0_id = get_parameter(kwargs, "level0_id")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update cpic_level0_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc0_status(self, **kwargs):
''' update the status of QC0
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
'''
id = get_parameter(kwargs, "id")
level0_id = get_parameter(kwargs, "level0_id")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update cpic_level0_data set qc0_status=?, qc0_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 data record into database
parameter kwargs:
obs_id = [str]
detector_no = [str]
obs_type = [str]
obs_time = [str]
exp_time = [int]
detector_status_id = [int]
filename = [str]
file_path = [str]
return: csst_dfs_common.models.Result
'''
rec = Level0Record(
obs_id = get_parameter(kwargs, "obs_id"),
detector_no = get_parameter(kwargs, "detector_no"),
obs_type = get_parameter(kwargs, "obs_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
detector_status_id = get_parameter(kwargs, "detector_status_id"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path")
)
rec.level0_id = f"{rec.obs_id}{rec.detector_no}"
try:
existed = self.db.exists(
"select * from cpic_level0_data where filename=?",
(rec.filename,)
)
if existed:
log.warning('%s already exists' %(rec.filename, ))
return Result.error(message ='%s already exists' %(rec.filename, ))
self.db.execute(
'INSERT INTO cpic_level0_data (level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.obs_id, rec.detector_no, rec.obs_type, rec.obs_time, rec.exp_time, rec.detector_status_id, rec.filename, rec.file_path,-1,-1,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
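# Usage sketch (ids and paths hypothetical): write() derives level0_id as obs_id + detector_no
# and rejects a filename that is already registered.
#   api = Level0DataApi()
#   result = api.write(obs_id="0000123", detector_no="01", obs_type="sci",
#                      obs_time="2024-01-01 00:00:00", exp_time=150.0, detector_status_id=0,
#                      filename="CSST_CPIC_0000123_01_raw.fits",
#                      file_path="/data/CSST_CPIC_0000123_01_raw.fits")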
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.cpic import Level0PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0PrcApi(object):
def __init__(self, sub_system = "cpic"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 procedure records from database
parameter kwargs:
level0_id: [str]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from cpic_level0_prc"
sql_condition = f"where level0_id='{level0_id}'"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from cpic_level0_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update cpic_level0_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 procedure record into database
parameter kwargs:
level0_id : [str]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level0PrcRecord(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO cpic_level0_prc (level0_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level0_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
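# Usage sketch (values hypothetical): record one pipeline run for a level0 frame,
# then query it back by level0_id and pipeline_id.
#   api = Level0PrcApi()
#   api.write(level0_id="000012301", pipeline_id="P1", prc_module="bias_subtract",
#             params_file_path="/pipeline/params.toml", prc_status=0,
#             prc_time="2024-01-01 01:00:00", result_file_path="/pipeline/out.fits")
#   result = api.find(level0_id="000012301", pipeline_id="P1")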
......@@ -6,15 +6,14 @@ import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.mci import CalMergeRecord
from csst_dfs_commons.models.facility import CalMergeRecord
from csst_dfs_commons.models.common import from_dict_list
from .level0 import Level0DataApi
log = logging.getLogger('csst')
class CalMergeApi(object):
def __init__(self, sub_system = "mci"):
self.sub_system = sub_system
def __init__(self):
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
self.level0Api = Level0DataApi()
......@@ -36,14 +35,14 @@ class CalMergeApi(object):
if level0_data.data is None:
return Result.error(message = "level0 data [%s]not found"%(level0_id))
sql_data = f"select * from mci_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
sql_data = f"select * from t_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
sql_data = f"select * from mci_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
sql_data = f"select * from t_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
r = self.db.select_one(sql_data)
if r:
......@@ -79,8 +78,8 @@ class CalMergeApi(object):
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from mci_cal_merge where 1=1"
sql_data = f"select * from mci_cal_merge where 1=1"
sql_count = "select count(*) as c from t_cal_merge where 1=1"
sql_data = f"select * from t_cal_merge where 1=1"
sql_condition = ""
if detector_no:
......@@ -139,10 +138,10 @@ class CalMergeApi(object):
def get_by_id(self, iid: int):
try:
r = self.db.select_one(
"select * from mci_cal_merge where id=?", (iid,))
"select * from t_cal_merge where id=?", (iid,))
if r:
sql_get_level0_id = f"select level0_id from mci_cal2level0 where merge_id={r['id']}"
sql_get_level0_id = f"select level0_id from t_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
......@@ -159,10 +158,10 @@ class CalMergeApi(object):
def get_by_cal_id(self, cal_id: str):
try:
r = self.db.select_one(
"select * from mci_cal_merge where cal_id=?", (cal_id,))
"select * from t_cal_merge where cal_id=?", (cal_id,))
if r:
sql_get_level0_id = f"select level0_id from mci_cal2level0 where merge_id={r['id']}"
sql_get_level0_id = f"select level0_id from t_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
......@@ -198,7 +197,7 @@ class CalMergeApi(object):
try:
self.db.execute(
'update mci_cal_merge set qc1_status=?, qc1_time=? where id=?',
'update t_cal_merge set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
......@@ -230,14 +229,14 @@ class CalMergeApi(object):
try:
existed = self.db.exists(
"select * from mci_cal_merge where id=?",
"select * from t_cal_merge where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update mci_cal_merge set prc_status=?, prc_time=? where id=?',
'update t_cal_merge set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
......@@ -279,14 +278,14 @@ class CalMergeApi(object):
)
try:
self.db.execute(
'INSERT INTO mci_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
'INSERT INTO t_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.cal_id, rec.detector_no, rec.ref_type, rec.obs_time, rec.exp_time, rec.filename, rec.file_path,rec.prc_status,rec.prc_time,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
sql_level0_ids = "insert into mci_cal2level0 (merge_id,level0_id) values "
sql_level0_ids = "insert into t_cal2level0 (merge_id,level0_id) values "
values = ["(%s,%s)"%(rec.id,i) for i in rec.level0_ids]
_ = self.db.execute(sql_level0_ids + ",".join(values))
......
......@@ -13,8 +13,7 @@ from .ingest import ingest_one
log = logging.getLogger('csst')
class Level0DataApi(object):
def __init__(self, sub_system = "ifs"):
self.sub_system = sub_system
def __init__(self):
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
......@@ -38,20 +37,21 @@ class Level0DataApi(object):
obs_id = get_parameter(kwargs, "obs_id")
detector_no = get_parameter(kwargs, "detector_no")
obs_type = get_parameter(kwargs, "obs_type")
object_name = get_parameter(kwargs, "object_name")
version = get_parameter(kwargs, "version")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc0_status = get_parameter(kwargs, "qc0_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
ra_obj = get_parameter(kwargs, "ra_obj", None)
dec_obj = get_parameter(kwargs, "dec_obj", None)
radius = get_parameter(kwargs, "radius", 0)
object_name = get_parameter(kwargs, "object_name")
version = get_parameter(kwargs, "version", None)
limit = get_parameter(kwargs, "limit", 0)
sql_count = 'select count(*) as c from ifs_level0_data d left join ifs_level0_header h on d.id=h.id where 1=1'
sql_data = 'select d.* from ifs_level0_data d left join ifs_level0_header h on d.id=h.id where 1=1'
sql_count = 'select count(*) as c from t_level0_data d left join t_level0_header h on d.id=h.id where 1=1'
sql_data = 'select d.* from t_level0_data d left join t_level0_header h on d.id=h.id where 1=1'
sql_condition = ""
if obs_id:
......@@ -105,6 +105,7 @@ class Level0DataApi(object):
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
obs_type = get_parameter(kwargs, "obs_type")
if id == 0 and level0_id == "":
return Result.error(message="at least define id or level0_id")
......@@ -112,15 +113,15 @@ class Level0DataApi(object):
if id != 0:
return self.get_by_id(id)
if level0_id != "":
return self.get_by_level0_id(level0_id)
return self.get_by_level0_id(level0_id, obs_type)
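# Note: after this change a lookup by level0_id also needs the observation type,
# e.g. (values hypothetical) api.get(level0_id="000012301", obs_type="sci").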
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from ifs_level0_data where id=?", (id,))
"select * from t_level0_data where id=?", (id,))
if r:
data = Level0Record().from_dict(r)
data.header = self.db.select_one("select * from ifs_level0_header where id=?", (id,))
data.header = self.db.select_one("select * from t_level0_header where id=?", (id,))
return Result.ok_data(data = data)
else:
return Result.error(message=f"id:{id} not found")
......@@ -128,13 +129,13 @@ class Level0DataApi(object):
log.error(e)
return Result.error(message=str(e))
def get_by_level0_id(self, level0_id: str):
def get_by_level0_id(self, level0_id: str, obs_type: str):
try:
r = self.db.select_one(
"select * from ifs_level0_data where level0_id=?", (level0_id,))
"select * from t_level0_data where level0_id=? and obs_type=?", (level0_id, obs_type,))
if r:
data = Level0Record().from_dict(r)
data.header = self.db.select_one("select * from msc_level0_header where id=?", (data.id,))
data.header = self.db.select_one("select * from t_level0_header where id=?", (data.id,))
return Result.ok_data(data = data)
else:
return Result.error(message=f"level0_id:{level0_id} not found")
......@@ -154,7 +155,9 @@ class Level0DataApi(object):
'''
id = get_parameter(kwargs, "id")
level0_id = get_parameter(kwargs, "level0_id")
result = self.get(id = id, level0_id = level0_id)
obs_type = get_parameter(kwargs, "obs_type")
result = self.get(id = id, level0_id = level0_id, obs_type = obs_type)
if not result.success:
return Result.error(message="not found")
......@@ -163,7 +166,7 @@ class Level0DataApi(object):
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update ifs_level0_data set prc_status=?, prc_time=? where id=?',
'update t_level0_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
......@@ -183,7 +186,8 @@ class Level0DataApi(object):
'''
id = get_parameter(kwargs, "id")
level0_id = get_parameter(kwargs, "level0_id")
result = self.get(id = id, level0_id = level0_id)
obs_type = get_parameter(kwargs, "obs_type")
result = self.get(id = id, level0_id = level0_id, obs_type = obs_type)
if not result.success:
return Result.error(message="not found")
......@@ -192,7 +196,7 @@ class Level0DataApi(object):
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update ifs_level0_data set qc0_status=?, qc0_time=? where id=?',
'update t_level0_data set qc0_status=?, qc0_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
......
......@@ -12,8 +12,7 @@ from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0PrcApi(object):
def __init__(self, sub_system = "ifs"):
self.sub_system = sub_system
def __init__(self):
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
......@@ -34,7 +33,7 @@ class Level0PrcApi(object):
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from ifs_level0_prc"
sql_data = f"select * from t_level0_prc"
sql_condition = f"where level0_id='{level0_id}'"
if pipeline_id:
......@@ -66,14 +65,14 @@ class Level0PrcApi(object):
try:
existed = self.db.exists(
"select * from ifs_level0_prc where id=?",
"select * from t_level0_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update ifs_level0_prc set prc_status=?, prc_time=? where id=?',
'update t_level0_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
......@@ -109,7 +108,7 @@ class Level0PrcApi(object):
)
try:
self.db.execute(
'INSERT INTO ifs_level0_prc (level0_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
'INSERT INTO t_level0_prc (level0_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level0_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
......
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.ifs import CalMergeRecord
from csst_dfs_commons.models.common import from_dict_list
from .level0 import Level0DataApi
log = logging.getLogger('csst')
class CalMergeApi(object):
def __init__(self, sub_system = "ifs"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
self.level0Api = Level0DataApi()
def get_latest_by_l0(self, **kwargs):
''' retrieve calibration merge records from database by level0 data
:param kwargs: Parameter dictionary, key items support:
level0_id: [str],
ref_type: [str]
:returns: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
ref_type = get_parameter(kwargs, "ref_type")
level0_data = self.level0Api.get_by_level0_id(level0_id)
if level0_data.data is None:
return Result.error(message = "level0 data [%s]not found"%(level0_id))
sql_data = f"select * from ifs_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
sql_data = f"select * from ifs_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
return Result.error(message = "not found")
except Exception as e:
return Result.error(message=str(e))
def find(self, **kwargs):
''' retrieve calibration merge records from database
parameter kwargs:
detector_no: [str]
ref_type: [str]
obs_time: (start,end)
qc1_status : [int]
prc_status : [int]
file_name: [str]
limit: maximum number of records returned, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
detector_no = get_parameter(kwargs, "detector_no")
ref_type = get_parameter(kwargs, "ref_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from ifs_cal_merge where 1=1"
sql_data = f"select * from ifs_cal_merge where 1=1"
sql_condition = ""
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if ref_type:
sql_condition = f"{sql_condition} and ref_type='{ref_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename={file_name}"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
log.info(sql_count)
log.info(sql_data)
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(CalMergeRecord, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
cal_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
if id == 0 and cal_id == "":
return Result.error(message="at least define id or cal_id")
if id != 0:
return self.get_by_id(id)
if cal_id != "":
return self.get_by_cal_id(cal_id)
def get_by_id(self, iid: int):
try:
r = self.db.select_one(
"select * from ifs_cal_merge where id=?", (iid,))
if r:
sql_get_level0_id = f"select level0_id from ifs_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{iid} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_cal_id(self, cal_id: str):
try:
r = self.db.select_one(
"select * from ifs_cal_merge where cal_id=?", (cal_id,))
if r:
sql_get_level0_id = f"select level0_id from ifs_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update ifs_cal_merge set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from ifs_cal_merge where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update ifs_cal_merge set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a calibration merge record into database
parameter kwargs:
cal_id : [str]
detector_no : [str]
ref_type : [str]
obs_time : [str]
exp_time : [float]
prc_status : [int]
prc_time : [str]
filename : [str]
file_path : [str]
level0_ids : [list]
return csst_dfs_common.models.Result
'''
rec = CalMergeRecord(
id = 0,
cal_id = get_parameter(kwargs, "cal_id"),
detector_no = get_parameter(kwargs, "detector_no"),
ref_type = get_parameter(kwargs, "ref_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
level0_ids = get_parameter(kwargs, "level0_ids", [])
)
try:
self.db.execute(
'INSERT INTO ifs_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.cal_id, rec.detector_no, rec.ref_type, rec.obs_time, rec.exp_time, rec.filename, rec.file_path,rec.prc_status,rec.prc_time,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
sql_level0_ids = "insert into ifs_cal2level0 (merge_id,level0_id) values "
values = ["(%s,%s)"%(rec.id,i) for i in rec.level0_ids]
_ = self.db.execute(sql_level0_ids + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
\ No newline at end of file
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.mci import Level0Record
from csst_dfs_commons.models.common import from_dict_list
from .ingest import ingest_one
log = logging.getLogger('csst')
class Level0DataApi(object):
def __init__(self, sub_system = "mci"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 records from database
parameter kwargs:
obs_id: [str],
detector_no: [str],
obs_type: [str],
object_name: [str],
obs_time : (start, end),
qc0_status : [int],
prc_status : [int],
file_name: [str],
version: [str],
ra_obj: [float],
dec_obj: [float],
radius: [float],
limit: maximum number of records returned, default 0: no limit
return: csst_dfs_common.models.Result
'''
try:
obs_id = get_parameter(kwargs, "obs_id")
detector_no = get_parameter(kwargs, "detector_no")
obs_type = get_parameter(kwargs, "obs_type")
object_name = get_parameter(kwargs, "object_name")
version = get_parameter(kwargs, "version")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc0_status = get_parameter(kwargs, "qc0_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
ra_obj = get_parameter(kwargs, "ra_obj", None)
dec_obj = get_parameter(kwargs, "dec_obj", None)
radius = get_parameter(kwargs, "radius", 0)
limit = get_parameter(kwargs, "limit", 0)
sql_count = 'select count(*) as c from mci_level0_data d left join mci_level0_header h on d.id=h.id where 1=1'
sql_data = 'select d.* from mci_level0_data d left join mci_level0_header h on d.id=h.id where 1=1'
sql_condition = ""
if obs_id:
sql_condition = f"{sql_condition} and d.obs_id='{obs_id}'"
if detector_no:
sql_condition = f"{sql_condition} and d.detector_no='{detector_no}'"
if obs_type:
sql_condition = f"{sql_condition} and d.obs_type='{obs_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and d.obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and d.obs_time <='{exp_time_end}'"
if qc0_status:
sql_condition = f"{sql_condition} and d.qc0_status={qc0_status}"
if prc_status:
sql_condition = f"{sql_condition} and d.prc_status={prc_status}"
if object_name:
sql_condition = f"{sql_condition} and h.object_name='{object_name}'"
if version:
sql_condition = f"{sql_condition} and h.version='{version}'"
if ra_obj:
sql_condition = f"{sql_condition} and (h.ra <= {ra_obj + 2*radius} and h.ra >={ra_obj - 2*radius})"
if dec_obj:
sql_condition = f"{sql_condition} and (h.dec <= {dec_obj + 2*radius} and h.ra >={dec_obj - 2*radius})"
if file_name:
sql_condition = f" and filename='{file_name}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0Record, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
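# Usage sketch (values hypothetical): find() can also filter on header fields such as
# object_name and version, and on a box of half-width 2*radius around (ra_obj, dec_obj).
#   api = Level0DataApi()
#   result = api.find(obs_type="sci", object_name="NGC1234",
#                     ra_obj=150.1, dec_obj=2.2, radius=0.5, limit=10)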
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
level0_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
if id == 0 and level0_id == "":
return Result.error(message="at least define id or level0_id")
if id != 0:
return self.get_by_id(id)
if level0_id != "":
return self.get_by_level0_id(level0_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from mci_level0_data where id=?", (id,))
if r:
data = Level0Record().from_dict(r)
data.header = self.db.select_one("select * from msc_level0_header where id=?", (data.id,))
return Result.ok_data(data = data)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_level0_id(self, level0_id: str):
try:
r = self.db.select_one(
"select * from mci_level0_data where level0_id=?", (level0_id,))
if r:
data = Level0Record().from_dict(r)
data.header = self.db.select_one("select * from msc_level0_header where id=?", (data.id,))
return Result.ok_data(data = data)
else:
return Result.error(message=f"level0_id:{level0_id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
level0_id = get_parameter(kwargs, "level0_id")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update mci_level0_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc0_status(self, **kwargs):
''' update the status of QC0
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
'''
id = get_parameter(kwargs, "id")
level0_id = get_parameter(kwargs, "level0_id")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update mci_level0_data set qc0_status=?, qc0_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 data record into database
parameter kwargs:
file_path = [str],
copyfiles = [boolean]
return: csst_dfs_common.models.Result
'''
file_path = get_parameter(kwargs, "file_path")
copyfiles = get_parameter(kwargs, "copyfiles", False)
try:
rec = ingest_one(file_path, self.db, copyfiles)
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
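# Usage sketch (path hypothetical): this write() ingests a level0 FITS file directly;
# the record fields are taken from the file by ingest_one, and copyfiles controls
# whether the file is copied under the local archive root.
#   api = Level0DataApi()
#   result = api.write(file_path="/data/incoming/CSST_MCI_0000123_01_raw.fits", copyfiles=True)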
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.mci import Level0PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0PrcApi(object):
def __init__(self, sub_system = "mci"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 procedure records from database
parameter kwargs:
level0_id: [str]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from mci_level0_prc"
sql_condition = f"where level0_id='{level0_id}'"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from mci_level0_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update mci_level0_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 procedure record into database
parameter kwargs:
level0_id : [str]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level0PrcRecord(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO mci_level0_prc (level0_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level0_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
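# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the DFS API itself):
# querying the processing history of one level0 frame. The level0_id and
# pipeline_id values are placeholders; a populated mci_level0_prc table is
# assumed.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    prc_api = Level0PrcApi()
    result = prc_api.find(level0_id="000001101", pipeline_id="P1")
    if result.success:
        print("processing records:", result.data)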
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.msc import CalMergeRecord
from csst_dfs_commons.models.common import from_dict_list
from .level0 import Level0DataApi
log = logging.getLogger('csst')
class CalMergeApi(object):
def __init__(self, sub_system = "msc"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
self.level0Api = Level0DataApi()
def get_latest_by_l0(self, **kwargs):
''' retrieve calibration merge records from database by level0 data
:param kwargs: Parameter dictionary, key items support:
level0_id: [str],
ref_type: [str]
:returns: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
ref_type = get_parameter(kwargs, "ref_type")
level0_data = self.level0Api.get_by_level0_id(level0_id)
if level0_data.data is None:
return Result.error(message = "level0 data [%s] not found" % (level0_id))
sql_data = f"select * from msc_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
sql_data = f"select * from msc_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
return Result.error(message = "not found")
except Exception as e:
return Result.error(message=str(e))
def find(self, **kwargs):
''' retrieve calibration merge records from database
parameter kwargs:
detector_no: [str]
ref_type: [str]
obs_time: (start,end)
qc1_status : [int]
prc_status : [int]
file_name: [str]
limit: limits the number of returned records; default 0 means no limit
return: csst_dfs_common.models.Result
'''
try:
detector_no = get_parameter(kwargs, "detector_no")
ref_type = get_parameter(kwargs, "ref_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from msc_cal_merge where 1=1"
sql_data = f"select * from msc_cal_merge where 1=1"
sql_condition = ""
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if ref_type:
sql_condition = f"{sql_condition} and ref_type='{ref_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename={file_name}"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
log.info(sql_count)
log.info(sql_data)
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(CalMergeRecord, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
cal_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
if id == 0 and cal_id == "":
return Result.error(message="at least define id or cal_id")
if id != 0:
return self.get_by_id(id)
if cal_id != "":
return self.get_by_cal_id(cal_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from msc_cal_merge where id=?", (id,))
if r:
sql_get_level0_id = f"select level0_id from msc_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_cal_id(self, cal_id: str):
try:
r = self.db.select_one(
"select * from msc_cal_merge where cal_id=?", (cal_id,))
if r:
sql_get_level0_id = f"select level0_id from msc_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of QC1
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update msc_cal_merge set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from msc_cal_merge where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update msc_cal_merge set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a calibration merge record into database
parameter kwargs:
cal_id : [str]
detector_no : [str]
ref_type : [str]
obs_time : [str]
exp_time : [float]
prc_status : [int]
prc_time : [str]
filename : [str]
file_path : [str]
level0_ids : [list]
return csst_dfs_common.models.Result
'''
rec = CalMergeRecord(
id = 0,
cal_id = get_parameter(kwargs, "cal_id"),
detector_no = get_parameter(kwargs, "detector_no"),
ref_type = get_parameter(kwargs, "ref_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
level0_ids = get_parameter(kwargs, "level0_ids", [])
)
try:
self.db.execute(
'INSERT INTO msc_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.cal_id, rec.detector_no, rec.ref_type, rec.obs_time, rec.exp_time, rec.filename, rec.file_path,rec.prc_status,rec.prc_time,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
sql_level0_ids = "insert into msc_cal2level0 (merge_id,level0_id) values "
values = ["(%s,%s)"%(rec.id,i) for i in rec.level0_ids]
_ = self.db.execute(sql_level0_ids + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
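# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the DFS API itself):
# registering a merged calibration product and then looking up the reference
# closest in time to a level0 exposure. All identifiers, times and paths are
# placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    cal_api = CalMergeApi()
    write_result = cal_api.write(
        cal_id="0000231",
        detector_no="01",
        ref_type="bias",
        obs_time="2021-06-04 11:12:13",
        exp_time=150.0,
        filename="msc_bias_0000231.fits",
        file_path="/path/to/msc_bias_0000231.fits",
        prc_status=3,
        prc_time="2021-06-05 11:12:13",
        level0_ids=["0000231101", "0000231102"])
    if write_result.success:
        # Pick the bias merge nearest in obs_time to this level0 exposure.
        latest = cal_api.get_latest_by_l0(level0_id="0000231101", ref_type="bias")
        print(latest.data)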
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.msc import Level0Record
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0DataApi(object):
def __init__(self, sub_system = "msc"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 records from database
parameter kwargs:
obs_id: [str]
detector_no: [str]
obs_type: [str]
obs_time : (start, end),
qc0_status : [int],
prc_status : [int],
file_name: [str],
ra_obj: [float],
dec_obj: [float],
radius: [float],
limit: limits the number of returned records; default 0 means no limit
return: csst_dfs_common.models.Result
'''
try:
obs_id = get_parameter(kwargs, "obs_id")
detector_no = get_parameter(kwargs, "detector_no")
obs_type = get_parameter(kwargs, "obs_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc0_status = get_parameter(kwargs, "qc0_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
ra_obj = get_parameter(kwargs, "ra_obj", None)
dec_obj = get_parameter(kwargs, "dec_obj", None)
radius = get_parameter(kwargs, "radius", 0)
sql_count = "select count(*) as c from msc_level0_data d left join msc_level0_header h on d.id=h.id where 1=1"
sql_data = f"select * from msc_level0_data d left join msc_level0_header h on d.id=h.id where 1=1"
sql_condition = ""
if obs_id:
sql_condition = f"{sql_condition} and d.obs_id='{obs_id}'"
if detector_no:
sql_condition = f"{sql_condition} and d.detector_no='{detector_no}'"
if obs_type:
sql_condition = f"{sql_condition} and d.obs_type='{obs_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and d.obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and d.obs_time <='{exp_time_end}'"
if qc0_status:
sql_condition = f"{sql_condition} and d.qc0_status={qc0_status}"
if prc_status:
sql_condition = f"{sql_condition} and d.prc_status={prc_status}"
if ra_obj:
sql_condition = f"{sql_condition} and (h.ra_obj <={ra_obj + 2*radius} and h.ra_obj >={ra_obj - 2*radius}"
if dec_obj:
sql_condition = f"{sql_condition} and (h.dec_obj <={dec_obj+2*radius} and h.dec_obj >={dec_obj- 2*radius}"
if file_name:
sql_condition = f" and filename='{file_name}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0Record, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
level0_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
if id == 0 and level0_id == "":
return Result.error(message="at least define id or level0_id")
if id != 0:
return self.get_by_id(id)
if level0_id != "":
return self.get_by_level0_id(level0_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from msc_level0_data where id=?", (id,))
if r:
data = Level0Record().from_dict(r)
data.header = self.db.select_one("select * from msc_level0_header where id=?", (id,))
return Result.ok_data(data = data)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_level0_id(self, level0_id: str):
try:
r = self.db.select_one(
"select * from msc_level0_data where level0_id=?", (level0_id,))
if r:
data = Level0Record().from_dict(r)
data.header = self.db.select_one("select * from msc_level0_header where id=?", (data.id,))
return Result.ok_data(data = data)
else:
return Result.error(message=f"level0_id:{level0_id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update msc_level0_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc0_status(self, **kwargs):
''' update the status of QC0
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update msc_level0_data set qc0_status=?, qc0_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 data record into database
parameter kwargs:
obs_id = [str]
detector_no = [str]
obs_type = [str]
obs_time = [str]
exp_time = [int]
detector_status_id = [int]
filename = [str]
file_path = [str]
return: csst_dfs_common.models.Result
'''
rec = Level0Record(
obs_id = get_parameter(kwargs, "obs_id"),
detector_no = get_parameter(kwargs, "detector_no"),
obs_type = get_parameter(kwargs, "obs_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
detector_status_id = get_parameter(kwargs, "detector_status_id"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path")
)
rec.level0_id = f"{rec.obs_id}{rec.detector_no}"
try:
existed = self.db.exists(
"select * from msc_level0_data where filename=?",
(rec.filename,)
)
if existed:
log.warning('%s existed' %(rec.filename, ))
return Result.error(message ='%s existed' %(rec.filename, ))
self.db.execute(
'INSERT INTO msc_level0_data (level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.obs_id, rec.detector_no, rec.obs_type, rec.obs_time, rec.exp_time, rec.detector_status_id, rec.filename, rec.file_path,-1,-1,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
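# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the DFS API itself):
# registering a level0 frame from its metadata and then flagging its QC0
# status. Values are placeholders; the msc_level0_data table is assumed to
# exist.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    api = Level0DataApi()
    write_result = api.write(
        obs_id="0000223",
        detector_no="01",
        obs_type="sci",
        obs_time="2021-06-06 11:12:13",
        exp_time=150,
        detector_status_id=0,
        filename="CSST_MSC_0000223_01_raw.fits",
        file_path="/path/to/CSST_MSC_0000223_01_raw.fits")
    if write_result.success:
        # level0_id is derived as obs_id + detector_no by write() above.
        api.update_qc0_status(level0_id=write_result.data.level0_id, status=2)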
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.msc import Level0PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0PrcApi(object):
def __init__(self, sub_system = "msc"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 procedure records from database
parameter kwargs:
level0_id: [str]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from msc_level0_prc"
sql_condition = f"where level0_id='{level0_id}'"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from msc_level0_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update msc_level0_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 procedure record into database
parameter kwargs:
level0_id : [str]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level0PrcRecord(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO msc_level0_prc (level0_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level0_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
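# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the DFS API itself):
# recording a pipeline run against a level0 frame and later updating its
# reduction status. All identifiers and paths are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    prc_api = Level0PrcApi()
    write_result = prc_api.write(
        level0_id="000022301",
        pipeline_id="P1",
        prc_module="bias-correction",
        params_file_path="/path/to/params.conf",
        prc_status=-1,
        prc_time="2021-06-06 12:00:00",
        result_file_path="/path/to/result.fits")
    if write_result.success:
        prc_api.update_proc_status(id=write_result.data.id, status=4)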
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.sls import CalMergeRecord
from csst_dfs_commons.models.common import from_dict_list
from .level0 import Level0DataApi
log = logging.getLogger('csst')
class CalMergeApi(object):
def __init__(self, sub_system = "sls"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
self.level0Api = Level0DataApi()
def get_latest_by_l0(self, **kwargs):
''' retrieve calibration merge records from database by level0 data
:param kwargs: Parameter dictionary, key items support:
level0_id: [str],
ref_type: [str]
:returns: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
ref_type = get_parameter(kwargs, "ref_type")
level0_data = self.level0Api.get_by_level0_id(level0_id)
if level0_data.data is None:
return Result.error(message = "level0 data [%s] not found" % (level0_id))
sql_data = f"select * from sls_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time >= '{level0_data.data.obs_time}' order by obs_time ASC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
sql_data = f"select * from sls_cal_merge where detector_no='{level0_data.data.detector_no}' and ref_type='{ref_type}' and obs_time <= '{level0_data.data.obs_time}' order by obs_time DESC limit 1"
r = self.db.select_one(sql_data)
if r:
rec = CalMergeRecord().from_dict(r)
return Result.ok_data(data=rec)
return Result.error(message = "not found")
except Exception as e:
return Result.error(message=str(e))
def find(self, **kwargs):
''' retrieve calibration merge records from database
parameter kwargs:
detector_no: [str]
ref_type: [str]
obs_time: (start,end)
qc1_status : [int]
prc_status : [int]
file_name: [str]
limit: limits the number of returned records; default 0 means no limit
return: csst_dfs_common.models.Result
'''
try:
detector_no = get_parameter(kwargs, "detector_no")
ref_type = get_parameter(kwargs, "ref_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc1_status = get_parameter(kwargs, "qc1_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from sls_cal_merge where 1=1"
sql_data = f"select * from sls_cal_merge where 1=1"
sql_condition = ""
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if ref_type:
sql_condition = f"{sql_condition} and ref_type='{ref_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc1_status:
sql_condition = f"{sql_condition} and qc1_status={qc1_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename={file_name}"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
log.info(sql_count)
log.info(sql_data)
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(CalMergeRecord, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
cal_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
if id == 0 and cal_id == "":
return Result.error(message="at least define id or cal_id")
if id != 0:
return self.get_by_id(id)
if cal_id != "":
return self.get_by_cal_id(cal_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from sls_cal_merge where id=?", (id,))
if r:
sql_get_level0_id = f"select level0_id from sls_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_cal_id(self, cal_id: str):
try:
r = self.db.select_one(
"select * from sls_cal_merge where cal_id=?", (cal_id,))
if r:
sql_get_level0_id = f"select level0_id from sls_cal2level0 where merge_id={r['id']}"
_, records = self.db.select_many(sql_get_level0_id)
level0_ids = [r["level0_id"] for r in records]
rec = CalMergeRecord().from_dict(r)
rec.level0_ids = level0_ids
return Result.ok_data(data=rec)
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc1_status(self, **kwargs):
''' update the status of QC1
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update sls_cal_merge set qc1_status=?, qc1_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
cal_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
cal_id = get_parameter(kwargs, "cal_id", "")
result = self.get(id = id, cal_id = cal_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from sls_cal_merge where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update sls_cal_merge set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a calibration merge record into database
parameter kwargs:
cal_id : [str]
detector_no : [str]
ref_type : [str]
obs_time : [str]
exp_time : [float]
prc_status : [int]
prc_time : [str]
filename : [str]
file_path : [str]
level0_ids : [list]
return csst_dfs_common.models.Result
'''
rec = CalMergeRecord(
id = 0,
cal_id = get_parameter(kwargs, "cal_id"),
detector_no = get_parameter(kwargs, "detector_no"),
ref_type = get_parameter(kwargs, "ref_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
level0_ids = get_parameter(kwargs, "level0_ids", [])
)
try:
self.db.execute(
'INSERT INTO sls_cal_merge (cal_id,detector_no,ref_type,obs_time,exp_time,filename,file_path,prc_status,prc_time,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?)',
(rec.cal_id, rec.detector_no, rec.ref_type, rec.obs_time, rec.exp_time, rec.filename, rec.file_path,rec.prc_status,rec.prc_time,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
sql_level0_ids = "insert into sls_cal2level0 (merge_id,level0_id) values "
values = ["(%s,%s)"%(rec.id,i) for i in rec.level0_ids]
_ = self.db.execute(sql_level0_ids + ",".join(values))
self.db.end()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
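# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the DFS API itself):
# fetching a merged calibration record by its cal_id and listing the level0
# frames it was built from. The cal_id value is a placeholder.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    cal_api = CalMergeApi()
    result = cal_api.get(cal_id="0000231")
    if result.success:
        print("calibration record:", result.data)
        print("built from level0 frames:", result.data.level0_ids)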
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.sls import Level0Record
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0DataApi(object):
def __init__(self, sub_system = "sls"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 records from database
parameter kwargs:
obs_id: [str]
detector_no: [str]
obs_type: [str]
obs_time : (start, end),
qc0_status : [int],
prc_status : [int],
file_name: [str]
limit: limits the number of returned records; default 0 means no limit
return: csst_dfs_common.models.Result
'''
try:
obs_id = get_parameter(kwargs, "obs_id")
detector_no = get_parameter(kwargs, "detector_no")
obs_type = get_parameter(kwargs, "obs_type")
exp_time_start = get_parameter(kwargs, "obs_time", [None, None])[0]
exp_time_end = get_parameter(kwargs, "obs_time", [None, None])[1]
qc0_status = get_parameter(kwargs, "qc0_status")
prc_status = get_parameter(kwargs, "prc_status")
file_name = get_parameter(kwargs, "file_name")
limit = get_parameter(kwargs, "limit", 0)
sql_count = "select count(*) as c from sls_level0_data where 1=1"
sql_data = f"select * from sls_level0_data where 1=1"
sql_condition = ""
if obs_id:
sql_condition = f"{sql_condition} and obs_id='{obs_id}'"
if detector_no:
sql_condition = f"{sql_condition} and detector_no='{detector_no}'"
if obs_type:
sql_condition = f"{sql_condition} and obs_type='{obs_type}'"
if exp_time_start:
sql_condition = f"{sql_condition} and obs_time >='{exp_time_start}'"
if exp_time_end:
sql_condition = f"{sql_condition} and obs_time <='{exp_time_end}'"
if qc0_status:
sql_condition = f"{sql_condition} and qc0_status={qc0_status}"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
if file_name:
sql_condition = f" and filename='{file_name}'"
sql_count = f"{sql_count} {sql_condition}"
sql_data = f"{sql_data} {sql_condition}"
if limit > 0:
sql_data = f"{sql_data} limit {limit}"
totalCount = self.db.select_one(sql_count)
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0Record, records)).append("totalCount", totalCount['c'])
except Exception as e:
return Result.error(message=str(e))
def get(self, **kwargs):
''' fetch a record from database
parameter kwargs:
id : [int],
level0_id : [str]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
if id == 0 and level0_id == "":
return Result.error(message="at least define id or level0_id")
if id != 0:
return self.get_by_id(id)
if level0_id != "":
return self.get_by_level0_id(level0_id)
def get_by_id(self, id: int):
try:
r = self.db.select_one(
"select * from sls_level0_data where id=?", (id,))
if r:
return Result.ok_data(data=Level0Record().from_dict(r))
else:
return Result.error(message=f"id:{id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def get_by_level0_id(self, level0_id: str):
try:
r = self.db.select_one(
"select * from sls_level0_data where level0_id=?", (level0_id,))
if r:
return Result.ok_data(data=Level0Record().from_dict(r))
else:
return Result.error(message=f"level0_id:{level0_id} not found")
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update sls_level0_data set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def update_qc0_status(self, **kwargs):
''' update the status of QC0
parameter kwargs:
id : [int],
level0_id : [str],
status : [int]
'''
id = get_parameter(kwargs, "id", 0)
level0_id = get_parameter(kwargs, "level0_id", "")
result = self.get(id = id, level0_id = level0_id)
if not result.success:
return Result.error(message="not found")
id = result.data.id
status = get_parameter(kwargs, "status")
try:
self.db.execute(
'update sls_level0_data set qc0_status=?, qc0_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 data record into database
parameter kwargs:
obs_id = [str]
detector_no = [str]
obs_type = [str]
obs_time = [str]
exp_time = [int]
detector_status_id = [int]
filename = [str]
file_path = [str]
return: csst_dfs_common.models.Result
'''
rec = Level0Record(
obs_id = get_parameter(kwargs, "obs_id"),
detector_no = get_parameter(kwargs, "detector_no"),
obs_type = get_parameter(kwargs, "obs_type"),
obs_time = get_parameter(kwargs, "obs_time"),
exp_time = get_parameter(kwargs, "exp_time"),
detector_status_id = get_parameter(kwargs, "detector_status_id"),
filename = get_parameter(kwargs, "filename"),
file_path = get_parameter(kwargs, "file_path")
)
rec.level0_id = f"{rec.obs_id}{rec.detector_no}"
try:
existed = self.db.exists(
"select * from sls_level0_data where filename=?",
(rec.filename,)
)
if existed:
log.warning('%s existed' %(rec.filename, ))
return Result.error(message ='%s existed' %(rec.filename, ))
self.db.execute(
'INSERT INTO sls_level0_data (level0_id, obs_id, detector_no, obs_type, obs_time, exp_time,detector_status_id, filename, file_path,qc0_status, prc_status,create_time) \
VALUES(?,?,?,?,?,?,?,?,?,?,?,?)',
(rec.level0_id, rec.obs_id, rec.detector_no, rec.obs_type, rec.obs_time, rec.exp_time, rec.detector_status_id, rec.filename, rec.file_path,-1,-1,format_time_ms(time.time()))
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))
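# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the DFS API itself):
# querying SLS level0 frames for one observation within a time window. The
# obs_id and times are placeholders; note that the status filters use truthy
# checks, so a prc_status of 0 cannot be selected this way.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    api = Level0DataApi()
    result = api.find(
        obs_id="0000223",
        obs_time=("2021-06-01 00:00:00", "2021-06-30 23:59:59"),
        prc_status=3,
        limit=10)
    if result.success:
        print("matched level0 records:", result.data)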
import os
import logging
import time, datetime
import shutil
from ..common.db import DBClient
from ..common.utils import *
from csst_dfs_commons.models import Result
from csst_dfs_commons.models.sls import Level0PrcRecord
from csst_dfs_commons.models.common import from_dict_list
log = logging.getLogger('csst')
class Level0PrcApi(object):
def __init__(self, sub_system = "sls"):
self.sub_system = sub_system
self.root_dir = os.getenv("CSST_LOCAL_FILE_ROOT", "/opt/temp/csst")
self.db = DBClient()
def find(self, **kwargs):
''' retrieve level0 procedure records from database
parameter kwargs:
level0_id: [str]
pipeline_id: [str]
prc_module: [str]
prc_status : [int]
return: csst_dfs_common.models.Result
'''
try:
level0_id = get_parameter(kwargs, "level0_id")
pipeline_id = get_parameter(kwargs, "pipeline_id")
prc_module = get_parameter(kwargs, "prc_module")
prc_status = get_parameter(kwargs, "prc_status")
sql_data = f"select * from sls_level0_prc"
sql_condition = f"where level0_id='{level0_id}'"
if pipeline_id:
sql_condition = sql_condition + " and pipeline_id='" + pipeline_id + "'"
if prc_module:
sql_condition = sql_condition + " and prc_module ='" + prc_module + "'"
if prc_status:
sql_condition = f"{sql_condition} and prc_status={prc_status}"
sql_data = f"{sql_data} {sql_condition}"
_, records = self.db.select_many(sql_data)
return Result.ok_data(data=from_dict_list(Level0PrcRecord, records)).append("totalCount", len(records))
except Exception as e:
return Result.error(message=str(e))
def update_proc_status(self, **kwargs):
''' update the status of reduction
parameter kwargs:
id : [int],
status : [int]
return csst_dfs_common.models.Result
'''
id = get_parameter(kwargs, "id")
status = get_parameter(kwargs, "status")
try:
existed = self.db.exists(
"select * from sls_level0_prc where id=?",
(id,)
)
if not existed:
log.warning('%s not found' %(id, ))
return Result.error(message ='%s not found' %(id, ))
self.db.execute(
'update sls_level0_prc set prc_status=?, prc_time=? where id=?',
(status, format_time_ms(time.time()), id)
)
self.db.end()
return Result.ok_data()
except Exception as e:
log.error(e)
return Result.error(message=str(e))
def write(self, **kwargs):
''' insert a level0 procedure record into database
parameter kwargs:
level0_id : [str]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
return csst_dfs_common.models.Result
'''
rec = Level0PrcRecord(
id = 0,
level0_id = get_parameter(kwargs, "level0_id"),
pipeline_id = get_parameter(kwargs, "pipeline_id"),
prc_module = get_parameter(kwargs, "prc_module"),
params_file_path = get_parameter(kwargs, "params_file_path"),
prc_status = get_parameter(kwargs, "prc_status", -1),
prc_time = get_parameter(kwargs, "prc_time"),
result_file_path = get_parameter(kwargs, "result_file_path")
)
try:
self.db.execute(
'INSERT INTO sls_level0_prc (level0_id,pipeline_id,prc_module, params_file_path, prc_status,prc_time,result_file_path) \
VALUES(?,?,?,?,?,?,?)',
(rec.level0_id, rec.pipeline_id, rec.prc_module, rec.params_file_path, rec.prc_status, rec.prc_time, rec.result_file_path)
)
self.db.end()
rec.id = self.db.last_row_id()
return Result.ok_data(data=rec)
except Exception as e:
log.error(e)
return Result.error(message=str(e))