Commit 924f36aa authored by Wei Shoulin

C9

parent 97625e52
Pipeline #4159 passed
......@@ -13,7 +13,7 @@ class CatalogApi(object):
:param ra: in deg
:param dec: in deg
:param radius: in deg
:param catalog_name: one of ['gaia3','','']
:param catalog_name: one of ['gaia3','sim']
:param columns: tuple of str, like ('ra','dec','phot_g_mean_mag')
:param min_mag: minimum magnitude
:param max_mag: maximum magnitude
......@@ -22,32 +22,19 @@ class CatalogApi(object):
:returns: csst_dfs_common.models.Result
'''
if catalog_name == "gaia3":
return self.gaia3_query(ra, dec, radius, columns, min_mag, max_mag, obstime, limit)
else:
return Result.error(message="%s catalog search not yet implemented" %(catalog_name, ))
if not catalog_name or catalog_name not in ('gaia3', 'sim'):
raise Exception("catalog_name is not allowed")
if not columns:
raise Exception("columns is empty")
return self.catalog_query(ra = ra,
dec = dec,
radius = radius,
catalog_name = catalog_name,
columns = columns,
min_mag = min_mag,
max_mag = max_mag,
obstime = obstime,
limit = limit)
def to_table(self, query_result):
return to_fits_table(query_result)
def gaia3_query(self, ra: float, dec: float, radius: float, columns: tuple,min_mag: float, max_mag: float, obstime: int, limit: int):
"""retrieval GAIA DR 3, all column name must be lowercase. columns specification at https://gea.esac.esa.int/archive/documentation/GDR3/Gaia_archive/chap_datamodel/sec_dm_main_source_catalogue/ssec_dm_gaia_source.html
:param ra: in deg
:param dec: in deg
:param radius: in deg
:param columns: tuple of str, like ('ra','dec','phot_g_mean_mag')
:param min_mag: minimum magnitude
:param max_mag: maximum magnitude
:param obstime: seconds
:param limit: limits the number of returned records
:returns: csst_dfs_common.models.Result
"""
try:
if not columns:
raise Exception("columns is empty")
return self.stub.gaia3_query(ra, dec, radius, columns, min_mag, max_mag, obstime, limit)
except Exception as e:
return Result.error(message=repr(e))
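For orientation, a minimal usage sketch of the unified catalog_query entry point introduced above, converting the Result to a table with to_table. It is not part of the diff; the import path and parameter values are illustrative assumptions.
# hedged sketch: import path and values are assumed, not taken from this commit
from csst_dfs_api.common.catalog import CatalogApi  # assumed module path

api = CatalogApi()
result = api.catalog_query(ra=90.0, dec=24.5, radius=0.2,
                           catalog_name='gaia3',
                           columns=('ra', 'dec', 'phot_g_mean_mag'),
                           min_mag=-1, max_mag=-1, obstime=-1, limit=10)
if result.success:
    api.to_table(result).pprint()   # pretty-print the returned rows as a table
else:
    print(result.message)           # Result is assumed to carry an error message on failure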
......@@ -131,6 +131,6 @@ def object_list_to_table(query_result):
t.add_row(tuple([rec.__getattribute__(k) for k in fields]))
return t
def get_nextId_by_prefix(prefix: str):
def get_next_id(prefix: str):
pymodule = Delegate().load(sub_module = "common.utils")
return getattr(pymodule, "get_nextId_by_prefix")(prefix)
\ No newline at end of file
return getattr(pymodule, "get_next_id")(prefix)
\ No newline at end of file
from .detector import DetectorApi
from .brick import BrickApi
from .level2producer import Level2ProducerApi
from .observation import ObservationApi
from .level0 import Level0DataApi
from .level0prc import Level0PrcApi
......
......@@ -52,14 +52,13 @@ class BrickApi(object):
'''
return self.stub.find_obs_status(**kwargs)
def find_level1_data(self, **kwargs):
''' find level1 data
def find_level1_ids(self, **kwargs):
''' find level1 ids
:param kwargs: Parameter dictionary, support:
brick_id = [int]\n
level1_id = [int]\n
module = [str]
level1_id = [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.find_level1_data(**kwargs)
\ No newline at end of file
return self.stub.find_level1_ids(**kwargs)
\ No newline at end of file
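A minimal call sketch for the renamed method, mirroring the updated test further below; the brick and level1 ids are illustrative.
api = BrickApi()
result = api.find_level1_ids(brick_id=1, level1_id=1)
if result.success:
    print(result.data)   # level1 ids linked to this brick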
......@@ -13,10 +13,11 @@ class Level0DataApi(object):
''' retrieve level0 records from database
:param kwargs: Parameter dictionary, key items support:
project_id: [str],
obs_id: [str],
module_id: [str]
detector_no: [str],
obs_type: [str],
file_type: [str],
filter: [str],
obs_time : (start, end),
qc0_status : [int],
......@@ -26,7 +27,6 @@ class Level0DataApi(object):
dec_obj: [float],
radius: [float],
object_name: [str],
version: [str],
limit: limits the number of returned records, default 0: no limit
:returns: csst_dfs_common.models.Result
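A hedged query sketch using the renamed file_type key; only keys shown in the docstring above are used, and the values mirror the updated test further below.
api = Level0DataApi()
result = api.find(obs_id='100000101', file_type='sci', limit=0)
if result.success:
    print(len(result.data), 'level0 records found')
else:
    print(result.message)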
......@@ -48,7 +48,7 @@ class Level0DataApi(object):
:param kwargs: Parameter dictionary, key items support:
id : [int],
level0_id: [str]
obs_type: [str]
file_type: [str]
:returns: csst_dfs_common.models.Result
'''
......@@ -60,7 +60,7 @@ class Level0DataApi(object):
:param kwargs: Parameter dictionary, key items support:
id : [int],
level0_id: [str],
obs_type: [str],
file_type: [str],
status : [int]
:returns: csst_dfs_common.models.Result
......@@ -73,7 +73,7 @@ class Level0DataApi(object):
:param kwargs: Parameter dictionary, key items support:
id : [int],
level0_id: [str],
obs_type: [str],
file_type: [str],
status : [int]
:returns: csst_dfs_common.models.Result
......@@ -86,7 +86,7 @@ class Level0DataApi(object):
:param kwargs: Parameter dictionary, key items support:
obs_id = [str],
detector_no = [str],
obs_type = [str],
file_type = [str],
obs_time = [str],
exp_time = [int],
detector_status_id = [int],
......
......@@ -37,13 +37,13 @@ class Level0PrcApi(object):
''' insert a level0 procedure record into database
:param kwargs: Parameter dictionary, key items support:
level0_id : [str]
level0_id : [int]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
run_id : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
message : [str]
:returns: csst_dfs_common.models.Result
'''
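A hedged write sketch for the revised Level0PrcApi; the keys mirror the updated docstring and test below, and all values are illustrative.
api = Level0PrcApi()
rec = api.write(level0_id='134',   # the new docstring lists [int]; the test below still passes a string
                pipeline_id='P1',
                prc_module='QC0',
                run_id='R1',
                prc_status=3,
                prc_time='2021-06-04 11:12:13',
                message='test')
print(rec)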
......
......@@ -40,10 +40,10 @@ class Level1PrcApi(object):
level1_id : [int]
pipeline_id : [str]
prc_module : [str]
params_file_path : [str]
run_id : [str]
prc_status : [int]
prc_time : [str]
result_file_path : [str]
message : [str]
:returns: csst_dfs_common.models.Result
'''
......
import random
from ..common.delegate import Delegate
from csst_dfs_commons.models.errors import CSSTGenericException
class Level2ProducerApi(object):
"""
Level2Producer Operation API
"""
def __init__(self):
self.pymodule = Delegate().load(sub_module = "facility")
self.stub = getattr(self.pymodule, "Level2ProducerApi")()
def register(self, **kwargs):
''' register a Level2Producer data record into database
:param kwargs: Parameter dictionary, key items support:
name = [str]\n
gitlink = [str]\n
paramfiles = [str]\n
priority = [int]\n
pre_producers = list[int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.register(**kwargs)
def find(self, **kwargs):
''' retrieve Level2Producer records from database
:param kwargs: Parameter dictionary, key items support:
key: [str]
limit: limits the number of returned records, default 0: no limit
:returns: csst_dfs_common.models.Result
'''
return self.stub.find(**kwargs)
def get(self, **kwargs):
''' fetch a record from database
:param kwargs: Parameter dictionary, key items support:
id : [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.get(**kwargs)
def find_nexts(self, **kwargs):
''' retrieve Level2Producer records from database
:param kwargs: Parameter dictionary, key items support:
id : [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.find_nexts(**kwargs)
def find_start(self, **kwargs):
''' retrieve Level2Producer records from database
:param kwargs: Parameter dictionary, key items support:
key : [str]
:returns: csst_dfs_common.models.Result
'''
return self.stub.find_start(**kwargs)
def update(self, **kwargs):
''' update a Level2Producer
:param kwargs: Parameter dictionary, key items support:
id : [int]\n
name = [str]\n
gitlink = [str]\n
paramfiles = [str]\n
priority = [int]\n
pre_producers = list[int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.update(**kwargs)
def delete(self, **kwargs):
''' delete a Level2Producer data
:param kwargs: Parameter dictionary, key items support:
id = [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.delete(**kwargs)
def new_job(self, **kwargs):
''' create a new Level2Producer job
:param kwargs: Parameter dictionary, key items support:
name = [str]
dag = [str]
:returns: csst_dfs_common.models.Result
'''
return self.stub.new_job(**kwargs)
def get_job(self, **kwargs):
''' fetch a record from database
:param kwargs: Parameter dictionary, key items support:
id : [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.get_job(**kwargs)
def update_job(self, **kwargs):
''' update a Level2Producer Job
:param kwargs: Parameter dictionary, key items support:
id = [int]
name = [str]
dag = [str]
status = [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.update_job(**kwargs)
def new_running(self, **kwargs):
''' insert a Level2ProducerRuningRecord into the database
:param kwargs: Parameter dictionary, key items support:
job_id = [int]\n
producer_id = [int]\n
brick_id = [int]\n
start_time = [str]\n
end_time = [str]\n
prc_status = [int]\n
prc_result = [str]
:returns: csst_dfs_common.models.Result
'''
return self.stub.new_running(**kwargs)
def get_running(self, **kwargs):
''' fetch a record from database
:param kwargs: Parameter dictionary, key items support:
id : [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.get_running(**kwargs)
def update_running(self, **kwargs):
''' update a Level2ProducerRuningRecord
:param kwargs: Parameter dictionary, key items support:
id = [int]\n
job_id = [int]\n
producer_id = [int]\n
brick_id = [int]\n
start_time = [str]\n
end_time = [str]\n
prc_status = [int]\n
prc_result = [str]
:returns: csst_dfs_common.models.Result
'''
return self.stub.update_running(**kwargs)
def find_running(self, **kwargs):
''' find Level2ProducerRuningRecord data
:param kwargs: Parameter dictionary, key items support:
job_id = [int]\n
producer_id = [int]\n
brick_id = [int]\n
prc_status = [int]\n
create_time : (start, end)\n
limit = [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.find_running(**kwargs)
def make_graph(self, start_producer_id, fig_path = None):
    ''' build the producer dependency graph starting from one producer
    :param start_producer_id: id of the root producer
    :param fig_path: optional path; if given, the graph is rendered to a PNG file
    :returns: list of (producer_id, next_producer_id) edges
    '''
    start_node = self.get(id = start_producer_id)
    if not start_node.success:
        raise CSSTGenericException("start node not found")

    def get_next(pre_node, node_level_x, node_level_y):
        # collect the edges from this producer to its direct successors
        the_nodes = self.find_nexts(id = pre_node.id)
        graph_name_edges = [(pre_node.name, n.name) for n in the_nodes.data]
        graph_id_edges = [(pre_node.id, n.id) for n in the_nodes.data]
        # layout position: x grows with recursion depth, y is jittered to reduce overlap
        pos = {pre_node.name: (node_level_x, node_level_y)}
        for node in the_nodes.data:
            sub_id_edges, sub_name_edges, sub_pos = get_next(node, node_level_x + 1, random.randint(-3, 3))
            graph_id_edges.extend(sub_id_edges)
            graph_name_edges.extend(sub_name_edges)
            pos.update(sub_pos)
        return graph_id_edges, graph_name_edges, pos

    graph_id_edges, graph_name_edges, pos = get_next(start_node.data, 0, 0)
    if fig_path:
        # optional rendering; networkx and matplotlib are only imported when a figure is requested
        import networkx as nx
        from matplotlib import pyplot as plt
        g1 = nx.DiGraph()
        vertex_list = list(set([str(i) for e in graph_name_edges for i in e]))
        g1.add_nodes_from(vertex_list)
        g1.add_edges_from(graph_name_edges)
        plt.xlim(-1, 8)
        plt.ylim(-4, 4)
        plt.tight_layout()
        nx.draw(
            g1,
            pos = pos,
            node_color = 'orange',
            edge_color = 'black',
            font_size = 12,
            node_size = 360,
            with_labels = True
        )
        plt.savefig(fig_path, format="PNG")
        plt.clf()
    return graph_id_edges
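To make the Level2Producer workflow concrete, a hedged end-to-end sketch follows: register a producer, inspect its successors, and render the dependency graph. The git link, file names, and the returned attribute names (data.id) are assumptions for illustration only.
api = Level2ProducerApi()
reg = api.register(name='mbi-cat',
                   gitlink='https://example.org/csst/mbi-cat.git',  # hypothetical repository
                   paramfiles='cat.toml',
                   priority=1,
                   pre_producers=[])
if reg.success:
    nexts = api.find_nexts(id=reg.data.id)          # direct successors of this producer
    edges = api.make_graph(start_producer_id=reg.data.id, fig_path='/tmp/producers.png')
    print(edges)                                    # list of (producer_id, next_producer_id) edges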
......@@ -35,31 +35,6 @@ class ObservationApi(object):
'''
return self.stub.get(**kwargs)
def update_proc_status(self, **kwargs):
''' update the status of reduction
:param kwargs: Parameter dictionary, key items support:
id = [int],
obs_id = [str],
status = [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.update_proc_status(**kwargs)
def update_qc0_status(self, **kwargs):
''' update the status of QC0
:param kwargs: Parameter dictionary, key items support:
id = [int],
obs_id = [str],
status = [int]
:returns: csst_dfs_common.models.Result
'''
return self.stub.update_qc0_status(**kwargs)
def write(self, **kwargs):
''' insert an observation record into the database
......
from .level2 import Level2DataApi
\ No newline at end of file
from ..common.delegate import Delegate
class Level2DataApi(object):
class Level1DataApi(object):
"""
Level1 Data Operation Class
"""
def __init__(self):
self.pymodule = Delegate().load(sub_module = "hstdm")
self.stub = getattr(self.pymodule, "Level2DataApi")()
self.pymodule = Delegate().load(sub_module = "facility")
self.stub = getattr(self.pymodule, "Level1DataApi")()
def find(self, **kwargs):
''' retrieve level2 spectra records from database
''' retrieve level1 records from database
:param kwargs: Parameter dictionary, key items support:
obs_id: [str]
level0_id: [str]
level1_id: [int]
project_id: [int]
file_type: [str]
create_time : (start, end),
qc2_status : [int],
prc_status : [int],
module_id: [str]
data_type: [str]
create_time : (start, end)
qc1_status : [int]
prc_status : [int]
filename: [str]
ra_cen: float = None
dec_cen: float = None,
radius_cen: float = None
pipeline_id: [str]
build: [int]
detector_no: [str]
filter: [str]
object_name: [str]
limit: limits the number of returned records, default 0: no limit
:returns: csst_dfs_common.models.Result
'''
return self.stub.find(**kwargs)
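A hedged find sketch against the renamed Level1DataApi, using only keys from the new docstring above; all values are illustrative.
api = Level1DataApi()
result = api.find(module_id='MSC',
                  data_type='SCIE',
                  create_time=('2024-01-01 00:00:00', '2024-06-01 00:00:00'),
                  qc1_status=1,
                  limit=10)
print(len(result.data) if result.success else result.message)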
def find_by_brick_ids(self, **kwargs):
''' retrieve level1 records by brick_ids like [1,2,3,4]
:param kwargs: Parameter dictionary, key items support:
brick_ids: [list]
:returns: csst_dfs_common.models.Result
'''
return self.stub.find_by_brick_ids(**kwargs)
def find_by_ids(self, **kwargs):
''' retrieve level1 records by internal level1 ids like [1,2,3,4]
:param kwargs: Parameter dictionary, key items support:
ids: [list]
:returns: csst_dfs_common.models.Result
'''
return self.stub.find_by_ids(**kwargs)
def sls_find_by_qc1_status(self, **kwargs):
''' retrieve level1 records from database
:param kwargs: Parameter dictionary, key items support:
qc1_status : [int],
limit: limits the number of returned records, default 1
:returns: csst_dfs_common.models.Result
'''
return self.stub.sls_find_by_qc1_status(**kwargs)
def get(self, **kwargs):
''' fetch a record from database
......@@ -49,8 +86,8 @@ class Level2DataApi(object):
'''
return self.stub.update_proc_status(**kwargs)
def update_qc2_status(self, **kwargs):
''' update the status of QC2
def update_qc1_status(self, **kwargs):
''' update the status of QC1
:param kwargs: Parameter dictionary, key items support:
id = [int],
......@@ -58,21 +95,23 @@ class Level2DataApi(object):
:returns: csst_dfs_common.models.Result
'''
return self.stub.update_qc2_status(**kwargs)
return self.stub.update_qc1_status(**kwargs)
def write(self, **kwargs):
''' insert a level2 record into database
''' insert a level1 record into database
:param kwargs: Parameter dictionary, key items support:
level0_id: [str]
level1_id: [int]
project_id: [int]
file_type : [str]
data_type : [str]
cor_sci_id : [int]
prc_params : [str]
filename : [str]
file_path : [str]
prc_status : [int]
prc_time : [str]
pipeline_id : [str]
pmapname : [str]
build : [str]
:returns: csst_dfs_common.models.Result
'''
......
......@@ -8,7 +8,7 @@ class CommonCatalogTestCase(unittest.TestCase):
def setUp(self):
self.api = CatalogApi()
def test_catalog_query(self):
def test_gaia3_query(self):
t= time.time()
result = self.api.catalog_query(
ra=90,
......@@ -29,3 +29,25 @@ class CommonCatalogTestCase(unittest.TestCase):
# print(df.head())
print('used:', time.time()-t)
print('return:', len(result.data))
def test_sim_query(self):
t= time.time()
result = self.api.catalog_query(
ra=90,
dec=24.5,
radius=0.21,
columns = ('id','ra','dec','av','pmdec','phot_g_mean_mag'),
catalog_name='sim',
min_mag=-1,
max_mag=-1,
obstime = -1,
limit = 20
)
print(result)
if result.success:
dt = self.api.to_table(result)
dt.pprint()
# df = dt.to_pandas()
# print(df.head())
print('used:', time.time()-t)
print('return:', len(result.data))
\ No newline at end of file
import unittest
import time
from csst_dfs_api.common.utils import get_nextId_by_prefix
from csst_dfs_api.common.utils import get_next_id
class CommonUtilsTestCase(unittest.TestCase):
def setUp(self):
pass
def test_get_nextId_by_prefix(self):
def test_get_nextId(self):
t= time.time()
result = get_nextId_by_prefix("MBI")
result = get_next_id("MBI")
print(result)
assert type(result.data) == int
......@@ -13,17 +13,17 @@ class FacilityBrickTestCase(unittest.TestCase):
print('find:', recs)
def test_get(self):
rec = self.api.get(id=1)
rec = self.api.get(brick_id=1)
print('get:', rec)
def test_write(self):
rec = self.api.write(id=1, ra = 3.2, dec = 3.3, boundingbox = '12,12')
rec = self.api.write(brick_id=3, ra = 3.2, dec = 3.3, boundingbox = '12,12')
print('test_write:', rec)
def test_find_obs_status(self):
rec = self.api.find_obs_status(brick_id=1, band = 'r')
print('find_obs_status:', rec)
def test_find_level1_data(self):
rec = self.api.find_level1_data(brick_id=1, level1_id = 1, module = 'msc')
print('find_level1_data:', rec)
def test_find_level1_ids(self):
rec = self.api.find_level1_ids(brick_id=1, level1_id = 1)
print('find_level1_ids:', rec)
......@@ -16,10 +16,10 @@ class DetectorApiTestCase(unittest.TestCase):
print('get:', rec)
def test_write(self):
rec = self.api.write(no = 'CCD02',
detector_name = 'CCD02',
rec = self.api.write(no = 'CCD03',
detector_name = 'CCD03',
module_id = 'MSC',
filter_id='f2')
filter_id='uv')
print('write:', rec)
def test_update(self):
......
......@@ -8,18 +8,14 @@ class Level0DataTestCase(unittest.TestCase):
self.api = Level0DataApi()
def test_find(self):
recs = self.api.find(obs_id = '0000011', obs_type = 'sci', limit = 0)
recs = self.api.find(obs_id = '100000101', file_type = 'sci', limit = 0)
print('find:', recs)
def test_find_by_brick_ids(self):
recs = self.api.find_by_brick_ids(brick_ids = [1,2,3,4])
print('find_by_brick_ids:', recs)
def test_get(self):
rec = self.api.get(id = 3)
rec = self.api.get(id = 14)
print('get:', rec)
rec = self.api.get(level0_id = '000001101',obs_type='sci')
rec = self.api.get(level0_id = '000001101',file_type='sci')
print('get:', rec)
def test_update_proc_status(self):
......@@ -36,7 +32,7 @@ class Level0DataTestCase(unittest.TestCase):
obs_id = '100000101',
detector_no = "01",
filter = "u",
obs_type = "sci",
file_type = "sci",
obs_time = "2021-06-06 11:12:13",
exp_time = 150,
detector_status_id = 3,
......
......@@ -18,10 +18,10 @@ class Level0PrcTestCase(unittest.TestCase):
def test_write(self):
rec = self.api.write(
level0_id='134',
run_id = "P1",
pipeline_id = "P1",
prc_module = "QC0",
params_file_path = "/opt/dddasd.params",
prc_status = 3,
prc_time = '2021-06-04 11:12:13',
result_file_path = "/opt/dddasd.header")
message = "test")
print('write:', rec)
\ No newline at end of file
......@@ -11,16 +11,31 @@ class Level1DataTestCase(unittest.TestCase):
def test_find(self):
recs = self.api.find(
level0_id='000001201',
create_time = ("2021-06-01 11:12:13","2021-06-08 11:12:13"))
level0_id='10000010101',
module_id = "MSC",
create_time = ("2024-01-01 11:12:13","2024-06-08 11:12:13"))
print('find:', recs)
def test_find_by_ids(self):
recs = self.api.find_by_ids(
ids=[1,2,3,4]
module_id = "MSC",
ids=[16]
)
print('find_by_ids:', recs)
def test_find_by_brick_ids(self):
recs = self.api.find_by_brick_ids(
brick_ids=[1,2,3,4]
)
print('find_by_brick_ids:', recs)
def test_sls_find_by_qc1_status(self):
recs = self.api.sls_find_by_qc1_status(
qc1_status = 1,
limit = 1
)
print('sls_find_by_qc1_status:', recs)
def test_get(self):
rec = self.api.get(id = 2)
print('get:', rec)
......@@ -36,13 +51,13 @@ class Level1DataTestCase(unittest.TestCase):
def test_write(self):
rec = self.api.write(
level0_id = '10000010101',
data_type = "SCIE",
cor_sci_id = 0,
prc_params = "/opt/dddasd.params",
file_type = "SCIE",
prc_status = 3,
prc_time = '2021-10-22 11:12:13',
filename = "CSST_MSC_MS_SCI_20270810142128_20270810142358_100000101_20_img_L1.fits",
file_path = "/opt/temp/csst/CSST_MSC_MS_SCI_20270810142128_20270810142358_100000101_20_img_L1.fits",
pipeline_id = "P1",
refs = {'dark': '1', 'bias': '2', 'flat': '3' })
build_id = 123,
refs = [('dark', '1'), ('bias', '2'), ('flat', '3')]
)
print('write:', rec)
\ No newline at end of file
......@@ -21,8 +21,8 @@ class Level1PrcTestCase(unittest.TestCase):
rec = self.api.write(level1_id=1,
pipeline_id = "P1",
prc_module = "QC0",
params_file_path = "/opt/dddasd.params",
run_id = "2323",
prc_status = 3,
prc_time = '2021-06-04 11:12:13',
result_file_path = "/opt/dddasd.header")
message = "256's character")
print('write:', rec)
\ No newline at end of file
......@@ -14,51 +14,62 @@ class Level2DataTestCase(unittest.TestCase):
level1_id=1)
print('find:', recs)
def test_find_existed_brick_ids(self):
recs = self.api.find_existed_brick_ids(data_type = "csst-msc-l1-mbi-cat")
print('find_existed_brick_ids:', recs)
# def test_find_existed_brick_ids(self):
# recs = self.api.find_existed_brick_ids(data_type = "csst-msc-l1-mbi-cat")
# print('find_existed_brick_ids:', recs)
def test_catalog_query(self):
result = self.api.catalog_query(
sql = 'select x,y,A,B,PA,AB,E from csst_msc_l1_mbi_cat',
limit = 2)
print(result)
if result.success and result['totalCount'] > 0:
dt = to_fits_table(result)
dt.pprint()
# def test_catalog_query(self):
# result = self.api.catalog_query(
# sql = 'select x,y,A,B,PA,AB,E from csst_msc_l1_mbi_cat',
# limit = 2)
# print(result)
# if result.success and result['totalCount'] > 0:
# dt = to_fits_table(result)
# dt.pprint()
def test_coord_cond_sql(self):
result = self.api.coord_cond_sql(data_type="csst-msc-l1-mbi-cat", ra = 120, dec = 40, radius = 0.5)
print(result)
# def test_coord_cond_sql(self):
# result = self.api.coord_cond_sql(data_type="csst-msc-l1-mbi-cat", ra = 120, dec = 40, radius = 0.5)
# print(result)
def test_catalog_column(self):
result = self.api.catalog_columns(data_type="csst-msc-l1-mbi-cat", columns = ['RA', 'DEC'])
print(result)
# def test_catalog_column(self):
# result = self.api.catalog_columns(data_type="csst-msc-l1-mbi-cat", columns = ['RA', 'DEC'])
# print(result)
def test_get(self):
rec = self.api.get(id = 1)
print('get:', rec)
# def test_get(self):
# rec = self.api.get(id = 10)
# print('get:', rec)
def test_update_proc_status(self):
rec = self.api.update_proc_status(id = 1, status = 4)
print('update_proc_status:', rec)
# def test_update_proc_status(self):
# rec = self.api.update_proc_status(id = 10, status = 4)
# print('update_proc_status:', rec)
def test_update_qc2_status(self):
rec = self.api.update_qc2_status(id = 1, status = 7)
print('update_qc2_status:', rec)
# def test_update_qc2_status(self):
# rec = self.api.update_qc2_status(id = 10, status = 7)
# print('update_qc2_status:', rec)
def test_write(self):
rec = self.api.write(
level0_id= '1016000000412',
module_id = 'MSC',
data_type = "csst-msc-l1-mbi-cat",
prc_status = 3,
prc_time = '2021-10-22 11:12:13',
filename = "CSST_MSC_MS_SCIE_20240821023334_20240821023604_10160000011_19_L1_V01_CAT.fits",
file_path = "/opt/temp/csst/msc/L2/CSST_MSC_MS_SCIE_20240821023334_20240821023604_10160000011_19_L1_V01_CAT.fits",
pipeline_id = "csst-msc-l1-mbi-cat"
)
print('write:', rec)
# def test_write(self):
# # rec = self.api.write(
# # level0_id= '1016000000412',
# # module_id = 'MSC',
# # data_type = "csst_example_table",
# # prc_status = 3,
# # prc_time = '2021-10-22 11:12:13',
# # filename = "csst_example_table.fits",
# # file_path = "/Users/wsl/temp/csst/csst_example_table.fits",
# # pipeline_id = "csst-msc-l1-mbi-cat"
# # )
# # print('write:', rec)
# rec = self.api.write(
# level0_id= '1016000000412',
# module_id = 'MSC',
# data_type = "csst-msc-l1-mbi-cat",
# prc_status = 3,
# prc_time = '2021-10-22 11:12:13',
# filename = "CSST_MSC_MS_SCIE_20240821023334_20240821023604_10160000011_19_L1_V01_CAT.fits",
# file_path = "/opt/temp/csst/msc/L2/CSST_MSC_MS_SCIE_20240821023334_20240821023604_10160000011_19_L1_V01_CAT.fits",
# pipeline_id = "csst-msc-l1-mbi-cat"
# )
# print('write:', rec)
# rec = self.api.write(
# level1_id= 1,
# module_id = 'MSC',
......