 import logging
 import subprocess as sp
-from datetime import datetime
 from pathlib import Path
 from textwrap import dedent
-
-import click
+import argparse
 import pandas as pd
+
 from astropy import units as u
 from astropy.table import Table, join, unique, vstack
 from astropy.time import Time
 from lstchain.io.io import dl1_params_lstcam_key
 from lstchain.reco.utils import add_delta_t_key, get_effective_time
 
-from osa.paths import get_major_version, get_dl1_prod_id
-from osa.utils.utils import get_lstchain_version
+from osa.configs.config import cfg
+from osa.utils.cliopts import valid_date
+from osa.paths import DEFAULT_CFG, get_major_version, get_dl1_prod_id
+from osa.utils.utils import get_lstchain_version, date_to_dir, date_to_iso
+
 
 pd.set_option("display.float_format", "{:.1f}".format)
 
 logging.basicConfig(level=logging.INFO, format="%(asctime)s:%(levelname)s:%(message)s")
 log = logging.getLogger(__name__)
 
-
-BASE_DL1 = Path("/fefs/aswg/data/real/DL1")
-BASE_MONITORING = Path("/fefs/aswg/data/real/monitoring")
-CATALOG_DIR = Path("/fefs/aswg/data/real/OSA/Catalog")
-TAILCUTS_DIR = Path("/fefs/aswg/data/real/auxiliary/TailCuts")
-
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    "-c",
+    "--config",
+    action="store",
+    type=Path,
+    default=DEFAULT_CFG,
+    help="Configuration file",
+)
+parser.add_argument(
+    "-d",
+    "--date",
+    default=None,
+    type=valid_date,
+    help="Date of the runs to be added to the source catalog, in YYYY-MM-DD format",
+)
+parser.add_argument(
+    "-v",
+    "--version",
+    type=str,
+    default=get_lstchain_version(),
+)
 
 def add_table_to_html(html_table):
     return dedent(
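Note: with the click decorators removed, the command line is now handled by the module-level argparse parser above. A minimal sketch of how it would be exercised, assuming valid_date accepts the YYYY-MM-DD form stated in the help text (the date value below is made up for illustration):

    # Illustration only, not part of this change.
    args = parser.parse_args(["--date", "2024-01-15"])
    print(args.date, args.version)  # parsed date and the default (installed) lstchain version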
@@ -126,9 +144,10 @@ def add_start_and_elapsed(table: Table, datedir: str, version: str) -> None:
 
     for run in table["run_id"]:
         major_version = get_major_version(version)
-        dl1b_config_file = TAILCUTS_DIR / f"dl1ab_Run{run:05d}.json"
+        dl1b_config_file = Path(cfg.get("LST1", "TAILCUTS_FINDER_DIR")) / f"dl1ab_Run{run:05d}.json"
         dl1_prod_id = get_dl1_prod_id(dl1b_config_file)
-        file = BASE_DL1 / datedir / major_version / dl1_prod_id / f"dl1_LST-1.Run{run:05d}.h5"
+        dl1_dir = Path(cfg.get("LST1", "DL1_DIR"))
+        file = dl1_dir / datedir / major_version / dl1_prod_id / f"dl1_LST-1.Run{run:05d}.h5"
         df = pd.read_hdf(file, key=dl1_params_lstcam_key)
 
         # Timestamp of the first event
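Note: for orientation, if DL1_DIR were set to the hard-coded base path removed above, the DL1 file resolved here would look roughly like the following (the version and production ID components are illustrative guesses):

    /fefs/aswg/data/real/DL1/<datedir>/<major_version>/<dl1_prod_id>/dl1_LST-1.Run01234.h5
    e.g. /fefs/aswg/data/real/DL1/20240115/v0.10/tailcut84/dl1_LST-1.Run01234.h5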
@@ -150,32 +169,33 @@ def copy_to_webserver(html_file, csv_file):
     sp.run(["scp", str(csv_file), "datacheck:/home/www/html/datacheck/lstosa/."], check=True)
 
 
-@click.command()
-@click.argument("date", type=click.DateTime(formats=["%Y-%m-%d"]))
-@click.option("-v", "--version", type=str, default=get_lstchain_version())
-def main(date: datetime = None, version: str = get_lstchain_version()):
+def main():
     """Update source catalog with new run entries from a given date in format YYYY-MM-DD.
 
     Notes
     -----
     It needs to be run as lstanalyzer user.
     """
-    catalog_path = CATALOG_DIR / "LST_source_catalog.ecsv"
+    args = parser.parse_args()
+
+    catalog_path = Path(cfg.get("LST1", "SOURCE_CATALOG")) / "LST_source_catalog.ecsv"
     catalog_table = Table.read(catalog_path)
 
     # Open table for given date and append its content to the table with entire catalog
-    datedir = date.strftime("%Y%m%d")
-    today_catalog = Table.read(BASE_MONITORING / f"RunCatalog/RunCatalog_{datedir}.ecsv")
-    today_runsummary = Table.read(BASE_MONITORING / f"RunSummary/RunSummary_{datedir}.ecsv")
+    datedir = date_to_dir(args.date)
+    run_catalog_dir = Path(cfg.get("LST1", "RUN_CATALOG"))
+    today_catalog = Table.read(run_catalog_dir / f"RunCatalog_{datedir}.ecsv")
+    run_summary_dir = Path(cfg.get("LST1", "RUN_SUMMARY"))
+    today_runsummary = Table.read(run_summary_dir / f"RunSummary_{datedir}.ecsv")
     # Keep only astronomical data runs
     today_runsummary = today_runsummary[today_runsummary["run_type"] == "DATA"]
     todays_info = join(today_runsummary, today_catalog)
-    todays_info.add_column(date.strftime("%Y-%m-%d"), name="date_dir")
+    todays_info.add_column(date_to_iso(args.date), name="date_dir")
     todays_info.keep_columns(["run_id", "source_name", "date_dir"])
 
     # Add start of run in iso format and elapsed time for each run
     log.info("Getting run start and elapsed time")
-    add_start_and_elapsed(todays_info, datedir, version)
+    add_start_and_elapsed(todays_info, datedir, args.version)
 
     # Change column names
     todays_info.rename_column("run_id", "Run ID")
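Note: date_to_dir() and date_to_iso() replace the explicit strftime calls removed above; judging from those removed lines they are expected to behave like this sketch (an assumption, not verified against osa.utils.utils):

    from datetime import datetime

    night = datetime(2024, 1, 15)
    night.strftime("%Y%m%d")    # "20240115", what date_to_dir(args.date) should return
    night.strftime("%Y-%m-%d")  # "2024-01-15", what date_to_iso(args.date) should return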
@@ -201,7 +221,7 @@ def main(date: datetime = None, version: str = get_lstchain_version()):
     html_content = add_query_table_to_html(html_table)
 
     # Save the HTML and ECSV files and copy them to the LST-1 webserver
-    html_file = CATALOG_DIR / "LST_source_catalog.html"
+    html_file = Path(cfg.get("LST1", "SOURCE_CATALOG")) / "LST_source_catalog.html"
     html_file.write_text(html_content)
     table_unique.write(catalog_path, delimiter=",", overwrite=True)
 
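Note: every hard-coded location is now read from the OSA configuration via cfg.get("LST1", ...). A sketch of the corresponding [LST1] options, reusing the paths removed by this change as illustrative values (the RUN_CATALOG and RUN_SUMMARY values are guesses based on the old BASE_MONITORING layout):

    [LST1]
    DL1_DIR = /fefs/aswg/data/real/DL1
    SOURCE_CATALOG = /fefs/aswg/data/real/OSA/Catalog
    TAILCUTS_FINDER_DIR = /fefs/aswg/data/real/auxiliary/TailCuts
    RUN_CATALOG = /fefs/aswg/data/real/monitoring/RunCatalog
    RUN_SUMMARY = /fefs/aswg/data/real/monitoring/RunSummary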