
Commit 241de7f

#582: Update JSON task lister as package console command

Parent: b081728

3 files changed: 65 additions and 59 deletions

setup.cfg

Lines changed: 1 addition & 0 deletions
@@ -49,4 +49,5 @@ console_scripts =
     lbaf-vt-data-files-validator-loader = lbaf:vt_data_files_validator_loader
     lbaf-vt-data-files-validator = lbaf:vt_data_files_validator
     lbaf-vt-data-files-maker = lbaf:vt_data_files_maker
+    lbaf-vt-data-files-task-lister = lbaf:vt_data_files_task_lister
     lbaf-vt-data-stat-files-updater = lbaf:vt_data_stat_files_updater
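With this entry point added, the command becomes available once the package is (re)installed, e.g. with pip install -e . A minimal sketch, assuming an installed lbaf and Python 3.10+ for the group= selection API, to confirm the script is registered:

# Sketch: list the "console_scripts" entry points and look for the new command.
from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):  # group= keyword requires Python 3.10+
    if ep.name == "lbaf-vt-data-files-task-lister":
        # Expected value per setup.cfg: "lbaf:vt_data_files_task_lister"
        print(ep.name, "->", ep.value)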

src/lbaf/Utils/lbsJSONTaskLister.py

Lines changed: 59 additions & 59 deletions
@@ -47,80 +47,80 @@
 """

 import os
-import sys
 import json
 import yaml
 import argparse
-import re

 from lbaf.IO.lbsVTDataReader import LoadReader
 from lbaf.Utils.lbsLogging import get_logger, Logger

-def process_files(directory, file_stem, file_suffix, logger: Logger):
-    reader = LoadReader(
-        file_prefix = directory + file_stem,
-        logger = logger,
-        file_suffix = file_suffix
-    )
-
-    tasks = {}
-    n_ranks = reader.n_ranks
-
-    try:
-        for rank in range(n_ranks):
-            _, data = reader._load_vt_file(rank)
-            phases = data.get("phases", [])
-            if not phases:
-                logger.warning("No phases found for rank %s", str(rank))
-                continue
-
-            last_phase = phases[-1]
-
-            if "lb_iterations" in last_phase:
-                lb_iterations = last_phase["lb_iterations"]
-                if lb_iterations:
-                    last_lb_iteration = lb_iterations[-1]
-                    iteration_tasks = [task["entity"].get("seq_id", task["entity"].get("id")) for task in last_lb_iteration.get("tasks", [])]
-                    tasks[rank] = iteration_tasks
+class JSONTaskLister:
+    def __process_files(self):
+        reader = LoadReader(
+            file_prefix = self.__directory + self.__file_stem,
+            logger = self.__logger,
+            file_suffix = self.__file_suffix
+        )
+
+        tasks = {}
+        n_ranks = reader.n_ranks
+
+        try:
+            for rank in range(n_ranks):
+                _, data = reader._load_vt_file(rank)
+                phases = data.get("phases", [])
+                if not phases:
+                    self.__logger.warning("No phases found for rank %s", str(rank))
+                    continue
+
+                last_phase = phases[-1]
+
+                if "lb_iterations" in last_phase:
+                    lb_iterations = last_phase["lb_iterations"]
+                    if lb_iterations:
+                        last_lb_iteration = lb_iterations[-1]
+                        iteration_tasks = [task["entity"].get("seq_id", task["entity"].get("id")) for task in last_lb_iteration.get("tasks", [])]
+                        tasks[rank] = iteration_tasks
+                    else:
+                        self.__logger.warning("No lb_iterations found in the last phase of rank %s", str(rank))
                 else:
-                    logger.warning("No lb_iterations found in the last phase of rank %s", str(rank))
-            else:
-                phase_tasks = [task["entity"].get("seq_id", task["entity"].get("id")) for task in last_phase.get("tasks", [])]
-                tasks[rank] = phase_tasks
-    except (json.JSONDecodeError, KeyError, ValueError, IndexError) as e:
-        logger.error("Error processing rank %s: %s", str(rank), e)
-        sys.exit(1)
+                    phase_tasks = [task["entity"].get("seq_id", task["entity"].get("id")) for task in last_phase.get("tasks", [])]
+                    tasks[rank] = phase_tasks
+        except (json.JSONDecodeError, KeyError, ValueError, IndexError) as e:
+            self.__logger.error("Error processing rank %s: %s", str(rank), e)
+            return

-    return tasks
+        return tasks

-def main():
-    parser = argparse.ArgumentParser(description="Extract tasks from JSON files.")
-    parser.add_argument("directory", type=str, help="Directory containing JSON files.")
-    parser.add_argument("--file-stem", type=str, default="data", help="File stem for JSON files (default: 'data').")
-    parser.add_argument("--file-suffix", type=str, default="json", help="File suffix for JSON files (default: 'json').")
-    parser.add_argument("--output", type=str, default="tasks.yml", help="Output YAML file (default: 'tasks.yml').")
+    def run(self):
+        parser = argparse.ArgumentParser(description="Extract tasks from JSON files.")
+        parser.add_argument("directory", type=str, help="Directory containing JSON files.")
+        parser.add_argument("--file-stem", type=str, default="data", help="File stem for JSON files (default: 'data').")
+        parser.add_argument("--file-suffix", type=str, default="json", help="File suffix for JSON files (default: 'json').")
+        parser.add_argument("--output", type=str, default="tasks.yml", help="Output YAML file (default: 'tasks.yml').")

-    args = parser.parse_args()
+        args = parser.parse_args()

-    directory = args.directory
-    file_stem = args.file_stem
-    file_suffix = args.file_suffix
-    output_file = args.output
+        self.__directory = args.directory
+        self.__file_stem = args.file_stem
+        self.__file_suffix = args.file_suffix
+        self.__output_file = args.output

-    logger = get_logger()
+        self.__logger = get_logger()

-    if not os.path.isdir(directory):
-        logger.error("Directory not found: %s", directory)
-        return
+        if not os.path.isdir(self.__directory):
+            self.__logger.error("Directory not found: %s", self.__directory)
+            return

-    tasks = process_files(directory, file_stem, file_suffix, logger)
+        tasks = self.__process_files()

-    try:
-        with open(output_file, 'w') as file:
-            yaml.safe_dump(tasks, file)
-            logger.info("Tasks successfully written to %s", output_file)
-    except IOError as e:
-        logger.error("Error writing to %s: %s", output_file, e)
+        try:
+            with open(self.__output_file, 'w') as file:
+                yaml.safe_dump(tasks, file)
+                self.__logger.info("Tasks successfully written to %s", self.__output_file)
+        except IOError as e:
+            self.__logger.error("Error writing to %s: %s", self.__output_file, e)
+        return

 if __name__ == "__main__":
-    main()
+    JSONTaskLister().run()
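The refactored lister still writes a YAML mapping from rank id to the task ids ("seq_id", falling back to "id") of the last phase, or of its last LB iteration when "lb_iterations" is present. A minimal sketch of consuming that output, assuming the command's default tasks.yml file name:

# Sketch: load the lister's output and summarize it; assumes the default "tasks.yml".
import yaml

with open("tasks.yml", encoding="utf-8") as f:
    tasks_per_rank = yaml.safe_load(f)  # e.g. {0: [0, 4, 8], 1: [1, 5], ...}

for rank, task_ids in sorted(tasks_per_rank.items()):
    print(f"rank {rank}: {len(task_ids)} tasks")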

src/lbaf/__init__.py

Lines changed: 5 additions & 0 deletions
@@ -63,6 +63,7 @@
 from lbaf.Utils.lbsCsv2JsonDataConverter import Csv2JsonConverter
 from lbaf.Utils.lbsDataStatFilesUpdater import DataStatFilesUpdater
 from lbaf.Utils.lbsJSONDataFilesMaker import JSONDataFilesMaker
+from lbaf.Utils.lbsJSONTaskLister import JSONTaskLister
 # pylint:enable=C0413:wrong-import-position

 # lbaf functions to expose as lbaf package console commands (see setup.cfg)
@@ -92,6 +93,10 @@ def vt_data_files_maker() -> int:
     """Run a JSONDataFilesMaker instance."""
     return JSONDataFilesMaker().run()

+def vt_data_files_task_lister() -> int:
+    """Run a JSONTaskLister instance."""
+    return JSONTaskLister().run()
+
 def csv_2_json_converter() -> int:
     """Run a Csv2JsonConverter instance."""
     return Csv2JsonConverter().run()
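Beyond the console script, the new wrapper can also be driven programmatically. Since JSONTaskLister.run() reads its arguments from sys.argv, the sketch below injects them by hand; the data directory is a placeholder, and it is passed with a trailing separator because run() builds the reader's file_prefix by concatenating the directory and the file stem directly.

# Hypothetical invocation of the new wrapper; the directory path is a placeholder.
import sys
from lbaf import vt_data_files_task_lister

sys.argv = [
    "lbaf-vt-data-files-task-lister",
    "/path/to/vt/data/",   # placeholder directory containing the JSON data files
    "--file-stem", "data",
    "--output", "tasks.yml",
]
vt_data_files_task_lister()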
