Skip to content

Commit f63a469

Browse files
[executors] chore: unify CLI execution from parameters and config
* update tests to cover reading config
1 parent: e829b9b · commit: f63a469

File tree

4 files changed

+60
-26
lines changed

4 files changed

+60
-26
lines changed

libs/executors/garf_executors/entrypoints/cli.py

Lines changed: 18 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -88,14 +88,6 @@ def main():
8888
raise exceptions.GarfExecutorError(
8989
f'No execution context found for source {args.source} in {config_file}'
9090
)
91-
query_executor = garf_executors.setup_executor(
92-
source=args.source,
93-
fetcher_parameters=context.fetcher_parameters,
94-
enable_cache=args.enable_cache,
95-
cache_ttl_seconds=args.cache_ttl_seconds,
96-
)
97-
batch = {query: reader_client.read(query) for query in args.query}
98-
query_executor.execute_batch(batch, context, args.parallel_threshold)
9991
else:
10092
param_types = ['source', 'macro', 'template']
10193
outputs = args.output.split(',')
@@ -116,23 +108,24 @@ def main():
116108
writer_parameters=writer_parameters,
117109
fetcher_parameters=source_parameters,
118110
)
119-
query_executor = garf_executors.setup_executor(
120-
source=args.source,
121-
fetcher_parameters=context.fetcher_parameters,
122-
enable_cache=args.enable_cache,
123-
cache_ttl_seconds=args.cache_ttl_seconds,
124-
)
125-
if args.parallel_queries and len(args.query) > 1:
126-
logger.info('Running queries in parallel')
127-
batch = {query: reader_client.read(query) for query in args.query}
128-
query_executor.execute_batch(batch, context, args.parallel_threshold)
129-
else:
130-
if len(args.query) > 1:
131-
logger.info('Running queries sequentially')
132-
for query in args.query:
133-
query_executor.execute(
134-
query=reader_client.read(query), title=query, context=context
135-
)
111+
query_executor = garf_executors.setup_executor(
112+
source=args.source,
113+
fetcher_parameters=context.fetcher_parameters,
114+
enable_cache=args.enable_cache,
115+
cache_ttl_seconds=args.cache_ttl_seconds,
116+
)
117+
batch = {query: reader_client.read(query) for query in args.query}
118+
if args.parallel_queries and len(args.query) > 1:
119+
logger.info('Running queries in parallel')
120+
batch = {query: reader_client.read(query) for query in args.query}
121+
query_executor.execute_batch(batch, context, args.parallel_threshold)
122+
else:
123+
if len(args.query) > 1:
124+
logger.info('Running queries sequentially')
125+
for query in args.query:
126+
query_executor.execute(
127+
query=reader_client.read(query), title=query, context=context
128+
)
136129
logging.shutdown()
137130

138131

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
SELECT
2+
resource,
3+
dimension.name AS name,
4+
metric.clicks AS clics
5+
FROM resource

libs/executors/tests/end-to-end/test_cli.py

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
import pathlib
1919
import subprocess
2020

21+
import yaml
22+
2123
_SCRIPT_PATH = pathlib.Path(__file__).parent
2224

2325

@@ -89,3 +91,35 @@ def test_fake_source_from_file(self, tmp_path):
8991

9092
assert result.returncode == 0
9193
assert json.loads(result.stdout) == self.expected_output
94+
95+
def test_fake_source_from_config(self, tmp_path):
96+
query_path = tmp_path / 'query.sql'
97+
with pathlib.Path.open(query_path, 'w', encoding='utf-8') as f:
98+
f.write(self.query)
99+
test_config = _SCRIPT_PATH / 'test_config.yaml'
100+
with open(test_config, 'r', encoding='utf-8') as f:
101+
config_data = yaml.safe_load(f)
102+
original_data_location = config_data['fake']['fetcher_parameters'][
103+
'data_location'
104+
]
105+
config_data['fake']['fetcher_parameters']['data_location'] = str(
106+
_SCRIPT_PATH / original_data_location
107+
)
108+
tmp_config = tmp_path / 'config.yaml'
109+
with open(tmp_config, 'w', encoding='utf-8') as f:
110+
yaml.dump(config_data, f, encoding='utf-8')
111+
command = (
112+
f'garf {str(query_path)} --source fake '
113+
f'-c {str(tmp_config)} '
114+
'--loglevel ERROR'
115+
)
116+
result = subprocess.run(
117+
command,
118+
shell=True,
119+
check=False,
120+
capture_output=True,
121+
text=True,
122+
)
123+
124+
assert result.returncode == 0
125+
assert json.loads(result.stdout) == self.expected_output
Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,6 @@
11
fake:
22
writer: console
3+
writer_parameters:
4+
format: json
35
fetcher_parameters:
4-
data_location: ./test.json
6+
data_location: test.json

0 commit comments

Comments (0)