
Commit dafb6bf

no f-strings
1 parent 01894b8 commit dafb6bf
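
The change is mechanical throughout: every f-string in the three touched files is rewritten as an equivalent %-style format expression, presumably so the modules stay parseable on Python versions older than 3.6, where f-strings are a syntax error. A minimal sketch of the pattern, reusing names from the first hunk below:

    supplied_k = "alpha"
    possible_params = {"alpha": None, "beta": None}

    # before: f-string interpolation
    msg = f"Unknown argument: '{supplied_k}', possible args are: {list(possible_params.keys())}"

    # after: %-formatting; %s calls str() on its argument, so lists, ints,
    # and exceptions render exactly as the f-string rendered them
    msg = "Unknown argument: '%s', possible args are: %s" % (
        supplied_k,
        list(possible_params.keys()),
    )

Multiple substitutions take a tuple on the right-hand side of %, while a single substitution may take a bare value, as in the "Missing argument" hunk.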

3 files changed: +24 -20 lines

metaflow/click_api.py (+9 -10)
@@ -65,17 +65,16 @@ def _method_sanity_check(
     for supplied_k, supplied_v in kwargs.items():
         if supplied_k not in possible_params:
             raise ValueError(
-                f"Unknown argument: '{supplied_k}', "
-                f"possible args are: {list(possible_params.keys())}"
+                "Unknown argument: '%s', possible args are: %s"
+                % (supplied_k, list(possible_params.keys()))
             )

         try:
             check_type(supplied_v, annotations[supplied_k])
         except TypeCheckError:
             raise TypeError(
-                f"Invalid type for '{supplied_k}', "
-                f"expected: '{annotations[supplied_k]}', "
-                f"default is '{defaults[supplied_k]}'"
+                "Invalid type for '%s', expected: '%s', default is '%s'"
+                % (supplied_k, annotations[supplied_k], defaults[supplied_k])
             )

         if supplied_k in possible_arg_params:

@@ -105,7 +104,7 @@ def _method_sanity_check(
             (cli_name not in method_params["args"])
             and (cli_name not in method_params["options"])
         ) and possible_v.required:
-            raise ValueError(f"Missing argument: {cli_name} is required.")
+            raise ValueError("Missing argument: %s is required." % cli_name)

     return method_params

@@ -189,7 +188,7 @@ def from_cli(cls, flow_file: str, cli_collection: Callable) -> Callable:
             class_dict[cmd_obj.name] = extract_command(cmd_obj, flow_parameters)
         else:
             raise RuntimeError(
-                f"Cannot handle {cmd_obj.name} of type {type(cmd_obj)}"
+                "Cannot handle %s of type %s" % (cmd_obj.name, type(cmd_obj))
             )

     to_return = type(flow_file, (MetaflowAPI,), class_dict)

@@ -252,10 +251,10 @@ def execute(self) -> List[str]:
         for k, v in options.items():
             if isinstance(v, list):
                 for i in v:
-                    components.append(f"--{k}")
+                    components.append("--%s" % k)
                     components.append(str(i))
             else:
-                components.append(f"--{k}")
+                components.append("--%s" % k)
                 if v != "flag":
                     components.append(str(v))

@@ -307,7 +306,7 @@ def extract_group(cmd_obj: click.Group, flow_parameters: List[Parameter]) -> Callable:
             class_dict[sub_cmd_obj.name] = extract_command(sub_cmd_obj, flow_parameters)
         else:
             raise RuntimeError(
-                f"Cannot handle {sub_cmd_obj.name} of type {type(sub_cmd_obj)}"
+                "Cannot handle %s of type %s" % (sub_cmd_obj.name, type(sub_cmd_obj))
             )

     resulting_class = type(cmd_obj.name, (MetaflowAPI,), class_dict)
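
For context on the execute() hunk above: the loop serializes an options mapping into argv-style tokens, repeating the flag once per element for list values and emitting value-less flags when the value is the sentinel "flag". A rough sketch of that behavior, with a made-up options dict (the keys and values here are illustrative, not from the diff):

    options = {"tag": ["a", "b"], "max-workers": 4, "help": "flag"}

    components = []
    for k, v in options.items():
        if isinstance(v, list):
            for i in v:
                components.append("--%s" % k)  # one --tag per list element
                components.append(str(i))
        else:
            components.append("--%s" % k)
            if v != "flag":  # bare flags carry no value
                components.append(str(v))

    # components == ['--tag', 'a', '--tag', 'b', '--max-workers', '4', '--help']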

metaflow/metaflow_runner.py (+1 -1)
@@ -37,7 +37,7 @@ def __getattr__(self, name: str):
             else:
                 return command_attr
         else:
-            raise AttributeError(f"Invalid attribute {name}")
+            raise AttributeError("Invalid attribute %s" % name)


 class Runner(object):

metaflow/subprocess_manager.py (+14 -9)
@@ -101,7 +101,8 @@ async def wait(
         except asyncio.TimeoutError:
             command_string = " ".join(self.command)
             print(
-                f"Timeout: The process: '{command_string}' didn't complete within {timeout} seconds."
+                "Timeout: The process: '%s' didn't complete within %s seconds."
+                % (command_string, timeout)
             )

     async def run(self):

@@ -128,7 +129,7 @@ async def run(self):
             self.run_called = True
             return self.process
         except Exception as e:
-            print(f"Error starting subprocess: {e}")
+            print("Error starting subprocess: %s" % e)
             await self.cleanup()

     async def stream_logs(

@@ -145,7 +146,8 @@ async def stream_logs(

         if stream not in self.log_files:
             raise ValueError(
-                f"No log file found for '{stream}', valid values are: {list(self.log_files.keys())}"
+                "No log file found for '%s', valid values are: %s"
+                % (stream, list(self.log_files.keys()))
             )

         log_file = self.log_files[stream]

@@ -167,7 +169,8 @@ async def stream_logs(
                 line = await asyncio.wait_for(f.readline(), timeout_per_line)
             except asyncio.TimeoutError as e:
                 raise LogReadTimeoutError(
-                    f"Timeout while reading a line from the log file for the stream: {stream}"
+                    "Timeout while reading a line from the log file for the stream: %s"
+                    % stream
                 ) from e

             # when we encounter an empty line

@@ -209,7 +212,8 @@ async def kill(self, termination_timeout: float = 5):
                 self.process.kill()
             else:
                 print(
-                    f"Process has already terminated with return code: {self.process.returncode}"
+                    "Process has already terminated with return code: %s"
+                    % self.process.returncode
                 )
         else:
             print("No process to kill.")

@@ -259,7 +263,7 @@ async def main():
             interesting_position = position
             break

-    print(f"ended streaming at: {interesting_position}")
+    print("ended streaming at: %s" % interesting_position)

     # wait / do some other processing while the process runs in background
     # if the process finishes before this sleep period, the streaming of logs

@@ -268,7 +272,8 @@ async def main():

     # this blocks till the process completes unless we uncomment the `time.sleep` above..
     print(
-        f"resuming streaming from: {interesting_position} while process is still running..."
+        "resuming streaming from: %s while process is still running..."
+        % interesting_position
     )
     async for position, line in command_obj.stream_logs(
         stream="stdout", position=interesting_position

@@ -292,12 +297,12 @@ async def main():
     # two parallel streams for stdout
     tasks = [
         command_obj.emit_logs(
-            stream="stdout", custom_logger=lambda x: print(f"[STREAM A]: {x}")
+            stream="stdout", custom_logger=lambda x: print("[STREAM A]: %s" % x)
         ),
         # this can be another 'command_obj' too, in which case
         # we stream logs from 2 different subprocesses in parallel :)
         command_obj.emit_logs(
-            stream="stdout", custom_logger=lambda x: print(f"[STREAM B]: {x}")
+            stream="stdout", custom_logger=lambda x: print("[STREAM B]: %s" % x)
         ),
     ]
     await asyncio.gather(*tasks)
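
One subtlety of the conversion worth noting: the single-value form used in several hunks above (% stream, % interesting_position, % cli_name) relies on the right-hand value not being a tuple, because % treats a tuple as the full argument list. A short illustration:

    stream = "stdout"
    print("stream: %s" % stream)   # fine: one placeholder, one value

    pos = (3, 7)                   # hypothetical tuple-valued position
    # "at: %s" % pos               # TypeError: not all arguments converted
    print("at: %s" % (pos,))       # wrapping in a 1-tuple is always safe

The values formatted in this commit are strings, numbers, and exceptions, so the bare form is safe here.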
