Skip to content

Commit 98742e7

Browse files
committed
don't use f-strings
1 parent d1ff2da commit 98742e7

File tree

2 files changed: +15 insertions, -10 deletions

metaflow/metaflow_runner.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ def __getattr__(self, name: str):
3737
else:
3838
return command_attr
3939
else:
40-
raise AttributeError(f"Invalid attribute {name}")
40+
raise AttributeError("Invalid attribute %s" % name)
4141

4242

4343
class Runner(object):

metaflow/subprocess_manager.py

+14-9
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,8 @@ async def wait(
101101
except asyncio.TimeoutError:
102102
command_string = " ".join(self.command)
103103
print(
104-
f"Timeout: The process: '{command_string}' didn't complete within {timeout} seconds."
104+
"Timeout: The process: '%s' didn't complete within %s seconds."
105+
% (command_string, timeout)
105106
)
106107

107108
async def run(self):
@@ -128,7 +129,7 @@ async def run(self):
128129
self.run_called = True
129130
return self.process
130131
except Exception as e:
131-
print(f"Error starting subprocess: {e}")
132+
print("Error starting subprocess: %s" % e)
132133
await self.cleanup()
133134

134135
async def stream_logs(
@@ -145,7 +146,8 @@ async def stream_logs(
145146

146147
if stream not in self.log_files:
147148
raise ValueError(
148-
f"No log file found for '{stream}', valid values are: {list(self.log_files.keys())}"
149+
"No log file found for '%s', valid values are: %s"
150+
% (stream, list(self.log_files.keys()))
149151
)
150152

151153
log_file = self.log_files[stream]
@@ -167,7 +169,8 @@ async def stream_logs(
167169
line = await asyncio.wait_for(f.readline(), timeout_per_line)
168170
except asyncio.TimeoutError as e:
169171
raise LogReadTimeoutError(
170-
f"Timeout while reading a line from the log file for the stream: {stream}"
172+
"Timeout while reading a line from the log file for the stream: %s"
173+
% stream
171174
) from e
172175

173176
# when we encounter an empty line
@@ -209,7 +212,8 @@ async def kill(self, termination_timeout: float = 5):
209212
self.process.kill()
210213
else:
211214
print(
212-
f"Process has already terminated with return code: {self.process.returncode}"
215+
"Process has already terminated with return code: %s"
216+
% self.process.returncode
213217
)
214218
else:
215219
print("No process to kill.")
@@ -259,7 +263,7 @@ async def main():
259263
interesting_position = position
260264
break
261265

262-
print(f"ended streaming at: {interesting_position}")
266+
print("ended streaming at: %s" % interesting_position)
263267

264268
# wait / do some other processing while the process runs in background
265269
# if the process finishes before this sleep period, the streaming of logs
@@ -268,7 +272,8 @@ async def main():
268272

269273
# this blocks till the process completes unless we uncomment the `time.sleep` above..
270274
print(
271-
f"resuming streaming from: {interesting_position} while process is still running..."
275+
"resuming streaming from: %s while process is still running..."
276+
% interesting_position
272277
)
273278
async for position, line in command_obj.stream_logs(
274279
stream="stdout", position=interesting_position
@@ -292,12 +297,12 @@ async def main():
292297
# two parallel streams for stdout
293298
tasks = [
294299
command_obj.emit_logs(
295-
stream="stdout", custom_logger=lambda x: print(f"[STREAM A]: {x}")
300+
stream="stdout", custom_logger=lambda x: print("[STREAM A]: %s" % x)
296301
),
297302
# this can be another 'command_obj' too, in which case
298303
# we stream logs from 2 different subprocesses in parallel :)
299304
command_obj.emit_logs(
300-
stream="stdout", custom_logger=lambda x: print(f"[STREAM B]: {x}")
305+
stream="stdout", custom_logger=lambda x: print("[STREAM B]: %s" % x)
301306
),
302307
]
303308
await asyncio.gather(*tasks)

0 commit comments

Comments (0)