Skip to content

Commit ac03b63

Browse files
committed
fix: ruff ci
Signed-off-by: Huang, Zeyu <11222265+fhfuih@users.noreply.github.com>
1 parent 3efa9ad commit ac03b63

File tree

6 files changed

+18
-7
lines changed

6 files changed

+18
-7
lines changed

apps/ComfyUI-vLLM-Omni/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Top-level package for comfyui_vllm_omni."""
1+
"""Top-level package for comfyui_vllm_omni.""" # noqa: N999 # This is not a python library intended to be imported
22

33
__all__ = [
44
"NODE_CLASS_MAPPINGS",
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# noqa: N999 # This is not a python library intended to be imported

apps/ComfyUI-vLLM-Omni/comfyui_vllm_omni/nodes.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -408,7 +408,8 @@ def INPUT_TYPES(cls):
408408
"FLOAT",
409409
{"default": 1.0, "min": 0.0, "max": 5.0, "step": 0.01},
410410
),
411-
# === Put seed at last. Whenever a field named "seed" is present, ComfyUI adds another field called "control after generate" ===
411+
# === Put seed at last. ===
412+
# Whenever a field named "seed" is present, ComfyUI adds another field called "control after generate"
412413
"seed": (
413414
"INT",
414415
{
@@ -487,7 +488,8 @@ def INPUT_TYPES(cls):
487488
"tooltip": "Enable VAE slicing for reduced memory usage (slight quality trade-off)",
488489
},
489490
),
490-
# === Put seed at last. Whenever a field named "seed" is present, ComfyUI adds another field called "control after generate" ===
491+
# === Put seed at last. ===
492+
# Whenever a field named "seed" is present, ComfyUI adds another field called "control after generate"
491493
"seed": (
492494
"INT",
493495
{
@@ -539,7 +541,8 @@ def aggregate(
539541
for i, p in enumerate((param1, param2, param3)):
540542
if isinstance(p, list):
541543
raise ValueError(
542-
f"Input {i} is a Multi-Stage Sampling Params List. Expected a single sampling parameters node (either AR or Diffusion)."
544+
f"Input {i} is a Multi-Stage Sampling Params List. "
545+
"Expected a single sampling parameters node (either AR or Diffusion)."
543546
)
544547

545548
params = [param1]

apps/ComfyUI-vLLM-Omni/comfyui_vllm_omni/utils/api_client.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -398,7 +398,8 @@ async def _check_model_exist(self, model: str):
398398
if not response.ok:
399399
error_text = await response.text()
400400
raise (ValueError if response.status < 500 else RuntimeError)(
401-
f"vLLM-Omni API returned status {response.status} when getting hosted model list: {error_text}"
401+
f"vLLM-Omni API returned status {response.status} "
402+
f"when getting hosted model list: {error_text}"
402403
)
403404

404405
try:

apps/ComfyUI-vLLM-Omni/comfyui_vllm_omni/utils/logger.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,10 @@ def _format(
9090
"messages": [
9191
{
9292
"role": "system",
93-
"content": "You are Qwen, a virtual human developed by the Qwen Team, Alibaba Group, capable of perceiving auditory and visual inputs, as well as generating text and speech.",
93+
"content": (
94+
"You are Qwen, a virtual human developed by the Qwen Team, Alibaba Group, "
95+
"capable of perceiving auditory and visual inputs, as well as generating text and speech."
96+
),
9497
},
9598
{
9699
"role": "user",

apps/ComfyUI-vLLM-Omni/comfyui_vllm_omni/utils/models.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,10 @@ def _qwen25_payload_preprocessor(payload: dict) -> dict:
2121
payload["messages"] = [
2222
{
2323
"role": "system",
24-
"content": "You are Qwen, a virtual human developed by the Qwen Team, Alibaba Group, capable of perceiving auditory and visual inputs, as well as generating text and speech.",
24+
"content": (
25+
"You are Qwen, a virtual human developed by the Qwen Team, Alibaba Group, "
26+
"capable of perceiving auditory and visual inputs, as well as generating text and speech."
27+
),
2528
},
2629
*payload["messages"],
2730
]

0 commit comments

Comments
 (0)