-
Notifications
You must be signed in to change notification settings - Fork 1.1k
Expand file tree
/
Copy pathweb.py
More file actions
121 lines (105 loc) · 5.19 KB
/
web.py
File metadata and controls
121 lines (105 loc) · 5.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
try:
    import numpy as np
    import gradio as gr
    import python_coreml_stable_diffusion.pipeline as pipeline
    from diffusers import StableDiffusionPipeline

    def init(args):
        """Build the Core ML Stable Diffusion pipeline and launch a Gradio demo.

        Loads the reference PyTorch pipeline (for tokenizer/scheduler config),
        wraps the compiled .mlpackage models from ``args.i`` into a Core ML
        pipeline via ``pipeline.get_coreml_pipe``, then serves a Blocks UI
        bound to 0.0.0.0. Blocks until the server is stopped.
        """
        pipeline.logger.info("Initializing PyTorch pipe for reference configuration")
        pytorch_pipe = StableDiffusionPipeline.from_pretrained(args.model_version,
                                                               use_auth_token=True)

        # Optional scheduler override; None keeps the pipe's default scheduler.
        user_specified_scheduler = None
        if args.scheduler is not None:
            user_specified_scheduler = pipeline.SCHEDULER_MAP[
                args.scheduler].from_config(pytorch_pipe.scheduler.config)

        coreml_pipe = pipeline.get_coreml_pipe(
            pytorch_pipe=pytorch_pipe,
            mlpackages_dir=args.i,
            model_version=args.model_version,
            compute_unit=args.compute_unit,
            scheduler_override=user_specified_scheduler)

        def infer(prompt, negative_prompt, steps, seed):
            """Generate one image; return it as a one-element list for gr.Gallery."""
            # Gradio sliders deliver floats; np.random.seed only accepts ints.
            seed = int(seed)
            pipeline.logger.info(f"Setting random seed to {seed}")
            np.random.seed(seed)
            pipeline.logger.info("Beginning image generation.")
            image = coreml_pipe(
                prompt=prompt,
                negative_prompt=negative_prompt,
                height=coreml_pipe.height,
                width=coreml_pipe.width,
                num_inference_steps=steps,
            )
            return [image["images"][0]]

        demo = gr.Blocks()
        with demo:
            gr.Markdown(
                "<center><h1>Core ML Stable Diffusion</h1>Run Stable Diffusion on Apple Silicon with Core ML</center>")
            with gr.Group():
                with gr.Box():
                    with gr.Row():
                        with gr.Column():
                            with gr.Row():
                                text = gr.Textbox(
                                    label="Prompt",
                                    lines=11,
                                    placeholder="Enter your prompt",
                                )
                            with gr.Row():
                                n_text = gr.Textbox(
                                    label="Negative_prompt",
                                    lines=11,
                                    placeholder="Enter your negative_prompt",
                                )
                        with gr.Column():
                            with gr.Row():
                                steps = gr.Slider(label="Steps", minimum=1,
                                                  maximum=50, value=10, step=1)
                            with gr.Row():
                                # Upper bound matches np.random.seed's 2**32 - 1 limit.
                                seed = gr.Slider(label="Seed", minimum=0,
                                                 maximum=4294967295, value=0, step=1)
                            with gr.Row():
                                btn = gr.Button("Generate image")
                            gallery = gr.Gallery(
                                label="Generated image", elem_id="gallery"
                            )

            text.submit(infer, inputs=[text, n_text, steps, seed], outputs=gallery)
            btn.click(infer, inputs=[text, n_text, steps, seed], outputs=gallery)

        demo.launch(debug=True, server_name="0.0.0.0")

    if __name__ == "__main__":
        parser = pipeline.argparse.ArgumentParser()
        parser.add_argument(
            "-i",
            required=True,
            help=("Path to input directory with the .mlpackage files generated by "
                  "python_coreml_stable_diffusion.torch2coreml"))
        parser.add_argument(
            "--model-version",
            default="CompVis/stable-diffusion-v1-4",
            help=("The pre-trained model checkpoint and configuration to restore. "
                  "For available versions: https://huggingface.co/models?search=stable-diffusion"))
        parser.add_argument(
            "--compute-unit",
            choices=pipeline.get_available_compute_units(),
            default="ALL",
            help=("The compute units to be used when executing Core ML models. "
                  f"Options: {pipeline.get_available_compute_units()}"))
        parser.add_argument(
            "--scheduler",
            choices=tuple(pipeline.SCHEDULER_MAP.keys()),
            default=None,
            help=("The scheduler to use for running the reverse diffusion process. "
                  "If not specified, the default scheduler from the diffusers pipeline is utilized"))

        args = parser.parse_args()
        init(args)

except ModuleNotFoundError as module_not_found:
    # Bug fix: the original handler blamed (and pip-installed) `gradio` no
    # matter which import actually failed. Only auto-install when gradio is
    # the missing module; otherwise surface the real ModuleNotFoundError.
    if module_not_found.name != "gradio":
        raise
    print("Found that `gradio` is not installed, try to install it automatically")
    try:
        import subprocess
        import sys

        subprocess.check_call([sys.executable, "-m", "pip", "install", "gradio"])
        print("Successfully installed missing package `gradio`.")
        print("Now re-execute the command :D")
    except subprocess.CalledProcessError:
        print("Automatic package installation failed, try manually executing "
              "`pip install gradio`, then retry the command again.")