linoyts HF Staff committed on
Commit fc3d081 · verified · 1 Parent(s): dea1d11

Update app.py

Files changed (1)
  1. app.py +18 -1
app.py CHANGED
@@ -30,6 +30,7 @@ def get_default_negative_prompt(existing_json: dict) -> str:
 def infer(
     prompt,
     prompt_refine,
+    prompt_inspire_image,
     prompt_in_json,
     negative_prompt="",
     seed=42,
@@ -52,6 +53,12 @@ def infer(
             else str(prompt_in_json)
         )
         output = vlm_pipe(json_prompt=json_prompt_str, prompt=prompt_refine)
+
+    elif mode == "inspire":
+        if prompt_inspire_image is None:
+            raise ValueError("Please upload an image to inspire the model.")
+        output = vlm_pipe(image=prompt_inspire_image, prompt="")
+
     else:
         output = vlm_pipe(prompt=prompt)
     json_prompt = output.values["json_prompt"]
@@ -70,7 +77,7 @@ def infer(
         height=height,
         guidance_scale=guidance_scale,
     ).images[0]
-
+    print(neg_json_prompt)
     return image, seed, json_prompt, neg_json_prompt


@@ -94,6 +101,14 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(primary_hue="violet")) as demo:
             with gr.Row():
                 prompt_refine = gr.Textbox(label="Prompt", info="describe the change you want to make", placeholder="make the cat white")

+        with gr.Tab("inspire") as tab_inspire:
+            with gr.Row():
+                prompt_inspire_image = gr.Image(
+                    label="Inspiration Image",
+                    type="pil",
+                    tool=None
+                )
+
         submit_btn = gr.Button("Generate", variant="primary")
         result = gr.Image(label="output")
         with gr.Accordion("Structured Prompt", open=False):
@@ -118,12 +133,14 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(primary_hue="violet")) as demo:

     tab_generate.select(lambda: "generate", outputs=current_mode)
     tab_refine.select(lambda: "refine", outputs=current_mode)
+    tab_inspire.select(lambda: "inspire", outputs=current_mode)

     submit_btn.click(
         fn=infer,
         inputs=[
             prompt_generate,
             prompt_refine,
+            prompt_inspire_image,
             prompt_in_json,
             negative_prompt,
             seed,
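
Read together, the hunks add an "inspire" tab whose uploaded image is passed to vlm_pipe instead of a text prompt, with the active tab recorded in the current_mode component and (presumably, via the truncated tail of the inputs list) forwarded to infer for dispatch. The sketch below is a minimal, self-contained illustration of that wiring, not the app's actual code: fake_vlm_pipe is an invented stub, only the inputs relevant to mode selection are kept, current_mode is assumed to be a gr.State, and the tool=None argument to gr.Image is omitted because recent Gradio releases no longer accept it.

import gradio as gr

def fake_vlm_pipe(prompt=None, json_prompt=None, image=None):
    # Stand-in for the real VLM pipeline: just reports which path was taken.
    if image is not None:
        return "structured prompt derived from the uploaded image"
    if json_prompt is not None:
        return f"refined structured prompt for: {prompt}"
    return f"structured prompt generated from: {prompt}"

def infer(prompt, prompt_refine, prompt_inspire_image, mode):
    # Three-way dispatch mirroring the diff's generate / refine / inspire modes.
    if mode == "refine":
        return fake_vlm_pipe(json_prompt="{...}", prompt=prompt_refine)
    elif mode == "inspire":
        if prompt_inspire_image is None:
            raise gr.Error("Please upload an image to inspire the model.")
        return fake_vlm_pipe(image=prompt_inspire_image, prompt="")
    return fake_vlm_pipe(prompt=prompt)

with gr.Blocks() as demo:
    current_mode = gr.State("generate")  # assumed equivalent of app.py's current_mode
    with gr.Tab("generate") as tab_generate:
        prompt_generate = gr.Textbox(label="Prompt")
    with gr.Tab("refine") as tab_refine:
        prompt_refine = gr.Textbox(label="Prompt")
    with gr.Tab("inspire") as tab_inspire:
        # tool=None from the diff is dropped here; Gradio 4+ removed that argument.
        prompt_inspire_image = gr.Image(label="Inspiration Image", type="pil")
    submit_btn = gr.Button("Generate", variant="primary")
    result = gr.Textbox(label="structured prompt")

    # Each tab click records which mode infer should dispatch on.
    tab_generate.select(lambda: "generate", outputs=current_mode)
    tab_refine.select(lambda: "refine", outputs=current_mode)
    tab_inspire.select(lambda: "inspire", outputs=current_mode)

    submit_btn.click(
        fn=infer,
        inputs=[prompt_generate, prompt_refine, prompt_inspire_image, current_mode],
        outputs=result,
    )

if __name__ == "__main__":
    demo.launch()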