linoyts HF Staff committed on
Commit
139be99
·
verified ·
1 Parent(s): 7cf4b81

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -8
app.py CHANGED
@@ -61,7 +61,7 @@ def infer(prompt,
61
  with torch.inference_mode():
62
  # 1. Create a prompt to generate an initial image
63
  output = vlm_pipe(prompt=prompt)
64
- json_prompt_generate = output.values["json_prompt"]
65
 
66
 
67
  image = pipe(prompt=json_prompt,
@@ -73,7 +73,7 @@ def infer(prompt,
73
 
74
 
75
 
76
- return image
77
 
78
  css = """
79
  #col-container{
@@ -142,12 +142,12 @@ with gr.Blocks(css=css) as demo:
142
  submit_btn = gr.Button("Generate")
143
  result = gr.Image(label="output")
144
 
145
- prompt_in.change(
146
- handle_json,
147
- inputs=prompt_in,
148
- outputs=prompt_in_json)
149
 
150
- negative_prompt.change(handle_json, inputs=negative_prompt, outputs=negative_prompt_json)
151
 
152
  submit_btn.click(
153
  fn = infer,
@@ -162,7 +162,7 @@ with gr.Blocks(css=css) as demo:
162
  num_inference_steps,
163
  ],
164
  outputs = [
165
- result
166
  ]
167
  )
168
  demo.queue().launch()
 
61
  with torch.inference_mode():
62
  # 1. Create a prompt to generate an initial image
63
  output = vlm_pipe(prompt=prompt)
64
+ json_prompt = output.values["json_prompt"]
65
 
66
 
67
  image = pipe(prompt=json_prompt,
 
73
 
74
 
75
 
76
+ return image, json_prompt
77
 
78
  css = """
79
  #col-container{
 
142
  submit_btn = gr.Button("Generate")
143
  result = gr.Image(label="output")
144
 
145
+ # prompt_in.change(
146
+ # handle_json,
147
+ # inputs=prompt_in,
148
+ # outputs=prompt_in_json)
149
 
150
+ # negative_prompt.change(handle_json, inputs=negative_prompt, outputs=negative_prompt_json)
151
 
152
  submit_btn.click(
153
  fn = infer,
 
162
  num_inference_steps,
163
  ],
164
  outputs = [
165
+ result, prompt_in_json
166
  ]
167
  )
168
  demo.queue().launch()