Test with gallery on all but main model
app.py CHANGED
@@ -51,11 +51,11 @@ def run_xlnc(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pa
         seed = random.randint(0, 9007199254740991)
 
     generator = torch.Generator(device="cuda").manual_seed(seed)
-    image = pipe_xlnc(prompt, negative_prompt=negative_prompt, guidance_scale=guidance_scale, pag_scale=pag_scale, pag_applied_layers=pag_layers, generator=generator, num_inference_steps=25).images
+    image = pipe_xlnc(prompt, negative_prompt=negative_prompt, guidance_scale=guidance_scale, pag_scale=pag_scale, pag_applied_layers=pag_layers, generator=generator, num_inference_steps=25, num_images_per_prompt=4).images
 
     return image, seed
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=10)
 def run_sc(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
     if(randomize_seed):
         seed = random.randint(0, 9007199254740991)

@@ -65,7 +65,7 @@ def run_sc(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_
 
     return image, seed
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=10)
 def run_snc(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
     if(randomize_seed):
         seed = random.randint(0, 9007199254740991)

@@ -114,7 +114,7 @@ with gr.Blocks(css=css, theme=theme) as demo:
     with gr.Row():
         prompt_sc = gr.Textbox(show_label=False, scale=4, placeholder="Your prompt for S-C")
         button_sc = gr.Button("Generate", min_width=120)
-    output = gr.
+    output = gr.Gallery(label="Your result", interactive=False)
     with gr.Accordion("Advanced Settings", open=False):
         guidance_scale = gr.Number(label="CFG Guidance Scale", info="The guidance scale for CFG, ignored if no prompt is entered (unconditional generation)", value=7.0)
         negative_prompt = gr.Textbox(label="Negative prompt", info="Is only applied for the CFG part, leave blank for unconditional generation")
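
For reference, the pattern this commit moves toward (a multi-image diffusers PAG call returning .images into a gr.Gallery, with the handler wrapped in @spaces.GPU) looks roughly like the sketch below. The model id, component names, and event wiring here are illustrative assumptions, not the Space's actual code.

# Minimal sketch of the gallery pattern in this commit. Assumptions: model id,
# variable names, and layout are placeholders, not taken from this Space.
import random

import gradio as gr
import spaces
import torch
from diffusers import AutoPipelineForText2Image

# Hypothetical SDXL pipeline with Perturbed-Attention Guidance enabled.
# (The Space passes pag_applied_layers per call; here it is fixed at load time.)
pipe = AutoPipelineForText2Image.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",
    torch_dtype=torch.float16,
    enable_pag=True,
    pag_applied_layers=["mid"],
).to("cuda")


@spaces.GPU(duration=10)  # ZeroGPU: expected max runtime in seconds, as in the diff
def generate(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0,
             randomize_seed=True, seed=42):
    if randomize_seed:
        seed = random.randint(0, 9007199254740991)
    generator = torch.Generator(device="cuda").manual_seed(seed)
    # num_images_per_prompt=4 makes .images a list of four PIL images,
    # which gr.Gallery can display directly.
    images = pipe(
        prompt,
        negative_prompt=negative_prompt,
        guidance_scale=guidance_scale,
        pag_scale=pag_scale,
        generator=generator,
        num_inference_steps=25,
        num_images_per_prompt=4,
    ).images
    return images, seed


with gr.Blocks() as demo:
    prompt = gr.Textbox(show_label=False, placeholder="Your prompt")
    button = gr.Button("Generate")
    output = gr.Gallery(label="Your result", interactive=False)
    seed_out = gr.Number(label="Seed", interactive=False)
    button.click(generate, inputs=[prompt], outputs=[output, seed_out])

demo.launch()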