weathon committed
Commit 5ea5de7 · 1 parent: df75977

store data

Files changed (1)
  1. app.py +56 -2
app.py CHANGED
@@ -34,6 +34,38 @@ nag_pipe = NAGStableDiffusion3Pipeline.from_pretrained(
     torch_dtype=torch.bfloat16,
     token="hf_token",
 )
+
+
+from huggingface_hub import CommitScheduler, InferenceClient
+
+
+IMAGE_DATASET_DIR = Path(f"{uuid4()}")
+IMAGE_DATASET_DIR.mkdir(parents=True, exist_ok=True)
+IMAGE_JSONL_PATH = IMAGE_DATASET_DIR / "metadata.jsonl"
+
+scheduler = CommitScheduler(
+    repo_id="weathon/vsf-log",
+    repo_type="dataset",
+    folder_path=IMAGE_DATASET_DIR,
+    path_in_repo=IMAGE_DATASET_DIR.name,
+)
+from PIL import Image
+import json
+from datetime import datetime
+from pathlib import Path
+from uuid import uuid4
+def save_image(prompt: str, negative_prompt: str, img_vsf: Image, img_nag: Image, parameters: dict) -> None:
+    vsf_image_path = IMAGE_DATASET_DIR / f"{uuid4()}_vsf.png"
+    nag_image_path = IMAGE_DATASET_DIR / f"{uuid4()}_nag.png"
+
+    with scheduler.lock:
+        img_vsf.save(vsf_image_path)
+        img_nag.save(nag_image_path)
+        with IMAGE_JSONL_PATH.open("a") as f:
+            json.dump({"prompt": prompt, "negative_prompt": negative_prompt, "vsf_image_path": str(vsf_image_path), "nag_image_path": str(nag_image_path), "parameters": parameters, "timestamp": datetime.utcnow().isoformat()}, f)
+            f.write("\n")
+
+
 # pipe = pipe.to("cuda")
 # nag_pipe = nag_pipe.to("cuda")
 import os
@@ -69,7 +101,16 @@ def generate_video(positive_prompt, negative_prompt, guidance_scale, bias, step,
     nag_path = f"images/{uuid.uuid4().hex}_nag.png"
     output_nag.save(nag_path)
     print(f"NAG Image saved to {nag_path}")
-
+    save_image(positive_prompt, negative_prompt, output, output_nag, {
+        "guidance_scale": guidance_scale,
+        "bias": bias,
+        "step": step,
+        "seed": seed,
+        "nag_guidance": nag_guidance,
+        "nag_alpha": nag_alpha,
+        "nag_tau": nag_tau,
+        "nag_step": nag_step,
+    })
     return output_path, nag_path
 
 import json
@@ -111,8 +152,21 @@ def load_abstract_prompt():
 # ]
 # )
 
+
+import json
+from datetime import datetime
+from pathlib import Path
+from uuid import uuid4
+
+import gradio as gr
+import numpy as np
+from PIL import Image
+
+
 with gr.Blocks(title="Value Sign Flip SD3.5 Demo") as demo:
-    gr.Markdown("# Value Sign Flip SD3.5 Demo \n\n This demo is based on SD3.5-L-Turbo model and uses Value Sign Flip technique to generate videos with different guidance scales and biases. More on [GitHub](https://github.com/weathon/VSF/blob/main/wan.md)\n\nPositive prompt should be at least 1 sentence long or the results will be weird.")
+    gr.Markdown("# Value Sign Flip SD3.5 Demo \n\n This demo is based on SD3.5-L-Turbo model and uses Value Sign Flip technique to generate videos with different guidance scales and biases. More on [GitHub](https://github.com/weathon/VSF/blob/main/wan.md)\n\nPositive prompt should be at least 1 sentence long or the results will be weird. ")
+    gr.Markdown("To help with further research, all generations will be logged anonymously. If you do not wish to participate, please do not use the demo. Please keep prompts safe for work and non-offensive. ")
+
     # gr.Markdown("# Value Sign Flip Wan 2.1 Demo \n\n This demo is based on Wan 2.1 T2V model and uses Value Sign Flip technique to generate videos with different guidance scales and biases. More on [GitHub](https://github.com/weathon/VSF/blob/main/wan.md)\n\nPositive prompt should be at least 2 sentence long or the results will be weird.")
 
     with gr.Row():
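
For context, the additions follow the `huggingface_hub` `CommitScheduler` pattern for persisting Space outputs to a dataset repo: generated images and a `metadata.jsonl` are written to a local folder while holding `scheduler.lock`, and the scheduler uploads that folder to the dataset on a background interval. Below is a minimal standalone sketch of the same pattern; the repo id, the `every` interval, and the `log_generation` helper name are illustrative placeholders, not taken from this commit.

```python
# Minimal sketch of the CommitScheduler logging pattern used in this commit.
# Assumes a write token is available (e.g. HF_TOKEN or `huggingface-cli login`)
# and that "your-username/your-log-dataset" is replaced with a real dataset repo.
import json
from datetime import datetime, timezone
from pathlib import Path
from uuid import uuid4

from huggingface_hub import CommitScheduler
from PIL import Image

LOG_DIR = Path(f"logs/{uuid4()}")
LOG_DIR.mkdir(parents=True, exist_ok=True)
JSONL_PATH = LOG_DIR / "metadata.jsonl"

# Background job that pushes the contents of LOG_DIR to the dataset repo.
scheduler = CommitScheduler(
    repo_id="your-username/your-log-dataset",  # placeholder repo id
    repo_type="dataset",
    folder_path=LOG_DIR,
    path_in_repo=LOG_DIR.name,
    every=5,  # minutes between background commits
)

def log_generation(prompt: str, image: Image.Image, parameters: dict) -> None:
    """Save one image plus one JSONL record; the lock avoids writing mid-upload."""
    image_path = LOG_DIR / f"{uuid4()}.png"
    with scheduler.lock:
        image.save(image_path)
        with JSONL_PATH.open("a") as f:
            record = {
                "prompt": prompt,
                "image_path": str(image_path),
                "parameters": parameters,
                "timestamp": datetime.now(timezone.utc).isoformat(),
            }
            f.write(json.dumps(record) + "\n")

if __name__ == "__main__":
    log_generation("a red cube on a table", Image.new("RGB", (64, 64), "red"), {"guidance_scale": 0.0})
```

The scheduler batches files into periodic commits instead of pushing one commit per generation, and taking `scheduler.lock` around the writes keeps a half-written image or JSONL line from being uploaded mid-commit.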