Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -2,11 +2,36 @@ import gradio as gr
 from gradio_client import Client
 import os
 from dotenv import load_dotenv
+import torch
 
 # Load environment variables
 load_dotenv()
 HF_TOKEN = os.getenv("HF_TOKEN")
 
+# Check CUDA availability and set device
+DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
+
+def generate_diagram(prompt, width=1024, height=1024):
+    """Generate a diagram using FLUX AI"""
+    try:
+        client = Client(
+            "black-forest-labs/FLUX.1-schnell",
+            hf_token=HF_TOKEN,
+        )
+        result = client.predict(
+            prompt,
+            1872187377,  # seed
+            False,  # randomize_seed
+            width,
+            height,
+            4,  # num_inference_steps
+            api_name="/infer"
+        )
+        return result
+    except Exception as e:
+        raise gr.Error(f"Error generating diagram: {str(e)}")
+
+
 # Enhanced examples with more detailed prompts and specific styling
 EXAMPLES = [
     {
@@ -202,22 +227,7 @@ EXAMPLES = [
     }
 ]
 
-
-    """Generate a diagram using FLUX AI"""
-    try:
-        client = Client("black-forest-labs/FLUX.1-schnell", hf_token=HF_TOKEN)
-        result = client.predict(
-            prompt,
-            1872187377,  # seed
-            False,  # randomize_seed
-            width,
-            height,
-            4,  # num_inference_steps
-            api_name="/infer"
-        )
-        return result
-    except Exception as e:
-        raise gr.Error(f"Error generating diagram: {str(e)}")
+
 
 # Convert examples to Gradio format
 GRADIO_EXAMPLES = [
@@ -251,61 +261,39 @@ demo = gr.Interface(
     ],
     outputs=gr.Image(type="filepath", label="Generated Diagram"),
     title="🎨 FLUX Diagram Generator",
-    description="
-    Create intricate mind maps with custom themes and detailed iconography.""",
+    description="Generate beautiful hand-drawn style diagrams using FLUX AI",
     article="""
     ### Tips for Better Results
-    - Use
-    - Include
-    -
-    - Use consistent
-    - Combine both functional and decorative elements
+    - Use clear hierarchical structures
+    - Include icon descriptions in brackets
+    - Keep text concise and meaningful
+    - Use consistent formatting
 
     ### Template Structure
     ```
     MAIN TOPIC
-    ├──
-
-
-
-
-    │   └── SUB-D [Custom Icon]
-    └── TERTIARY
-        ├── SUB-E [Dynamic Icon]
-        └── SUB-F [Unique Icon]
+    ├── SUBTOPIC 1 [Icon]
+    ├── SUBTOPIC 2 [Icon]
+    └── SUBTOPIC 3
+        ├── DETAIL 1 [Icon]
+        └── DETAIL 2 [Icon]
     ```
     """,
-    examples=
+    examples=EXAMPLES,
     cache_examples=True,
-    theme=gr.themes.Soft()
-    css="""
-    .gradio-container {
-        font-family: 'Inter', sans-serif;
-    }
-    .gr-prose {
-        max-width: 850px;
-        margin: auto;
-    }
-    .gr-prose code {
-        background-color: #f3f4f6;
-        padding: 0.2em 0.4em;
-        border-radius: 0.3em;
-        font-size: 0.95em;
-    }
-    .gr-prose h3 {
-        margin-top: 1.5em;
-        color: #2563eb;
-    }
-    """
+    theme=gr.themes.Soft()
 )
 
-# Launch app
+# Launch app with error handling
 if __name__ == "__main__":
-
-
-
-
-
-
-
-
+    try:
+        demo.queue(max_size=10)
+        demo.launch(
+            server_name="0.0.0.0",
+            server_port=7860,
+            share=False,
+            show_error=True,
+            enable_queue=True
+        )
+    except Exception as e:
+        print(f"Error launching app: {str(e)}")