cutechicken committed on
Commit
7deb5c5
·
verified ·
1 Parent(s): 0e83d42

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +400 -48
app.py CHANGED
@@ -1,58 +1,410 @@
1
  import gradio as gr
 
 
 
 
 
2
  from datetime import datetime
3
- import random
 
 
 
4
 
5
- # ๊ฐ„๋‹จํ•œ ์ž‘์—… ๋ชฉ๋ก
6
- jobs = []
 
7
 
8
- def submit_job(prompt):
9
- """์ž‘์—… ์ œ์ถœ"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  if not prompt:
11
- return "ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
12
 
13
- job_id = f"JOB-{random.randint(1000, 9999)}"
14
- jobs.append({
15
  "id": job_id,
16
  "prompt": prompt,
17
- "time": datetime.now().strftime("%H:%M:%S")
18
- })
19
-
20
- return f"โœ… ์ž‘์—… ์ œ์ถœ๋จ: {job_id}\nํ”„๋กฌํ”„ํŠธ: {prompt}"
21
-
22
- def get_status():
23
- """์ƒํƒœ ํ™•์ธ"""
24
- if not jobs:
25
- return "์•„์ง ์ž‘์—…์ด ์—†์Šต๋‹ˆ๋‹ค."
26
-
27
- status = f"์ „์ฒด ์ž‘์—…: {len(jobs)}๊ฐœ\n\n์ตœ๊ทผ ์ž‘์—…:\n"
28
- for job in jobs[-5:]:
29
- status += f"- {job['id']}: {job['prompt'][:30]}... ({job['time']})\n"
30
-
31
- return status
32
-
33
- # Gradio ์ธํ„ฐํŽ˜์ด์Šค
34
- demo = gr.Interface(
35
- fn=submit_job,
36
- inputs=gr.Textbox(label="์ด๋ฏธ์ง€ ํ”„๋กฌํ”„ํŠธ", placeholder="์›ํ•˜๋Š” ์ด๋ฏธ์ง€๋ฅผ ์„ค๋ช…ํ•˜์„ธ์š”..."),
37
- outputs=gr.Textbox(label="๊ฒฐ๊ณผ"),
38
- title="P2P GPU ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ—ˆ๋ธŒ",
39
- description="๋ถ„์‚ฐ GPU๋ฅผ ํ™œ์šฉํ•œ ์ด๋ฏธ์ง€ ์ƒ์„ฑ ์‹œ์Šคํ…œ"
40
- )
41
-
42
- # ์ƒํƒœ ํ™•์ธ์šฉ ๋‘ ๋ฒˆ์งธ ์ธํ„ฐํŽ˜์ด์Šค
43
- status_demo = gr.Interface(
44
- fn=get_status,
45
- inputs=None,
46
- outputs=gr.Textbox(label="์‹œ์Šคํ…œ ์ƒํƒœ"),
47
- title="์‹œ์Šคํ…œ ์ƒํƒœ"
48
- )
49
-
50
- # ํƒญ์œผ๋กœ ๊ฒฐํ•ฉ
51
- app = gr.TabbedInterface(
52
- [demo, status_demo],
53
- ["์ž‘์—… ์ œ์ถœ", "์‹œ์Šคํ…œ ์ƒํƒœ"]
54
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
 
 
56
  if __name__ == "__main__":
57
- app.launch(server_name="0.0.0.0", server_port=7860)
58
-
 
1
  import gradio as gr
2
+ from fastapi import FastAPI, UploadFile, File, Request
3
+ from fastapi.responses import FileResponse, HTMLResponse
4
+ import uuid
5
+ import os
6
+ import json
7
  from datetime import datetime
8
+ from typing import Dict, List
9
+ import shutil
10
+ import asyncio
11
+ from contextlib import asynccontextmanager
12
 
13
# In-memory stores — no persistence; everything resets when the process restarts.
peers: Dict[str, Dict] = {}  # peer_id -> {"status", "last_seen", "jobs_completed"}
jobs: List[Dict] = []  # job dicts: {"id", "prompt", "status", "created_at", ...}

# Create directories
os.makedirs("results", exist_ok=True)  # uploaded result images
os.makedirs("client", exist_ok=True)   # downloadable client bundle
20
+
21
# Client code: a self-contained worker script written to disk below so peers
# can download it from this Space. NOTE: everything inside the triple-quoted
# string is the CLIENT's code, executed on the peer machine, not here.
CLIENT_CODE = '''import requests
import subprocess
import time
import os
import sys
from datetime import datetime

# Configuration
PEER_ID = f"peer-{os.getenv('COMPUTERNAME', 'unknown')}-{datetime.now().strftime('%Y%m%d%H%M%S')}"
SERVER_URL = "https://your-username-your-space.hf.space" # Replace with actual Space URL

def check_gpu():
    """Check GPU availability"""
    try:
        result = subprocess.run(['nvidia-smi', '--query-gpu=utilization.gpu', '--format=csv,noheader,nounits'],
                                capture_output=True, text=True)
        if result.returncode == 0:
            gpu_usage = int(result.stdout.strip())
            return gpu_usage < 20 # GPU is idle if usage < 20%
    except:
        print("GPU not found. Running in CPU mode.")
    return False

def register_peer():
    """Register peer with server"""
    try:
        response = requests.post(f"{SERVER_URL}/api/peers/register", params={"peer_id": PEER_ID})
        if response.status_code == 200:
            print(f"✅ Peer registered: {PEER_ID}")
            return True
    except Exception as e:
        print(f"❌ Server connection failed: {e}")
    return False

def generate_image_cpu(prompt, output_path):
    """Generate test image using CPU"""
    from PIL import Image, ImageDraw, ImageFont

    img = Image.new('RGB', (512, 512), color='white')
    draw = ImageDraw.Draw(img)

    # Draw prompt text
    text = f"Prompt: {prompt[:50]}..."
    draw.text((10, 10), text, fill='black')
    draw.text((10, 40), f"Generated by: {PEER_ID}", fill='gray')
    draw.text((10, 70), f"Time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", fill='gray')

    img.save(output_path)
    print(f"📁 Test image generated: {output_path}")

def main():
    print("🚀 Starting P2P GPU Client...")

    if not register_peer():
        print("Server registration failed. Exiting.")
        return

    while True:
        try:
            # Heartbeat
            requests.post(f"{SERVER_URL}/api/peers/heartbeat", params={"peer_id": PEER_ID})

            # Request job
            response = requests.get(f"{SERVER_URL}/api/jobs/request", params={"peer_id": PEER_ID})
            if response.status_code == 200:
                job_data = response.json()

                if job_data.get("job"):
                    job = job_data["job"]
                    job_id = job["id"]
                    prompt = job["prompt"]

                    print(f"\\n📋 New job received: {prompt}")

                    # Generate image
                    output_path = f"{job_id}.png"

                    if check_gpu():
                        print("🎮 Generating with GPU...")
                        # Actual GPU generation code would go here
                        generate_image_cpu(prompt, output_path)
                    else:
                        print("💻 Generating with CPU...")
                        generate_image_cpu(prompt, output_path)

                    # Upload result
                    with open(output_path, 'rb') as f:
                        files = {'file': (output_path, f, 'image/png')}
                        response = requests.post(
                            f"{SERVER_URL}/api/jobs/result",
                            params={"job_id": job_id},
                            files=files
                        )

                    if response.status_code == 200:
                        print("✅ Result uploaded successfully")

                    # Clean up
                    os.remove(output_path)

            time.sleep(10) # Check every 10 seconds

        except KeyboardInterrupt:
            print("\\n👋 Shutting down")
            break
        except Exception as e:
            print(f"⚠️ Error: {e}")
            time.sleep(30)

if __name__ == "__main__":
    # Check required packages
    try:
        import PIL
    except ImportError:
        print("Installing required packages...")
        subprocess.run([sys.executable, "-m", "pip", "install", "pillow", "requests"])

    main()
'''
141
+
142
# Create client files: write the downloadable bundle (script, requirements,
# README) into "client/" at import time; served by the /api/client endpoint.
with open("client/peer_agent.py", "w", encoding="utf-8") as f:
    f.write(CLIENT_CODE)

with open("client/requirements.txt", "w") as f:
    f.write("requests\npillow\n")

with open("client/README.md", "w", encoding="utf-8") as f:
    f.write("""# P2P GPU Client for Windows

## Installation
1. Install Python 3.8+
2. Run `pip install -r requirements.txt`
3. Update SERVER_URL in `peer_agent.py` with actual Hugging Face Space URL
4. Run `python peer_agent.py`

## GPU Support
- Automatically detects NVIDIA GPU if available
- Falls back to CPU mode for testing
""")
162
+
163
# FastAPI app with lifespan
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Log startup/shutdown around the app's lifetime (no real resources yet)."""
    # Startup
    print("Starting P2P GPU Hub...")
    yield
    # Shutdown
    print("Shutting down P2P GPU Hub...")

app = FastAPI(lifespan=lifespan)
173
+
174
# API endpoints
@app.get("/api/status")
async def get_status():
    """Return a system-status summary.

    Counts peers whose last heartbeat is under 60 seconds old, tallies
    pending/completed jobs, and lists up to the 10 most recent completed
    results as {"filename", "prompt"} dicts.
    """
    now = datetime.now()
    # BUG FIX: timedelta.seconds wraps around every 24 h (a peer silent for
    # exactly one day would look "active" again); use total_seconds().
    active_peers = sum(
        1 for p in peers.values()
        if (now - p['last_seen']).total_seconds() < 60
    )
    pending_jobs = sum(1 for j in jobs if j['status'] == 'pending')
    completed_jobs = sum(1 for j in jobs if j['status'] == 'completed')

    recent_results = [
        {"filename": j['filename'], "prompt": j['prompt']}
        for j in jobs[-10:] if j['status'] == 'completed' and 'filename' in j
    ]

    return {
        "active_peers": active_peers,
        "pending_jobs": pending_jobs,
        "completed_jobs": completed_jobs,
        "recent_results": recent_results
    }
194
+
195
@app.post("/api/peers/register")
async def register_peer(peer_id: str):
    """Create (or reset) the bookkeeping record for a peer node."""
    # Re-registration simply overwrites any previous record for this id.
    peers[peer_id] = dict(status="idle", last_seen=datetime.now(), jobs_completed=0)
    return {"status": "registered", "peer_id": peer_id}
204
+
205
@app.post("/api/peers/heartbeat")
async def heartbeat(peer_id: str):
    """Refresh a registered peer's last-seen timestamp."""
    record = peers.get(peer_id)
    if record is None:
        # Unknown peers are told to (re-)register.
        return {"status": "unregistered"}
    record["last_seen"] = datetime.now()
    return {"status": "alive"}
212
+
213
@app.post("/api/jobs/submit")
async def submit_job(request: Request):
    """Queue a new generation job from a JSON body ({"prompt": ...})."""
    payload = await request.json()
    new_job = {
        "id": str(uuid.uuid4()),
        "prompt": payload.get("prompt", ""),
        "status": "pending",
        "created_at": datetime.now()
    }
    jobs.append(new_job)
    return {"job_id": new_job["id"], "status": "submitted"}
226
+
227
@app.get("/api/jobs/request")
async def request_job(peer_id: str):
    """Hand the oldest pending job to the requesting peer, or {"job": None}."""
    candidate = next((j for j in jobs if j["status"] == "pending"), None)
    if candidate is None:
        return {"job": None}
    # Mark the job as taken before returning it to the caller.
    candidate["status"] = "assigned"
    candidate["peer_id"] = peer_id
    candidate["assigned_at"] = datetime.now()
    return {"job": candidate}
238
+
239
@app.post("/api/jobs/result")
async def submit_result(job_id: str, file: UploadFile = File(...)):
    """Store an uploaded result image and mark its job completed.

    The uploaded bytes are saved as results/<job_id>.png; the matching job
    gets status/filename/completed_at set and the producing peer's counter
    is incremented.
    """
    # job_id comes from an untrusted client; strip any path components so it
    # cannot escape the results/ directory.
    filename = f"{os.path.basename(job_id)}.png"
    # BUG FIX: the path was the literal string "results/(unknown)" instead of
    # interpolating the filename, so every upload clobbered the same file and
    # downloads by filename could never succeed.
    file_path = f"results/{filename}"

    with open(file_path, "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)

    for job in jobs:
        if job["id"] == job_id:
            job["status"] = "completed"
            job["filename"] = filename
            job["completed_at"] = datetime.now()

            # Credit the peer that produced the result, if it is registered.
            if "peer_id" in job and job["peer_id"] in peers:
                peers[job["peer_id"]]["jobs_completed"] += 1
            break

    return {"status": "success", "filename": filename}
259
+
260
# BUG FIX: the route was the literal "/api/results/(unknown)" (a garbled
# "{filename}" placeholder), so FastAPI never bound the path parameter and
# the lookup path was constant. Restore the {filename} parameter.
@app.get("/api/results/{filename}")
async def get_result(filename: str):
    """Serve a generated image from the results/ directory by filename."""
    # Untrusted input: keep only the basename so it cannot traverse upward.
    file_path = f"results/{os.path.basename(filename)}"
    if os.path.exists(file_path):
        return FileResponse(file_path)
    return {"error": "File not found"}
267
+
268
# BUG FIX: same garbled "(unknown)" placeholder as the results route —
# restore the {filename} path parameter so client files are downloadable.
@app.get("/api/client/{filename}")
async def get_client_file(filename: str):
    """Download one of the pre-generated client bundle files."""
    # Untrusted input: keep only the basename so it cannot traverse upward.
    file_path = f"client/{os.path.basename(filename)}"
    if os.path.exists(file_path):
        return FileResponse(file_path, filename=filename)
    return {"error": "File not found"}
275
+
276
# Gradio interface functions
def gradio_submit_job(prompt):
    """Queue a job from the Gradio UI and return a user-facing message."""
    if not prompt:
        return "Please enter a prompt"

    new_id = str(uuid.uuid4())
    jobs.append({
        "id": new_id,
        "prompt": prompt,
        "status": "pending",
        "created_at": datetime.now()
    })
    return f"Job submitted successfully! Job ID: {new_id}"
291
+
292
def gradio_get_status():
    """Render the current system status as Markdown for the Gradio UI."""
    now = datetime.now()
    # BUG FIX: timedelta.seconds wraps around every 24 h; use total_seconds()
    # so peers silent for more than a day are not counted as active
    # (same fix as the /api/status endpoint).
    active_peers = sum(1 for p in peers.values()
                       if (now - p['last_seen']).total_seconds() < 60)
    pending = sum(1 for j in jobs if j['status'] == 'pending')
    completed = sum(1 for j in jobs if j['status'] == 'completed')

    status_text = f"""### System Status
- Active Peers: {active_peers}
- Pending Jobs: {pending}
- Completed Jobs: {completed}

### Recent Jobs
"""

    # Append the five most recent jobs, newest first.
    recent_jobs = jobs[-5:][::-1]
    for job in recent_jobs:
        status_text += f"\n- **{job['id'][:8]}...**: {job['prompt'][:50]}... ({job['status']})"

    return status_text
313
+
314
def gradio_get_gallery():
    """Collect (image_path, caption) pairs for recently completed jobs."""
    gallery_items = []
    for job in jobs[-20:]:  # only consider the 20 most recent jobs
        if job['status'] != 'completed' or 'filename' not in job:
            continue
        path = f"results/{job['filename']}"
        # Skip entries whose file has disappeared from disk.
        if os.path.exists(path):
            gallery_items.append((path, job['prompt']))
    return gallery_items
324
+
325
# Create Gradio interface: four tabs (submit, status, gallery, client
# download) wired to the gradio_* helpers above, then mounted onto the
# FastAPI app at "/" so UI and REST API share one server.
with gr.Blocks(title="P2P GPU Image Generation Hub") as demo:
    gr.Markdown("# 🤖 P2P GPU Image Generation Hub")
    gr.Markdown("Distributed image generation using idle GPUs from peer nodes")

    with gr.Tabs():
        with gr.Tab("Submit Job"):
            with gr.Row():
                with gr.Column():
                    prompt_input = gr.Textbox(
                        label="Image Prompt",
                        placeholder="Describe the image you want to generate...",
                        lines=3
                    )
                    submit_btn = gr.Button("Submit Job", variant="primary")
                    result_text = gr.Textbox(label="Result", interactive=False)

                    submit_btn.click(
                        fn=gradio_submit_job,
                        inputs=prompt_input,
                        outputs=result_text
                    )

        with gr.Tab("System Status"):
            status_display = gr.Markdown()
            refresh_btn = gr.Button("Refresh Status")

            refresh_btn.click(
                fn=gradio_get_status,
                outputs=status_display
            )

            # Auto-refresh status on load
            demo.load(fn=gradio_get_status, outputs=status_display)

        with gr.Tab("Gallery"):
            gallery = gr.Gallery(
                label="Generated Images",
                show_label=True,
                elem_id="gallery",
                columns=3,
                rows=2,
                height="auto"
            )
            refresh_gallery_btn = gr.Button("Refresh Gallery")

            refresh_gallery_btn.click(
                fn=gradio_get_gallery,
                outputs=gallery
            )

            # Auto-load gallery on tab load
            demo.load(fn=gradio_get_gallery, outputs=gallery)

        with gr.Tab("Download Client"):
            gr.Markdown("""
## Windows Client Setup

1. Download the client files:
   - [peer_agent.py](/api/client/peer_agent.py)
   - [requirements.txt](/api/client/requirements.txt)
   - [README.md](/api/client/README.md)

2. Install Python 3.8 or higher

3. Install requirements:
```bash
pip install -r requirements.txt
```

4. Update the SERVER_URL in peer_agent.py with this Space's URL

5. Run the client:
```bash
python peer_agent.py
```

The client will automatically detect GPU availability and start processing jobs.
""")

# Mount Gradio app to FastAPI
app = gr.mount_gradio_app(app, demo, path="/")
407
 
408
# For Hugging Face Spaces
if __name__ == "__main__":
    # NOTE(review): demo.launch() starts a standalone Gradio server; the
    # FastAPI routes mounted on `app` (/api/...) are not part of it, so the
    # peer client's REST calls would 404 when launched this way. Consider
    # serving the combined app via `uvicorn.run(app, host="0.0.0.0",
    # port=7860)` instead — confirm behavior on the target Spaces runtime.
    demo.launch()