learnmlf committed
Commit f19ab94 · 1 Parent(s): a020cd9
Files changed (1):
  1. app.py +23 -6
app.py CHANGED
@@ -1,7 +1,19 @@
-import spaces # ZeroGPU support - MUST be imported first!
+# Conditional import for ZeroGPU support
+import os
+if os.environ.get("SPACES_ZERO_GPU") is not None:
+    import spaces
+else:
+    # Create a dummy spaces decorator for non-ZeroGPU environments
+    class spaces:
+        @staticmethod
+        def GPU(*decorator_args, **decorator_kwargs):
+            def decorator(func):
+                def wrapper(*args, **kwargs):
+                    return func(*args, **kwargs)
+                return wrapper
+            return decorator
 
 import gradio as gr
-import os
 import numpy as np
 import yaml
 import cv2
@@ -32,7 +44,10 @@ if torch.cuda.is_available():
     print(f" GPU device: {torch.cuda.get_device_name(0)}")
     print(f" GPU count: {torch.cuda.device_count()}")
 else:
-    print(" ℹ️ GPU will be allocated on-demand (ZeroGPU mode)")
+    if os.environ.get("SPACES_ZERO_GPU"):
+        print(" ℹ️ ZeroGPU mode - GPU will be allocated on-demand")
+    else:
+        print(" ⚠️ No CUDA GPU detected - will use CPU")
 print("="*50)
 
 PROCESSED_VIDEO_DIR = './processed_videos'
@@ -186,7 +201,7 @@ def preview_crop(image_path, npy_file, video_path, expand_x, expand_y, offset_x,
     else:
         return None,None, "Failed to generate crop preview"
 
-@spaces.GPU(duration=120) # Allocate GPU for 120 seconds (adjust based on your video length)
+@spaces.GPU(duration=300)
 def generate_video(input_img, should_crop_face, expand_x, expand_y, offset_x, offset_y, input_video_type, input_video, input_npy_select, input_npy, input_video_frames,
                    settings_steps, settings_cfg_scale, settings_seed, resolution_w, resolution_h,
                    model_step, custom_output_path, use_custom_fps, output_fps, callback_steps, context_frames, context_stride, context_overlap, context_batch_size, anomaly_action,intropolate_factor):
@@ -281,8 +296,10 @@ with gr.Blocks() as demo:
         gpu_name = torch.cuda.get_device_name(0)
         gpu_info = f"🚀 **GPU Enabled**: {gpu_name}"
     else:
-        # ZeroGPU mode - GPU allocated on-demand
-        gpu_info = "⚡ **ZeroGPU Mode**: GPU will be allocated automatically when generating"
+        if os.environ.get("SPACES_ZERO_GPU"):
+            gpu_info = "⚡ **ZeroGPU Mode**: GPU will be allocated when generating"
+        else:
+            gpu_info = "⚠️ **Running on CPU** (Generation will be slower)"
 
     gr.Markdown(f"<div style='text-align: center; padding: 10px; background-color: #e8f5e9; border-radius: 5px; border: 1px solid #4caf50;'>{gpu_info}</div>")
 
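
Note on the fallback shim added in the first hunk: its wrapper is a plain pass-through, so the decorated function loses its __name__ and docstring. A minimal sketch of an equivalent fallback that keeps that metadata via functools.wraps, assuming (as in this commit) the only call form the app needs is spaces.GPU(duration=...):

# Sketch only, not part of the commit: non-ZeroGPU fallback preserving metadata.
import functools
import os

if os.environ.get("SPACES_ZERO_GPU") is not None:
    import spaces
else:
    class spaces:
        @staticmethod
        def GPU(*decorator_args, **decorator_kwargs):
            def decorator(func):
                @functools.wraps(func)  # keep __name__/__doc__ of the wrapped function
                def wrapper(*args, **kwargs):
                    return func(*args, **kwargs)
                return wrapper
            return decorator

With either shim, @spaces.GPU(duration=300) resolves to the real ZeroGPU allocator when SPACES_ZERO_GPU is set and to a no-op wrapper otherwise.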
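
The SPACES_ZERO_GPU check now appears in two places (the startup print block and the Gradio status banner). A small helper could centralize that decision; the sketch below assumes both call sites only need a status string, and gpu_status_message is a hypothetical name, not part of this commit:

# Hypothetical helper (not in the commit): single source for the device-status text
# used by the startup print and the gr.Markdown banner.
import os
import torch

def gpu_status_message() -> str:
    if torch.cuda.is_available():
        return f"🚀 **GPU Enabled**: {torch.cuda.get_device_name(0)}"
    if os.environ.get("SPACES_ZERO_GPU"):
        return "⚡ **ZeroGPU Mode**: GPU will be allocated when generating"
    return "⚠️ **Running on CPU** (Generation will be slower)"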