Add application file
app.py CHANGED

@@ -61,7 +61,7 @@ class GlobalText:
         self.pipeline = None
         self.torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
         self.lora_model_state_dict = {}
-        self.device = torch.device("
+        self.device = torch.device("cpu")
 
     def init_source_image_path(self, source_path):
         self.source_paths = sorted(glob(os.path.join(source_path, '*')))
@@ -83,9 +83,9 @@ class GlobalText:
 
         self.scheduler = 'LCM'
         scheduler = LCMScheduler.from_pretrained(model_path, subfolder="scheduler")
-        self.pipeline = ZePoPipeline.from_pretrained(model_path,scheduler=scheduler,torch_dtype=torch.float16,)
-        if is_xformers:
-            self.pipeline.enable_xformers_memory_efficient_attention()
+        self.pipeline = ZePoPipeline.from_pretrained(model_path,scheduler=scheduler,torch_dtype=torch.float16,)
+        # if is_xformers:
+        #     self.pipeline.enable_xformers_memory_efficient_attention()
         time_end = datetime.now()
         print(f'Load {model_path} successful in {time_end-time_start}')
         return gr.Dropdown()
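
For reference, this commit pins the pipeline to CPU and comments out the xformers call. Below is a minimal sketch of a device-aware variant of the same load step; load_pipeline, pipeline_cls, and model_path are placeholders introduced here for illustration (the Space's actual pipeline class is ZePoPipeline, whose import path is not shown in this diff), and the device/dtype selection simply mirrors the existing torch_dtype check in GlobalText rather than hardcoding "cpu".

import torch
from diffusers import LCMScheduler

def load_pipeline(model_path, pipeline_cls):
    # Pick device/dtype the same way GlobalText already picks torch_dtype,
    # instead of hardcoding torch.device("cpu").
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    torch_dtype = torch.float16 if device.type == "cuda" else torch.float32

    scheduler = LCMScheduler.from_pretrained(model_path, subfolder="scheduler")
    pipeline = pipeline_cls.from_pretrained(
        model_path, scheduler=scheduler, torch_dtype=torch_dtype
    ).to(device)

    # xformers attention only applies on CUDA; guard it rather than commenting it out.
    if device.type == "cuda":
        try:
            pipeline.enable_xformers_memory_efficient_attention()
        except Exception:
            pass  # xformers not installed; keep the default attention backend
    return pipeline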