Update api/ltx/ltx_aduc_pipeline.py
api/ltx/ltx_aduc_pipeline.py (CHANGED)
@@ -563,7 +563,7 @@ class LtxAducPipeline:
 
     # (The rest of the functions, _finalize_generation, _save_and_log_video, etc., remain the same)
     @log_function_io
-    def
+    def _finalize_generation(self, final_latents_cpu: torch.Tensor, base_filename: str, seed: int) -> Tuple[str, str]:
         final_latents_path = RESULTS_DIR / f"latents_{base_filename}_{seed}.pt"
         torch.save(final_latents_cpu, final_latents_path)
         logging.info(f"Final latents saved to: {final_latents_path}")
@@ -609,7 +609,7 @@ class LtxAducPipeline:
         return random.randint(0, 2**32 - 1)
 
 
-    def
+    def _finalize_generation1(self, latents_paths: List[Path], base_filename: str, seed: int) -> Tuple[str, str, int]:
         """Loads latents, concatenates, decodes to video, and saves both."""
         logging.info("Finalizing generation: decoding latents to video.")
         all_tensors_cpu = [torch.load(p) for p in latents_paths]
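For readers following what the restored _finalize_generation1 signature is meant to do, its docstring ("Loads latents, concatenates, decodes to video, and saves both") suggests roughly the shape sketched below. This is only a minimal sketch, not the pipeline's actual implementation: the concatenation axis, the results_dir argument, and the decode_fn callback are assumptions, since the diff shows only the first three lines of the method body.

from pathlib import Path
from typing import Callable, List, Tuple

import torch


def finalize_generation_sketch(
    latents_paths: List[Path],
    base_filename: str,
    seed: int,
    results_dir: Path,
    decode_fn: Callable[[torch.Tensor], torch.Tensor],
) -> Tuple[Path, Path]:
    """Load per-segment latents, concatenate, decode to frames, and save both (sketch only)."""
    # Load each segment's latents on the CPU, mirroring the `all_tensors_cpu` line in the diff.
    all_tensors_cpu = [torch.load(p, map_location="cpu") for p in latents_paths]
    # Assumed latent layout (batch, channels, frames, height, width): join segments on the frame axis.
    final_latents_cpu = torch.cat(all_tensors_cpu, dim=2)

    # Persist the concatenated latents, following the naming scheme visible in the diff.
    latents_path = results_dir / f"latents_{base_filename}_{seed}.pt"
    torch.save(final_latents_cpu, latents_path)

    # `decode_fn` stands in for the real latent-to-video decode step (e.g. a VAE decode);
    # the actual pipeline presumably encodes and writes a video file rather than a tensor dump.
    frames = decode_fn(final_latents_cpu)
    frames_path = results_dir / f"frames_{base_filename}_{seed}.pt"
    torch.save(frames, frames_path)
    return latents_path, frames_path

A real implementation would likely decode on the GPU in chunks to bound memory and write the result through the project's _save_and_log_video helper mentioned in the comment above, but those details are not visible in this commit.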