Spaces:
Running
on
Zero
Running
on
Zero
Ji4chenLi
committed on
Commit
·
70cdee7
1
Parent(s):
6c6e249
description
Browse files
app.py
CHANGED
|
@@ -26,6 +26,18 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
| 26 |
MAX_SEED = np.iinfo(np.int32).max
|
| 27 |
CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
|
| 28 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29 |
|
| 30 |
if torch.cuda.is_available():
|
| 31 |
config = OmegaConf.load("configs/inference_t2v_512_v2.0.yaml")
|
|
@@ -187,10 +199,7 @@ else:
|
|
| 187 |
with gr.Blocks(css="style.css") as demo:
|
| 188 |
|
| 189 |
with gr.Column(elem_id="col-container"):
|
| 190 |
-
gr.Markdown(
|
| 191 |
-
# Text-to-Image Gradio Template
|
| 192 |
-
Currently running on {power_device}.
|
| 193 |
-
""")
|
| 194 |
|
| 195 |
with gr.Row():
|
| 196 |
prompt = gr.Text(
|
|
|
|
| 26 |
MAX_SEED = np.iinfo(np.int32).max
|
| 27 |
CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
|
| 28 |
|
| 29 |
+
DESCRIPTION = """# T2V-Turbo ๐
|
| 30 |
+
We provide T2V-Turbo (VC2) distilled from [VideoCrafter2](https://ailab-cvc.github.io/videocrafter2/) with the reward feedback from [HPSv2.1](https://github.com/tgxs002/HPSv2/tree/master) and [InternVid2 Stage 2 Model](https://huggingface.co/OpenGVLab/InternVideo2-Stage2_1B-224p-f4).
|
| 31 |
+
|
| 32 |
+
You can download the models from [here](https://huggingface.co/jiachenli-ucsb/T2V-Turbo-VC2). Check out our [Project page](https://t2v-turbo.github.io) 🚀
|
| 33 |
+
"""
|
| 34 |
+
if torch.cuda.is_available():
|
| 35 |
+
DESCRIPTION += "\n<p>Running on CUDA 🚀</p>"
|
| 36 |
+
elif hasattr(torch, "xpu") and torch.xpu.is_available():
|
| 37 |
+
DESCRIPTION += "\n<p>Running on XPU 🤓</p>"
|
| 38 |
+
else:
|
| 39 |
+
DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
|
| 40 |
+
|
| 41 |
|
| 42 |
if torch.cuda.is_available():
|
| 43 |
config = OmegaConf.load("configs/inference_t2v_512_v2.0.yaml")
|
|
|
|
| 199 |
with gr.Blocks(css="style.css") as demo:
|
| 200 |
|
| 201 |
with gr.Column(elem_id="col-container"):
|
| 202 |
+
gr.Markdown(DESCRIPTION)
|
|
|
|
|
|
|
|
|
|
| 203 |
|
| 204 |
with gr.Row():
|
| 205 |
prompt = gr.Text(
|