Update app.py
Browse files
app.py
CHANGED
|
@@ -242,7 +242,7 @@ with block:
|
|
| 242 |
gr.Markdown("## Canny Edge")
|
| 243 |
with gr.Row():
|
| 244 |
with gr.Column():
|
| 245 |
-
input_image = gr.Image
|
| 246 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 247 |
low_threshold = gr.Slider(label="low_threshold", minimum=1, maximum=255, value=100, step=1)
|
| 248 |
high_threshold = gr.Slider(label="high_threshold", minimum=1, maximum=255, value=200, step=1)
|
|
@@ -276,7 +276,7 @@ with block:
|
|
| 276 |
gr.Markdown('## HED Edge "SoftEdge"')
|
| 277 |
with gr.Row():
|
| 278 |
with gr.Column():
|
| 279 |
-
input_image = gr.Image
|
| 280 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 281 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 282 |
run_button = gr.Button("Run")
|
|
@@ -290,7 +290,7 @@ with block:
|
|
| 290 |
gr.Markdown('## Pidi Edge "SoftEdge"')
|
| 291 |
with gr.Row():
|
| 292 |
with gr.Column():
|
| 293 |
-
input_image = gr.Image
|
| 294 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 295 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 296 |
run_button = gr.Button("Run")
|
|
@@ -305,7 +305,7 @@ with block:
|
|
| 305 |
gr.Markdown("## MLSD Edge")
|
| 306 |
with gr.Row():
|
| 307 |
with gr.Column():
|
| 308 |
-
input_image = gr.Image
|
| 309 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 310 |
value_threshold = gr.Slider(label="value_threshold", minimum=0.01, maximum=2.0, value=0.1, step=0.01)
|
| 311 |
distance_threshold = gr.Slider(label="distance_threshold", minimum=0.01, maximum=20.0, value=0.1, step=0.01)
|
|
@@ -322,7 +322,7 @@ with block:
|
|
| 322 |
gr.Markdown("## MIDAS Depth")
|
| 323 |
with gr.Row():
|
| 324 |
with gr.Column():
|
| 325 |
-
input_image = gr.Image
|
| 326 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 327 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=384, step=64)
|
| 328 |
run_button = gr.Button("Run")
|
|
@@ -338,7 +338,7 @@ with block:
|
|
| 338 |
gr.Markdown("## Zoe Depth")
|
| 339 |
with gr.Row():
|
| 340 |
with gr.Column():
|
| 341 |
-
input_image = gr.Image
|
| 342 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 343 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 344 |
run_button = gr.Button("Run")
|
|
@@ -353,7 +353,7 @@ with block:
|
|
| 353 |
gr.Markdown("## Normal Bae")
|
| 354 |
with gr.Row():
|
| 355 |
with gr.Column():
|
| 356 |
-
input_image = gr.Image
|
| 357 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 358 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 359 |
run_button = gr.Button("Run")
|
|
@@ -368,7 +368,7 @@ with block:
|
|
| 368 |
gr.Markdown("## DWPose")
|
| 369 |
with gr.Row():
|
| 370 |
with gr.Column():
|
| 371 |
-
input_image = gr.Image
|
| 372 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 373 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 374 |
run_button = gr.Button("Run")
|
|
@@ -383,7 +383,7 @@ with block:
|
|
| 383 |
gr.Markdown("## Openpose")
|
| 384 |
with gr.Row():
|
| 385 |
with gr.Column():
|
| 386 |
-
input_image = gr.Image
|
| 387 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 388 |
hand_and_face = gr.Checkbox(label='Hand and Face', value=False)
|
| 389 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
|
@@ -399,7 +399,7 @@ with block:
|
|
| 399 |
gr.Markdown("## Lineart Anime \n<p>Check Invert to use with Mochi Diffusion.")
|
| 400 |
with gr.Row():
|
| 401 |
with gr.Column():
|
| 402 |
-
input_image = gr.Image
|
| 403 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 404 |
invert = gr.Checkbox(label='Invert', value=True)
|
| 405 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
|
@@ -415,7 +415,7 @@ with block:
|
|
| 415 |
gr.Markdown("## Lineart \n<p>Check Invert to use with Mochi Diffusion. Inverted image can also be created here for use with ControlNet Scribble.")
|
| 416 |
with gr.Row():
|
| 417 |
with gr.Column():
|
| 418 |
-
input_image = gr.Image
|
| 419 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 420 |
coarse = gr.Checkbox(label='Using coarse model', value=False)
|
| 421 |
invert = gr.Checkbox(label='Invert', value=True)
|
|
@@ -443,7 +443,7 @@ with block:
|
|
| 443 |
gr.Markdown("## Oneformer COCO Segmentation")
|
| 444 |
with gr.Row():
|
| 445 |
with gr.Column():
|
| 446 |
-
input_image = gr.Image
|
| 447 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 448 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 449 |
run_button = gr.Button("Run")
|
|
@@ -458,7 +458,7 @@ with block:
|
|
| 458 |
gr.Markdown("## Oneformer ADE20K Segmentation")
|
| 459 |
with gr.Row():
|
| 460 |
with gr.Column():
|
| 461 |
-
input_image = gr.Image
|
| 462 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 463 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=640, step=64)
|
| 464 |
run_button = gr.Button("Run")
|
|
@@ -472,7 +472,7 @@ with block:
|
|
| 472 |
gr.Markdown("## Content Shuffle")
|
| 473 |
with gr.Row():
|
| 474 |
with gr.Column():
|
| 475 |
-
input_image = gr.Image
|
| 476 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 477 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 478 |
run_button = gr.Button("Run")
|
|
@@ -487,7 +487,7 @@ with block:
|
|
| 487 |
gr.Markdown("## Color Shuffle")
|
| 488 |
with gr.Row():
|
| 489 |
with gr.Column():
|
| 490 |
-
input_image = gr.Image
|
| 491 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 492 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 493 |
run_button = gr.Button("Run")
|
|
|
|
| 242 |
gr.Markdown("## Canny Edge")
|
| 243 |
with gr.Row():
|
| 244 |
with gr.Column():
|
| 245 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 246 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 247 |
low_threshold = gr.Slider(label="low_threshold", minimum=1, maximum=255, value=100, step=1)
|
| 248 |
high_threshold = gr.Slider(label="high_threshold", minimum=1, maximum=255, value=200, step=1)
|
|
|
|
| 276 |
gr.Markdown('## HED Edge "SoftEdge"')
|
| 277 |
with gr.Row():
|
| 278 |
with gr.Column():
|
| 279 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 280 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 281 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 282 |
run_button = gr.Button("Run")
|
|
|
|
| 290 |
gr.Markdown('## Pidi Edge "SoftEdge"')
|
| 291 |
with gr.Row():
|
| 292 |
with gr.Column():
|
| 293 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 294 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 295 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 296 |
run_button = gr.Button("Run")
|
|
|
|
| 305 |
gr.Markdown("## MLSD Edge")
|
| 306 |
with gr.Row():
|
| 307 |
with gr.Column():
|
| 308 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 309 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 310 |
value_threshold = gr.Slider(label="value_threshold", minimum=0.01, maximum=2.0, value=0.1, step=0.01)
|
| 311 |
distance_threshold = gr.Slider(label="distance_threshold", minimum=0.01, maximum=20.0, value=0.1, step=0.01)
|
|
|
|
| 322 |
gr.Markdown("## MIDAS Depth")
|
| 323 |
with gr.Row():
|
| 324 |
with gr.Column():
|
| 325 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 326 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 327 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=384, step=64)
|
| 328 |
run_button = gr.Button("Run")
|
|
|
|
| 338 |
gr.Markdown("## Zoe Depth")
|
| 339 |
with gr.Row():
|
| 340 |
with gr.Column():
|
| 341 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 342 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 343 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 344 |
run_button = gr.Button("Run")
|
|
|
|
| 353 |
gr.Markdown("## Normal Bae")
|
| 354 |
with gr.Row():
|
| 355 |
with gr.Column():
|
| 356 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 357 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 358 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 359 |
run_button = gr.Button("Run")
|
|
|
|
| 368 |
gr.Markdown("## DWPose")
|
| 369 |
with gr.Row():
|
| 370 |
with gr.Column():
|
| 371 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 372 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 373 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 374 |
run_button = gr.Button("Run")
|
|
|
|
| 383 |
gr.Markdown("## Openpose")
|
| 384 |
with gr.Row():
|
| 385 |
with gr.Column():
|
| 386 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 387 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 388 |
hand_and_face = gr.Checkbox(label='Hand and Face', value=False)
|
| 389 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
|
|
|
| 399 |
gr.Markdown("## Lineart Anime \n<p>Check Invert to use with Mochi Diffusion.")
|
| 400 |
with gr.Row():
|
| 401 |
with gr.Column():
|
| 402 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 403 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 404 |
invert = gr.Checkbox(label='Invert', value=True)
|
| 405 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
|
|
|
| 415 |
gr.Markdown("## Lineart \n<p>Check Invert to use with Mochi Diffusion. Inverted image can also be created here for use with ControlNet Scribble.")
|
| 416 |
with gr.Row():
|
| 417 |
with gr.Column():
|
| 418 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 419 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 420 |
coarse = gr.Checkbox(label='Using coarse model', value=False)
|
| 421 |
invert = gr.Checkbox(label='Invert', value=True)
|
|
|
|
| 443 |
gr.Markdown("## Oneformer COCO Segmentation")
|
| 444 |
with gr.Row():
|
| 445 |
with gr.Column():
|
| 446 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 447 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 448 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 449 |
run_button = gr.Button("Run")
|
|
|
|
| 458 |
gr.Markdown("## Oneformer ADE20K Segmentation")
|
| 459 |
with gr.Row():
|
| 460 |
with gr.Column():
|
| 461 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 462 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 463 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=640, step=64)
|
| 464 |
run_button = gr.Button("Run")
|
|
|
|
| 472 |
gr.Markdown("## Content Shuffle")
|
| 473 |
with gr.Row():
|
| 474 |
with gr.Column():
|
| 475 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 476 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 477 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 478 |
run_button = gr.Button("Run")
|
|
|
|
| 487 |
gr.Markdown("## Color Shuffle")
|
| 488 |
with gr.Row():
|
| 489 |
with gr.Column():
|
| 490 |
+
input_image = gr.Image(label="Input Image", type="numpy", height=480)
|
| 491 |
# input_image = gr.Image(source='upload', type="numpy")
|
| 492 |
resolution = gr.Slider(label="resolution", minimum=256, maximum=1024, value=512, step=64)
|
| 493 |
run_button = gr.Button("Run")
|