Ely-testa committed on
Commit
80af36d
·
verified ·
1 Parent(s): 2f185d5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -16
app.py CHANGED
@@ -1,24 +1,24 @@
1
  from fastai.vision.all import *
2
  import gradio as gr
3
- import torch
4
- from diffusers import FluxPipeline
5
- from huggingface_hub import login
6
- login()
7
 
8
- pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
9
  #pipe.enable_model_cpu_offload() #save some VRAM by offloading the model to CPU. Remove this if you have enough GPU power
10
 
11
- prompt = "A cat holding a sign that says hello world"
12
- image = pipe(
13
- prompt,
14
- height=1024,
15
- width=1024,
16
- guidance_scale=3.5,
17
- num_inference_steps=50,
18
- max_sequence_length=512,
19
- generator=torch.Generator("cpu").manual_seed(0)
20
- ).images[0]
21
- image.save("flux-dev.png")
22
 
23
 
24
  learn = load_learner('export.pkl')
 
1
  from fastai.vision.all import *
2
  import gradio as gr
3
+ #import torch
4
+ #from diffusers import FluxPipeline
5
+ #from huggingface_hub import login
6
+ #login()
7
 
8
+ #pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
9
  #pipe.enable_model_cpu_offload() #save some VRAM by offloading the model to CPU. Remove this if you have enough GPU power
10
 
11
+ #prompt = "A cat holding a sign that says hello world"
12
+ #image = pipe(
13
+ # prompt,
14
+ # height=1024,
15
+ # width=1024,
16
+ # guidance_scale=3.5,
17
+ # num_inference_steps=50,
18
+ #max_sequence_length=512,
19
+ # generator=torch.Generator("cpu").manual_seed(0)
20
+ #).images[0]
21
+ #image.save("flux-dev.png")
22
 
23
 
24
  learn = load_learner('export.pkl')