badaoui committed · verified
Commit e2f0f2b · 1 Parent(s): 1742e91

Update app.py

Files changed (1):
  app.py +12 -1
app.py CHANGED

@@ -3,6 +3,7 @@ from transformers import AutoModel
 from diffusers import FluxPipeline
 import torch
 from huggingface_hub.utils import HfHubHTTPError, RepositoryNotFoundError
+from optimum.neuron import NeuronFluxPipeline
 
 def check_model_access(model_id: str, oauth_token: gr.OAuthToken | None):
     """
@@ -27,7 +28,17 @@ def check_model_access(model_id: str, oauth_token: gr.OAuthToken | None):
     try:
         # 3. The core test: try to load the model's config using the user's token.
         # This will fail if the user doesn't have access to the gated repo's files.
-        pipe = FluxPipeline.from_pretrained(pretrained_model_name_or_path=model_id, torch_dtype=torch.bfloat16, token=oauth_token.token)
+        # pipe = FluxPipeline.from_pretrained(pretrained_model_name_or_path=model_id, torch_dtype=torch.bfloat16, token=oauth_token.token)
+
+        model = NeuronFluxPipeline.from_pretrained(
+            model_id,
+            export=True,
+            tensor_parallel_size=4,
+            token=oauth_token.token,
+            batch_size=1,
+            sequence_length=128,
+        )
+
 
         # 4. If the call succeeds, format a success message
         return f"""