badaoui (HF Staff) committed on
Commit 878bcc4 · verified · 1 Parent(s): 7b11e8e

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 from transformers import AutoModel
+from diffusers import FluxPipeline
 from huggingface_hub.utils import HfHubHTTPError, RepositoryNotFoundError
 
 def check_model_access(model_id: str, oauth_token: gr.OAuthToken | None):
@@ -25,14 +26,14 @@ def check_model_access(model_id: str, oauth_token: gr.OAuthToken | None):
     try:
         # 3. The core test: try to load the model's config using the user's token.
         # This will fail if the user doesn't have access to the gated repo's files.
-        model = AutoModel.from_pretrained(pretrained_model_name_or_path=model_id, token=oauth_token.token)
+        pipe = FluxPipeline.from_pretrained(pretrained_model_name_or_path=model_id, torch_dtype=torch.bfloat16, token=oauth_token.token)
 
         # 4. If the call succeeds, format a success message
         return f"""
         ### <span style='color: green;'>Access Granted ✅</span>
         Successfully loaded the configuration for **{model_id}**.
 
-        - **Model Type:** `{model}`
+        - **Model Type:** `{pipe}`
         - **Architecture:**
         - token : {oauth_token.token}
         """