feat(handlers): pass Hugging Face profile to handlers
- [feat] Update `handle_chat_submit()` and `handle_chat_retry()` signatures to accept `hf_profile` and initialize `username` (chat_handler.py:168,178,210,219)
- [feat] Update `handle_image_to_image_generation()` and `handle_image_generation()` signatures to accept `hf_profile` and initialize `username` (image_handler.py:279,289,307,318)
- [feat] Update `handle_text_to_speech_generation()` signature to accept `hf_profile` and initialize `username` (tts_handler.py:155,169)
- [feat] Update `handle_video_generation()` signature to accept `hf_profile` and initialize `username` (video_handler.py:132,140)

Files changed:
- chat_handler.py +4 -4
- image_handler.py +4 -4
- tts_handler.py +2 -2
- video_handler.py +2 -2
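For context on how the new `hf_profile` parameter gets populated: when a Space has Hugging Face OAuth enabled and includes a login button, Gradio fills in handler parameters annotated as `gr.OAuthToken` / `gr.OAuthProfile` automatically; they are never listed in the event's `inputs`, and both arrive as `None` for anonymous visitors. The sketch below is illustrative only (component names, layout, and the import path are assumptions, not this Space's actual UI); it uses the video handler because it has the shortest signature.

```python
import gradio as gr

from video_handler import handle_video_generation  # module path assumed from the diff

# Minimal wiring sketch, not the Space's actual UI. Gradio supplies the
# hf_token / hf_profile arguments based on their type annotations, so only
# the six UI inputs are listed below.
with gr.Blocks() as demo:
    gr.LoginButton()                                   # enables HF OAuth sign-in
    prompt = gr.Textbox(label="Prompt")
    model = gr.Dropdown(label="Model")
    provider = gr.Dropdown(label="Provider")
    steps = gr.Slider(1, 50, value=25, step=1, label="Steps")
    guidance = gr.Slider(0.0, 20.0, value=7.5, label="Guidance")
    seed = gr.Number(value=-1, label="Seed")
    video = gr.Video(label="Result")
    status = gr.Markdown()                             # shows formatted error messages

    gr.Button("Generate").click(
        handle_video_generation,
        inputs=[prompt, model, provider, steps, guidance, seed],
        outputs=[video, status],                       # error paths return (None, message)
    )

if __name__ == "__main__":
    demo.launch()
```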
chat_handler.py (CHANGED)

```diff
@@ -168,7 +168,7 @@ def chat_respond(
         yield format_error_message("Unexpected Error", f"An unexpected error occurred: {error_msg}")


-def handle_chat_submit(message, history, system_msg, model_name, provider, max_tokens, temperature, top_p, hf_token: gr.OAuthToken = None):
+def handle_chat_submit(message, history, system_msg, model_name, provider, max_tokens, temperature, top_p, hf_token: gr.OAuthToken = None, hf_profile: gr.OAuthProfile = None):
     """
     Handle chat submission and manage conversation history with streaming.
     """
@@ -178,7 +178,7 @@ def handle_chat_submit(message, history, system_msg, model_name, provider, max_t

     # Require sign-in: if no token present, prompt login
     access_token = getattr(hf_token, "token", None) if hf_token is not None else None
-    username = None
+    username = getattr(hf_profile, "username", None) if hf_profile is not None else None
     if not access_token:
         assistant_response = format_error_message("Access Required", "Please sign in with Hugging Face (sidebar Login button).")
         current_history = history + [{"role": "assistant", "content": assistant_response}]
@@ -210,14 +210,14 @@ def handle_chat_submit(message, history, system_msg, model_name, provider, max_t
         yield current_history, ""


-def handle_chat_retry(history, system_msg, model_name, provider, max_tokens, temperature, top_p, hf_token: gr.OAuthToken = None, retry_data=None):
+def handle_chat_retry(history, system_msg, model_name, provider, max_tokens, temperature, top_p, hf_token: gr.OAuthToken = None, hf_profile: gr.OAuthProfile = None, retry_data=None):
     """
     Retry the assistant response for the selected message.
     Works with gr.Chatbot.retry() which provides retry_data.index for the message.
     """
     # Require sign-in: if no token present, prompt login
     access_token = getattr(hf_token, "token", None) if hf_token is not None else None
-    username = None
+    username = getattr(hf_profile, "username", None) if hf_profile is not None else None
     if not access_token:
         assistant_response = format_error_message("Access Required", "Please sign in with Hugging Face (sidebar Login button).")
         current_history = (history or []) + [{"role": "assistant", "content": assistant_response}]
```
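The `username` initialization added in both handlers is deliberately defensive: `gr.OAuthProfile` exposes a `username` attribute, but the profile argument is `None` when the visitor is not signed in. A small standalone illustration of the pattern (the `resolve_username` helper name is hypothetical, used here only to exercise the exact expression from the diff):

```python
from types import SimpleNamespace

def resolve_username(hf_profile):
    # Same expression as the lines added in handle_chat_submit / handle_chat_retry:
    # tolerate a missing profile and fall back to None.
    return getattr(hf_profile, "username", None) if hf_profile is not None else None

print(resolve_username(None))                               # None (anonymous visitor)
print(resolve_username(SimpleNamespace(username="alice")))  # alice (stand-in for gr.OAuthProfile)
```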
image_handler.py (CHANGED)

```diff
@@ -279,7 +279,7 @@ def generate_image_to_image(
         return None, format_error_message("Unexpected Error", f"An unexpected error occurred: {error_msg}")


-def handle_image_to_image_generation(input_image_val, prompt_val, model_val, provider_val, negative_prompt_val, steps_val, guidance_val, seed_val, hf_token: gr.OAuthToken = None):
+def handle_image_to_image_generation(input_image_val, prompt_val, model_val, provider_val, negative_prompt_val, steps_val, guidance_val, seed_val, hf_token: gr.OAuthToken = None, hf_profile: gr.OAuthProfile = None):
     """
     Handle image-to-image generation request with validation.
     """
@@ -289,7 +289,7 @@ def handle_image_to_image_generation(input_image_val, prompt_val, model_val, pro

     # Require sign-in via HF OAuth token
     access_token = getattr(hf_token, "token", None) if hf_token is not None else None
-    username = None
+    username = getattr(hf_profile, "username", None) if hf_profile is not None else None
     if not access_token:
         return None, format_error_message("Access Required", "Please sign in with Hugging Face (sidebar Login button).")

@@ -307,7 +307,7 @@ def handle_image_to_image_generation(input_image_val, prompt_val, model_val, pro
     )


-def handle_image_generation(prompt_val, model_val, provider_val, negative_prompt_val, width_val, height_val, steps_val, guidance_val, seed_val, hf_token: gr.OAuthToken = None):
+def handle_image_generation(prompt_val, model_val, provider_val, negative_prompt_val, width_val, height_val, steps_val, guidance_val, seed_val, hf_token: gr.OAuthToken = None, hf_profile: gr.OAuthProfile = None):
     """
     Handle image generation request with validation.
     """
@@ -318,7 +318,7 @@ def handle_image_generation(prompt_val, model_val, provider_val, negative_prompt

     # Require sign-in via HF OAuth token
     access_token = getattr(hf_token, "token", None) if hf_token is not None else None
-    username = None
+    username = getattr(hf_profile, "username", None) if hf_profile is not None else None
     if not access_token:
         return None, format_error_message("Access Required", "Please sign in with Hugging Face (sidebar Login button).")

```
tts_handler.py (CHANGED)

```diff
@@ -155,7 +155,7 @@ def generate_text_to_speech(
         return None, format_error_message("Unexpected Error", f"An unexpected error occurred: {error_msg}")


-def handle_text_to_speech_generation(text_val, model_val, provider_val, voice_val, speed_val, audio_url_val, exaggeration_val, temperature_val, cfg_val, hf_token: gr.OAuthToken = None):
+def handle_text_to_speech_generation(text_val, model_val, provider_val, voice_val, speed_val, audio_url_val, exaggeration_val, temperature_val, cfg_val, hf_token: gr.OAuthToken = None, hf_profile: gr.OAuthProfile = None):
     """
     Handle text-to-speech generation request with validation.
     """
@@ -169,7 +169,7 @@ def handle_text_to_speech_generation(text_val, model_val, provider_val, voice_va

     # Require sign-in via HF OAuth token
     access_token = getattr(hf_token, "token", None) if hf_token is not None else None
-    username = None
+    username = getattr(hf_profile, "username", None) if hf_profile is not None else None
     if not access_token:
         return None, format_error_message("Access Required", "Please sign in with Hugging Face (sidebar Login button).")

```
video_handler.py (CHANGED)

```diff
@@ -132,7 +132,7 @@ def generate_video(
         return None, format_error_message("Unexpected Error", f"An unexpected error occurred: {error_msg}")


-def handle_video_generation(prompt_val, model_val, provider_val, steps_val, guidance_val, seed_val, hf_token: gr.OAuthToken = None):
+def handle_video_generation(prompt_val, model_val, provider_val, steps_val, guidance_val, seed_val, hf_token: gr.OAuthToken = None, hf_profile: gr.OAuthProfile = None):
     """
     Handle text-to-video generation request with validation and org access.
     """
@@ -140,7 +140,7 @@ def handle_video_generation(prompt_val, model_val, provider_val, steps_val, guid
         return None, format_error_message("Validation Error", "Please enter a prompt for video generation")

     access_token = getattr(hf_token, "token", None) if hf_token is not None else None
-    username = None
+    username = getattr(hf_profile, "username", None) if hf_profile is not None else None
     if not access_token:
         return None, format_error_message("Access Required", "Please sign in with Hugging Face (sidebar Login button).")

```
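Since every handler now shares the same sign-in gate, the anonymous path is easy to smoke-test. A pytest-style sketch, assuming the module is importable as `video_handler` and that `format_error_message` includes its title in the returned text (both inferred from the diff, not verified here):

```python
from video_handler import handle_video_generation  # import path assumed

def test_video_generation_requires_sign_in():
    # Anonymous visitor: Gradio would pass None for both OAuth parameters.
    video, message = handle_video_generation(
        "a timelapse of clouds over mountains",  # non-empty prompt passes validation
        "some-model", "some-provider",           # placeholder model/provider values
        25, 7.5, -1,                             # steps, guidance, seed
        hf_token=None, hf_profile=None,
    )
    assert video is None
    assert "Access Required" in message          # assumes the title appears in the formatted text
```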