# Hugging Face Space: XORTRON chat UI (Gradio + Featherless AI backend).
import gradio as gr
from openai import OpenAI
import os
from dotenv import load_dotenv

# Pull configuration from a local .env file if present (no-op otherwise).
load_dotenv()

# The system prompt is kept out of the source tree; fall back to an empty
# string so the chat API is never handed a null message content.
SYSTEM_PROMPT = os.getenv("XTRNPMT") or ""

# Featherless AI exposes an OpenAI-compatible REST endpoint, so the
# official OpenAI client is reused against this base URL.
API_BASE_URL = "https://api.featherless.ai/v1"
FEATHERLESS_API_KEY = os.getenv("FEATHERLESS_API_KEY")
FEATHERLESS_MODEL = "darkc0de/XortronCriminalComputingConfig"

if not FEATHERLESS_API_KEY:
    # Fail fast with a single clear error. (The previous version raised a
    # ValueError inside its own try block only to catch it and re-wrap it
    # as a RuntimeError; the net effect — a RuntimeError at import time —
    # is preserved without the detour.)
    print("WARNING: FEATHERLESS_API_KEY environment variable is not set.")
    raise RuntimeError(
        "Could not initialize OpenAI client. FEATHERLESS_API_KEY is not set. "
        "Please set it as an environment variable or a secret in your "
        "deployment environment."
    )

try:
    client = OpenAI(
        base_url=API_BASE_URL,
        api_key=FEATHERLESS_API_KEY,
    )
    print(f"OpenAI client initialized with base_url: {API_BASE_URL} for Featherless AI, model: {FEATHERLESS_MODEL}")
except Exception as e:
    print(f"Error initializing OpenAI client with base_url '{API_BASE_URL}': {e}")
    # Chain the original exception so the real cause stays in the traceback.
    raise RuntimeError(
        "Could not initialize OpenAI client. "
        f"Please check the API base URL ('{API_BASE_URL}'), your Featherless AI API key, model ID, "
        f"and ensure the server is accessible. Original error: {e}"
    ) from e
def respond(message, history):
    """
    Generate a streaming reply to *message* given the chat *history*.

    Builds an OpenAI-style message list (static system prompt, replayed
    history, latest user message) and streams a chat completion from the
    Featherless AI endpoint, yielding the accumulated text as it grows.

    Args:
        message (str): The latest message from the user.
        history (list): Prior turns, either as [user, assistant] pairs
            (classic Gradio tuple format) or as {"role": ..., "content": ...}
            dicts (Gradio "messages" format). Both are accepted.

    Yields:
        str: The response text accumulated so far (Gradio re-renders the
        assistant message on each yield).
    """
    # Guard against a missing env var: content must be a string, not None.
    messages = [{"role": "system", "content": SYSTEM_PROMPT or ""}]

    for turn in history:
        if isinstance(turn, dict):
            # "messages"-format history entry: already role/content shaped.
            if turn.get("content"):
                messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            # Tuple-format history entry: (user_message, ai_message).
            user_message, ai_message = turn
            if user_message:
                messages.append({"role": "user", "content": user_message})
            if ai_message:
                messages.append({"role": "assistant", "content": ai_message})

    messages.append({"role": "user", "content": message})

    response_text = ""
    try:
        stream = client.chat.completions.create(
            messages=messages,
            model=FEATHERLESS_MODEL,
            stream=True,
        )
        for chunk in stream:
            # Streaming chunk choices carry only `delta`; the old fallback
            # that read `chunk.choices[0].message` raised AttributeError on
            # chunks (swallowed by the except below) and has been removed.
            if not chunk.choices:
                continue
            token = getattr(chunk.choices[0].delta, "content", None)
            if token:
                response_text += token
                yield response_text
    except Exception as e:
        error_message = f"An error occurred during model inference with Featherless AI: {e}"
        print(error_message)
        yield error_message
# JS injected into the page <head>: draws Ko-fi's floating donation widget.
kofi_script = """
<script src='https://storage.ko-fi.com/cdn/scripts/overlay-widget.js'></script>
<script>
kofiWidgetOverlay.draw('xortron', {
'type': 'floating-chat',
'floating-chat.donateButton.text': 'Support me',
'floating-chat.donateButton.background-color': '#794bc4',
'floating-chat.donateButton.text-color': '#fff'
});
</script>
"""

# Static Ko-fi donate button rendered at the bottom of the page.
kofi_button_html = """
<div style="text-align: center; padding: 20px;">
<a href='https://ko-fi.com/Z8Z51E5TIG' target='_blank'>
<img height='36' style='border:0px;height:36px;' src='https://storage.ko-fi.com/cdn/kofi5.png?v=6' border='0' alt='Buy Me a Coffee at ko-fi.com' />
</a>
</div>
"""

# Small-print donation pitch shown above the Ko-fi button.
donation_solicitation_html = """
<div style="text-align: center; font-size: x-small; margin-bottom: 5px;">
The Cybernetic Criminal Computing Corporation presents: XORTRON 4.2, free of charge, unlimited, no login, no signup, no bullshit. Finding yourself in a long queue? Consider donating so we can scale and remain 100% free for all. Im sure even a low-life deadbeat freeloader like yourself can at least throw some spare change right? - Support Xortron @ ko-fi.com/xortron<br>
</div>
"""

# Forces the Orbitron font across the whole Gradio UI (body, buttons,
# inputs, labels, and chatbot messages).
custom_css = """
@import url('https://fonts.googleapis.com/css2?family=Orbitron:wght@400;700&display=swap');
body, .gradio-container {
font-family: 'Orbitron', sans-serif !important;
}
.gr-button { font-family: 'Orbitron', sans-serif !important; }
.gr-input { font-family: 'Orbitron', sans-serif !important; }
.gr-label { font-family: 'Orbitron', sans-serif !important; }
.gr-chatbot .message { font-family: 'Orbitron', sans-serif !important; }
"""
# Assemble the page: header image, chat interface, donation pitch + button.
with gr.Blocks(theme="Nymbo/Nymbo_Theme", head=kofi_script, css=custom_css) as demo:
    # Header image, centered and scaled via inline CSS.
    gr.HTML('<img src="https://cdn-uploads.huggingface.co/production/uploads/6540a02d1389943fef4d2640/TVPNkZCjnaOwfzD4Ze9tj.png" alt="Header Image" style="display: block; margin-left: auto; margin-right: auto; max-width: 22%; height: auto;">')
    gr.ChatInterface(
        fn=respond,  # Streaming generator invoked on each user message
        chatbot=gr.Chatbot(  # Configure the chatbot display area
            height=700,  # Height of the chat history display, in pixels
            label="XORTRON - Criminal Computing"  # Panel label
        )
    )
    gr.HTML(donation_solicitation_html)
    gr.HTML(kofi_button_html)
if __name__ == "__main__":
    # Launch the Gradio app. The API key was already validated at import
    # time; this extra notice helps when reading deployment logs.
    if not FEATHERLESS_API_KEY:
        print("\nCRITICAL ERROR: FEATHERLESS_API_KEY is not set.")
        print("Please ensure it's set as a secret in your Hugging Face Space settings or as an environment variable.\n")
    try:
        # Allow up to 3 concurrent generations per event.
        demo.queue(default_concurrency_limit=3)
        demo.launch(show_api=False, share=False)
    except NameError as name_err:
        # `demo` (or `client`) never got defined — module setup failed.
        print(f"Gradio demo could not be launched. 'client' might not have been initialized: {name_err}")
    except RuntimeError as runtime_err:
        # Note: the alias no longer shadows the stdlib `re` module.
        print(f"Gradio demo could not be launched due to an error during client initialization: {runtime_err}")
    except Exception as exc:
        print(f"An unexpected error occurred when trying to launch Gradio demo: {exc}")