|
|
import gradio as gr |
|
|
import requests |
|
|
import json |
|
|
import os |
|
|
import datetime |
|
|
|
|
|
|
|
|
# OpenRouter API key. The canonical variable name is OPENROUTER_API_KEY;
# the original (misspelled) "OpenRounter_API_KEY" is kept as a fallback so
# existing deployments that already set it keep working.
API_KEY = os.getenv("OPENROUTER_API_KEY") or os.getenv("OpenRounter_API_KEY")
|
|
|
|
|
|
|
|
# Models selectable in the UI, identified by OpenRouter model slugs.
# Entries suffixed ":free" are presumably OpenRouter's free-tier variants
# -- TODO confirm against the current OpenRouter model catalog.
MODEL_OPTIONS = [
    "openai/gpt-4o-mini-2024-07-18",
    "meta-llama/llama-3.1-405b-instruct",
    "nvidia/llama-3.1-nemotron-70b-instruct",
    "qwen/qwen-2.5-7b-instruct",
    "mistralai/mistral-large-2411",
    "microsoft/phi-3-medium-128k-instruct",
    "meta-llama/llama-3.1-405b-instruct:free",
    "nousresearch/hermes-3-llama-3.1-405b:free",
    "mistralai/mistral-7b-instruct:free",
    "microsoft/phi-3-medium-128k-instruct:free",
    "liquid/lfm-40b:free"
]
|
|
|
|
|
|
|
|
# In-memory log of past generations. Each entry is a dict with keys
# "input", "selected_models", "outputs", and "timestamp"
# (see generate_model_outputs_with_history). Shared across all sessions.
history = []
|
|
|
|
|
def generate_model_outputs_with_history(input_text, selected_models):
    """Query each selected OpenRouter model with *input_text*.

    Sends the prompt to every model in *selected_models* via the
    OpenRouter chat-completions endpoint, records the full interaction in
    the module-level ``history`` list, and returns a mapping of
    ``model -> output text`` (or a per-model error description).

    Args:
        input_text: The user prompt to send to every model.
        selected_models: Iterable of OpenRouter model slugs.

    Returns:
        dict mapping each model slug to its response text or an
        "Error: ..." string.
    """
    global history
    results = {}
    for model in selected_models:
        payload = {
            "model": model,
            "messages": [{"role": "user", "content": input_text}],
            "top_p": 1,
            "temperature": 1,
            "frequency_penalty": 0,
            "presence_penalty": 0,
            "repetition_penalty": 1,
            "top_k": 0,
        }
        try:
            # `json=` serializes the payload and sets Content-Type for us;
            # the timeout stops one slow model from hanging the whole UI.
            response = requests.post(
                url="https://openrouter.ai/api/v1/chat/completions",
                headers={"Authorization": f"Bearer {API_KEY}"},
                json=payload,
                timeout=60,
            )
        except requests.RequestException as exc:
            # A network failure for one model must not abort the others.
            results[model] = f"Error: request failed ({exc})"
            continue

        if response.status_code == 200:
            try:
                response_json = response.json()
                # `or [{}]` also guards an explicit empty "choices" list,
                # which would otherwise raise IndexError.
                results[model] = (
                    (response_json.get("choices") or [{}])[0]
                    .get("message", {})
                    .get("content", "No content returned.")
                )
            except ValueError:
                # json.JSONDecodeError is a ValueError subclass.
                results[model] = "Error: Unable to parse response."
        else:
            results[model] = f"Error: {response.status_code}, {response.text}"

    history.append({
        "input": input_text,
        "selected_models": selected_models,
        "outputs": results,
        "timestamp": str(datetime.datetime.now()),
    })
    return results
|
|
|
|
|
|
|
|
def create_outputs(selected_models):
    """Build one read-only output Textbox per selected model.

    Args:
        selected_models: Iterable of model slugs.

    Returns:
        list of gr.Textbox components, one per model, intended to display
        that model's generated output.
    """
    return [
        gr.Textbox(
            label=f"Output from {model}",
            interactive=False,
            lines=5,
            max_lines=10,
            show_label=False,
            elem_id=f"output_{model}",
            # gr.Textbox accepts no `css` keyword (that parameter belongs
            # to gr.Blocks); a CSS class is the supported per-component
            # styling hook. Pair with a Blocks-level rule such as
            # ".output-window { overflow-y: auto; max-height: 200px; }".
            elem_classes=["output-window"],
        )
        for model in selected_models
    ]
|
|
|
|
|
def clear_history():
    """Discard every recorded interaction.

    Rebinds the module-level ``history`` to a fresh empty list and
    returns a status message plus an empty list for the UI outputs.
    """
    global history
    history = []
    status = "History Cleared!"
    return status, []
|
|
|
|
|
def export_history():
    """Write the accumulated history to a timestamped JSON file.

    Dumps the module-level ``history`` list to
    ``history_YYYYMMDD_HHMMSS.json`` in the working directory.

    Returns:
        A single status string naming the file written. (The original
        returned a second, unused value, which mismatched the single
        Gradio output component wired to this handler.)
    """
    global history
    file_name = f"history_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
    # Explicit encoding so the export is portable across platforms.
    with open(file_name, 'w', encoding='utf-8') as f:
        json.dump(history, f, indent=4)
    return f"History exported to {file_name}"
|
|
|
|
|
|
|
|
# --- Gradio UI wiring ----------------------------------------------------
with gr.Blocks() as demo:
    with gr.Row():
        # Prompt entry plus model selection; the first model is pre-checked.
        input_text = gr.Textbox(lines=2, label="Input Text", placeholder="Enter your query here")
        selected_models = gr.CheckboxGroup(choices=MODEL_OPTIONS, label="Select Models", value=[MODEL_OPTIONS[0]])

    # Session-scoped value holders.
    # NOTE(review): gr.State stores values invisibly -- the Textbox
    # components pushed into output_placeholder by generate_and_update are
    # never rendered on the page; confirm the model outputs are actually
    # meant to be displayed this way.
    output_placeholder = gr.State([])
    # NOTE(review): gr.State(history) snapshots the list at build time,
    # while handlers mutate the module-level `history` shared by all
    # sessions -- verify this dual bookkeeping is intended.
    history_placeholder = gr.State(history)

    generate_button = gr.Button("Generate Outputs")

    def generate_and_update(input_text, selected_models):
        # Query the selected models, then build one Textbox per model.
        results = generate_model_outputs_with_history(input_text, selected_models)
        output_components = create_outputs(selected_models)
        return output_components, results

    generate_button.click(
        fn=generate_and_update,
        inputs=[input_text, selected_models],
        outputs=[output_placeholder, history_placeholder]
    )

    clear_history_button = gr.Button("Clear History")
    # The inline gr.Textbox receives clear_history's status message.
    clear_history_button.click(fn=clear_history, outputs=[gr.Textbox(value="History Cleared!"), history_placeholder])

    export_history_button = gr.Button("Export History")
    # NOTE(review): confirm the number of values the handler returns
    # matches the single wired output component.
    export_history_button.click(fn=export_history, outputs=[gr.Textbox(value="History Exported Successfully!")])

demo.launch()
|
|
|