import gradio as gr
from config import CHAT_SYSTEM_PROMPT_PLACEHOLDER, CHAT_MODEL_SPECS
from models import get_model_response
import logging
import copy
logger = logging.getLogger(__name__)
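
# Assumed shape of a CHAT_MODEL_SPECS entry, inferred from the lookups below
# (the authoritative definition lives in config.py and may differ):
#
#   "some-model-id": {
#       "display_name": "Some Model",
#       "description": "Markdown shown next to the model selector.",
#       "prompt_scenarios": [
#           {
#               "title": "Translator",
#               "system_prompt": "You are a professional translator...",
#               "message_examples": ["Translate 'hello' into French."],
#           },
#       ],
#   }
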
# --- Backend Logic ---
def handle_chat(message, history, system_prompt, temperature, model_id):
    """Core function to handle a chat message submission."""
    logger.debug(f"handle_chat inputs: message={message}, history={history}, system_prompt={system_prompt}, temperature={temperature}, model_id={model_id}")
    if history is None:
        history = []
    history = copy.deepcopy(history)
    history.append((message, ""))
    # Get the display name from the spec
    model_display_name = CHAT_MODEL_SPECS.get(model_id, {}).get("display_name", model_id)
    is_first_chunk = True
    for chunk in get_model_response(model_id, history, system_prompt, temperature):
        if is_first_chunk:
            # Prepend the model name to the first chunk
            history[-1] = (message, f"**{model_display_name}**\n\n" + chunk)
            is_first_chunk = False
        else:
            history[-1] = (message, history[-1][1] + chunk)
        yield copy.deepcopy(history), ""
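
# Streaming contract: handle_chat is a generator, so Gradio streams each yield
# straight to the UI. Every yield emits (full history, ""); the deepcopy gives
# Gradio a fresh object to diff on each chunk, and the empty string is
# presumably wired to a second output that clears the input textbox.
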
# --- UI Event Handlers ---
def handle_model_change(model_id):
    """Update the UI when the user switches models."""
    spec = CHAT_MODEL_SPECS[model_id]
    scenarios = spec.get("prompt_scenarios", [])
    # Load the first scenario by default
    if scenarios:
        first_scenario = scenarios[0]
        scenario_titles = [[s["title"]] for s in scenarios]
        message_examples = [[m] for m in first_scenario["message_examples"]]
        system_prompt_value = first_scenario["system_prompt"]
    else:  # Fall back gracefully when the model defines no scenarios
        scenario_titles = []
        message_examples = []
        system_prompt_value = ""
    return (
        gr.update(value=spec["description"]),
        gr.update(samples=scenario_titles),
        gr.update(value=system_prompt_value),
        gr.update(samples=message_examples),
    )
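
# Note: the four gr.update objects above must stay in the same order as the
# `outputs` list wired to model_selector.change in create_chat_tab:
# [model_description, scenario_selector, system_prompt, message_examples_display].
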
def handle_scenario_selection(model_id, evt: gr.SelectData):
    """Update the UI when the user selects a scenario from the dataset."""
    logger.debug("--- Scenario Selection Event ---")
    logger.debug(f"Selected event value: {evt.value}")
    logger.debug(f"Type of event value: {type(evt.value)}")
    # gr.Dataset delivers the clicked row as a list; extract the title string
    selected_title = evt.value[0] if isinstance(evt.value, list) and evt.value else None
    if not selected_title:
        logger.error("Selected event value is not a valid list or is empty.")
        return gr.update(), gr.update()
    spec = CHAT_MODEL_SPECS[model_id]
    scenarios = spec.get("prompt_scenarios", [])
    available_titles = [s["title"] for s in scenarios]
    logger.debug(f"Available scenario titles for model '{model_id}': {available_titles}")
    selected_scenario = next((s for s in scenarios if s["title"] == selected_title), None)
    if selected_scenario:
        logger.debug(f"Found matching scenario: '{selected_title}'")
        system_prompt_value = selected_scenario["system_prompt"]
        message_examples = [[m] for m in selected_scenario["message_examples"]]
        return gr.update(value=system_prompt_value), gr.update(samples=message_examples)
    logger.warning(f"No matching scenario found for title: '{selected_title}'")
    # If no scenario is found, do not update
    return gr.update(), gr.update()
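
# Returning bare gr.update() objects is a no-op, so a failed lookup leaves
# system_prompt and message_examples_display untouched instead of clearing them.
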
# --- UI Creation ---
def create_chat_tab():
    """Create and return all Gradio components for the chat tab."""
    # Extract model information from config for UI display;
    # choices is a list of (display_name, model_id) tuples
    model_choices = [(spec["display_name"], model_id) for model_id, spec in CHAT_MODEL_SPECS.items()]
    default_model_id = list(CHAT_MODEL_SPECS.keys())[0]
    default_spec = CHAT_MODEL_SPECS[default_model_id]
    default_scenarios = default_spec.get("prompt_scenarios", [])
    with gr.Row():
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                label="Chat Window",
                bubble_full_width=False,
                height=500,
                value=[(None, "Hello! I'm Ling. Try selecting a scenario and a message example below to get started.")]
            )
            with gr.Row():
                chat_input = gr.Textbox(placeholder="Ask me anything...", label="Input", show_label=False, scale=4)
                send_button = gr.Button("Send", variant="primary", scale=1)
            # New scenario-based example area
            with gr.Accordion("✨ Try these scenarios...", open=True):
                # Scenario selector
                scenario_selector = gr.Dataset(
                    components=[gr.Textbox(visible=False)],
                    samples=[[s["title"]] for s in default_scenarios],
                    label="System Prompt Examples",
                    headers=["Select a role or task to get started:"],
                )
                # Message examples
                message_examples_display = gr.Dataset(
                    components=[chat_input],
                    samples=[[m] for m in default_scenarios[0]["message_examples"]] if default_scenarios else [],
                    label="Message Examples",
                    headers=["Then, try these specific questions:"],
                )
        with gr.Column(scale=1):
            model_selector = gr.Radio(
                choices=model_choices,
                label="Select Model",
                value=default_model_id
            )
            model_description = gr.Markdown(default_spec["description"])
            system_prompt = gr.Textbox(
                label="System Prompt",
                lines=8,
                placeholder=CHAT_SYSTEM_PROMPT_PLACEHOLDER,
                value=default_scenarios[0]["system_prompt"] if default_scenarios else ""
            )
            temperature_slider = gr.Slider(minimum=0.0, maximum=2.0, value=1.0, step=0.1, label="Temperature")
    # --- Event Listeners ---
    model_selector.change(
        fn=handle_model_change,
        inputs=[model_selector],
        outputs=[model_description, scenario_selector, system_prompt, message_examples_display]
    )
    scenario_selector.select(
        fn=handle_scenario_selection,
        inputs=[model_selector],
        outputs=[system_prompt, message_examples_display]
    )
    message_examples_display.click(
        fn=lambda value: value[0],
        inputs=[message_examples_display],
        outputs=[chat_input]
    )
    return {
        "chatbot": chatbot,
        "chat_input": chat_input,
        "send_button": send_button,
        "system_prompt": system_prompt,
        "temperature_slider": temperature_slider,
        "model_selector": model_selector,
    }
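
# Usage sketch (hypothetical wiring; the real entry point, e.g. an app.py that
# assembles the full gr.Blocks layout, may differ; all names below are
# assumptions, not part of this module):
#
#   with gr.Blocks() as demo:
#       components = create_chat_tab()
#       components["send_button"].click(
#           fn=handle_chat,
#           inputs=[
#               components["chat_input"],
#               components["chatbot"],
#               components["system_prompt"],
#               components["temperature_slider"],
#               components["model_selector"],
#           ],
#           outputs=[components["chatbot"], components["chat_input"]],
#       )
#   demo.launch()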