import gradio as gr
from config import CHAT_SYSTEM_PROMPT_PLACEHOLDER, CHAT_MODEL_SPECS
from models import get_model_response
import logging
import copy

logger = logging.getLogger(__name__)


# --- Backend Logic ---
def handle_chat(message, history, system_prompt, temperature, model_id):
    """Core function to handle a chat message submission; streams the reply."""
    logger.debug(f"handle_chat inputs: message={message}, history={history}, system_prompt={system_prompt}, temperature={temperature}, model_id={model_id}")
    if history is None:
        history = []
    history = copy.deepcopy(history)
    history.append((message, ""))

    # Get the display name from the spec
    model_display_name = CHAT_MODEL_SPECS.get(model_id, {}).get("display_name", model_id)

    is_first_chunk = True
    for chunk in get_model_response(model_id, history, system_prompt, temperature):
        if is_first_chunk:
            # Prepend the model name before the first chunk
            history[-1] = (message, f"**{model_display_name}**\n\n" + chunk)
            is_first_chunk = False
        else:
            history[-1] = (message, history[-1][1] + chunk)
        # Yield after every chunk so the chatbot updates incrementally;
        # the empty string clears the input textbox
        yield copy.deepcopy(history), ""


# --- UI Event Handlers ---
def handle_model_change(model_id):
    """Update the UI when the user switches models"""
    spec = CHAT_MODEL_SPECS[model_id]
    scenarios = spec.get("prompt_scenarios", [])

    # Load the first scenario by default
    if scenarios:
        first_scenario = scenarios[0]
        scenario_titles = [[s["title"]] for s in scenarios]
        message_examples = [[m] for m in first_scenario["message_examples"]]
        system_prompt_value = first_scenario["system_prompt"]
    else:  # Fall back gracefully for models that define no scenarios
        scenario_titles = []
        message_examples = []
        system_prompt_value = ""

    return (
        gr.update(value=spec["description"]),
        gr.update(samples=scenario_titles),
        gr.update(value=system_prompt_value),
        gr.update(samples=message_examples)
    )


def handle_scenario_selection(model_id, evt: gr.SelectData):
    """Update the UI when the user selects a scenario from the dataset"""
    logger.debug("--- Scenario Selection Event ---")
    logger.debug(f"Selected event value: {evt.value}")
    logger.debug(f"Type of event value: {type(evt.value)}")

    # Dataset rows arrive as lists; extract the title string from the row
    selected_title = evt.value[0] if isinstance(evt.value, list) and evt.value else None
    if not selected_title:
        logger.error("Selected event value is not a valid list or is empty.")
        return gr.update(), gr.update()

    spec = CHAT_MODEL_SPECS[model_id]
    scenarios = spec.get("prompt_scenarios", [])
    available_titles = [s["title"] for s in scenarios]
    logger.debug(f"Available scenario titles for model '{model_id}': {available_titles}")

    selected_scenario = next((s for s in scenarios if s["title"] == selected_title), None)
    if selected_scenario:
        logger.debug(f"Found matching scenario: '{selected_title}'")
        system_prompt_value = selected_scenario["system_prompt"]
        message_examples = [[m] for m in selected_scenario["message_examples"]]
        return gr.update(value=system_prompt_value), gr.update(samples=message_examples)

    logger.warning(f"No matching scenario found for title: '{selected_title}'")
    # If no scenario is found, leave both components unchanged
    return gr.update(), gr.update()


# --- UI Creation ---
def create_chat_tab():
    """Create and return all Gradio components for the chat tab"""
    # Extract model information from config for UI display;
    # choices is a list of (display_name, model_id) tuples
    model_choices = [(spec["display_name"], model_id) for model_id, spec in CHAT_MODEL_SPECS.items()]
    default_model_id = list(CHAT_MODEL_SPECS.keys())[0]
    default_spec = CHAT_MODEL_SPECS[default_model_id]
    default_scenarios = default_spec.get("prompt_scenarios", [])

    with gr.Row():
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                label="Chat Window",
                bubble_full_width=False,
                height=500,
                value=[(None, "Hello! I'm Ling. Try selecting a scenario and a message example below to get started.")]
            )
            with gr.Row():
                chat_input = gr.Textbox(placeholder="Ask me anything...", label="Input", show_label=False, scale=4)
                send_button = gr.Button("Send", variant="primary", scale=1)

            # Scenario-based example area
            with gr.Accordion("✨ Try these scenarios...", open=True):
                # Scenario selector
                scenario_selector = gr.Dataset(
                    components=[gr.Textbox(visible=False)],
                    samples=[[s["title"]] for s in default_scenarios],
                    label="System Prompt Examples",
                    headers=["Select a role or task to get started:"],
                )
                # Message examples
                message_examples_display = gr.Dataset(
                    components=[chat_input],
                    samples=[[m] for m in default_scenarios[0]["message_examples"]] if default_scenarios else [],
                    label="Message Examples",
                    headers=["Then, try these specific questions:"],
                )
        with gr.Column(scale=1):
            model_selector = gr.Radio(
                choices=model_choices,
                label="Select Model",
                value=default_model_id
            )
            model_description = gr.Markdown(default_spec["description"])
            system_prompt = gr.Textbox(
                label="System Prompt",
                lines=8,
                placeholder=CHAT_SYSTEM_PROMPT_PLACEHOLDER,
                value=default_scenarios[0]["system_prompt"] if default_scenarios else ""
            )
            temperature_slider = gr.Slider(minimum=0.0, maximum=2.0, value=1.0, step=0.1, label="Temperature")

    # --- Event Listeners ---
    model_selector.change(
        fn=handle_model_change,
        inputs=[model_selector],
        outputs=[model_description, scenario_selector, system_prompt, message_examples_display]
    )
    scenario_selector.select(
        fn=handle_scenario_selection,
        inputs=[model_selector],
        outputs=[system_prompt, message_examples_display]
    )
    message_examples_display.click(
        fn=lambda value: value[0],
        inputs=[message_examples_display],
        outputs=[chat_input]
    )

    return {
        "chatbot": chatbot,
        "chat_input": chat_input,
        "send_button": send_button,
        "system_prompt": system_prompt,
        "temperature_slider": temperature_slider,
        "model_selector": model_selector,
    }
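

# --- Usage sketch (assumption) ---
# How this tab is wired into the parent app is not shown in this module, so
# the block below is only a plausible sketch: it assembles the tab inside a
# hypothetical gr.Blocks app and connects handle_chat, using only the
# components returned by create_chat_tab() above.
if __name__ == "__main__":
    with gr.Blocks() as demo:
        with gr.Tab("Chat"):
            ui = create_chat_tab()
        chat_inputs = [
            ui["chat_input"],
            ui["chatbot"],
            ui["system_prompt"],
            ui["temperature_slider"],
            ui["model_selector"],
        ]
        # handle_chat yields (history, ""), so the chatbot streams the reply
        # while the input textbox is cleared
        ui["send_button"].click(
            fn=handle_chat,
            inputs=chat_inputs,
            outputs=[ui["chatbot"], ui["chat_input"]],
        )
        ui["chat_input"].submit(
            fn=handle_chat,
            inputs=chat_inputs,
            outputs=[ui["chatbot"], ui["chat_input"]],
        )
    demo.launch()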
