from fastapi import Request

from pipeline.preprocess import preprocess_input
from pipeline.generator import generate_response
from pipeline.postprocess import postprocess_pipeline, fallback_final_check
from models.fallback_model import generate_fallback_response
from prompt_builder import build_main_prompt, build_fallback_prompt  # use the revised prompt builders


async def handle_dialogue(
    request: Request,
    session_id: str,
    npc_id: str,
    user_input: str,
    context: dict,
) -> dict:
    """
    Full dialogue-handling pipeline:
    1) preprocess_input() -> build the pre data
    2) main path: main prompt -> main model -> postprocess_pipeline()
    3) fallback path: fallback prompt -> fallback model -> fallback_final_check()
    """
    # 1. Preprocess
    pre = await preprocess_input(request, session_id, npc_id, user_input, context)

    # 2. Fallback path
    if not pre.get("is_valid", True):
        # Build the fallback prompt (branches internally on additional_trigger)
        fb_prompt = build_fallback_prompt(pre, session_id, npc_id)

        # Call the fallback model
        fb_raw = await generate_fallback_response(request, fb_prompt)

        # Fallback-specific final validation
        fb_checked = await fallback_final_check(
            request=request,
            fb_response=fb_raw,
            player_utt=pre["player_utterance"],
            npc_config=pre["tags"],
            action_delta=pre.get("trigger_meta", {}),
        )

        # Build and return the payload
        return {
            "session_id": session_id,
            "npc_output_text": fb_checked,
            "flags": {},  # flags/deltas for the fallback path were already resolved in pre
            "deltas": pre.get("trigger_meta", {}).get("delta", {}),
            "meta": {
                "npc_id": pre["npc_id"],
                "quest_stage": pre["game_state"].get("quest_stage", "default"),
                "location": pre["game_state"].get("location", context.get("location", "unknown")),
            },
        }

    # 3. Main path
    main_prompt = build_main_prompt(pre, session_id, npc_id)

    # Call the main model
    result = await generate_response(session_id, npc_id, main_prompt, max_tokens=200)

    # postprocess_pipeline builds the final payload
    return_payload = await postprocess_pipeline(
        request=request,
        pre_data=pre,          # pass the full preprocess result
        model_payload=result,  # main model output
        context=context,
    )
    return return_payload
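

# --- Usage sketch (not part of the original module) ---
# A minimal example of wiring handle_dialogue into a FastAPI route, shown here
# as an assumption: the `dialogue_handler` module name, the app object, the
# /dialogue path, and the DialogueIn request model are all hypothetical; only
# fastapi.Request is confirmed by the imports above.
from fastapi import FastAPI
from pydantic import BaseModel

from dialogue_handler import handle_dialogue  # hypothetical module name for this file

app = FastAPI()


class DialogueIn(BaseModel):
    # Hypothetical request schema mirroring handle_dialogue's parameters.
    session_id: str
    npc_id: str
    user_input: str
    context: dict = {}


@app.post("/dialogue")
async def dialogue_endpoint(request: Request, body: DialogueIn) -> dict:
    # Delegate to the pipeline entry point and return its payload as JSON.
    return await handle_dialogue(
        request=request,
        session_id=body.session_id,
        npc_id=body.npc_id,
        user_input=body.user_input,
        context=body.context,
    )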