# app.py

import json

import gradio as gr
from mec_api import run_mec_pipeline
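
# NOTE: run_mec_pipeline is expected to return a 3-tuple of
# (fusion_prompt, instruction_block, uesp_packet); uesp_packet must be a
# JSON-serialisable dict, since dev mode dumps it with json.dumps below.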

def mec_demo(user_input, force_hei, show_raw_uesp):
    # Run the MEC pipeline on the raw user sentence; force_hei toggles HEI mode.
    fusion_prompt, instruction_block, uesp_packet = run_mec_pipeline(user_input, force_hei=force_hei)

    # Only serialise the raw UESP packet when the dev-mode checkbox is ticked.
    output_uesp_str = ""
    if show_raw_uesp:
        output_uesp_str = json.dumps(uesp_packet, indent=2)

    return fusion_prompt, instruction_block, output_uesp_str

with gr.Blocks() as demo:
    gr.Markdown("# Master Emotional Core™ (MEC™) — Functional Empathy Demo")
    gr.Markdown("Live demo of MEC Functional Empathy Pipeline — *Empathy-first AI. Built to protect.*")

    user_input = gr.Textbox(label="Your Input Sentence", placeholder="Type here...")
    force_hei = gr.Checkbox(label="Force HEI Mode", value=False)
    show_raw_uesp = gr.Checkbox(label="Show Raw UESP Packet (Dev Mode)", value=False)

    run_button = gr.Button("Run Demo")

    output_fusion = gr.Textbox(label="Fusion Prompt")
    output_instruction = gr.Textbox(label="LLM Instruction Block")
    output_uesp = gr.Textbox(label="Raw UESP Packet (if enabled)")
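    # Wire the button below: the three UI inputs feed mec_demo, and its three
    # return values populate these three output boxes in order.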

    run_button.click(
        mec_demo,
        inputs=[user_input, force_hei, show_raw_uesp],
        outputs=[output_fusion, output_instruction, output_uesp]
    )

demo.launch()
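
# ---------------------------------------------------------------------------
# For local testing without the real mec_api package, a hypothetical stub
# (an assumption, not the actual MEC pipeline) could expose the same interface
# this app relies on. Save it as mec_api.py next to app.py and uncomment:
#
# def run_mec_pipeline(user_input, force_hei=False):
#     uesp_packet = {"input": user_input, "hei_mode": bool(force_hei)}
#     fusion_prompt = f"[FUSION] {user_input}"
#     instruction_block = "[INSTRUCTION] Respond with functional empathy."
#     return fusion_prompt, instruction_block, uesp_packet
# ---------------------------------------------------------------------------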