File size: 6,409 Bytes
d62bf80
 
 
c429d8c
 
 
d62bf80
 
c429d8c
 
 
 
 
 
 
d62bf80
c429d8c
 
 
 
 
 
 
 
 
 
 
d62bf80
 
 
c429d8c
 
d62bf80
c429d8c
d62bf80
c429d8c
 
 
 
 
 
 
 
 
 
 
d62bf80
c429d8c
 
d62bf80
c429d8c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d62bf80
c429d8c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>💛 Emotional Support Assistant</title>
  <!-- Tailwind utility classes from the CDN build; bespoke styles are in the <style> block below -->
  <script src="https://cdn.tailwindcss.com"></script>
  <style>
    /* Smooth cross-fade when the script swaps the emotion gradient */
    body { transition: background 1s ease; }
    /* Mic status dot: gray when idle; .listening turns it green with a glow */
    #mic-indicator {
      width: 20px; height: 20px; border-radius: 50%;
      margin-left: 10px; display: inline-block;
      background-color: gray;
      box-shadow: 0 0 5px rgba(0,0,0,0.2);
      transition: background-color 0.3s ease, box-shadow 0.3s ease;
    }
    .listening {
      background-color: #22c55e;
      box-shadow: 0 0 15px #22c55e;
    }
    /* Applied to the status line while speech synthesis is active */
    .speaking {
      animation: pulse 1s infinite;
    }
    @keyframes pulse {
      0% { opacity: 0.3; }
      50% { opacity: 1; }
      100% { opacity: 0.3; }
    }
  </style>
</head>
<body class="min-h-screen flex flex-col items-center justify-center bg-gradient-to-br from-yellow-100 to-yellow-300 p-4">
  <h1 class="text-2xl font-bold mb-4 text-center">💛 Emotional Support Assistant</h1>

  <!-- Scrollable conversation history -->
  <div id="chat-box" class="w-full max-w-2xl h-96 overflow-y-auto bg-white p-4 rounded shadow mb-4 border"></div>

  <!-- Typed-message input row -->
  <div class="flex gap-2 w-full max-w-2xl">
    <input id="user-input" type="text" placeholder="Type your message..."
           class="flex-1 border p-2 rounded" />
    <button id="send-btn" class="bg-blue-500 text-white px-4 py-2 rounded">Send</button>
  </div>

  <!-- Voice controls plus the mic status dot -->
  <div class="mt-4 flex items-center gap-3">
    <button id="start-btn" class="bg-green-500 text-white px-4 py-2 rounded">🎙️ Start Listening</button>
    <button id="stop-btn" class="bg-red-500 text-white px-4 py-2 rounded">⏹️ Stop</button>
    <div id="mic-indicator" title="Mic status"></div>
  </div>

  <!-- Hidden by default; the script reveals it while the bot is speaking -->
  <div id="speaking-status" class="text-gray-600 mt-3 hidden">
    🤖 Bot is speaking...
  </div>
  <script>
    // --- DOM handles used throughout the script ---
    const chatBox = document.getElementById("chat-box");           // conversation history container
    const userInput = document.getElementById("user-input");       // text input (also filled by voice)
    const sendBtn = document.getElementById("send-btn");
    const startBtn = document.getElementById("start-btn");
    const stopBtn = document.getElementById("stop-btn");
    const micIndicator = document.getElementById("mic-indicator"); // status dot; toggled via .listening
    const speakingStatus = document.getElementById("speaking-status");

    // --- Speech state ---
    let recognition;                        // SpeechRecognition instance, set below if supported
    let isListening = false;                // true between recognition onstart and onend/onerror
    let synth = window.speechSynthesis;     // browser text-to-speech engine
    let currentUtterance = null;            // utterance currently queued/being spoken

    // 🎤 Initialize SpeechRecognition (prefixed in Chrome as webkitSpeechRecognition)
    if ("webkitSpeechRecognition" in window || "SpeechRecognition" in window) {
      const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
      recognition = new SpeechRecognition();
      recognition.continuous = true;        // keep capturing across pauses until stop() is called
      recognition.interimResults = false;   // only deliver finalized transcripts
      recognition.lang = "en-US";

      // Show the green mic dot and disable Start while capture is active.
      recognition.onstart = () => {
        micIndicator.classList.add("listening");
        isListening = true;
        startBtn.disabled = true;
      };

      // Reset the mic UI whenever capture ends (user stop or browser timeout).
      recognition.onend = () => {
        micIndicator.classList.remove("listening");
        isListening = false;
        startBtn.disabled = false;
      };

      // With continuous mode, results accumulate — take only the newest one,
      // place it in the input box, and submit it as if typed.
      recognition.onresult = (event) => {
        const text = event.results[event.results.length - 1][0].transcript.trim();
        userInput.value = text;
        sendMessage();
      };

      // On recognition errors (no-speech, not-allowed, network…) log and reset the UI.
      recognition.onerror = (event) => {
        console.error("Speech recognition error:", event.error);
        micIndicator.classList.remove("listening");
        isListening = false;
        startBtn.disabled = false;
      };
    } else {
      alert("Speech recognition not supported in this browser. Try using Chrome.");
    }

    // πŸ’¬ Send user message to Flask backend
    async function sendMessage() {
      const text = userInput.value.trim();
      if (!text) return;
      addMessage("πŸ§‘ You", text);
      userInput.value = "";

      try {
        const res = await fetch("/chat", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ message: text }),
        });

        const data = await res.json();
        addMessage("πŸ€– Bot", data.text);
        speakText(data.text);
        updateBackground(data.emotion);
      } catch (err) {
        console.error("Error:", err);
      }
    }

    // πŸ—£οΈ Speak text aloud and show animation
    function speakText(text) {
      if (synth.speaking) synth.cancel();

      currentUtterance = new SpeechSynthesisUtterance(text);
      currentUtterance.lang = "en-US";

      speakingStatus.classList.remove("hidden");
      speakingStatus.classList.add("speaking");

      currentUtterance.onend = () => {
        speakingStatus.classList.add("hidden");
        speakingStatus.classList.remove("speaking");
      };

      synth.speak(currentUtterance);
    }

    // 🧠 Button handlers

    // Begin voice capture; the isListening guard avoids calling start() on an
    // already-running recognizer (which would throw InvalidStateError).
    startBtn.onclick = () => {
      if (recognition && !isListening) recognition.start();
    };

    // Stop everything: mic capture, any speech playback, and both status
    // indicators — a full reset regardless of current state.
    stopBtn.onclick = () => {
      if (isListening && recognition) recognition.stop();
      if (synth.speaking) synth.cancel();
      isListening = false;
      micIndicator.classList.remove("listening");
      speakingStatus.classList.add("hidden");
      startBtn.disabled = false;
    };

    sendBtn.onclick = sendMessage;

    // πŸ’¬ Display messages in chat
    function addMessage(sender, text) {
      const bubble = document.createElement("div");
      bubble.className = sender.includes("Bot")
        ? "text-left mb-2 bg-yellow-100 p-2 rounded"
        : "text-right mb-2 bg-blue-100 p-2 rounded";
      bubble.innerHTML = `<strong>${sender}:</strong> ${text}`;
      chatBox.appendChild(bubble);
      chatBox.scrollTop = chatBox.scrollHeight;
    }

    // 🎨 Recolor the page body to match the bot-reported emotion.
    //
    // Looks the (case-insensitive) emotion up in a gradient table; unknown or
    // missing emotions fall back to a neutral gray. The body's CSS transition
    // makes the swap fade smoothly.
    function updateBackground(emotion) {
      const gradients = {
        happy: "linear-gradient(135deg, #fff176, #ffd54f)",
        sad: "linear-gradient(135deg, #64b5f6, #1976d2)",
        angry: "linear-gradient(135deg, #ff7043, #f44336)",
        calm: "linear-gradient(135deg, #a5d6a7, #66bb6a)",
        motivated: "linear-gradient(135deg, #ffb74d, #fb8c00)",
      };
      const fallback = "linear-gradient(135deg, #e0e0e0, #bdbdbd)";
      const key = emotion?.toLowerCase();
      // hasOwn guards against inherited keys like "constructor" matching.
      document.body.style.background =
        Object.hasOwn(gradients, key) ? gradients[key] : fallback;
    }
  </script>
</body>
</html>