LLm_file / app.py
Enoch1359's picture
Update app.py
4b6b65b verified
raw
history blame
957 Bytes
"""Minimal Streamlit chat UI backed by a LangChain ChatOpenAI client."""
import streamlit as st
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
import os

# Load API credentials from a local env file (kept out of version control).
load_dotenv("apiroute.env")
api_key = os.getenv("OPENAI_API_KEY")
api_base = os.getenv("OPENAI_API_BASE")

# Fail fast with a readable message instead of an opaque auth error on the
# first model call.
if not api_key:
    st.error("OPENAI_API_KEY is not set — check apiroute.env.")
    st.stop()

# Pass the credentials explicitly: previously these variables were loaded
# but never used, and the client silently relied on ambient env vars.
llm = ChatOpenAI(
    model_name="google/gemma-3n-e2b-it:free",
    temperature=0.7,
    openai_api_key=api_key,
    openai_api_base=api_base,
)

# Streamlit UI
st.set_page_config(page_title="Chatbot", layout="centered")
st.title("💬 Chat with me")

# Chat history lives in session state so it survives Streamlit reruns.
if "history" not in st.session_state:
    st.session_state.history = []

# Render the conversation so far — the original stored history but never
# displayed it.
for role, text in st.session_state.history:
    with st.chat_message("user" if role == "You" else "assistant"):
        st.write(text)

# st.chat_input clears itself after submission, so no manual rerun is
# needed. The original text_input + st.experimental_rerun() combination
# looped forever: the keyed text_input kept its value across reruns, so
# every rerun re-triggered the LLM call (and experimental_rerun is
# removed in current Streamlit anyway).
user_input = st.chat_input("Ask me anything:")

# Handle input
if user_input:
    # Record and display the user message.
    st.session_state.history.append(("You", user_input))
    with st.chat_message("user"):
        st.write(user_input)

    # Query the model, then record and display its reply.
    response = llm.invoke(user_input)
    st.session_state.history.append(("Bot", response.content))
    with st.chat_message("assistant"):
        st.write(response.content)