talktomodel / app.py
import streamlit as st
from transformers import pipeline
# Load the model
qa_pipeline = pipeline("text-generation", model="EleutherAI/gpt-neo-125m")
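# Note: gpt-neo-125m is a small, general-purpose generative model, so "answers"
# are free-form text completions rather than extractive Q&A responses.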
# Streamlit interface
st.title("GPT-Neo 125M Q&A App")
# Example questions
example_questions = [
    "Who is Spider-Man?",
    "Who is Venom?",
    "Who is OpenAI?",
    "Who is Rocket Raccoon?"
]
st.subheader("Select an example question or type your own:")
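# The text input is pre-filled with the selected example; typing a new question replaces it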
selected_question = st.selectbox("Choose an example question", example_questions)
user_question = st.text_input("Or ask your own question:", value=selected_question)
# Generate answer
if user_question:
    with st.spinner('Generating answer...'):
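        # max_length=50 caps the total length (prompt plus generated tokens),
        # so completions from this small model stay short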
        response = qa_pipeline(user_question, max_length=50)
        # generated_text contains the prompt followed by the model's continuation
        answer = response[0]['generated_text']
        st.write(answer)
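# To try this locally (assuming streamlit and transformers are installed):
#   streamlit run app.py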