import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
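# Load the OpenPeerLLM tokenizer and model from the Hugging Face Hub;
# torch_dtype="auto" uses the dtype stored in the checkpoint.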
MODEL_NAME = "openpeerai/openpeerllm"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype="auto")
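# Wrap the model and tokenizer in a text-generation pipeline.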
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
def chat(prompt, max_new_tokens=256, temperature=0.7):
    """Generate a completion for the prompt and return only the newly generated text."""
    outputs = generator(
        prompt,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        do_sample=True,
        pad_token_id=tokenizer.eos_token_id
    )
    # The pipeline echoes the prompt, so strip it before returning.
    return outputs[0]['generated_text'][len(prompt):].strip()
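# Build the Gradio UI: a prompt box plus sliders for max new tokens and temperature.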
iface = gr.Interface(
    fn=chat,
    inputs=[
        gr.Textbox(lines=4, label="Enter your prompt"),
        gr.Slider(16, 1024, value=256, step=16, label="Max new tokens"),
        gr.Slider(0.1, 1.5, value=0.7, step=0.05, label="Temperature")
    ],
    outputs="text",
    title="OpenPeerLLM Text Demo",
    description="Chat with OpenPeerLLM from HuggingFace!"
)
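# Start the Gradio server when the script is run directly.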
if __name__ == "__main__":
    iface.launch()