import random
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the French chatbot model and its tokenizer.
# AutoModelWithLMHead is deprecated; the causal-LM auto class is the current equivalent.
tokenizer = AutoTokenizer.from_pretrained("cedpsam/chatbot_fr")
model = AutoModelForCausalLM.from_pretrained("cedpsam/chatbot_fr")

global_history = []

def chat(message, history):
    # Use the per-session state if present, otherwise fall back to the module-level history.
    history = history or global_history
    # Rebuild the conversation so far as alternating user/bot turns.
    prev = "\n".join(f"{a}\n{b}" for a, b in history)
    bot_input_ids = tokenizer.encode(
        f"{prev}\n {message}" + tokenizer.eos_token, return_tensors="pt"
    )
    chat_history_ids = model.generate(
        bot_input_ids,
        max_length=80,
        pad_token_id=tokenizer.eos_token_id,
        top_p=0.92,
        top_k=50,
        num_beams=16,
        max_time=30,
    )
    # Decode only the newly generated tokens (everything after the prompt).
    response = tokenizer.decode(
        chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True
    )
    history.append((message, response))
    return history, history

iface = gr.Interface(
    chat,
    ["text", "state"],
    ["chatbot", "state"],
    allow_screenshot=False,  # only supported by older Gradio releases
    allow_flagging="never",
)
iface.launch()
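
The chat function can also be exercised without the Gradio UI to check the generation loop. Below is a minimal sketch; the example prompt and the __main__ guard are assumptions for illustration and are not part of the original Space.

# Minimal smoke test (hypothetical prompt); run in a separate session or before iface.launch().
if __name__ == "__main__":
    test_history, _ = chat("Bonjour, comment vas-tu ?", [])
    for user_msg, bot_msg in test_history:
        print("user:", user_msg)
        print("bot: ", bot_msg)

Because the Interface wires a "state" input to a "state" output, the returned history list is fed back into chat on the next turn, which is what lets prev accumulate the conversation.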