import gradio as gr
import requests
import json
import os
LANGUAGES = ['Akan', 'Arabic', 'Assamese', 'Bambara', 'Bengali', 'Catalan', 'English', 'Spanish', 'Basque', 'French', 'Gujarati', 'Hindi',
             'Indonesian', 'Igbo', 'Kikuyu', 'Kannada', 'Ganda', 'Lingala', 'Malayalam', 'Marathi', 'Nepali', 'Chichewa', 'Oriya', 'Panjabi', 'Portuguese',
             'Kirundi', 'Kinyarwanda', 'Shona', 'Sotho', 'Swahili', 'Tamil', 'Telugu', 'Tswana', 'Tsonga', 'Twi', 'Urdu', 'Vietnamese', 'Wolof', 'Xhosa',
             'Yoruba', 'Chinese', 'Zulu']
API_URL = "https://api-inference.huggingface.co/models/bigscience/bloom"
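# Hedged sketch (not in the original code): the unused `import os` suggests an API
# token was intended. If a Hugging Face token is exported as HF_TOKEN (variable name
# assumed here), send it as a Bearer header to reduce rate limiting; otherwise the
# public Inference API is called anonymously, as the original code does.
HF_TOKEN = os.environ.get("HF_TOKEN", "")
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}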
def translate(input_lang, output_lang, text):
    """Translate text from the input language to the output language with BLOOM."""
    # Prompt BLOOM to continue a "Translation in X: ... Translation in Y:" pattern.
    instruction = f"""Translation in {input_lang}: {text.strip()}<end> Translation in {output_lang}:"""
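    # Inference API payload (descriptive notes, not in the original): do_sample=False
    # requests greedy decoding, max_new_tokens caps generation at 250 tokens,
    # return_full_text=True echoes the prompt (stripped again below), and
    # wait_for_model=True blocks until BLOOM is loaded instead of returning an error.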
    json_ = {
        "inputs": instruction,
        "parameters": {
            "return_full_text": True,
            "do_sample": False,
            "max_new_tokens": 250,
        },
        "options": {
            "use_cache": True,
            "wait_for_model": True,
        },
    }
    response = requests.post(API_URL, headers=HEADERS, json=json_)
    result = response.json()[0]['generated_text']
    # Drop the echoed prompt, then keep only the text after the target-language marker.
    result = result.replace(instruction, '', 1)
    marker = f'Translation in {output_lang}:'
    search_char = result.find(marker)
    if search_char != -1:
        result = result[search_char + len(marker):]
    return result.split('<end>')[0].strip()
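# Hedged usage example (not part of the original app): calling the function directly,
# e.g. translate('English', 'French', 'How are you today?'), should return the French
# text generated by BLOOM, assuming the Inference API is reachable and the model has
# finished loading (wait_for_model=True above).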
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Translation with Bloom</center></h1>")
    gr.Markdown("<center>Translate between languages with the BigScience BLOOM model.</center>")
    with gr.Row():
        input_lang = gr.Dropdown(LANGUAGES, value='English', label='Select input language')
        output_lang = gr.Dropdown(LANGUAGES, value='French', label='Select output language')
    input_text = gr.Textbox(label="Input", lines=6)
    output_text = gr.Textbox(lines=6, label="Output")
    button = gr.Button("Translate")
    button.click(translate, inputs=[input_lang, output_lang, input_text], outputs=output_text)
demo.launch(enable_queue=True, debug=True)
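# Note (version assumption): enable_queue=True is the Gradio 3.x launch flag; on newer
# Gradio releases the equivalent is calling demo.queue() before demo.launch().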