Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| # from transformers import pipeline | |
| # from transformers.utils import logging | |
| from huggingface_hub import InferenceClient | |
| from llama_index.core import VectorStoreIndex, SimpleDirectoryReader | |
| from llama_index.vector_stores.chroma import ChromaVectorStore | |
| from llama_index.core.vector_stores import (MetadataFilters, ExactMatchFilter, ) | |
| from llama_index.embeddings.huggingface import HuggingFaceEmbedding | |
| import torch | |
| from llama_index.core import ( | |
| VectorStoreIndex, | |
| Document, | |
| Settings, | |
| ) | |
| from llama_index.llms.huggingface import (HuggingFaceLLM, ) | |
| from llama_index.llms.huggingface_api import (HuggingFaceInferenceAPI, ) | |
| from llama_index.core.base.llms.types import ChatMessage | |
| from huggingface_hub import login | |
| import chromadb as chromadb | |
| from chromadb.utils import embedding_functions | |
| import shutil | |
| import os | |
| from io import StringIO | |
| from llama_index.core.memory import ChatMemoryBuffer | |
# Rolling conversation memory shared by the chat engine built below.
memory = ChatMemoryBuffer.from_defaults(token_limit=1500)
#
last = 0  # number of records already present in the Chroma collection (set below)
# On-disk location of the persistent Chroma vector store.
CHROMA_DATA_PATH = "chroma_data/"
# Sentence-transformers model used to embed both stored documents and queries.
EMBED_MODEL = "sentence-transformers/all-MiniLM-L6-v2" # "BAAI/bge-m3"
#LLM_NAME = "mistralai/Mistral-Nemo-Instruct-2407"
#LLM_NAME = "sswiss-ai/apertus-70b-instruct" # provider: publicai
#LLM_NAME = "openai/gpt-oss-20b"
# Remote LLM served through the HF Inference API (see Settings.llm below).
LLM_NAME = "swiss-ai/apertus-8b-instruct"
#LLM_NAME = "aisingapore/Gemma-SEA-LION-v4-27B-IT"
#LLM_NAME = "W4D/YugoGPT-7B-Instruct-GGUF"
# Chunking / retrieval knobs. NOTE(review): CHUNK_SIZE/CHUNK_OVERLAP, min_len,
# min_distance and max_distance are not referenced in this file's visible code —
# presumably used by an ingestion script; confirm before removing.
CHUNK_SIZE = 800
CHUNK_OVERLAP = 50
max_results = 3
min_len = 40
min_distance = 0.35
max_distance = 0.6
# Generation hyper-parameters forwarded to HuggingFaceInferenceAPI below.
temperature = 0.7
max_tokens=5100
top_p=0.85
top_k=1000
frequency_penalty=0.0
repetition_penalty=1.12
presence_penalty=0.15
cs = "s0"        # dialogue-state id; only referenced in commented-out filter code
sp_flag = True   # True until the first rag() call; gates sending the system prompt once
# Base system prompt (Serbian): assistant persona for United Group support.
system_sr = "Zoveš se U-Chat AI asistent i pomažeš odgovorima korisniku usluga kompanije United Group. Korisnik postavlja pitanje ili problem na koji očekuje rešenje. "
# " Ako ne znaš odgovor, reci da ne znaš, ne izmišljaj ga."
system_sr += "Usluge kompanije United Group uključuju i kablovsku mrežu za digitalnu televiziju, pristup internetu, uređaj EON SMART BOX za TV sadržaj, kao i fiksnu telefoniju. "
# Persistent Chroma client + cosine-distance collection embedded with EMBED_MODEL.
chroma_client = chromadb.PersistentClient(CHROMA_DATA_PATH)
embedding_func = embedding_functions.SentenceTransformerEmbeddingFunction(
    model_name=EMBED_MODEL
)
collection = chroma_client.get_or_create_collection(
    name="chroma_data",
    embedding_function=embedding_func,
    metadata={"hnsw:space": "cosine"},
)
# Resume id numbering after whatever is already stored on disk.
last = collection.count()
#
# Hugging Face token must be supplied via the environment (Space secret).
HF_TOKEN = os.getenv("HF_TOKEN")
#
login(token=(HF_TOKEN))
#system_prompt = system_sr
# Raw inference client; not used directly below — chat goes through llama-index.
client = InferenceClient(LLM_NAME)
# "facebook/blenderbot-400M-distill", facebook/blenderbot-400M-distill, stabilityai/stablelm-zephyr-3b, BAAI/bge-small-en-v1.5
# Global LLM for llama-index: remote inference with the sampling knobs defined above.
Settings.llm = HuggingFaceInferenceAPI(model_name=LLM_NAME,
    # device_map="auto",
    # system_prompt = system_prompt,
    context_window=4096,
    max_new_tokens=3072,
    # stopping_ids=[50278, 50279, 50277, 1, 0],
    # NOTE(review): temperature/top_p/top_k are passed while do_sample=False —
    # sampling params are likely ignored by greedy decoding; confirm intent.
    generate_kwargs={"temperature": temperature, "top_p":top_p, "repetition_penalty": repetition_penalty,
                     "presence_penalty": presence_penalty, "frequency_penalty": frequency_penalty,
                     "top_k": top_k, "do_sample": False },
    # tokenizer_kwargs={"max_length": 4096},
    tokenizer_name=LLM_NAME,
    hf_token = HF_TOKEN,
    src = "models",
    provider="publicai",
)
# "BAAI/bge-m3"
# Local embedding model — must match the one used to populate the collection.
Settings.embed_model = HuggingFaceEmbedding(model_name=EMBED_MODEL)
#documents = [Document(text="Content ..."),
# ]
#index = VectorStoreIndex.from_documents(
# documents,
#)
# Wrap the existing Chroma collection as a llama-index vector store and build
# the module-level chat engine that rag() uses for every turn.
vector_store = ChromaVectorStore(chroma_collection=collection)
index = VectorStoreIndex.from_vector_store(vector_store, embed_model=Settings.embed_model)
chat_engine = index.as_chat_engine(chat_mode="condense_plus_context", memory=memory, verbose=True)
# best condense_question context condense_plus_context
#query_engine = index.as_query_engine(verbose=True)
def upload_file(filepath):
    """Ingest the uploaded file/directory and swap in a chat engine over it.

    Bug fix: the original rebound ``index`` and ``chat_engine`` as *locals*,
    so the upload never affected the module-level ``chat_engine`` that
    ``rag()`` uses. We now rebind the global.

    Parameters:
        filepath: path handed over by the Gradio upload widget; may be a
            single file or a directory.

    Returns:
        The same *filepath*, so Gradio can display it in the File output.
    """
    global chat_engine
    # SimpleDirectoryReader's positional argument is a directory; a single
    # uploaded file must be passed via input_files instead.
    if os.path.isdir(filepath):
        documents = SimpleDirectoryReader(filepath).load_data()
    else:
        documents = SimpleDirectoryReader(input_files=[filepath]).load_data()
    new_index = VectorStoreIndex.from_documents(documents)
    #query_engine = new_index.as_query_engine()
    #condense_question condense_plus_context
    chat_engine = new_index.as_chat_engine(verbose=True)
    return filepath
def resetChat():
    """Wipe the conversation history held by the global chat engine.

    Wired to the chat UI's clear button; returns True so Gradio treats the
    callback as successful.
    """
    engine = chat_engine
    engine.reset()
    print("Restarted!!!")
    return True
| def rag(input_text, history, jezik): # , file): | |
| global sp_flag | |
| # if (btn): | |
| # resetChat() | |
| # print(history, input_text) | |
| ## if (file): | |
| documents = [] | |
| #!!! for f in file: | |
| #!!! documents += SimpleDirectoryReader(f).load_data() | |
| # f = file + "*.pdf" | |
| ## pathname = os.path.dirname | |
| # shutil.copyfile(file.name, path) | |
| ## print("pathname=", pathname) | |
| ## print("basename=", os.path.basename(file)) | |
| ## print("filename=", file.name) | |
| ## documents = SimpleDirectoryReader(file).load_data() | |
| #!!! index2 = VectorStoreIndex.from_documents(documents) | |
| ## query_engine = index2.as_query_engine() | |
| # return query_engine.query(input_text) | |
| # return history.append({"role": "assistant", "content": query_engine.query(input_text)}) | |
| ## return history + [[input_text, query_engine.query(input_text)]] | |
| # collection.add( | |
| # documents=documents, | |
| # ids=[f"id{last+i}" for i in range(len(documents))], | |
| # metadatas=[{"state": "s0", "next": "s0", "used": False, "source": 'None', "page": -1, "lang": jezik } for i in range(len(documents)) ] | |
| # ) | |
| ## else: | |
| ### query_results = collection.query( | |
| #query_engine = index.as_query_engine( | |
| # similarity_top_k=3, | |
| # vector_store_query_mode="default", | |
| # filters=MetadataFilters( | |
| # filters=[ | |
| # ExactMatchFilter(key="lang", value=jezik), | |
| # ] | |
| # ), | |
| # alpha=None, | |
| # doc_ids=None, | |
| #) | |
| #query_results = index.query( | |
| # query_texts = [ input_text ], | |
| # n_results = max_results, | |
| # where = { "lang": jezik }, | |
| # #where = { "$and": [ {"lang": jezik}, {"page": { "$nin": [ -1 ]}}]}, | |
| # #where = { "$and": [ {"$and": [ { "$or": [ {"state": self.cs }, { "page": { "$nin": [ -1 ] } } ] } , { "used": False } ] } , | |
| # # {"lang": jezik } ] }, | |
| #) | |
| #jezik = "N/A" | |
| system_prompt = "" | |
| match jezik: | |
| case 'hrvatski': | |
| o_jezik = 'na hrvatskom jeziku, gramatički točno.' | |
| system_prompt = system_sr + "Call centar telefon je 095 1000 444 za privatne i 095 1000 500 za poslovne korisnike. Stranica podrške je <https://tele mach.hr/podrska>." + "Odgovaraj isključivo " + o_jezik | |
| case 'slovenski': | |
| o_jezik = 'v slovenščini, slovnično pravilen.' | |
| system_prompt = system_sr + "Call centar i pomoč za fizične uporabnike: 070 700 700.stran za podporo je <https://telemach.si/pomoc>. " + "Odgovor isključivo " + o_jezik | |
| case 'srpski': | |
| o_jezik = 'na srpskom jeziku, gramatički ispravno.' | |
| system_prompt = system_sr + "Call centar telefon je 19900 za sve korisnike. Stranica podrške je <https://sbb.rs/podrska/>. " + "Odgovaraj isključivo " + o_jezik | |
| case 'makedonski': | |
| o_jezik = 'на македонски јазикот граматички точно.' | |
| system_prompt = system_sr + "Stranica podrške je https://mn.nettvplus.com/me/podrska/ za NetTV. " + "Oдговори исклучиво " + o_jezik | |
| case 'Eksperimentalna opcija': | |
| o_jezik = 'N/A' | |
| system_prompt = system_sr + "Call centar telefon je 12755 za Crnu Goru, 0800 31111 za BIH, 070 700 700 u Sloveniji, 19900 u Srbiji, 095 1000 444 za hrvatske korisnike. Odgovori na jeziku istom kao i u postavljenom pitanju ili problemu korisnika." | |
| print("jezik: "+o_jezik) | |
| system_prompt = system_prompt + " Sledi pitanje ili problem korisnika, sa kojim dalje komuniciraš: " | |
| if sp_flag: | |
| sp_flag = False | |
| else: | |
| system_prompt = "" | |
| # if (o_jezik!='N/A'): | |
| # input_text += " - odgovori " + o_jezik + "." | |
| # Settings.llm.system_prompt = system_prompt | |
| response = chat_engine.chat(str({"role": "user", "content": system_prompt + input_text})).response | |
| # response = query_engine.query(input_text) | |
| return response | |
# gr.Textbox(label="Pitanje:", lines=6),
# outputs=[gr.Textbox(label="Odgovor:", lines=6)],
# ChatMessage(role="assistant", content="Kako Vam mogu pomoći?")
# Build the chat UI. fn=rag receives (message, history, jezik); jezik comes
# from the additional_inputs dropdown below.
with gr.Blocks() as iface:
    # NOTE(review): retry_btn/undo_btn/clear_btn kwargs and type="tuples"
    # exist only in Gradio 4.x — confirm the pinned gradio version.
    ichat = gr.ChatInterface(fn=rag,
        title="UChat",
        description="Postavite pitanje ili opišite problem koji imate - nakon promene jezika ili pre početka nove sesije sa agentom pritisnite dugme 'Briši sve - razgovor ispočetka'",
        chatbot=gr.Chatbot(placeholder="Kako Vam mogu pomoći?", type="tuples", label="Agent podrške", height=350),
        textbox=gr.Textbox(placeholder="Pitanje ili opis problema", container=False, scale=7),
        autofocus = True,
        theme="soft",
        examples = [
            ["Ne radi mi internet", "srpski", ],
            ["Možete li mi popraviti kompjuter koji koristi internet?", "srpski", ],
            ["Ne radi mi daljinski upravljač, šta da radim?", "srpski", ],
            ["EON daljinski upravljalnik mi ne deluje, kaj naj naredim?", "slovenski", ],
            ["Мојот кабелски прием не работи, што треба да направам?", "makedonski", ],
        ],
        cache_examples=False,
        retry_btn=None,
        undo_btn=None,
        clear_btn="Briši sve - razgovor ispočetka",
        additional_inputs = [gr.Dropdown(["slovenski", "hrvatski", "srpski", "makedonski", "Eksperimentalna opcija"], value="srpski", label="Jezik", info="komunikacije"),
            # gr.File()
        ],
        additional_inputs_accordion="Jezik i ostale opcije",
    )
    # login_button = gr.LoginButton("Hugging Face login", size="lg")
    # The clear button must also drop the llama-index chat memory, otherwise
    # the condensed history would leak into the next session.
    ichat.clear_btn.click(resetChat)
#with gr.Blocks() as iface:
#    gr.Markdown("Uchat")
#    file_out = gr.File()
#    with gr.Row():
#        with gr.Column(scale=1):
#            inp = gr.Textbox(label="Pitanje:", lines=6)
#            u = gr.UploadButton("Upload a file", file_count="single")
#        with gr.Column(scale=1):
#            out = gr.Textbox(label="Odgovor:", lines=6)
#            sub = gr.Button("Pokreni")
#
#    u.upload(upload_file, u, file_out)
#    sub.click(rag, inp, out)
iface.launch()