German (Deutsch) is now available: the language can be switched, language-specific embedding models are used, resources can be ingested in both languages, and prompts and vector retrieval are supported for both languages.
db81bb8
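The commit message mentions language-specific prompts and embedding models, but `app.utils.prompts` and `app.managers.vector_manager` are not part of this diff. Below is a minimal sketch, under assumptions, of how such a language-keyed setup could look; the prompt texts, model names, index paths, and the `load_vectorstore` helper are illustrative, not the repository's actual code.

```python
# Hypothetical sketch: per-language prompts, embedding models, and FAISS indexes.
# All names and values here are assumptions for illustration only.
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

# System prompts keyed by language; each must contain a {context} placeholder
# so the downstream "stuff documents" chain can inject retrieved documents.
prompts = {
    "English": (
        "You are a helpful assistant. Answer the question using only the "
        "retrieved context below.\n\n{context}"
    ),
    "Deutsch": (
        "Du bist ein hilfreicher Assistent. Beantworte die Frage nur anhand "
        "des folgenden Kontexts.\n\n{context}"
    ),
}

# One embedding model and one on-disk index per language (assumed layout).
EMBEDDING_MODELS = {
    "English": "sentence-transformers/all-MiniLM-L6-v2",
    "Deutsch": "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2",
}
VECTORSTORE_PATHS = {
    "English": "vectorstores/en",
    "Deutsch": "vectorstores/de",
}

def load_vectorstore(language: str = "Deutsch"):
    """Load the FAISS index built with the embedding model for `language`."""
    embeddings = HuggingFaceEmbeddings(model_name=EMBEDDING_MODELS[language])
    return FAISS.load_local(
        VECTORSTORE_PATHS[language],
        embeddings,
        allow_dangerous_deserialization=True,
    )
```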
```python
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate
from langchain import hub
from app.models.model import LLM
from app.managers import vector_manager as vm
from app.utils.prompts import prompts
from app.utils import pdf2vector


def get_qa_chain(language: str = "Deutsch"):
    # Load the language-specific vector store and expose it as a retriever.
    db = vm.load_vectorstore(language)
    retriever = db.as_retriever(search_kwargs={"k": 5})

    # Previously: RetrievalQA.from_chain_type(llm=LLM, retriever=retriever,
    #                                         return_source_documents=True)

    # Language-specific system prompt; it must contain a {context} placeholder
    # so create_stuff_documents_chain can inject the retrieved documents.
    system_prompt = prompts[language]
    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", system_prompt),
            ("human", "{input}"),
        ]
    )
    # Alternative prompt: prompt = hub.pull("langchain-ai/retrieval-qa-chat")

    question_answer_chain = create_stuff_documents_chain(LLM, prompt)
    chain = create_retrieval_chain(retriever, question_answer_chain)

    # Usage: chain.invoke({"input": query})
    return chain
```
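A short usage sketch of the chain returned by `get_qa_chain`: the query text is illustrative, while the `"answer"` and `"context"` keys are the ones `create_retrieval_chain` populates in its output dictionary.

```python
# Build a chain over the German index and ask an illustrative question.
chain = get_qa_chain("Deutsch")
result = chain.invoke({"input": "Was ist die Kündigungsfrist?"})

print(result["answer"])             # generated answer from the LLM
for doc in result["context"]:       # the k=5 retrieved source documents
    print(doc.metadata.get("source"), doc.page_content[:100])
```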