Commit 647ba1b
1 Parent(s): f509e9a
Update app.py

app.py CHANGED
@@ -2,7 +2,7 @@ import torch, os, argparse, shutil, textwrap, time, streamlit as st
 from langchain.document_loaders import YoutubeLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.vectorstores import Chroma
-from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings,
+from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings, HuggingFaceBgeEmbeddings
 from langchain.chains import RetrievalQA
 from langchain.llms import OpenAI
 from langchain.chat_models import ChatOpenAI
@@ -61,7 +61,7 @@ def chat():
 
     vector_db = Chroma.from_documents(
         documents,
-        embeddings =
+        embeddings = HuggingFaceBgeEmbeddings(model_name=model_name, model_kwargs={'device': 'cuda' if torch.cuda.is_available() else 'cpu'}, encode_kwargs=encode_kwargs)
     )
     repo_id = "tiiuae/falcon-7b-instruct"
     qa_chain = RetrievalQA.from_chain_type(
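
For context, below is a minimal, self-contained sketch of the pattern this commit introduces: build HuggingFaceBgeEmbeddings, index documents in Chroma, and answer questions over them with a RetrievalQA chain backed by tiiuae/falcon-7b-instruct. The model_name, encode_kwargs, sample document, and the HuggingFaceHub LLM wrapper are assumptions for illustration; app.py defines its own values elsewhere in the file. Note that released LangChain versions expose the embedding function on Chroma.from_documents through the `embedding` keyword (singular), which the sketch uses.

    # Sketch only: assumed model_name, encode_kwargs, sample document, and LLM wrapper.
    import torch
    from langchain.embeddings import HuggingFaceBgeEmbeddings
    from langchain.vectorstores import Chroma
    from langchain.chains import RetrievalQA
    from langchain.llms import HuggingFaceHub
    from langchain.schema import Document

    model_name = "BAAI/bge-small-en"                # assumed; app.py sets its own model_name
    encode_kwargs = {"normalize_embeddings": True}  # assumed; commonly recommended for BGE models

    # BGE embeddings on GPU when available, otherwise CPU (as in the commit).
    embeddings = HuggingFaceBgeEmbeddings(
        model_name=model_name,
        model_kwargs={"device": "cuda" if torch.cuda.is_available() else "cpu"},
        encode_kwargs=encode_kwargs,
    )

    # Stand-in for the transcript chunks app.py loads from YouTube.
    documents = [Document(page_content="Falcon-7B-Instruct is a 7B-parameter instruction-tuned model.")]

    # Index the documents; note the `embedding` keyword (singular).
    vector_db = Chroma.from_documents(documents, embedding=embeddings)

    # Assumed LLM wrapper for the repo_id used in the commit
    # (requires HUGGINGFACEHUB_API_TOKEN in the environment).
    llm = HuggingFaceHub(
        repo_id="tiiuae/falcon-7b-instruct",
        model_kwargs={"temperature": 0.1, "max_new_tokens": 256},
    )

    # Retrieval-augmented QA over the Chroma index.
    qa_chain = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vector_db.as_retriever(),
    )

    print(qa_chain.run("What is Falcon-7B-Instruct?"))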