Commit: Update src/pdfchatbot.py (+3 lines, -1 line)
File changed: src/pdfchatbot.py
@@ -10,6 +10,8 @@ from langchain.chains import ConversationalRetrievalChain
|
|
| 10 |
from langchain.document_loaders import PyPDFLoader
|
| 11 |
from langchain.prompts import PromptTemplate
|
| 12 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
|
|
|
|
|
|
| 13 |
|
| 14 |
class PDFChatBot:
|
| 15 |
def __init__(self, config_path="config.yaml"):
|
|
@@ -147,7 +149,7 @@ class PDFChatBot:
|
|
| 147 |
self.load_model()
|
| 148 |
self.create_pipeline()
|
| 149 |
self.create_chain()
|
| 150 |
-
|
| 151 |
def generate_response(self, history, query, file):
|
| 152 |
"""
|
| 153 |
Generate a response based on user query and chat history.
|
|
|
|
| 10 |
from langchain.document_loaders import PyPDFLoader
|
| 11 |
from langchain.prompts import PromptTemplate
|
| 12 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 13 |
+ import spaces
|
| 14 |
+
|
| 15 |
|
| 16 |
class PDFChatBot:
|
| 17 |
def __init__(self, config_path="config.yaml"):
|
|
|
|
| 149 |
self.load_model()
|
| 150 |
self.create_pipeline()
|
| 151 |
self.create_chain()
|
| 152 |
+     @spaces.GPU
| 153 |
def generate_response(self, history, query, file):
|
| 154 |
"""
|
| 155 |
Generate a response based on user query and chat history.
|