Update src/pdfchatbot.py
Browse files — src/pdfchatbot.py (+2, −3)
src/pdfchatbot.py
CHANGED
|
@@ -105,9 +105,8 @@ class PDFChatBot:
|
|
| 105 |
self.model = AutoModelForCausalLM.from_pretrained(
|
| 106 |
self.config.get("autoModelForCausalLM"),
|
| 107 |
device_map='auto',
|
| 108 |
-
torch_dtype=torch.
|
| 109 |
token=True,
|
| 110 |
-
load_in_8bit=False
|
| 111 |
)
|
| 112 |
|
| 113 |
def create_pipeline(self):
|
|
@@ -118,7 +117,7 @@ class PDFChatBot:
|
|
| 118 |
model=self.model,
|
| 119 |
task='text-generation',
|
| 120 |
tokenizer=self.tokenizer,
|
| 121 |
-
max_new_tokens=
|
| 122 |
)
|
| 123 |
self.pipeline = HuggingFacePipeline(pipeline=pipe)
|
| 124 |
|
|
|
|
| 105 |
self.model = AutoModelForCausalLM.from_pretrained(
|
| 106 |
self.config.get("autoModelForCausalLM"),
|
| 107 |
device_map='auto',
|
| 108 |
+
torch_dtype=torch.float16,
|
| 109 |
token=True,
|
|
|
|
| 110 |
)
|
| 111 |
|
| 112 |
def create_pipeline(self):
|
|
|
|
| 117 |
model=self.model,
|
| 118 |
task='text-generation',
|
| 119 |
tokenizer=self.tokenizer,
|
| 120 |
+
max_new_tokens=1024
|
| 121 |
)
|
| 122 |
self.pipeline = HuggingFacePipeline(pipeline=pipe)
|
| 123 |
|