Spaces:
Runtime error
Runtime error
lhl
committed on
Commit
·
d259634
1
Parent(s):
1def3a1
Updated settings
Browse files
app.py
CHANGED
|
@@ -31,9 +31,8 @@ model = AutoModelForCausalLM.from_pretrained(
|
|
| 31 |
model_name,
|
| 32 |
torch_dtype=torch.bfloat16,
|
| 33 |
device_map="auto",
|
| 34 |
-
|
| 35 |
-
#
|
| 36 |
-
load_in_4bit=True
|
| 37 |
)
|
| 38 |
streamer = TextIteratorStreamer(tokenizer, timeout=10.0, skip_prompt=True, skip_special_tokens=True)
|
| 39 |
|
|
@@ -50,6 +49,7 @@ def chat(message, history):
|
|
| 50 |
max_new_tokens=200,
|
| 51 |
do_sample=True,
|
| 52 |
temperature=0.7,
|
|
|
|
| 53 |
top_p=0.95,
|
| 54 |
eos_token_id=tokenizer.eos_token_id,
|
| 55 |
)
|
|
|
|
| 31 |
model_name,
|
| 32 |
torch_dtype=torch.bfloat16,
|
| 33 |
device_map="auto",
|
| 34 |
+
load_in_8bit=True,
|
| 35 |
+
# load_in_4bit=True
|
|
|
|
| 36 |
)
|
| 37 |
streamer = TextIteratorStreamer(tokenizer, timeout=10.0, skip_prompt=True, skip_special_tokens=True)
|
| 38 |
|
|
|
|
| 49 |
max_new_tokens=200,
|
| 50 |
do_sample=True,
|
| 51 |
temperature=0.7,
|
| 52 |
+
repetition_penalty=1.15,
|
| 53 |
top_p=0.95,
|
| 54 |
eos_token_id=tokenizer.eos_token_id,
|
| 55 |
)
|