Spaces:
Runtime error
Runtime error
fix: stream not supported for this model
Browse files
app.py
CHANGED
|
@@ -39,7 +39,7 @@ Falcon:"""
|
|
| 39 |
seed = 42
|
| 40 |
|
| 41 |
def generate(
|
| 42 |
-
prompt, history, system_prompt="<|endoftext|>", temperature=0.9, max_new_tokens=
|
| 43 |
):
|
| 44 |
temperature = float(temperature)
|
| 45 |
if temperature < 1e-2:
|
|
@@ -58,7 +58,7 @@ def generate(
|
|
| 58 |
seed = seed + 1
|
| 59 |
formatted_prompt = format_prompt(prompt, history, system_prompt)
|
| 60 |
|
| 61 |
-
stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
|
| 62 |
output = ""
|
| 63 |
|
| 64 |
for response in stream:
|
|
|
|
| 39 |
seed = 42
|
| 40 |
|
| 41 |
def generate(
|
| 42 |
+
prompt, history, system_prompt="<|endoftext|>", temperature=0.9, max_new_tokens=250, top_p=0.95, repetition_penalty=1.0,
|
| 43 |
):
|
| 44 |
temperature = float(temperature)
|
| 45 |
if temperature < 1e-2:
|
|
|
|
| 58 |
seed = seed + 1
|
| 59 |
formatted_prompt = format_prompt(prompt, history, system_prompt)
|
| 60 |
|
| 61 |
+
stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=False, details=True, return_full_text=False)
|
| 62 |
output = ""
|
| 63 |
|
| 64 |
for response in stream:
|