Update app.py
app.py CHANGED
@@ -53,7 +53,6 @@ def stream_chat(
     full_prompt += f"[INST]{message}[/INST]"

     inputs = tokenizer(full_prompt, truncation=False, return_tensors="pt").to(device)
-    context_length = inputs.input_ids.shape[-1]

     streamer = TextIteratorStreamer(tokenizer, timeout=60.0, skip_prompt=True, skip_special_tokens=True)
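This hunk drops the `context_length` bookkeeping right after tokenization; the streaming setup around it is untouched. For context, a `TextIteratorStreamer` like the one in the surrounding lines is normally drained while `model.generate` runs in a background thread. The sketch below shows that generic pattern only; `stream_reply`, the checkpoint name, and the generation arguments are placeholders, not this Space's actual code.

```python
# Generic transformers streaming pattern (hypothetical names, not this Space's code).
from threading import Thread

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained("some/model")                 # placeholder checkpoint
model = AutoModelForCausalLM.from_pretrained("some/model").to(device)   # placeholder checkpoint


def stream_reply(full_prompt: str, max_new_tokens: int = 512):
    inputs = tokenizer(full_prompt, return_tensors="pt").to(device)
    streamer = TextIteratorStreamer(
        tokenizer, timeout=60.0, skip_prompt=True, skip_special_tokens=True
    )
    # generate() blocks, so it runs in a worker thread while this generator
    # consumes the streamer and yields progressively longer strings.
    Thread(
        target=model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=max_new_tokens),
    ).start()
    partial = ""
    for token_text in streamer:
        partial += token_text
        yield partial
```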
@@ -90,6 +89,7 @@ with gr.Blocks(css=CSS, theme="soft") as demo:
             gr.Textbox(
                 value="You are a helpful assistant capable of generating long-form content.",
                 label="System Prompt",
+                visible=False,
             ),
             gr.Slider(
                 minimum=0,
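The added `visible=False` hides the System Prompt textbox from the UI while keeping its default value in play. Assuming these components are passed to `gr.ChatInterface` through `additional_inputs` (a common layout for Gradio chat Spaces, not confirmed by this diff), the wiring looks roughly like the sketch below; the stubbed `stream_chat`, `CSS`, and the slider's bounds and label are placeholders.

```python
# Hypothetical wiring: additional_inputs values are appended to stream_chat's arguments.
import gradio as gr

CSS = ""  # placeholder; the real stylesheet is defined elsewhere in app.py


def stream_chat(message, history, system_prompt, max_new_tokens):
    """Stub standing in for the generator defined earlier in app.py."""
    yield f"[{system_prompt}] echo: {message}"


with gr.Blocks(css=CSS, theme="soft") as demo:
    gr.ChatInterface(
        fn=stream_chat,
        additional_inputs=[
            gr.Textbox(
                value="You are a helpful assistant capable of generating long-form content.",
                label="System Prompt",
                visible=False,  # hidden from the UI; its value is still passed to fn
            ),
            gr.Slider(
                minimum=0,
                maximum=8192,          # assumed bounds; only minimum=0 appears in the hunk
                value=1024,            # assumed default
                label="Max new tokens",  # assumed label
            ),
        ],
    )

if __name__ == "__main__":
    demo.launch()
```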