Update app.py
app.py CHANGED

@@ -6,7 +6,7 @@ from transformers import pipeline
 # Load the model and tokenizer using the pipeline API
 model_pipeline = pipeline("text-generation", model="grammarly/coedit-large")
 
-def generate_text(input_text, temperature=0.9, max_new_tokens=50, top_p=0.95, top_k=50):
+def generate_text(input_text, history, temperature=0.9, max_new_tokens=50, top_p=0.95, top_k=50):
     # Generate text using the model
     output = model_pipeline(input_text, temperature=temperature, max_length=max_new_tokens, top_p=top_p, top_k=top_k)
     # Extract and return the generated text
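
For context, a minimal sketch of how the updated signature is typically used: Gradio's gr.ChatInterface calls its function with (message, history), which is the usual reason for adding a history parameter. The interface wiring and the return statement below are assumptions; the diff only shows the signature change.

# Sketch: assumes generate_text is wired into a Gradio chat app (not shown in the diff).
import gradio as gr
from transformers import pipeline

# Load the model and tokenizer using the pipeline API (as in app.py)
model_pipeline = pipeline("text-generation", model="grammarly/coedit-large")

def generate_text(input_text, history, temperature=0.9, max_new_tokens=50, top_p=0.95, top_k=50):
    # Generate text using the model; `history` is accepted to match the
    # chat-callback signature but is not fed back into the model here.
    output = model_pipeline(input_text, temperature=temperature, max_length=max_new_tokens, top_p=top_p, top_k=top_k)
    # Extract and return the generated text (assumed return; the diff cuts off before it)
    return output[0]["generated_text"]

# Hypothetical wiring: gr.ChatInterface passes (message, history) to its fn.
demo = gr.ChatInterface(fn=generate_text)

if __name__ == "__main__":
    demo.launch()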