Update app.py
app.py CHANGED
@@ -20,14 +20,14 @@ rm_tokenizer = AutoTokenizer.from_pretrained('OpenAssistant/reward-model-deberta
 rm_model = AutoModelForSequenceClassification.from_pretrained('OpenAssistant/reward-model-deberta-v3-large-v2', torch_dtype=torch.bfloat16)
 
 @spaces.GPU
-def generate_text(usertitle, content, max_length, temperature, N=
+def generate_text(usertitle, content, max_length, temperature, N=3):
     input_text = f"title: {usertitle}\ncontent: {content}"
     inputs = tokenizer(input_text, return_tensors='pt').to('cuda')
-    generated_sequences = model.generate(inputs['input_ids'],
+    generated_sequences = model.generate(inputs['input_ids'], max_new_tokens=max_length, temperature=temperature, num_return_sequences=N, do_sample=True)
     decoded_sequences = [tokenizer.decode(g, skip_special_tokens=True) for g in generated_sequences]
 
     def score(sequence):
-        inputs = rm_tokenizer(sequence, return_tensors='pt', padding=True, truncation=True, max_length=
+        inputs = rm_tokenizer(sequence, return_tensors='pt', padding=True, truncation=True, max_length=512).to('cuda')
         with torch.no_grad():
             outputs = rm_model(**inputs)
             logits = outputs.logits
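The updated hunk completes three previously truncated lines: generate_text now samples N candidate sequences with model.generate, and the nested score helper tokenizes a candidate for the OpenAssistant reward model. The hunk ends at logits = outputs.logits, so the diff does not show how the scores are turned into a final output. A common way to finish this best-of-N pattern is sketched below; the helper name pick_best, the device handling, and the use of the reward model's single logit as a scalar score are illustrative assumptions, not part of the committed app.py.

import torch

def pick_best(decoded_sequences, rm_tokenizer, rm_model, device='cuda'):
    # Score every candidate with the reward model and keep the highest-scoring one.
    # Assumes rm_model has already been moved to `device` and returns a single
    # scalar logit per sequence (as the OpenAssistant DeBERTa reward model does).
    scores = []
    for seq in decoded_sequences:
        inputs = rm_tokenizer(seq, return_tensors='pt', padding=True,
                              truncation=True, max_length=512).to(device)
        with torch.no_grad():
            scores.append(rm_model(**inputs).logits[0].item())
    best_idx = max(range(len(scores)), key=lambda i: scores[i])
    return decoded_sequences[best_idx]

With a helper like this, generate_text could end by returning pick_best(decoded_sequences, rm_tokenizer, rm_model), i.e. the candidate the reward model rates highest.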