Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -23,11 +23,19 @@ model, tokenizer = load_model_and_tokenizer()
|
|
def get_predict(title: str, abstract: str) -> list[tuple[float, str]]:
    """Classify a paper from its title and abstract.

    Returns a list of (probability, label) pairs sorted by probability,
    highest first. Relies on module-level `model`, `tokenizer`, and
    `ind_to_target` (index -> label mapping) loaded at import time.
    """
    # Build one input string: title and (truncated) abstract joined by the
    # tokenizer's separator token, wrapped in a batch of size 1.
    batch = [title + tokenizer.sep_token + abstract[:128]]

    # BUG FIX: the original called model(tokenized_text) with no tokenization
    # step at all — `tokenized_text` was undefined. Encode the batch first.
    encoded = tokenizer(
        batch,
        padding=True,
        truncation=True,
        return_tensors="pt",
    )

    with torch.no_grad():
        outputs = model(**encoded)

    # BUG FIX: was `out.logits` (undefined name — the variable is `outputs`).
    # Take the single example's class probabilities as a plain Python list so
    # enumerate() walks per-class floats, not tensor rows.
    probs = torch.nn.functional.softmax(outputs.logits, dim=-1).tolist()[0]

    # BUG FIX: sorted() takes `reverse=True`; `reversed=True` is a TypeError.
    return sorted(((p, ind_to_target[i]) for i, p in enumerate(probs)), reverse=True)
|
|
|
|
def get_predict(title: str, abstract: str) -> list[tuple[float, str]]:
    """Classify a paper from its title and abstract.

    Returns a list of (probability, label) pairs sorted by probability,
    highest first. Relies on module-level `model`, `tokenizer`, and
    `ind_to_target` (index -> label mapping) loaded at import time.
    """
    # One input string per example: title + separator token + truncated
    # abstract (first 128 chars), wrapped in a batch of size 1.
    text = [title + tokenizer.sep_token + abstract[:128]]

    tokens_info = tokenizer(
        text,
        padding=True,
        truncation=True,
        return_tensors="pt",
    )

    with torch.no_grad():
        # BUG FIX: was model(tokenized_text) — `tokenized_text` is undefined;
        # the encoded batch is `tokens_info`, unpacked as keyword tensors.
        outputs = model(**tokens_info)

    # BUG FIX: was `out.logits` — `out` is undefined; the variable is
    # `outputs`. [0] selects the single example's per-class probabilities.
    probs = torch.nn.functional.softmax(outputs.logits, dim=-1).tolist()[0]

    # BUG FIX: sorted() takes `reverse=True`; `reversed=True` raises TypeError.
    return sorted(((p, ind_to_target[i]) for i, p in enumerate(probs)), reverse=True)