# app_offline_ner_min.py
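"""Minimal offline biomedical NER demo.

Loads a token-classification model strictly from a local folder (no Hub
downloads) and serves it through a small Gradio interface.
"""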
import os
os.environ["TRANSFORMERS_OFFLINE"] = "1"   # force offline per HF docs
os.environ["TOKENIZERS_PARALLELISM"] = "false"

import torch
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline
import gradio as gr

# point to your local snapshot downloaded by prepare_model.py
# path: ./models/biomedical-ner-all
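# (prepare_model.py itself is not included here; a minimal sketch of what it
#  might contain, assuming the checkpoint is d4data/biomedical-ner-all on the Hub:
#      from huggingface_hub import snapshot_download
#      snapshot_download(repo_id="d4data/biomedical-ner-all",
#                        local_dir="./models/biomedical-ner-all")
#  Run it once with network access; afterwards this app stays fully offline.)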
HERE = os.path.dirname(os.path.abspath(__file__))
LOCAL_MODEL_DIR = os.path.join(HERE, "models", "biomedical-ner-all")

# load strictly from disk
tokenizer = AutoTokenizer.from_pretrained(LOCAL_MODEL_DIR, local_files_only=True)
model = AutoModelForTokenClassification.from_pretrained(LOCAL_MODEL_DIR, local_files_only=True)

device = 0 if torch.cuda.is_available() else -1  # GPU index 0 if CUDA is available, otherwise -1 (CPU)
ner_pipe = pipeline(
    task="token-classification",                # NER
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",              # merge subword tokens into entities
    device=device
)
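# With aggregation_strategy="simple", each prediction is a dict with keys
# entity_group, word, score, start, and end, e.g. (illustrative values, not real output):
#   {"entity_group": "Disease_disorder", "word": "asthma",
#    "score": 0.99, "start": 25, "end": 31}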

def run_ner(text: str):
    if not text.strip():
        return {"text": "", "entities": []}, []

    out = ner_pipe(text)

    # gr.HighlightedText also accepts a dict of the form {"text": ..., "entities": [...]},
    # which mirrors the HF NER pipeline output and highlights spans by character offsets
    highlighted = {
        "text": text,
        "entities": [
            {
                "entity": r["entity_group"],
                "start": int(r["start"]),
                "end": int(r["end"]),
                "score": float(r["score"]),
            }
            for r in out
        ],
    }

    # list-of-lists in a fixed column order
    rows = [
        [r["entity_group"], r["word"], float(r["score"]), int(r["start"]), int(r["end"])]
        for r in out
    ]
    return highlighted, rows

with gr.Blocks() as demo:
    gr.Markdown("# 🩺 Biomedical NER (offline, local model)")
    inp = gr.Textbox(label="Enter text", value="Patient has a history of asthma treated with albuterol.")
    ner_view = gr.HighlightedText(label="Entities", combine_adjacent=True)  # merge adjacent spans with the same label
    table = gr.Dataframe(
        label="Raw predictions",
        headers=["entity", "word", "score", "start", "end"],  # <-- headers for list-of-lists
        interactive=False,
    )
    inp.change(run_ner, inp, [ner_view, table])  # re-run NER whenever the textbox value changes

demo.launch(debug=True)
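
# If you need to pin the port or expose the app beyond localhost (a deployment
# detail, not part of this script), launch() also accepts e.g.
# server_name="0.0.0.0" and server_port=7860.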