deepsodha committed on
Commit
0de700e
·
verified ·
1 Parent(s): 69ce8b0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -50
app.py CHANGED
@@ -1,54 +1,34 @@
1
- import os
2
- import gradio as gr
3
  from transformers import pipeline
4
 
5
- # --- Prevent storage overflow: redirect caches to /tmp (auto-cleared) ---
6
- os.environ["TRANSFORMERS_CACHE"] = "/tmp/hf_cache"
7
- os.environ["HF_HOME"] = "/tmp/hf_home"
8
- os.environ["HF_DATASETS_CACHE"] = "/tmp/hf_datasets"
9
-
10
- # --- Pin model version for reproducibility ---
11
- MODEL_ID = "distilbert-base-cased-distilled-squad"
12
-
13
- # Lazy-load the model only once at startup
14
- def load_model():
15
- return pipeline("question-answering", model=MODEL_ID)
16
-
17
- qa = load_model()
18
-
19
- # --- Demo defaults ---
20
- EXAMPLE_CONTEXT = (
21
- "AxionX Digital builds model-training tools for AI developers. "
22
- "We fine-tune open-source LLMs for customer support, finance, and legal use cases. "
23
- "We also provide evaluation dashboards and fast private deployments."
24
- )
25
- EXAMPLE_QUESTION = "What does AxionX Digital build?"
26
-
27
- # --- Prediction function ---
28
- def predict(context, question):
29
- context = (context or "").strip()
30
- question = (question or "").strip()
31
- if not context or not question:
32
- return {"answer": "Please provide both context and a question.", "score": ""}
33
- res = qa(question=question, context=context)
34
- return {"answer": res.get("answer", ""), "score": round(float(res.get("score", 0.0)), 3)}
35
-
36
- # --- Gradio UI (with AxionX brand style) ---
37
- with gr.Blocks(title="AxionX Digital β€” AI QA Demo") as demo:
38
- gr.Markdown(
39
- """
40
- # πŸ€– **AxionX Digital β€” Question Answering Demo**
41
- Type a paragraph in **Context**, then ask a **Question** about it.
42
- *(Model: distilbert-base-cased-distilled-squad β€” lightweight for stability)*
43
- ---
44
- """
45
- )
46
- with gr.Row():
47
- ctx = gr.Textbox(label="Context", lines=8, value=EXAMPLE_CONTEXT)
48
- q = gr.Textbox(label="Question", value=EXAMPLE_QUESTION)
49
- btn = gr.Button("πŸ” Get Answer")
50
- out = gr.JSON(label="Result (answer, score)")
51
- btn.click(predict, inputs=[ctx, q], outputs=[out])
52
 
53
  if __name__ == "__main__":
54
- demo.launch(server_name="0.0.0.0", server_port=7860)
 
1
+ import streamlit as st
 
2
  from transformers import pipeline
3
 
4
def main():
    """Render the Streamlit question-answering demo.

    Loads a DistilBERT extractive-QA pipeline once per server process,
    collects a context paragraph and a question from the user, and shows
    the model's answer with its confidence score.
    """
    # Fix: page_title said "Gradio QA Demo" — stale copy from the old Gradio
    # version of this app; keep the tab title consistent with the AxionX brand.
    st.set_page_config(page_title="AxionX Digital — QA Demo", page_icon="🤖", layout="wide")
    st.title("🤖 AxionX Digital — Question Answering Demo")

    st.markdown("Type a paragraph in **Context**, then ask a **Question** about it.")

    # cache_resource makes the heavy pipeline construction run only once per
    # process, even though Streamlit re-executes this script on every rerun.
    @st.cache_resource
    def load_model():
        return pipeline("question-answering", model="distilbert-base-cased-distilled-squad")

    qa = load_model()

    context = st.text_area("Context", value=(
        "AxionX Digital builds model-training tools for AI developers. "
        "We fine-tune open-source LLMs for customer support, finance, and legal use cases. "
        "We also provide evaluation dashboards and fast private deployments."
    ), height=200)

    question = st.text_input("Question", value="What does AxionX Digital build?")

    if st.button("🔍 Get Answer"):
        # Guard against empty/whitespace-only input before invoking the model.
        if not context.strip() or not question.strip():
            st.warning("Please enter both context and question.")
        else:
            result = qa(question=question, context=context)
            st.subheader("📄 Answer")
            st.write(f"**Answer:** {result['answer']}")
            st.write(f"**Confidence Score:** {round(result['score'], 3)}")


if __name__ == "__main__":
    main()