leavoigt committed on
Commit
52c41b1
·
verified ·
1 Parent(s): 8281cfa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -62
app.py CHANGED
@@ -6,67 +6,38 @@ from utils.generator import generate
6
  # ---------------------------------------------------------------------
7
 
8
 
9
- import subprocess
10
- from pathlib import Path
11
- import gradio as gr
12
-
13
- FREEZE_FILE = "requirements_full.txt"
14
-
15
- def dump_installed_packages():
16
- """Generate the list of installed packages into a file."""
17
- try:
18
- with open(FREEZE_FILE, "w") as f:
19
- subprocess.run(["pip", "freeze"], stdout=f, check=True)
20
- except Exception as e:
21
- print(f"[ERROR] Could not write requirements: {e}")
22
-
23
- # Generate it once at runtime
24
- dump_installed_packages()
25
-
26
- with gr.Blocks() as demo:
27
- gr.Markdown("📦 Download the list of installed Python packages:")
28
- gr.File(value=FREEZE_FILE, label="requirements_full.txt")
29
-
30
- # IMPORTANT: This is required in Hugging Face Spaces using Docker
 
 
 
 
 
31
  if __name__ == "__main__":
32
- demo.launch(server_name="0.0.0.0", server_port=7860, mcp_server=True)
33
-
34
-
35
-
36
-
37
-
38
- # ui = gr.Interface(
39
- # fn=generate,
40
- # inputs=[
41
- # gr.Textbox(
42
- # label="Query",
43
- # lines=2,
44
- # placeholder="Enter query here",
45
- # info="The query to search for in the vector database"
46
- # ),
47
- # gr.Textbox(
48
- # label="Context",
49
- # lines=8,
50
- # placeholder="Paste relevant context here",
51
- # info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI should except anything"
52
- # ),
53
- # ],
54
- # outputs=gr.Textbox(
55
- # label="Generated Answer",
56
- # lines=6,
57
- # show_copy_button=True
58
- # ),
59
- # title="ChatFed Generation Module",
60
- # description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server with other ChatFed modules (i.e. context supplied by semantic retriever service).",
61
- # api_name="generate"
62
- # )
63
-
64
- # # Launch with MCP server enabled
65
- # if __name__ == "__main__":
66
- # ui.launch(
67
- # server_name="0.0.0.0",
68
- # server_port=7860,
69
- # mcp_server=True,
70
- # show_error=True
71
- # )
72
 
 
6
  # ---------------------------------------------------------------------
7
 
8
 
9
+ ui = gr.Interface(
10
+ fn=generate,
11
+ inputs=[
12
+ gr.Textbox(
13
+ label="Query",
14
+ lines=2,
15
+ placeholder="Enter query here",
16
+ info="The query to search for in the vector database"
17
+ ),
18
+ gr.Textbox(
19
+ label="Context",
20
+ lines=8,
21
+ placeholder="Paste relevant context here",
22
+ info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI should except anything"
23
+ ),
24
+ ],
25
+ outputs=gr.Textbox(
26
+ label="Generated Answer",
27
+ lines=6,
28
+ show_copy_button=True
29
+ ),
30
+ title="ChatFed Generation Module",
31
+ description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server with other ChatFed modules (i.e. context supplied by semantic retriever service).",
32
+ api_name="generate"
33
+ )
34
+
35
+ # Launch with MCP server enabled
36
  if __name__ == "__main__":
37
+ ui.launch(
38
+ server_name="0.0.0.0",
39
+ server_port=7860,
40
+ mcp_server=True,
41
+ show_error=True
42
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43