leavoigt committed on
Commit
f346328
·
verified ·
1 Parent(s): fef1c61

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -55
app.py CHANGED
@@ -6,60 +6,34 @@ from utils.generator import generate
6
  # ---------------------------------------------------------------------
7
 
8
 
9
- import subprocess
10
- from pathlib import Path
11
- import gradio as gr
12
-
13
- FREEZE_FILE = "requirements_full.txt"
14
-
15
- def dump_installed_packages():
16
- """Generate the list of installed packages into a file."""
17
- try:
18
- with open(FREEZE_FILE, "w") as f:
19
- subprocess.run(["pip", "freeze"], stdout=f, check=True)
20
- except Exception as e:
21
- print(f"[ERROR] Could not write requirements: {e}")
22
-
23
- # Generate it once at runtime
24
- dump_installed_packages()
25
-
26
- with gr.Blocks() as demo:
27
- gr.Markdown("📦 Download the list of installed Python packages:")
28
- gr.File(value=FREEZE_FILE, label="requirements_full.txt")
29
-
30
- # IMPORTANT: This is required in Hugging Face Spaces using Docker
 
31
  if __name__ == "__main__":
32
- demo.launch(server_name="0.0.0.0", server_port=7860)
33
-
34
-
35
- # ui = gr.Interface(
36
- # fn=generate,
37
- # inputs=[
38
- # gr.Textbox(
39
- # label="Query",
40
- # lines=2,
41
- # placeholder="Enter query here",
42
- # info="The query to search for in the vector database"
43
- # ),
44
- # gr.Textbox(
45
- # label="Context",
46
- # lines=8,
47
- # placeholder="Paste relevant context here",
48
- # info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI should except anything"
49
- # ),
50
- # ],
51
- # outputs=[gr.Text(label="Generated Answer", lines=6, show_copy_button=True)],
52
- # title="ChatFed Generation Module",
53
- # description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server with other ChatFed modules (i.e. context supplied by semantic retriever service).",
54
- # api_name="generate"
55
- # )
56
-
57
- # # Launch with MCP server enabled
58
- # if __name__ == "__main__":
59
- # ui.launch(
60
- # server_name="0.0.0.0",
61
- # server_port=7860,
62
- # #mcp_server=True,
63
- # show_error=True
64
- # )
65
 
 
6
  # ---------------------------------------------------------------------
7
 
8
 
9
+ ui = gr.Interface(
10
+ fn=generate,
11
+ inputs=[
12
+ gr.Textbox(
13
+ label="Query",
14
+ lines=2,
15
+ placeholder="Enter query here",
16
+ info="The query to search for in the vector database"
17
+ ),
18
+ gr.Textbox(
19
+ label="Context",
20
+ lines=8,
21
+ placeholder="Paste relevant context here",
22
+ info="Provide the context/documents to use for answering. The API expects a list of dictionaries, but the UI should except anything"
23
+ ),
24
+ ],
25
+ outputs=[gr.Text(label="Generated Answer", lines=6, show_copy_button=True)],
26
+ title="ChatFed Generation Module",
27
+ description="Ask questions based on provided context. Intended for use in RAG pipelines as an MCP server with other ChatFed modules (i.e. context supplied by semantic retriever service).",
28
+ api_name="generate"
29
+ )
30
+
31
+ # Launch with MCP server enabled
32
  if __name__ == "__main__":
33
+ ui.launch(
34
+ server_name="0.0.0.0",
35
+ server_port=7860,
36
+ #mcp_server=True,
37
+ show_error=True
38
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39