Update app.py
Browse files
app.py
CHANGED
|
@@ -1,93 +1,132 @@
|
|
| 1 |
-
# app.py
|
| 2 |
|
| 3 |
-
from
|
| 4 |
-
from
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
|
| 6 |
-
import asyncio
|
| 7 |
import gradio as gr
|
| 8 |
-
from fastapi import FastAPI
|
| 9 |
-
from fastapi.staticfiles import StaticFiles
|
| 10 |
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
|
| 21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
|
| 23 |
-
#
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
model_state = gr.State(AVAILABLE_MODELS[0])
|
| 31 |
-
search_chk = gr.Checkbox()
|
| 32 |
-
lang_dd = gr.Dropdown(choices=["html", "python"], value="html")
|
| 33 |
-
hist_state = gr.State([])
|
| 34 |
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
inputs=[prompt_in, file_in, url_in, model_state,
|
| 43 |
-
search_chk, lang_dd, hist_state],
|
| 44 |
-
outputs=[code_out, hist_out, preview_out, chat_out],
|
| 45 |
-
)
|
| 46 |
-
|
| 47 |
-
# ────────────────────────────────────────────────────────────────
|
| 48 |
-
# 2. Hybrid FastAPI server mounts:
|
| 49 |
-
# • / → static/ (index.html, style.css, index.js …)
|
| 50 |
-
# • /api/* → Gradio JSON (& websocket queue)
|
| 51 |
-
# ────────────────────────────────────────────────────────────────
|
| 52 |
-
app = FastAPI(title="SHASHA AI hybrid server")
|
| 53 |
|
| 54 |
-
#
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 58 |
|
| 59 |
-
#
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
)
|
| 65 |
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
await websocket.accept()
|
| 76 |
-
payload = await websocket.receive_json()
|
| 77 |
-
queue: asyncio.Queue[str] = asyncio.Queue()
|
| 78 |
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
await queue.put("__END__")
|
| 84 |
|
| 85 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 86 |
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
|
|
|
| 92 |
|
| 93 |
-
|
|
|
|
|
|
|
|
|
| 1 |
+
# app.py
|
| 2 |
|
| 3 |
+
from constants import *
|
| 4 |
+
from hf_client import get_inference_client, tavily_client
|
| 5 |
+
from tavily_search import enhance_query_with_search
|
| 6 |
+
from utils import *
|
| 7 |
+
from search_replace import *
|
| 8 |
+
from web_scraper import *
|
| 9 |
+
from deploy import *
|
| 10 |
|
|
|
|
| 11 |
import gradio as gr
|
|
|
|
|
|
|
| 12 |
|
| 13 |
+
def generation_code(
    query: Optional[str],
    image: Optional[gr.Image],
    file: Optional[str],
    website_url: Optional[str],
    _setting: Dict[str, str],
    _history: Optional[History],
    _current_model: Dict,
    enable_search: bool,
    language: str,
    provider: str
):
    """Generate code from the user's prompt plus optional context inputs.

    Placeholder stub: the streaming generation logic (HF inference client,
    web scraping, file extraction, search/replace edits) is intentionally
    omitted and must be ported from the original monolith.

    Args:
        query: Free-text prompt describing what to build.
        image: Optional UI-design image (multimodal input).
        file: Optional reference-file path to extract content from.
        website_url: Optional URL of an existing site to redesign.
        _setting: Settings dict; the "system" key holds the system prompt.
        _history: Prior conversation turns, or ``None`` for a new session.
        _current_model: Descriptor dict of the model chosen in the UI.
        enable_search: Whether to enhance the query with web search results.
        language: Target output language (e.g. "html", "python").
        provider: Inference provider name; the UI passes "auto".

    Returns:
        Expected by the ``btn.click`` wiring to yield/return values for
        (code_output, history, sandbox, history_output) — TODO confirm
        exact shape when the real implementation is ported.
    """
    # (Exact same logic as in your monolith; omitted here for brevity)
    # It should handle multimodal input, web scraping, file extraction,
    # search/replace modifications, and stream responses via HF client.
    ...
|
| 29 |
|
| 30 |
+
with gr.Blocks(
    theme=gr.themes.Base(
        primary_hue="blue",
        secondary_hue="gray",
        neutral_hue="gray",
        font=gr.themes.GoogleFont("Inter"),
        font_mono=gr.themes.GoogleFont("JetBrains Mono"),
        text_size=gr.themes.sizes.text_md,
        spacing_size=gr.themes.sizes.spacing_md,
        radius_size=gr.themes.sizes.radius_md
    ),
    title="AnyCoder - AI Code Generator"
) as demo:
    # Per-session state shared across the event handlers below.
    history = gr.State([])
    setting = gr.State({"system": HTML_SYSTEM_PROMPT})
    # NOTE(review): hard-coded index assumes AVAILABLE_MODELS has >= 10
    # entries — consider a named DEFAULT_MODEL constant in constants.py.
    current_model = gr.State(AVAILABLE_MODELS[9])
    open_panel = gr.State(None)
    last_login_state = gr.State(None)

    # Sidebar: project import, prompt, and generation options.
    with gr.Sidebar():
        login_button = gr.LoginButton()
        gr.Markdown("📥 Load Existing Project")
        load_project_url = gr.Textbox(label="Hugging Face Space URL", placeholder="https://huggingface.co/spaces/username/project", lines=1)
        load_project_btn = gr.Button("Import Project", variant="secondary", size="sm")
        load_project_status = gr.Markdown(visible=False)

        gr.Markdown("---")
        input_box = gr.Textbox(label="What would you like to build?", placeholder="Describe your application...", lines=3)
        language_dropdown = gr.Dropdown(
            choices=[
                "html", "python", "c", "cpp", "markdown", "latex", "json",
                "css", "javascript", "jinja2", "typescript", "yaml", "dockerfile",
                "shell", "r", "sql", "transformers.js"
            ],
            value="html",
            label="Code Language"
        )
        website_url_input = gr.Textbox(label="Website for redesign", placeholder="https://example.com", lines=1)
        file_input = gr.File(
            label="Reference file",
            file_types=[".pdf", ".txt", ".md", ".csv", ".docx", ".jpg", ".png"]
        )
        image_input = gr.Image(label="UI design image", visible=False)
        search_toggle = gr.Checkbox(label="🔍 Web search", value=False)
        model_dropdown = gr.Dropdown(
            choices=[m["name"] for m in AVAILABLE_MODELS],
            value=AVAILABLE_MODELS[9]["name"],
            label="Model"
        )

        btn = gr.Button("Generate", variant="primary", size="lg")
        clear_btn = gr.Button("Clear", variant="secondary", size="sm")

    # Main panel with tabs
    with gr.Column():
        with gr.Tabs():
            with gr.Tab("Code"):
                code_output = gr.Code(language="html", lines=25, interactive=True, label="Generated code")
            with gr.Tab("Preview"):
                sandbox = gr.HTML(label="Live preview")
            with gr.Tab("History"):
                history_output = gr.Chatbot(show_label=False, height=400, type="messages")

    # Event hookups
    load_project_btn.click(
        fn=lambda url: handle_load_project(url),
        inputs=[load_project_url],
        outputs=[load_project_status, code_output, sandbox, load_project_url, history, history_output]
    )

    btn.click(
        fn=generation_code,
        inputs=[
            input_box, image_input, file_input, website_url_input,
            setting, history, current_model, search_toggle,
            language_dropdown, gr.State("auto")
        ],
        outputs=[code_output, history, sandbox, history_output]
    )

    clear_btn.click(
        fn=lambda: ([], [], None, ""),
        outputs=[history, history_output, file_input, website_url_input]
    )

    # Preview update: only HTML output is rendered live in the sandbox.
    code_output.change(
        fn=lambda code, lang: send_to_sandbox(code) if lang=="html" else "<div>No preview</div>",
        inputs=[code_output, language_dropdown],
        outputs=sandbox
    )

    # Model switch. `outputs` is a single component, so the handler must
    # return a bare value; the original returned a 1-tuple, which stored
    # the tuple itself in `current_model` state instead of the model dict.
    model_dropdown.change(
        fn=lambda name: next(m for m in AVAILABLE_MODELS if m["name"] == name),
        inputs=model_dropdown,
        outputs=current_model
    )

if __name__ == "__main__":
    demo.queue(api_open=False, default_concurrency_limit=20) \
        .launch(show_api=False, ssr_mode=True, mcp_server=False)
|