# TestAICodetoZip / app.py
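"""Gradio app: paste an AI chat response, let a local GGUF model pull the code
blocks out into a {filename: content} JSON map, and download the files as a zip."""
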
import os
import gradio as gr
import spaces
import zipfile
import json
import re
import uuid
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider
MODEL_NAME = "neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf"
MODEL_PATH = f"models/{MODEL_NAME}"
# Pre-download model
hf_hub_download(
    repo_id="tHottie/NeuralDaredevil-8B-abliterated-Q4_K_M-GGUF",
    filename=MODEL_NAME,
    local_dir="./models",
)
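# The download lands at ./models/<MODEL_NAME>, which is what MODEL_PATH points to.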
LOG_FILE = "/tmp/agent_code_packager.log"

def log(msg: str):
    try:
        with open(LOG_FILE, "a", encoding="utf-8") as f:
            f.write(msg + "\n")
    except Exception:
        pass
    print(msg)

@spaces.GPU(duration=120)
def process_and_package(ai_response: str):
log("=== Starting process_and_package ===")
log(f"AI response input: {ai_response[:200]}...")
# Initialize model & agent
llm = Llama(
model_path=MODEL_PATH,
n_gpu_layers=0,
n_batch=512,
n_ctx=4096,
verbose=False
)
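    # Note: n_gpu_layers=0 keeps every layer on the CPU, so the GPU slot requested
    # by @spaces.GPU above is not used for inference; raise it to offload layers.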
    provider = LlamaCppPythonProvider(llm)
    agent = LlamaCppAgent(
        provider,
        system_prompt=(
            "You are a code packager. The user will paste text that may contain multiple code blocks. "
            "Extract all code blocks, associate each with a filename if available, and return a JSON object "
            "where keys are filenames and values are the full content. "
            "ONLY return a raw JSON object — no explanations, no markdown, no text outside the JSON. "
            "Example:\n\n"
            "{\n \"app.py\": \"import gradio as gr\\n...\",\n \"utils.py\": \"def helper():\\n...\"\n}\n"
        ),
        predefined_messages_formatter_type=MessagesFormatterType.GEMMA_2,
        debug_output=True,
    )
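    # NOTE: NeuralDaredevil-8B is Llama-3-based, so MessagesFormatterType.LLAMA_3
    # may match its chat template better than GEMMA_2.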
    # Call the agent
    # Use get_chat_response (not run)
    try:
        settings = provider.get_provider_default_settings()
        response = agent.get_chat_response(ai_response, llm_sampling_settings=settings)
    except Exception as e:
        log(f"Error in get_chat_response: {e}")
        response = ""  # fallback to empty

    log("Agent response:")
    log(response[:1000] if len(response) > 1000 else response)
    # Parse the agent output into a {filename: content} map
    file_map = {}
    try:
        file_map = json.loads(response)
        log("Parsed JSON file map keys: " + ", ".join(file_map.keys()))
    except Exception as e:
        log(f"JSON parse failed: {e}")
        # Fallback: regex for "Filename: <name>" followed by a fenced code block
        pattern = r"Filename:\s*(?P<fname>[\w\.\-/]+)\s*```(?:[a-zA-Z0-9]*\n)?(?P<code>[\s\S]*?)```"
        for m in re.finditer(pattern, response, re.DOTALL):
            fname = m.group("fname").strip()
            code = m.group("code").rstrip()
            file_map[fname] = code
        log("After regex fallback, keys: " + ", ".join(file_map.keys()))

    if not file_map:
        file_map["output.txt"] = response
        log("Fallback to single file: output.txt")
    # Write files & zip
    temp_dir = f"/tmp/codeproj_{uuid.uuid4().hex}"
    os.makedirs(temp_dir, exist_ok=True)
    for fname, code in file_map.items():
        dest = os.path.join(temp_dir, fname)
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        with open(dest, "w", encoding="utf-8") as f:
            f.write(code)
        log(f"Wrote file: {dest}")

    zip_name = f"code_bundle_{uuid.uuid4().hex}.zip"
    zip_path = os.path.join("/tmp", zip_name)
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        for root, _, files in os.walk(temp_dir):
            for fn in files:
                fullp = os.path.join(root, fn)
                rel = os.path.relpath(fullp, temp_dir)
                zf.write(fullp, arcname=rel)
                log(f"Added to zip: {rel}")

    log(f"Zip created: {zip_path}")
    log("=== Finished process_and_package ===\n")
    return zip_path

with gr.Blocks() as demo:
    gr.Markdown("### Paste AI response below; get downloadable zip of parsed files")
    inp = gr.Textbox(lines=15, label="AI response text")
    btn = gr.Button("Generate Zip")
    out = gr.File(label="Download .zip")
    btn.click(fn=process_and_package, inputs=inp, outputs=out)

demo.launch()