# (Hugging Face file-viewer scrape residue removed here: "Spaces / Sleeping"
# status text, blame commit hashes, and the line-number gutter were captured
# by the scraper and are not part of the program source.)
import os
import gradio as gr
import spaces
import zipfile
import json
import re
import uuid
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider
# GGUF quantized model weights file name, resolved inside ./models below.
MODEL_NAME = "neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf"
MODEL_PATH = f"models/{MODEL_NAME}"
# Pre-download model
# NOTE(review): this network call runs at import time, on every process start —
# presumably hf_hub_download reuses its local cache so repeat starts are cheap;
# confirm against the hub client version pinned for this Space.
hf_hub_download(
    repo_id="tHottie/NeuralDaredevil-8B-abliterated-Q4_K_M-GGUF",
    filename=MODEL_NAME,
    local_dir="./models",
)
# Append-only log sink used by log(); /tmp is assumed writable at runtime.
LOG_FILE = "/tmp/agent_code_packager.log"
def log(msg: str):
    """Echo *msg* to stdout and best-effort append it to ``LOG_FILE``.

    File logging is deliberately fault-tolerant: any failure while opening
    or writing the log file is swallowed so that logging can never break
    the request path. The console ``print`` always runs.
    """
    try:
        with open(LOG_FILE, "a", encoding="utf-8") as sink:
            sink.write(f"{msg}\n")
    except Exception:
        pass  # best-effort only — never let logging raise
    print(msg)
@spaces.GPU(duration=120)
def process_and_package(ai_response: str):
    """Extract code files from a pasted AI response and package them as a zip.

    Runs a local llama.cpp agent that maps the pasted text to a JSON object
    of ``{filename: content}``, falls back to regex extraction (and finally
    to a single ``output.txt``), writes the files under a fresh temp dir,
    and zips them.

    Args:
        ai_response: Raw text pasted by the user, possibly containing
            multiple fenced code blocks.

    Returns:
        str: Path to the created ``.zip`` file under ``/tmp``.
    """
    log("=== Starting process_and_package ===")
    log(f"AI response input: {ai_response[:200]}...")

    response = _ask_agent(ai_response)
    log("Agent response:")
    log(response[:1000])  # slicing is safe on short strings too

    file_map = _extract_file_map(response)
    zip_path = _write_and_zip(file_map)

    log(f"Zip created: {zip_path}")
    log("=== Finished process_and_package ===\n")
    return zip_path


def _ask_agent(ai_response: str) -> str:
    """Run the packaging prompt through the local GGUF model; '' on failure."""
    # NOTE(review): n_gpu_layers=0 keeps inference entirely on CPU even though
    # @spaces.GPU requests a GPU slot — confirm this is intentional.
    llm = Llama(
        model_path=MODEL_PATH,
        n_gpu_layers=0,
        n_batch=512,
        n_ctx=4096,
        verbose=False,
    )
    provider = LlamaCppPythonProvider(llm)
    agent = LlamaCppAgent(
        provider,
        system_prompt=(
            "You are a code packager. The user will paste text that may contain multiple code blocks. "
            "Extract all code blocks, associate each with a filename if available, and return a JSON object "
            "where keys are filenames and values are the full content. "
            "ONLY return a raw JSON object — no explanations, no markdown, no text outside the JSON. "
            "Example:\n\n"
            "{\n \"app.py\": \"import gradio as gr\\n...\",\n \"utils.py\": \"def helper():\\n...\"\n}\n"
        ),
        predefined_messages_formatter_type=MessagesFormatterType.GEMMA_2,
        debug_output=True,
    )
    # Use get_chat_response (not run)
    try:
        settings = provider.get_provider_default_settings()
        return agent.get_chat_response(ai_response, llm_sampling_settings=settings)
    except Exception as e:
        log(f"Error in get_chat_response: {e}")
        return ""  # fallback to empty


def _extract_file_map(response: str) -> dict:
    """Parse agent output into ``{filename: code}``.

    Tries strict JSON first, then a ``Filename: ... ```code``` `` regex
    fallback, and finally dumps the whole response into ``output.txt``.
    """
    file_map = {}
    try:
        parsed = json.loads(response)
        # BUG FIX: json.loads can return any JSON type; the original assumed
        # a dict and an unexpected top-level list later crashed the regex
        # fallback with an uncaught TypeError on item assignment.
        if not isinstance(parsed, dict):
            raise ValueError(f"expected JSON object, got {type(parsed).__name__}")
        # Coerce non-string values so the file write below cannot fail.
        file_map = {
            str(name): body if isinstance(body, str) else json.dumps(body)
            for name, body in parsed.items()
        }
        log("Parsed JSON file map keys: " + ", ".join(file_map.keys()))
    except Exception as e:
        log(f"JSON parse failed: {e}")
        # fallback regex
        pattern = r"Filename:\s*(?P<fname>[\w\.\-/]+)\s*```(?:[a-zA-Z0-9]*\n)?(?P<code>[\s\S]*?)```"
        for m in re.finditer(pattern, response, re.DOTALL):
            file_map[m.group("fname").strip()] = m.group("code").rstrip()
        log("After regex fallback, keys: " + ", ".join(file_map.keys()))
    if not file_map:
        file_map["output.txt"] = response
        log("Fallback to single file: output.txt")
    return file_map


def _write_and_zip(file_map: dict) -> str:
    """Write ``file_map`` under a fresh temp dir and return the zip path."""
    temp_dir = f"/tmp/codeproj_{uuid.uuid4().hex}"
    os.makedirs(temp_dir, exist_ok=True)
    for fname, code in file_map.items():
        # SECURITY FIX: filenames come from model output (untrusted); the
        # original joined them verbatim, so "../../x" escaped temp_dir.
        # Normalize and drop any "", "." or ".." path components.
        parts = [
            p
            for p in os.path.normpath(fname).lstrip("/\\").split(os.sep)
            if p not in ("", ".", "..")
        ]
        if not parts:
            log(f"Skipping unsafe filename: {fname!r}")
            continue
        dest = os.path.join(temp_dir, *parts)
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        with open(dest, "w", encoding="utf-8") as f:
            f.write(code)
        log(f"Wrote file: {dest}")
    zip_name = f"code_bundle_{uuid.uuid4().hex}.zip"
    zip_path = os.path.join("/tmp", zip_name)
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        for root, _, files in os.walk(temp_dir):
            for fn in files:
                fullp = os.path.join(root, fn)
                rel = os.path.relpath(fullp, temp_dir)
                zf.write(fullp, arcname=rel)
                log(f"Added to zip: {rel}")
    return zip_path
# Minimal UI: one textbox in, one downloadable zip out.
with gr.Blocks() as demo:
    gr.Markdown("### Paste AI response below; get downloadable zip of parsed files")
    response_box = gr.Textbox(lines=15, label="AI response text")
    package_btn = gr.Button("Generate Zip")
    zip_out = gr.File(label="Download .zip")
    # Wire the button to the GPU-decorated packaging function.
    package_btn.click(fn=process_and_package, inputs=response_box, outputs=zip_out)

demo.launch()