Update app.py
app.py CHANGED
@@ -13,19 +13,31 @@ from llama_cpp_agent.providers import LlamaCppPythonProvider
 MODEL_NAME = "neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf"
 MODEL_PATH = f"models/{MODEL_NAME}"

-#
+# Pre-download model
 hf_hub_download(
     repo_id="tHottie/NeuralDaredevil-8B-abliterated-Q4_K_M-GGUF",
     filename=MODEL_NAME,
     local_dir="./models",
 )

+LOG_FILE = "/tmp/agent_code_packager.log"
+
+def log(msg: str):
+    try:
+        with open(LOG_FILE, "a", encoding="utf-8") as f:
+            f.write(msg + "\n")
+    except Exception:
+        pass
+    print(msg)
+
 @spaces.GPU(duration=120)
 def process_and_package(ai_response: str):
-
+    log("=== Starting process_and_package ===")
+    log(f"AI response input: {ai_response[:200]}...")
+
+    # Initialize model & agent
     llm = Llama(
         model_path=MODEL_PATH,
-        # Use CPU / fallback settings (or GPU if supported)
         n_gpu_layers=0,
         n_batch=512,
         n_ctx=4096,
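A side note on the pre-download step above: hf_hub_download returns the local path of the file it fetched, so a variant like the sketch below (same repo_id and filename as in the diff; reusing the return value for MODEL_PATH is only a suggestion, not part of this commit) would keep MODEL_PATH in sync with wherever the file actually lands.

```python
from huggingface_hub import hf_hub_download

MODEL_NAME = "neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf"

# hf_hub_download returns the absolute path of the downloaded file,
# so MODEL_PATH can reuse that value instead of being rebuilt by hand.
MODEL_PATH = hf_hub_download(
    repo_id="tHottie/NeuralDaredevil-8B-abliterated-Q4_K_M-GGUF",
    filename=MODEL_NAME,
    local_dir="./models",
)
```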
@@ -35,37 +47,45 @@ def process_and_package(ai_response: str):
     agent = LlamaCppAgent(
         provider,
         system_prompt=(
-            "You are a code parsing assistant. The user will paste
-            "
-            "
+            "You are a code parsing assistant. The user will paste AI output referencing multiple code files. "
+            "Return a JSON object mapping each filename to its full source code text. "
+            "Do not return anything else."
         ),
         predefined_messages_formatter_type=MessagesFormatterType.GEMMA_2,
         debug_output=True
     )

-    #
-
-
-
+    # Call the agent
+    # Use get_chat_response (not run)
+    try:
+        settings = provider.get_provider_default_settings()
+        response = agent.get_chat_response(ai_response, llm_sampling_settings=settings)
+    except Exception as e:
+        log(f"Error in get_chat_response: {e}")
+        response = ""  # fallback to empty
+
+    log("Agent response:")
+    log(response[:1000] if len(response) > 1000 else response)

-    #
+    # Parse output from agent
     file_map = {}
     try:
-        file_map = json.loads(
+        file_map = json.loads(response)
+        log("Parsed JSON file map keys: " + ", ".join(file_map.keys()))
     except Exception as e:
-
-        #
+        log(f"JSON parse failed: {e}")
+        # fallback regex
         pattern = r"Filename:\s*(?P<fname>[\w\.\-/]+)\s*```(?:[a-zA-Z0-9]*\n)?(?P<code>[\s\S]*?)```"
-        for m in re.finditer(pattern,
+        for m in re.finditer(pattern, response, re.DOTALL):
             fname = m.group("fname").strip()
             code = m.group("code").rstrip()
             file_map[fname] = code
-
+        log("After regex fallback, keys: " + ", ".join(file_map.keys()))
     if not file_map:
-
-
+        file_map["output.txt"] = response
+        log("Fallback to single file: output.txt")

-    # Write files
+    # Write files & zip
     temp_dir = f"/tmp/codeproj_{uuid.uuid4().hex}"
     os.makedirs(temp_dir, exist_ok=True)

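The parsing step in the hunk above tries JSON first and then falls back to a regex scan for "Filename:" headers followed by fenced code blocks. A minimal standalone check of that fallback, with the pattern copied verbatim from the diff and a made-up agent response used purely for illustration:

```python
import re

# Pattern copied verbatim from app.py; the sample text below is invented.
pattern = r"Filename:\s*(?P<fname>[\w\.\-/]+)\s*```(?:[a-zA-Z0-9]*\n)?(?P<code>[\s\S]*?)```"

sample = (
    "Filename: src/main.py\n"
    "```python\n"
    "print('hello')\n"
    "```\n"
    "Filename: README.md\n"
    "```\n"
    "A tiny demo project.\n"
    "```\n"
)

file_map = {}
for m in re.finditer(pattern, sample, re.DOTALL):
    file_map[m.group("fname").strip()] = m.group("code").rstrip()

print(file_map)
# {'src/main.py': "print('hello')", 'README.md': 'A tiny demo project.'}
```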
@@ -74,6 +94,7 @@ def process_and_package(ai_response: str):
         os.makedirs(os.path.dirname(dest), exist_ok=True)
         with open(dest, "w", encoding="utf-8") as f:
             f.write(code)
+        log(f"Wrote file: {dest}")

     zip_name = f"code_bundle_{uuid.uuid4().hex}.zip"
     zip_path = os.path.join("/tmp", zip_name)
@@ -83,14 +104,11 @@ def process_and_package(ai_response: str):
             fullp = os.path.join(root, fn)
             rel = os.path.relpath(fullp, temp_dir)
             zf.write(fullp, arcname=rel)
+            log(f"Added to zip: {rel}")

+    log(f"Zip created: {zip_path}")
+    log("=== Finished process_and_package ===\n")
     return zip_path

 with gr.Blocks() as demo:
-    gr.Markdown("### Paste
-    inp = gr.Textbox(lines=15, label="AI response text")
-    btn = gr.Button("Generate Zip")
-    out = gr.File(label="Download .zip")
-    btn.click(fn=process_and_package, inputs=inp, outputs=out)
-
-demo.launch()
+    gr.Markdown("### Paste
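Finally, a hedged sketch of how the bundle returned by process_and_package could be checked afterwards; list_bundle is a hypothetical helper, not part of the commit, and the member names it prints mirror the keys of file_map because each entry is written with arcname=rel in the hunk above.

```python
import zipfile

def list_bundle(zip_path: str) -> list[str]:
    # zip_path is the value returned by process_and_package(...);
    # each member name is the path relative to the temp project dir.
    with zipfile.ZipFile(zip_path) as zf:
        return zf.namelist()

# Hypothetical usage once the Space has produced a bundle:
# print(list_bundle("/tmp/code_bundle_<hex>.zip"))
```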