# Hugging Face Space: AI Code Packager (GGUF-powered).
# (The original "Spaces / Sleeping" lines here were UI status residue from a
# web-page scrape, not part of the program.)
import os
import tempfile
import zipfile

import gradio as gr
import spaces
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.chat_history import BasicChatHistory
from llama_cpp_agent.chat_history.messages import Roles
from llama_cpp_agent.providers import LlamaCppPythonProvider

# GGUF checkpoint fetched from the Hugging Face Hub.
MODEL_REPO = "tHottie/NeuralDaredevil-8B-abliterated-Q4_K_M-GGUF"
MODEL_FILENAME = "neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf"
# Local path the download below places the weights at.
MODEL_PATH = f"models/{MODEL_FILENAME}"

# Download at import time so the first request does not pay the download
# cost; hf_hub_download is a no-op when the file is already present locally.
hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILENAME, local_dir="./models")
def generate_code_zip(prompt):
    """Run the local LLM on *prompt*, split the response into per-file code
    blocks, and package those files into a zip archive.

    Args:
        prompt: Instruction text describing the code to generate.

    Returns:
        Path to ``generated_code.zip`` inside a fresh temporary directory.
    """
    # NOTE(review): GEMMA_2 message formatting is paired with a
    # Llama-3-derived model here — confirm this is intentional; a mismatched
    # chat template typically degrades generation quality.
    chat_template = MessagesFormatterType.GEMMA_2

    # The model is (re)loaded on every call. Acceptable for a low-traffic
    # Space, but a module-level cache would avoid repeated multi-GB loads.
    llm = Llama(
        model_path=MODEL_PATH,
        flash_attn=True,
        n_gpu_layers=81,
        n_batch=1024,
        n_ctx=8192,
    )
    provider = LlamaCppPythonProvider(llm)
    agent = LlamaCppAgent(
        provider,
        system_prompt="You are an AI code generation agent. Respond with code blocks per file name.",
        predefined_messages_formatter_type=chat_template,
        debug_output=True,
    )

    messages = BasicChatHistory()
    messages.add_message({"role": Roles.user, "content": prompt})

    stream = agent.get_chat_response(
        prompt,
        llm_sampling_settings=provider.get_provider_default_settings(),
        chat_history=messages,
        returns_streaming_generator=False,
        print_output=True,
    )
    # With returns_streaming_generator=False the result should already be a
    # plain string; join defensively in case a generator comes back anyway.
    response = stream if isinstance(stream, str) else "".join(stream)

    file_map = parse_code_blocks(response)

    temp_dir = tempfile.mkdtemp()
    zip_path = os.path.join(temp_dir, "generated_code.zip")
    with zipfile.ZipFile(zip_path, "w") as zipf:
        for filename, code in file_map.items():
            # Security: filenames come from model output. Normalize and make
            # sure the resolved path stays inside temp_dir, otherwise a name
            # like "../evil.py" or an absolute path would escape the sandbox.
            full_path = os.path.normpath(os.path.join(temp_dir, filename))
            if not full_path.startswith(os.path.join(temp_dir, "")):
                continue  # skip traversal attempts rather than crash
            os.makedirs(os.path.dirname(full_path), exist_ok=True)
            with open(full_path, "w", encoding="utf-8") as f:
                f.write(code)
            zipf.write(full_path, arcname=os.path.relpath(full_path, temp_dir))
    return zip_path
def parse_code_blocks(text):
    """Extract ``### filename`` + fenced code block pairs from *text*.

    Expected response layout::

        ### path/to/file.py
        ```python
        <code>
        ```

    Args:
        text: Raw LLM response text.

    Returns:
        dict mapping stripped filename -> stripped code contents. A later
        block with the same filename overwrites an earlier one.
    """
    import re

    # Language tag is optional. The class also accepts '-', '#' and '.' so
    # tags like "objective-c", "c#" or "vb.net" are consumed instead of
    # leaking into the captured file contents.
    pattern = r"###\s*(.*?)\n```(?:[a-zA-Z0-9+#.-]*\n)?(.*?)```"
    file_blocks = {}
    for filename, content in re.findall(pattern, text, re.DOTALL):
        file_blocks[filename.strip()] = content.strip()
    return file_blocks
def interface_fn(prompt):
    """Gradio click handler: generate code for *prompt*, return the zip path.

    Raises:
        gr.Error: on any failure, so the message is shown in the UI. The
            previous behavior returned the string ``"Error: ..."`` to a
            ``gr.File`` output, which Gradio then tried to serve as a file
            path and failed with an unrelated error.
    """
    try:
        return generate_code_zip(prompt)
    except Exception as e:
        # Surface the real failure to the user instead of handing gr.File a
        # bogus path; chain the cause for server-side logs.
        raise gr.Error(f"Error: {e}") from e
# Minimal Gradio UI: one prompt textbox in, one downloadable zip file out.
with gr.Blocks() as demo:
    gr.Markdown("## AI Code Packager (GGUF-Powered)")
    prompt = gr.Textbox(label="Enter AI response or code instructions")
    output = gr.File(label="Download Zip")
    btn = gr.Button("Generate Zip")
    # Wire the button to the handler; the returned path feeds the File widget.
    btn.click(fn=interface_fn, inputs=prompt, outputs=output)

demo.launch()