MegaTronX committed
Commit 313dc05 · verified · 1 Parent(s): 35e44f5

Upload 5 files

Files changed (5)
  1. app.py +26 -44
  2. model_runner.py +22 -0
  3. parser.py +30 -0
  4. requirements.txt +2 -0
  5. zip_util.py +38 -0
app.py CHANGED
@@ -1,52 +1,34 @@
- import spaces
  import gradio as gr
- from huggingface_hub import hf_hub_download

- # Download the GGUF model
- hf_hub_download(
-     repo_id="tHottie/NeuralDaredevil-8B-abliterated-Q4_K_M-GGUF",
-     filename="neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf",
-     local_dir="./models"
- )

- @spaces.GPU(duration=120)
- def process_code_response(code_response):
-     # Process the code response to extract the relevant files
-     # For example, you could use a regular expression to find the file paths in the text
-     import re
-     file_paths = re.findall(r'file: (.+)', code_response)
-
-     # Create a zip file with the extracted files
-     import zipfile
-     with zipfile.ZipFile('output.zip', 'w') as zip_file:
-         for file_path in file_paths:
-             zip_file.write(file_path)

- @spaces.GPU(duration=120)
- def respond(message):
-     # Call the GGUF model to generate a response
-     llm = Llama(
-         model_path=f"models/neuraldaredevil-8b-abliterated-q4_k_m-imat.gguf",
-         flash_attn=True,
-         n_gpu_layers=81,
-         n_batch=1024,
-         n_ctx=8192,
-     )
-
-     # Process the response to extract code files
-     processed_response = process_code_response(message)
-
-     return processed_response

- def main():
-     interface = gr.Interface(
-         fn=respond,
-         inputs="text",
-         outputs="file",
-         title="GGUF Code Generator"
      )

-     interface.launch()

- if __name__ == "__main__":
-     main()

  import gradio as gr
+ from model_runner import ModelRunner
+ from parser import parse_to_files
+ from zip_util import make_zip_from_files
+ import os

+ # Path or model identifier for your GGUF model
+ MODEL_PATH = os.environ.get("GGUF_MODEL_PATH", "path/to/your/model.gguf")

+ # Instantiate once
+ runner = ModelRunner(MODEL_PATH)

+ def process_and_zip(ai_response: str) -> str:
+     """
+     Given the pasted AI response: interpret it with the model, parse the output into files, zip them, and return the zip path.
+     """
+     model_output = runner.interpret_code_description(ai_response)
+     file_list = parse_to_files(model_output)
+     zip_path = make_zip_from_files(file_list)
+     return zip_path

+ with gr.Blocks() as demo:
+     gr.Markdown("### Paste your AI model response (with code descriptions) below")
+     input_box = gr.Textbox(lines=15, placeholder="Paste AI response here")
+     output_zip = gr.DownloadButton(
+         "Download code zip",
+         # value (the zip path) is supplied by the click handler below
      )
+     submit = gr.Button("Generate Code Bundle")

+     # When clicked, run process_and_zip
+     submit.click(fn=process_and_zip, inputs=input_box, outputs=output_zip)

+ demo.launch()
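
For reference, the same pipeline can be exercised without the UI (importing app.py directly would also call demo.launch() at module scope). A minimal sketch, not part of the commit, assuming a valid GGUF file at a hypothetical local path:

    from model_runner import ModelRunner
    from parser import parse_to_files
    from zip_util import make_zip_from_files

    # Hypothetical model location; any local GGUF file would do.
    runner = ModelRunner("./models/my-model.gguf")
    model_output = runner.interpret_code_description(
        "One file, hello.py, that prints 'hi'"
    )
    files = parse_to_files(model_output)
    print(make_zip_from_files(files))  # e.g. /tmp/code_bundle_<hex>.zip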
 
model_runner.py ADDED
@@ -0,0 +1,22 @@
+ import os
+ from llama_cpp import Llama  # assuming llama-cpp-python is used to run GGUF
+
+ class ModelRunner:
+     def __init__(self, model_path: str):
+         # Initialize the model once
+         self.llm = Llama(model_path=model_path)
+
+     def interpret_code_description(self, prompt: str) -> str:
+         """
+         Given the pasted AI response (prompt), return the model’s parsing output
+         (e.g. JSON or a DSL).
+         """
+         # You may want to wrap the prompt with instructions
+         wrapped = (
+             "You are a code parser. The following is a description of multiple files with code. "
+             "Produce as output a JSON object mapping filenames to file contents. "
+             "If something is ambiguous, try your best.\n\n"
+             + prompt
+         )
+         resp = self.llm(prompt=wrapped, max_tokens=2048)
+         return resp["choices"][0]["text"]
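
Calling a llama-cpp-python Llama instance returns an OpenAI-style completion dict, which is why the last line indexes resp["choices"][0]["text"]. A usage sketch, not part of the commit (the model path is hypothetical):

    from model_runner import ModelRunner

    runner = ModelRunner("./models/my-model.gguf")  # hypothetical path
    out = runner.interpret_code_description(
        "Two files: a.py prints 1, b.py prints 2"
    )
    print(out)  # ideally a JSON object mapping filenames to contents
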
parser.py ADDED
@@ -0,0 +1,30 @@
+ import json
+ import re
+ from typing import List, Tuple, Dict
+
+ def parse_to_files(model_output: str) -> List[Tuple[str, str]]:
+     """
+     Parses the model’s output (ideally JSON) to a list of (filename, content).
+     Fallback heuristics if output is not strictly JSON.
+     """
+     # First try JSON parsing
+     try:
+         obj = json.loads(model_output)
+         files = []
+         for fname, content in obj.items():
+             files.append((fname, content))
+         return files
+     except json.JSONDecodeError:
+         # fallback: look for patterns like:
+         # Filename: xyz.py \n```python\n ... \n```
+         pattern = r"Filename:\s*(?P<fname>[\w\.\-/]+)\s*```(?:[a-zA-Z0-9]*)\n(?P<code>[\s\S]*?)```"
+         matches = re.finditer(pattern, model_output)
+         files = []
+         for m in matches:
+             fname = m.group("fname").strip()
+             code = m.group("code")
+             files.append((fname, code))
+         if files:
+             return files
+         # ultimate fallback: write everything into a single file
+         return [("all_code.txt", model_output)]
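
The regex fallback can be checked in isolation with a hand-written non-JSON response. A quick sketch, not part of the commit (the sample text is made up); json.loads fails on it, so the pattern branch runs:

    from parser import parse_to_files

    sample = (
        "Filename: hello.py\n"
        "```python\n"
        "print('hi')\n"
        "```\n"
    )
    print(parse_to_files(sample))  # [('hello.py', "print('hi')\n")]
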
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ gradio>=4.0
+ llama-cpp-python
zip_util.py ADDED
@@ -0,0 +1,38 @@
+ import os
+ import zipfile
+ import uuid
+ from typing import List, Tuple, Optional
+
+ def make_zip_from_files(
+     files: List[Tuple[str, str]],
+     zip_name: Optional[str] = None,
+     out_dir: str = "/tmp"
+ ) -> str:
+     """
+     Take a list of (filename, content) tuples, write them into a temp folder,
+     zip them, and return the path to the zip file.
+     """
+     if zip_name is None:
+         zip_name = f"code_bundle_{uuid.uuid4().hex}.zip"
+     zip_path = os.path.join(out_dir, zip_name)
+     # Make a temp working directory
+     temp_dir = os.path.join(out_dir, f"code_dir_{uuid.uuid4().hex}")
+     os.makedirs(temp_dir, exist_ok=True)
+
+     for fname, content in files:
+         # ensure subdirectories exist
+         full_path = os.path.join(temp_dir, fname)
+         os.makedirs(os.path.dirname(full_path), exist_ok=True)
+         with open(full_path, "w", encoding="utf-8") as f:
+             f.write(content)
+
+     # create zip
+     with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
+         for root, _, fnames in os.walk(temp_dir):
+             for f in fnames:
+                 full = os.path.join(root, f)
+                 # store relative path inside zip
+                 rel = os.path.relpath(full, start=temp_dir)
+                 zf.write(full, arcname=rel)
+
+     return zip_path
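
A small usage sketch, not part of the commit. Nested filenames become subdirectories in the temp folder and relative paths inside the zip; the default out_dir assumes a POSIX-style /tmp:

    from zip_util import make_zip_from_files

    path = make_zip_from_files([
        ("pkg/__init__.py", ""),
        ("pkg/main.py", "print('hi')\n"),
    ])
    print(path)  # /tmp/code_bundle_<hex>.zip containing pkg/__init__.py and pkg/main.py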