Spaces: acecalisto3/DevToolKit (Runtime error)

Update app.py

app.py CHANGED
@@ -1,15 +1,10 @@
 import os
-import sys
 import subprocess
 import streamlit as st
 from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 import black
 from pylint import lint
 from io import StringIO
-import openai
-
-# Set your OpenAI API key here
-openai.api_key = "YOUR_OPENAI_API_KEY"
 
 HUGGING_FACE_REPO_URL = "https://huggingface.co/spaces/acecalisto3/DevToolKit"
 PROJECT_ROOT = "projects"
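The removed lines had hard-coded the OpenAI key directly in app.py. If any hosted model still needs a credential after this change, a safer pattern on Spaces is to read it from an environment variable set as a Space secret rather than committing it. A minimal sketch, assuming a secret named HF_TOKEN (a name this repo does not define):

import os

# Read the token from the environment (e.g. a Space secret); HF_TOKEN is an
# assumed name, not something this commit introduces.
HF_TOKEN = os.environ.get("HF_TOKEN")
if HF_TOKEN is None:
    raise RuntimeError("HF_TOKEN is not set; add it as a Space secret before deploying.")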
@@ -41,6 +36,7 @@ class AIAgent:
         agent_prompt = f"""
         As an elite expert developer, my name is {self.name}. I possess a comprehensive understanding of the following areas:
         {skills_str}
+
         I am confident that I can leverage my expertise to assist you in developing and deploying cutting-edge web applications. Please feel free to ask any questions or present any challenges you may encounter.
         """
         return agent_prompt
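skills_str is referenced in the prompt but never shown in this diff; one plausible construction, purely an assumption for illustration, is to join the agent's skill list:

# Hypothetical helper: how skills_str might be built. self.skills is assumed
# to be a list of strings; neither name is confirmed by this commit.
skills_str = "\n".join(f"- {skill}" for skill in self.skills)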
@@ -158,22 +154,6 @@ def terminal_interface(command, project_name=None):
     st.session_state.current_state['toolbox']['terminal_output'] = result.stderr
     return result.stderr
 
-def code_editor_interface(code):
-    try:
-        formatted_code = black.format_str(code, mode=black.FileMode())
-    except black.NothingChanged:
-        formatted_code = code
-    result = StringIO()
-    sys.stdout = result
-    sys.stderr = result
-    (pylint_stdout, pylint_stderr) = lint.py_run(code, return_std=True)
-    sys.stdout = sys.__stdout__
-    sys.stderr = sys.__stderr__
-    lint_message = pylint_stdout.getvalue() + pylint_stderr.getvalue()
-    st.session_state.current_state['toolbox']['formatted_code'] = formatted_code
-    st.session_state.current_state['toolbox']['lint_message'] = lint_message
-    return formatted_code, lint_message
-
 def summarize_text(text):
     summarizer = pipeline("summarization")
     summary = summarizer(text, max_length=50, min_length=25, do_sample=False)
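The deleted code_editor_interface had two latent problems worth noting: it depended on the sys import that this commit also removes, and pylint's lint.py_run expects command-line arguments (normally a file path), not raw source text. If the formatter/linter panel is wanted back, a hedged sketch that avoids redirecting stdout by writing the snippet to a temporary file and invoking pylint as a subprocess (assuming pylint is installed in the Space) could look like:

import os
import subprocess
import tempfile

import black


def code_editor_interface(code: str):
    # Format with black; fall back to the original source if nothing changes.
    try:
        formatted_code = black.format_str(code, mode=black.FileMode())
    except black.NothingChanged:
        formatted_code = code

    # pylint wants a real file path, so write the snippet to a temp file.
    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tmp:
        tmp.write(formatted_code)
        tmp_path = tmp.name
    try:
        # Run pylint out of process and capture its report.
        result = subprocess.run(["pylint", tmp_path], capture_output=True, text=True)
        lint_message = result.stdout + result.stderr
    finally:
        os.unlink(tmp_path)

    return formatted_code, lint_message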
@@ -186,10 +166,25 @@ def sentiment_analysis(text):
     st.session_state.current_state['toolbox']['sentiment'] = sentiment[0]
     return sentiment[0]
 
+# ... [rest of the translate_code function, but remove the OpenAI API call and replace it with your own logic] ...
+
+def generate_code(code_idea):
+    # Replace this with a call to a Hugging Face model or your own logic
+    # For example, using a text-generation pipeline:
+    generator = pipeline('text-generation', model='gpt4o')
+    generated_code = generator(code_idea, max_length=10000, num_return_sequences=1)[0]['generated_text']
+    messages=[
+        {"role": "system", "content": "You are an expert software developer."},
+        {"role": "user", "content": f"Generate a Python code snippet for the following idea:\n\n{code_idea}"}
+    ]
+    st.session_state.current_state['toolbox']['generated_code'] = generated_code
+
+    return generated_code
+
 def translate_code(code, input_language, output_language):
     # Define a dictionary to map programming languages to their corresponding file extensions
     language_extensions = {
-
+
     }
 
     # Add code to handle edge cases such as invalid input and unsupported programming languages
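As committed, generate_code is a likely cause of the Space's runtime error: 'gpt4o' is unlikely to resolve to a loadable text-generation checkpoint on the Hub, the messages list is dead code left over from the removed OpenAI call, and max_length=10000 is far beyond what small open models accept. The adjacent translate_code stub also lands with an empty language_extensions mapping. A hedged sketch of both, with the model id, token budget, and dictionary entries as illustrative assumptions rather than anything taken from the commit:

from transformers import pipeline
import streamlit as st


def generate_code(code_idea):
    # Placeholder model id; any Hub checkpoint that supports text-generation
    # could be substituted here. gpt2 is used only to keep the sketch runnable.
    generator = pipeline("text-generation", model="gpt2")
    prompt = f"# Task: generate Python code for the following idea:\n# {code_idea}\n"
    generated_code = generator(prompt, max_new_tokens=256, num_return_sequences=1)[0]["generated_text"]
    st.session_state.current_state["toolbox"]["generated_code"] = generated_code
    return generated_code


def translate_code(code, input_language, output_language):
    # Example entries only; the committed dictionary is empty, so these
    # mappings are assumptions about what the author intended.
    language_extensions = {
        "python": ".py",
        "javascript": ".js",
        "java": ".java",
    }
    if input_language.lower() not in language_extensions:
        raise ValueError(f"Unsupported input language: {input_language}")
    if output_language.lower() not in language_extensions:
        raise ValueError(f"Unsupported output language: {output_language}")
    # The translation itself is out of scope for this sketch.
    raise NotImplementedError("translation logic not implemented")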
@@ -413,4 +408,4 @@ elif app_mode == "Workspace Chat App":
 
     # Display current state for debugging
     st.sidebar.subheader("Current State")
-    st.sidebar.json(st.session_state.current_state)
+    st.sidebar.json(st.session_state.current_state)
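One more note on the debugging sidebar: st.sidebar.json assumes st.session_state.current_state was initialized earlier in the script, and on a fresh session a missing key here would also surface as a runtime error. A common guard, with the nested keys assumed from the fields referenced elsewhere in app.py:

import streamlit as st

# Initialize the shared state once per session before any toolbox code touches it.
if "current_state" not in st.session_state:
    st.session_state.current_state = {"toolbox": {}}

st.sidebar.subheader("Current State")
st.sidebar.json(st.session_state.current_state)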