Update app.py
app.py CHANGED
@@ -9,7 +9,7 @@ import docker
 from huggingface_hub import HfApi, create_repo
 import importlib
 import os
-from transformers import
+from transformers import pipeline, AutoModelForSequenceClassification
 
 # Initialize Flask app
 app = Flask(__name__)
@@ -119,7 +119,7 @@ plugin_manager.load_plugins()
 
 # AI Assistant
 model = AutoModelForSequenceClassification.from_pretrained("microsoft/CodeGPT-small-py")
-codex_pipeline = pipeline("
+codex_pipeline = pipeline("text-generation", model=model)
 
 hf_api = HfApi()
 
@@ -133,7 +133,7 @@ def generate_app(user_idea, project_name):
 
 # Generate code using Codex
 prompt = f"""Create a simple Streamlit app for the project named '{project_name}'. The app should display the following summary: '{summary}'."""
-generated_code = codex_pipeline(prompt)[0]['generated_text']
+generated_code = codex_pipeline(prompt, max_length=516)[0]['generated_text']
 
 # Save the generated code to a file in the project directory
 with open(os.path.join(project_path, "app.py"), "w") as f:
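For context, a minimal standalone sketch of the generation path these changes wire up, runnable outside the Flask app. It assumes the model is loaded as a causal language model with its tokenizer (AutoModelForCausalLM / AutoTokenizer), which the text-generation pipeline expects when handed a model object; the committed code keeps AutoModelForSequenceClassification, so this is an illustrative approximation rather than the exact code in the diff, and the project name and summary values are hypothetical.

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Assumption: load CodeGPT as a causal LM with its tokenizer; the
# text-generation pipeline needs both when given a model object.
model_name = "microsoft/CodeGPT-small-py"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

codex_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Hypothetical inputs mirroring how generate_app() builds its prompt.
project_name = "demo_project"
summary = "A to-do list with add, complete, and delete actions."
prompt = (
    f"Create a simple Streamlit app for the project named '{project_name}'. "
    f"The app should display the following summary: '{summary}'."
)

# Same call shape as the updated line 136: take the first candidate's
# 'generated_text' field from the list of generation results.
generated_code = codex_pipeline(prompt, max_length=516)[0]["generated_text"]
print(generated_code)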