peihsin0715 committed
Commit: 2c00b52
Parent: 3ca6cb8

Fix data saving

Files changed:
- backend/server.py  +2 -2
- backend/utils/utils.py  +2 -2
backend/server.py
CHANGED
@@ -418,7 +418,7 @@ def run_pipeline():
     _GENERATION_RESULTS = evaluated_results

     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-    os.makedirs("
+    os.makedirs("tmp", exist_ok=True)
     output_file = f"/tmp/pipeline_generation_{timestamp}.csv"
     evaluated_results.to_csv(output_file, index=False)
     results['generation_file'] = output_file

@@ -589,7 +589,7 @@ def run_pipeline():


 if __name__ == '__main__':
-    os.makedirs("
+    os.makedirs("tmp", exist_ok=True)
     print("Starting minimal Flask server...")
     print("Available endpoints:")
     print(" GET /health - Health check")
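Both hunks add the same guard: pandas' DataFrame.to_csv raises if the parent directory of the target path does not exist, so the directory is created up front (exist_ok=True makes the call a no-op when it is already there). Note that the guard creates a relative tmp directory while output_file is written under the absolute /tmp. A minimal sketch of the same idea that derives the guarded directory from the output path itself (the names below are illustrative, not code from this repository):

import os
from datetime import datetime

import pandas as pd


def save_generation_results(evaluated_results: pd.DataFrame, out_dir: str = "/tmp") -> str:
    """Hypothetical sketch: write results to a timestamped CSV, creating the directory first."""
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    output_file = os.path.join(out_dir, f"pipeline_generation_{timestamp}.csv")
    # Guard the directory the file is actually written to; exist_ok=True keeps reruns from failing.
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    evaluated_results.to_csv(output_file, index=False)
    return output_file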
backend/utils/utils.py
CHANGED
@@ -102,7 +102,7 @@ def load_model_and_tokenizer(model_name: str):
         print(f"Traceback: {traceback.format_exc()}")
         raise RuntimeError(f"Failed to load model '{model_name}': {e}")

-def finetune(train_texts, tokenizer, model, num_epochs=20, output_dir='
+def finetune(train_texts, tokenizer, model, num_epochs=20, output_dir='/temp/'):
     train_path = f"/tmp/train.txt"

     with open(train_path, "w", encoding="utf-8") as f:
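The only change in this hunk is the default value of output_dir; the previous default is truncated in this view. The function body already writes its training file under /tmp/ (note the new default reads '/temp/', a different path from '/tmp/'). Below is a hypothetical, self-contained sketch of that write step with the same directory guard the rest of this commit adds; the helper name and loop body are assumptions, not the repository's implementation:

import os


def write_train_file(train_texts, train_path="/tmp/train.txt"):
    """Hypothetical sketch: persist training texts before fine-tuning, guarding the directory."""
    os.makedirs(os.path.dirname(train_path), exist_ok=True)
    with open(train_path, "w", encoding="utf-8") as f:
        for text in train_texts:
            # One training example per line; the exact formatting used by finetune() is not shown here.
            f.write(text.strip() + "\n")
    return train_path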
@@ -575,7 +575,7 @@ def _ensure_plot_saved(
     target: float = None,
     bins: int = 30,
 ) -> str:
-    os.makedirs("
+    os.makedirs("tmp", exist_ok=True)
     path = os.path.join("data", f"{basename}.png")

     plt.figure(figsize=(8, 5))
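As in server.py, the added call creates tmp while the plot path is built under data; whether that mismatch is intentional is not visible in this diff. A minimal, self-contained sketch of the save-with-guard pattern applied to the directory the file actually lands in (the function name and arguments are illustrative, not the repository's _ensure_plot_saved):

import os

import matplotlib
matplotlib.use("Agg")  # non-interactive backend so the sketch runs without a display
import matplotlib.pyplot as plt


def save_histogram(values, basename, out_dir="data", bins=30):
    """Hypothetical sketch: render a histogram and save it, creating out_dir first."""
    os.makedirs(out_dir, exist_ok=True)  # guard the directory the PNG is written to
    path = os.path.join(out_dir, f"{basename}.png")
    plt.figure(figsize=(8, 5))
    plt.hist(values, bins=bins)
    plt.savefig(path)
    plt.close()  # release the figure so repeated calls do not accumulate open figures
    return path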