Commit 06d0c78
Parent(s): d346c50
Add system

- app.py +5 -2
- system/pledge_tracking.py +5 -4
app.py
CHANGED

@@ -175,7 +175,10 @@ def run_model():
     "events": events
 }
 default_log_path = f"{FEEDBACK_DIR}/feedback_{timestamp}_{user_id}.jsonl"
-
+step_id = outputs["step_id"]
+if update_status:
+    update_status(step_id, "All done!")
+    step_id += 1
 with open(default_log_path, "w") as f:
     f.write(json.dumps(log_entry, indent=1))
 
@@ -188,7 +191,7 @@ def run_model():
         repo_type="dataset",
         token=HF_TOKEN
     )
-
+
 
 except Exception as e:
     traceback.print_exc()
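In plain terms, the app.py change moves the closing status update out of the pipeline and into the caller. Below is a minimal runnable sketch of that handoff, assuming only what the diff shows: a (step_id, message) callback and a "step_id" key in the pipeline's output dict. fake_pipeline and the print-based sink are illustrative stand-ins, not the app's real code.

def update_status(step_id, message):
    # Stand-in status sink; the real app presumably forwards this to its UI.
    print(f"[step {step_id}] {message}")

def fake_pipeline(update_fn=None):
    # Illustrative stand-in for run_pipeline: report progress through the
    # callback, then hand the next step counter back under "step_id".
    step_id = 0
    if update_fn:
        update_fn(step_id, "We have extracted 12 events from the documents.")
        step_id += 1
    return {"step_id": step_id}

outputs = fake_pipeline(update_fn=update_status)

# As in the new app.py lines: the caller now emits the final message itself,
# continuing the pipeline's numbering instead of restarting it.
step_id = outputs["step_id"]
update_status(step_id, "All done!")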
system/pledge_tracking.py
CHANGED

@@ -165,7 +165,7 @@ def run_pipeline(claim, pledge_date, pledge_author, start_date, timestamp, user_
 events_num = count_total_events(extracted_event_path)
 
 if update_fn:
-    update_fn(step_id, f"We have extracted {events_num}")
+    update_fn(step_id, f"We have extracted {events_num} events from the documents.")
     step_id+=1
 
 
@@ -185,9 +185,9 @@ def run_pipeline(claim, pledge_date, pledge_author, start_date, timestamp, user_
 sorted_event_path = f"{pipeline_base_dir}/sorted_events.xlsx"
 df.to_excel(sorted_event_path, index=False)
 
-if update_fn:
-    update_fn(step_id, "All done!")
-    step_id += 1
+# if update_fn:
+#     update_fn(step_id, "All done!")
+#     step_id += 1
 
 return {
     "claim_json": claim_json_path,
@@ -199,6 +199,7 @@ def run_pipeline(claim, pledge_date, pledge_author, start_date, timestamp, user_
     "meta_data_dir": meta_data_dir,
     "unsorted_events": extracted_event_path,
     "sorted_events": sorted_event_path,
+    "step_id": step_id
 }
 
 
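Taken together, the two files now share one continuous step counter: run_pipeline's own "All done!" update is commented out, the counter is exposed through the returned "step_id" key, and app.py emits the final status itself. Since the caller reads outputs["step_id"] after run_pipeline returns, the closing message can no longer fire before the pipeline's outputs are actually in hand.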