Update app.py

app.py CHANGED
@@ -265,24 +265,23 @@ def send_to_openai(prompt: str, api_key: str) -> str:
        logging.error(f"Error with OpenAI API: {e}")
        return f"Error with OpenAI API: {e}"

-def
+def copy_text_js(element_id: str) -> str:
    return f"""
    () => {{
        try {{
-            const text = document.querySelector('#{element_id} textarea').value;
+            const elem = document.querySelector('#{element_id} textarea');
+            if (!elem) throw new Error('Element not found');
+            const text = elem.value;
+            if (!text) throw new Error('No text to copy');
            navigator.clipboard.writeText(text);
            return "Copied to clipboard!";
        }} catch (e) {{
            console.error(e);
-            return "Failed to copy";
+            return "Failed to copy: " + e.message;
        }}
    }}
    """

-def open_chatgpt_old() -> str:
-    webbrowser.open_new_tab('https://chat.openai.com')
-    return "Opening ChatGPT in new tab"
-
 def open_chatgpt() -> str:
    """Open ChatGPT in new browser tab"""
    return """window.open('https://chat.openai.com/', '_blank');"""
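The string returned by copy_text_js is a client-side arrow function, not Python. In Gradio 4.x it can be attached to an event through the js= argument of an event listener (earlier releases used _js=); a minimal sketch of that wiring, where the button name and element id are illustrative rather than taken from this diff:

# Sketch only: run the copy handler in the browser, assuming Gradio 4.x
# where event listeners accept js=. copy_prompt_btn and the element id
# "generated_prompt" are illustrative names, not from app.py.
copy_prompt_btn.click(
    fn=None,                              # no server-side callback needed
    inputs=None,
    outputs=None,
    js=copy_text_js("generated_prompt"),
)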
@@ -583,38 +582,38 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
    def handle_model_selection(choice):
        """Handle model selection and update UI"""
        ctx_size = get_model_context_size(choice)
-        return
-
-
-
-
-
+        return (
+            gr.update(visible=choice == "HuggingFace Inference"),  # hf_options
+            gr.update(visible=choice == "Groq API"),               # groq_options
+            gr.update(visible=choice == "OpenAI ChatGPT"),         # openai_options
+            gr.update(value=ctx_size)                              # context_size
+        )

    # PDF Processing Handlers
    def handle_pdf_process(pdf, fmt, ctx_size):
        """Process PDF and update UI state"""
        if not pdf:
-            return
-
-
-
-
-
-            download_files
-
+            return (
+                "Please upload a PDF file.",        # progress_status
+                "",                                 # processed_text
+                "",                                 # pdf_content
+                [],                                 # snippets
+                gr.update(choices=[], value=None),  # snippet_selector
+                None                                # download_files
+            )

        try:
            # Extract and format text
            text = extract_text_from_pdf(pdf.name)
            if text.startswith("Error"):
-                return
-
-
-
-
-
-
-
+                return (
+                    text,
+                    "",
+                    "",
+                    [],
+                    gr.update(choices=[], value=None),
+                    None
+                )

            formatted_text = format_content(text, fmt)
            snippets_list = split_into_snippets(formatted_text, ctx_size)
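handle_model_selection returns one value per component in the outputs list that model_choice.change(...) registers later in this diff (hf_options, groq_options, openai_options, context_size), matched by position. get_model_context_size is defined elsewhere in app.py; purely as an assumption about its shape, it could be a simple lookup:

# Hypothetical helper, not shown in this diff; the names and sizes below
# are illustrative only.
DEFAULT_CONTEXT_WINDOWS = {
    "HuggingFace Inference": 4096,
    "Groq API": 8192,
    "OpenAI ChatGPT": 8192,
}

def get_model_context_size(choice: str) -> int:
    return DEFAULT_CONTEXT_WINDOWS.get(choice, 4096)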
@@ -624,35 +623,35 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
                f.write(formatted_text)
                download_file = f.name

-            return
-
-
-
-
-
-
-
+            return (
+                f"PDF processed successfully! Generated {len(snippets_list)} snippets.",
+                formatted_text,
+                formatted_text,
+                snippets_list,
+                gr.update(choices=update_snippet_choices(snippets_list), value="Snippet 1 of " + str(len(snippets_list))),
+                [download_file]
+            )

        except Exception as e:
            error_msg = f"Error processing PDF: {str(e)}"
            logging.error(error_msg)
-            return
-
-
-
-
-
-
-
+            return (
+                error_msg,
+                "",
+                "",
+                [],
+                gr.update(choices=[], value=None),
+                None
+            )

    def handle_snippet_selection(choice, snippets_list):
        """Handle snippet selection and update prompt"""
        if not snippets_list:
-            return
-
-
-            download_files
-
+            return (
+                "No snippets available.",  # progress_status
+                "",                        # generated_prompt
+                None                       # download_files
+            )

        try:
            idx = get_snippet_index(choice)
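update_snippet_choices and get_snippet_index are referenced here but defined elsewhere in app.py. Going only by the "Snippet 1 of N" label format used above, plausible (hypothetical) versions would be:

# Hypothetical sketches of the snippet helpers referenced above; the real
# implementations in app.py may differ.
def update_snippet_choices(snippets_list):
    # One dropdown label per snippet, e.g. "Snippet 2 of 5"
    total = len(snippets_list)
    return [f"Snippet {i + 1} of {total}" for i in range(total)]

def get_snippet_index(choice) -> int:
    # Recover the zero-based index from a label like "Snippet 2 of 5"
    if not choice:
        return 0
    return int(str(choice).split()[1]) - 1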
@@ -662,40 +661,27 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
            with tempfile.NamedTemporaryFile(delete=False, mode='w', suffix='.txt') as f:
                f.write(selected_snippet)

-            return
-
-
-
-
+            return (
+                f"Selected snippet {idx + 1}",
+                selected_snippet,
+                [f.name]
+            )

        except Exception as e:
            error_msg = f"Error selecting snippet: {str(e)}"
            logging.error(error_msg)
-            return
-
-
-
-
+            return (
+                error_msg,
+                "",
+                None
+            )

    # Copy button handlers
-    def copy_text_js(element_id: str) -> str:
-        return f"""
-        () => {{
-            const text = document.querySelector('#{element_id} textarea').value;
-            navigator.clipboard.writeText(text);
-            return "Copied to clipboard!";
-        }}
-        """
-
    def handle_prompt_generation(snippet_text, template, snippet_choice, snippets_list):
        """Generate prompt from selected snippet"""
        if not snippet_text or not snippets_list:
-            return {
-
-                generated_prompt: "",
-                download_files: None
-            }
-
+            return "No text available for prompt generation.", "", None
+
        try:
            idx = get_snippet_index(snippet_choice)
            prompt = generate_prompt(snippets_list[idx], template or "Summarize the following text:")
@@ -704,20 +690,12 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
            with tempfile.NamedTemporaryFile(delete=False, mode='w', suffix='.txt') as f:
                f.write(prompt)

-            return {
-                progress_status: "Prompt generated successfully!",
-                generated_prompt: prompt,
-                download_files: [f.name]
-            }
+            return "Prompt generated successfully!", prompt, [f.name]

        except Exception as e:
            error_msg = f"Error generating prompt: {str(e)}"
            logging.error(error_msg)
-            return {
-                progress_status: error_msg,
-                generated_prompt: "",
-                download_files: None
-            }
+            return error_msg, "", None

    def handle_copy_action(text):
        """Handle copy to clipboard action"""
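The removed handlers returned {component: value} mappings keyed by the output components; the new ones return plain tuples that Gradio matches positionally against the outputs list wired up below. A small self-contained sketch of the positional style, assuming a recent Gradio release (the components here are illustrative, not from app.py):

import gradio as gr

with gr.Blocks() as demo:
    status = gr.Textbox(label="Status")
    result = gr.Textbox(label="Result")
    btn = gr.Button("Run")

    def run():
        # First element fills status, second fills result, in outputs order.
        return "done", "some generated text"

    btn.click(run, inputs=None, outputs=[status, result])

# demo.launch()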
@@ -743,34 +721,34 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
    generate_prompt_btn.click(
        handle_prompt_generation,
        inputs=[generated_prompt, custom_prompt, snippet_selector, snippets],
-        outputs=
-            progress_status
-            generated_prompt
-            download_files
-
+        outputs=[
+            progress_status,
+            generated_prompt,
+            download_files
+        ]
    )

    # Snippet handling
    snippet_selector.change(
        handle_snippet_selection,
        inputs=[snippet_selector, snippets],
-        outputs=
-            progress_status
-            generated_prompt
-            download_files
-
+        outputs=[
+            progress_status,
+            generated_prompt,
+            download_files
+        ]
    )

    # Model selection
    model_choice.change(
        handle_model_selection,
        inputs=[model_choice],
-        outputs=
-            hf_options
-            groq_options
-            openai_options
-            context_size
-
+        outputs=[
+            hf_options,
+            groq_options,
+            openai_options,
+            context_size
+        ]
    )

    hf_model.change(
@@ -786,19 +764,18 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
    )

    # Context size buttons
-    """
    for size_name, size_value in CONTEXT_SIZES.items():
-        gr.Button(
-
-
-
-        ).
-        lambda
+        gr.Button(
+            size_name,
+            size="sm",
+            scale=1
+        ).click(
+            lambda v=size_value: gr.update(value=v),
            None,
            context_size
-    )
+        )

-    # Download handlers
+    # Download handlers (simplified)
    for btn, content in [
        (download_full_text, pdf_content),
        (download_snippet, generated_prompt),
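The lambda v=size_value: gr.update(value=v) callback binds each button's size through a default argument. Without it, Python's late-binding closures would make every button send the size_value from the loop's final iteration. A minimal illustration:

# Late binding: each lambda looks up `size` when it is called, so every
# callback sees the value from the final loop iteration.
late = [lambda: size for size in (4096, 8192, 32768)]
print([f() for f in late])     # [32768, 32768, 32768]

# Default-argument binding (as in the diff): the value is captured per iteration.
bound = [lambda s=size: s for size in (4096, 8192, 32768)]
print([f() for f in bound])    # [4096, 8192, 32768]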
@@ -808,7 +785,7 @@ with gr.Blocks(theme=gr.themes.Default()) as demo:
        btn.click(
            lambda x: [x] if x else None,
            inputs=[content],
-            outputs=download_files
+            outputs=[download_files]
        )

    # Copy button handlers