# Hugging Face Space page residue (status header, not code): Spaces: Sleeping
| #!/usr/bin/env python3 | |
| """ | |
| Hugging Face Space app for CoEdIT Handler | |
| """ | |
| import gradio as gr | |
| import sys | |
| import os | |
| import json | |
| from flask import Flask, request, jsonify | |
| from flask_cors import CORS | |
| # Add current directory to path so we can import handler | |
| sys.path.append(os.path.dirname(os.path.abspath(__file__))) | |
| from handler import EndpointHandler | |
# Build the CoEdIT endpoint handler once at module load time; the rest of
# the app treats a value of None as "model unavailable".
print("🚀 Initializing CoEdIT Handler...")
handler = None
try:
    handler = EndpointHandler("grammarly/coedit-large")
except Exception as e:  # surface any startup failure in the Space logs
    print(f"❌ Failed to initialize handler: {e}")
else:
    print("✅ Handler initialized successfully")
def process_text(text, num_return_sequences=1, temperature=1.0):
    """Run *text* through the CoEdIT handler and format the result as Markdown.

    Args:
        text: Instruction-prefixed input, e.g. "Fix the grammar: ...".
        num_return_sequences: Number of candidate rewrites to request.
            Gradio sliders deliver floats (e.g. 2.0), so the value is
            coerced to int before being forwarded to the model.
        temperature: Sampling temperature forwarded to the model.

    Returns:
        A Markdown string with the enhanced text and a numbered list of
        changes, or a human-readable error message.
    """
    if handler is None:
        return "❌ Handler not initialized. Please check the logs."
    # Guard against an empty/whitespace-only prompt before hitting the model.
    if not text or not text.strip():
        return "❌ Please enter some text to process."
    try:
        # Shape the payload the way EndpointHandler expects it.
        inputs = {
            "inputs": [text],
            "parameters": {
                # Sliders can hand back floats; the generate API wants an int.
                "num_return_sequences": int(num_return_sequences),
                "temperature": temperature,
            },
        }
        result = handler(inputs)
        if not result.get("success", False):
            return f"❌ Error: {result.get('error', 'Unknown error')}"
        results = result.get("results", [])
        if not results:
            return "No results returned."
        # Only the first result entry is rendered (single-input batch).
        enhanced = results[0].get("enhanced_sentence", "")
        changes = results[0].get("changes", [])
        # Format as Markdown for the gr.Markdown output component.
        response = f"**Enhanced Text:**\n{enhanced}\n\n"
        if changes:
            response += "**Changes Made:**\n"
            for i, change in enumerate(changes, 1):
                original = change.get("original_phrase", "")
                new = change.get("new_phrase", "")
                if original and new:
                    response += f"{i}. '{original}' → '{new}'\n"
        return response
    except Exception as e:
        return f"❌ Error processing text: {str(e)}"
# Create Gradio interface
def create_interface():
    """Build the Gradio Blocks UI for the CoEdIT demo.

    NOTE: the on-screen layout is defined by the context-manager nesting
    below, so the order of the `with gr.Row()/gr.Column()` statements is
    significant.

    Returns:
        The assembled (but not yet launched) gr.Blocks demo.
    """
    with gr.Blocks(title="CoEdIT Handler", theme=gr.themes.Soft()) as demo:
        # Page header.
        gr.Markdown("""
        # CoEdIT Text Editor
        This is a custom handler for the Grammarly CoEdIT model, providing grammar correction and text enhancement.
        """)
        with gr.Row():
            with gr.Column():
                # Left column: instruction-prefixed input plus generation controls.
                input_text = gr.Textbox(
                    label="Input Text",
                    placeholder="Fix the grammar: When I grow up, I start to understand what he said is quite right.",
                    lines=3
                )
                with gr.Row():
                    num_sequences = gr.Slider(
                        minimum=1,
                        maximum=5,
                        value=1,
                        step=1,
                        label="Number of variations"
                    )
                    temperature = gr.Slider(
                        minimum=0.1,
                        maximum=2.0,
                        value=1.0,
                        step=0.1,
                        label="Temperature"
                    )
                process_btn = gr.Button("Process Text", variant="primary")
            with gr.Column():
                # Right column: Markdown-rendered result from process_text.
                output_text = gr.Markdown(label="Enhanced Text")
        # Example texts — clicking one fills the input textbox.
        gr.Examples(
            examples=[
                "Fix the grammar: When I grow up, I start to understand what he said is quite right.",
                "Make this text coherent: Their flight is weak. They run quickly through the tree canopy.",
                "Rewrite to make this easier to understand: A storm surge is what forecasters consider a hurricane's most treacherous aspect.",
                "Paraphrase this: Do you know where I was born?",
                "Write this more formally: omg i love that song im listening to it right now"
            ],
            inputs=input_text
        )
        # Event handlers: wire the button to process_text.
        process_btn.click(
            fn=process_text,
            inputs=[input_text, num_sequences, temperature],
            outputs=output_text
        )
        # API endpoint info shown at the bottom of the page.
        gr.Markdown("""
        ## API Endpoint
        This Space also provides an API endpoint at `/predict` for programmatic access:
        ```bash
        curl -X POST "https://your-space-url.hf.space/predict" \\
        -H "Content-Type: application/json" \\
        -d '{"inputs": ["Your text here"]}'
        ```
        """)
    return demo
# Create Flask app for API endpoints (served alongside the Gradio UI;
# see the __main__ block, which runs it on a separate port).
app = Flask(__name__)
CORS(app)  # Enable CORS for cross-origin requests
@app.route("/predict", methods=["POST"])
def api_predict():
    """POST /predict — JSON inference endpoint for external applications.

    Expects a JSON body of the form
        {"inputs": "text" | ["text", ...], "parameters": {...}}
    and returns the handler's result dict as JSON, or an error envelope
    with an appropriate HTTP status code (400 bad request, 503 model not
    loaded, 500 unexpected failure).

    NOTE(review): the view was previously never registered with Flask even
    though the UI documents a `/predict` URL — the route decorator above
    fixes that.
    """
    try:
        # Get JSON data from request
        data = request.get_json()
        if not data:
            return jsonify({
                "success": False,
                "error": "No JSON data provided"
            }), 400
        # Extract inputs and parameters
        inputs = data.get('inputs', [])
        parameters = data.get('parameters', {})
        # Accept a bare string as shorthand for a one-element batch.
        if isinstance(inputs, str):
            inputs = [inputs]
        if not inputs:
            return jsonify({
                "success": False,
                "error": "No inputs provided"
            }), 400
        # Fail fast with a clear 503 if the model never loaded, instead of
        # letting `handler(...)` raise TypeError and surface as a 500.
        if handler is None:
            return jsonify({
                "success": False,
                "error": "Handler not initialized"
            }), 503
        # Process through handler
        result = handler({
            "inputs": inputs,
            "parameters": parameters
        })
        return jsonify(result)
    except Exception as e:
        return jsonify({
            "success": False,
            "error": f"Error processing request: {str(e)}"
        }), 500
@app.route("/health", methods=["GET"])
def health_check():
    """GET /health — liveness probe reporting whether the model loaded.

    NOTE(review): `/health` is listed in the api_info payload but the view
    was never registered with Flask; the route decorator above fixes that.
    """
    return jsonify({
        "status": "healthy",
        "handler_initialized": handler is not None
    })
@app.route("/info", methods=["GET"])
def api_info():
    """GET /info — static metadata describing the API surface.

    NOTE(review): `/info` is listed in this very payload but the view was
    never registered with Flask; the route decorator above fixes that.
    """
    return jsonify({
        "name": "CoEdIT Grammar Corrector API",
        "version": "1.0.0",
        "description": "API for grammar correction and text enhancement using Grammarly CoEdIT model",
        "endpoints": {
            "/predict": "POST - Main inference endpoint",
            "/health": "GET - Health check",
            "/info": "GET - API information"
        },
        "input_format": {
            "inputs": "List of strings or single string to process",
            "parameters": {
                "num_return_sequences": "Number of variations to generate (default: 1)",
                "temperature": "Sampling temperature (default: 1.0)"
            }
        },
        "example_request": {
            "inputs": ["Fix the grammar: When I grow up, I start to understand what he said is quite right."],
            "parameters": {
                "num_return_sequences": 1,
                "temperature": 1.0
            }
        }
    })
# Create the interface and serve both frontends when run as a script:
# Gradio owns port 7860 (and the main thread); Flask serves the JSON API
# on port 7861 from a background thread.
if __name__ == "__main__":
    demo = create_interface()

    import threading

    def run_flask():
        """Serve the Flask API on port 7861."""
        app.run(host="0.0.0.0", port=7861, debug=False)

    # Daemon thread so the API dies together with the main process.
    threading.Thread(target=run_flask, daemon=True).start()

    # Gradio blocks the main thread until shutdown.
    demo.launch(server_name="0.0.0.0", server_port=7860)