```python
from fastapi import FastAPI, Query
from transformers import pipeline

app = FastAPI()

# Load the model pipeline; replace "openai-community/gpt2" with your desired model
pipe = pipeline(model="openai-community/gpt2")

@app.get("/")
def read_root():
    return {"message": "API is live. Use the /predict endpoint."}

@app.get("/predict")
def predict(text: str = Query(..., description="Input text for model inference")):
    result = pipe(text)
    return {"predictions": result}
```