sayanAIAI committed on
Commit 05ced71 · verified · 1 Parent(s): 25c3da1

Update main.py

Files changed (1)
  1. main.py +39 -37
main.py CHANGED
@@ -1,37 +1,39 @@
- from flask import Flask, render_template, request, jsonify
- from transformers import pipeline, AutoTokenizer
-
- app = Flask(__name__)
-
- model_name = "sshleifer/distilbart-cnn-12-6"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
- summarizer = pipeline("summarization", model=model_name)
-
- @app.route("/")
- def index():
-     return render_template("index.html")
-
- @app.route("/summarize", methods=["POST"])
- def summarize():
-     try:
-         data = request.get_json()
-         text = data.get("text", "").strip()
-         if not text:
-             return jsonify({"error": "No text provided"}), 400
-
-         input_tokens = tokenizer.encode(text, return_tensors="pt")
-         input_len = input_tokens.shape[1]
-
-         max_len = max(10, min(100, input_len // 2))
-         min_len = max(5, max_len // 2)
-
-         summary = summarizer(text, max_length=max_len, min_length=min_len, do_sample=False)[0]['summary_text']
-         return jsonify({"summary": summary})
-     except Exception as e:
-         return jsonify({"error": str(e)}), 500
-
-
-
- if __name__ == "__main__":
-     app.run(debug=True)
-
+ import os
+ os.environ['TRANSFORMERS_CACHE'] = '/tmp'
+ from flask import Flask, render_template, request, jsonify
+ from transformers import pipeline, AutoTokenizer
+
+ app = Flask(__name__)
+
+ model_name = "sshleifer/distilbart-cnn-12-6"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ summarizer = pipeline("summarization", model=model_name)
+
+ @app.route("/")
+ def index():
+     return render_template("index.html")
+
+ @app.route("/summarize", methods=["POST"])
+ def summarize():
+     try:
+         data = request.get_json()
+         text = data.get("text", "").strip()
+         if not text:
+             return jsonify({"error": "No text provided"}), 400
+
+         input_tokens = tokenizer.encode(text, return_tensors="pt")
+         input_len = input_tokens.shape[1]
+
+         max_len = max(10, min(100, input_len // 2))
+         min_len = max(5, max_len // 2)
+
+         summary = summarizer(text, max_length=max_len, min_length=min_len, do_sample=False)[0]['summary_text']
+         return jsonify({"summary": summary})
+     except Exception as e:
+         return jsonify({"error": str(e)}), 500
+
+
+
+ if __name__ == "__main__":
+     app.run(debug=True)
+
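
The substantive change is the two lines added at the top of the file: TRANSFORMERS_CACHE is pointed at /tmp before transformers is imported, so model files are downloaded to a directory that is writable. This is a common workaround on Hugging Face Spaces, where the default cache location under the container user's home directory is often not writable and from_pretrained fails with a permission error. The rest of the file is unchanged; the diff reads +39 -37 only because the two new lines shift everything below them. For reference, the length heuristic in summarize() halves the input length: a 60-token input gives max_len = min(100, 60 // 2) = 30 and min_len = max(5, 30 // 2) = 15, while any input longer than 200 tokens is capped at max_len = 100.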
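For anyone verifying the change, a minimal client sketch follows. The host and port are assumptions (Flask's debug server defaults to 127.0.0.1:5000), not part of the commit, and the model is downloaded into /tmp when the app starts, not per request.

# Hypothetical smoke test for the /summarize endpoint (not part of the commit).
# Assumes the app is running on Flask's default host/port: 127.0.0.1:5000.
import requests

resp = requests.post(
    "http://127.0.0.1:5000/summarize",
    json={"text": "Paste a long article here to get a short summary back."},
    timeout=120,  # summarization on CPU can be slow
)
print(resp.status_code, resp.json())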