Update app.py
app.py
CHANGED
@@ -1,16 +1,13 @@
-
-
-
 import uvicorn
 import json
 import requests
 from flask import Flask, request, jsonify
-
+from flask import Response, stream_with_context
 
 
 app = Flask(__name__)
 
-rq = requests.
+rq = requests.Session()
 
 model_names = [
     "meta-llama/Meta-Llama-3-70B-Instruct",
@@ -32,7 +29,7 @@ model_names = [
 
 
 
-def DeepinFra(Api:str, messages:list ,model:str, max_tokens: int = 512, temperature: float = 0.7):
+def DeepinFra_No_stream(Api:str, messages:list ,model:str = "meta-llama/Meta-Llama-3-70B-Instruct", max_tokens: int = 512, temperature: float = 0.7):
 
     url = "https://api.deepinfra.com/v1/openai/chat/completions"
     headers ={
@@ -62,6 +59,45 @@ def DeepinFra(Api:str, messages:list ,model:str, max_tokens: int = 512, temperat
 
     return "Response content: " + result.text
 
+def DeepinFra_stream(Api:str, messages:list ,model: str = "meta-llama/Meta-Llama-3-70B-Instruct", max_tokens: int = 512, temperature: float = 0.7):
+
+    url = "https://api.deepinfra.com/v1/openai/chat/completions"
+    headers ={
+        "Authorization" : f"Bearer {Api}",
+        'Content-Type': 'application/json',
+        'Accept': 'text/event-stream',
+    }
+
+
+
+
+    data = json.dumps(
+        {
+            'model': model,
+            'messages': messages,
+            'temperature': temperature,
+            'max_tokens': max_tokens,
+            'stream': True
+        }, separators=(',', ':')
+    )
+
+    try:
+        result = rq.post(url=url, headers=headers, data=data, stream=True)
+
+        for line in result.iter_lines():
+            if line:
+                line = line.decode('utf-8')
+                data_json = line.split('data: ')[1]
+                data = json.loads(data_json)
+                try:
+                    content = data['choices'][0]['delta']['content']
+                    yield content
+                except:
+                    break
+    except:
+
+
+        return "Response content: " + result.text
 
 
 
@@ -70,13 +106,33 @@ def generate_text():
     data = request.json
     message = data.get("message")
     Api = data.get("api_key")
-    model_name = data.get("model_name", "
+    model_name = data.get("model_name", "meta-llama/Meta-Llama-3-70B-Instruct")
     max_tokens = data.get("max_tokens", 512)
     temperature = data.get("temperature", 0.7)
+    stream = data.get("stream", True)
 
     if not message or not Api:
         return jsonify({"error": "Missing required fields"}), 400
 
-
+    def generate_response(stream: bool):
+        if stream:
+            for response in DeepinFra_stream(Api=Api, messages=message, model=model_name, max_tokens=max_tokens,
+                                             temperature=temperature):
+                yield json.dumps({"response": response}) + "\n"
+        else:
+            response = DeepinFra_No_stream(Api=Api, messages=message, model=model_name, max_tokens=max_tokens,
+                                           temperature=temperature)
+            yield json.dumps({"response": response}) + "\n"
+
+    return Response(stream_with_context(generate_response(stream)), content_type='application/json'), 200
+
+
+
+@app.route("/info", methods=["GET"])
+def get_info():
+    return jsonify({"model_names": model_names}), 200
+
+
 
-
+if __name__=="__main__":
+    app.run()
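
A usage sketch for the updated endpoint, assuming the server runs at Flask's default http://127.0.0.1:5000 and that generate_text() is mounted at /generate (its @app.route decorator sits outside this diff's context lines, so the path is an assumption). Note that the handler forwards the "message" field verbatim to DeepInfra as the OpenAI-style messages list, so it should be a list of role/content objects; the response body carries one {"response": ...} JSON object per line.

import json
import requests

payload = {
    "api_key": "YOUR_DEEPINFRA_API_KEY",  # placeholder, not a real key
    "message": [{"role": "user", "content": "Hello!"}],  # forwarded as `messages`
    "model_name": "meta-llama/Meta-Llama-3-70B-Instruct",
    "max_tokens": 512,
    "temperature": 0.7,
    "stream": True,  # False selects the DeepinFra_No_stream path instead
}

# "/generate" is an assumed route name; the decorator is not shown in this diff.
with requests.post("http://127.0.0.1:5000/generate", json=payload, stream=True) as resp:
    for line in resp.iter_lines():  # one JSON object per line
        if line:
            print(json.loads(line)["response"], end="", flush=True)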
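
The new /info route is shown in full above, so only the address is an assumption here:

import requests

# get_info() returns {"model_names": [...]} with HTTP 200
print(requests.get("http://127.0.0.1:5000/info").json())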
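
One caveat on the added DeepinFra_stream: line.split('data: ')[1] raises IndexError on SSE lines without a "data: " prefix (comments and keep-alives), and a terminating "data: [DONE]" sentinel, if DeepInfra follows the OpenAI convention, lands in json.loads, so the generator currently relies on its bare except: clauses to stop. A more defensive version of that parsing loop, as a sketch under those assumptions:

import json
import requests

def iter_sse_content(result: requests.Response):
    # Defensive rewrite of the loop inside DeepinFra_stream
    # (a sketch, not the committed code).
    for raw in result.iter_lines():
        if not raw:
            continue
        line = raw.decode("utf-8")
        if not line.startswith("data: "):
            continue  # SSE comment or keep-alive, nothing to parse
        payload = line[len("data: "):]
        if payload.strip() == "[DONE]":
            break  # end-of-stream sentinel, not JSON
        delta = json.loads(payload)["choices"][0].get("delta", {})
        if "content" in delta:
            yield delta["content"]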