Update app.py
app.py CHANGED
@@ -5,32 +5,29 @@ import torch
 # Load the model and tokenizer
 model_name = "defog/sqlcoder-7b-2" # use a newer model for better performance
 tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto") #
+model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto") # reduce memory usage
 
-def generate_sql(user_question, create_table_statements):
-
-    prompt = f"Generate a SQL query to answer this question: `{user_question}`\nDDL statements:\n{create_table_statements}\nThe following SQL query best answers the question `{user_question}`:"
+def generate_sql(user_question, create_table_statements, instructions=""):
+    prompt = f"Generate a SQL query to answer this question: `{user_question}`\n{instructions}\n\nDDL statements:\n{create_table_statements}\n<|eot_id|>"
 
-    # Encode the input
     inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
-
-    # Generate the output
-    with torch.no_grad():
-        outputs = model.generate(**inputs, max_length=150)
-
-    # Decode the output
+    outputs = model.generate(**inputs, max_length=150)
     sql_query = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
     return sql_query
 
-#
+# Gradio interface
 with gr.Blocks() as demo:
     gr.Markdown("## SQL Query Generator")
-    user_question = gr.Textbox(label="User Question", placeholder="Please enter your question...")
-    create_table_statements = gr.Textbox(label="DDL Statements", placeholder="Please enter the table's DDL statements...")
-    sql_output = gr.Textbox(label="Generated SQL Query", interactive=False)
 
-
-
+    user_question = gr.Textbox(label="User Question", placeholder="Please enter your question...", value="What is the total revenue from customers in New York?")
+    create_table_statements = gr.Textbox(label="Create Table Statements", placeholder="Please enter the DDL statements...", value="CREATE TABLE customers (id INT, city VARCHAR(50), revenue DECIMAL);")
+    instructions = gr.Textbox(label="Instructions (optional)", placeholder="Please enter additional instructions...", value="")
+
+    submit_btn = gr.Button("Generate SQL Query")
+    output = gr.Textbox(label="Generated SQL Query")
+
+    submit_btn.click(generate_sql, inputs=[user_question, create_table_statements, instructions], outputs=output)
 
 
 if __name__ == "__main__":
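The hunk stops at the if __name__ == "__main__": guard, so the launch call itself is outside this diff. Assuming the unchanged tail of app.py follows the usual Gradio pattern, it would look roughly like the sketch below; the body shown here is an assumption, not confirmed by the commit.

if __name__ == "__main__":
    demo.launch()  # assumed standard Gradio entry point; the actual tail of app.py is not shown in this hunk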
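For a quick check of the revised generate_sql outside the Gradio UI, a minimal sketch like the following could be run inside app.py once the model is loaded. The sample question and DDL simply mirror the default values wired into the textboxes above; note that the returned string still echoes the prompt, because tokenizer.decode(outputs[0], ...) decodes the input tokens along with the generated ones.

# Minimal smoke test for generate_sql(); sample inputs mirror the textbox defaults.
sample_question = "What is the total revenue from customers in New York?"
sample_ddl = "CREATE TABLE customers (id INT, city VARCHAR(50), revenue DECIMAL);"

result = generate_sql(sample_question, sample_ddl)

# model.generate() returns the prompt tokens followed by the newly generated
# tokens, so the decoded string contains the prompt before the SQL query.
print(result)

One caveat on the generation settings: max_length=150 caps the combined length of prompt and output, so a long DDL block leaves little room for the query itself; max_new_tokens is the usual way to bound only the generated portion.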