Update app.py
app.py CHANGED

@@ -21,10 +21,15 @@ except Exception as e:
     st.stop()
 
 # 3. Q&A function
-def answer_question(gemma, temp, max, question):
+def answer_question(repo_id, temperature, max_length, question):
     # 4. Initialize the Gemma model
     try:
-        llm = HuggingFaceEndpoint(
+        llm = HuggingFaceEndpoint(
+            repo_id=repo_id,
+            temperature=temperature,
+            max_length=max_length,
+            huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN
+        )
     except Exception as e:
         st.error(f"Gemma model failed to load: {e}")
         st.stop()

@@ -51,7 +56,7 @@ def answer_question(gemma, temp, max, question):
 # 6. Streamlit UI
 st.title("Gemma Knowledge Base Q&A System")
 
-gemma = st.selectbox("repo-id", ("google/gemma-
+gemma = st.selectbox("repo-id", ("google/gemma-7b-it", "google/gemma-2b-it", "google/recurrentgemma-2b-it"), 2)
 temperature = st.number_input("temperature", value=1.0)
 max_length = st.number_input("max_length", value=1024)
 question = st.text_area("Enter your question", "What are the features of Gemma?")