Vageesh1 commited on
Commit
383f65e
·
1 Parent(s): c5ef059

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -9
app.py CHANGED
@@ -15,11 +15,8 @@ import sys
15
  import pandas as pd
16
 
17
def conversational_chat(query):
    """Answer *query* with the retrieval chain and log the turn.

    Feeds the accumulated chat history (kept in Streamlit session state)
    to the chain, appends the new (question, answer) pair to that same
    history list, and returns the answer text.
    """
    history = st.session_state['history']
    result = chain({"question": query, "chat_history": history})
    answer = result["answer"]
    # History is the session-state list itself, so this persists the turn.
    history.append((query, answer))
    return answer
23
 
24
 
25
  user_api_key = st.sidebar.text_input(
@@ -32,10 +29,16 @@ if user_api_key is not None and user_api_key.strip() != "":
32
 
33
  #setting up the LLM
34
  repo_id = "tiiuae/falcon-7b-instruct"
35
# Build the conversational retrieval chain on top of the hosted Falcon model.
falcon_llm = HuggingFaceHub(
    huggingfacehub_api_token=user_api_key,
    repo_id=repo_id,
    model_kwargs={"temperature": 0.6, "max_new_tokens": 2000},
)
chain = ConversationalRetrievalChain.from_llm(llm=falcon_llm)
 
 
 
 
 
 
39
 
40
 
41
  if 'history' not in st.session_state:
 
15
  import pandas as pd
16
 
17
def conversational_chat(query):
    """Send *query* to the LLM chain and return the model's reply.

    Conversation memory is handled inside ``llm_chain`` itself, so this
    wrapper only forwards the user input.
    """
    return llm_chain.predict(human_input=query)
 
 
 
20
 
21
 
22
  user_api_key = st.sidebar.text_input(
 
29
 
30
  #setting up the LLM
31
# Set up the LLM chain: Falcon-7B-Instruct hosted on the HuggingFace Hub,
# with a prompt that threads conversation history and a 2-turn window memory.
repo_id = "tiiuae/falcon-7b-instruct"
prompt = PromptTemplate(
    input_variables=["history", "human_input"],
    template=template,  # NOTE(review): `template` must be defined earlier in the file
)
llm_chain = LLMChain(
    llm=HuggingFaceHub(
        # Fix: original passed undefined name `huggingfacehub_api_token`
        # (NameError at runtime); the sidebar input is bound to `user_api_key`.
        huggingfacehub_api_token=user_api_key,
        # Reuse repo_id instead of duplicating the hard-coded string.
        repo_id=repo_id,
        model_kwargs={"temperature": 0.2},
    ),
    prompt=prompt,
    verbose=True,
    memory=ConversationBufferWindowMemory(k=2),
)
42
 
43
 
44
  if 'history' not in st.session_state: