Add history to prompt
- api_calls.py +2 -1
- app.py +1 -1
api_calls.py
@@ -20,13 +20,14 @@ def call_api(api_path, api_params):
     )
     return response.json()
 
-def api_rag_qa_chain_demo(openai_model_name, query, year, company_name):
+def api_rag_qa_chain_demo(openai_model_name, query, year, company_name, history):
     api_path = "qa/demo"
     api_params = {
         "openai_model_name": openai_model_name,
         "query": query,
         "year": year,
         "company_name": company_name,
+        "prev_turn_of_conversation": history,
     }
     return call_api_stream(api_path, api_params)
 
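For context, call_api_stream is referenced here but not shown in the diff. A minimal sketch of how it might be implemented, assuming a requests-based client and a placeholder API_BASE_URL (both hypothetical, since app.py iterates response.iter_content, which is a requests.Response method):

import requests

# Assumed base URL; the real value is not visible in this diff.
API_BASE_URL = "http://localhost:8000"

def call_api(api_path, api_params):
    # Non-streaming call: POST the params and return the parsed JSON body,
    # matching the `return response.json()` context line above.
    response = requests.post(f"{API_BASE_URL}/{api_path}", json=api_params)
    return response.json()

def call_api_stream(api_path, api_params):
    # Streaming call: app.py iterates response.iter_content(chunk_size=32),
    # so this presumably returns a requests.Response opened with stream=True.
    return requests.post(f"{API_BASE_URL}/{api_path}", json=api_params, stream=True)
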
app.py
@@ -43,7 +43,7 @@ def esgsumm_exe(openai_model_name, year, company_name, tone):
 
 def esgqabot(history, openai_model_name, year, company_name):
     query = history[-1][0]
-    response = api_rag_qa_chain_demo(openai_model_name, query, year, company_name)
+    response = api_rag_qa_chain_demo(openai_model_name, query, year, company_name, history[:-1])
     history[-1][1] = ""
     for chunk in response.iter_content(chunk_size=32):
         if chunk:
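The commit message says the history is added to the prompt, but the server side of the qa/demo endpoint is not part of this diff. A purely hypothetical sketch (names and prompt layout are assumptions) of how that endpoint might render prev_turn_of_conversation, a list of completed [user, assistant] pairs from Gradio's Chatbot, into the prompt:

# Hypothetical server-side sketch (not part of this diff): one way the qa/demo
# endpoint could fold the forwarded turns into the prompt. Gradio's Chatbot
# history is a list of [user_message, assistant_message] pairs, and
# history[:-1] excludes the turn currently being answered.
def build_prompt(query, prev_turn_of_conversation):
    lines = []
    for user_msg, assistant_msg in prev_turn_of_conversation:
        lines.append(f"User: {user_msg}")
        lines.append(f"Assistant: {assistant_msg}")
    lines.append(f"User: {query}")
    return "\n".join(lines)

# Example: one completed turn plus the new query.
print(build_prompt(
    "What were the 2022 emissions?",
    [["Summarize the ESG report.", "The report highlights lower emissions."]],
))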