acloudfan committed on
Commit
a19c4ee
·
verified ·
1 Parent(s): 6dfc009

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -26
app.py CHANGED
@@ -16,7 +16,7 @@ from langchain_core.messages import HumanMessage, AIMessage
16
  # Without this you will need to copy/paste the API key with every change
17
  try:
18
  # CHANGE the location of the file
19
- load_dotenv('C:\\Users\\raj\\.jupyter\\.env')
20
  # Add the API key to the session - use it for populating the interface
21
  if os.getenv('OPENAI_API_KEY'):
22
  st.session_state['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY')
@@ -35,6 +35,12 @@ if 'OPENAI_API_KEY' in st.session_state:
35
  else:
36
  openai_api_key = st.sidebar.text_input('OpenAI API key',placeholder='copy & paste your OpenAI API key')
37
 
 
 
 
 
 
 
38
  ### 2. Define utility functions to invoke the LLM
39
 
40
  # Create an instance of the LLM for summarization
@@ -83,7 +89,7 @@ def get_llm_response(prompt):
83
  return response
84
 
85
  # Initialize the session state memory
86
- if 'MEMORY' not in st.session_state:
87
  memory = ConversationSummaryMemory(
88
  llm = get_summarization_llm(),
89
  human_prefix='user',
@@ -98,35 +104,37 @@ if 'MEMORY' not in st.session_state:
98
  # This is needed as streamlit re-runs the entire script when user provides input in a widget
99
  # https://docs.streamlit.io/develop/api-reference/chat/st.chat_message
100
 
101
- for msg in st.session_state['MEMORY'].chat_memory.messages:
102
 
103
- if (isinstance(msg, HumanMessage)):
104
- st.chat_message('user').write(msg.content)
105
- elif (isinstance(msg, AIMessage)):
106
- st.chat_message('ai').write(msg.content)
107
- else:
108
- print('System message: ', msg.content)
109
-
 
 
 
110
 
111
- ### 4. Create the *chat_input* element to get the user query
112
- # Interface for user input
113
- prompt = st.chat_input(placeholder='Your input here')
114
 
115
- ### 5. Process the query received from user
116
- if prompt and openai_api_key:
117
 
118
- # Write the user prompt as chat message
119
- st.chat_message('user').write(prompt)
120
 
121
- # Invoke the LLM
122
- response = get_llm_response(prompt)
123
 
124
- # Write the response as chat_message
125
- st.chat_message('ai').write(response['response'])
126
 
127
- ### 6. Write out the current content of the context
128
- st.divider()
129
- st.subheader('Context/Summary:')
130
 
131
- # Print the state of the buffer
132
- st.session_state['MEMORY'].buffer
 
16
  # Without this you will need to copy/paste the API key with every change
17
  try:
18
  # CHANGE the location of the file
19
+ load_dotenv('C:\\Users\\raj\\.jupyter\\.env1')
20
  # Add the API key to the session - use it for populating the interface
21
  if os.getenv('OPENAI_API_KEY'):
22
  st.session_state['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY')
 
35
  else:
36
  openai_api_key = st.sidebar.text_input('OpenAI API key',placeholder='copy & paste your OpenAI API key')
37
 
38
+ if len(openai_api_key) == 0 :
39
+ "provide valid OpenAI API Key !!"
40
+ st.stop()
41
+
42
+ # print(type(openai_api_key))
43
+ # st.stop()
44
  ### 2. Define utility functions to invoke the LLM
45
 
46
  # Create an instance of the LLM for summarization
 
89
  return response
90
 
91
  # Initialize the session state memory
92
+ if 'MEMORY' not in st.session_state :
93
  memory = ConversationSummaryMemory(
94
  llm = get_summarization_llm(),
95
  human_prefix='user',
 
104
  # This is needed as streamlit re-runs the entire script when user provides input in a widget
105
  # https://docs.streamlit.io/develop/api-reference/chat/st.chat_message
106
 
 
107
 
108
+ if openai_api_key is not None:
109
+ for msg in st.session_state['MEMORY'].chat_memory.messages:
110
+
111
+ if (isinstance(msg, HumanMessage)):
112
+ st.chat_message('user').write(msg.content)
113
+ elif (isinstance(msg, AIMessage)):
114
+ st.chat_message('ai').write(msg.content)
115
+ else:
116
+ print('System message: ', msg.content)
117
+
118
 
119
+ ### 4. Create the *chat_input* element to get the user query
120
+ # Interface for user input
121
+ prompt = st.chat_input(placeholder='Your input here')
122
 
123
+ ### 5. Process the query received from user
124
+ if prompt and openai_api_key:
125
 
126
+ # Write the user prompt as chat message
127
+ st.chat_message('user').write(prompt)
128
 
129
+ # Invoke the LLM
130
+ response = get_llm_response(prompt)
131
 
132
+ # Write the response as chat_message
133
+ st.chat_message('ai').write(response['response'])
134
 
135
+ ### 6. Write out the current content of the context
136
+ st.divider()
137
+ st.subheader('Context/Summary:')
138
 
139
+ # Print the state of the buffer
140
+ st.session_state['MEMORY'].buffer