garyd1 committed
Commit 6a2930d · verified · 1 Parent(s): 3d3ad6c

Beta version 5

Files changed (1): app.py +23 -13
app.py CHANGED
@@ -13,10 +13,13 @@ from langchain.schema import HumanMessage, SystemMessage, AIMessage
 st.set_page_config(page_title = "Magical Healer")
 st.header("Welcome, How can I help you?")
 
-#General Instruction
+
+class AIMessage(BaseModel):
+    content: str
+
 if "sessionMessages" not in st.session_state:
     st.session_state["sessionMessages"]=[]
-
+#General Instruction
 if "sessionMessages" not in st.session_state:
     st.session_state.sessionMessage=[
         SystemMessage(content="You are a medievel magical healer known for your peculiar sarcasm")
@@ -34,20 +37,27 @@ llm = ChatGoogleGenerativeAI(
 )
 
 #User message
-def get_text():
-    input_text=st.text_input("You: ", key=input)
-    return input_text
-
-#Working with Responses
 def load_answer(question):
-    #This is code, where we are adding new message to the model
     st.session_state.sessionMessages.append(HumanMessage(content=question))
-    #We will get output from the model
-    assistant_answer=llm.invoke(st.session_state.sessionMessages)
-    #Appending the assistance answer in conversation
-    st.session_state.sessionMessages.append(AIMessage(content = assistant_answer))
+    assistant_response = llm.invoke(st.session_state.sessionMessages)
+
+    # Assuming assistant_response is an object with a 'content' attribute
+    if hasattr(assistant_response, 'content') and isinstance(assistant_response.content, str):
+        processed_content = assistant_response.content
+        st.session_state.sessionMessages.append(AIMessage(content=processed_content))
+    else:
+        st.error("Invalid response received from AI.")
+        processed_content = "Sorry, I couldn't process your request."
+
+    return processed_content
 
-    return assistant_answer.content
+#Working with Responses
+#def load_answer(question):
+#
+#    st.session_state.sessionMessages.append(HumanMessage(content=question))
+#    assistant_answer=llm.invoke(st.session_state.sessionMessages)
+#    st.session_state.sessionMessages.append(AIMessage(content = assistant_answer))
+#    return assistant_answer.content
 
 def get_text():
     input_text=st.text_input("You: ", key=input)
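
For reference, here is a minimal, self-contained sketch of how the revised load_answer could be wired into app.py end to end. It is a sketch under assumptions, not the committed file: the ChatGoogleGenerativeAI import path and the "gemini-pro" model name are placeholders (the commit only shows llm = ChatGoogleGenerativeAI(...)), the text_input key is a string rather than the built-in input used in the diff, the two session-state checks are collapsed into one so the SystemMessage is actually seeded, and the history entry uses langchain.schema.AIMessage rather than the commit's local pydantic AIMessage, which shadows the imported class (and would also need "from pydantic import BaseModel") and may not be accepted by llm.invoke on a later turn.

# Sketch only: assumes streamlit, langchain and langchain-google-genai are installed
# and that GOOGLE_API_KEY is available in the environment.
import streamlit as st
from langchain_google_genai import ChatGoogleGenerativeAI  # assumed import path
from langchain.schema import HumanMessage, SystemMessage, AIMessage

st.set_page_config(page_title="Magical Healer")
st.header("Welcome, How can I help you?")

# Seed the conversation once per session with the system instruction.
if "sessionMessages" not in st.session_state:
    st.session_state.sessionMessages = [
        SystemMessage(content="You are a medieval magical healer known for your peculiar sarcasm")
    ]

llm = ChatGoogleGenerativeAI(model="gemini-pro")  # placeholder model name

def load_answer(question):
    # Add the user's message to the running history and query the model.
    st.session_state.sessionMessages.append(HumanMessage(content=question))
    assistant_response = llm.invoke(st.session_state.sessionMessages)

    # Guard, as in the commit, against a response without a string `content`.
    if hasattr(assistant_response, "content") and isinstance(assistant_response.content, str):
        processed_content = assistant_response.content
        st.session_state.sessionMessages.append(AIMessage(content=processed_content))
    else:
        st.error("Invalid response received from AI.")
        processed_content = "Sorry, I couldn't process your request."

    return processed_content

def get_text():
    # String key; the diff passes key=input, i.e. the Python built-in.
    return st.text_input("You: ", key="input")

user_input = get_text()
if user_input:
    st.subheader("Answer:")
    st.write(load_answer(user_input))

Run with "streamlit run app.py"; with a valid GOOGLE_API_KEY set, this should reproduce the chat loop the commit is building toward.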