update stream
- chat_history.db +2 -2
- controllers/gra_02_openInterpreter/OpenInterpreter.py +4 -4
- workspace/notes.txt +1 -0
- workspace/sample.py +0 -0
chat_history.db
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:013830f73a6fc4b4a17d1e5780eee991d3ab4b26fcc2f34eeb39a5a70f6351aa
+size 1806336
controllers/gra_02_openInterpreter/OpenInterpreter.py
CHANGED
@@ -128,15 +128,15 @@ def chat_with_interpreter(message, history=None, a=None, b=None, c=None, d=None)
     full_response = ""
     recent_messages = get_recent_messages()
 
-    for role, message_type, content in recent_messages:
-        entry = {"role": role, "type": message_type, "content": content}
-        interpreter.messages.append(entry)
+    #for role, message_type, content in recent_messages:
+    #    entry = {"role": role, "type": message_type, "content": content}
+    #    interpreter.messages.append(entry)
 
     user_entry = {"role": "user", "type": "message", "content": message}
     interpreter.messages.append(user_entry)
     add_message_to_db("user", "message", message)
 
-    for chunk in interpreter.chat(message, display=False, stream=
+    for chunk in interpreter.chat(message, display=False, stream=False):
         if isinstance(chunk, dict):
             full_response = format_response(chunk, full_response)
         else:
workspace/notes.txt
ADDED
@@ -0,0 +1 @@
+Hello, World!
workspace/sample.py
ADDED
File without changes