Spaces:
Sleeping
Sleeping
Update appStore/rag.py
Browse files- appStore/rag.py +3 -3
appStore/rag.py
CHANGED
@@ -210,15 +210,15 @@ def run_query(context, label):
|
|
210 |
)
|
211 |
|
212 |
# iterate and print stream
|
213 |
-
for message in chat_completion:
|
214 |
-
|
215 |
|
216 |
# instantiate ChatCompletion as a generator object (stream is set to True)
|
217 |
# response = completion_with_backoff(model=model_select, messages=[{"role": "user", "content": get_prompt(context, label)}], stream=True)
|
218 |
# iterate through the streamed output
|
219 |
report = []
|
220 |
res_box = st.empty()
|
221 |
-
for chunk in
|
222 |
# extract the object containing the text (totally different structure when streaming)
|
223 |
chunk_message = chunk['choices'][0]['delta']
|
224 |
# test to make sure there is text in the object (some don't have)
|
|
|
210 |
)
|
211 |
|
212 |
# iterate and print stream
|
213 |
+
# for message in chat_completion:
|
214 |
+
# print(message.choices[0].delta.content, end="")
|
215 |
|
216 |
# instantiate ChatCompletion as a generator object (stream is set to True)
|
217 |
# response = completion_with_backoff(model=model_select, messages=[{"role": "user", "content": get_prompt(context, label)}], stream=True)
|
218 |
# iterate through the streamed output
|
219 |
report = []
|
220 |
res_box = st.empty()
|
221 |
+
for chunk in client.chat_completion(messages, max_tokens=10, stream=True):
|
222 |
# extract the object containing the text (totally different structure when streaming)
|
223 |
chunk_message = chunk['choices'][0]['delta']
|
224 |
# test to make sure there is text in the object (some don't have)
|