Spaces: Running on T4
Remove prints #2
by osanseviero - opened

app.py CHANGED
@@ -22,7 +22,6 @@ pipe = pipeline("image-to-text", model=model_id, model_kwargs={"quantization_con
 def extract_response_pairs(text):
     turns = re.split(r'(USER:|ASSISTANT:)', text)[1:]
     turns = [turn.strip() for turn in turns if turn.strip()]
-    print(f"conv turns are {turns[1::2]}")
     conv_list = []
     for i in range(0, len(turns[1::2]), 2):
         if i + 1 < len(turns[1::2]):
@@ -87,7 +86,6 @@ def bot(history_chat, text_input, image,
         chat_state_list[-1][1] += character
         time.sleep(0.05)
         # yield history but with last response being streamed
-        print(chat_state_list)
         yield chat_state_list

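For reference, a minimal standalone sketch of what the first hunk's extract_response_pairs does once the debug print is removed. Only the top of the function appears in the diff, so the conv_list.append pairing, the return, and the sample transcript below are assumptions:

import re

def extract_response_pairs(text):
    # Split on the role markers while keeping them; drop anything before the first marker.
    turns = re.split(r'(USER:|ASSISTANT:)', text)[1:]
    turns = [turn.strip() for turn in turns if turn.strip()]
    conv_list = []
    # turns[1::2] holds the message bodies (the markers sit at the even indices);
    # walk them two at a time to pair each user message with the assistant reply.
    for i in range(0, len(turns[1::2]), 2):
        if i + 1 < len(turns[1::2]):
            # Assumed continuation: the diff context stops at the line above.
            conv_list.append([turns[1::2][i], turns[1::2][i + 1]])
    return conv_list

sample = "USER: What is in the image? ASSISTANT: A cat sleeping on a sofa."
print(extract_response_pairs(sample))
# [['What is in the image?', 'A cat sleeping on a sofa.']]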
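Likewise, a rough sketch of the streaming loop the second hunk lives in. The diff only shows the loop body, so the enclosing for loop, the response string, and the stream_reply name are assumptions; the pattern is the usual Gradio generator approach of yielding the whole chat history after every appended character:

import time

def stream_reply(chat_state_list, response):
    # chat_state_list is the chat history: a list of [user, assistant] pairs,
    # with the last assistant slot starting out empty.
    chat_state_list[-1][1] = ""
    for character in response:
        chat_state_list[-1][1] += character
        time.sleep(0.05)
        # yield history but with last response being streamed
        yield chat_state_list

Each yield hands the updated history back to the Chatbot component, so the removed print only added console noise on every character without changing what the user sees.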