Commit · 430d29a
1 Parent(s): 17a1863
Remove redundant pixel values logging and adjust message handling in predict function
app.py CHANGED
@@ -217,18 +217,20 @@ def predict(state,
     pixel_values = None
     if image_path is not None:
         pixel_values = load_image(image_path, max_num=max_input_tiles).to(torch.bfloat16).cuda()
+
     if pixel_values is not None:
+        logger.info(f"==== Lenght Pixel values ====\n{len(pixel_values)}")
+
         # Check the first user message to see if it is an image
         index, first_user_message = state.get_user_message(source=state.USER, position='first')
         if first_user_message is not None and \
                 DEFAULT_IMAGE_TOKEN not in first_user_message:
             state.update_message(state.USER, DEFAULT_IMAGE_TOKEN + "\n" + first_user_message, None, index)
-
+
     history = state.get_history()
     logger.info(f"==== History ====\n{history}")
     _, message = state.get_user_message(source=state.USER, position='last')
 
-    logger.info(f"==== Lenght Pixel values ====\n{len(pixel_values)}")
 
     response, conv_history = model.chat(tokenizer,
                                         pixel_values,
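
For context, a minimal sketch of what this change guards against. The function name, loading step, and log text below are simplified stand-ins rather than the exact app.py code: logging len(pixel_values) outside the None check raises a TypeError on text-only turns, so the commit moves the log inside the guard.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def predict_sketch(image_path=None):
    # Stand-in for load_image(...).to(torch.bfloat16).cuda() in app.py:
    # pixel_values stays None on text-only turns.
    pixel_values = None
    if image_path is not None:
        pixel_values = list(range(4))  # placeholder for the loaded tile batch

    if pixel_values is not None:
        # Logged only when an image was loaded; len(None) would raise TypeError.
        logger.info(f"==== Length Pixel values ====\n{len(pixel_values)}")
    return pixel_values

predict_sketch()             # text-only turn: no log, no crash
predict_sketch("image.png")  # image turn: logs the number of loaded tiles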