chore: format comment
app.py (changed)
@@ -5,7 +5,7 @@ import spaces
 import subprocess
 
 # from issue: https://discuss.huggingface.co/t/how-to-install-flash-attention-on-hf-gradio-space/70698/2
-# InternVL2
+# InternVL2 needs flash_attn
 subprocess.run(
     "pip install flash-attn --no-build-isolation",
     env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
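The hunk cuts off before subprocess.run() is closed. A minimal, self-contained sketch of this startup step, with the closing arguments (shell=True plus an os.environ merge) added as assumptions rather than read from the Space:

import os
import subprocess

# InternVL2 needs flash_attn
# FLASH_ATTENTION_SKIP_CUDA_BUILD=TRUE asks flash-attn's setup to skip compiling its
# CUDA extension at install time -- the workaround from the forum thread linked above.
subprocess.run(
    "pip install flash-attn --no-build-isolation",
    env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},  # assumption: the diff only shows the extra variable
    shell=True,  # assumption: the call's closing arguments are not visible in this hunk
)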
@@ -42,9 +42,7 @@ def predict(input_img, questions):
         predictions = inference(question=questions, image=input_img)
         return str(predictions)
     except Exception as e:
-        # 捕获异常,并将错误信息转换为字符串
         error_message = "❌" + str(e)
-        # 抛出gradio.Error来展示错误弹窗
         raise gr.Error(error_message, duration=25)
 
 
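The two deleted comments said, roughly, "catch the exception and convert the error message to a string" and "raise gradio.Error to show the error popup". A self-contained sketch of the resulting handler; inference() is stubbed out here because its real definition is elsewhere in app.py, and gr.Error's duration argument needs a recent Gradio 4.x:

import gradio as gr

def inference(question, image):
    # Stand-in only: the Space's real model call is defined elsewhere in app.py.
    return f"echo: {question}"

def predict(input_img, questions):
    try:
        predictions = inference(question=questions, image=input_img)
        return str(predictions)
    except Exception as e:
        # Turn the failure into a Gradio error toast instead of an unhandled traceback.
        error_message = "❌" + str(e)
        raise gr.Error(error_message, duration=25)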
@@ -55,7 +53,7 @@ gradio_app = gr.Interface(
         "text",
     ],
     outputs="text",
-    title=
+    title='ask me anything',
 )
 
 if __name__ == "__main__":
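Only the tail of the gr.Interface(...) call is visible in this hunk. A sketch of the full wiring under stated assumptions: fn=predict (the handler sketched above), an image component as the first input, and a plain launch() under the main guard are guesses, not taken from the Space:

import gradio as gr

gradio_app = gr.Interface(
    fn=predict,                # assumption: the handler from the previous sketch
    inputs=[
        gr.Image(type="pil"),  # assumption: only the trailing "text" input appears in the diff
        "text",
    ],
    outputs="text",
    title='ask me anything',
)

if __name__ == "__main__":
    gradio_app.launch()  # assumption: the body of the main guard is not shown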