Spaces: AlshimaaGamalAlsaied (Sleeping)
Commit d9cdee7, committed by AlshimaaGamalAlsaied
Parent(s): b85ecdd
SEE with yolov7
app.py CHANGED

@@ -94,11 +94,10 @@ image_interface = gr.Interface(
         gr.inputs.Image(type="pil", label="Input Image"),
         gr.inputs.Dropdown(
             choices=[
-                "alshimaa/
-                "alshimaa/model_yolo7",
+                "alshimaa/yolov7",
                 #"kadirnar/yolov7-v0.1",
             ],
-            default="alshimaa/
+            default="alshimaa/yolov7",
             label="Model",
         )
         #gr.inputs.Slider(minimum=320, maximum=1280, default=640, step=32, label="Image Size")
@@ -107,7 +106,7 @@ image_interface = gr.Interface(
     ],
     outputs=gr.outputs.Image(type="filepath", label="Output Image"),
     title="Smart Environmental Eye (SEE)",
-    examples=[['image1.jpg', 'alshimaa/
+    examples=[['image1.jpg', 'alshimaa/yolov7', 640, 0.25, 0.45], ['image2.jpg', 'alshimaa/yolov7', 640, 0.25, 0.45], ['image3.jpg', 'alshimaa/yolov7', 640, 0.25, 0.45]],
     cache_examples=True,
     theme='huggingface',
 )
@@ -119,11 +118,10 @@ video_interface = gr.Interface(
         gr.inputs.Video(source = "upload", type = "mp4", label = "Input Video"),
         gr.inputs.Dropdown(
             choices=[
-                "alshimaa/
-                "alshimaa/model_yolo7",
+                "alshimaa/yolov7",
                 #"kadirnar/yolov7-v0.1",
             ],
-            default="alshimaa/
+            default="alshimaa/yolov7",
             label="Model",
         ),
     ],
@@ -143,108 +141,3 @@ if __name__ == "__main__":
         [image_interface, video_interface],
         ["Run on Images", "Run on Videos"],
     ).launch()
-
-# import subprocess
-# import tempfile
-# import time
-# from pathlib import Path
-
-# import cv2
-# import gradio as gr
-
-# from inferer import Inferer
-
-# pipeline = Inferer("alshimaa/model_yolo7", device='cuda')
-
-
-# def fn_image(image, conf_thres, iou_thres):
-#     return pipeline(image, conf_thres, iou_thres)
-
-
-# def fn_video(video_file, conf_thres, iou_thres, start_sec, duration):
-#     start_timestamp = time.strftime("%H:%M:%S", time.gmtime(start_sec))
-#     end_timestamp = time.strftime("%H:%M:%S", time.gmtime(start_sec + duration))
-
-#     suffix = Path(video_file).suffix
-
-#     clip_temp_file = tempfile.NamedTemporaryFile(suffix=suffix)
-#     subprocess.call(
-#         f"ffmpeg -y -ss {start_timestamp} -i {video_file} -to {end_timestamp} -c copy {clip_temp_file.name}".split()
-#     )
-
-#     # Reader of clip file
-#     cap = cv2.VideoCapture(clip_temp_file.name)
-
-#     # This is an intermediary temp file where we'll write the video to
-#     # Unfortunately, gradio doesn't play too nice with videos rn so we have to do some hackiness
-#     # with ffmpeg at the end of the function here.
-#     with tempfile.NamedTemporaryFile(suffix=".mp4") as temp_file:
-#         out = cv2.VideoWriter(temp_file.name, cv2.VideoWriter_fourcc(*"MP4V"), 30, (1280, 720))
-
-#         num_frames = 0
-#         max_frames = duration * 30
-#         while cap.isOpened():
-#             try:
-#                 ret, frame = cap.read()
-#                 if not ret:
-#                     break
-#             except Exception as e:
-#                 print(e)
-#                 continue
-#             print("FRAME DTYPE", type(frame))
-#             out.write(pipeline(frame, conf_thres, iou_thres))
-#             num_frames += 1
-#             print("Processed {} frames".format(num_frames))
-#             if num_frames == max_frames:
-#                 break
-
-#         out.release()
-
-#         # Aforementioned hackiness
-#         out_file = tempfile.NamedTemporaryFile(suffix="out.mp4", delete=False)
-#         subprocess.run(f"ffmpeg -y -loglevel quiet -stats -i {temp_file.name} -c:v libx264 {out_file.name}".split())
-
-#     return out_file.name
-
-
-# image_interface = gr.Interface(
-#     fn=fn_image,
-#     inputs=[
-#         "image",
-#         gr.Slider(0, 1, value=0.5, label="Confidence Threshold"),
-#         gr.Slider(0, 1, value=0.5, label="IOU Threshold"),
-#     ],
-#     outputs=gr.Image(type="file"),
-#     examples=[["image1.jpg", 0.5, 0.5], ["image2.jpg", 0.25, 0.45], ["image3.jpg", 0.25, 0.45]],
-#     title="Smart Environmental Eye (SEE)",
-#     allow_flagging=False,
-#     allow_screenshot=False,
-# )
-
-# video_interface = gr.Interface(
-#     fn=fn_video,
-#     inputs=[
-#         gr.Video(type="file"),
-#         gr.Slider(0, 1, value=0.25, label="Confidence Threshold"),
-#         gr.Slider(0, 1, value=0.45, label="IOU Threshold"),
-#         gr.Slider(0, 10, value=0, label="Start Second", step=1),
-#         gr.Slider(0, 10 if pipeline.device.type != 'cpu' else 3, value=4, label="Duration", step=1),
-#     ],
-#     outputs=gr.Video(type="file", format="mp4"),
-#     # examples=[
-#     #     ["video.mp4", 0.25, 0.45, 0, 2],
-
-#     # ],
-#     title="Smart Environmental Eye (SEE)",
-#     allow_flagging=False,
-#     allow_screenshot=False,
-# )
-
-
-
-# if __name__ == "__main__":
-#     gr.TabbedInterface(
-#         [image_interface, video_interface],
-#         ["Run on Images", "Run on Videos"],
-#     ).launch()
-
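For readers who want the change in context rather than as hunks, the image tab of app.py after this commit can be sketched as follows. This is a minimal reconstruction, not the actual file: the yolov7_inference wrapper name and the three slider inputs (image size, confidence, IOU) are assumptions inferred from the five-value examples entries and the commented-out size slider above, and the snippet keeps the Space's legacy gr.inputs/gr.outputs API.

    # Sketch of the image interface as configured after this commit.
    # Names marked "assumed" are not visible in the diff and are illustrative only.
    import gradio as gr


    def yolov7_inference(image, model_path, image_size, conf_threshold, iou_threshold):
        # Assumed wrapper: the real implementation lives elsewhere in app.py and
        # runs the selected YOLOv7 checkpoint on the input image, returning the
        # path of the annotated output image.
        raise NotImplementedError


    image_interface = gr.Interface(
        fn=yolov7_inference,
        inputs=[
            gr.inputs.Image(type="pil", label="Input Image"),
            gr.inputs.Dropdown(
                choices=[
                    "alshimaa/yolov7",
                    #"kadirnar/yolov7-v0.1",
                ],
                default="alshimaa/yolov7",
                label="Model",
            ),
            # Assumed sliders matching the 640 / 0.25 / 0.45 values in the examples:
            gr.inputs.Slider(minimum=320, maximum=1280, default=640, step=32, label="Image Size"),
            gr.inputs.Slider(minimum=0.0, maximum=1.0, default=0.25, step=0.05, label="Confidence Threshold"),
            gr.inputs.Slider(minimum=0.0, maximum=1.0, default=0.45, step=0.05, label="IOU Threshold"),
        ],
        outputs=gr.outputs.Image(type="filepath", label="Output Image"),
        title="Smart Environmental Eye (SEE)",
        examples=[["image1.jpg", "alshimaa/yolov7", 640, 0.25, 0.45]],
        cache_examples=False,  # True in the Space; disabled here since the example image may be absent locally
        theme="huggingface",
    )

    if __name__ == "__main__":
        image_interface.launch()

The video tab mirrors this layout, with gr.inputs.Video as the first input and the same model dropdown, as shown in the third hunk above.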
yolov7 CHANGED

@@ -1 +1 @@
-Subproject commit
+Subproject commit 139f2ba03d7ed9965695775507ecb8fb7ca2b326