Update app.py
app.py CHANGED
@@ -84,6 +84,9 @@ def detect(img,model):
     #stride = int(model.stride.max())  # model stride
     #imgsz = check_img_size(imgsz, s=stride)  # check img_size
     print(weights)
+    if weights == 'yolop.pt':
+        weights = 'End-to-end.pth'
+
     if weights == 'yolopv2.pt':
         stride =32
         model = torch.jit.load(weights,map_location=device)
@@ -191,4 +194,4 @@ def detect(img,model):
     return Image.fromarray(im0[:,:,::-1])
 
 
-gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>YOLOPv2</a>
+gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>YOLOPv2</a> 🚀: Better, Faster, Stronger for Panoptic driving Perception").launch()
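The first hunk remaps the 'yolop' selection to YOLOP's released checkpoint file, 'End-to-end.pth', before the weight-specific loading branches run (the dropdown value is presumably suffixed with '.pt' elsewhere in detect). A minimal sketch of the resulting selection flow, assuming a hypothetical torch.load fallback for 'End-to-end.pth' since the actual YOLOP loading code sits outside this hunk and is not shown in the diff:

import torch

# Sketch of the weight-selection flow after this commit; not the full app.py.
def load_model(weights, device):
    if weights == 'yolop.pt':
        weights = 'End-to-end.pth'   # remap added by this commit

    if weights == 'yolopv2.pt':
        # YOLOPv2 is distributed as a TorchScript archive, so it can be
        # loaded directly with torch.jit.load, as the diff's context shows.
        model = torch.jit.load(weights, map_location=device)
    else:
        # Assumption: a plain PyTorch checkpoint; how 'End-to-end.pth' is
        # actually consumed lies outside this hunk.
        model = torch.load(weights, map_location=device)
    return model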
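The second hunk completes the previously truncated gr.Interface line, adding the rest of the description and the .launch() call. The same call in keyword form, spread over several lines for readability (an equivalent sketch, not a further change to app.py):

import gradio as gr

gr.Interface(
    fn=detect,  # inference function defined earlier in app.py
    inputs=[gr.Image(type="pil"),
            gr.Dropdown(choices=["yolopv2", "yolop"])],
    outputs=gr.Image(type="pil"),
    title="Yolopv2",
    examples=[["example.jpeg", "yolopv2"]],
    description=("demo for <a href='https://github.com/CAIC-AD/YOLOPv2' "
                 "style='text-decoration: underline' target='_blank'>YOLOPv2</a> "
                 "🚀: Better, Faster, Stronger for Panoptic driving Perception"),
).launch()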