Update app.py
app.py CHANGED
@@ -84,9 +84,8 @@ def detect(img,model):
     #model = attempt_load(weights, map_location=device)  # load FP32 model
     #stride = int(model.stride.max())  # model stride
     #imgsz = check_img_size(imgsz, s=stride)  # check img_size
-    print(weights)
     if weights == 'yolopv2.pt':
-
+
         stride =32
         model = torch.jit.load(weights,map_location=device)
         model.eval()
@@ -99,6 +98,7 @@ def detect(img,model):
 
     t0 = time.time()
     for path, img, im0s, vid_cap in dataset:
+        print(img.shape)
         img = torch.from_numpy(img).to(device)
         img = img.half() if half else img.float()  # uint8 to fp16/32
         img /= 255.0  # 0 - 255 to 0.0 - 1.0
@@ -197,6 +197,7 @@ def detect(img,model):
 
 
     for i, (path, img, img_det, vid_cap,shapes) in tqdm(enumerate(dataset),total = len(dataset)):
+        print(img.shape)
         img = transform(img).to(device)
         img = img.half() if half else img.float()  # uint8 to fp16/32
         if img.ndimension() == 3:
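The net effect of the commit is small: the stray print(weights) is dropped, and a print(img.shape) debug line is added at the top of both inference loops, just before each frame enters preprocessing. For readers following along, here is a minimal self-contained sketch of the preprocessing step those prints instrument; the dummy frame, its 640x640 size, and the CPU device are illustrative assumptions, not values taken from app.py.

import numpy as np
import torch

# Illustrative stand-ins: app.py gets these from its dataset loader and
# runtime options; the 640x640 letterboxed size is an assumption.
device = torch.device('cpu')
half = False  # .half() (fp16) is only worthwhile on GPU

frame = np.zeros((3, 640, 640), dtype=np.uint8)  # letterboxed CHW frame
print(frame.shape)  # the added debug line fires here, on the raw array

img = torch.from_numpy(frame).to(device)
img = img.half() if half else img.float()  # uint8 to fp16/32
img /= 255.0  # 0 - 255 to 0.0 - 1.0
if img.ndimension() == 3:
    img = img.unsqueeze(0)  # add a batch dimension: CHW -> 1xCHW

Since the print happens before torch.from_numpy / transform, the shape it reports is that of the raw array handed over by the loader, which is exactly what you want when debugging a size mismatch between the dataset pipeline and the model input.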