hank1996 committed
Commit f1c5cc4 · 1 Parent(s): f922b4c

Update app.py

Files changed (1)
  app.py  +10 -7
app.py CHANGED
@@ -78,13 +78,16 @@ def detect(img,model):
     nms_time = AverageMeter()
 
     # Load model
-    print(weights)
-    stride =32
-    model = torch.jit.load(weights)
-    print(model)
-
-    model = model.to(device)
-    print(111111111)
+    model = attempt_load(weights, map_location=device)  # load FP32 model
+    stride = int(model.stride.max())  # model stride
+    imgsz = check_img_size(imgsz, s=stride)  # check img_size
+
+    #stride =32
+    #model = torch.jit.load(weights)
+    #print(model)
+
+    #model = model.to(device)
+    #print(111111111)
 
 
     # Set Dataloader
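The hunk above swaps the TorchScript loading path (torch.jit.load) for YOLOv7-style checkpoint loading. A minimal sketch of that loading path follows, assuming the Space vendors YOLOv7's models/experimental.py and utils/general.py; the import paths, checkpoint filename, and default image size are assumptions for illustration, not taken from the diff.

# Sketch only: module paths follow the upstream YOLOv7 layout; the checkpoint
# name and image size below are hypothetical.
import torch
from models.experimental import attempt_load   # assumed YOLOv7-style module path
from utils.general import check_img_size       # assumed YOLOv7-style module path

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
weights = 'yolov7-e6e.pt'   # hypothetical checkpoint matching the dropdown choice
imgsz = 640                 # assumed default input size

model = attempt_load(weights, map_location=device)  # load FP32 checkpoint
stride = int(model.stride.max())                    # model stride
imgsz = check_img_size(imgsz, s=stride)             # round img size to a multiple of stride
model.eval()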
 
@@ -189,4 +192,4 @@ def detect(img,model):
     return Image.fromarray(im0[:,:,::-1])
 
 
-gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2"])], gr.Image(type="pil"),title="Yolopv2",examples=[["horses.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>WongKinYiu/yolov7</a> Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors").launch()
+gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolov7-e6e","yolopv2"])], gr.Image(type="pil"),title="Yolopv2",examples=[["horses.jpeg", "yolov7-e6e"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>WongKinYiu/yolov7</a> Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors").launch()
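The second hunk adds a "yolov7-e6e" option to the model dropdown and points the bundled example at it. Below is a hedged, standalone sketch of the same Gradio wiring (Gradio 3.x-style components, as used in the diff); the detect stub is a placeholder for the app's real function, and horses.jpeg is assumed to sit next to the script.

import gradio as gr
from PIL import Image

def detect(img: Image.Image, model_name: str) -> Image.Image:
    # Placeholder for app.py's real detect(); returns the input unchanged.
    return img

gr.Interface(
    detect,
    inputs=[gr.Image(type="pil"), gr.Dropdown(choices=["yolov7-e6e", "yolopv2"])],
    outputs=gr.Image(type="pil"),
    title="Yolopv2",
    examples=[["horses.jpeg", "yolov7-e6e"]],  # assumes horses.jpeg is present in the repo
    description="Demo for YOLOPv2: https://github.com/CAIC-AD/YOLOPv2",
).launch()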