Daniel Cerda Escobar committed
Commit 53b109e · 1 Parent(s): ca59b6a

Update files

Files changed (2):
  1. app.py +3 -3
  2. utils.py +1 -1
app.py CHANGED
@@ -92,21 +92,21 @@ with col2:
 with col3:
     st.markdown('##### Set model parameters')
     slice_size = st.slider(
-        label = 'Select Slice Size',
+        label = 'Slice Size',
         min_value=256,
         max_value=1024,
         value=768,
         step=256
     )
     overlap_ratio = st.slider(
-        label = 'Select Overlap Ratio',
+        label = 'Overlap Ratio',
         min_value=0.0,
         max_value=0.5,
         value=0.1,
         step=0.1
     )
     postprocess_match_threshold = st.slider(
-        label = 'Select Confidence Threshold',
+        label = 'Confidence Threshold',
         min_value = 0.0,
         max_value = 1.0,
         value = 0.75,
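Taken together, the app.py hunk only drops the 'Select ' prefix from each slider label. Below is a minimal runnable sketch of how the patched `with col3:` block might sit in the app after this commit; the `st.columns(3)` layout and the closing arguments of the third slider are assumptions, since they fall outside the hunk.

import streamlit as st

# Hypothetical three-column layout; only the parameter column is sketched here.
col1, col2, col3 = st.columns(3)

with col3:
    st.markdown('##### Set model parameters')
    # Tile edge length (pixels) intended for SAHI sliced inference.
    slice_size = st.slider(
        label='Slice Size',
        min_value=256,
        max_value=1024,
        value=768,
        step=256
    )
    # Fractional overlap between adjacent tiles.
    overlap_ratio = st.slider(
        label='Overlap Ratio',
        min_value=0.0,
        max_value=0.5,
        value=0.1,
        step=0.1
    )
    # Minimum confidence kept when merging predictions; the arguments after
    # value=0.75 fall outside the hunk, so only the shown values appear here.
    postprocess_match_threshold = st.slider(
        label='Confidence Threshold',
        min_value=0.0,
        max_value=1.0,
        value=0.75
    )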
utils.py CHANGED
@@ -30,7 +30,7 @@ def sahi_yolov8m_inference(
         image=numpy.array(image),
         object_prediction_list=prediction_result.object_prediction_list,
         rect_th=2,
-        text_size=0.5
+        text_size=1
     )

     output = Image.fromarray(visual_result["image"])
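The utils.py change only bumps the label text size passed to SAHI's drawing helper from 0.5 to 1. Below is a minimal sketch of how that call might sit inside the inference helper named in the hunk header; the function signature and the upstream get_sliced_prediction call are assumptions for illustration, not part of the diff.

import numpy
from PIL import Image
from sahi.predict import get_sliced_prediction
from sahi.utils.cv import visualize_object_predictions

def sahi_yolov8m_inference(image, detection_model,
                           slice_height=768, slice_width=768,
                           overlap_height_ratio=0.1, overlap_width_ratio=0.1,
                           postprocess_match_threshold=0.75):
    # Run sliced (SAHI) inference: the image is tiled, each tile is predicted,
    # and per-tile detections are merged back into full-image coordinates.
    prediction_result = get_sliced_prediction(
        image=image,
        detection_model=detection_model,
        slice_height=slice_height,
        slice_width=slice_width,
        overlap_height_ratio=overlap_height_ratio,
        overlap_width_ratio=overlap_width_ratio,
        postprocess_match_threshold=postprocess_match_threshold,
    )
    # Draw boxes and labels; text_size=1 is the value set by this commit.
    visual_result = visualize_object_predictions(
        image=numpy.array(image),
        object_prediction_list=prediction_result.object_prediction_list,
        rect_th=2,
        text_size=1
    )
    # visualize_object_predictions returns a dict whose "image" entry holds the
    # annotated numpy array, converted back to a PIL image here.
    output = Image.fromarray(visual_result["image"])
    return output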