| Benchmark | Samsung SM-M166P | Samsung SM-A166P | Ratio (M166P ÷ A166P) |
|---|---|---|---|
| Inference Score | 214 | 201 | 106.5% |
| Image Classification (F32) | 96 | 92 | 104.3% |
| Image Classification (F16) | 94 | 87 | 108.0% |
| Image Classification (I8) | 149 | 136 | 109.6% |
| Image Segmentation (F32) | 359 | 344 | 104.4% |
| Image Segmentation (F16) | 351 | 348 | 100.9% |
| Image Segmentation (I8) | 549 | 534 | 102.8% |
| Pose Estimation (F32) | 134 | 136 | 98.5% |
| Pose Estimation (F16) | 141 | 134 | 105.2% |
| Pose Estimation (I8) | 141 | 139 | 101.4% |
| Object Detection (F32) | 157 | 147 | 106.8% |
| Object Detection (F16) | 158 | 146 | 108.2% |
| Object Detection (I8) | 244 | 214 | 114.0% |
| Face Detection (F32) | 303 | 297 | 102.0% |
| Face Detection (F16) | 305 | 298 | 102.3% |
| Face Detection (I8) | 373 | 325 | 114.8% |
| Text Classification (F32) | 286 | 262 | 109.2% |
| Text Classification (F16) | 290 | 262 | 110.7% |
| Machine Translation (F32) | 212 | 198 | 107.1% |
| Machine Translation (F16) | 209 | 188 | 111.2% |
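The last column appears to be the SM-M166P score expressed as a percentage of the SM-A166P score, rounded to one decimal place; the figures in the table match that calculation. A minimal sketch of the computation, using a few rows from the table above as sample data:

```python
# Sketch: reproduce the ratio column as SM-M166P / SM-A166P * 100,
# rounded to one decimal place (assumption based on the table values).
scores = {
    "Inference Score": (214, 201),
    "Object Detection (I8)": (244, 214),
    "Face Detection (I8)": (373, 325),
}

for benchmark, (m166p, a166p) in scores.items():
    ratio = m166p / a166p * 100
    print(f"{benchmark}: {ratio:.1f}%")
    # e.g. "Inference Score: 106.5%", matching the table
```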