| Benchmark | Score | Score | Accuracy | Device | Device |
| --- | --- | --- | --- | --- | --- |
| Inference Score | 456 | 456 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Image Classification (F32) | 304 | 304 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Image Classification (F16) | 502 | 502 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Image Classification (I8) | 484 | 484 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Image Segmentation (F32) | 636 | 636 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Image Segmentation (F16) | 1142 | 1142 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Image Segmentation (I8) | 1144 | 1144 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Pose Estimation (F32) | 221 | 221 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Pose Estimation (F16) | 502 | 502 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Pose Estimation (I8) | 507 | 507 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Object Detection (F32) | 252 | 252 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Object Detection (F16) | 395 | 395 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Object Detection (I8) | 379 | 379 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Face Detection (F32) | 614 | 614 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Face Detection (F16) | 1107 | 1107 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Face Detection (I8) | 1102 | 1102 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Text Classification (F32) | 385 | 385 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Text Classification (F16) | 292 | 292 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Machine Translation (F32) | 176 | 176 | 100.0% | samsung SM-M166P | samsung SM-M166P |
| Machine Translation (F16) | 174 | 174 | 100.0% | samsung SM-M166P | samsung SM-M166P |