| Workload | samsung SM-M166P | samsung SM-M166P | Ratio |
| --- | ---: | ---: | ---: |
| Inference Score | 489 | 489 | 100.0% |
| Image Classification (F32) | 316 | 316 | 100.0% |
| Image Classification (F16) | 572 | 572 | 100.0% |
| Image Classification (I8) | 539 | 539 | 100.0% |
| Image Segmentation (F32) | 634 | 634 | 100.0% |
| Image Segmentation (F16) | 1163 | 1163 | 100.0% |
| Image Segmentation (I8) | 1194 | 1194 | 100.0% |
| Pose Estimation (F32) | 233 | 233 | 100.0% |
| Pose Estimation (F16) | 552 | 552 | 100.0% |
| Pose Estimation (I8) | 523 | 523 | 100.0% |
| Object Detection (F32) | 276 | 276 | 100.0% |
| Object Detection (F16) | 425 | 425 | 100.0% |
| Object Detection (I8) | 430 | 430 | 100.0% |
| Face Detection (F32) | 647 | 647 | 100.0% |
| Face Detection (F16) | 1188 | 1188 | 100.0% |
| Face Detection (I8) | 1101 | 1101 | 100.0% |
| Text Classification (F32) | 327 | 327 | 100.0% |
| Text Classification (F16) | 327 | 327 | 100.0% |
| Machine Translation (F32) | 219 | 219 | 100.0% |
| Machine Translation (F16) | 217 | 217 | 100.0% |
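The Ratio column appears to be the left score divided by the right score, expressed as a percentage; since both results come from the same device model with identical scores, every row works out to 100.0%. A minimal sketch of that assumed calculation, using a few representative rows from the table:

```python
# Sketch only: the "left score / right score * 100" interpretation of the
# Ratio column is an assumption, not stated explicitly in the source table.

rows = [
    # (workload, left score, right score) -- representative rows
    ("Inference Score", 489, 489),
    ("Image Segmentation (F16)", 1163, 1163),
    ("Machine Translation (F16)", 217, 217),
]

for workload, left, right in rows:
    ratio = left / right * 100  # percentage of the left result relative to the right
    print(f"{workload}: {ratio:.1f}%")
```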