| Workload | samsung SM-M166P | samsung SM-M166P | Accuracy |
| --- | --- | --- | --- |
| Inference Score | 480 | 480 | 100.0% |
| Image Classification (F32) | 307 | 307 | 100.0% |
| Image Classification (F16) | 550 | 550 | 100.0% |
| Image Classification (I8) | 522 | 522 | 100.0% |
| Image Segmentation (F32) | 645 | 645 | 100.0% |
| Image Segmentation (F16) | 1235 | 1235 | 100.0% |
| Image Segmentation (I8) | 1227 | 1227 | 100.0% |
| Pose Estimation (F32) | 246 | 246 | 100.0% |
| Pose Estimation (F16) | 552 | 552 | 100.0% |
| Pose Estimation (I8) | 528 | 528 | 100.0% |
| Object Detection (F32) | 291 | 291 | 100.0% |
| Object Detection (F16) | 435 | 435 | 100.0% |
| Object Detection (I8) | 436 | 436 | 100.0% |
| Face Detection (F32) | 640 | 640 | 100.0% |
| Face Detection (F16) | 1166 | 1166 | 100.0% |
| Face Detection (I8) | 1084 | 1084 | 100.0% |
| Text Classification (F32) | 199 | 199 | 100.0% |
| Text Classification (F16) | 405 | 405 | 100.0% |
| Machine Translation (F32) | 198 | 198 | 100.0% |
| Machine Translation (F16) | 203 | 203 | 100.0% |