| Workload | samsung SM-M166P | samsung SM-M166P | Ratio |
|---|---|---|---|
| Inference Score | 497 | 497 | 100.0% |
| Image Classification (F32) | 326 | 326 | 100.0% |
| Image Classification (F16) | 592 | 592 | 100.0% |
| Image Classification (I8) | 564 | 564 | 100.0% |
| Image Segmentation (F32) | 640 | 640 | 100.0% |
| Image Segmentation (F16) | 1215 | 1215 | 100.0% |
| Image Segmentation (I8) | 1223 | 1223 | 100.0% |
| Pose Estimation (F32) | 217 | 217 | 100.0% |
| Pose Estimation (F16) | 551 | 551 | 100.0% |
| Pose Estimation (I8) | 554 | 554 | 100.0% |
| Object Detection (F32) | 294 | 294 | 100.0% |
| Object Detection (F16) | 462 | 462 | 100.0% |
| Object Detection (I8) | 441 | 441 | 100.0% |
| Face Detection (F32) | 690 | 690 | 100.0% |
| Face Detection (F16) | 1326 | 1326 | 100.0% |
| Face Detection (I8) | 1185 | 1185 | 100.0% |
| Text Classification (F32) | 408 | 408 | 100.0% |
| Text Classification (F16) | 343 | 343 | 100.0% |
| Machine Translation (F32) | 162 | 162 | 100.0% |
| Machine Translation (F16) | 170 | 170 | 100.0% |
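The percentage column appears to be the ratio of the two scores; since both samsung SM-M166P runs report identical results, every row shows 100.0%. Below is a minimal sketch of that calculation; the dictionary names and the ratio formula are assumptions for illustration, not part of the original results page.

```python
# Minimal sketch: compute the percentage column as a ratio of the two score
# columns. The formula (score_b / score_a * 100) is an assumption; it is
# consistent with the 100.0% shown in every row, since both runs report
# identical scores.
scores_a = {"Inference Score": 497, "Image Classification (F32)": 326}
scores_b = {"Inference Score": 497, "Image Classification (F32)": 326}

for workload, a in scores_a.items():
    ratio = scores_b[workload] / a * 100
    print(f"{workload}: {ratio:.1f}%")  # -> 100.0% for each workload above
```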