Inference Score | 469 | 469 | 100.0% | samsung SM-M166P | samsung SM-M166P
Image Classification (F32) | 304 | 304 | 100.0% | samsung SM-M166P | samsung SM-M166P
Image Classification (F16) | 572 | 572 | 100.0% | samsung SM-M166P | samsung SM-M166P
Image Classification (I8) | 538 | 538 | 100.0% | samsung SM-M166P | samsung SM-M166P
Image Segmentation (F32) | 615 | 615 | 100.0% | samsung SM-M166P | samsung SM-M166P
Image Segmentation (F16) | 1198 | 1198 | 100.0% | samsung SM-M166P | samsung SM-M166P
Image Segmentation (I8) | 1234 | 1234 | 100.0% | samsung SM-M166P | samsung SM-M166P
Pose Estimation (F32) | 239 | 239 | 100.0% | samsung SM-M166P | samsung SM-M166P
Pose Estimation (F16) | 572 | 572 | 100.0% | samsung SM-M166P | samsung SM-M166P
Pose Estimation (I8) | 563 | 563 | 100.0% | samsung SM-M166P | samsung SM-M166P
Object Detection (F32) | 287 | 287 | 100.0% | samsung SM-M166P | samsung SM-M166P
Object Detection (F16) | 442 | 442 | 100.0% | samsung SM-M166P | samsung SM-M166P
Object Detection (I8) | 416 | 416 | 100.0% | samsung SM-M166P | samsung SM-M166P
Face Detection (F32) | 672 | 672 | 100.0% | samsung SM-M166P | samsung SM-M166P
Face Detection (F16) | 1160 | 1160 | 100.0% | samsung SM-M166P | samsung SM-M166P
Face Detection (I8) | 1077 | 1077 | 100.0% | samsung SM-M166P | samsung SM-M166P
Text Classification (F32) | 230 | 230 | 100.0% | samsung SM-M166P | samsung SM-M166P
Text Classification (F16) | 274 | 274 | 100.0% | samsung SM-M166P | samsung SM-M166P
Machine Translation (F32) | 182 | 182 | 100.0% | samsung SM-M166P | samsung SM-M166P
Machine Translation (F16) | 172 | 172 | 100.0% | samsung SM-M166P | samsung SM-M166P