| Workload | samsung SM-A166P | samsung SM-A166P | Ratio |
|---|---|---|---|
| Inference Score | 467 | 467 | 100.0% |
| Image Classification (F32) | 305 | 305 | 100.0% |
| Image Classification (F16) | 534 | 534 | 100.0% |
| Image Classification (I8) | 507 | 507 | 100.0% |
| Image Segmentation (F32) | 632 | 632 | 100.0% |
| Image Segmentation (F16) | 1159 | 1159 | 100.0% |
| Image Segmentation (I8) | 1156 | 1156 | 100.0% |
| Pose Estimation (F32) | 234 | 234 | 100.0% |
| Pose Estimation (F16) | 519 | 519 | 100.0% |
| Pose Estimation (I8) | 515 | 515 | 100.0% |
| Object Detection (F32) | 267 | 267 | 100.0% |
| Object Detection (F16) | 435 | 435 | 100.0% |
| Object Detection (I8) | 409 | 409 | 100.0% |
| Face Detection (F32) | 632 | 632 | 100.0% |
| Face Detection (F16) | 1177 | 1177 | 100.0% |
| Face Detection (I8) | 1045 | 1045 | 100.0% |
| Text Classification (F32) | 240 | 240 | 100.0% |
| Text Classification (F16) | 303 | 303 | 100.0% |
| Machine Translation (F32) | 212 | 212 | 100.0% |
| Machine Translation (F16) | 212 | 212 | 100.0% |
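The Ratio column compares the two runs on the same device model; assuming it is simply the second run's score expressed as a percentage of the first run's score (which is why every row with identical scores reads 100.0%), it can be recomputed from the raw values as in the sketch below. The `scores` mapping and the `ratio_percent` helper are illustrative names, not part of the benchmark output.

```python
# Minimal sketch: recompute the Ratio column from the two raw scores,
# assuming ratio = (score of run B / score of run A) * 100.
# The workload/score pairs are a subset copied from the table above.

scores = {
    "Inference Score": (467, 467),
    "Image Classification (F32)": (305, 305),
    "Machine Translation (F16)": (212, 212),
}

def ratio_percent(score_a: int, score_b: int) -> str:
    """Express run B's score as a percentage of run A's score."""
    return f"{score_b / score_a * 100:.1f}%"

for workload, (a, b) in scores.items():
    print(f"{workload}: {ratio_percent(a, b)}")  # identical scores print 100.0%
```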