| Workload | samsung SM-A166P (Result 1) | samsung SM-A166P (Result 2) | Ratio |
|---|---|---|---|
| Inference Score | 361 | 361 | 100.0% |
| Image Classification (F32) | 197 | 197 | 100.0% |
| Image Classification (F16) | 585 | 585 | 100.0% |
| Image Classification (I8) | 528 | 528 | 100.0% |
| Image Segmentation (F32) | 379 | 379 | 100.0% |
| Image Segmentation (F16) | 531 | 531 | 100.0% |
| Image Segmentation (I8) | 1089 | 1089 | 100.0% |
| Pose Estimation (F32) | 253 | 253 | 100.0% |
| Pose Estimation (F16) | 554 | 554 | 100.0% |
| Pose Estimation (I8) | 278 | 278 | 100.0% |
| Object Detection (F32) | 291 | 291 | 100.0% |
| Object Detection (F16) | 508 | 508 | 100.0% |
| Object Detection (I8) | 382 | 382 | 100.0% |
| Face Detection (F32) | 530 | 530 | 100.0% |
| Face Detection (F16) | 706 | 706 | 100.0% |
| Face Detection (I8) | 634 | 634 | 100.0% |
| Text Classification (F32) | 136 | 136 | 100.0% |
| Text Classification (F16) | 123 | 123 | 100.0% |
| Machine Translation (F32) | 186 | 186 | 100.0% |
| Machine Translation (F16) | 190 | 190 | 100.0% |