| Benchmark | Result 1 | Result 2 | Ratio | Device (Result 1) | Device (Result 2) |
| --- | --- | --- | --- | --- | --- |
| Inference Score | 485 | 485 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Classification (F32) | 327 | 327 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Classification (F16) | 592 | 592 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Classification (I8) | 561 | 561 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Segmentation (F32) | 642 | 642 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Segmentation (F16) | 1229 | 1229 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Segmentation (I8) | 1235 | 1235 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Pose Estimation (F32) | 229 | 229 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Pose Estimation (F16) | 569 | 569 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Pose Estimation (I8) | 551 | 551 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Object Detection (F32) | 300 | 300 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Object Detection (F16) | 464 | 464 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Object Detection (I8) | 457 | 457 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Face Detection (F32) | 656 | 656 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Face Detection (F16) | 1256 | 1256 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Face Detection (I8) | 1126 | 1126 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Text Classification (F32) | 225 | 225 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Text Classification (F16) | 339 | 339 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Machine Translation (F32) | 180 | 180 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Machine Translation (F16) | 175 | 175 | 100.0% | samsung SM-A166P | samsung SM-A166P |
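Every Ratio entry above is 100.0% because both compared results come from the same samsung SM-A166P and post identical scores. A minimal sketch of the assumed relationship behind that column (the first result's score expressed as a percentage of the second's; `ratio_percent` is a hypothetical helper, not part of the benchmark tooling):

```python
def ratio_percent(score_a: int, score_b: int) -> str:
    """Return score_a as a percentage of score_b, formatted like the table."""
    return f"{100.0 * score_a / score_b:.1f}%"

# Identical scores (e.g. the overall Inference Score, 485 vs 485) yield 100.0%.
print(ratio_percent(485, 485))  # -> 100.0%
```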