| Workload | Result 1 | Result 2 | Ratio | Device 1 | Device 2 |
|---|---|---|---|---|---|
| Inference Score | 460 | 460 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Classification (F32) | 298 | 298 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Classification (F16) | 536 | 536 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Classification (I8) | 495 | 495 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Segmentation (F32) | 607 | 607 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Segmentation (F16) | 1137 | 1137 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Image Segmentation (I8) | 1155 | 1155 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Pose Estimation (F32) | 222 | 222 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Pose Estimation (F16) | 505 | 505 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Pose Estimation (I8) | 514 | 514 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Object Detection (F32) | 263 | 263 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Object Detection (F16) | 420 | 420 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Object Detection (I8) | 419 | 419 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Face Detection (F32) | 679 | 679 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Face Detection (F16) | 1102 | 1102 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Face Detection (I8) | 1024 | 1024 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Text Classification (F32) | 280 | 280 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Text Classification (F16) | 273 | 273 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Machine Translation (F32) | 201 | 201 | 100.0% | samsung SM-A166P | samsung SM-A166P |
| Machine Translation (F16) | 203 | 203 | 100.0% | samsung SM-A166P | samsung SM-A166P |