| Benchmark | Score (samsung SM-A166P) | Score (samsung SM-A166P) | Ratio |
|---|---|---|---|
| Inference Score | 478 | 478 | 100.0% |
| Image Classification (F32) | 310 | 310 | 100.0% |
| Image Classification (F16) | 553 | 553 | 100.0% |
| Image Classification (I8) | 527 | 527 | 100.0% |
| Image Segmentation (F32) | 646 | 646 | 100.0% |
| Image Segmentation (F16) | 1232 | 1232 | 100.0% |
| Image Segmentation (I8) | 1229 | 1229 | 100.0% |
| Pose Estimation (F32) | 241 | 241 | 100.0% |
| Pose Estimation (F16) | 569 | 569 | 100.0% |
| Pose Estimation (I8) | 559 | 559 | 100.0% |
| Object Detection (F32) | 285 | 285 | 100.0% |
| Object Detection (F16) | 422 | 422 | 100.0% |
| Object Detection (I8) | 421 | 421 | 100.0% |
| Face Detection (F32) | 683 | 683 | 100.0% |
| Face Detection (F16) | 1323 | 1323 | 100.0% |
| Face Detection (I8) | 1205 | 1205 | 100.0% |
| Text Classification (F32) | 203 | 203 | 100.0% |
| Text Classification (F16) | 297 | 297 | 100.0% |
| Machine Translation (F32) | 190 | 190 | 100.0% |
| Machine Translation (F16) | 194 | 194 | 100.0% |