| Workload | samsung SM-A166P | samsung SM-X216B | SM-A166P / SM-X216B |
|---|---|---|---|
| Inference Score | 201 | 661 | 30.4% |
| Image Classification (F32) | 96 | 440 | 21.8% |
| Image Classification (F16) | 90 | 714 | 12.6% |
| Image Classification (I8) | 127 | 668 | 19.0% |
| Image Segmentation (F32) | 333 | 908 | 36.7% |
| Image Segmentation (F16) | 347 | 1531 | 22.7% |
| Image Segmentation (I8) | 572 | 1569 | 36.5% |
| Pose Estimation (F32) | 119 | 426 | 27.9% |
| Pose Estimation (F16) | 118 | 782 | 15.1% |
| Pose Estimation (I8) | 136 | 778 | 17.5% |
| Object Detection (F32) | 166 | 373 | 44.5% |
| Object Detection (F16) | 161 | 498 | 32.3% |
| Object Detection (I8) | 197 | 507 | 38.9% |
| Face Detection (F32) | 312 | 825 | 37.8% |
| Face Detection (F16) | 300 | 1446 | 20.7% |
| Face Detection (I8) | 311 | 1304 | 23.8% |
| Text Classification (F32) | 274 | 437 | 62.7% |
| Text Classification (F16) | 275 | 545 | 50.5% |
| Machine Translation (F32) | 203 | 305 | 66.6% |
| Machine Translation (F16) | 192 | 309 | 62.1% |
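The percentage column appears to be the samsung SM-A166P score expressed as a fraction of the samsung SM-X216B score (e.g. 201 / 661 ≈ 30.4%). A minimal sketch of that calculation, using a few score pairs taken from the table above:

```python
# Sketch: reproduce the ratio column from the two per-workload scores.
# Score pairs (SM-A166P, SM-X216B) are copied from the table above.
scores = {
    "Inference Score": (201, 661),
    "Image Classification (F32)": (96, 440),
    "Machine Translation (F16)": (192, 309),
}

for workload, (a166p, x216b) in scores.items():
    ratio = a166p / x216b * 100  # SM-A166P score as a percentage of SM-X216B
    print(f"{workload}: {ratio:.1f}%")  # e.g. "Inference Score: 30.4%"
```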