| Workload | iPhone14,2 Inference Score | samsung SM-A166P Inference Score | Ratio (iPhone / samsung) |
|---|---|---|---|
| Overall Inference Score | 940 | 470 | 200.0% |
| Image Classification (F32) | 771 | 302 | 255.3% |
| Image Classification (F16) | 773 | 559 | 138.3% |
| Image Classification (I8) | 452 | 504 | 89.7% |
| Image Segmentation (F32) | 1104 | 615 | 179.5% |
| Image Segmentation (F16) | 1082 | 1160 | 93.3% |
| Image Segmentation (I8) | 1447 | 1165 | 124.2% |
| Pose Estimation (F32) | 423 | 238 | 177.7% |
| Pose Estimation (F16) | 415 | 560 | 74.1% |
| Pose Estimation (I8) | 482 | 539 | 89.4% |
| Object Detection (F32) | 1094 | 291 | 375.9% |
| Object Detection (F16) | 1090 | 436 | 250.0% |
| Object Detection (I8) | 746 | 441 | 169.2% |
| Face Detection (F32) | 1327 | 662 | 200.5% |
| Face Detection (F16) | 1307 | 1129 | 115.8% |
| Face Detection (I8) | 856 | 1053 | 81.3% |
| Text Classification (F32) | 1189 | 208 | 571.6% |
| Text Classification (F16) | 1296 | 295 | 439.3% |
| Machine Translation (F32) | 1961 | 207 | 947.3% |
| Machine Translation (F16) | 1972 | 209 | 943.5% |