| Benchmark | samsung SM-X216B | samsung SM-X216B | % |
|---|---|---|---|
| Inference Score | 598 | 598 | 100.0% |
| Image Classification (F32) | 424 | 424 | 100.0% |
| Image Classification (F16) | 675 | 675 | 100.0% |
| Image Classification (I8) | 632 | 632 | 100.0% |
| Image Segmentation (F32) | 885 | 885 | 100.0% |
| Image Segmentation (F16) | 1556 | 1556 | 100.0% |
| Image Segmentation (I8) | 1455 | 1455 | 100.0% |
| Pose Estimation (F32) | 422 | 422 | 100.0% |
| Pose Estimation (F16) | 762 | 762 | 100.0% |
| Pose Estimation (I8) | 757 | 757 | 100.0% |
| Object Detection (F32) | 283 | 283 | 100.0% |
| Object Detection (F16) | 361 | 361 | 100.0% |
| Object Detection (I8) | 344 | 344 | 100.0% |
| Face Detection (F32) | 779 | 779 | 100.0% |
| Face Detection (F16) | 1393 | 1393 | 100.0% |
| Face Detection (I8) | 1259 | 1259 | 100.0% |
| Text Classification (F32) | 383 | 383 | 100.0% |
| Text Classification (F16) | 455 | 455 | 100.0% |
| Machine Translation (F32) | 279 | 279 | 100.0% |
| Machine Translation (F16) | 279 | 279 | 100.0% |