diff --git a/benchmark/color_table.svg b/benchmark/color_table.svg index 666d31c0..480584c0 100644 --- a/benchmark/color_table.svg +++ b/benchmark/color_table.svg @@ -103,116 +103,116 @@ z - + - - Khadas VIM3 + + Atlas 200I DK A2 - - A311D + + Ascend 310B - - CPU + + CPU - - + - - Khadas VIM4 + + Atlas 200 DK - - A311D2 + + Ascend 310 - - CPU + + CPU - - + - - Khadas Edge2 + + Khadas VIM3 - - RK3588S + + A311D - - CPU + + CPU - - + - - Atlas 200 DK + + Khadas VIM4 - - Ascend 310 + + A311D2 - - CPU + + CPU - - + - - Atlas 200I DK A2 + + Khadas Edge2 - - Ascend 310B + + RK3588S - - CPU + + CPU - @@ -488,64 +488,64 @@ z +" style="fill: #bfe47a; stroke: #000000; stroke-linejoin: miter"/> - 4.62 + 6.67 - +" style="fill: #cfeb85; stroke: #000000; stroke-linejoin: miter"/> - 4.27 + 7.82 - +" style="fill: #9bd469; stroke: #000000; stroke-linejoin: miter"/> - 2.30 + 4.62 - +" style="fill: #91d068; stroke: #000000; stroke-linejoin: miter"/> - 7.82 + 4.27 - +" style="fill: #57b65f; stroke: #000000; stroke-linejoin: miter"/> - 6.67 + 2.30 - @@ -712,64 +712,64 @@ z +" style="fill: #fed481; stroke: #000000; stroke-linejoin: miter"/> - 55.04 + 78.90 - +" style="fill: #fdbb6c; stroke: #000000; stroke-linejoin: miter"/> - 39.94 + 92.21 - +" style="fill: #fffbb8; stroke: #000000; stroke-linejoin: miter"/> - 28.94 + 55.04 - +" style="fill: #e6f59d; stroke: #000000; stroke-linejoin: miter"/> - 92.21 + 39.94 - +" style="fill: #c7e77f; stroke: #000000; stroke-linejoin: miter"/> - 78.90 + 28.94 - @@ -936,64 +936,64 @@ z +" style="fill: #e0f295; stroke: #000000; stroke-linejoin: miter"/> - 29.50 + 36.94 - +" style="fill: #eef8a8; stroke: #000000; stroke-linejoin: miter"/> - 17.28 + 42.93 - +" style="fill: #cbe982; stroke: #000000; stroke-linejoin: miter"/> - 12.44 + 29.50 - +" style="fill: #98d368; stroke: #000000; stroke-linejoin: miter"/> - 42.93 + 17.28 - +" style="fill: #7ac665; stroke: #000000; stroke-linejoin: miter"/> - 36.94 + 12.44 - @@ -1160,64 +1160,64 @@ z +" style="fill: 
#f2faae; stroke: #000000; stroke-linejoin: miter"/> - 128.58 + 155.73 - +" style="fill: #fff5ae; stroke: #000000; stroke-linejoin: miter"/> - 81.46 + 201.99 - +" style="fill: #e0f295; stroke: #000000; stroke-linejoin: miter"/> - 53.83 + 128.58 - +" style="fill: #b5df74; stroke: #000000; stroke-linejoin: miter"/> - 201.99 + 81.46 - +" style="fill: #8ecf67; stroke: #000000; stroke-linejoin: miter"/> - 155.73 + 53.83 - @@ -1384,64 +1384,64 @@ z +" style="fill: #fcaa5f; stroke: #000000; stroke-linejoin: miter"/> - 1108.12 + 1407.75 - +" style="fill: #ef633f; stroke: #000000; stroke-linejoin: miter"/> - 805.54 + 1875.33 - +" style="fill: #fed481; stroke: #000000; stroke-linejoin: miter"/> - 554.30 + 1108.12 - +" style="fill: #fff6b0; stroke: #000000; stroke-linejoin: miter"/> - 1875.33 + 805.54 - +" style="fill: #e6f59d; stroke: #000000; stroke-linejoin: miter"/> - 1407.75 + 554.30 - @@ -1608,64 +1608,64 @@ z +" style="fill: #bbe278; stroke: #000000; stroke-linejoin: miter"/> - 179.93 + 253.05 - +" style="fill: #d1ec86; stroke: #000000; stroke-linejoin: miter"/> - 136.14 + 313.66 - +" style="fill: #9bd469; stroke: #000000; stroke-linejoin: miter"/> - 98.03 + 179.93 - +" style="fill: #7fc866; stroke: #000000; stroke-linejoin: miter"/> - 313.66 + 136.14 - +" style="fill: #63bc62; stroke: #000000; stroke-linejoin: miter"/> - 253.05 + 98.03 - @@ -1832,64 +1832,64 @@ z +" style="fill: #afdd70; stroke: #000000; stroke-linejoin: miter"/> - 211.02 + 219.28 - +" style="fill: #dff293; stroke: #000000; stroke-linejoin: miter"/> - 148.82 + 360.26 - +" style="fill: #abdb6d; stroke: #000000; stroke-linejoin: miter"/> - 103.42 + 211.02 - +" style="fill: #89cc67; stroke: #000000; stroke-linejoin: miter"/> - 360.26 + 148.82 - +" style="fill: #69be63; stroke: #000000; stroke-linejoin: miter"/> - 219.28 + 103.42 - @@ -2056,64 +2056,64 @@ z +" style="fill: #afdd70; stroke: #000000; stroke-linejoin: miter"/> - 210.19 + 217.18 - +" style="fill: #dff293; stroke: #000000; stroke-linejoin: 
miter"/> - 148.91 + 361.22 - +" style="fill: #abdb6d; stroke: #000000; stroke-linejoin: miter"/> - 103.41 + 210.19 - +" style="fill: #89cc67; stroke: #000000; stroke-linejoin: miter"/> - 361.22 + 148.91 - +" style="fill: #69be63; stroke: #000000; stroke-linejoin: miter"/> - 217.18 + 103.41 - @@ -2280,64 +2280,64 @@ z +" style="fill: #96d268; stroke: #000000; stroke-linejoin: miter"/> - 185.45 + 158.82 - +" style="fill: #cdea83; stroke: #000000; stroke-linejoin: miter"/> - 127.49 + 289.82 - +" style="fill: #a5d86a; stroke: #000000; stroke-linejoin: miter"/> - 67.15 + 185.45 - +" style="fill: #82c966; stroke: #000000; stroke-linejoin: miter"/> - 289.82 + 127.49 - +" style="fill: #4eb15d; stroke: #000000; stroke-linejoin: miter"/> - 158.82 + 67.15 - @@ -2504,64 +2504,64 @@ z +" style="fill: #9bd469; stroke: #000000; stroke-linejoin: miter"/> - 197.16 + 169.22 - +" style="fill: #d5ed88; stroke: #000000; stroke-linejoin: miter"/> - 135.27 + 318.96 - +" style="fill: #a9da6c; stroke: #000000; stroke-linejoin: miter"/> - 70.63 + 197.16 - +" style="fill: #84ca66; stroke: #000000; stroke-linejoin: miter"/> - 318.96 + 135.27 - +" style="fill: #51b35e; stroke: #000000; stroke-linejoin: miter"/> - 169.22 + 70.63 - @@ -2728,64 +2728,64 @@ z +" style="fill: #feca79; stroke: #000000; stroke-linejoin: miter"/> - 335.75 + 417.31 - +" style="fill: #fcaa5f; stroke: #000000; stroke-linejoin: miter"/> - 219.81 + 499.55 - +" style="fill: #fee797; stroke: #000000; stroke-linejoin: miter"/> - 151.10 + 335.75 - +" style="fill: #f1f9ac; stroke: #000000; stroke-linejoin: miter"/> - 499.55 + 219.81 - +" style="fill: #cfeb85; stroke: #000000; stroke-linejoin: miter"/> - 417.31 + 151.10 - @@ -2952,64 +2952,64 @@ z +" style="fill: #c9e881; stroke: #000000; stroke-linejoin: miter"/> - 59.27 + 70.20 - +" style="fill: #e2f397; stroke: #000000; stroke-linejoin: miter"/> - 38.73 + 92.66 - +" style="fill: #b9e176; stroke: #000000; stroke-linejoin: miter"/> - 27.45 + 59.27 - +" style="fill: #91d068; 
stroke: #000000; stroke-linejoin: miter"/> - 92.66 + 38.73 - +" style="fill: #73c264; stroke: #000000; stroke-linejoin: miter"/> - 70.20 + 27.45 - @@ -3176,64 +3176,64 @@ z +" style="fill: #bbe278; stroke: #000000; stroke-linejoin: miter"/> - 52.17 + 61.72 - +" style="fill: #d5ed88; stroke: #000000; stroke-linejoin: miter"/> - 33.68 + 79.39 - +" style="fill: #addc6f; stroke: #000000; stroke-linejoin: miter"/> - 22.95 + 52.17 - +" style="fill: #84ca66; stroke: #000000; stroke-linejoin: miter"/> - 79.39 + 33.68 - +" style="fill: #66bd63; stroke: #000000; stroke-linejoin: miter"/> - 61.72 + 22.95 - @@ -3400,64 +3400,64 @@ z +" style="fill: #b1de71; stroke: #000000; stroke-linejoin: miter"/> - 71.92 + 78.01 - +" style="fill: #cbe982; stroke: #000000; stroke-linejoin: miter"/> - 47.68 + 102.49 - +" style="fill: #abdb6d; stroke: #000000; stroke-linejoin: miter"/> - 29.63 + 71.92 - +" style="fill: #84ca66; stroke: #000000; stroke-linejoin: miter"/> - 102.49 + 47.68 - +" style="fill: #5ab760; stroke: #000000; stroke-linejoin: miter"/> - 78.01 + 29.63 - @@ -3624,64 +3624,64 @@ z - --- + --- - - --- + --- - - --- + --- - - --- + --- - - --- + --- - @@ -3848,64 +3848,64 @@ z +" style="fill: #fee18d; stroke: #000000; stroke-linejoin: miter"/> - 327.07 + 285.75 - +" style="fill: #f26841; stroke: #000000; stroke-linejoin: miter"/> - 218.22 + 521.46 - +" style="fill: #fece7c; stroke: #000000; stroke-linejoin: miter"/> - 148.01 + 327.07 - +" style="fill: #fffab6; stroke: #000000; stroke-linejoin: miter"/> - 521.46 + 218.22 - +" style="fill: #e2f397; stroke: #000000; stroke-linejoin: miter"/> - 285.75 + 148.01 - @@ -4072,64 +4072,64 @@ z +" style="fill: #e0f295; stroke: #000000; stroke-linejoin: miter"/> - 78.26 + 92.56 - +" style="fill: #feec9f; stroke: #000000; stroke-linejoin: miter"/> - 54.45 + 159.80 - +" style="fill: #d1ec86; stroke: #000000; stroke-linejoin: miter"/> - 45.03 + 78.26 - +" style="fill: #addc6f; stroke: #000000; stroke-linejoin: miter"/> - 159.80 + 54.45 - +" 
style="fill: #9bd469; stroke: #000000; stroke-linejoin: miter"/> - 92.56 + 45.03 - @@ -4296,64 +4296,64 @@ z +" style="fill: #b5df74; stroke: #000000; stroke-linejoin: miter"/> - 35.80 + 56.00 - +" style="fill: #c5e67e; stroke: #000000; stroke-linejoin: miter"/> - 25.83 + 67.85 - +" style="fill: #8ccd67; stroke: #000000; stroke-linejoin: miter"/> - 20.22 + 35.80 - +" style="fill: #70c164; stroke: #000000; stroke-linejoin: miter"/> - 67.85 + 25.83 - +" style="fill: #5db961; stroke: #000000; stroke-linejoin: miter"/> - 56.00 + 20.22 - @@ -4520,64 +4520,64 @@ z +" style="fill: #87cb67; stroke: #000000; stroke-linejoin: miter"/> - 83.22 + 90.13 - +" style="fill: #b3df72; stroke: #000000; stroke-linejoin: miter"/> - 57.22 + 145.83 - +" style="fill: #7fc866; stroke: #000000; stroke-linejoin: miter"/> - 42.90 + 83.22 - +" style="fill: #63bc62; stroke: #000000; stroke-linejoin: miter"/> - 145.83 + 57.22 - +" style="fill: #4bb05c; stroke: #000000; stroke-linejoin: miter"/> - 90.13 + 42.90 - @@ -4744,64 +4744,64 @@ z +" style="fill: #b7e075; stroke: #000000; stroke-linejoin: miter"/> - 75.38 + 83.16 - +" style="fill: #e3f399; stroke: #000000; stroke-linejoin: miter"/> - 53.06 + 134.02 - +" style="fill: #addc6f; stroke: #000000; stroke-linejoin: miter"/> - 37.06 + 75.38 - +" style="fill: #8ccd67; stroke: #000000; stroke-linejoin: miter"/> - 134.02 + 53.06 - +" style="fill: #6bbf64; stroke: #000000; stroke-linejoin: miter"/> - 83.16 + 37.06 - @@ -4968,64 +4968,64 @@ z +" style="fill: #57b65f; stroke: #000000; stroke-linejoin: miter"/> - 32.20 + 37.02 - +" style="fill: #cbe982; stroke: #000000; stroke-linejoin: miter"/> - 29.47 + 143.62 - +" style="fill: #4eb15d; stroke: #000000; stroke-linejoin: miter"/> - 14.02 + 32.20 - +" style="fill: #45ad5b; stroke: #000000; stroke-linejoin: miter"/> - 143.62 + 29.47 - +" style="fill: #199750; stroke: #000000; stroke-linejoin: miter"/> - 37.02 + 14.02 - diff --git a/benchmark/table_config.yaml b/benchmark/table_config.yaml index 
dbd6c99c..c84f62f9 100644 --- a/benchmark/table_config.yaml +++ b/benchmark/table_config.yaml @@ -170,6 +170,14 @@ Devices: display_info: "Intel\n12700K\nCPU" platform: "CPU" + - name: "Atlas 200I DK A2" + display_info: "Atlas 200I DK A2\nAscend 310B\nCPU" + platform: "CPU" + + - name: "Atlas 200 DK" + display_info: "Atlas 200 DK\nAscend 310\nCPU" + platform: "CPU" + - name: "Khadas VIM3" display_info: "Khadas VIM3\nA311D\nCPU" platform: "CPU" @@ -182,14 +190,6 @@ Devices: display_info: "Khadas Edge2\nRK3588S\nCPU" platform: "CPU" - - name: "Atlas 200 DK" - display_info: "Atlas 200 DK\nAscend 310\nCPU" - platform: "CPU" - - - name: "Atlas 200I DK A2" - display_info: "Atlas 200I DK A2\nAscend 310B\nCPU" - platform: "CPU" - - name: "Jetson Nano B01" display_info: "Jetson Nano\nB01\nCPU" platform: "CPU" diff --git a/reports/2023-4.9.0/assets/benchmark_table_4.9.0.png b/reports/2023-4.9.0/assets/benchmark_table_4.9.0.png new file mode 100644 index 00000000..d0cd6fa1 --- /dev/null +++ b/reports/2023-4.9.0/assets/benchmark_table_4.9.0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:978f537c9a967810915ba049ffb667fb41c2b9b22be90c0c334a350e1ac37505 +size 311950 diff --git a/reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md b/reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md new file mode 100644 index 00000000..85c8d7bf --- /dev/null +++ b/reports/2023-4.9.0/opencv_zoo_report-cn-2023-4.9.0.md @@ -0,0 +1,53 @@ +# OpenCV Model Zoo报告 - 模型、板卡和性能基准结果分析 + +[![benchmark_table](assets/benchmark_table_4.9.0.png)](benchmark_table) + +[OpenCV Model Zoo](https://github.com/opencv/opencv_zoo)项目于2021年9月启动。从那时起,我们已收集了43个模型权重,涵盖19个任务,并添加了13种硬件设置,涵盖不同的CPU架构(x86-64、ARM和RISC-V)以及不同的计算单元(CPU、GPU和NPU)。所有这些模型和硬件都经过我们的全面测试,并保证与OpenCV的最新版本(目前是4.9.0)兼容,如我们的基准表所示。 + +## Models + +截至此版本发布,我们在opencv_zoo中共有43个模型权重,涵盖了总共19个任务。这些模型是考虑到许可证的,这意味着基本上您可以为任何目的使用opencv_zoo中的所有模型,甚至用于商业用途。它们主要来自以下4个来源: + +- OpenCV中国团队。YuNet人脸检测模型由我们团队的一名成员开发和维护。 +- OpenCV Area 
Chair。这是由OpenCV基金会启动的一个项目,详情可以在[这里](https://opencv.org/opencv-area-chairs/)找到。人脸识别的SFace模型和面部表情识别的FER模型是由Area Chair邓教授贡献的。 +- 与OpenCV的合作。人体分割模型来自百度PaddlePaddle,修改后的YuNet用于车牌检测来自[watrix.ai](watrix.ai)。 +- OpenCV社区。从2022年开始,我们在Google Summer of Code(GSoC)计划中有关于模型贡献的项目想法。GSoC学生已成功贡献了6个模型,涵盖了目标检测、目标跟踪和光流估计等任务。 + +我们欢迎您的贡献! + +此外,我们为每个模型提供了在最新版本的OpenCV中可立即使用的Python和C++演示。我们还提供了[可视化样例](https://github.com/opencv/opencv_zoo?tab=readme-ov-file#some-examples),以便开发者们更好地了解任务和输出的类型。 + +## Boards + +opencv_zoo中有13种硬件设置,其中之一是搭载Intel i7-12700K的 PC,其他都是单板计算机(SBC)。它们按CPU架构分类如下: + + + +x86-64: + +- Intel Core i7-12700K:8 P核(3.60GHz,4.90GHz turbo),4 E核(2.70GHz,3.80GHz turbo),20线程。 + +ARM: + +| 板卡 | SoC 型号 | CPU 型号 | GPU 型号 | NPU 性能(Int8) | +| ----- | --- | --- | --- | --- | +| Khadas VIM3 | Amlogic A311D | 2.20GHz 四核 Cortex-A73 + 1.80GHz 双核 Cortex-A53 | ARM G52 | 5 TOPS | +| Khadas VIM4 | Amlogic A311D2 | 2.2GHz 四核 ARM Cortex-A73 + 2.0GHz 四核 Cortex-A53 | Mali G52MP8(8EE) 800Mhz | 3.2 TOPS | +| Khadas Edge 2 | Rockchip RK3588S | 2.25GHz 四核 Cortex-A76 + 1.80GHz 四核 Cortex-A55 | 1GHz ARM Mali-G610 | 6 TOPS | +| Raspberry Pi 4B | Broadcom BCM2711 | 1.5GHz 四核 Cortex-A72 | *未知* | *无* | +| Horizon Sunrise X3 PI | Sunrise X3 | 1.2GHz 四核 Cortex-A53 | *未知* | 5 TOPS,双核伯努利架构| +| MAIX-III AXera-Pi | AXera AX620A | 四核 Cortex-A7 | *未知* | 3.6 TOPS | +| Toybrick RV1126 | Rockchip RV1126 | 四核 Cortex-A7 | *未知* | 2.0 TOPS | +| NVIDIA Jetson Nano B01 | *未知* | 1.43GHz 四核 Cortex-A57 | 128 核 NVIDIA Maxwell | *无* | +| NVIDIA Jetson Nano Orin | *未知* | 6 核 Cortex®-A78AE | 1024 核 NVIDIA Ampere | *无* | +| Atlas 200 DK | *未知* | *未知* | *未知* | 22 TOPS,Ascend 310 | +| Atlas 200I DK A2 | *未知* | 1.0GHz 四核 | *未知* | 8 TOPS,Ascend 310B | + +RISC-V: + +| 板卡 | SoC 型号 | CPU 型号 | GPU 型号 | +| ----- | --------- | --------- | --------- | +| StarFive VisionFive 2 | StarFive JH7110 | 1.5GHz 四核 RISC-V 64 位 | 600MHz IMG BXE-4-32 MC1 | +| Allwinner Nezha D1 | Allwinner D1 | 1.0GHz 单核 RISC-V 64 位,RVV-0.7.1 | *未知* | + 
+我们的目标是在边缘设备上进行高效计算!在过去的几年中,我们(OpenCV)中国团队,已经在dnn模块针对ARM架构优化的方面付出了大量努力,特别关注卷积神经网络的卷积内核优化和Vision Transformers的GEMM内核优化。更值得一提的是,我们为dnn模块引入了NPU支持,支持Khadas VIM3、Atlas 200 DK 和Atlas 200I DK A2上的 NPU。在 NPU 上运行模型可以帮助将计算负载从CPU分配到NPU,甚至可以达到更快的推理速度(例如,在 Atlas 200 DK 上 Ascend 310 的测试结果)。 diff --git a/reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md b/reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md new file mode 100644 index 00000000..122bb136 --- /dev/null +++ b/reports/2023-4.9.0/opencv_zoo_report-en-2023-4.9.0.md @@ -0,0 +1,56 @@ +# OpenCV Model Zoo Report - Models, Boards and Benchmark Result Analysis + + +[![benchmark_table](assets/benchmark_table_4.9.0.png)](benchmark_table) + +[OpenCV Model Zoo](https://github.com/opencv/opencv_zoo) was started back in September, 2021. Since then, we have collected 43 model weights covering 19 tasks and added 13 hardware setups covering different CPU architectures (x86-64, ARM and RISC-V) and different computing units (CPU, GPU and NPU). All these models and hardware are fully tested by us and guaranteed to work with the latest release of OpenCV (currently 4.9.0), as shown in our benchmark table. + +## Models + +As of this release, we have 43 model weights covering 19 tasks in total in the zoo. These models are collected with licenses in mind, meaning you can basically use all the models in the zoo for whatever purposes you want, even for commercial purposes. They are collected mainly from 4 sources: + +- OpenCV China team. The YuNet model for face detection is developed and maintained by one of our team members. +- OpenCV Area Chair. This is a program started by OpenCV Foundation, details can be found [here](https://opencv.org/opencv-area-chairs/). The SFace model for face recognition and FER model for facial expression recognition are contributed by one of the Area Chairs, Prof. Deng. +- Cooperation with OpenCV.
The HumanSeg model for human segmentation is from Baidu PaddlePaddle, and the modified YuNet for license plate detection is from [watrix.ai](https://watrix.ai). +- Community. Starting from 2022, we have project ideas for model contribution in the Google Summer of Code (GSoC) program. GSoC students have successfully contributed 6 models covering tasks such as object detection, object tracking and optical flow estimation. + +We welcome your contribution! + +Besides, demos in Python and C++, which work out-of-the-box with the latest OpenCV, are also provided for each model. We also provide [visual examples](https://github.com/opencv/opencv_zoo?tab=readme-ov-file#some-examples) so that people can better understand what the task is and what kind of output it produces. + +## Boards + +There are 13 hardware setups in the zoo; one of them is a PC with Intel i7-12700K, and the others are single board computers (SBC). They are categorized by CPU architecture as follows: + + + +x86-64: + +- Intel Core i7-12700K: 8 P-core (3.60GHz, 4.90GHz turbo), 4 E-core (2.70GHz, 3.80GHz turbo), 20 threads.
+ + +ARM: + +| Board | SoC model | CPU model | GPU model | NPU Performance (Int8) | +| ----- | --- | --- | --- | --- | +| Khadas VIM3 | Amlogic A311D | 2.20GHz Quad-core Cortex-A73 + 1.80GHz Dual-core Cortex-A53 | ARM G52 | 5 TOPS | +| Khadas VIM4 | Amlogic A311D2 | 2.2GHz Quad-core ARM Cortex-A73 + 2.0GHz Quad-core Cortex-A53 | Mali G52MP8(8EE) 800MHz | 3.2 TOPS | +| Khadas Edge 2 | Rockchip RK3588S | 2.25GHz Quad-core Cortex-A76 + 1.80GHz Quad-core Cortex-A55 | 1GHz ARM Mali-G610 | 6 TOPS | +| Raspberry Pi 4B | Broadcom BCM2711 | 1.5GHz Quad-core Cortex-A72 | *Unknown* | *No* | +| Horizon Sunrise X3 PI | Sunrise X3 | 1.2GHz Quad-core Cortex-A53 | *Unknown* | 5 TOPS, Dual-core Bernoulli Arch | +| MAIX-III AXera-Pi | AXera AX620A | Quad-core Cortex-A7 | *Unknown* | 3.6 TOPS | +| Toybrick RV1126 | Rockchip RV1126 | Quad-core Cortex-A7 | *Unknown* | 2.0 TOPS | +| NVIDIA Jetson Nano B01 | *Unknown* | 1.43GHz Quad-core Cortex-A57 | 128-core NVIDIA Maxwell | *No* | +| NVIDIA Jetson Nano Orin | *Unknown* | 6-core Cortex®-A78AE | 1024-core NVIDIA Ampere | *No* | +| Atlas 200 DK | *Unknown* | *Unknown* | *Unknown* | 22 TOPS, Ascend 310 | +| Atlas 200I DK A2 | *Unknown* | 1.0GHz Quad-core | *Unknown* | 8 TOPS, Ascend 310B | + + +RISC-V: + +| Board | SoC model | CPU model | GPU model | +| ----- | --------- | --------- | --------- | +| StarFive VisionFive 2 | StarFive JH7110 | 1.5GHz Quad-core RISC-V 64-bit | 600MHz IMG BXE-4-32 MC1 | +| Allwinner Nezha D1 | Allwinner D1 | 1.0GHz single-core RISC-V 64-bit, RVV-0.7.1 | *Unknown* | + +We are targeting efficient computing on edge devices! In the past few years, we, the OpenCV China team, have spent most of our effort in optimizing the dnn module for ARM architecture, focusing especially on convolution kernel optimization for ConvNets and GEMM kernel optimization for Vision Transformers.
What's even more worth mentioning is that we introduced NPU support for the dnn module, supporting the NPU in Khadas VIM3, Atlas 200 DK and Atlas 200I DK A2. Running the model on NPU can help distribute computing loads from CPU to NPU and even reach a faster inference speed (see benchmark results on Ascend 310 on Atlas 200 DK for example). diff --git a/reports/README.md b/reports/README.md new file mode 100644 index 00000000..623e0d9f --- /dev/null +++ b/reports/README.md @@ -0,0 +1,3 @@ +# Reports on models and boards + +Here we present reports on models and boards in the zoo per major release of OpenCV since 4.9.0.