forked from openvinotoolkit/openvino
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'master' into github_actions/local_caches
- Loading branch information
Showing
91 changed files
with
2,465 additions
and
1,368 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -21,7 +21,7 @@ jobs: | |
lfs: 'true' | ||
|
||
- name: Install apt-get dependencies | ||
uses: awalsh128/[email protected].1 | ||
uses: awalsh128/[email protected].2 | ||
with: | ||
packages: graphviz texlive liblua5.2-0 libclang1-9 libclang-cpp9 | ||
version: 3.0 | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -30,7 +30,7 @@ jobs: | |
submodules: 'true' | ||
|
||
- name: Install OpenCL | ||
uses: awalsh128/[email protected].1 | ||
uses: awalsh128/[email protected].2 | ||
if: runner.os == 'Linux' | ||
with: | ||
packages: ocl-icd-opencl-dev opencl-headers | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
61 changes: 61 additions & 0 deletions
61
docs/articles_en/get-started/install-openvino-overview/install-openvino-npm.rst
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
.. {#openvino_docs_install_guides_installing_openvino_npm} | ||
Install Intel® Distribution of OpenVINO™ Toolkit from npm Registry | ||
================================================================== | ||
|
||
.. meta:: | ||
:description: Learn how to install OpenVINO™ Runtime on Windows, Linux, and | ||
macOS operating systems, using the npm registry. | ||
|
||
|
||
.. note:: | ||
|
||
Note that the npm distribution: | ||
|
||
* offers the JavaScript API only | ||
* is dedicated to users of all major OSes: Windows, Linux, and macOS | ||
(all x86_64 / arm64 architectures) | ||
* supports only CPU inference on macOS | ||
|
||
.. tab-set:: | ||
|
||
.. tab-item:: System Requirements | ||
:sync: system-requirements | ||
|
||
- Windows, Linux, macOS | ||
- x86, ARM (Windows ARM not supported) | ||
|
||
.. tab-item:: Software Requirements | ||
:sync: software-requirements | ||
|
||
`Node.js version 20.5.1 and higher <https://nodejs.org/en/download/>`__ | ||
|
||
|
||
Installing OpenVINO Node.js | ||
########################### | ||
|
||
1. Make sure that you have installed `Node.js and npm <https://nodejs.org/en/download>`__ | ||
on your system. | ||
2. Navigate to your project directory and run the following command in the terminal: | ||
|
||
.. code-block:: sh | ||
npm install openvino-node | ||
.. note:: | ||
|
||
The *openvino-node* npm package runs in Node.js environment only and provides | ||
a subset of :doc:`OpenVINO Runtime C++ API <../../api/c_cpp_api/group__ov__cpp__api>`. | ||
|
||
What's Next? | ||
#################### | ||
|
||
Now that you’ve installed OpenVINO npm package, you’re ready to run your own machine | ||
learning applications! Explore :doc:`OpenVINO Node.js API <../../api/nodejs_api/nodejs_api>` | ||
to learn more about how to integrate a model in Node.js applications. | ||
|
||
Additional Resources | ||
#################### | ||
|
||
- Intel® Distribution of OpenVINO™ toolkit home page: https://software.intel.com/en-us/openvino-toolkit | ||
- For IoT Libraries & Code Samples, see `Intel® IoT Developer Kit <https://github.com/intel-iot-devkit>`__. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,75 @@ | ||
# Copyright (C) 2018-2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

#! [dataset]
import nncf
import torch

calibration_loader = torch.utils.data.DataLoader(...)


def transform_fn(data_item):
    """Turn one data-loader item into a feed dict for the model.

    The key must be the model's actual input name, e.g. taken from
    ``model.graph.input[0].name``.
    """
    images, _ = data_item
    return {input_name: images.numpy()}


calibration_dataset = nncf.Dataset(calibration_loader, transform_fn)
validation_dataset = nncf.Dataset(calibration_loader, transform_fn)
#! [dataset]
|
||
#! [validation]
import numpy as np
import torch
from sklearn.metrics import accuracy_score

import onnx
import onnxruntime


def validate(model: onnx.ModelProto,
             validation_loader: torch.utils.data.DataLoader) -> float:
    """Return the top-1 accuracy of *model* over *validation_loader*.

    The ONNX model is serialized and executed with ONNX Runtime on CPU;
    the prediction is the argmax over the first model output.
    """
    predictions = []
    references = []

    # Feed inputs by the model's declared input name.
    input_name = model.graph.input[0].name
    serialized_model = model.SerializeToString()
    session = onnxruntime.InferenceSession(serialized_model, providers=["CPUExecutionProvider"])
    output_names = [output.name for output in session.get_outputs()]

    for images, target in validation_loader:
        pred = session.run(output_names, input_feed={input_name: images.numpy()})[0]
        predictions.append(np.argmax(pred, axis=1))
        references.append(target)

    predictions = np.concatenate(predictions, axis=0)
    references = np.concatenate(references, axis=0)
    # sklearn's signature is accuracy_score(y_true, y_pred): ground-truth
    # labels first, model predictions second. (Plain accuracy is symmetric,
    # but the documented order keeps the example correct if the metric or
    # sample_weight usage ever changes.)
    return accuracy_score(references, predictions)
#! [validation]
|
||
#! [quantization]
import onnx

model = onnx.load("model_path")

# Quantize while keeping accuracy within an absolute 1% drop, as measured
# by the validate() callback on the validation dataset.
quantized_model = nncf.quantize_with_accuracy_control(
    model,
    calibration_dataset=calibration_dataset,
    validation_dataset=validation_dataset,
    validation_fn=validate,
    max_drop=0.01,
    drop_type=nncf.DropType.ABSOLUTE,
)
#! [quantization]
|
||
#! [inference]
import openvino as ov

# Convert the quantized ONNX model into an OpenVINO model.
ov_quantized_model = ov.convert_model(quantized_model)

# Compiling lowers the quantized operations to int8 kernels.
model_int8 = ov.compile_model(ov_quantized_model)

input_fp32 = ...  # FP32 model input
res = model_int8(input_fp32)

#! [inference]
Oops, something went wrong.