From ed11740c17cce40fa553bd9cf30ad052bed7f544 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 13:10:53 +0200 Subject: [PATCH 01/30] Initial commit --- templates/rhel8/dist/dev.dockerfile.j2 | 12 +++++++++--- templates/rhel8/env/dev_env.dockerfile.j2 | 1 + templates/rhel8/env/runtime_env.dockerfile.j2 | 1 + templates/ubuntu20/dist/dev.dockerfile.j2 | 12 ++++++++---- templates/ubuntu20/env/dev_env.dockerfile.j2 | 1 + templates/ubuntu20/env/runtime_env.dockerfile.j2 | 1 + templates/ubuntu22/dist/dev.dockerfile.j2 | 12 ++++++++---- templates/ubuntu22/env/dev_env.dockerfile.j2 | 1 + templates/ubuntu22/env/runtime_env.dockerfile.j2 | 1 + 9 files changed, 31 insertions(+), 11 deletions(-) diff --git a/templates/rhel8/dist/dev.dockerfile.j2 b/templates/rhel8/dist/dev.dockerfile.j2 index 695f6898..8b172509 100644 --- a/templates/rhel8/dist/dev.dockerfile.j2 +++ b/templates/rhel8/dist/dev.dockerfile.j2 @@ -5,10 +5,16 @@ ARG OPENVINO_WHEELS_URL # hadolint ignore=SC2102,DL3033 RUN yum install -y cmake git && yum clean all && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --extra-index-url https://download.pytorch.org/whl/cpu; \ + + ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre 
openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu ; \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu ; \ fi # download source for PyPi LGPL packages diff --git a/templates/rhel8/env/dev_env.dockerfile.j2 b/templates/rhel8/env/dev_env.dockerfile.j2 index dcf972ec..481a0712 100644 --- a/templates/rhel8/env/dev_env.dockerfile.j2 +++ b/templates/rhel8/env/dev_env.dockerfile.j2 @@ -8,4 +8,5 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino +ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig diff --git a/templates/rhel8/env/runtime_env.dockerfile.j2 b/templates/rhel8/env/runtime_env.dockerfile.j2 index 9eb25ba5..476d274d 100644 --- a/templates/rhel8/env/runtime_env.dockerfile.j2 +++ b/templates/rhel8/env/runtime_env.dockerfile.j2 @@ -8,4 +8,5 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV 
OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino +ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig diff --git a/templates/ubuntu20/dist/dev.dockerfile.j2 b/templates/ubuntu20/dist/dev.dockerfile.j2 index e4983e2b..f7a1a1a3 100644 --- a/templates/ubuntu20/dist/dev.dockerfile.j2 +++ b/templates/ubuntu20/dist/dev.dockerfile.j2 @@ -5,11 +5,15 @@ ARG OPENVINO_WHEELS_URL # hadolint ignore=SC2102 RUN apt-get update && apt-get install -y --no-install-recommends cmake make git && rm -rf /var/lib/apt/lists/* && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino=="$OPENVINO_WHEELS_VERSION" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --extra-index-url https://download.pytorch.org/whl/cpu; \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu; \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* 
--find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu; \ fi WORKDIR ${INTEL_OPENVINO_DIR}/licensing diff --git a/templates/ubuntu20/env/dev_env.dockerfile.j2 b/templates/ubuntu20/env/dev_env.dockerfile.j2 index dcf972ec..481a0712 100644 --- a/templates/ubuntu20/env/dev_env.dockerfile.j2 +++ b/templates/ubuntu20/env/dev_env.dockerfile.j2 @@ -8,4 +8,5 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino +ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig diff --git a/templates/ubuntu20/env/runtime_env.dockerfile.j2 b/templates/ubuntu20/env/runtime_env.dockerfile.j2 index 9eb25ba5..476d274d 100644 --- a/templates/ubuntu20/env/runtime_env.dockerfile.j2 +++ b/templates/ubuntu20/env/runtime_env.dockerfile.j2 @@ -8,4 +8,5 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino +ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig diff --git a/templates/ubuntu22/dist/dev.dockerfile.j2 
b/templates/ubuntu22/dist/dev.dockerfile.j2 index 41862a4b..99f22a0a 100644 --- a/templates/ubuntu22/dist/dev.dockerfile.j2 +++ b/templates/ubuntu22/dist/dev.dockerfile.j2 @@ -5,11 +5,15 @@ ARG OPENVINO_WHEELS_URL # hadolint ignore=SC2102 RUN apt-get update && apt-get install -y --no-install-recommends cmake make git && rm -rf /var/lib/apt/lists/* && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino=="$OPENVINO_WHEELS_VERSION" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --extra-index-url https://download.pytorch.org/whl/cpu; \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu; \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links 
"$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu; \ fi WORKDIR ${INTEL_OPENVINO_DIR}/licensing diff --git a/templates/ubuntu22/env/dev_env.dockerfile.j2 b/templates/ubuntu22/env/dev_env.dockerfile.j2 index 3b28b8cc..3c29bc5a 100644 --- a/templates/ubuntu22/env/dev_env.dockerfile.j2 +++ b/templates/ubuntu22/env/dev_env.dockerfile.j2 @@ -7,4 +7,5 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino +ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig diff --git a/templates/ubuntu22/env/runtime_env.dockerfile.j2 b/templates/ubuntu22/env/runtime_env.dockerfile.j2 index 0fc5daba..8f6f8a28 100644 --- a/templates/ubuntu22/env/runtime_env.dockerfile.j2 +++ b/templates/ubuntu22/env/runtime_env.dockerfile.j2 @@ -7,4 +7,5 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino +ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig From 9d1eb465872c1501606ba871dba1be4ba7155d88 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 13:33:08 +0200 Subject: [PATCH 02/30] arg_parser: Fixed regex for new GenAI archives --- utils/arg_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 26bbb627..3f142ae6 100644 --- a/utils/arg_parser.py +++ 
b/utils/arg_parser.py @@ -497,7 +497,7 @@ def parse_args(name: str, description: str): # noqa parser.error('Insufficient arguments. Provide --package_url ' 'or --distribution (with optional --product_version) arguments') if args.mode != 'gen_dockerfile' or args.rhel_platform == 'autobuild': - dev_version = re.search(r'^\d{4}\.\d\.\d\.dev\d{8}$', args.product_version) + dev_version = re.search(r'^\d{4}\.\d\.\d\.(?:d\.)?dev\d{8}$', args.product_version) if dev_version: args.product_version = dev_version.group() else: @@ -513,7 +513,7 @@ def parse_args(name: str, description: str): # noqa f'and {args.distribution} distribution. Please specify --package_url directly.') if args.package_url and not args.build_id: - dev_version = re.search(r'p_(\d{4}\.\d\.\d\.dev\d{8})', args.package_url) + dev_version = re.search(r'_(\d{4}\.\d\.\d\.(?:d\.)?dev\d{8})', args.package_url) if dev_version: # save product version and build version as YYYY.U.V.devYYYYMMDD args.product_version = dev_version.group(1) From b809b3099277c62500745d6b8aed6ec80dc09eb2 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 13:37:43 +0200 Subject: [PATCH 03/30] Add more debug for arg_parser --- utils/arg_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 3f142ae6..9f5c45e9 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -276,8 +276,8 @@ def add_dist_args(cls, parser: argparse.ArgumentParser): def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): if product_version < '2022.1': - parser.error('This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' - '2022.1.0. Please use previous versions of the DockerHub CI.') + parser.error(f'This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' + '2022.1.0. Current detected product version {product_version}. 
Please use previous versions of the DockerHub CI.') def parse_args(name: str, description: str): # noqa """Parse all the args set up above""" From f6015ecc5daadd2c56c1cb5710c359fa27c169c9 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 13:40:31 +0200 Subject: [PATCH 04/30] Replaced f-string with format because of old py version. --- utils/arg_parser.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 9f5c45e9..ad2a0bc4 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -276,8 +276,9 @@ def add_dist_args(cls, parser: argparse.ArgumentParser): def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): if product_version < '2022.1': - parser.error(f'This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' - '2022.1.0. Current detected product version {product_version}. Please use previous versions of the DockerHub CI.') + parser.error('This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' + '2022.1.0. Current detected product version {product_version}. Please use previous versions ' + 'of the DockerHub CI.'.format(product_version=product_version)) def parse_args(name: str, description: str): # noqa """Parse all the args set up above""" From 28d2c30abbbd41a46e86d8dbdaef2cc8d5087223 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 14:14:29 +0200 Subject: [PATCH 05/30] Reworked regex. 
Updated debug info --- utils/arg_parser.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index ad2a0bc4..5cbb7a93 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -276,9 +276,9 @@ def add_dist_args(cls, parser: argparse.ArgumentParser): def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): if product_version < '2022.1': - parser.error('This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' - '2022.1.0. Current detected product version {product_version}. Please use previous versions ' - 'of the DockerHub CI.'.format(product_version=product_version)) + parser.error(f'This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' + '2022.1.0. Current detected product version "{product_version}". Please use previous versions ' + 'of the DockerHub CI.') def parse_args(name: str, description: str): # noqa """Parse all the args set up above""" @@ -438,6 +438,7 @@ def parse_args(name: str, description: str): # noqa 'is supported only for RHEL-based images') if hasattr(args, 'product_version') and args.product_version: + logger.info(f'Found product version {args.product_version} in arguments.') fail_if_product_version_not_supported(args.product_version, parser) product_version = re.search(r'^\d{4}\.\d$', args.product_version) if product_version: @@ -498,7 +499,7 @@ def parse_args(name: str, description: str): # noqa parser.error('Insufficient arguments. 
Provide --package_url ' 'or --distribution (with optional --product_version) arguments') if args.mode != 'gen_dockerfile' or args.rhel_platform == 'autobuild': - dev_version = re.search(r'^\d{4}\.\d\.\d\.(?:d\.)?dev\d{8}$', args.product_version) + dev_version = re.search(r'^\d{4}\.(?:\d\.){2,3}dev\d{8}$', args.product_version) if dev_version: args.product_version = dev_version.group() else: @@ -514,13 +515,14 @@ def parse_args(name: str, description: str): # noqa f'and {args.distribution} distribution. Please specify --package_url directly.') if args.package_url and not args.build_id: - dev_version = re.search(r'_(\d{4}\.\d\.\d\.(?:d\.)?dev\d{8})', args.package_url) + logger.info(f'Parsing product version in the package_url...') + dev_version = re.search(r'_(\d{4}\.(?:\d\.){2,3}dev\d{8})_', args.package_url) if dev_version: # save product version and build version as YYYY.U.V.devYYYYMMDD args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r'_(\d{4}\.\d\.\d)\.(\d{3,4})', args.package_url) + build_id = re.search(r'_(\d{4}(?:\.\d){2,3})\.(\d{3,4})', args.package_url) if build_id: # save product version YYYY.U.V.BBB args.build_id = '.'.join(build_id.groups()) @@ -576,7 +578,7 @@ def parse_args(name: str, description: str): # noqa if args.mode == 'test' and not args.product_version: match = re.search(r':(\d{4}\.\d\.\d)', str(args.tags)) if not match and args.package_url: - match = re.search(r'p_(\d{4}\.\d\.\d)', args.package_url) + match = re.search(r'_(\d{4}\.\d\.\d)', args.package_url) if match: # save product version YYYY.U.V args.product_version = match.group(1) From 29c7a73fcd52c1d8f83bc357cd024695958a0b20 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 14:20:41 +0200 Subject: [PATCH 06/30] Fixed regex.
Reworked whole script with black linter --- utils/arg_parser.py | 761 +++++++++++++++++++++++++------------------- 1 file changed, 431 insertions(+), 330 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 5cbb7a93..349df9cc 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -12,8 +12,7 @@ import logging from utils.loader import INTEL_OPENVINO_VERSION -from utils.utilities import (check_internal_local_path, - check_printable_utf8_chars) +from utils.utilities import check_internal_local_path, check_printable_utf8_chars logger = logging.getLogger(__name__) @@ -21,11 +20,22 @@ class DockerCIArgumentParser(argparse.ArgumentParser): """CLI argument parser for this framework""" - SUPPORTED_OS: typing.List = ['ubuntu18', 'ubuntu20', 'ubuntu22', 'winserver2019', 'windows20h2', 'rhel8'] + SUPPORTED_OS: typing.List = [ + "ubuntu18", + "ubuntu20", + "ubuntu22", + "winserver2019", + "windows20h2", + "rhel8", + ] def __init__(self, prog: typing.Optional[str] = None, description: typing.Optional[str] = None): - super().__init__(prog=prog, description=description, - formatter_class=argparse.RawTextHelpFormatter, add_help=True) + super().__init__( + prog=prog, + description=description, + formatter_class=argparse.RawTextHelpFormatter, + add_help=True, + ) @staticmethod def set_default_subparser(name: str): @@ -37,162 +47,163 @@ def set_default_subparser(name: str): def add_image_args(parser: argparse.ArgumentParser): """Adding args needed to manage the built Docker image""" parser.add_argument( - '-t', - '--tags', - metavar='IMAGE_NAME:TAG', - action='append', - required=' test' in parser.prog, + "-t", + "--tags", + metavar="IMAGE_NAME:TAG", + action="append", + required=" test" in parser.prog, help='Source image name and optionally a tags in the "IMAGE_NAME:TAG" format. ' - 'Default is _: and latest. You can specify some tags.', + "Default is _: and latest. 
You can specify some tags.", ) parser.add_argument( - '--tag_postfix', - metavar='_NAME', - default='', - help='Add special postfix to the end of tag image. ' - 'Image name will be like this _:', + "--tag_postfix", + metavar="_NAME", + default="", + help="Add special postfix to the end of tag image. " + "Image name will be like this _:", ) @staticmethod def add_linter_check_args(parser: argparse.ArgumentParser): parser.add_argument( - '--linter_check', - metavar='NAME', - action='append', + "--linter_check", + metavar="NAME", + action="append", default=[], - help='Enable linter check for image and dockerfile. ' - 'It installs additional 3d-party docker images or executable files. ' - 'Available tests: ' - 'hadolint (https://github.com/hadolint/hadolint), ' - 'dive (https://github.com/wagoodman/dive)', + help="Enable linter check for image and dockerfile. " + "It installs additional 3d-party docker images or executable files. " + "Available tests: " + "hadolint (https://github.com/hadolint/hadolint), " + "dive (https://github.com/wagoodman/dive)", ) @staticmethod def add_build_args(parser: argparse.ArgumentParser): """Adding args needed to build the Docker image""" parser.add_argument( - '--wheels_url', - metavar='URL', - default='', - help='URL to HTML page with links or local path relative to openvino folder to search for OpenVINO wheels ' - '(will be used in a dockerfile as pip install --find-links value). ' - 'By default, openvino and openvino_dev will be installed from PyPi', + "--wheels_url", + metavar="URL", + default="", + help="URL to HTML page with links or local path relative to openvino folder to search for OpenVINO wheels " + "(will be used in a dockerfile as pip install --find-links value). " + "By default, openvino and openvino_dev will be installed from PyPi", ) parser.add_argument( - '--image_json_path', - help='Provide path to save image data in .json format file. 
' - 'By default, it is stored in the logs folder.') + "--image_json_path", + help="Provide path to save image data in .json format file. " + "By default, it is stored in the logs folder.", + ) parser.add_argument( - '--dockerfile_name', - metavar='NAME', - help='Name of the Dockerfile, that will be generated from templates. ' - 'Format is "openvino___.dockerfile"', + "--dockerfile_name", + metavar="NAME", + help="Name of the Dockerfile, that will be generated from templates. " + 'Format is "openvino___.dockerfile"', ) parser.add_argument( - '-d', - '--device', - metavar='NAME', - action='append', - help='Target inference hardware: cpu, gpu, vpu, hddl. Default is all. ' - 'Dockerfile name format has the first letter from device name, ' - 'e.g. for CPU, HDDL it will be openvino_ch__.dockerfile', + "-d", + "--device", + metavar="NAME", + action="append", + help="Target inference hardware: cpu, gpu, vpu, hddl. Default is all. " + "Dockerfile name format has the first letter from device name, " + "e.g. for CPU, HDDL it will be openvino_ch__.dockerfile", ) parser.add_argument( - '-py', - '--python', - choices=['python37', 'python38', 'python310'], - help='Python interpreter for docker image, currently default is python38', + "-py", + "--python", + choices=["python37", "python38", "python310"], + help="Python interpreter for docker image, currently default is python38", ) parser.add_argument( - '--cmake', - choices=['cmake34', 'cmake314'], - default='cmake314', - help='CMake for Windows docker image, default CMake 3.14. For Linux images it is used default for OS.', + "--cmake", + choices=["cmake34", "cmake314"], + default="cmake314", + help="CMake for Windows docker image, default CMake 3.14. For Linux images it is used default for OS.", ) parser.add_argument( - '--msbuild', - choices=['msbuild2019', 'msbuild2019_online'], - help='MSBuild Tools for Windows docker image.' - 'MSBuild Tools are licensed as a supplement your existing Visual Studio license. 
' - 'Please don’t share the image with MSBuild 2019 on a public Docker Hub.', + "--msbuild", + choices=["msbuild2019", "msbuild2019_online"], + help="MSBuild Tools for Windows docker image." + "MSBuild Tools are licensed as a supplement your existing Visual Studio license. " + "Please don’t share the image with MSBuild 2019 on a public Docker Hub.", ) parser.add_argument( - '--pre_stage_msbuild', - choices=['msbuild2019', 'msbuild2019_online'], - help='MSBuild Tools for Windows docker image to use on the first stage. ' - 'Can be required to build some thirdparty dependencies from source code. ' - 'MSBuild Tools are licensed as a supplement your existing Visual Studio license. ', + "--pre_stage_msbuild", + choices=["msbuild2019", "msbuild2019_online"], + help="MSBuild Tools for Windows docker image to use on the first stage. " + "Can be required to build some thirdparty dependencies from source code. " + "MSBuild Tools are licensed as a supplement your existing Visual Studio license. ", ) parser.add_argument( - '-l', - '--layers', - metavar='NAME', - action='append', + "-l", + "--layers", + metavar="NAME", + action="append", default=[], - help='Setup your layer. Use name of .dockerfile.j2 file located in ' - '/templates//layers folder. ' - 'Layer will be added to the end of product dockerfile.', + help="Setup your layer. Use name of .dockerfile.j2 file located in " + "/templates//layers folder. " + "Layer will be added to the end of product dockerfile.", ) parser.add_argument( - '--build_arg', - metavar='VAR_NAME=VALUE', - action='append', + "--build_arg", + metavar="VAR_NAME=VALUE", + action="append", default=[], - help='Specify build or template arguments for your layer. ' - 'You can use "no_samples=True" to remove OMZ, IE samples and demos from final docker image. ' - 'Set "INSTALL_SOURCES=yes" to download source for 3d party LGPL/GPL dependencies.', + help="Specify build or template arguments for your layer. 
" + 'You can use "no_samples=True" to remove OMZ, IE samples and demos from final docker image. ' + 'Set "INSTALL_SOURCES=yes" to download source for 3d party LGPL/GPL dependencies.', ) parser.add_argument( - '--no-cache', - dest='no_cache', - action='store_true', - help='Specify if image should be built without cache. False by default.', + "--no-cache", + dest="no_cache", + action="store_true", + help="Specify if image should be built without cache. False by default.", ) @staticmethod def add_test_args(parser: argparse.ArgumentParser): """Adding args needed to run tests on the built Docker image""" parser.add_argument( - '-k', - metavar='EXPRESSION', - default='', - dest='test_expression', - help='Run tests which match the given substring expression for pytest -k.', + "-k", + metavar="EXPRESSION", + default="", + dest="test_expression", + help="Run tests which match the given substring expression for pytest -k.", ) parser.add_argument( - '-m', - metavar='MARKEXPR', - default='', - dest='test_mark_expression', - help='Run tests which matching given mark expression for pytest -m', + "-m", + metavar="MARKEXPR", + default="", + dest="test_mark_expression", + help="Run tests which matching given mark expression for pytest -m", ) parser.add_argument( - '--sdl_check', - metavar='NAME', - action='append', + "--sdl_check", + metavar="NAME", + action="append", default=[], - help='Enable SDL check for docker host and image. ' - 'It installs additional 3d-party docker images or executable files. ' - 'Available tests: ' - 'snyk (https://github.com/snyk/snyk), ' - 'bench_security (https://github.com/docker/docker-bench-security)', + help="Enable SDL check for docker host and image. " + "It installs additional 3d-party docker images or executable files. 
" + "Available tests: " + "snyk (https://github.com/snyk/snyk), " + "bench_security (https://github.com/docker/docker-bench-security)", ) parser.add_argument( - '--nightly', - action='store_true', + "--nightly", + action="store_true", default=False, help=argparse.SUPPRESS, # Skip tests for regular builds ) @@ -201,16 +212,16 @@ def add_test_args(parser: argparse.ArgumentParser): def add_deploy_args(parser: argparse.ArgumentParser): """Adding args needed to publish the built Docker image to a repository""" parser.add_argument( - '-r', - '--registry', - metavar='URL:PORT', + "-r", + "--registry", + metavar="URL:PORT", required=True, help='Registry host and optionally a port in the "host:port" format', ) parser.add_argument( - '--nightly_save_path', - default='', + "--nightly_save_path", + default="", help=argparse.SUPPRESS, # Setup saving docker image as a binary file ) @@ -218,129 +229,137 @@ def add_deploy_args(parser: argparse.ArgumentParser): def add_dist_args(cls, parser: argparse.ArgumentParser): """Adding arg needed to customize the generated dockerfile""" parser.add_argument( - '-os', + "-os", choices=cls.SUPPORTED_OS, - default='', - help='Operation System for docker image.', + default="", + help="Operation System for docker image.", + ) + + parser.add_argument( + "-dist", + "--distribution", + choices=["base", "runtime", "dev", "dev_no_samples", "custom"], + required=" test" in parser.prog, + help="Distribution type: dev, dev_no_samples, runtime or " + "base (with CPU only and without installing dependencies). " + "Using key --file and " + "-p are mandatory to build base distribution image." + "base dockerfiles are stored in /dockerfiles/ folder.", ) parser.add_argument( - '-dist', - '--distribution', - choices=['base', 'runtime', 'dev', 'dev_no_samples', 'custom'], - required=' test' in parser.prog, - help='Distribution type: dev, dev_no_samples, runtime or ' - 'base (with CPU only and without installing dependencies). 
' - 'Using key --file and ' - '-p are mandatory to build base distribution image.' - 'base dockerfiles are stored in /dockerfiles/ folder.', - ) - - parser.add_argument('-p', - '--product_version', - default='', - help='Product version in format: YYYY.U[.BBB], where BBB - build number is optional.') + "-p", + "--product_version", + default="", + help="Product version in format: YYYY.U[.BBB], where BBB - build number is optional.", + ) parser.add_argument( - '-w', - '--wheels_version', - default='', - help='Version specifier of OpenVINO wheels to install (will be passed to pip install). ' - 'Will be equal to product version by default.', + "-w", + "--wheels_version", + default="", + help="Version specifier of OpenVINO wheels to install (will be passed to pip install). " + "Will be equal to product version by default.", ) parser.add_argument( - '-s', - '--source', - choices=['url', 'local'], - default='url', - help='Source of the package: external URL or relative local path. By default: url.', + "-s", + "--source", + choices=["url", "local"], + default="url", + help="Source of the package: external URL or relative local path. 
By default: url.", ) parser.add_argument( - '-u', - '--package_url', - metavar='URL', - default='', - help='Package external or local url, use http://, https://, ftp:// access scheme or ' - 'relative local path', + "-u", + "--package_url", + metavar="URL", + default="", + help="Package external or local url, use http://, https://, ftp:// access scheme or " + "relative local path", ) parser.add_argument( - '-f', - '--file', - metavar='NAME', - help='Name of the Dockerfile, that uses to build an image.', + "-f", + "--file", + metavar="NAME", + help="Name of the Dockerfile, that uses to build an image.", ) def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): - if product_version < '2022.1': - parser.error(f'This version of the DockerHub CI framework does not support OpenVINO releases earlier than ' - '2022.1.0. Current detected product version "{product_version}". Please use previous versions ' - 'of the DockerHub CI.') + if product_version < "2022.1": + parser.error( + "This version of the DockerHub CI framework does not support OpenVINO releases earlier than " + f'2022.1.0. Current detected product version "{product_version}". Please use previous versions ' + "of the DockerHub CI." 
+ ) + def parse_args(name: str, description: str): # noqa """Parse all the args set up above""" parser = DockerCIArgumentParser(name, description) - subparsers = parser.add_subparsers(dest='mode') + subparsers = parser.add_subparsers(dest="mode") - gen_dockerfile_subparser = subparsers.add_parser('gen_dockerfile', help='Generate a dockerfile to ' - 'dockerfiles/ folder') + gen_dockerfile_subparser = subparsers.add_parser( + "gen_dockerfile", help="Generate a dockerfile to " "dockerfiles/ folder" + ) parser.add_build_args(gen_dockerfile_subparser) parser.add_linter_check_args(gen_dockerfile_subparser) parser.add_dist_args(gen_dockerfile_subparser) rhel_platform_group = gen_dockerfile_subparser.add_mutually_exclusive_group() rhel_platform_group.add_argument( - '--rhel_platform', - choices=['docker', 'openshift', 'autobuild'], - default='docker', - help='Specify target platform to generate RHEL dockerfiles (default is docker). ' - 'Choose autobuild option for Red Hat portal Build System.', + "--rhel_platform", + choices=["docker", "openshift", "autobuild"], + default="docker", + help="Specify target platform to generate RHEL dockerfiles (default is docker). " + "Choose autobuild option for Red Hat portal Build System.", ) rhel_platform_group.add_argument( - '--openshift', - action='store_const', - dest='rhel_platform', - const='openshift', + "--openshift", + action="store_const", + dest="rhel_platform", + const="openshift", default=False, - help='Create a dockerfile intended to build on Red Hat OpenShift Container Platform (RHEL images only). ' - 'Alias for --rhel_platform=openshift', + help="Create a dockerfile intended to build on Red Hat OpenShift Container Platform (RHEL images only). 
" + "Alias for --rhel_platform=openshift", ) - build_subparser = subparsers.add_parser('build', help='Build a docker image') + build_subparser = subparsers.add_parser("build", help="Build a docker image") parser.add_build_args(build_subparser) parser.add_linter_check_args(build_subparser) parser.add_dist_args(build_subparser) parser.add_image_args(build_subparser) - build_test_subparser = subparsers.add_parser('build_test', help='Build and test a docker image') + build_test_subparser = subparsers.add_parser("build_test", help="Build and test a docker image") parser.add_build_args(build_test_subparser) parser.add_linter_check_args(build_test_subparser) parser.add_dist_args(build_test_subparser) parser.add_image_args(build_test_subparser) parser.add_test_args(build_test_subparser) - test_subparser = subparsers.add_parser('test', help='Test a local docker image') + test_subparser = subparsers.add_parser("test", help="Test a local docker image") parser.add_linter_check_args(test_subparser) parser.add_dist_args(test_subparser) parser.add_image_args(test_subparser) parser.add_test_args(test_subparser) test_subparser.add_argument( - '-r', - '--registry', - metavar='URL:PORT', - default='', + "-r", + "--registry", + metavar="URL:PORT", + default="", help='Registry host and optionally a port in the "host:port" format. ' - 'Will be used to pull the image if it does not exist', + "Will be used to pull the image if it does not exist", ) - deploy_subparser = subparsers.add_parser('deploy', help='Deploy a docker image') + deploy_subparser = subparsers.add_parser("deploy", help="Deploy a docker image") parser.add_image_args(deploy_subparser) parser.add_deploy_args(deploy_subparser) - all_subparser = subparsers.add_parser('all', help='Build, test and deploy a docker image. [Default option]') + all_subparser = subparsers.add_parser( + "all", help="Build, test and deploy a docker image. 
[Default option]" + ) parser.add_build_args(all_subparser) parser.add_linter_check_args(all_subparser) parser.add_dist_args(all_subparser) @@ -348,7 +367,7 @@ def parse_args(name: str, description: str): # noqa parser.add_test_args(all_subparser) parser.add_deploy_args(all_subparser) - parser.set_default_subparser('all') + parser.set_default_subparser("all") args = parser.parse_args() @@ -360,255 +379,337 @@ def parse_args(name: str, description: str): # noqa elif isinstance(arg_val, str): check_printable_utf8_chars(arg_val) - for attr_name in ('package_url', 'file', 'image_json_path'): + for attr_name in ("package_url", "file", "image_json_path"): if hasattr(args, attr_name) and getattr(args, attr_name): check_internal_local_path(getattr(args, attr_name)) - if args.mode != 'deploy' and args.package_url and args.source == 'local' and not args.package_url.startswith(( - 'http://', 'https://', 'ftp://')): + if ( + args.mode != "deploy" + and args.package_url + and args.source == "local" + and not args.package_url.startswith(("http://", "https://", "ftp://")) + ): args.package_url = str(pathlib.Path(args.package_url).as_posix()) - if args.mode not in ('test', 'deploy') and hasattr(args, 'distribution') and args.distribution == 'custom': - parser.error('For a custom distribution, only test and deploy modes are available.') - - if hasattr(args, 'sdl_check') and args.sdl_check and ( - 'snyk' not in args.sdl_check and 'bench_security' not in args.sdl_check): - parser.error('Incorrect arguments for --sdl_check. Available tests: snyk, bench_security') - - if hasattr(args, 'linter_check') and args.linter_check and ( - 'hadolint' not in args.linter_check and 'dive' not in args.linter_check): - parser.error('Incorrect arguments for --linter_check. 
Available tests: hadolint, dive') - - if args.mode in ('build', 'build_test', 'all') and args.distribution == 'base' and not args.file: - parser.error('The following argument is required: -f/--file') - - if args.mode == 'deploy' and not args.tags: - parser.error('The following argument is required: -t/--tags') - - if hasattr(args, 'os') and not args.os: + if ( + args.mode not in ("test", "deploy") + and hasattr(args, "distribution") + and args.distribution == "custom" + ): + parser.error("For a custom distribution, only test and deploy modes are available.") + + if ( + hasattr(args, "sdl_check") + and args.sdl_check + and ("snyk" not in args.sdl_check and "bench_security" not in args.sdl_check) + ): + parser.error("Incorrect arguments for --sdl_check. Available tests: snyk, bench_security") + + if ( + hasattr(args, "linter_check") + and args.linter_check + and ("hadolint" not in args.linter_check and "dive" not in args.linter_check) + ): + parser.error("Incorrect arguments for --linter_check. Available tests: hadolint, dive") + + if ( + args.mode in ("build", "build_test", "all") + and args.distribution == "base" + and not args.file + ): + parser.error("The following argument is required: -f/--file") + + if args.mode == "deploy" and not args.tags: + parser.error("The following argument is required: -t/--tags") + + if hasattr(args, "os") and not args.os: possible_os: typing.Set[str] = set() if args.package_url: possible_os.update(filter(lambda os: os in args.package_url, parser.SUPPORTED_OS)) - if hasattr(args, 'tags') and args.tags: + if hasattr(args, "tags") and args.tags: for tag in args.tags: possible_os.update(filter(lambda os: os in tag, parser.SUPPORTED_OS)) # noqa: B023 if len(possible_os) == 1: args.os = possible_os.pop() else: - parser.error('Can not get image OS from package URL or tags. 
' - 'Please specify -os directly') - - if args.mode in ('gen_dockerfile', 'build', 'build_test', - 'all') and args.distribution == 'dev_no_samples' and 'ubuntu' not in args.os: - parser.error('Distribution dev_no_samples is available only for Ubuntu operation system') - - if args.mode == 'gen_dockerfile' and args.distribution == 'base': - parser.error('Generating dockerfile for base distribution is not available. ' - 'Use generated base dockerfiles are stored in /dockerfiles/ folder') + parser.error( + "Can not get image OS from package URL or tags. " "Please specify -os directly" + ) + + if ( + args.mode in ("gen_dockerfile", "build", "build_test", "all") + and args.distribution == "dev_no_samples" + and "ubuntu" not in args.os + ): + parser.error("Distribution dev_no_samples is available only for Ubuntu operation system") + + if args.mode == "gen_dockerfile" and args.distribution == "base": + parser.error( + "Generating dockerfile for base distribution is not available. " + "Use generated base dockerfiles are stored in /dockerfiles/ folder" + ) - if args.mode == 'test' and not (args.tags and args.distribution): - parser.error('Options --tags and --distribution are mandatory. Image operation system is "ubuntu18"' - ' by default.') + if args.mode == "test" and not (args.tags and args.distribution): + parser.error( + 'Options --tags and --distribution are mandatory. Image operation system is "ubuntu18"' + " by default." + ) - if args.mode == 'test' and 'runtime' in args.distribution and not args.package_url: - logger.info('\nYou can run samples/demos on runtime docker image. ' - 'Please provide --package_url key with path to dev distribution package in ' - 'http/https/ftp access scheme or a local file in the project location as dependent package ' - 'to run all available tests.\n') + if args.mode == "test" and "runtime" in args.distribution and not args.package_url: + logger.info( + "\nYou can run samples/demos on runtime docker image. 
" + "Please provide --package_url key with path to dev distribution package in " + "http/https/ftp access scheme or a local file in the project location as dependent package " + "to run all available tests.\n" + ) - if args.mode in ('deploy', 'all') and not hasattr(args, 'registry'): - parser.error('Option --registry is mandatory for this mode.') + if args.mode in ("deploy", "all") and not hasattr(args, "registry"): + parser.error("Option --registry is mandatory for this mode.") - if hasattr(args, 'image_json_path') and args.image_json_path: + if hasattr(args, "image_json_path") and args.image_json_path: args.image_json_path = pathlib.Path(args.image_json_path).absolute() if args.image_json_path.is_symlink(): - parser.error('Do not use symlink and hard link for --image_json_path key. It is an insecure way.') + parser.error( + "Do not use symlink and hard link for --image_json_path key. It is an insecure way." + ) - if hasattr(args, 'file') and args.file: + if hasattr(args, "file") and args.file: args.file = pathlib.Path(args.file).absolute() if args.file.is_symlink(): - parser.error('Do not use symlink and hard link for --file key. It is an insecure way. ') + parser.error("Do not use symlink and hard link for --file key. It is an insecure way. 
") if not args.file.exists(): - parser.error(f'Cannot find specified Dockerfile: {str(args.file)}.') - - if not hasattr(args, 'rhel_platform'): - args.rhel_platform = 'docker' - if args.rhel_platform != 'docker' and args.os != 'rhel8': - parser.error('Dockerfile generation intended for non-Docker platforms ' - 'is supported only for RHEL-based images') + parser.error(f"Cannot find specified Dockerfile: {str(args.file)}.") + + if not hasattr(args, "rhel_platform"): + args.rhel_platform = "docker" + if args.rhel_platform != "docker" and args.os != "rhel8": + parser.error( + "Dockerfile generation intended for non-Docker platforms " + "is supported only for RHEL-based images" + ) - if hasattr(args, 'product_version') and args.product_version: - logger.info(f'Found product version {args.product_version} in arguments.') + if hasattr(args, "product_version") and args.product_version: + logger.info(f"Found product version {args.product_version} in arguments.") fail_if_product_version_not_supported(args.product_version, parser) - product_version = re.search(r'^\d{4}\.\d$', args.product_version) + product_version = re.search(r"^\d{4}\.\d$", args.product_version) if product_version: # save product version YYYY.U as YYYY.U.0 - args.product_version = f'{product_version.group()}.0' - - if args.mode in ('gen_dockerfile', 'build', 'build_test', 'all'): - if args.package_url and not args.package_url.startswith(('http://', 'https://', 'ftp://')): - if args.source == 'local' and not pathlib.Path(args.package_url).exists(): - parser.error('Provided local path of the package should be relative to folder ' - f'or should be an http/https/ftp access scheme: {args.package_url}') - elif args.source == 'url' and args.distribution != 'base': - parser.error('Provided URL is not supported, use http://, https:// or ftp:// access scheme') - elif args.source == 'local' and pathlib.Path(args.package_url).is_symlink(): - parser.error('Do not use symlink and hard link to specify local package url. 
' - 'It is an insecure way.') + args.product_version = f"{product_version.group()}.0" + + if args.mode in ("gen_dockerfile", "build", "build_test", "all"): + if args.package_url and not args.package_url.startswith(("http://", "https://", "ftp://")): + if args.source == "local" and not pathlib.Path(args.package_url).exists(): + parser.error( + "Provided local path of the package should be relative to folder " + f"or should be an http/https/ftp access scheme: {args.package_url}" + ) + elif args.source == "url" and args.distribution != "base": + parser.error( + "Provided URL is not supported, use http://, https:// or ftp:// access scheme" + ) + elif args.source == "local" and pathlib.Path(args.package_url).is_symlink(): + parser.error( + "Do not use symlink and hard link to specify local package url. " + "It is an insecure way." + ) if not args.python: - if args.os in ('ubuntu22'): - args.python = 'python310' + if args.os in ("ubuntu22"): + args.python = "python310" else: - args.python = 'python38' + args.python = "python38" - if args.python == 'python38' and 'win' in args.os: - if not hasattr(args, 'pre_stage_msbuild') or not args.pre_stage_msbuild: - parser.error('Option --pre_stage_msbuild is required for Windows images to build the latest version ' - 'of Python 3.8') + if args.python == "python38" and "win" in args.os: + if not hasattr(args, "pre_stage_msbuild") or not args.pre_stage_msbuild: + parser.error( + "Option --pre_stage_msbuild is required for Windows images to build the latest version " + "of Python 3.8" + ) if not args.distribution and args.package_url: - if '_runtime_' in args.package_url: - args.distribution = 'runtime' - elif '_dev_' in args.package_url: - args.distribution = 'dev' + if "_runtime_" in args.package_url: + args.distribution = "runtime" + elif "_dev_" in args.package_url: + args.distribution = "dev" else: - parser.error(f'Cannot get distribution type from the package URL provided. 
{args.package_url} ' - 'Please specify --distribution directly.') + parser.error( + f"Cannot get distribution type from the package URL provided. {args.package_url} " + "Please specify --distribution directly." + ) # set installation method for the package - args.install_type = 'copy' + args.install_type = "copy" # workaround for https://bugs.python.org/issue16399 issue - if not args.device and 'win' not in args.os: - if args.distribution == 'base': - args.device = ['cpu'] - elif args.os == 'rhel8': - args.device = ['cpu', 'gpu'] + if not args.device and "win" not in args.os: + if args.distribution == "base": + args.device = ["cpu"] + elif args.os == "rhel8": + args.device = ["cpu", "gpu"] else: - args.device = ['cpu', 'gpu'] # 2022.3 v/h not supported + args.device = ["cpu", "gpu"] # 2022.3 v/h not supported elif not args.device: - args.device = ['cpu'] + args.device = ["cpu"] if not args.package_url and not args.product_version: latest_public_version = max(INTEL_OPENVINO_VERSION.__iter__()) - args.product_version = '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version - args.build_id = '' + args.product_version = ( + "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version + ) + args.build_id = "" - if not args.package_url and args.distribution not in ('base',): + if not args.package_url and args.distribution not in ("base",): if not args.distribution or not args.product_version: - parser.error('Insufficient arguments. Provide --package_url ' - 'or --distribution (with optional --product_version) arguments') - if args.mode != 'gen_dockerfile' or args.rhel_platform == 'autobuild': - dev_version = re.search(r'^\d{4}\.(?:\d\.){2,3}dev\d{8}$', args.product_version) + parser.error( + "Insufficient arguments. 
Provide --package_url " + "or --distribution (with optional --product_version) arguments" + ) + if args.mode != "gen_dockerfile" or args.rhel_platform == "autobuild": + dev_version = re.search(r"^\d{4}\.(?:\d\.){2,3}dev\d{8}$", args.product_version) if dev_version: args.product_version = dev_version.group() else: - lts_version = re.search(r'(\d{4}\.\d\.\d)', args.product_version) + lts_version = re.search(r"(\d{4}\.\d\.\d)", args.product_version) if lts_version: args.product_version = lts_version.group() # save product version YYYY.U.V else: - parser.error(f'Cannot find package url for {args.product_version} version') + parser.error(f"Cannot find package url for {args.product_version} version") with contextlib.suppress(KeyError): - args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os][args.distribution] + args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os][ + args.distribution + ] if not args.package_url: - parser.error(f'Cannot find package url for {args.product_version} version ' - f'and {args.distribution} distribution. Please specify --package_url directly.') + parser.error( + f"Cannot find package url for {args.product_version} version " + f"and {args.distribution} distribution. Please specify --package_url directly." 
+ ) if args.package_url and not args.build_id: - logger.info(f'Parsing product version in the package_url...') - dev_version = re.search(r'_(\d{4}\.(?:\d\.){2,3}dev\d{8})_)', args.package_url) + logger.info(f"Parsing product version in the package_url...") + dev_version = re.search(r"_(\d{4}\.(?:\d\.){2,3}dev\d{8})_", args.package_url) if dev_version: # save product version and build version as YYYY.U.V.devYYYYMMDD args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r'_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})', args.package_url) + build_id = re.search(r"_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})", args.package_url) if build_id: # save product version YYYY.U.V.BBB - args.build_id = '.'.join(build_id.groups()) + args.build_id = ".".join(build_id.groups()) # save product version YYYY.U.V args.product_version = build_id.group(1) else: args.build_id = args.product_version if not args.dockerfile_name: - devices = ''.join([d[0] for d in args.device]) - layers = '_'.join(args.layers) - openshift = 'openshift_' if args.rhel_platform == 'openshift' else '' + devices = "".join([d[0] for d in args.device]) + layers = "_".join(args.layers) + openshift = "openshift_" if args.rhel_platform == "openshift" else "" version = args.product_version if layers: - args.dockerfile_name = f'openvino_{openshift}{layers}_{version}.dockerfile' + args.dockerfile_name = f"openvino_{openshift}{layers}_{version}.dockerfile" else: - args.dockerfile_name = f'openvino_{devices}_{openshift}{args.distribution}_{version}.dockerfile' + args.dockerfile_name = ( + f"openvino_{devices}_{openshift}{args.distribution}_{version}.dockerfile" + ) - if not hasattr(args, 'wheels_version') or not args.wheels_version: - args.wheels_version = (args.product_version if args.build_id == args.product_version - else f'{args.product_version}.*') + if not hasattr(args, "wheels_version") or not args.wheels_version: + args.wheels_version = ( + args.product_version + if args.build_id 
== args.product_version + else f"{args.product_version}.*" + ) - if not hasattr(args, 'tags') or not args.tags: - layers = '_'.join(args.layers) - tgl_postfix = '' + if not hasattr(args, "tags") or not args.tags: + layers = "_".join(args.layers) + tgl_postfix = "" if layers: - args.tags = [f'{args.os}_{layers}:' - f'{args.build_id if args.build_id else args.product_version}{tgl_postfix}', - f'{args.os}_{layers}:latest'] - if hasattr(args, 'tag_postfix') and args.tag_postfix: - args.tags.append(f'{args.os}_{layers}:{args.build_id if args.build_id else args.product_version}' - f'{tgl_postfix}{args.tag_postfix}') - elif args.distribution == 'base': - args.tags = [f'{args.os}_{args.distribution}_cpu:' - f'{args.product_version}', - f'{args.os}_{args.distribution}_cpu:latest'] - if hasattr(args, 'tag_postfix') and args.tag_postfix: - args.tags.append(f'{args.os}_{args.distribution}_cpu:' - f'{args.product_version}{args.tag_postfix}') + args.tags = [ + f"{args.os}_{layers}:" + f"{args.build_id if args.build_id else args.product_version}{tgl_postfix}", + f"{args.os}_{layers}:latest", + ] + if hasattr(args, "tag_postfix") and args.tag_postfix: + args.tags.append( + f"{args.os}_{layers}:{args.build_id if args.build_id else args.product_version}" + f"{tgl_postfix}{args.tag_postfix}" + ) + elif args.distribution == "base": + args.tags = [ + f"{args.os}_{args.distribution}_cpu:" f"{args.product_version}", + f"{args.os}_{args.distribution}_cpu:latest", + ] + if hasattr(args, "tag_postfix") and args.tag_postfix: + args.tags.append( + f"{args.os}_{args.distribution}_cpu:" + f"{args.product_version}{args.tag_postfix}" + ) else: - args.tags = [f'{args.os}_{args.distribution}:' - f'{args.build_id if args.build_id else args.product_version}{tgl_postfix}', - f'{args.os}_{args.distribution}:latest'] - if hasattr(args, 'tag_postfix') and args.tag_postfix: - args.tags.append(f'{args.os}_{args.distribution}:' - f'{args.build_id if args.build_id else args.product_version}' - 
f'{tgl_postfix}{args.tag_postfix}') - - if args.mode not in ('test', 'deploy'): + args.tags = [ + f"{args.os}_{args.distribution}:" + f"{args.build_id if args.build_id else args.product_version}{tgl_postfix}", + f"{args.os}_{args.distribution}:latest", + ] + if hasattr(args, "tag_postfix") and args.tag_postfix: + args.tags.append( + f"{args.os}_{args.distribution}:" + f"{args.build_id if args.build_id else args.product_version}" + f"{tgl_postfix}{args.tag_postfix}" + ) + + if args.mode not in ("test", "deploy"): args.year = args.build_id[:4] if args.build_id else args.product_version[:4] - if args.mode == 'test' and not args.product_version: - match = re.search(r':(\d{4}\.\d\.\d)', str(args.tags)) + if args.mode == "test" and not args.product_version: + match = re.search(r":(\d{4}\.\d\.\d)", str(args.tags)) if not match and args.package_url: - match = re.search(r'_(\d{4}\.\d\.\d)', args.package_url) + match = re.search(r"_(\d{4}\.\d\.\d)", args.package_url) if match: # save product version YYYY.U.V args.product_version = match.group(1) - elif args.distribution == 'custom': + elif args.distribution == "custom": latest_public_version = list(INTEL_OPENVINO_VERSION.keys())[-1] - args.product_version = '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version + args.product_version = ( + "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version + ) else: - parser.error('Cannot get product_version from the package URL and docker image. ' - 'Please specify --product_version directly.') + parser.error( + "Cannot get product_version from the package URL and docker image. " + "Please specify --product_version directly." 
+ ) - if args.mode in ('test') and (not hasattr(args, 'wheels_version') or not args.wheels_version): + if args.mode in ("test") and (not hasattr(args, "wheels_version") or not args.wheels_version): latest_public_version = max(INTEL_OPENVINO_VERSION.__iter__()) - latest_public_version = '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version - args.wheels_version = args.product_version if hasattr(args, 'product_version') else latest_public_version + latest_public_version = ( + "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version + ) + args.wheels_version = ( + args.product_version if hasattr(args, "product_version") else latest_public_version + ) - if hasattr(args, 'product_version'): + if hasattr(args, "product_version"): fail_if_product_version_not_supported(args.product_version, parser) - if hasattr(args, 'distribution') and args.distribution == 'custom': - if subprocess.call(['docker', 'run', '--rm', args.tags[0], 'ls', 'extras/opencv'], # nosec - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) != 0: - args.distribution = 'custom-no-cv' + if hasattr(args, "distribution") and args.distribution == "custom": + if ( + subprocess.call( + ["docker", "run", "--rm", args.tags[0], "ls", "extras/opencv"], # nosec + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + != 0 + ): + args.distribution = "custom-no-cv" else: - args.distribution = 'custom-full' + args.distribution = "custom-full" - if hasattr(args, 'distribution'): - if not args.package_url and args.mode == 'test' and args.distribution == 'custom-no-cv': + if hasattr(args, "distribution"): + if not args.package_url and args.mode == "test" and args.distribution == "custom-no-cv": if args.product_version in INTEL_OPENVINO_VERSION: - args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os]['dev'] + args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os]["dev"] else: - parser.error(f'Cannot find URL to package with test dependencies 
for {args.product_version} release. ' - f'Please specify --package_url directly') + parser.error( + f"Cannot find URL to package with test dependencies for {args.product_version} release. " + f"Please specify --package_url directly" + ) return args From e95af9608f05b031c505e643c9b17dfa58e1472a Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 14:47:04 +0200 Subject: [PATCH 07/30] Updated copy.dockerfile with possible tar.gz extension instead of tgz --- templates/ubuntu18/install/copy.dockerfile.j2 | 4 ++-- templates/ubuntu20/install/copy.dockerfile.j2 | 4 ++-- templates/ubuntu22/install/copy.dockerfile.j2 | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/templates/ubuntu18/install/copy.dockerfile.j2 b/templates/ubuntu18/install/copy.dockerfile.j2 index 684450ca..0ac650a4 100644 --- a/templates/ubuntu18/install/copy.dockerfile.j2 +++ b/templates/ubuntu18/install/copy.dockerfile.j2 @@ -5,7 +5,7 @@ ENV INTEL_OPENVINO_DIR /opt/intel/openvino # Creating user openvino and adding it to groups"users" RUN useradd -ms /bin/bash -G users openvino -RUN tar -xzf "${TEMP_DIR}"/*.tgz && \ +RUN find "${TEMP_DIR}" -name "*.tgz" -o -name "*.tar.gz" -exec tar -xzf {} \; && \ OV_BUILD="$(find . -maxdepth 1 -type d -name "*openvino*" | grep -oP '(?<=_)\d+.\d+.\d.\d+')" && \ OV_YEAR="$(echo $OV_BUILD | grep -oP '^[^\d]*(\d+)')" && \ OV_FOLDER="$(find . 
-maxdepth 1 -type d -name "*openvino*")" && \ @@ -14,7 +14,7 @@ RUN tar -xzf "${TEMP_DIR}"/*.tgz && \ rm -rf "${TEMP_DIR:?}"/"$OV_FOLDER" && \ ln --symbolic /opt/intel/openvino_"$OV_BUILD"/ /opt/intel/openvino && \ ln --symbolic /opt/intel/openvino_"$OV_BUILD"/ /opt/intel/openvino_"$OV_YEAR" && \ - rm -rf ${INTEL_OPENVINO_DIR}/tools/workbench && rm -rf ${TEMP_DIR} && \ + rm -rf ${TEMP_DIR} && \ chown -R openvino /opt/intel/openvino_"$OV_BUILD" {% if no_samples is defined or 'dev_no_samples' == distribution %} RUN rm -rf ${INTEL_OPENVINO_DIR}/samples diff --git a/templates/ubuntu20/install/copy.dockerfile.j2 b/templates/ubuntu20/install/copy.dockerfile.j2 index 4d73b316..2d00ff1d 100644 --- a/templates/ubuntu20/install/copy.dockerfile.j2 +++ b/templates/ubuntu20/install/copy.dockerfile.j2 @@ -5,7 +5,7 @@ ENV INTEL_OPENVINO_DIR=/opt/intel/openvino # Creating user openvino and adding it to groups"users" RUN useradd -ms /bin/bash -G users openvino -RUN tar -xzf "${TEMP_DIR}"/*.tgz && \ +RUN find "${TEMP_DIR}" -name "*.tgz" -o -name "*.tar.gz" -exec tar -xzf {} \; && \ OV_BUILD="$(find . -maxdepth 1 -type d -name "*openvino*" | grep -oP '(?<=_)\d+.\d+.\d.\d+')" && \ OV_YEAR="$(echo "$OV_BUILD" | grep -oP '^[^\d]*(\d+)')" && \ OV_FOLDER="$(find . 
-maxdepth 1 -type d -name "*openvino*")" && \ @@ -14,7 +14,7 @@ RUN tar -xzf "${TEMP_DIR}"/*.tgz && \ rm -rf "${TEMP_DIR:?}"/"$OV_FOLDER" && \ ln --symbolic /opt/intel/openvino_"$OV_BUILD"/ /opt/intel/openvino && \ ln --symbolic /opt/intel/openvino_"$OV_BUILD"/ /opt/intel/openvino_"$OV_YEAR" && \ - rm -rf "${INTEL_OPENVINO_DIR}/tools/workbench" && rm -rf "${TEMP_DIR}" && \ + rm -rf "${TEMP_DIR}" && \ chown -R openvino /opt/intel/openvino_"$OV_BUILD" {% if no_samples is defined or 'dev_no_samples' == distribution %} RUN rm -rf "${INTEL_OPENVINO_DIR}/samples" diff --git a/templates/ubuntu22/install/copy.dockerfile.j2 b/templates/ubuntu22/install/copy.dockerfile.j2 index 4d73b316..2d00ff1d 100644 --- a/templates/ubuntu22/install/copy.dockerfile.j2 +++ b/templates/ubuntu22/install/copy.dockerfile.j2 @@ -5,7 +5,7 @@ ENV INTEL_OPENVINO_DIR=/opt/intel/openvino # Creating user openvino and adding it to groups"users" RUN useradd -ms /bin/bash -G users openvino -RUN tar -xzf "${TEMP_DIR}"/*.tgz && \ +RUN find "${TEMP_DIR}" -name "*.tgz" -o -name "*.tar.gz" -exec tar -xzf {} \; && \ OV_BUILD="$(find . -maxdepth 1 -type d -name "*openvino*" | grep -oP '(?<=_)\d+.\d+.\d.\d+')" && \ OV_YEAR="$(echo "$OV_BUILD" | grep -oP '^[^\d]*(\d+)')" && \ OV_FOLDER="$(find . 
-maxdepth 1 -type d -name "*openvino*")" && \ @@ -14,7 +14,7 @@ RUN tar -xzf "${TEMP_DIR}"/*.tgz && \ rm -rf "${TEMP_DIR:?}"/"$OV_FOLDER" && \ ln --symbolic /opt/intel/openvino_"$OV_BUILD"/ /opt/intel/openvino && \ ln --symbolic /opt/intel/openvino_"$OV_BUILD"/ /opt/intel/openvino_"$OV_YEAR" && \ - rm -rf "${INTEL_OPENVINO_DIR}/tools/workbench" && rm -rf "${TEMP_DIR}" && \ + rm -rf "${TEMP_DIR}" && \ chown -R openvino /opt/intel/openvino_"$OV_BUILD" {% if no_samples is defined or 'dev_no_samples' == distribution %} RUN rm -rf "${INTEL_OPENVINO_DIR}/samples" From f438fb0ec24161fbdf2fac07502120658abfe45a Mon Sep 17 00:00:00 2001 From: Anokhov Date: Mon, 26 Aug 2024 15:35:06 +0200 Subject: [PATCH 08/30] Fixed wheels version install for openvino-tokenizers & openvino-genai --- templates/rhel8/dist/dev.dockerfile.j2 | 8 ++++---- templates/ubuntu20/dist/dev.dockerfile.j2 | 8 ++++---- templates/ubuntu22/dist/dev.dockerfile.j2 | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/templates/rhel8/dist/dev.dockerfile.j2 b/templates/rhel8/dist/dev.dockerfile.j2 index 8b172509..0e6b8b1d 100644 --- a/templates/rhel8/dist/dev.dockerfile.j2 +++ b/templates/rhel8/dist/dev.dockerfile.j2 @@ -7,13 +7,13 @@ RUN yum install -y cmake git && yum clean all && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}" && \ ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ 
else \ ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu ; \ fi diff --git a/templates/ubuntu20/dist/dev.dockerfile.j2 b/templates/ubuntu20/dist/dev.dockerfile.j2 index f7a1a1a3..5a5e5d72 100644 --- a/templates/ubuntu20/dist/dev.dockerfile.j2 +++ b/templates/ubuntu20/dist/dev.dockerfile.j2 @@ -6,13 +6,13 @@ ARG OPENVINO_WHEELS_URL RUN apt-get update && apt-get install -y --no-install-recommends cmake make git && rm -rf /var/lib/apt/lists/* && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}" && \ ${PYTHON_VER} -m pip install --no-cache-dir 
openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu; \ fi diff --git a/templates/ubuntu22/dist/dev.dockerfile.j2 b/templates/ubuntu22/dist/dev.dockerfile.j2 index 99f22a0a..b3f8c61d 100644 --- a/templates/ubuntu22/dist/dev.dockerfile.j2 +++ b/templates/ubuntu22/dist/dev.dockerfile.j2 @@ -6,13 +6,13 @@ ARG OPENVINO_WHEELS_URL RUN apt-get update && apt-get install -y --no-install-recommends cmake make git && rm -rf /var/lib/apt/lists/* && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" && \ 
+ ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}" && \ ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}.0" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-genai=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu; \ fi From 207d37dd226c0027336cd3332b55bc0ba213a534 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Fri, 30 Aug 2024 13:09:39 +0200 Subject: [PATCH 09/30] arg_parser: Fixed regex for build_id --- utils/arg_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 349df9cc..892449b0 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -591,7 +591,7 @@ def parse_args(name: str, description: str): # noqa args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r"_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})", args.package_url) + build_id = 
re.search(r"_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})?", args.package_url) if build_id: # save product version YYYY.U.V.BBB args.build_id = ".".join(build_id.groups()) From 6140efc7a134f130f40d5b484b18c37c67ca7976 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Tue, 3 Sep 2024 13:12:26 +0200 Subject: [PATCH 10/30] Reset changes for RHEL --- templates/rhel8/dist/dev.dockerfile.j2 | 12 +++--------- templates/rhel8/env/dev_env.dockerfile.j2 | 1 - templates/rhel8/env/runtime_env.dockerfile.j2 | 1 - 3 files changed, 3 insertions(+), 11 deletions(-) diff --git a/templates/rhel8/dist/dev.dockerfile.j2 b/templates/rhel8/dist/dev.dockerfile.j2 index 0e6b8b1d..695f6898 100644 --- a/templates/rhel8/dist/dev.dockerfile.j2 +++ b/templates/rhel8/dist/dev.dockerfile.j2 @@ -5,16 +5,10 @@ ARG OPENVINO_WHEELS_URL # hadolint ignore=SC2102,DL3033 RUN yum install -y cmake git && yum clean all && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ - - ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino-genai=="${OPENVINO_WHEELS_VERSION}" && \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino-tokenizers=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre 
openvino-genai=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu ; \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu ; \ fi # download source for PyPi LGPL packages diff --git a/templates/rhel8/env/dev_env.dockerfile.j2 b/templates/rhel8/env/dev_env.dockerfile.j2 index 481a0712..dcf972ec 100644 --- a/templates/rhel8/env/dev_env.dockerfile.j2 +++ b/templates/rhel8/env/dev_env.dockerfile.j2 @@ -8,5 +8,4 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino -ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig diff --git a/templates/rhel8/env/runtime_env.dockerfile.j2 b/templates/rhel8/env/runtime_env.dockerfile.j2 index 476d274d..9eb25ba5 100644 --- a/templates/rhel8/env/runtime_env.dockerfile.j2 +++ b/templates/rhel8/env/runtime_env.dockerfile.j2 @@ -8,5 +8,4 @@ ENV TBB_DIR=/opt/intel/openvino/runtime/3rdparty/tbb/cmake ENV ngraph_DIR=/opt/intel/openvino/runtime/cmake ENV OpenVINO_DIR=/opt/intel/openvino/runtime/cmake ENV INTEL_OPENVINO_DIR=/opt/intel/openvino -ENV OV_TOKENIZER_PREBUILD_EXTENSION_PATH=/opt/intel/openvino/runtime/lib/intel64/libopenvino_tokenizers.so ENV 
PKG_CONFIG_PATH=/opt/intel/openvino/runtime/lib/intel64/pkgconfig From b93e772a4afdd873f5a5ac4cd8b69254796f6a14 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Tue, 3 Sep 2024 13:21:11 +0200 Subject: [PATCH 11/30] Fixed installing OV wheels for RHEL8 dev image in case of OPENVINO_WHEELS_URL --- templates/rhel8/dist/dev.dockerfile.j2 | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/templates/rhel8/dist/dev.dockerfile.j2 b/templates/rhel8/dist/dev.dockerfile.j2 index 695f6898..067f249f 100644 --- a/templates/rhel8/dist/dev.dockerfile.j2 +++ b/templates/rhel8/dist/dev.dockerfile.j2 @@ -5,10 +5,11 @@ ARG OPENVINO_WHEELS_URL # hadolint ignore=SC2102,DL3033 RUN yum install -y cmake git && yum clean all && \ if [ -z "$OPENVINO_WHEELS_URL" ]; then \ - ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --extra-index-url https://download.pytorch.org/whl/cpu; \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino=="${OPENVINO_WHEELS_VERSION}" && \ + ${PYTHON_VER} -m pip install --no-cache-dir openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --extra-index-url https://download.pytorch.org/whl/cpu; \ else \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ - ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="$OPENVINO_WHEELS_VERSION" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" --extra-index-url https://download.pytorch.org/whl/cpu ; \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" && \ + ${PYTHON_VER} -m pip install --no-cache-dir --pre openvino_dev[caffe,kaldi,mxnet,onnx,pytorch,tensorflow2]=="${OPENVINO_WHEELS_VERSION}" --trusted-host=* --find-links "$OPENVINO_WHEELS_URL" 
--extra-index-url https://download.pytorch.org/whl/cpu ; \ fi # download source for PyPi LGPL packages From aaddd2bd4237da9b85375bd774f641de20fec50f Mon Sep 17 00:00:00 2001 From: Anokhov Date: Tue, 3 Sep 2024 13:28:31 +0200 Subject: [PATCH 12/30] Fixed find cmd for installing from archive --- templates/ubuntu20/install/copy.dockerfile.j2 | 2 +- templates/ubuntu22/install/copy.dockerfile.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/templates/ubuntu20/install/copy.dockerfile.j2 b/templates/ubuntu20/install/copy.dockerfile.j2 index 2d00ff1d..377e21c6 100644 --- a/templates/ubuntu20/install/copy.dockerfile.j2 +++ b/templates/ubuntu20/install/copy.dockerfile.j2 @@ -5,7 +5,7 @@ ENV INTEL_OPENVINO_DIR=/opt/intel/openvino # Creating user openvino and adding it to groups"users" RUN useradd -ms /bin/bash -G users openvino -RUN find "${TEMP_DIR}" -name "*.tgz" -o -name "*.tar.gz" -exec tar -xzf {} \; && \ +RUN find "${TEMP_DIR}" \( -name "*.tgz" -o -name "*.tar.gz" \) -exec tar -xzf {} \; && \ OV_BUILD="$(find . -maxdepth 1 -type d -name "*openvino*" | grep -oP '(?<=_)\d+.\d+.\d.\d+')" && \ OV_YEAR="$(echo "$OV_BUILD" | grep -oP '^[^\d]*(\d+)')" && \ OV_FOLDER="$(find . -maxdepth 1 -type d -name "*openvino*")" && \ diff --git a/templates/ubuntu22/install/copy.dockerfile.j2 b/templates/ubuntu22/install/copy.dockerfile.j2 index 2d00ff1d..377e21c6 100644 --- a/templates/ubuntu22/install/copy.dockerfile.j2 +++ b/templates/ubuntu22/install/copy.dockerfile.j2 @@ -5,7 +5,7 @@ ENV INTEL_OPENVINO_DIR=/opt/intel/openvino # Creating user openvino and adding it to groups"users" RUN useradd -ms /bin/bash -G users openvino -RUN find "${TEMP_DIR}" -name "*.tgz" -o -name "*.tar.gz" -exec tar -xzf {} \; && \ +RUN find "${TEMP_DIR}" \( -name "*.tgz" -o -name "*.tar.gz" \) -exec tar -xzf {} \; && \ OV_BUILD="$(find . 
-maxdepth 1 -type d -name "*openvino*" | grep -oP '(?<=_)\d+.\d+.\d.\d+')" && \ OV_YEAR="$(echo "$OV_BUILD" | grep -oP '^[^\d]*(\d+)')" && \ OV_FOLDER="$(find . -maxdepth 1 -type d -name "*openvino*")" && \ From 692931735f9c14ec70495b7dbb06c6c798b81d24 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Tue, 3 Sep 2024 14:54:33 +0200 Subject: [PATCH 13/30] arg_parser.py: Fixed bandit scan --- utils/arg_parser.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 892449b0..1de43b30 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -658,7 +658,12 @@ def parse_args(name: str, description: str): # noqa ) if args.mode not in ("test", "deploy"): - args.year = args.build_id[:4] if args.build_id else args.product_version[:4] + if args.build_id: + args.year = args.build_id[:4] + elif args.product_version: + args.year = args.product_version[:4] + else: + args.year = None if args.mode == "test" and not args.product_version: match = re.search(r":(\d{4}\.\d\.\d)", str(args.tags)) From ed4adfefe6084b442acd8361f53341cb870844ba Mon Sep 17 00:00:00 2001 From: Anokhov Date: Tue, 3 Sep 2024 15:10:53 +0200 Subject: [PATCH 14/30] arg_parser: Bandit scan fix --- utils/arg_parser.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 1de43b30..41e2afac 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -288,7 +288,9 @@ def add_dist_args(cls, parser: argparse.ArgumentParser): def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): - if product_version < "2022.1": + if product_version is None: + parser.error("Product version is not specified.") + elif product_version < "2022.1": parser.error( f"This version of the DockerHub CI framework does not support OpenVINO releases earlier than " '2022.1.0. Current detected product version "{product_version}". 
Please use previous versions ' From a649a42332114fe2e47a372857198f103f86a4c6 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Tue, 3 Sep 2024 15:24:27 +0200 Subject: [PATCH 15/30] Added ignoring errors B603 B607 for subprocess.call --- utils/arg_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 41e2afac..c433fcb3 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -699,8 +699,8 @@ def parse_args(name: str, description: str): # noqa if hasattr(args, "distribution") and args.distribution == "custom": if ( - subprocess.call( - ["docker", "run", "--rm", args.tags[0], "ls", "extras/opencv"], # nosec + subprocess.call( # nosec B603 B607 + ["docker", "run", "--rm", args.tags[0], "ls", "extras/opencv"], # nosec B603 B607 stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) From 5a7bf4de13087cbfc36fe36bab5b9252fc3b80cc Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 13:40:39 +0200 Subject: [PATCH 16/30] Actualized test_arg_parser.py --- utils/tests/test_arg_parser.py | 110 ++++++++++++++++----------------- 1 file changed, 55 insertions(+), 55 deletions(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index c0095f18..dc7ae30c 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -55,7 +55,7 @@ pytest.param( { 'mode': 'build', - 'package_url': 'openvino_dev_p_2022.1.0.320.zip', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', 'source': 'local', }, { @@ -72,57 +72,57 @@ pytest.param( { 'mode': 'build', - 'package_url': 'openvino_dev_p_2022.1.0.320.zip', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', 'source': 'local', 'tag_postfix': '_qqq', }, { 'device': ['cpu', 'gpu'], - 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', + 'dockerfile_name': 'openvino_cg_dev_2024.3.0.dockerfile', 'python': 'python38', - 'tags': ['ubuntu18_dev:2022.1.0.320', 
'ubuntu18_dev:latest', 'ubuntu18_dev:2022.1.0.320_qqq'], + 'tags': ['ubuntu18_dev:2022.1.0.320', 'ubuntu18_dev:latest', 'ubuntu18_dev:2024.5.0.0.dev20240905_qqq'], 'distribution': 'dev', 'install_type': 'copy', - 'product_version': '2022.1.0', + 'product_version': '2024.5.0.0.dev20240905', }, id='check tag postfix', ), - pytest.param( - { - 'mode': 'build', - 'package_url': 'openvino_dev_p_2022.1.0.320.zip', - 'source': 'local', - 'os': 'winserver2019', - 'pre_stage_msbuild': 'msbuild2019_online', - }, - { - 'device': ['cpu'], - 'dockerfile_name': 'openvino_c_dev_2022.1.0.dockerfile', - 'python': 'python38', - 'tags': ['winserver2019_dev:2022.1.0.320', 'winserver2019_dev:latest'], - 'distribution': 'dev', - 'product_version': '2022.1.0', - }, - id='winserver2019', - ), - pytest.param( - { - 'mode': 'build', - 'package_url': 'openvino_dev_p_2022.1.0.320.zip', - 'distribution': 'base', - 'file': 'openvino_c_base_2022.1.dockerfile', - 'source': 'local', - }, - { - 'device': ['cpu'], - 'dockerfile_name': 'openvino_c_base_2022.1.0.dockerfile', - 'python': 'python38', - 'tags': ['ubuntu18_base_cpu:2022.1.0', 'ubuntu18_base_cpu:latest'], - 'distribution': 'base', - 'product_version': '2022.1.0', - }, - id='ubuntu base', - ), + # pytest.param( + # { + # 'mode': 'build', + # 'package_url': 'openvino_dev_p_2022.1.0.320.zip', + # 'source': 'local', + # 'os': 'winserver2019', + # 'pre_stage_msbuild': 'msbuild2019_online', + # }, + # { + # 'device': ['cpu'], + # 'dockerfile_name': 'openvino_c_dev_2022.1.0.dockerfile', + # 'python': 'python38', + # 'tags': ['winserver2019_dev:2022.1.0.320', 'winserver2019_dev:latest'], + # 'distribution': 'dev', + # 'product_version': '2022.1.0', + # }, + # id='winserver2019', + # ), + # pytest.param( + # { + # 'mode': 'build', + # 'package_url': 'openvino_dev_p_2022.1.0.320.zip', + # 'distribution': 'base', + # 'file': 'openvino_c_base_2022.1.dockerfile', + # 'source': 'local', + # }, + # { + # 'device': ['cpu'], + # 'dockerfile_name': 
'openvino_c_base_2022.1.0.dockerfile', + # 'python': 'python38', + # 'tags': ['ubuntu18_base_cpu:2022.1.0', 'ubuntu18_base_cpu:latest'], + # 'distribution': 'base', + # 'product_version': '2022.1.0', + # }, + # id='ubuntu base', + # ), pytest.param( { 'mode': 'build', @@ -160,35 +160,35 @@ pytest.param( { 'mode': 'build', - 'package_url': 'l_openvino_toolkit_dev_ubuntu18_p_2022.1.0.dev20211216.tgz', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', 'source': 'local', }, { 'device': ['cpu', 'gpu'], 'python': 'python38', - 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dev20211216.dockerfile', - 'tags': ['ubuntu18_dev:2022.1.0.dev20211216', 'ubuntu18_dev:latest'], + 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dev20240905.dockerfile', + 'tags': ['ubuntu22_dev:2024.5.0.0.dev20240905', 'ubuntu22_dev:latest'], 'distribution': 'dev', - 'product_version': '2022.1.0.dev20211216', - 'build_id': '2022.1.0.dev20211216', + 'product_version': '2024.5.0.0.dev20240905', + 'build_id': '2024.5.0.0.dev20240905', }, id='dev product version', ), pytest.param( { 'mode': 'build', - 'package_url': 'openvino_dev_p_2022.1.0.320.zip', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.tar.gz', 'source': 'local', 'tags': ['my_tag:latest'], - 'device': ['cpu', 'hddl'], + 'device': ['cpu', 'gpu'], }, { - 'device': ['cpu', 'hddl'], + 'device': ['cpu', 'gpu'], 'python': 'python38', - 'dockerfile_name': 'openvino_ch_dev_2022.1.0.dockerfile', + 'dockerfile_name': 'openvino_ch_dev_2024.5.0.dockerfile', 'tags': ['my_tag:latest'], 'distribution': 'dev', - 'product_version': '2022.1.0', + 'product_version': '2024.5.0.0', }, id='set tags and device manually', ), @@ -235,17 +235,17 @@ pytest.param( { 'mode': 'all', - 'package_url': 'openvino_dev_p_2022.1.0.320.zip', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', 'source': 'local', 'registry': 'https://deploy', }, { 'device': ['cpu', 'gpu'], 'python': 'python38', - 'tags': 
['ubuntu18_dev:2022.1.0.320', 'ubuntu18_dev:latest'], - 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', + 'tags': ['ubuntu22_dev:2024.5.0.0', 'ubuntu22_dev:latest'], + 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', 'distribution': 'dev', - 'product_version': '2022.1.0', + 'product_version': '2024.5.0.0', }, id='Successful all', ), From 5e13551dbd646934a126301962f315a5ff045d7a Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 13:46:18 +0200 Subject: [PATCH 17/30] Updated GH workflows with upload-artifact@v4 --- .github/workflows/build_test_lin.yml | 2 +- .github/workflows/build_test_win.yml | 2 +- .github/workflows/codestyle.yml | 2 +- .github/workflows/hadolint_check.yml | 2 +- .github/workflows/images_build_check.yml | 2 +- .github/workflows/manual_publish.yml | 2 +- .github/workflows/update_dockerfiles.yml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build_test_lin.yml b/.github/workflows/build_test_lin.yml index 9c081fb9..83d2838b 100644 --- a/.github/workflows/build_test_lin.yml +++ b/.github/workflows/build_test_lin.yml @@ -41,7 +41,7 @@ jobs: run: docker images - name: Collecting artifacts if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: logs path: | diff --git a/.github/workflows/build_test_win.yml b/.github/workflows/build_test_win.yml index c6eeeb1b..10072651 100644 --- a/.github/workflows/build_test_win.yml +++ b/.github/workflows/build_test_win.yml @@ -41,7 +41,7 @@ jobs: run: docker images - name: Collecting artifacts if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: logs path: | diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index 59167e68..e0dab5b0 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -42,7 +42,7 @@ jobs: - name: Collecting codestyle artifacts if: ${{ always() }} - uses: 
actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: codestyle_checks path: | diff --git a/.github/workflows/hadolint_check.yml b/.github/workflows/hadolint_check.yml index 18e8573c..357e4c49 100644 --- a/.github/workflows/hadolint_check.yml +++ b/.github/workflows/hadolint_check.yml @@ -37,7 +37,7 @@ jobs: done < <(find dockerfiles/ \( -name '*.dockerfile' -o -name 'Dockerfile' \) -print0) exit "$has_issues" - name: Collecting artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 if: ${{ always() }} with: name: hadolint_results diff --git a/.github/workflows/images_build_check.yml b/.github/workflows/images_build_check.yml index b71e3a52..576d87e1 100644 --- a/.github/workflows/images_build_check.yml +++ b/.github/workflows/images_build_check.yml @@ -47,7 +47,7 @@ jobs: run: docker images - name: Collecting artifacts if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: logs path: | diff --git a/.github/workflows/manual_publish.yml b/.github/workflows/manual_publish.yml index 9f0df675..f1c2aa98 100644 --- a/.github/workflows/manual_publish.yml +++ b/.github/workflows/manual_publish.yml @@ -82,7 +82,7 @@ jobs: run: docker logout openvino.azurecr.io - name: Collecting artifacts if: ${{ matrix.os == github.event.inputs.host_os }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: logs path: | diff --git a/.github/workflows/update_dockerfiles.yml b/.github/workflows/update_dockerfiles.yml index 658ca9b0..bf21ec6f 100644 --- a/.github/workflows/update_dockerfiles.yml +++ b/.github/workflows/update_dockerfiles.yml @@ -66,7 +66,7 @@ jobs: echo "Pull Request URL - ${{ steps.create_pr.outputs.pull-request-url }}" echo "Pull Request Operation - ${{ steps.create_pr.outputs.pull-request-operation }}" - name: Collecting artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: logs path: | From 
493d9ed95c1da768a6e0b2e493c033c73275707c Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 13:55:21 +0200 Subject: [PATCH 18/30] test_arg_parser.py: Added passing OS --- utils/tests/test_arg_parser.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index dc7ae30c..8f44b1dc 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -55,35 +55,37 @@ pytest.param( { 'mode': 'build', - 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', + 'os': 'ubuntu22', 'source': 'local', }, { 'device': ['cpu', 'gpu'], - 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', + 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', 'python': 'python38', - 'tags': ['ubuntu18_dev:2022.1.0.320', 'ubuntu18_dev:latest'], + 'tags': ['ubuntu22_dev:2024.5.0.0', 'ubuntu22_dev:latest'], 'distribution': 'dev', 'install_type': 'copy', - 'product_version': '2022.1.0', + 'product_version': '2024.5.0.0', }, id='parse product_version and distribution from package_url', ), pytest.param( { 'mode': 'build', - 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', + 'os': 'ubuntu22', 'source': 'local', 'tag_postfix': '_qqq', }, { 'device': ['cpu', 'gpu'], - 'dockerfile_name': 'openvino_cg_dev_2024.3.0.dockerfile', + 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', 'python': 'python38', - 'tags': ['ubuntu18_dev:2022.1.0.320', 'ubuntu18_dev:latest', 'ubuntu18_dev:2024.5.0.0.dev20240905_qqq'], + 'tags': ['ubuntu22_dev:2024.5.0.0', 'ubuntu22_dev:latest', 'ubuntu22_dev:2024.5.0.0_qqq'], 'distribution': 'dev', 'install_type': 'copy', - 'product_version': '2024.5.0.0.dev20240905', + 'product_version': '2024.5.0.0', }, id='check tag postfix', ), @@ -161,6 +163,7 @@ { 
'mode': 'build', 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', + 'os': 'ubuntu22', 'source': 'local', }, { @@ -179,6 +182,7 @@ 'mode': 'build', 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.tar.gz', 'source': 'local', + 'os': 'ubuntu22', 'tags': ['my_tag:latest'], 'device': ['cpu', 'gpu'], }, @@ -237,6 +241,7 @@ 'mode': 'all', 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', 'source': 'local', + 'os': 'ubuntu22', 'registry': 'https://deploy', }, { From c4797fd48a624e0ec157ed6db8a012305f851a78 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 14:01:03 +0200 Subject: [PATCH 19/30] test_arg_parser.py: Added distribution --- utils/tests/test_arg_parser.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index 8f44b1dc..5cffbccd 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -56,6 +56,7 @@ { 'mode': 'build', 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', + 'distribution': 'dev', 'os': 'ubuntu22', 'source': 'local', }, @@ -68,13 +69,14 @@ 'install_type': 'copy', 'product_version': '2024.5.0.0', }, - id='parse product_version and distribution from package_url', + id='parse product_version from package_url', ), pytest.param( { 'mode': 'build', 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', 'os': 'ubuntu22', + 'distribution': 'dev', 'source': 'local', 'tag_postfix': '_qqq', }, @@ -164,6 +166,7 @@ 'mode': 'build', 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.dev20240905_x86_64.tar.gz', 'os': 'ubuntu22', + 'distribution': 'dev', 'source': 'local', }, { @@ -183,6 +186,7 @@ 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.tar.gz', 'source': 'local', 'os': 'ubuntu22', + 'distribution': 'dev', 'tags': ['my_tag:latest'], 'device': ['cpu', 'gpu'], }, @@ -242,6 +246,7 @@ 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', 'source': 
'local', 'os': 'ubuntu22', + 'distribution': 'dev', 'registry': 'https://deploy', }, { From 041fbd62a8b95fccecf9c85512634ed1fa1d1947 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 14:52:39 +0200 Subject: [PATCH 20/30] codestyle.yml: Make unique name for the artifacts workflow --- .github/workflows/codestyle.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index e0dab5b0..ad154a66 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -44,6 +44,6 @@ jobs: if: ${{ always() }} uses: actions/upload-artifact@v4 with: - name: codestyle_checks + name: codestyle_checks_${{ matrix.os }} path: | ./utils_unittests.html From 446d02120a96044ff026f05ffa9cbe8e629f8b8f Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 14:58:06 +0200 Subject: [PATCH 21/30] test_arg_parser.py: Fixed old URL names --- utils/tests/test_arg_parser.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index 5cffbccd..779b5836 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -275,15 +275,15 @@ 'mode': 'test', 'tags': ['custom:no-cv'], 'distribution': 'custom', - 'product_version': '2022.1.0', - 'package_url': 'https://storage.openvinotoolkit.org/repositories/openvino/packages/2022.1.0/' - 'l_openvino_toolkit_dev_ubuntu18_p_2022.1.0.582.tgz', + 'product_version': '2024.3.0', + 'package_url': 'https://storage.openvinotoolkit.org/repositories/openvino/packages/2024.3/linux/' + 'l_openvino_toolkit_ubuntu22_2024.3.0.16041.1e3b88e4e3f_x86_64.tgz', }, { 'distribution': 'custom-no-cv', - 'product_version': '2022.1.0', - 'package_url': 'https://storage.openvinotoolkit.org/repositories/openvino/packages/2022.1.0/' - 'l_openvino_toolkit_dev_ubuntu18_p_2022.1.0.582.tgz', + 'product_version': '2024.3.0', + 'package_url': 
'https://storage.openvinotoolkit.org/repositories/openvino/packages/2024.3/linux/' + 'l_openvino_toolkit_ubuntu22_2024.3.0.16041.1e3b88e4e3f_x86_64.tgz', }, id='Successful test custom image', ), From dbedc8d99ac84848c6c77dc03cdd4d62a2d4ac9f Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 15:03:59 +0200 Subject: [PATCH 22/30] arg_parser.py: Fixed code style checks --- utils/arg_parser.py | 640 ++++++++++++++++++++++---------------------- 1 file changed, 320 insertions(+), 320 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index c433fcb3..64bb493f 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -21,12 +21,12 @@ class DockerCIArgumentParser(argparse.ArgumentParser): """CLI argument parser for this framework""" SUPPORTED_OS: typing.List = [ - "ubuntu18", - "ubuntu20", - "ubuntu22", - "winserver2019", - "windows20h2", - "rhel8", + 'ubuntu18', + 'ubuntu20', + 'ubuntu22', + 'winserver2019', + 'windows20h2', + 'rhel8', ] def __init__(self, prog: typing.Optional[str] = None, description: typing.Optional[str] = None): @@ -47,163 +47,163 @@ def set_default_subparser(name: str): def add_image_args(parser: argparse.ArgumentParser): """Adding args needed to manage the built Docker image""" parser.add_argument( - "-t", - "--tags", - metavar="IMAGE_NAME:TAG", - action="append", - required=" test" in parser.prog, - help='Source image name and optionally a tags in the "IMAGE_NAME:TAG" format. ' - "Default is _: and latest. You can specify some tags.", + '-t', + '--tags', + metavar='IMAGE_NAME:TAG', + action='append', + required=' test' in parser.prog, + help='Source image name and optionally a tags in the \'IMAGE_NAME:TAG\' format. ' + 'Default is _: and latest. You can specify some tags.', ) parser.add_argument( - "--tag_postfix", - metavar="_NAME", - default="", - help="Add special postfix to the end of tag image. 
" - "Image name will be like this _:", + '--tag_postfix', + metavar='_NAME', + default='', + help='Add special postfix to the end of tag image. ' + 'Image name will be like this _:', ) @staticmethod def add_linter_check_args(parser: argparse.ArgumentParser): parser.add_argument( - "--linter_check", - metavar="NAME", - action="append", + '--linter_check', + metavar='NAME', + action='append', default=[], - help="Enable linter check for image and dockerfile. " - "It installs additional 3d-party docker images or executable files. " - "Available tests: " - "hadolint (https://github.com/hadolint/hadolint), " - "dive (https://github.com/wagoodman/dive)", + help='Enable linter check for image and dockerfile. ' + 'It installs additional 3d-party docker images or executable files. ' + 'Available tests: ' + 'hadolint (https://github.com/hadolint/hadolint), ' + 'dive (https://github.com/wagoodman/dive)', ) @staticmethod def add_build_args(parser: argparse.ArgumentParser): """Adding args needed to build the Docker image""" parser.add_argument( - "--wheels_url", - metavar="URL", - default="", - help="URL to HTML page with links or local path relative to openvino folder to search for OpenVINO wheels " - "(will be used in a dockerfile as pip install --find-links value). " - "By default, openvino and openvino_dev will be installed from PyPi", + '--wheels_url', + metavar='URL', + default='', + help='URL to HTML page with links or local path relative to openvino folder to search for OpenVINO wheels ' + '(will be used in a dockerfile as pip install --find-links value). ' + 'By default, openvino and openvino_dev will be installed from PyPi', ) parser.add_argument( - "--image_json_path", - help="Provide path to save image data in .json format file. " - "By default, it is stored in the logs folder.", + '--image_json_path', + help='Provide path to save image data in .json format file. 
' + 'By default, it is stored in the logs folder.', ) parser.add_argument( - "--dockerfile_name", - metavar="NAME", - help="Name of the Dockerfile, that will be generated from templates. " - 'Format is "openvino___.dockerfile"', + '--dockerfile_name', + metavar='NAME', + help='Name of the Dockerfile, that will be generated from templates. ' + 'Format is \'openvino___.dockerfile\'', ) parser.add_argument( - "-d", - "--device", - metavar="NAME", - action="append", - help="Target inference hardware: cpu, gpu, vpu, hddl. Default is all. " - "Dockerfile name format has the first letter from device name, " - "e.g. for CPU, HDDL it will be openvino_ch__.dockerfile", + '-d', + '--device', + metavar='NAME', + action='append', + help='Target inference hardware: cpu, gpu, vpu, hddl. Default is all. ' + 'Dockerfile name format has the first letter from device name, ' + 'e.g. for CPU, HDDL it will be openvino_ch__.dockerfile', ) parser.add_argument( - "-py", - "--python", - choices=["python37", "python38", "python310"], - help="Python interpreter for docker image, currently default is python38", + '-py', + '--python', + choices=['python37', 'python38', 'python310'], + help='Python interpreter for docker image, currently default is python38', ) parser.add_argument( - "--cmake", - choices=["cmake34", "cmake314"], - default="cmake314", - help="CMake for Windows docker image, default CMake 3.14. For Linux images it is used default for OS.", + '--cmake', + choices=['cmake34', 'cmake314'], + default='cmake314', + help='CMake for Windows docker image, default CMake 3.14. For Linux images it is used default for OS.', ) parser.add_argument( - "--msbuild", - choices=["msbuild2019", "msbuild2019_online"], - help="MSBuild Tools for Windows docker image." - "MSBuild Tools are licensed as a supplement your existing Visual Studio license. 
" - "Please don’t share the image with MSBuild 2019 on a public Docker Hub.", + '--msbuild', + choices=['msbuild2019', 'msbuild2019_online'], + help='MSBuild Tools for Windows docker image.' + 'MSBuild Tools are licensed as a supplement your existing Visual Studio license. ' + 'Please don’t share the image with MSBuild 2019 on a public Docker Hub.', ) parser.add_argument( - "--pre_stage_msbuild", - choices=["msbuild2019", "msbuild2019_online"], - help="MSBuild Tools for Windows docker image to use on the first stage. " - "Can be required to build some thirdparty dependencies from source code. " - "MSBuild Tools are licensed as a supplement your existing Visual Studio license. ", + '--pre_stage_msbuild', + choices=['msbuild2019', 'msbuild2019_online'], + help='MSBuild Tools for Windows docker image to use on the first stage. ' + 'Can be required to build some thirdparty dependencies from source code. ' + 'MSBuild Tools are licensed as a supplement your existing Visual Studio license. ', ) parser.add_argument( - "-l", - "--layers", - metavar="NAME", - action="append", + '-l', + '--layers', + metavar='NAME', + action='append', default=[], - help="Setup your layer. Use name of .dockerfile.j2 file located in " - "/templates//layers folder. " - "Layer will be added to the end of product dockerfile.", + help='Setup your layer. Use name of .dockerfile.j2 file located in ' + '/templates//layers folder. ' + 'Layer will be added to the end of product dockerfile.', ) parser.add_argument( - "--build_arg", - metavar="VAR_NAME=VALUE", - action="append", + '--build_arg', + metavar='VAR_NAME=VALUE', + action='append', default=[], - help="Specify build or template arguments for your layer. " - 'You can use "no_samples=True" to remove OMZ, IE samples and demos from final docker image. ' - 'Set "INSTALL_SOURCES=yes" to download source for 3d party LGPL/GPL dependencies.', + help='Specify build or template arguments for your layer. 
' + 'You can use \'no_samples=True\' to remove OMZ, IE samples and demos from final docker image. ' + 'Set \'INSTALL_SOURCES=yes\' to download source for 3d party LGPL/GPL dependencies.', ) parser.add_argument( - "--no-cache", - dest="no_cache", - action="store_true", - help="Specify if image should be built without cache. False by default.", + '--no-cache', + dest='no_cache', + action='store_true', + help='Specify if image should be built without cache. False by default.', ) @staticmethod def add_test_args(parser: argparse.ArgumentParser): """Adding args needed to run tests on the built Docker image""" parser.add_argument( - "-k", - metavar="EXPRESSION", - default="", - dest="test_expression", - help="Run tests which match the given substring expression for pytest -k.", + '-k', + metavar='EXPRESSION', + default='', + dest='test_expression', + help='Run tests which match the given substring expression for pytest -k.', ) parser.add_argument( - "-m", - metavar="MARKEXPR", - default="", - dest="test_mark_expression", - help="Run tests which matching given mark expression for pytest -m", + '-m', + metavar='MARKEXPR', + default='', + dest='test_mark_expression', + help='Run tests which matching given mark expression for pytest -m', ) parser.add_argument( - "--sdl_check", - metavar="NAME", - action="append", + '--sdl_check', + metavar='NAME', + action='append', default=[], - help="Enable SDL check for docker host and image. " - "It installs additional 3d-party docker images or executable files. " - "Available tests: " - "snyk (https://github.com/snyk/snyk), " - "bench_security (https://github.com/docker/docker-bench-security)", + help='Enable SDL check for docker host and image. ' + 'It installs additional 3d-party docker images or executable files. 
' + 'Available tests: ' + 'snyk (https://github.com/snyk/snyk), ' + 'bench_security (https://github.com/docker/docker-bench-security)', ) parser.add_argument( - "--nightly", - action="store_true", + '--nightly', + action='store_true', default=False, help=argparse.SUPPRESS, # Skip tests for regular builds ) @@ -212,16 +212,16 @@ def add_test_args(parser: argparse.ArgumentParser): def add_deploy_args(parser: argparse.ArgumentParser): """Adding args needed to publish the built Docker image to a repository""" parser.add_argument( - "-r", - "--registry", - metavar="URL:PORT", + '-r', + '--registry', + metavar='URL:PORT', required=True, - help='Registry host and optionally a port in the "host:port" format', + help='Registry host and optionally a port in the \'host:port\' format', ) parser.add_argument( - "--nightly_save_path", - default="", + '--nightly_save_path', + default='', help=argparse.SUPPRESS, # Setup saving docker image as a binary file ) @@ -229,71 +229,71 @@ def add_deploy_args(parser: argparse.ArgumentParser): def add_dist_args(cls, parser: argparse.ArgumentParser): """Adding arg needed to customize the generated dockerfile""" parser.add_argument( - "-os", + '-os', choices=cls.SUPPORTED_OS, - default="", - help="Operation System for docker image.", + default='', + help='Operation System for docker image.', ) parser.add_argument( - "-dist", - "--distribution", - choices=["base", "runtime", "dev", "dev_no_samples", "custom"], - required=" test" in parser.prog, - help="Distribution type: dev, dev_no_samples, runtime or " - "base (with CPU only and without installing dependencies). " - "Using key --file and " - "-p are mandatory to build base distribution image." 
- "base dockerfiles are stored in /dockerfiles/ folder.", + '-dist', + '--distribution', + choices=['base', 'runtime', 'dev', 'dev_no_samples', 'custom'], + required=' test' in parser.prog, + help='Distribution type: dev, dev_no_samples, runtime or ' + 'base (with CPU only and without installing dependencies). ' + 'Using key --file and ' + '-p are mandatory to build base distribution image.' + 'base dockerfiles are stored in /dockerfiles/ folder.', ) parser.add_argument( - "-p", - "--product_version", - default="", - help="Product version in format: YYYY.U[.BBB], where BBB - build number is optional.", + '-p', + '--product_version', + default='', + help='Product version in format: YYYY.U[.BBB], where BBB - build number is optional.', ) parser.add_argument( - "-w", - "--wheels_version", - default="", - help="Version specifier of OpenVINO wheels to install (will be passed to pip install). " - "Will be equal to product version by default.", + '-w', + '--wheels_version', + default='', + help='Version specifier of OpenVINO wheels to install (will be passed to pip install). ' + 'Will be equal to product version by default.', ) parser.add_argument( - "-s", - "--source", - choices=["url", "local"], - default="url", - help="Source of the package: external URL or relative local path. By default: url.", + '-s', + '--source', + choices=['url', 'local'], + default='url', + help='Source of the package: external URL or relative local path. 
By default: url.', ) parser.add_argument( - "-u", - "--package_url", - metavar="URL", - default="", - help="Package external or local url, use http://, https://, ftp:// access scheme or " - "relative local path", + '-u', + '--package_url', + metavar='URL', + default='', + help='Package external or local url, use http://, https://, ftp:// access scheme or ' + 'relative local path', ) parser.add_argument( - "-f", - "--file", - metavar="NAME", - help="Name of the Dockerfile, that uses to build an image.", + '-f', + '--file', + metavar='NAME', + help='Name of the Dockerfile, that uses to build an image.', ) def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): if product_version is None: - parser.error("Product version is not specified.") - elif product_version < "2022.1": + parser.error('Product version is not specified.') + elif product_version < '2022.1': parser.error( f"This version of the DockerHub CI framework does not support OpenVINO releases earlier than " - '2022.1.0. Current detected product version "{product_version}". Please use previous versions ' + "2022.1.0. Current detected product version '{product_version}'. Please use previous versions " "of the DockerHub CI." 
) @@ -302,65 +302,65 @@ def parse_args(name: str, description: str): # noqa """Parse all the args set up above""" parser = DockerCIArgumentParser(name, description) - subparsers = parser.add_subparsers(dest="mode") + subparsers = parser.add_subparsers(dest='mode') gen_dockerfile_subparser = subparsers.add_parser( - "gen_dockerfile", help="Generate a dockerfile to " "dockerfiles/ folder" + 'gen_dockerfile', help='Generate a dockerfile to ' 'dockerfiles/ folder' ) parser.add_build_args(gen_dockerfile_subparser) parser.add_linter_check_args(gen_dockerfile_subparser) parser.add_dist_args(gen_dockerfile_subparser) rhel_platform_group = gen_dockerfile_subparser.add_mutually_exclusive_group() rhel_platform_group.add_argument( - "--rhel_platform", - choices=["docker", "openshift", "autobuild"], - default="docker", - help="Specify target platform to generate RHEL dockerfiles (default is docker). " - "Choose autobuild option for Red Hat portal Build System.", + '--rhel_platform', + choices=['docker', 'openshift', 'autobuild'], + default='docker', + help='Specify target platform to generate RHEL dockerfiles (default is docker). ' + 'Choose autobuild option for Red Hat portal Build System.', ) rhel_platform_group.add_argument( - "--openshift", - action="store_const", - dest="rhel_platform", - const="openshift", + '--openshift', + action='store_const', + dest='rhel_platform', + const='openshift', default=False, - help="Create a dockerfile intended to build on Red Hat OpenShift Container Platform (RHEL images only). " - "Alias for --rhel_platform=openshift", + help='Create a dockerfile intended to build on Red Hat OpenShift Container Platform (RHEL images only). 
' + 'Alias for --rhel_platform=openshift', ) - build_subparser = subparsers.add_parser("build", help="Build a docker image") + build_subparser = subparsers.add_parser('build', help='Build a docker image') parser.add_build_args(build_subparser) parser.add_linter_check_args(build_subparser) parser.add_dist_args(build_subparser) parser.add_image_args(build_subparser) - build_test_subparser = subparsers.add_parser("build_test", help="Build and test a docker image") + build_test_subparser = subparsers.add_parser('build_test', help='Build and test a docker image') parser.add_build_args(build_test_subparser) parser.add_linter_check_args(build_test_subparser) parser.add_dist_args(build_test_subparser) parser.add_image_args(build_test_subparser) parser.add_test_args(build_test_subparser) - test_subparser = subparsers.add_parser("test", help="Test a local docker image") + test_subparser = subparsers.add_parser('test', help='Test a local docker image') parser.add_linter_check_args(test_subparser) parser.add_dist_args(test_subparser) parser.add_image_args(test_subparser) parser.add_test_args(test_subparser) test_subparser.add_argument( - "-r", - "--registry", - metavar="URL:PORT", - default="", - help='Registry host and optionally a port in the "host:port" format. ' - "Will be used to pull the image if it does not exist", + '-r', + '--registry', + metavar='URL:PORT', + default='', + help='Registry host and optionally a port in the \'host:port\' format. ' + 'Will be used to pull the image if it does not exist', ) - deploy_subparser = subparsers.add_parser("deploy", help="Deploy a docker image") + deploy_subparser = subparsers.add_parser('deploy', help='Deploy a docker image') parser.add_image_args(deploy_subparser) parser.add_deploy_args(deploy_subparser) all_subparser = subparsers.add_parser( - "all", help="Build, test and deploy a docker image. [Default option]" + 'all', help='Build, test and deploy a docker image. 
[Default option]' ) parser.add_build_args(all_subparser) parser.add_linter_check_args(all_subparser) @@ -369,7 +369,7 @@ def parse_args(name: str, description: str): # noqa parser.add_test_args(all_subparser) parser.add_deploy_args(all_subparser) - parser.set_default_subparser("all") + parser.set_default_subparser('all') args = parser.parse_args() @@ -381,285 +381,285 @@ def parse_args(name: str, description: str): # noqa elif isinstance(arg_val, str): check_printable_utf8_chars(arg_val) - for attr_name in ("package_url", "file", "image_json_path"): + for attr_name in ('package_url', 'file', 'image_json_path'): if hasattr(args, attr_name) and getattr(args, attr_name): check_internal_local_path(getattr(args, attr_name)) if ( - args.mode != "deploy" + args.mode != 'deploy' and args.package_url - and args.source == "local" - and not args.package_url.startswith(("http://", "https://", "ftp://")) + and args.source == 'local' + and not args.package_url.startswith(('http://', 'https://', 'ftp://')) ): args.package_url = str(pathlib.Path(args.package_url).as_posix()) if ( - args.mode not in ("test", "deploy") - and hasattr(args, "distribution") - and args.distribution == "custom" + args.mode not in ('test', 'deploy') + and hasattr(args, 'distribution') + and args.distribution == 'custom' ): - parser.error("For a custom distribution, only test and deploy modes are available.") + parser.error('For a custom distribution, only test and deploy modes are available.') if ( - hasattr(args, "sdl_check") + hasattr(args, 'sdl_check') and args.sdl_check - and ("snyk" not in args.sdl_check and "bench_security" not in args.sdl_check) + and ('snyk' not in args.sdl_check and 'bench_security' not in args.sdl_check) ): - parser.error("Incorrect arguments for --sdl_check. Available tests: snyk, bench_security") + parser.error('Incorrect arguments for --sdl_check. 
Available tests: snyk, bench_security') if ( - hasattr(args, "linter_check") + hasattr(args, 'linter_check') and args.linter_check - and ("hadolint" not in args.linter_check and "dive" not in args.linter_check) + and ('hadolint' not in args.linter_check and 'dive' not in args.linter_check) ): - parser.error("Incorrect arguments for --linter_check. Available tests: hadolint, dive") + parser.error('Incorrect arguments for --linter_check. Available tests: hadolint, dive') if ( - args.mode in ("build", "build_test", "all") - and args.distribution == "base" + args.mode in ('build', 'build_test', 'all') + and args.distribution == 'base' and not args.file ): - parser.error("The following argument is required: -f/--file") + parser.error('The following argument is required: -f/--file') - if args.mode == "deploy" and not args.tags: - parser.error("The following argument is required: -t/--tags") + if args.mode == 'deploy' and not args.tags: + parser.error('The following argument is required: -t/--tags') - if hasattr(args, "os") and not args.os: + if hasattr(args, 'os') and not args.os: possible_os: typing.Set[str] = set() if args.package_url: possible_os.update(filter(lambda os: os in args.package_url, parser.SUPPORTED_OS)) - if hasattr(args, "tags") and args.tags: + if hasattr(args, 'tags') and args.tags: for tag in args.tags: possible_os.update(filter(lambda os: os in tag, parser.SUPPORTED_OS)) # noqa: B023 if len(possible_os) == 1: args.os = possible_os.pop() else: parser.error( - "Can not get image OS from package URL or tags. " "Please specify -os directly" + 'Can not get image OS from package URL or tags. 
' 'Please specify -os directly' ) if ( - args.mode in ("gen_dockerfile", "build", "build_test", "all") - and args.distribution == "dev_no_samples" - and "ubuntu" not in args.os + args.mode in ('gen_dockerfile', 'build', 'build_test', 'all') + and args.distribution == 'dev_no_samples' + and 'ubuntu' not in args.os ): - parser.error("Distribution dev_no_samples is available only for Ubuntu operation system") + parser.error('Distribution dev_no_samples is available only for Ubuntu operation system') - if args.mode == "gen_dockerfile" and args.distribution == "base": + if args.mode == 'gen_dockerfile' and args.distribution == 'base': parser.error( - "Generating dockerfile for base distribution is not available. " - "Use generated base dockerfiles are stored in /dockerfiles/ folder" + 'Generating dockerfile for base distribution is not available. ' + 'Use generated base dockerfiles are stored in /dockerfiles/ folder' ) - if args.mode == "test" and not (args.tags and args.distribution): + if args.mode == 'test' and not (args.tags and args.distribution): parser.error( - 'Options --tags and --distribution are mandatory. Image operation system is "ubuntu18"' - " by default." + 'Options --tags and --distribution are mandatory. Image operation system is \'ubuntu18\'' + ' by default.' ) - if args.mode == "test" and "runtime" in args.distribution and not args.package_url: + if args.mode == 'test' and 'runtime' in args.distribution and not args.package_url: logger.info( - "\nYou can run samples/demos on runtime docker image. " - "Please provide --package_url key with path to dev distribution package in " - "http/https/ftp access scheme or a local file in the project location as dependent package " - "to run all available tests.\n" + '\nYou can run samples/demos on runtime docker image. 
' + 'Please provide --package_url key with path to dev distribution package in ' + 'http/https/ftp access scheme or a local file in the project location as dependent package ' + 'to run all available tests.\n' ) - if args.mode in ("deploy", "all") and not hasattr(args, "registry"): - parser.error("Option --registry is mandatory for this mode.") + if args.mode in ('deploy', 'all') and not hasattr(args, 'registry'): + parser.error('Option --registry is mandatory for this mode.') - if hasattr(args, "image_json_path") and args.image_json_path: + if hasattr(args, 'image_json_path') and args.image_json_path: args.image_json_path = pathlib.Path(args.image_json_path).absolute() if args.image_json_path.is_symlink(): parser.error( - "Do not use symlink and hard link for --image_json_path key. It is an insecure way." + 'Do not use symlink and hard link for --image_json_path key. It is an insecure way.' ) - if hasattr(args, "file") and args.file: + if hasattr(args, 'file') and args.file: args.file = pathlib.Path(args.file).absolute() if args.file.is_symlink(): - parser.error("Do not use symlink and hard link for --file key. It is an insecure way. ") + parser.error('Do not use symlink and hard link for --file key. It is an insecure way. 
') if not args.file.exists(): - parser.error(f"Cannot find specified Dockerfile: {str(args.file)}.") + parser.error(f'Cannot find specified Dockerfile: {str(args.file)}.') - if not hasattr(args, "rhel_platform"): - args.rhel_platform = "docker" - if args.rhel_platform != "docker" and args.os != "rhel8": + if not hasattr(args, 'rhel_platform'): + args.rhel_platform = 'docker' + if args.rhel_platform != 'docker' and args.os != 'rhel8': parser.error( - "Dockerfile generation intended for non-Docker platforms " - "is supported only for RHEL-based images" + 'Dockerfile generation intended for non-Docker platforms ' + 'is supported only for RHEL-based images' ) - if hasattr(args, "product_version") and args.product_version: - logger.info(f"Found product version {args.product_version} in arguments.") + if hasattr(args, 'product_version') and args.product_version: + logger.info(f'Found product version {args.product_version} in arguments.') fail_if_product_version_not_supported(args.product_version, parser) - product_version = re.search(r"^\d{4}\.\d$", args.product_version) + product_version = re.search(r'^\d{4}\.\d$', args.product_version) if product_version: # save product version YYYY.U as YYYY.U.0 - args.product_version = f"{product_version.group()}.0" + args.product_version = f'{product_version.group()}.0' - if args.mode in ("gen_dockerfile", "build", "build_test", "all"): - if args.package_url and not args.package_url.startswith(("http://", "https://", "ftp://")): - if args.source == "local" and not pathlib.Path(args.package_url).exists(): + if args.mode in ('gen_dockerfile', 'build', 'build_test', 'all'): + if args.package_url and not args.package_url.startswith(('http://', 'https://', 'ftp://')): + if args.source == 'local' and not pathlib.Path(args.package_url).exists(): parser.error( - "Provided local path of the package should be relative to folder " - f"or should be an http/https/ftp access scheme: {args.package_url}" + 'Provided local path of the package should 
be relative to folder ' + f'or should be an http/https/ftp access scheme: {args.package_url}' ) - elif args.source == "url" and args.distribution != "base": + elif args.source == 'url' and args.distribution != 'base': parser.error( - "Provided URL is not supported, use http://, https:// or ftp:// access scheme" + 'Provided URL is not supported, use http://, https:// or ftp:// access scheme' ) - elif args.source == "local" and pathlib.Path(args.package_url).is_symlink(): + elif args.source == 'local' and pathlib.Path(args.package_url).is_symlink(): parser.error( - "Do not use symlink and hard link to specify local package url. " - "It is an insecure way." + 'Do not use symlink and hard link to specify local package url. ' + 'It is an insecure way.' ) if not args.python: - if args.os in ("ubuntu22"): - args.python = "python310" + if args.os in 'ubuntu22': + args.python = 'python310' else: - args.python = "python38" + args.python = 'python38' - if args.python == "python38" and "win" in args.os: - if not hasattr(args, "pre_stage_msbuild") or not args.pre_stage_msbuild: + if args.python == 'python38' and 'win' in args.os: + if not hasattr(args, 'pre_stage_msbuild') or not args.pre_stage_msbuild: parser.error( - "Option --pre_stage_msbuild is required for Windows images to build the latest version " - "of Python 3.8" + 'Option --pre_stage_msbuild is required for Windows images to build the latest version ' + 'of Python 3.8' ) if not args.distribution and args.package_url: - if "_runtime_" in args.package_url: - args.distribution = "runtime" - elif "_dev_" in args.package_url: - args.distribution = "dev" + if '_runtime_' in args.package_url: + args.distribution = 'runtime' + elif '_dev_' in args.package_url: + args.distribution = 'dev' else: parser.error( - f"Cannot get distribution type from the package URL provided. {args.package_url} " - "Please specify --distribution directly." + f'Cannot get distribution type from the package URL provided. 
{args.package_url} ' + 'Please specify --distribution directly.' ) # set installation method for the package - args.install_type = "copy" + args.install_type = 'copy' # workaround for https://bugs.python.org/issue16399 issue - if not args.device and "win" not in args.os: - if args.distribution == "base": - args.device = ["cpu"] - elif args.os == "rhel8": - args.device = ["cpu", "gpu"] + if not args.device and 'win' not in args.os: + if args.distribution == 'base': + args.device = ['cpu'] + elif args.os == 'rhel8': + args.device = ['cpu', 'gpu'] else: - args.device = ["cpu", "gpu"] # 2022.3 v/h not supported + args.device = ['cpu', 'gpu'] # 2022.3 v/h not supported elif not args.device: - args.device = ["cpu"] + args.device = ['cpu'] if not args.package_url and not args.product_version: latest_public_version = max(INTEL_OPENVINO_VERSION.__iter__()) args.product_version = ( - "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version + '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version ) - args.build_id = "" + args.build_id = '' - if not args.package_url and args.distribution not in ("base",): + if not args.package_url and args.distribution not in ('base',): if not args.distribution or not args.product_version: parser.error( - "Insufficient arguments. Provide --package_url " - "or --distribution (with optional --product_version) arguments" + 'Insufficient arguments. 
Provide --package_url ' + 'or --distribution (with optional --product_version) arguments' ) - if args.mode != "gen_dockerfile" or args.rhel_platform == "autobuild": - dev_version = re.search(r"^\d{4}\.(?:\d\.){2,3}dev\d{8}$", args.product_version) + if args.mode != 'gen_dockerfile' or args.rhel_platform == 'autobuild': + dev_version = re.search(r'^\d{4}\.(?:\d\.){2,3}dev\d{8}$', args.product_version) if dev_version: args.product_version = dev_version.group() else: - lts_version = re.search(r"(\d{4}\.\d\.\d)", args.product_version) + lts_version = re.search(r'(\d{4}\.\d\.\d)', args.product_version) if lts_version: args.product_version = lts_version.group() # save product version YYYY.U.V else: - parser.error(f"Cannot find package url for {args.product_version} version") + parser.error(f'Cannot find package url for {args.product_version} version') with contextlib.suppress(KeyError): args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os][ args.distribution ] if not args.package_url: parser.error( - f"Cannot find package url for {args.product_version} version " - f"and {args.distribution} distribution. Please specify --package_url directly." + f'Cannot find package url for {args.product_version} version ' + f'and {args.distribution} distribution. Please specify --package_url directly.' 
) if args.package_url and not args.build_id: - logger.info(f"Parsing product version in the package_url...") - dev_version = re.search(r"_(\d{4}\.(?:\d\.){2,3}dev\d{8})_", args.package_url) + logger.info(f'Parsing product version in the package_url...') + dev_version = re.search(r'_(\d{4}\.(?:\d\.){2,3}dev\d{8})_', args.package_url) if dev_version: # save product version and build version as YYYY.U.V.devYYYYMMDD args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r"_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})?", args.package_url) + build_id = re.search(r'_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})?', args.package_url) if build_id: # save product version YYYY.U.V.BBB - args.build_id = ".".join(build_id.groups()) + args.build_id = '.'.join(build_id.groups()) # save product version YYYY.U.V args.product_version = build_id.group(1) else: args.build_id = args.product_version if not args.dockerfile_name: - devices = "".join([d[0] for d in args.device]) - layers = "_".join(args.layers) - openshift = "openshift_" if args.rhel_platform == "openshift" else "" + devices = ''.join([d[0] for d in args.device]) + layers = '_'.join(args.layers) + openshift = 'openshift_' if args.rhel_platform == 'openshift' else '' version = args.product_version if layers: - args.dockerfile_name = f"openvino_{openshift}{layers}_{version}.dockerfile" + args.dockerfile_name = f'openvino_{openshift}{layers}_{version}.dockerfile' else: args.dockerfile_name = ( - f"openvino_{devices}_{openshift}{args.distribution}_{version}.dockerfile" + f'openvino_{devices}_{openshift}{args.distribution}_{version}.dockerfile' ) - if not hasattr(args, "wheels_version") or not args.wheels_version: + if not hasattr(args, 'wheels_version') or not args.wheels_version: args.wheels_version = ( args.product_version if args.build_id == args.product_version - else f"{args.product_version}.*" + else f'{args.product_version}.*' ) - if not hasattr(args, "tags") or not args.tags: - layers = 
"_".join(args.layers) - tgl_postfix = "" + if not hasattr(args, 'tags') or not args.tags: + layers = '_'.join(args.layers) + tgl_postfix = '' if layers: args.tags = [ - f"{args.os}_{layers}:" - f"{args.build_id if args.build_id else args.product_version}{tgl_postfix}", - f"{args.os}_{layers}:latest", + f'{args.os}_{layers}:' + f'{args.build_id if args.build_id else args.product_version}{tgl_postfix}', + f'{args.os}_{layers}:latest', ] - if hasattr(args, "tag_postfix") and args.tag_postfix: + if hasattr(args, 'tag_postfix') and args.tag_postfix: args.tags.append( - f"{args.os}_{layers}:{args.build_id if args.build_id else args.product_version}" - f"{tgl_postfix}{args.tag_postfix}" + f'{args.os}_{layers}:{args.build_id if args.build_id else args.product_version}' + f'{tgl_postfix}{args.tag_postfix}' ) - elif args.distribution == "base": + elif args.distribution == 'base': args.tags = [ - f"{args.os}_{args.distribution}_cpu:" f"{args.product_version}", - f"{args.os}_{args.distribution}_cpu:latest", + f'{args.os}_{args.distribution}_cpu:' f'{args.product_version}', + f'{args.os}_{args.distribution}_cpu:latest', ] - if hasattr(args, "tag_postfix") and args.tag_postfix: + if hasattr(args, 'tag_postfix') and args.tag_postfix: args.tags.append( - f"{args.os}_{args.distribution}_cpu:" - f"{args.product_version}{args.tag_postfix}" + f'{args.os}_{args.distribution}_cpu:' + f'{args.product_version}{args.tag_postfix}' ) else: args.tags = [ - f"{args.os}_{args.distribution}:" - f"{args.build_id if args.build_id else args.product_version}{tgl_postfix}", - f"{args.os}_{args.distribution}:latest", + f'{args.os}_{args.distribution}:' + f'{args.build_id if args.build_id else args.product_version}{tgl_postfix}', + f'{args.os}_{args.distribution}:latest', ] - if hasattr(args, "tag_postfix") and args.tag_postfix: + if hasattr(args, 'tag_postfix') and args.tag_postfix: args.tags.append( - f"{args.os}_{args.distribution}:" - f"{args.build_id if args.build_id else args.product_version}" - 
f"{tgl_postfix}{args.tag_postfix}" + f'{args.os}_{args.distribution}:' + f'{args.build_id if args.build_id else args.product_version}' + f'{tgl_postfix}{args.tag_postfix}' ) - if args.mode not in ("test", "deploy"): + if args.mode not in ('test', 'deploy'): if args.build_id: args.year = args.build_id[:4] elif args.product_version: @@ -667,56 +667,56 @@ def parse_args(name: str, description: str): # noqa else: args.year = None - if args.mode == "test" and not args.product_version: - match = re.search(r":(\d{4}\.\d\.\d)", str(args.tags)) + if args.mode == 'test' and not args.product_version: + match = re.search(r':(\d{4}\.\d\.\d)', str(args.tags)) if not match and args.package_url: - match = re.search(r"_(\d{4}\.\d\.\d)", args.package_url) + match = re.search(r'_(\d{4}\.\d\.\d)', args.package_url) if match: # save product version YYYY.U.V args.product_version = match.group(1) - elif args.distribution == "custom": + elif args.distribution == 'custom': latest_public_version = list(INTEL_OPENVINO_VERSION.keys())[-1] args.product_version = ( - "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version + '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version ) else: parser.error( - "Cannot get product_version from the package URL and docker image. " - "Please specify --product_version directly." + 'Cannot get product_version from the package URL and docker image. ' + 'Please specify --product_version directly.' 
) - if args.mode in ("test") and (not hasattr(args, "wheels_version") or not args.wheels_version): + if args.mode in 'test' and (not hasattr(args, 'wheels_version') or not args.wheels_version): latest_public_version = max(INTEL_OPENVINO_VERSION.__iter__()) latest_public_version = ( - "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version + '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version ) args.wheels_version = ( - args.product_version if hasattr(args, "product_version") else latest_public_version + args.product_version if hasattr(args, 'product_version') else latest_public_version ) - if hasattr(args, "product_version"): + if hasattr(args, 'product_version'): fail_if_product_version_not_supported(args.product_version, parser) - if hasattr(args, "distribution") and args.distribution == "custom": + if hasattr(args, 'distribution') and args.distribution == 'custom': if ( subprocess.call( # nosec B603 B607 - ["docker", "run", "--rm", args.tags[0], "ls", "extras/opencv"], # nosec B603 B607 + ['docker', 'run', '--rm', args.tags[0], 'ls', 'extras/opencv'], # nosec B603 B607 stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) != 0 ): - args.distribution = "custom-no-cv" + args.distribution = 'custom-no-cv' else: - args.distribution = "custom-full" + args.distribution = 'custom-full' - if hasattr(args, "distribution"): - if not args.package_url and args.mode == "test" and args.distribution == "custom-no-cv": + if hasattr(args, 'distribution'): + if not args.package_url and args.mode == 'test' and args.distribution == 'custom-no-cv': if args.product_version in INTEL_OPENVINO_VERSION: - args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os]["dev"] + args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os]['dev'] else: parser.error( - f"Cannot find URL to package with test dependencies for {args.product_version} release. 
" - f"Please specify --package_url directly" + f'Cannot find URL to package with test dependencies for {args.product_version} release. ' + f'Please specify --package_url directly' ) return args From b79e569749fcedc445d2de2fc2c4f333bcfe8e02 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 15:12:16 +0200 Subject: [PATCH 23/30] arg_parser.py: Fixed regex --- utils/arg_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 64bb493f..f6e6a55a 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -593,7 +593,7 @@ def parse_args(name: str, description: str): # noqa args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r'_(\d{4}\.(?:\d\.){2,3})\.(\d{3,4})?', args.package_url) + build_id = re.search(r'_(\d{4}\.(?:\d\.){2,3})(\d+?)?', args.package_url) if build_id: # save product version YYYY.U.V.BBB args.build_id = '.'.join(build_id.groups()) From 3eb0437d6e7e2e2f3fd72cfb584552a996585ce0 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 15:17:00 +0200 Subject: [PATCH 24/30] Rollback quotes via black.exe and ignore it in flake8 --- setup.cfg | 1 + utils/arg_parser.py | 640 ++++++++++++++++++++++---------------------- 2 files changed, 321 insertions(+), 320 deletions(-) diff --git a/setup.cfg b/setup.cfg index a7064efc..31d9cc60 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,6 +9,7 @@ ignore = DAR101, DAR201, DAR401, D107, D415, I201, T001, S404, S603, G004, B009, enable-extensions=G per-file-ignores = tests/*: D100,D101,D102,D104,S108,CFQ004,PT023 + utils/*: Q000 tests/conftest.py: D100,D101,D102,D103,D104,CFQ004 [pydocstyle] diff --git a/utils/arg_parser.py b/utils/arg_parser.py index f6e6a55a..f7492b29 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -21,12 +21,12 @@ class DockerCIArgumentParser(argparse.ArgumentParser): """CLI argument parser for this framework""" SUPPORTED_OS: typing.List = [ - 
'ubuntu18', - 'ubuntu20', - 'ubuntu22', - 'winserver2019', - 'windows20h2', - 'rhel8', + "ubuntu18", + "ubuntu20", + "ubuntu22", + "winserver2019", + "windows20h2", + "rhel8", ] def __init__(self, prog: typing.Optional[str] = None, description: typing.Optional[str] = None): @@ -47,163 +47,163 @@ def set_default_subparser(name: str): def add_image_args(parser: argparse.ArgumentParser): """Adding args needed to manage the built Docker image""" parser.add_argument( - '-t', - '--tags', - metavar='IMAGE_NAME:TAG', - action='append', - required=' test' in parser.prog, - help='Source image name and optionally a tags in the \'IMAGE_NAME:TAG\' format. ' - 'Default is _: and latest. You can specify some tags.', + "-t", + "--tags", + metavar="IMAGE_NAME:TAG", + action="append", + required=" test" in parser.prog, + help="Source image name and optionally a tags in the 'IMAGE_NAME:TAG' format. " + "Default is _: and latest. You can specify some tags.", ) parser.add_argument( - '--tag_postfix', - metavar='_NAME', - default='', - help='Add special postfix to the end of tag image. ' - 'Image name will be like this _:', + "--tag_postfix", + metavar="_NAME", + default="", + help="Add special postfix to the end of tag image. " + "Image name will be like this _:", ) @staticmethod def add_linter_check_args(parser: argparse.ArgumentParser): parser.add_argument( - '--linter_check', - metavar='NAME', - action='append', + "--linter_check", + metavar="NAME", + action="append", default=[], - help='Enable linter check for image and dockerfile. ' - 'It installs additional 3d-party docker images or executable files. ' - 'Available tests: ' - 'hadolint (https://github.com/hadolint/hadolint), ' - 'dive (https://github.com/wagoodman/dive)', + help="Enable linter check for image and dockerfile. " + "It installs additional 3d-party docker images or executable files. 
" + "Available tests: " + "hadolint (https://github.com/hadolint/hadolint), " + "dive (https://github.com/wagoodman/dive)", ) @staticmethod def add_build_args(parser: argparse.ArgumentParser): """Adding args needed to build the Docker image""" parser.add_argument( - '--wheels_url', - metavar='URL', - default='', - help='URL to HTML page with links or local path relative to openvino folder to search for OpenVINO wheels ' - '(will be used in a dockerfile as pip install --find-links value). ' - 'By default, openvino and openvino_dev will be installed from PyPi', + "--wheels_url", + metavar="URL", + default="", + help="URL to HTML page with links or local path relative to openvino folder to search for OpenVINO wheels " + "(will be used in a dockerfile as pip install --find-links value). " + "By default, openvino and openvino_dev will be installed from PyPi", ) parser.add_argument( - '--image_json_path', - help='Provide path to save image data in .json format file. ' - 'By default, it is stored in the logs folder.', + "--image_json_path", + help="Provide path to save image data in .json format file. " + "By default, it is stored in the logs folder.", ) parser.add_argument( - '--dockerfile_name', - metavar='NAME', - help='Name of the Dockerfile, that will be generated from templates. ' - 'Format is \'openvino___.dockerfile\'', + "--dockerfile_name", + metavar="NAME", + help="Name of the Dockerfile, that will be generated from templates. " + "Format is 'openvino___.dockerfile'", ) parser.add_argument( - '-d', - '--device', - metavar='NAME', - action='append', - help='Target inference hardware: cpu, gpu, vpu, hddl. Default is all. ' - 'Dockerfile name format has the first letter from device name, ' - 'e.g. for CPU, HDDL it will be openvino_ch__.dockerfile', + "-d", + "--device", + metavar="NAME", + action="append", + help="Target inference hardware: cpu, gpu, vpu, hddl. Default is all. " + "Dockerfile name format has the first letter from device name, " + "e.g. 
for CPU, HDDL it will be openvino_ch__.dockerfile", ) parser.add_argument( - '-py', - '--python', - choices=['python37', 'python38', 'python310'], - help='Python interpreter for docker image, currently default is python38', + "-py", + "--python", + choices=["python37", "python38", "python310"], + help="Python interpreter for docker image, currently default is python38", ) parser.add_argument( - '--cmake', - choices=['cmake34', 'cmake314'], - default='cmake314', - help='CMake for Windows docker image, default CMake 3.14. For Linux images it is used default for OS.', + "--cmake", + choices=["cmake34", "cmake314"], + default="cmake314", + help="CMake for Windows docker image, default CMake 3.14. For Linux images it is used default for OS.", ) parser.add_argument( - '--msbuild', - choices=['msbuild2019', 'msbuild2019_online'], - help='MSBuild Tools for Windows docker image.' - 'MSBuild Tools are licensed as a supplement your existing Visual Studio license. ' - 'Please don’t share the image with MSBuild 2019 on a public Docker Hub.', + "--msbuild", + choices=["msbuild2019", "msbuild2019_online"], + help="MSBuild Tools for Windows docker image." + "MSBuild Tools are licensed as a supplement your existing Visual Studio license. " + "Please don’t share the image with MSBuild 2019 on a public Docker Hub.", ) parser.add_argument( - '--pre_stage_msbuild', - choices=['msbuild2019', 'msbuild2019_online'], - help='MSBuild Tools for Windows docker image to use on the first stage. ' - 'Can be required to build some thirdparty dependencies from source code. ' - 'MSBuild Tools are licensed as a supplement your existing Visual Studio license. ', + "--pre_stage_msbuild", + choices=["msbuild2019", "msbuild2019_online"], + help="MSBuild Tools for Windows docker image to use on the first stage. " + "Can be required to build some thirdparty dependencies from source code. " + "MSBuild Tools are licensed as a supplement your existing Visual Studio license. 
", ) parser.add_argument( - '-l', - '--layers', - metavar='NAME', - action='append', + "-l", + "--layers", + metavar="NAME", + action="append", default=[], - help='Setup your layer. Use name of .dockerfile.j2 file located in ' - '/templates//layers folder. ' - 'Layer will be added to the end of product dockerfile.', + help="Setup your layer. Use name of .dockerfile.j2 file located in " + "/templates//layers folder. " + "Layer will be added to the end of product dockerfile.", ) parser.add_argument( - '--build_arg', - metavar='VAR_NAME=VALUE', - action='append', + "--build_arg", + metavar="VAR_NAME=VALUE", + action="append", default=[], - help='Specify build or template arguments for your layer. ' - 'You can use \'no_samples=True\' to remove OMZ, IE samples and demos from final docker image. ' - 'Set \'INSTALL_SOURCES=yes\' to download source for 3d party LGPL/GPL dependencies.', + help="Specify build or template arguments for your layer. " + "You can use 'no_samples=True' to remove OMZ, IE samples and demos from final docker image. " + "Set 'INSTALL_SOURCES=yes' to download source for 3d party LGPL/GPL dependencies.", ) parser.add_argument( - '--no-cache', - dest='no_cache', - action='store_true', - help='Specify if image should be built without cache. False by default.', + "--no-cache", + dest="no_cache", + action="store_true", + help="Specify if image should be built without cache. 
False by default.", ) @staticmethod def add_test_args(parser: argparse.ArgumentParser): """Adding args needed to run tests on the built Docker image""" parser.add_argument( - '-k', - metavar='EXPRESSION', - default='', - dest='test_expression', - help='Run tests which match the given substring expression for pytest -k.', + "-k", + metavar="EXPRESSION", + default="", + dest="test_expression", + help="Run tests which match the given substring expression for pytest -k.", ) parser.add_argument( - '-m', - metavar='MARKEXPR', - default='', - dest='test_mark_expression', - help='Run tests which matching given mark expression for pytest -m', + "-m", + metavar="MARKEXPR", + default="", + dest="test_mark_expression", + help="Run tests which matching given mark expression for pytest -m", ) parser.add_argument( - '--sdl_check', - metavar='NAME', - action='append', + "--sdl_check", + metavar="NAME", + action="append", default=[], - help='Enable SDL check for docker host and image. ' - 'It installs additional 3d-party docker images or executable files. ' - 'Available tests: ' - 'snyk (https://github.com/snyk/snyk), ' - 'bench_security (https://github.com/docker/docker-bench-security)', + help="Enable SDL check for docker host and image. " + "It installs additional 3d-party docker images or executable files. 
" + "Available tests: " + "snyk (https://github.com/snyk/snyk), " + "bench_security (https://github.com/docker/docker-bench-security)", ) parser.add_argument( - '--nightly', - action='store_true', + "--nightly", + action="store_true", default=False, help=argparse.SUPPRESS, # Skip tests for regular builds ) @@ -212,16 +212,16 @@ def add_test_args(parser: argparse.ArgumentParser): def add_deploy_args(parser: argparse.ArgumentParser): """Adding args needed to publish the built Docker image to a repository""" parser.add_argument( - '-r', - '--registry', - metavar='URL:PORT', + "-r", + "--registry", + metavar="URL:PORT", required=True, - help='Registry host and optionally a port in the \'host:port\' format', + help="Registry host and optionally a port in the 'host:port' format", ) parser.add_argument( - '--nightly_save_path', - default='', + "--nightly_save_path", + default="", help=argparse.SUPPRESS, # Setup saving docker image as a binary file ) @@ -229,68 +229,68 @@ def add_deploy_args(parser: argparse.ArgumentParser): def add_dist_args(cls, parser: argparse.ArgumentParser): """Adding arg needed to customize the generated dockerfile""" parser.add_argument( - '-os', + "-os", choices=cls.SUPPORTED_OS, - default='', - help='Operation System for docker image.', + default="", + help="Operation System for docker image.", ) parser.add_argument( - '-dist', - '--distribution', - choices=['base', 'runtime', 'dev', 'dev_no_samples', 'custom'], - required=' test' in parser.prog, - help='Distribution type: dev, dev_no_samples, runtime or ' - 'base (with CPU only and without installing dependencies). ' - 'Using key --file and ' - '-p are mandatory to build base distribution image.' 
- 'base dockerfiles are stored in /dockerfiles/ folder.', + "-dist", + "--distribution", + choices=["base", "runtime", "dev", "dev_no_samples", "custom"], + required=" test" in parser.prog, + help="Distribution type: dev, dev_no_samples, runtime or " + "base (with CPU only and without installing dependencies). " + "Using key --file and " + "-p are mandatory to build base distribution image." + "base dockerfiles are stored in /dockerfiles/ folder.", ) parser.add_argument( - '-p', - '--product_version', - default='', - help='Product version in format: YYYY.U[.BBB], where BBB - build number is optional.', + "-p", + "--product_version", + default="", + help="Product version in format: YYYY.U[.BBB], where BBB - build number is optional.", ) parser.add_argument( - '-w', - '--wheels_version', - default='', - help='Version specifier of OpenVINO wheels to install (will be passed to pip install). ' - 'Will be equal to product version by default.', + "-w", + "--wheels_version", + default="", + help="Version specifier of OpenVINO wheels to install (will be passed to pip install). " + "Will be equal to product version by default.", ) parser.add_argument( - '-s', - '--source', - choices=['url', 'local'], - default='url', - help='Source of the package: external URL or relative local path. By default: url.', + "-s", + "--source", + choices=["url", "local"], + default="url", + help="Source of the package: external URL or relative local path. 
By default: url.", ) parser.add_argument( - '-u', - '--package_url', - metavar='URL', - default='', - help='Package external or local url, use http://, https://, ftp:// access scheme or ' - 'relative local path', + "-u", + "--package_url", + metavar="URL", + default="", + help="Package external or local url, use http://, https://, ftp:// access scheme or " + "relative local path", ) parser.add_argument( - '-f', - '--file', - metavar='NAME', - help='Name of the Dockerfile, that uses to build an image.', + "-f", + "--file", + metavar="NAME", + help="Name of the Dockerfile, that uses to build an image.", ) def fail_if_product_version_not_supported(product_version: str, parser: DockerCIArgumentParser): if product_version is None: - parser.error('Product version is not specified.') - elif product_version < '2022.1': + parser.error("Product version is not specified.") + elif product_version < "2022.1": parser.error( f"This version of the DockerHub CI framework does not support OpenVINO releases earlier than " "2022.1.0. Current detected product version '{product_version}'. 
Please use previous versions " @@ -302,65 +302,65 @@ def parse_args(name: str, description: str): # noqa """Parse all the args set up above""" parser = DockerCIArgumentParser(name, description) - subparsers = parser.add_subparsers(dest='mode') + subparsers = parser.add_subparsers(dest="mode") gen_dockerfile_subparser = subparsers.add_parser( - 'gen_dockerfile', help='Generate a dockerfile to ' 'dockerfiles/ folder' + "gen_dockerfile", help="Generate a dockerfile to " "dockerfiles/ folder" ) parser.add_build_args(gen_dockerfile_subparser) parser.add_linter_check_args(gen_dockerfile_subparser) parser.add_dist_args(gen_dockerfile_subparser) rhel_platform_group = gen_dockerfile_subparser.add_mutually_exclusive_group() rhel_platform_group.add_argument( - '--rhel_platform', - choices=['docker', 'openshift', 'autobuild'], - default='docker', - help='Specify target platform to generate RHEL dockerfiles (default is docker). ' - 'Choose autobuild option for Red Hat portal Build System.', + "--rhel_platform", + choices=["docker", "openshift", "autobuild"], + default="docker", + help="Specify target platform to generate RHEL dockerfiles (default is docker). " + "Choose autobuild option for Red Hat portal Build System.", ) rhel_platform_group.add_argument( - '--openshift', - action='store_const', - dest='rhel_platform', - const='openshift', + "--openshift", + action="store_const", + dest="rhel_platform", + const="openshift", default=False, - help='Create a dockerfile intended to build on Red Hat OpenShift Container Platform (RHEL images only). ' - 'Alias for --rhel_platform=openshift', + help="Create a dockerfile intended to build on Red Hat OpenShift Container Platform (RHEL images only). 
" + "Alias for --rhel_platform=openshift", ) - build_subparser = subparsers.add_parser('build', help='Build a docker image') + build_subparser = subparsers.add_parser("build", help="Build a docker image") parser.add_build_args(build_subparser) parser.add_linter_check_args(build_subparser) parser.add_dist_args(build_subparser) parser.add_image_args(build_subparser) - build_test_subparser = subparsers.add_parser('build_test', help='Build and test a docker image') + build_test_subparser = subparsers.add_parser("build_test", help="Build and test a docker image") parser.add_build_args(build_test_subparser) parser.add_linter_check_args(build_test_subparser) parser.add_dist_args(build_test_subparser) parser.add_image_args(build_test_subparser) parser.add_test_args(build_test_subparser) - test_subparser = subparsers.add_parser('test', help='Test a local docker image') + test_subparser = subparsers.add_parser("test", help="Test a local docker image") parser.add_linter_check_args(test_subparser) parser.add_dist_args(test_subparser) parser.add_image_args(test_subparser) parser.add_test_args(test_subparser) test_subparser.add_argument( - '-r', - '--registry', - metavar='URL:PORT', - default='', - help='Registry host and optionally a port in the \'host:port\' format. ' - 'Will be used to pull the image if it does not exist', + "-r", + "--registry", + metavar="URL:PORT", + default="", + help="Registry host and optionally a port in the 'host:port' format. " + "Will be used to pull the image if it does not exist", ) - deploy_subparser = subparsers.add_parser('deploy', help='Deploy a docker image') + deploy_subparser = subparsers.add_parser("deploy", help="Deploy a docker image") parser.add_image_args(deploy_subparser) parser.add_deploy_args(deploy_subparser) all_subparser = subparsers.add_parser( - 'all', help='Build, test and deploy a docker image. [Default option]' + "all", help="Build, test and deploy a docker image. 
[Default option]" ) parser.add_build_args(all_subparser) parser.add_linter_check_args(all_subparser) @@ -369,7 +369,7 @@ def parse_args(name: str, description: str): # noqa parser.add_test_args(all_subparser) parser.add_deploy_args(all_subparser) - parser.set_default_subparser('all') + parser.set_default_subparser("all") args = parser.parse_args() @@ -381,285 +381,285 @@ def parse_args(name: str, description: str): # noqa elif isinstance(arg_val, str): check_printable_utf8_chars(arg_val) - for attr_name in ('package_url', 'file', 'image_json_path'): + for attr_name in ("package_url", "file", "image_json_path"): if hasattr(args, attr_name) and getattr(args, attr_name): check_internal_local_path(getattr(args, attr_name)) if ( - args.mode != 'deploy' + args.mode != "deploy" and args.package_url - and args.source == 'local' - and not args.package_url.startswith(('http://', 'https://', 'ftp://')) + and args.source == "local" + and not args.package_url.startswith(("http://", "https://", "ftp://")) ): args.package_url = str(pathlib.Path(args.package_url).as_posix()) if ( - args.mode not in ('test', 'deploy') - and hasattr(args, 'distribution') - and args.distribution == 'custom' + args.mode not in ("test", "deploy") + and hasattr(args, "distribution") + and args.distribution == "custom" ): - parser.error('For a custom distribution, only test and deploy modes are available.') + parser.error("For a custom distribution, only test and deploy modes are available.") if ( - hasattr(args, 'sdl_check') + hasattr(args, "sdl_check") and args.sdl_check - and ('snyk' not in args.sdl_check and 'bench_security' not in args.sdl_check) + and ("snyk" not in args.sdl_check and "bench_security" not in args.sdl_check) ): - parser.error('Incorrect arguments for --sdl_check. Available tests: snyk, bench_security') + parser.error("Incorrect arguments for --sdl_check. 
Available tests: snyk, bench_security") if ( - hasattr(args, 'linter_check') + hasattr(args, "linter_check") and args.linter_check - and ('hadolint' not in args.linter_check and 'dive' not in args.linter_check) + and ("hadolint" not in args.linter_check and "dive" not in args.linter_check) ): - parser.error('Incorrect arguments for --linter_check. Available tests: hadolint, dive') + parser.error("Incorrect arguments for --linter_check. Available tests: hadolint, dive") if ( - args.mode in ('build', 'build_test', 'all') - and args.distribution == 'base' + args.mode in ("build", "build_test", "all") + and args.distribution == "base" and not args.file ): - parser.error('The following argument is required: -f/--file') + parser.error("The following argument is required: -f/--file") - if args.mode == 'deploy' and not args.tags: - parser.error('The following argument is required: -t/--tags') + if args.mode == "deploy" and not args.tags: + parser.error("The following argument is required: -t/--tags") - if hasattr(args, 'os') and not args.os: + if hasattr(args, "os") and not args.os: possible_os: typing.Set[str] = set() if args.package_url: possible_os.update(filter(lambda os: os in args.package_url, parser.SUPPORTED_OS)) - if hasattr(args, 'tags') and args.tags: + if hasattr(args, "tags") and args.tags: for tag in args.tags: possible_os.update(filter(lambda os: os in tag, parser.SUPPORTED_OS)) # noqa: B023 if len(possible_os) == 1: args.os = possible_os.pop() else: parser.error( - 'Can not get image OS from package URL or tags. ' 'Please specify -os directly' + "Can not get image OS from package URL or tags. 
" "Please specify -os directly" ) if ( - args.mode in ('gen_dockerfile', 'build', 'build_test', 'all') - and args.distribution == 'dev_no_samples' - and 'ubuntu' not in args.os + args.mode in ("gen_dockerfile", "build", "build_test", "all") + and args.distribution == "dev_no_samples" + and "ubuntu" not in args.os ): - parser.error('Distribution dev_no_samples is available only for Ubuntu operation system') + parser.error("Distribution dev_no_samples is available only for Ubuntu operation system") - if args.mode == 'gen_dockerfile' and args.distribution == 'base': + if args.mode == "gen_dockerfile" and args.distribution == "base": parser.error( - 'Generating dockerfile for base distribution is not available. ' - 'Use generated base dockerfiles are stored in /dockerfiles/ folder' + "Generating dockerfile for base distribution is not available. " + "Use generated base dockerfiles are stored in /dockerfiles/ folder" ) - if args.mode == 'test' and not (args.tags and args.distribution): + if args.mode == "test" and not (args.tags and args.distribution): parser.error( - 'Options --tags and --distribution are mandatory. Image operation system is \'ubuntu18\'' - ' by default.' + "Options --tags and --distribution are mandatory. Image operation system is 'ubuntu18'" + " by default." ) - if args.mode == 'test' and 'runtime' in args.distribution and not args.package_url: + if args.mode == "test" and "runtime" in args.distribution and not args.package_url: logger.info( - '\nYou can run samples/demos on runtime docker image. ' - 'Please provide --package_url key with path to dev distribution package in ' - 'http/https/ftp access scheme or a local file in the project location as dependent package ' - 'to run all available tests.\n' + "\nYou can run samples/demos on runtime docker image. 
" + "Please provide --package_url key with path to dev distribution package in " + "http/https/ftp access scheme or a local file in the project location as dependent package " + "to run all available tests.\n" ) - if args.mode in ('deploy', 'all') and not hasattr(args, 'registry'): - parser.error('Option --registry is mandatory for this mode.') + if args.mode in ("deploy", "all") and not hasattr(args, "registry"): + parser.error("Option --registry is mandatory for this mode.") - if hasattr(args, 'image_json_path') and args.image_json_path: + if hasattr(args, "image_json_path") and args.image_json_path: args.image_json_path = pathlib.Path(args.image_json_path).absolute() if args.image_json_path.is_symlink(): parser.error( - 'Do not use symlink and hard link for --image_json_path key. It is an insecure way.' + "Do not use symlink and hard link for --image_json_path key. It is an insecure way." ) - if hasattr(args, 'file') and args.file: + if hasattr(args, "file") and args.file: args.file = pathlib.Path(args.file).absolute() if args.file.is_symlink(): - parser.error('Do not use symlink and hard link for --file key. It is an insecure way. ') + parser.error("Do not use symlink and hard link for --file key. It is an insecure way. 
") if not args.file.exists(): - parser.error(f'Cannot find specified Dockerfile: {str(args.file)}.') + parser.error(f"Cannot find specified Dockerfile: {str(args.file)}.") - if not hasattr(args, 'rhel_platform'): - args.rhel_platform = 'docker' - if args.rhel_platform != 'docker' and args.os != 'rhel8': + if not hasattr(args, "rhel_platform"): + args.rhel_platform = "docker" + if args.rhel_platform != "docker" and args.os != "rhel8": parser.error( - 'Dockerfile generation intended for non-Docker platforms ' - 'is supported only for RHEL-based images' + "Dockerfile generation intended for non-Docker platforms " + "is supported only for RHEL-based images" ) - if hasattr(args, 'product_version') and args.product_version: - logger.info(f'Found product version {args.product_version} in arguments.') + if hasattr(args, "product_version") and args.product_version: + logger.info(f"Found product version {args.product_version} in arguments.") fail_if_product_version_not_supported(args.product_version, parser) - product_version = re.search(r'^\d{4}\.\d$', args.product_version) + product_version = re.search(r"^\d{4}\.\d$", args.product_version) if product_version: # save product version YYYY.U as YYYY.U.0 - args.product_version = f'{product_version.group()}.0' + args.product_version = f"{product_version.group()}.0" - if args.mode in ('gen_dockerfile', 'build', 'build_test', 'all'): - if args.package_url and not args.package_url.startswith(('http://', 'https://', 'ftp://')): - if args.source == 'local' and not pathlib.Path(args.package_url).exists(): + if args.mode in ("gen_dockerfile", "build", "build_test", "all"): + if args.package_url and not args.package_url.startswith(("http://", "https://", "ftp://")): + if args.source == "local" and not pathlib.Path(args.package_url).exists(): parser.error( - 'Provided local path of the package should be relative to folder ' - f'or should be an http/https/ftp access scheme: {args.package_url}' + "Provided local path of the package should 
be relative to folder " + f"or should be an http/https/ftp access scheme: {args.package_url}" ) - elif args.source == 'url' and args.distribution != 'base': + elif args.source == "url" and args.distribution != "base": parser.error( - 'Provided URL is not supported, use http://, https:// or ftp:// access scheme' + "Provided URL is not supported, use http://, https:// or ftp:// access scheme" ) - elif args.source == 'local' and pathlib.Path(args.package_url).is_symlink(): + elif args.source == "local" and pathlib.Path(args.package_url).is_symlink(): parser.error( - 'Do not use symlink and hard link to specify local package url. ' - 'It is an insecure way.' + "Do not use symlink and hard link to specify local package url. " + "It is an insecure way." ) if not args.python: - if args.os in 'ubuntu22': - args.python = 'python310' + if args.os in "ubuntu22": + args.python = "python310" else: - args.python = 'python38' + args.python = "python38" - if args.python == 'python38' and 'win' in args.os: - if not hasattr(args, 'pre_stage_msbuild') or not args.pre_stage_msbuild: + if args.python == "python38" and "win" in args.os: + if not hasattr(args, "pre_stage_msbuild") or not args.pre_stage_msbuild: parser.error( - 'Option --pre_stage_msbuild is required for Windows images to build the latest version ' - 'of Python 3.8' + "Option --pre_stage_msbuild is required for Windows images to build the latest version " + "of Python 3.8" ) if not args.distribution and args.package_url: - if '_runtime_' in args.package_url: - args.distribution = 'runtime' - elif '_dev_' in args.package_url: - args.distribution = 'dev' + if "_runtime_" in args.package_url: + args.distribution = "runtime" + elif "_dev_" in args.package_url: + args.distribution = "dev" else: parser.error( - f'Cannot get distribution type from the package URL provided. {args.package_url} ' - 'Please specify --distribution directly.' + f"Cannot get distribution type from the package URL provided. 
{args.package_url} " + "Please specify --distribution directly." ) # set installation method for the package - args.install_type = 'copy' + args.install_type = "copy" # workaround for https://bugs.python.org/issue16399 issue - if not args.device and 'win' not in args.os: - if args.distribution == 'base': - args.device = ['cpu'] - elif args.os == 'rhel8': - args.device = ['cpu', 'gpu'] + if not args.device and "win" not in args.os: + if args.distribution == "base": + args.device = ["cpu"] + elif args.os == "rhel8": + args.device = ["cpu", "gpu"] else: - args.device = ['cpu', 'gpu'] # 2022.3 v/h not supported + args.device = ["cpu", "gpu"] # 2022.3 v/h not supported elif not args.device: - args.device = ['cpu'] + args.device = ["cpu"] if not args.package_url and not args.product_version: latest_public_version = max(INTEL_OPENVINO_VERSION.__iter__()) args.product_version = ( - '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version + "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version ) - args.build_id = '' + args.build_id = "" - if not args.package_url and args.distribution not in ('base',): + if not args.package_url and args.distribution not in ("base",): if not args.distribution or not args.product_version: parser.error( - 'Insufficient arguments. Provide --package_url ' - 'or --distribution (with optional --product_version) arguments' + "Insufficient arguments. 
Provide --package_url " + "or --distribution (with optional --product_version) arguments" ) - if args.mode != 'gen_dockerfile' or args.rhel_platform == 'autobuild': - dev_version = re.search(r'^\d{4}\.(?:\d\.){2,3}dev\d{8}$', args.product_version) + if args.mode != "gen_dockerfile" or args.rhel_platform == "autobuild": + dev_version = re.search(r"^\d{4}\.(?:\d\.){2,3}dev\d{8}$", args.product_version) if dev_version: args.product_version = dev_version.group() else: - lts_version = re.search(r'(\d{4}\.\d\.\d)', args.product_version) + lts_version = re.search(r"(\d{4}\.\d\.\d)", args.product_version) if lts_version: args.product_version = lts_version.group() # save product version YYYY.U.V else: - parser.error(f'Cannot find package url for {args.product_version} version') + parser.error(f"Cannot find package url for {args.product_version} version") with contextlib.suppress(KeyError): args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os][ args.distribution ] if not args.package_url: parser.error( - f'Cannot find package url for {args.product_version} version ' - f'and {args.distribution} distribution. Please specify --package_url directly.' + f"Cannot find package url for {args.product_version} version " + f"and {args.distribution} distribution. Please specify --package_url directly." 
) if args.package_url and not args.build_id: - logger.info(f'Parsing product version in the package_url...') - dev_version = re.search(r'_(\d{4}\.(?:\d\.){2,3}dev\d{8})_', args.package_url) + logger.info(f"Parsing product version in the package_url...") + dev_version = re.search(r"_(\d{4}\.(?:\d\.){2,3}dev\d{8})_", args.package_url) if dev_version: # save product version and build version as YYYY.U.V.devYYYYMMDD args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r'_(\d{4}\.(?:\d\.){2,3})(\d+?)?', args.package_url) + build_id = re.search(r"_(\d{4}\.(?:\d\.){2,3})(\d+?)?", args.package_url) if build_id: # save product version YYYY.U.V.BBB - args.build_id = '.'.join(build_id.groups()) + args.build_id = ".".join(build_id.groups()) # save product version YYYY.U.V args.product_version = build_id.group(1) else: args.build_id = args.product_version if not args.dockerfile_name: - devices = ''.join([d[0] for d in args.device]) - layers = '_'.join(args.layers) - openshift = 'openshift_' if args.rhel_platform == 'openshift' else '' + devices = "".join([d[0] for d in args.device]) + layers = "_".join(args.layers) + openshift = "openshift_" if args.rhel_platform == "openshift" else "" version = args.product_version if layers: - args.dockerfile_name = f'openvino_{openshift}{layers}_{version}.dockerfile' + args.dockerfile_name = f"openvino_{openshift}{layers}_{version}.dockerfile" else: args.dockerfile_name = ( - f'openvino_{devices}_{openshift}{args.distribution}_{version}.dockerfile' + f"openvino_{devices}_{openshift}{args.distribution}_{version}.dockerfile" ) - if not hasattr(args, 'wheels_version') or not args.wheels_version: + if not hasattr(args, "wheels_version") or not args.wheels_version: args.wheels_version = ( args.product_version if args.build_id == args.product_version - else f'{args.product_version}.*' + else f"{args.product_version}.*" ) - if not hasattr(args, 'tags') or not args.tags: - layers = 
'_'.join(args.layers) - tgl_postfix = '' + if not hasattr(args, "tags") or not args.tags: + layers = "_".join(args.layers) + tgl_postfix = "" if layers: args.tags = [ - f'{args.os}_{layers}:' - f'{args.build_id if args.build_id else args.product_version}{tgl_postfix}', - f'{args.os}_{layers}:latest', + f"{args.os}_{layers}:" + f"{args.build_id if args.build_id else args.product_version}{tgl_postfix}", + f"{args.os}_{layers}:latest", ] - if hasattr(args, 'tag_postfix') and args.tag_postfix: + if hasattr(args, "tag_postfix") and args.tag_postfix: args.tags.append( - f'{args.os}_{layers}:{args.build_id if args.build_id else args.product_version}' - f'{tgl_postfix}{args.tag_postfix}' + f"{args.os}_{layers}:{args.build_id if args.build_id else args.product_version}" + f"{tgl_postfix}{args.tag_postfix}" ) - elif args.distribution == 'base': + elif args.distribution == "base": args.tags = [ - f'{args.os}_{args.distribution}_cpu:' f'{args.product_version}', - f'{args.os}_{args.distribution}_cpu:latest', + f"{args.os}_{args.distribution}_cpu:" f"{args.product_version}", + f"{args.os}_{args.distribution}_cpu:latest", ] - if hasattr(args, 'tag_postfix') and args.tag_postfix: + if hasattr(args, "tag_postfix") and args.tag_postfix: args.tags.append( - f'{args.os}_{args.distribution}_cpu:' - f'{args.product_version}{args.tag_postfix}' + f"{args.os}_{args.distribution}_cpu:" + f"{args.product_version}{args.tag_postfix}" ) else: args.tags = [ - f'{args.os}_{args.distribution}:' - f'{args.build_id if args.build_id else args.product_version}{tgl_postfix}', - f'{args.os}_{args.distribution}:latest', + f"{args.os}_{args.distribution}:" + f"{args.build_id if args.build_id else args.product_version}{tgl_postfix}", + f"{args.os}_{args.distribution}:latest", ] - if hasattr(args, 'tag_postfix') and args.tag_postfix: + if hasattr(args, "tag_postfix") and args.tag_postfix: args.tags.append( - f'{args.os}_{args.distribution}:' - f'{args.build_id if args.build_id else args.product_version}' - 
f'{tgl_postfix}{args.tag_postfix}' + f"{args.os}_{args.distribution}:" + f"{args.build_id if args.build_id else args.product_version}" + f"{tgl_postfix}{args.tag_postfix}" ) - if args.mode not in ('test', 'deploy'): + if args.mode not in ("test", "deploy"): if args.build_id: args.year = args.build_id[:4] elif args.product_version: @@ -667,56 +667,56 @@ def parse_args(name: str, description: str): # noqa else: args.year = None - if args.mode == 'test' and not args.product_version: - match = re.search(r':(\d{4}\.\d\.\d)', str(args.tags)) + if args.mode == "test" and not args.product_version: + match = re.search(r":(\d{4}\.\d\.\d)", str(args.tags)) if not match and args.package_url: - match = re.search(r'_(\d{4}\.\d\.\d)', args.package_url) + match = re.search(r"_(\d{4}\.\d\.\d)", args.package_url) if match: # save product version YYYY.U.V args.product_version = match.group(1) - elif args.distribution == 'custom': + elif args.distribution == "custom": latest_public_version = list(INTEL_OPENVINO_VERSION.keys())[-1] args.product_version = ( - '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version + "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version ) else: parser.error( - 'Cannot get product_version from the package URL and docker image. ' - 'Please specify --product_version directly.' + "Cannot get product_version from the package URL and docker image. " + "Please specify --product_version directly." 
) - if args.mode in 'test' and (not hasattr(args, 'wheels_version') or not args.wheels_version): + if args.mode in "test" and (not hasattr(args, "wheels_version") or not args.wheels_version): latest_public_version = max(INTEL_OPENVINO_VERSION.__iter__()) latest_public_version = ( - '2022.2.0' if latest_public_version <= '2022.2.0' else latest_public_version + "2022.2.0" if latest_public_version <= "2022.2.0" else latest_public_version ) args.wheels_version = ( - args.product_version if hasattr(args, 'product_version') else latest_public_version + args.product_version if hasattr(args, "product_version") else latest_public_version ) - if hasattr(args, 'product_version'): + if hasattr(args, "product_version"): fail_if_product_version_not_supported(args.product_version, parser) - if hasattr(args, 'distribution') and args.distribution == 'custom': + if hasattr(args, "distribution") and args.distribution == "custom": if ( - subprocess.call( # nosec B603 B607 - ['docker', 'run', '--rm', args.tags[0], 'ls', 'extras/opencv'], # nosec B603 B607 + subprocess.call( # nosec B603 B607 + ["docker", "run", "--rm", args.tags[0], "ls", "extras/opencv"], # nosec B603 B607 stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) != 0 ): - args.distribution = 'custom-no-cv' + args.distribution = "custom-no-cv" else: - args.distribution = 'custom-full' + args.distribution = "custom-full" - if hasattr(args, 'distribution'): - if not args.package_url and args.mode == 'test' and args.distribution == 'custom-no-cv': + if hasattr(args, "distribution"): + if not args.package_url and args.mode == "test" and args.distribution == "custom-no-cv": if args.product_version in INTEL_OPENVINO_VERSION: - args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os]['dev'] + args.package_url = INTEL_OPENVINO_VERSION[args.product_version][args.os]["dev"] else: parser.error( - f'Cannot find URL to package with test dependencies for {args.product_version} release. 
' - f'Please specify --package_url directly' + f"Cannot find URL to package with test dependencies for {args.product_version} release. " + f"Please specify --package_url directly" ) return args From 988d83c99207de350025b92fdd9a62d710f53368 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 15:39:57 +0200 Subject: [PATCH 25/30] disabled C812 --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 31d9cc60..760bbaa6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,7 +9,7 @@ ignore = DAR101, DAR201, DAR401, D107, D415, I201, T001, S404, S603, G004, B009, enable-extensions=G per-file-ignores = tests/*: D100,D101,D102,D104,S108,CFQ004,PT023 - utils/*: Q000 + utils/*: Q000,C812 tests/conftest.py: D100,D101,D102,D103,D104,CFQ004 [pydocstyle] From 3dd5907304fbf2dacd89e41232ba4f5386318687 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 15:49:40 +0200 Subject: [PATCH 26/30] Linter fix. Updated messages in tests --- utils/arg_parser.py | 6 +++--- utils/tests/test_arg_parser.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index f7492b29..071bbdef 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -292,8 +292,8 @@ def fail_if_product_version_not_supported(product_version: str, parser: DockerCI parser.error("Product version is not specified.") elif product_version < "2022.1": parser.error( - f"This version of the DockerHub CI framework does not support OpenVINO releases earlier than " - "2022.1.0. Current detected product version '{product_version}'. Please use previous versions " + "This version of the DockerHub CI framework does not support OpenVINO releases earlier than " + f"2022.1.0. Current detected product version {product_version}. Please use previous versions " "of the DockerHub CI." 
) @@ -586,7 +586,7 @@ def parse_args(name: str, description: str): # noqa ) if args.package_url and not args.build_id: - logger.info(f"Parsing product version in the package_url...") + logger.info(f"Parsing product version in the package_url {args.package_url}...") dev_version = re.search(r"_(\d{4}\.(?:\d\.){2,3}dev\d{8})_", args.package_url) if dev_version: # save product version and build version as YYYY.U.V.devYYYYMMDD diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index 779b5836..00088ecc 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -394,7 +394,7 @@ def test_arg_parser_success(mock_exists, mock_parser, args, res): 'distribution': 'dev', 'test_expression': 'cpu', }, - 'Options --tags and --distribution are mandatory. Image operation system is "ubuntu18" by default.', + "Options --tags and --distribution are mandatory. Image operation system is 'ubuntu18' by default.", id='Test without --tags', ), pytest.param( @@ -403,7 +403,7 @@ def test_arg_parser_success(mock_exists, mock_parser, args, res): 'test_expression': 'cpu', 'tags': ['test:latest'], }, - 'Options --tags and --distribution are mandatory. Image operation system is "ubuntu18" by default.', + "Options --tags and --distribution are mandatory. 
Image operation system is 'ubuntu18' by default.", id='Test without --distribution', ), pytest.param( From 404e2d761532284286fbb1607d5899030fe41e2a Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 16:08:04 +0200 Subject: [PATCH 27/30] arg_parser.py: Fixed regex --- utils/arg_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/arg_parser.py b/utils/arg_parser.py index 071bbdef..4566e11a 100644 --- a/utils/arg_parser.py +++ b/utils/arg_parser.py @@ -593,7 +593,7 @@ def parse_args(name: str, description: str): # noqa args.product_version = dev_version.group(1) args.build_id = args.product_version else: - build_id = re.search(r"_(\d{4}\.(?:\d\.){2,3})(\d+?)?", args.package_url) + build_id = re.search(r"_(\d{4}\.(?:\d\.){2,3}\d*)", args.package_url) if build_id: # save product version YYYY.U.V.BBB args.build_id = ".".join(build_id.groups()) From c2072f2009ff5055e396495eb284b9a0e72651d0 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 16:10:40 +0200 Subject: [PATCH 28/30] test_arg_parser.py: Switched from python38 to python310 --- utils/tests/test_arg_parser.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index 00088ecc..a9e2a0d5 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -45,7 +45,7 @@ { 'device': ['cpu', 'gpu'], 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', - 'python': 'python38', + 'python': 'python310', 'tags': ['ubuntu18_dev:2022.1.0', 'ubuntu18_dev:latest'], 'image_json_path': pathlib.Path('image_data.json').absolute(), 'install_type': 'copy', @@ -63,7 +63,7 @@ { 'device': ['cpu', 'gpu'], 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', - 'python': 'python38', + 'python': 'python310', 'tags': ['ubuntu22_dev:2024.5.0.0', 'ubuntu22_dev:latest'], 'distribution': 'dev', 'install_type': 'copy', @@ -83,7 +83,7 @@ { 'device': ['cpu', 'gpu'], 
'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', - 'python': 'python38', + 'python': 'python310', 'tags': ['ubuntu22_dev:2024.5.0.0', 'ubuntu22_dev:latest', 'ubuntu22_dev:2024.5.0.0_qqq'], 'distribution': 'dev', 'install_type': 'copy', @@ -102,7 +102,7 @@ # { # 'device': ['cpu'], # 'dockerfile_name': 'openvino_c_dev_2022.1.0.dockerfile', - # 'python': 'python38', + # 'python': 'python310', # 'tags': ['winserver2019_dev:2022.1.0.320', 'winserver2019_dev:latest'], # 'distribution': 'dev', # 'product_version': '2022.1.0', @@ -120,7 +120,7 @@ # { # 'device': ['cpu'], # 'dockerfile_name': 'openvino_c_base_2022.1.0.dockerfile', - # 'python': 'python38', + # 'python': 'python310', # 'tags': ['ubuntu18_base_cpu:2022.1.0', 'ubuntu18_base_cpu:latest'], # 'distribution': 'base', # 'product_version': '2022.1.0', @@ -136,7 +136,7 @@ }, { 'device': ['cpu', 'gpu'], - 'python': 'python38', + 'python': 'python310', 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', 'tags': ['ubuntu18_dev:2022.1.0', 'ubuntu18_dev:latest'], 'distribution': 'dev', @@ -153,7 +153,7 @@ }, { 'device': ['cpu', 'gpu'], - 'python': 'python38', + 'python': 'python310', 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', 'tags': ['ubuntu18_dev:2022.1.0', 'ubuntu18_dev:latest'], 'distribution': 'dev', @@ -171,7 +171,7 @@ }, { 'device': ['cpu', 'gpu'], - 'python': 'python38', + 'python': 'python310', 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dev20240905.dockerfile', 'tags': ['ubuntu22_dev:2024.5.0.0.dev20240905', 'ubuntu22_dev:latest'], 'distribution': 'dev', @@ -192,7 +192,7 @@ }, { 'device': ['cpu', 'gpu'], - 'python': 'python38', + 'python': 'python310', 'dockerfile_name': 'openvino_ch_dev_2024.5.0.dockerfile', 'tags': ['my_tag:latest'], 'distribution': 'dev', @@ -251,7 +251,7 @@ }, { 'device': ['cpu', 'gpu'], - 'python': 'python38', + 'python': 'python310', 'tags': ['ubuntu22_dev:2024.5.0.0', 'ubuntu22_dev:latest'], 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', 
'distribution': 'dev', From b2eb6209348384ceae020c3215c8f4737e141d52 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 16:19:07 +0200 Subject: [PATCH 29/30] test_arg_parser.py: Fixed dockerfile_name --- utils/tests/test_arg_parser.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index a9e2a0d5..59244804 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -39,6 +39,7 @@ 'package_url': 'openvino.zip', 'distribution': 'dev', 'source': 'local', + 'os': 'ubuntu22', 'product_version': '2022.1.0', 'image_json_path': 'image_data.json', }, @@ -132,6 +133,7 @@ 'mode': 'build', 'package_url': 'openvino_dev_p_2022.1.320.zip', 'source': 'local', + 'os': 'ubuntu22', 'product_version': '2022.1.0', }, { @@ -149,6 +151,7 @@ 'mode': 'build', 'package_url': 'openvino_dev_p_2022.1.320.zip', 'source': 'local', + 'os': 'ubuntu22', 'product_version': '2022.1', }, { @@ -183,7 +186,7 @@ pytest.param( { 'mode': 'build', - 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0.tar.gz', + 'package_url': 'openvino_genai_ubuntu22_2024.5.0.0_x86_64.tar.gz', 'source': 'local', 'os': 'ubuntu22', 'distribution': 'dev', @@ -193,7 +196,7 @@ { 'device': ['cpu', 'gpu'], 'python': 'python310', - 'dockerfile_name': 'openvino_ch_dev_2024.5.0.dockerfile', + 'dockerfile_name': 'openvino_cg_dev_2024.5.0.dockerfile', 'tags': ['my_tag:latest'], 'distribution': 'dev', 'product_version': '2024.5.0.0', From aa4a054edf9662802ea1a1fa7919d05bcf81ec30 Mon Sep 17 00:00:00 2001 From: Anokhov Date: Wed, 11 Sep 2024 16:22:38 +0200 Subject: [PATCH 30/30] Fixed tests --- utils/tests/test_arg_parser.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/utils/tests/test_arg_parser.py b/utils/tests/test_arg_parser.py index 59244804..aa38b1ee 100644 --- a/utils/tests/test_arg_parser.py +++ b/utils/tests/test_arg_parser.py @@ -47,7 +47,7 @@ 'device': ['cpu', 'gpu'], 
'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', 'python': 'python310', - 'tags': ['ubuntu18_dev:2022.1.0', 'ubuntu18_dev:latest'], + 'tags': ['ubuntu22_dev:2022.1.0', 'ubuntu22_dev:latest'], 'image_json_path': pathlib.Path('image_data.json').absolute(), 'install_type': 'copy', }, @@ -140,7 +140,7 @@ 'device': ['cpu', 'gpu'], 'python': 'python310', 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', - 'tags': ['ubuntu18_dev:2022.1.0', 'ubuntu18_dev:latest'], + 'tags': ['ubuntu22_dev:2022.1.0', 'ubuntu22_dev:latest'], 'distribution': 'dev', 'product_version': '2022.1.0', }, @@ -158,7 +158,7 @@ 'device': ['cpu', 'gpu'], 'python': 'python310', 'dockerfile_name': 'openvino_cg_dev_2022.1.0.dockerfile', - 'tags': ['ubuntu18_dev:2022.1.0', 'ubuntu18_dev:latest'], + 'tags': ['ubuntu22_dev:2022.1.0', 'ubuntu22_dev:latest'], 'distribution': 'dev', 'product_version': '2022.1.0', }, @@ -196,7 +196,7 @@ { 'device': ['cpu', 'gpu'], 'python': 'python310', - 'dockerfile_name': 'openvino_cg_dev_2024.5.0.dockerfile', + 'dockerfile_name': 'openvino_cg_dev_2024.5.0.0.dockerfile', 'tags': ['my_tag:latest'], 'distribution': 'dev', 'product_version': '2024.5.0.0',