Commit ce1abc9

Authored May 27, 2021
Add unittests for custom Python environments (triton-inference-server#2897)
* Add unittests for Python environments
* Fix libarchive dependency
* Add checks for installed NumPy version
1 parent 5d18021 commit ce1abc9
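Background: the new tests exercise the Python backend's custom execution environment support, in which a conda-packed environment tarball is attached to a model through the EXECUTION_ENV_PATH parameter in its config.pbtxt. A minimal sketch of that wiring, following the pattern used by the updated test.sh below (the model name and paths are illustrative, not part of this commit):

# Pack the currently active conda environment and point a model at it.
# Sketch only; assumes conda-pack is installed in the active environment.
conda-pack -o python3.9.tar.gz
path_to_conda_pack=`pwd`/python3.9.tar.gz
echo "parameters: {key: \"EXECUTION_ENV_PATH\", value: {string_value: \"$path_to_conda_pack\"}}" >> models/python_3_9/config.pbtxt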

File tree

5 files changed: +220 -14 lines
‎Dockerfile.QA

+2

@@ -252,9 +252,11 @@ RUN if [ $(cat /etc/os-release | grep 'VERSION_ID="20.04"' | wc -l) -ne 0 ]; the
     fi
 
 # CI/QA for memcheck requires valgrind
+# libarchive-dev is required by Python backend
 RUN apt-get update && apt-get install -y --no-install-recommends \
             curl \
             libopencv-dev \
+            libarchive-dev \
             libopencv-core-dev \
             libzmq3-dev \
             python3-dev \
‎build.py

+5 -6

@@ -489,7 +489,7 @@ def create_dockerfile_buildbase(ddir, dockerfile_name, argmap, backends):
 
 # libcurl4-openSSL-dev is needed for GCS
 # python3-dev is needed by Torchvision
-# python3-pip is needed by python backend
+# python3-pip and libarchive-dev is needed by python backend
 # uuid-dev and pkg-config is needed for Azure Storage
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
@@ -513,14 +513,13 @@ def create_dockerfile_buildbase(ddir, dockerfile_name, argmap, backends):
             unzip \
             wget \
             zlib1g-dev \
+            libarchive-dev \
            pkg-config \
            uuid-dev && \
    rm -rf /var/lib/apt/lists/*
 
-# grpcio-tools grpcio-channelz are needed by python backend
 RUN pip3 install --upgrade pip && \
-    pip3 install --upgrade wheel setuptools docker && \
-    pip3 install grpcio-tools grpcio-channelz
+    pip3 install --upgrade wheel setuptools docker
 
 # Server build requires recent version of CMake (FetchContent required)
 RUN wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | \
@@ -648,11 +647,11 @@ def create_dockerfile_linux(ddir, dockerfile_name, argmap, backends, repoagents,
 # python3, python3-pip and some pip installs required for the python backend
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
-        python3 \
+        python3 libarchive-dev \
         python3-pip && \
     pip3 install --upgrade pip && \
     pip3 install --upgrade wheel setuptools && \
-    pip3 install --upgrade grpcio-tools grpcio-channelz numpy && \
+    pip3 install --upgrade numpy && \
     rm -rf /var/lib/apt/lists/*
 '''
 df += '''

‎qa/L0_backend_python/test.sh

+120 -8

@@ -31,6 +31,7 @@ EXPECTED_NUM_TESTS="10"
 
 SERVER=/opt/tritonserver/bin/tritonserver
 BASE_SERVER_ARGS="--model-repository=`pwd`/models --log-verbose=1"
+PYTHON_BACKEND_BRANCH=$PYTHON_BACKEND_REPO_TAG
 SERVER_ARGS=$BASE_SERVER_ARGS
 SERVER_LOG="./inference_server.log"
 REPO_VERSION=${NVIDIA_TRITON_SERVER_VERSION}
@@ -42,6 +43,34 @@ get_shm_pages() {
     echo ${#shm_pages[@]}
 }
 
+install_conda() {
+    rm -rf ./miniconda
+    file_name="Miniconda3-py38_4.9.2-Linux-x86_64.sh"
+    wget https://repo.anaconda.com/miniconda/$file_name
+
+    # install miniconda in silent mode
+    bash $file_name -p ./miniconda -b
+
+    # activate conda
+    eval "$(./miniconda/bin/conda shell.bash hook)"
+}
+
+create_conda_env() {
+    python_version=$1
+    env_name=$2
+    conda create -n $env_name python=$python_version -y
+    conda activate $env_name
+    conda install conda-pack -y
+}
+
+create_python_backend_stub() {
+    rm -rf python_backend
+    git clone https://github.com/triton-inference-server/python_backend -b $PYTHON_BACKEND_BRANCH
+    (cd python_backend/ && mkdir builddir && cd builddir && \
+    cmake -DTRITON_BACKEND_REPO_TAG=$TRITON_BACKEND_REPO_TAG -DTRITON_COMMON_REPO_TAG=$TRITON_COMMON_REPO_TAG -DTRITON_CORE_REPO_TAG=$TRITON_CORE_REPO_TAG ../ && \
+    make -j18 triton-python-backend-stub)
+}
+
 rm -fr *.log ./models
 
 mkdir -p models/identity_fp32/1/
@@ -163,6 +192,7 @@ set -e
 
 kill $SERVER_PID
 wait $SERVER_PID
+sleep 4
 
 current_num_pages=`get_shm_pages`
 if [ $current_num_pages -ne $prev_num_pages ]; then
@@ -261,9 +291,11 @@ if [ "$SERVER_PID" == "0" ]; then
     cat $SERVER_LOG
     exit 1
 fi
+set -e
 
 kill $SERVER_PID
 wait $SERVER_PID
+sleep 5
 
 current_num_pages=`get_shm_pages`
 if [ $current_num_pages -ne $prev_num_pages ]; then
@@ -272,9 +304,10 @@ if [ $current_num_pages -ne $prev_num_pages ]; then
     echo -e "\n***\n*** Test Failed. Shared memory pages where not cleaned properly.
 Shared memory pages before starting triton equals to $prev_num_pages
 and shared memory pages after starting triton equals to $current_num_pages \n***"
-    RET=1
+    exit 1
 fi
 
+set +e
 grep "name 'undefined_variable' is not defined" $SERVER_LOG
 
 if [ $? -ne 0 ]; then
@@ -284,6 +317,7 @@ if [ $? -ne 0 ]; then
 fi
 set -e
 
+set +e
 # Test KIND_GPU
 rm -rf models/
 mkdir -p models/add_sub_gpu/1/
@@ -299,13 +333,15 @@ if [ "$SERVER_PID" == "0" ]; then
 fi
 
 if [ $? -ne 0 ]; then
-    cat $CLIENT_LOG
+    cat $SERVER_LOG
     echo -e "\n***\n*** KIND_GPU model test failed \n***"
     RET=1
 fi
+set -e
 
 kill $SERVER_PID
 wait $SERVER_PID
+sleep 5
 
 current_num_pages=`get_shm_pages`
 if [ $current_num_pages -ne $prev_num_pages ]; then
@@ -314,7 +350,7 @@ if [ $current_num_pages -ne $prev_num_pages ]; then
     echo -e "\n***\n*** Test Failed. Shared memory pages where not cleaned properly.
 Shared memory pages before starting triton equals to $prev_num_pages
 and shared memory pages after starting triton equals to $current_num_pages \n***"
-    RET=1
+    exit 1
 fi
 
 # Test Multi file models
@@ -325,6 +361,7 @@ cp ../python_models/identity_fp32/config.pbtxt ./models/multi_file/
 (cd models/multi_file && \
     sed -i "s/^name:.*/name: \"multi_file\"/" config.pbtxt)
 
+set +e
 prev_num_pages=`get_shm_pages`
 run_server
 if [ "$SERVER_PID" == "0" ]; then
@@ -334,22 +371,25 @@ if [ "$SERVER_PID" == "0" ]; then
 fi
 
 if [ $? -ne 0 ]; then
-    cat $CLIENT_LOG
+    cat $SERVER_LOG
     echo -e "\n***\n*** multi-file model test failed \n***"
     RET=1
 fi
 
+set +e
 kill $SERVER_PID
 wait $SERVER_PID
+sleep 5
+set -e
 
 current_num_pages=`get_shm_pages`
 if [ $current_num_pages -ne $prev_num_pages ]; then
-    cat $CLIENT_LOG
+    cat $SERVER_LOG
     ls /dev/shm
     echo -e "\n***\n*** Test Failed. Shared memory pages where not cleaned properly.
 Shared memory pages before starting triton equals to $prev_num_pages
 and shared memory pages after starting triton equals to $current_num_pages \n***"
-    RET=1
+    exit 1
 fi
 
 # Test environment variable propagation
@@ -367,9 +407,11 @@ if [ "$SERVER_PID" == "0" ]; then
     cat $SERVER_LOG
     exit 1
 fi
+set +e
 
 kill $SERVER_PID
 wait $SERVER_PID
+sleep 5
 
 current_num_pages=`get_shm_pages`
 if [ $current_num_pages -ne $prev_num_pages ]; then
@@ -378,7 +420,7 @@ if [ $current_num_pages -ne $prev_num_pages ]; then
     echo -e "\n***\n*** Test Failed. Shared memory pages where not cleaned properly.
 Shared memory pages before starting triton equals to $prev_num_pages
 and shared memory pages after starting triton equals to $current_num_pages \n***"
-    RET=1
+    exit 1
 fi
 
 rm -fr ./models
@@ -405,14 +447,84 @@ $page_size."
         RET=1
     fi
 done
+set +e
 
 kill $SERVER_PID
 wait $SERVER_PID
+sleep 5
+
+rm -fr ./models
+rm -rf *.tar.gz
+apt update && apt install software-properties-common rapidjson-dev -y
+wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | \
+    gpg --dearmor - | \
+    tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null && \
+    apt-add-repository 'deb https://apt.kitware.com/ubuntu/ focal main' && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    cmake-data=3.18.4-0kitware1ubuntu20.04.1 cmake=3.18.4-0kitware1ubuntu20.04.1
+install_conda
+
+# Create a model with python 3.9 version
+create_conda_env "3.9" "python-3-9"
+conda install numpy=1.20.1 -y
+create_python_backend_stub
+conda-pack -o python3.9.tar.gz
+path_to_conda_pack=`pwd`/python3.9.tar.gz
+mkdir -p models/python_3_9/1/
+cp ../python_models/python_version/config.pbtxt ./models/python_3_9
+(cd models/python_3_9 && \
+    sed -i "s/^name:.*/name: \"python_3_9\"/" config.pbtxt && \
+    echo "parameters: {key: \"EXECUTION_ENV_PATH\", value: {string_value: \"$path_to_conda_pack\"}}">> config.pbtxt)
+cp ../python_models/python_version/model.py ./models/python_3_9/1/
+cp python_backend/builddir/triton_python_backend_stub ./models/python_3_9
+conda deactivate
+
+# Create a model with python 3.6 version
+create_conda_env "3.6" "python-3-6"
+conda install numpy=1.18.1 -y
+conda-pack -o python3.6.tar.gz
+path_to_conda_pack=`pwd`/python3.6.tar.gz
+create_python_backend_stub
+mkdir -p models/python_3_6/1/
+cp ../python_models/python_version/config.pbtxt ./models/python_3_6
+(cd models/python_3_6 && \
+    sed -i "s/^name:.*/name: \"python_3_6\"/" config.pbtxt && \
+    echo "parameters: {key: \"EXECUTION_ENV_PATH\", value: {string_value: \"$path_to_conda_pack\"}}" >> config.pbtxt)
+cp ../python_models/python_version/model.py ./models/python_3_6/1/
+cp python_backend/builddir/triton_python_backend_stub ./models/python_3_6
+
+run_server
+if [ "$SERVER_PID" == "0" ]; then
+    echo -e "\n***\n*** Failed to start $SERVER\n***"
+    cat $SERVER_LOG
+    exit 1
+fi
+set +e
+
+kill $SERVER_PID
+wait $SERVER_PID
+sleep 5
+
+grep "Python version is 3.6 and NumPy version is 1.18.1" $SERVER_LOG
+if [ $? -ne 0 ]; then
+    cat $SERVER_LOG
+    echo -e "\n***\n*** Python 3.6 and NumPy 1.18.1 was not found in Triton logs. \n***"
+    RET=1
+fi
+
+grep "Python version is 3.9 and NumPy version is 1.20.1" $SERVER_LOG
+if [ $? -ne 0 ]; then
+    cat $SERVER_LOG
+    echo -e "\n***\n*** Python 3.9 and NumPy 1.20.1 was not found in Triton logs. \n***"
+    RET=1
+fi
+set -e
 
 if [ $RET -eq 0 ]; then
     echo -e "\n***\n*** Test Passed\n***"
 else
-    cat $CLIENT_LOG
+    cat $SERVER_LOG
     echo -e "\n***\n*** Test FAILED\n***"
 fi
 
qa/python_models/python_version/config.pbtxt

+45

@@ -0,0 +1,45 @@
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#  * Neither the name of NVIDIA CORPORATION nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+name: "python_version"
+backend: "python"
+
+input [
+  {
+    name: "INPUT"
+    data_type: TYPE_FP32
+    dims: [ 1 ]
+  }
+]
+output [
+  {
+    name: "OUTPUT"
+    data_type: TYPE_FP32
+    dims: [ 1 ]
+  }
+]
+
+instance_group [{ kind: KIND_CPU }]
qa/python_models/python_version/model.py

+48

@@ -0,0 +1,48 @@
+# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#  * Neither the name of NVIDIA CORPORATION nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import numpy as np
+import sys
+
+sys.path.append('../../')
+import triton_python_backend_utils as pb_utils
+
+
+class TritonPythonModel:
+
+    def initialize(self, args):
+        self.model_config = args['model_config']
+        print(f'Python version is {sys.version_info.major}.{sys.version_info.minor} and NumPy version is {np.version.version}')
+
+    def execute(self, requests):
+        """ This function is called on inference request.
+        """
+        responses = []
+        for request in requests:
+            input_tensor = pb_utils.get_input_tensor_by_name(request, "INPUT0")
+            out_tensor = pb_utils.Tensor("OUTPUT0", input_tensor.as_numpy())
+            responses.append(pb_utils.InferenceResponse([out_tensor]))
+        return responses
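For orientation, the setup steps in test.sh above leave each per-version model directory laid out roughly as follows (a sketch; the tarball path is whatever EXECUTION_ENV_PATH points to):

# models/python_3_9/
#     config.pbtxt                  # name "python_3_9" plus EXECUTION_ENV_PATH -> python3.9.tar.gz
#     triton_python_backend_stub    # stub rebuilt against the conda environment's Python
#     1/
#         model.py                  # prints the Python and NumPy versions in initialize()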
