Skip to content

Commit 723efe2

Browse files
authored
[NvTensorRtRtx EP] Changes for the TensorRT RTX DLL name change (microsoft#24703)
### Description <!-- Describe your changes. --> Added the changes for the latest TensorRT RTX name change. ### Motivation and Context <!-- - Why is this change required? What problem does it solve? - If it fixes an open issue, please link to the issue here. --> The current name of the TensorRT RTX DLL will create confusion for developers. @ankan-ban @gedoensmax @jywu-msft --------- Co-authored-by: iraut <[email protected]>
1 parent df1dba7 commit 723efe2

File tree

2 files changed

+18
-40
lines changed

2 files changed

+18
-40
lines changed

cmake/onnxruntime_providers_nv.cmake

Lines changed: 17 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -36,44 +36,30 @@
3636

3737

3838
file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h NVINFER_VER_CONTENT)
39-
string(REGEX MATCH "define NV_TENSORRT_MAJOR * +([0-9]+)" NV_TENSORRT_MAJOR "${NVINFER_VER_CONTENT}")
40-
string(REGEX REPLACE "define NV_TENSORRT_MAJOR * +([0-9]+)" "\\1" NV_TENSORRT_MAJOR "${NV_TENSORRT_MAJOR}")
41-
string(REGEX MATCH "define NV_TENSORRT_MINOR * +([0-9]+)" NV_TENSORRT_MINOR "${NVINFER_VER_CONTENT}")
42-
string(REGEX REPLACE "define NV_TENSORRT_MINOR * +([0-9]+)" "\\1" NV_TENSORRT_MINOR "${NV_TENSORRT_MINOR}")
43-
string(REGEX MATCH "define NV_TENSORRT_PATCH * +([0-9]+)" NV_TENSORRT_PATCH "${NVINFER_VER_CONTENT}")
44-
string(REGEX REPLACE "define NV_TENSORRT_PATCH * +([0-9]+)" "\\1" NV_TENSORRT_PATCH "${NV_TENSORRT_PATCH}")
45-
math(EXPR NV_TENSORRT_MAJOR_INT "${NV_TENSORRT_MAJOR}")
46-
math(EXPR NV_TENSORRT_MINOR_INT "${NV_TENSORRT_MINOR}")
47-
math(EXPR NV_TENSORRT_PATCH_INT "${NV_TENSORRT_PATCH}")
48-
49-
if (NV_TENSORRT_MAJOR)
50-
MESSAGE(STATUS "NV_TENSORRT_MAJOR is ${NV_TENSORRT_MAJOR}")
39+
string(REGEX MATCH "define TRT_MAJOR_RTX * +([0-9]+)" NV_TRT_MAJOR_RTX "${NVINFER_VER_CONTENT}")
40+
string(REGEX REPLACE "define TRT_MAJOR_RTX * +([0-9]+)" "\\1" NV_TRT_MAJOR_RTX "${NV_TRT_MAJOR_RTX}")
41+
string(REGEX MATCH "define TRT_MINOR_RTX * +([0-9]+)" NV_TRT_MINOR_RTX "${NVINFER_VER_CONTENT}")
42+
string(REGEX REPLACE "define TRT_MINOR_RTX * +([0-9]+)" "\\1" NV_TRT_MINOR_RTX "${NV_TRT_MINOR_RTX}")
43+
math(EXPR NV_TRT_MAJOR_RTX_INT "${NV_TRT_MAJOR_RTX}")
44+
math(EXPR NV_TRT_MINOR_RTX_INT "${NV_TRT_MINOR_RTX}")
45+
46+
if (NV_TRT_MAJOR_RTX)
47+
MESSAGE(STATUS "NV_TRT_MAJOR_RTX is ${NV_TRT_MAJOR_RTX}")
5148
else()
52-
MESSAGE(STATUS "Can't find NV_TENSORRT_MAJOR macro")
49+
MESSAGE(STATUS "Can't find NV_TRT_MAJOR_RTX macro")
5350
endif()
5451

55-
# Check TRT version >= 10.0.1.6
56-
if ((NV_TENSORRT_MAJOR_INT GREATER 10) OR
57-
(NV_TENSORRT_MAJOR_INT EQUAL 10 AND NV_TENSORRT_MINOR_INT GREATER 0) OR
58-
(NV_TENSORRT_MAJOR_INT EQUAL 10 AND NV_TENSORRT_PATCH_INT GREATER 0))
59-
set(TRT_GREATER_OR_EQUAL_TRT_10_GA ON)
60-
else()
61-
message( FATAL_ERROR "Only TensorRT 10.x or higher is supported." )
62-
endif()
63-
64-
# TensorRT 10 GA onwards, the TensorRT libraries will have major version appended to the end on Windows,
65-
# for example, nvinfer_10.dll, nvonnxparser_10.dll ...
66-
if (WIN32 AND TRT_GREATER_OR_EQUAL_TRT_10_GA)
67-
set(NVINFER_LIB "nvinfer_${NV_TENSORRT_MAJOR}")
68-
set(PARSER_LIB "nvonnxparser_${NV_TENSORRT_MAJOR}")
52+
if (WIN32)
53+
set(NVINFER_LIB "tensorrt_rtx_${NV_TRT_MAJOR_RTX}_${NV_TRT_MINOR_RTX}")
54+
set(PARSER_LIB "tensorrt_onnxparser_rtx_${NV_TRT_MAJOR_RTX}_${NV_TRT_MINOR_RTX}")
6955
endif()
7056

7157
if (NOT NVINFER_LIB)
72-
set(NVINFER_LIB "nvinfer")
58+
set(NVINFER_LIB "tensorrt_rtx")
7359
endif()
7460

7561
if (NOT PARSER_LIB)
76-
set(PARSER_LIB "nvonnxparser")
62+
set(PARSER_LIB "tensorrt_onnxparser_rtx")
7763
endif()
7864

7965
MESSAGE(STATUS "Looking for ${NVINFER_LIB}")
@@ -100,9 +86,8 @@
10086
set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_NVONNXPARSER})
10187
MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}")
10288
else()
103-
if (TRT_GREATER_OR_EQUAL_TRT_10_GA)
104-
set(ONNX_USE_LITE_PROTO ON)
105-
endif()
89+
set(ONNX_USE_LITE_PROTO ON)
90+
10691
onnxruntime_fetchcontent_declare(
10792
onnx_tensorrt
10893
URL ${DEP_URL_onnx_tensorrt}

onnxruntime/core/providers/nv_tensorrt_rtx/nv_execution_provider.cc

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2581,11 +2581,7 @@ Status NvExecutionProvider::CreateNodeComputeInfoFromGraph(const GraphViewer& gr
25812581
if (mem_size > max_ctx_mem_size_) {
25822582
max_ctx_mem_size_ = mem_size;
25832583
}
2584-
#if NV_TENSORRT_MAJOR < 10
2585-
trt_context = std::unique_ptr<nvinfer1::IExecutionContext>(trt_engine->createExecutionContextWithoutDeviceMemory());
2586-
#else
25872584
trt_context = std::unique_ptr<nvinfer1::IExecutionContext>(trt_engine->createExecutionContext(nvinfer1::ExecutionContextAllocationStrategy::kUSER_MANAGED));
2588-
#endif
25892585
} else {
25902586
trt_context = std::unique_ptr<nvinfer1::IExecutionContext>(trt_engine->createExecutionContext());
25912587
}
@@ -2966,11 +2962,8 @@ Status NvExecutionProvider::CreateNodeComputeInfoFromPrecompiledEngine(const Gra
29662962
if (mem_size > max_ctx_mem_size_) {
29672963
max_ctx_mem_size_ = mem_size;
29682964
}
2969-
#if NV_TENSORRT_MAJOR < 10
2970-
trt_context = std::unique_ptr<nvinfer1::IExecutionContext>(trt_engine->createExecutionContextWithoutDeviceMemory());
2971-
#else
29722965
trt_context = std::unique_ptr<nvinfer1::IExecutionContext>(trt_engine->createExecutionContext(nvinfer1::ExecutionContextAllocationStrategy::kUSER_MANAGED));
2973-
#endif
2966+
29742967
} else {
29752968
trt_context = std::unique_ptr<nvinfer1::IExecutionContext>(trt_engine->createExecutionContext());
29762969
}

0 commit comments

Comments
 (0)