Skip to content

Commit a89e22b

Browse files
gedoensmax and Sanket Kale
authored and committed
Make TRT plugins optional (microsoft#25261)
### Description The parser no longer links against the plugin library but instead loads it dynamically. Because of that, I think we should also make the library optional in ORT. @chilo-ms
1 parent 66f868d commit a89e22b

File tree

3 files changed

+49
-20
lines changed

3 files changed

+49
-20
lines changed

cmake/onnxruntime_providers_tensorrt.cmake

Lines changed: 5 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -72,26 +72,21 @@
7272
endif()
7373

7474
# TensorRT 10 GA onwards, the TensorRT libraries will have major version appended to the end on Windows,
75-
# for example, nvinfer_10.dll, nvinfer_plugin_10.dll, nvonnxparser_10.dll ...
75+
# for example, nvinfer_10.dll, nvonnxparser_10.dll ...
7676
if (WIN32 AND TRT_GREATER_OR_EQUAL_TRT_10_GA)
7777
set(NVINFER_LIB "nvinfer_${NV_TENSORRT_MAJOR}")
78-
set(NVINFER_PLUGIN_LIB "nvinfer_plugin_${NV_TENSORRT_MAJOR}")
7978
set(PARSER_LIB "nvonnxparser_${NV_TENSORRT_MAJOR}")
8079
endif()
8180

8281
if (NOT NVINFER_LIB)
8382
set(NVINFER_LIB "nvinfer")
8483
endif()
8584

86-
if (NOT NVINFER_PLUGIN_LIB)
87-
set(NVINFER_PLUGIN_LIB "nvinfer_plugin")
88-
endif()
89-
9085
if (NOT PARSER_LIB)
9186
set(PARSER_LIB "nvonnxparser")
9287
endif()
9388

94-
MESSAGE(STATUS "Looking for ${NVINFER_LIB} and ${NVINFER_PLUGIN_LIB}")
89+
MESSAGE(STATUS "Looking for ${NVINFER_LIB}")
9590

9691
find_library(TENSORRT_LIBRARY_INFER ${NVINFER_LIB}
9792
HINTS ${TENSORRT_ROOT}
@@ -101,14 +96,6 @@
10196
MESSAGE(STATUS "Can't find ${NVINFER_LIB}")
10297
endif()
10398

104-
find_library(TENSORRT_LIBRARY_INFER_PLUGIN ${NVINFER_PLUGIN_LIB}
105-
HINTS ${TENSORRT_ROOT}
106-
PATH_SUFFIXES lib lib64 lib/x64)
107-
108-
if (NOT TENSORRT_LIBRARY_INFER_PLUGIN)
109-
MESSAGE(STATUS "Can't find ${NVINFER_PLUGIN_LIB}")
110-
endif()
111-
11299
if (onnxruntime_USE_TENSORRT_BUILTIN_PARSER)
113100
MESSAGE(STATUS "Looking for ${PARSER_LIB}")
114101

@@ -120,7 +107,7 @@
120107
MESSAGE(STATUS "Can't find ${PARSER_LIB}")
121108
endif()
122109

123-
set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_INFER_PLUGIN} ${TENSORRT_LIBRARY_NVONNXPARSER})
110+
set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_NVONNXPARSER})
124111
MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}")
125112
else()
126113
if (TRT_GREATER_OR_EQUAL_TRT_10_GA)
@@ -153,15 +140,15 @@
153140
endif()
154141
# Static libraries are just nvonnxparser_static on all platforms
155142
set(onnxparser_link_libs nvonnxparser_static)
156-
set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER} ${TENSORRT_LIBRARY_INFER_PLUGIN})
143+
set(TENSORRT_LIBRARY ${TENSORRT_LIBRARY_INFER})
157144
MESSAGE(STATUS "Find TensorRT libs at ${TENSORRT_LIBRARY}")
158145
endif()
159146

160147
# ${TENSORRT_LIBRARY} is empty if we link nvonnxparser_static.
161148
# nvonnxparser_static is linked against tensorrt libraries in onnx-tensorrt
162149
# See https://github.com/onnx/onnx-tensorrt/blob/8af13d1b106f58df1e98945a5e7c851ddb5f0791/CMakeLists.txt#L121
163150
# However, starting from TRT 10 GA, nvonnxparser_static doesn't link against tensorrt libraries.
164-
# Therefore, the above code finds ${TENSORRT_LIBRARY_INFER} and ${TENSORRT_LIBRARY_INFER_PLUGIN}.
151+
# Therefore, the above code finds ${TENSORRT_LIBRARY_INFER}.
165152
if(onnxruntime_CUDA_MINIMAL)
166153
set(trt_link_libs ${CMAKE_DL_LIBS} ${TENSORRT_LIBRARY})
167154
else()

onnxruntime/core/providers/nv_tensorrt_rtx/nv_execution_provider.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1304,7 +1304,7 @@ std::vector<AllocatorPtr> NvExecutionProvider::CreatePreferredAllocators() {
13041304

13051305
AllocatorCreationInfo pinned_allocator_info(
13061306
[](OrtDevice::DeviceId device_id) {
1307-
return std::make_unique<CUDAPinnedAllocator>(device_id, CUDA_PINNED);
1307+
return std::make_unique<CUDAPinnedAllocator>(CUDA_PINNED, device_id);
13081308
},
13091309
narrow<OrtDevice::DeviceId>(device_id_));
13101310

onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_custom_ops.cc

Lines changed: 43 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,25 @@
77
#include "tensorrt_execution_provider_custom_ops.h"
88
#include "tensorrt_execution_provider.h"
99

10+
// The filename extension for a shared library is different per platform
11+
#ifdef _WIN32
12+
#define LIBRARY_PREFIX
13+
#define LIBRARY_EXTENSION ORT_TSTR(".dll")
14+
#elif defined(__APPLE__)
15+
#define LIBRARY_PREFIX "lib"
16+
#define LIBRARY_EXTENSION ".dylib"
17+
#else
18+
#define LIBRARY_PREFIX "lib"
19+
#define LIBRARY_EXTENSION ".so"
20+
#endif
21+
22+
#ifdef _WIN32
23+
#define ORT_DEF2STR_HELPER(x) L#x
24+
#else
25+
#define ORT_DEF2STR_HELPER(X) #X
26+
#endif
27+
#define ORT_DEF2STR(x) ORT_DEF2STR_HELPER(x)
28+
1029
namespace onnxruntime {
1130
extern TensorrtLogger& GetTensorrtLogger(bool verbose);
1231

@@ -58,8 +77,31 @@ common::Status CreateTensorRTCustomOpDomainList(std::vector<OrtCustomOpDomain*>&
5877
// Get all registered TRT plugins from registry
5978
LOGS_DEFAULT(VERBOSE) << "[TensorRT EP] Getting all registered TRT plugins from TRT plugin registry ...";
6079
TensorrtLogger trt_logger = GetTensorrtLogger(false);
61-
initLibNvInferPlugins(&trt_logger, "");
80+
try {
81+
void* library_handle = nullptr;
82+
const auto& env = onnxruntime::GetDefaultEnv();
83+
#if NV_TENSORRT_MAJOR < 10
84+
auto full_path = env.GetRuntimePath() +
85+
PathString(LIBRARY_PREFIX ORT_TSTR("nvinfer_plugin") LIBRARY_EXTENSION);
86+
#else
87+
#ifdef _WIN32
88+
auto full_path = PathString(LIBRARY_PREFIX ORT_TSTR("nvinfer_plugin_" ORT_DEF2STR(NV_TENSORRT_MAJOR)) LIBRARY_EXTENSION);
89+
#else
90+
auto full_path = PathString(LIBRARY_PREFIX ORT_TSTR("nvinfer_plugin") LIBRARY_EXTENSION ORT_TSTR("." ORT_DEF2STR(NV_TENSORRT_MAJOR)));
91+
#endif
92+
#endif
93+
94+
ORT_THROW_IF_ERROR(env.LoadDynamicLibrary(full_path, false, &library_handle));
6295

96+
bool (*dyn_initLibNvInferPlugins)(void* logger, char const* libNamespace);
97+
ORT_THROW_IF_ERROR(env.GetSymbolFromLibrary(library_handle, "initLibNvInferPlugins", (void**)&dyn_initLibNvInferPlugins));
98+
if (!dyn_initLibNvInferPlugins(&trt_logger, "")) {
99+
LOGS_DEFAULT(INFO) << "[TensorRT EP] Default plugin library was found but was not able to initialize default plugins.";
100+
}
101+
LOGS_DEFAULT(INFO) << "[TensorRT EP] Default plugins successfully loaded.";
102+
} catch (const std::exception&) {
103+
LOGS_DEFAULT(INFO) << "[TensorRT EP] Default plugin library is not on the path and is therefore ignored";
104+
}
63105
int num_plugin_creator = 0;
64106
auto plugin_creators = getPluginRegistry()->getAllCreators(&num_plugin_creator);
65107
std::unordered_set<std::string> registered_plugin_names;

0 commit comments

Comments
 (0)