bool build(std::string dataDirs, std::string onnxFileName, std::string engineDirs, std::string engineName,
           bool rebuildEngine);
bool load(std::string enginePath);
bool infer(float* inputData);
std::shared_ptr<nvinfer1::ICudaEngine> mEngine;
std::shared_ptr<nvinfer1::IExecutionContext> context;
bool constructNetwork(std::unique_ptr<nvinfer1::IBuilder>& builder,
    std::unique_ptr<nvinfer1::INetworkDefinition>& network, std::unique_ptr<nvinfer1::IBuilderConfig>& config,
    std::unique_ptr<nvonnxparser::IParser>& parser, std::string onnxFileName, std::string dataDirs);
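
// Typical usage (hypothetical paths): build("models", "model.onnx", "engines", "model.engine", false)
// either builds a new engine from the ONNX file or loads a cached serialized engine; infer() then runs
// with the deserialized engine.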
bool TensorrtEngine::build(std::string dataDirs, std::string onnxFileName, std::string engineDirs, std::string engineName,
                           bool rebuildEngine)
{
    const std::string enginePath = engineDirs + "/" + engineName;
    std::cerr << "made a new logger" << std::endl;
    // Assumption: the existence check here is reconstructed with a simple ifstream probe.
    if (std::ifstream(enginePath).good() && !rebuildEngine) {
        std::cout << "Engine found, not regenerating..." << std::endl;
        return load(enginePath);
    }
    std::cout << "Engine not found... Let's build a new one." << std::endl;
    auto builder = std::unique_ptr<nvinfer1::IBuilder>(nvinfer1::createInferBuilder(gLogger));
    if (!builder) {
        std::cerr << "Couldn't createInferBuilder" << std::endl;
        return false;
    }
    auto explicitBatch = 1U << static_cast<uint32_t>(nvinfer1::NetworkDefinitionCreationFlag::kEXPLICIT_BATCH);
    auto network = std::unique_ptr<nvinfer1::INetworkDefinition>(builder->createNetworkV2(explicitBatch));
    if (!network) {
        std::cerr << "Couldn't createNetworkV2" << std::endl;
        return false;
    }
    auto config = std::unique_ptr<nvinfer1::IBuilderConfig>(builder->createBuilderConfig());
    if (!config) {
        std::cerr << "Couldn't createBuilderConfig" << std::endl;
        return false;
    }
    auto parser = std::unique_ptr<nvonnxparser::IParser>(nvonnxparser::createParser(*network, gLogger));
    if (!parser) {
        std::cerr << "Couldn't createParser" << std::endl;
        return false;
    }
    auto constructed = constructNetwork(builder, network, config, parser, onnxFileName, dataDirs);
    if (!constructed) {
        std::cerr << "Couldn't constructNetwork" << std::endl;
        return false;
    }
    // Assumption: makeCudaStream() is the TensorRT samples-style helper returning a managed CUDA stream.
    auto profileStream = makeCudaStream();
    if (!profileStream) {
        std::cerr << "Couldn't makeCudaStream" << std::endl;
        return false;
    }
    config->setProfileStream(*profileStream);
    nvinfer1::IOptimizationProfile* optProfile = builder->createOptimizationProfile();
    const auto input = network->getInput(0);
    const auto output = network->getOutput(0);
    const auto inputDims = input->getDimensions();
    const auto outputDims = output->getDimensions();
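    // The elided lines presumably pin the profile shapes; the typical pattern (illustrative only,
    // not the original values) is:
    //   optProfile->setDimensions(input->getName(), nvinfer1::OptProfileSelector::kMIN, inputDims);
    //   optProfile->setDimensions(input->getName(), nvinfer1::OptProfileSelector::kOPT, inputDims);
    //   optProfile->setDimensions(input->getName(), nvinfer1::OptProfileSelector::kMAX, inputDims);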
    config->addOptimizationProfile(optProfile);
    std::unique_ptr<nvinfer1::IHostMemory> plan{builder->buildSerializedNetwork(*network, *config)};
    if (!plan) {
        std::cerr << "Couldn't buildSerializedNetwork" << std::endl;
        return false;
    }
    mRuntime = std::shared_ptr<nvinfer1::IRuntime>(nvinfer1::createInferRuntime(gLogger));
    if (!mRuntime) {
        std::cerr << "Couldn't createInferRuntime" << std::endl;
        return false;
    }
    mEngine = std::shared_ptr<nvinfer1::ICudaEngine>(
        mRuntime->deserializeCudaEngine(plan->data(), plan->size()));
    if (!mEngine) {
        std::cerr << "Couldn't deserializeCudaEngine" << std::endl;
        return false;
    }
    std::ofstream outfile(enginePath, std::ofstream::binary);
    outfile.write(reinterpret_cast<const char*>(plan->data()), plan->size());
    std::cout << "Successfully saved engine to " << enginePath << std::endl;
    return true;
}
bool TensorrtEngine::load(std::string enginePath)
{
    std::cout << enginePath << std::endl;
    std::ifstream file(enginePath, std::ios::binary | std::ios::ate);
    if (!file.is_open()) {
        std::cerr << "Error, unable to open engine file from " << enginePath << std::endl;
        return false;
    }
    std::streamsize size = file.tellg();
    file.seekg(0, std::ios::beg);
    if (size <= 0) {
        std::cerr << "Error, invalid engine file size for " << enginePath << std::endl;
        return false;
    }
    std::vector<char> engineBuffer(size);
    if (!file.read(engineBuffer.data(), size)) {
        std::cerr << "Error, unable to read engine file from " << enginePath << std::endl;
        return false;
    }
    mRuntime = std::shared_ptr<nvinfer1::IRuntime>{nvinfer1::createInferRuntime(gLogger)};
    if (!mRuntime) {
        std::cerr << "Error, failed to create inference runtime." << std::endl;
        return false;
    }
    if (engineBuffer.size() != static_cast<size_t>(size)) {
        std::cerr << "Error, incomplete read of engine data" << std::endl;
        return false;
    }
    // Debug output: size of the serialized engine that was read (streaming the raw, non-null-terminated
    // buffer itself would print garbage, so only the size is reported).
    std::cerr << engineBuffer.size() << std::endl;
    mEngine = std::shared_ptr<nvinfer1::ICudaEngine>(
        mRuntime->deserializeCudaEngine(engineBuffer.data(), engineBuffer.size()));
    int numIOTensors = mEngine->getNbIOTensors();
    std::cout << "Number of IO Tensors: " << numIOTensors << std::endl;
    // ...
}
bool TensorrtEngine::constructNetwork(std::unique_ptr<nvinfer1::IBuilder>& builder,
    std::unique_ptr<nvinfer1::INetworkDefinition>& network, std::unique_ptr<nvinfer1::IBuilderConfig>& config,
    std::unique_ptr<nvonnxparser::IParser>& parser, std::string onnxFileName, std::string dataDirs)
{
    const std::string onnxFilePath = dataDirs + "/" + onnxFileName;
    std::cout << "ONNX file: " << onnxFilePath << std::endl;
    auto parsed = parser->parseFromFile(onnxFilePath.c_str(), 0);
    // ...
    config->setFlag(nvinfer1::BuilderFlag::kFP16);
    // ...