API
nnReconstructor.hpp
Go to the documentation of this file.
/** \file nnReconstructor.hpp
 * \brief A MagAO-X app that reconstructs wavefront modes from a PWFS ImageStreamIO stream with a TensorRT neural network.
 *
 * \ingroup app_files
 */
6 
7 #ifndef nnReconstructor_hpp
8 #define nnReconstructor_hpp
9 
10 #include <NvInfer.h>
11 #include <cuda_runtime_api.h>
12 #include <iostream>
13 #include <fstream>
14 #include <vector>
15 #include <limits>
16 
17 #include <mx/improc/eigenCube.hpp>
18 #include <mx/improc/eigenImage.hpp>
19 
20 #include "../../libMagAOX/libMagAOX.hpp" //Note this is included on command line to trigger pch
21 #include "../../magaox_git_version.h"
22 
23 using namespace nvinfer1;
24 
25 // Logger for TensorRT info/warning/errors
26 class Logger : public ILogger {
27  void log(Severity severity, const char* msg) noexcept override {
28  if (severity <= Severity::kWARNING) {
29  std::cout << msg << std::endl;
30  }
31  }
32 };
33 
34 // #define MAGAOX_CURRENT_SHA1 0
35 // #define MAGAOX_REPO_MODIFIED 0
36 
37 namespace MagAOX
38 {
39 namespace app
40 {
41 
/// MagAO-X app: neural-network wavefront reconstructor driven by a shmim stream.
class nnReconstructor : public MagAOXApp<true>, public dev::shmimMonitor<nnReconstructor>
{
    // Give the test harness access.
    friend class nnReconstructor_test;

    friend class dev::shmimMonitor<nnReconstructor>;

    // The base shmimMonitor type
    // NOTE(review): a `typedef dev::shmimMonitor<nnReconstructor> shmimMonitorT;`
    // appears to have been lost in extraction here -- `shmimMonitorT` is used below.

    /// Floating point type in which to do all calculations.
    typedef float realT;

  public:
    /** \name app::dev Configurations
     *@{
     */

    ///@}

  protected:
    /** \name Configurable Parameters
     *@{
     */
    std::string dataDirs;     // Location where the data (onnx file, engine, WFS reference) is stored
    std::string onnxFileName; // Name of the onnx file
    std::string engineName;   // Name of the serialized TensorRT engine file
    std::string engineDirs;   // Directory holding the engine file
    bool rebuildEngine;       // If true, it will rebuild the engine and save it at engineName
                              // NOTE(review): uninitialized until loadConfig() runs -- confirm.

    Logger logger;                // The TensorRT logger (prints warnings/errors)
    std::vector<char> engineData; // Raw bytes of the serialized engine, filled by load_engine()

    // TensorRT objects: created in create_engine_context(), destroyed in
    // cleanup_engine_context() in reverse order (context, engine, runtime).
    IRuntime* runtime {nullptr};
    ICudaEngine* engine {nullptr};
    IExecutionContext* context {nullptr};
    // Input tensor geometry read from the engine (assumed NCHW -- see create_engine_context()):
    int inputC {0};
    int inputH {0};
    int inputW {0};
    int inputSize {0};  // inputC * inputH * inputW, in elements
    int outputSize {0}; // number of output coefficients, in elements

    // Device (GPU) buffers, allocated in prepare_engine_memory()
    float* d_input {nullptr};
    float* d_output {nullptr};

    float imageNorm; // Normalization constant for the image intensities
    float modalNorm; // Normalization constant for the modal coefficients

    int m_pupPix;      // Number of pixels in the pupil used for the Neural Network
    int pup_offset1_x; // Horizontal offset to the first set of pupils
    int pup_offset1_y; // Vertical offset to the first set of pupils
    int pup_offset2_x; // Horizontal offset to the second set of pupils
    int pup_offset2_y; // Vertical offset to the second set of pupils (was mislabeled "Horizontal")
    // NOTE(review): a declaration (likely `int pixels_per_quadrant;`) appears to have been
    // lost in extraction here -- it is used in allocate() and processImage().
    unsigned long frame_counter{ 0 }; // Frames processed since startup; used for the debug heartbeat

    uint32_t Nmodes{ 0 };       // Number of modes to reconstruct
    int Npup{ 4 };              // Number of pupils
    float *modeval{ nullptr };  // Host buffer for reconstructed modal coefficients
    float *pp_image{ nullptr }; // Host buffer for the preprocessed 4-pupil input image

    size_t m_pwfsWidth{ 0 };  ///< The width of the image
    size_t m_pwfsHeight{ 0 }; ///< The height of the image.

    uint8_t m_pwfsDataType{ 0 }; ///< The ImageStreamIO type code.
    size_t m_pwfsTypeSize{ 0 };  ///< The size of the type, in bytes.

    // variables for sending the output to aol_modevals
    std::string m_modevalChannel; ///< Name of the output shmim channel
    // NOTE(review): the IMAGE member (m_modevalStream) appears to have been lost in
    // extraction here -- it is used in allocate() and send_to_shmim().
    uint32_t m_modevalWidth {0};   ///< The width of the shmim
    uint32_t m_modevalHeight {0};  ///< The height of the shmim
    uint8_t m_modevalDataType {0}; ///< The ImageStreamIO type code.
    size_t m_modevalTypeSize {0};  ///< The size of the type, in bytes.

    bool m_modevalOpened {false};  ///< True once the output shmim has been opened successfully.
    bool m_modevalRestart {false}; ///< Set to request a reopen of the output shmim.

  public:
    /// Default c'tor.
    nnReconstructor();

    /// D'tor, declared and defined for noexcept.
    ~nnReconstructor() noexcept
    {
    }

    virtual void setupConfig();

    /// Implementation of loadConfig logic, separated for testing.
    /** This is called by loadConfig().
     */
    int loadConfigImpl(
        mx::app::appConfigurator &_config /**< [in] an application configuration from which to load values*/ );

    virtual void loadConfig();

    /// Startup function
    /** Loads the serialized TensorRT engine from engineDirs/engineName.
     */
    virtual int appStartup();

    /// Implementation of the FSM for nnReconstructor.
    /**
     * \returns 0 on no critical error
     * \returns -1 on an error requiring shutdown
     */
    virtual int appLogic();

    /// Shutdown the app.
    /** Frees the host-side preprocessing buffer.
     */
    virtual int appShutdown();

    // Custom functions
    /// Copy the current modal coefficients into the output shmim and post its semaphores.
    int send_to_shmim();

    /// Read the serialized engine file into engineData.
    void load_engine(const std::string filename);
    /// Deserialize the engine, create the execution context, and cache tensor shapes.
    void create_engine_context();
    /// Allocate the device-side input/output buffers.
    void prepare_engine_memory();
    /// Free the device-side buffers.
    void cleanup_engine_memory();
    /// Destroy the TensorRT context, engine, and runtime.
    void cleanup_engine_context();

    // void build_engine(){};

  protected:
    /// shmimMonitor callback: (re)allocate buffers and open the output shmim.
    int allocate( const dev::shmimT &dummy /**< [in] tag to differentiate shmimMonitor parents.*/ );

    /// shmimMonitor callback: preprocess one WFS frame (inference currently disabled).
    int processImage( void *curr_src, ///< [in] pointer to start of current frame.
                      const dev::shmimT &dummy ///< [in] tag to differentiate shmimMonitor parents.
    );
};
176 
177 void nnReconstructor::load_engine(const std::string filename) {
178 
179  std::ifstream file(filename, std::ios::binary);
180  if (!file) {
181  std::cout << "Error opening " << filename << std::endl;
182  }
183 
184  file.seekg(0, std::ios::end);
185 
186  engineData = std::vector<char>(file.tellg());
187  file.seekg(0, std::ios::beg);
188  file.read(engineData.data(), engineData.size());
189 
190 }
191 
192 void nnReconstructor::create_engine_context(){
193 
194  // Create the runtime and deserialize the engine
195  runtime = createInferRuntime(logger);
196  if (!runtime) {
197  std::cout << "Failed to createInferRuntime\n";
198  }
199 
200  engine = runtime->deserializeCudaEngine(engineData.data(), engineData.size());
201  if (!engine) {
202  std::cout << "Failed to deserialize CUDA engine.\n";
203  } else {
204  std::cout << "Deserialized CUDA engine.\n";
205  }
206 
207  context = engine->createExecutionContext();
208 
209 
210  int numIOTensors = engine->getNbIOTensors();
211  std::cout << "Number of IO Tensors: " << numIOTensors << std::endl;
212 
213  auto inputName = engine->getIOTensorName(0);
214  auto outputName = engine->getIOTensorName(1);
215  std::cout << "Tensor IO names: " << inputName << " " << outputName << std::endl;
216 
217  const auto inputDims = engine->getTensorShape(inputName);
218  const auto outputDims = engine->getTensorShape(outputName);
219  inputC = inputDims.d[1];
220  inputH = inputDims.d[2];
221  inputW = inputDims.d[3];
222  inputSize = inputC * inputH * inputW;
223 
224  outputSize = outputDims.d[1];
225  std::cout << "Tensor input dimensions: " << inputC << "x" << inputH << "x" << inputW << std::endl;
226  std::cout << "Tensor output dimensions: " << outputSize << std::endl;
227 
228 }
229 
230 void nnReconstructor::prepare_engine_memory(){
231 
232  // Allocate device memory for input and output
233  cudaMalloc((void**)&d_input, inputSize * sizeof(float));
234  cudaMalloc((void**)&d_output, outputSize * sizeof(float));
235 
236 }
237 
238 void nnReconstructor::cleanup_engine_memory(){
239  if(d_input)
240  cudaFree(d_input);
241 
242  if(d_output)
243  cudaFree(d_output);
244 }
245 
246 void nnReconstructor::cleanup_engine_context(){
247  if(context)
248  delete context;
249  if(engine)
250  delete engine;
251  if(runtime)
252  delete runtime;
253 };
254 
/// Publish the current modal coefficients (`modeval`) to the output shmim.
/** Follows the ImageStreamIO write protocol: raise the write flag, copy the
 *  data, bump cnt0, clear the flag, then post the stream semaphores.
 *  NOTE(review): assumes the shmim buffer holds at least outputSize elements
 *  of m_modevalTypeSize bytes -- verify against m_modevalWidth*m_modevalHeight.
 */
inline int nnReconstructor::send_to_shmim()
{
    // Check if processImage is running
    // while(m_dmStream.md[0].write == 1);

    m_modevalStream.md[0].write = 1; // mark the stream as mid-write
    memcpy( m_modevalStream.array.raw, modeval, outputSize * m_modevalTypeSize );
    m_modevalStream.md[0].cnt0++;    // advance the frame counter so readers see a new frame
    m_modevalStream.md[0].write = 0;

    // Post all semaphores (index -1) so every waiting client wakes up.
    ImageStreamIO_sempost( &m_modevalStream, -1 );

    return 0;
}
269 
270 inline nnReconstructor::nnReconstructor() : MagAOXApp( MAGAOX_CURRENT_SHA1, MAGAOX_REPO_MODIFIED )
271 {
272  return;
273 }
274 
276 {
277  std::cout << "setupConfig()" << std::endl;
278  shmimMonitorT::setupConfig( config );
279 
280  config.add( "parameters.dataDirs",
281  "",
282  "parameters.dataDirs",
283  argType::Required,
284  "parameters",
285  "dataDirs",
286  false,
287  "string",
288  "The path to the directory with the onnx model." );
289 
290  config.add( "parameters.engineDirs",
291  "",
292  "parameters.engineDirs",
293  argType::Required,
294  "parameters",
295  "engineDirs",
296  false,
297  "string",
298  "The path to the directory with the TRT engine." );
299 
300  config.add( "parameters.onnxFileName",
301  "",
302  "parameters.onnxFileName",
303  argType::Required,
304  "parameters",
305  "onnxFileName",
306  false,
307  "string",
308  "Name of the Neural Net ONNX file" );
309 
310  config.add( "parameters.engineName",
311  "",
312  "parameters.engineName",
313  argType::Required,
314  "parameters",
315  "engineName",
316  false,
317  "string",
318  "Name of the TRT engine." );
319  config.add( "parameters.rebuildEngine",
320  "",
321  "parameters.rebuildEngine",
322  argType::Required,
323  "parameters",
324  "rebuildEngine",
325  false,
326  "bool",
327  "If true the engine will be rebuild." );
328 
329  config.add( "parameters.imageNorm",
330  "",
331  "parameters.imageNorm",
332  argType::Required,
333  "parameters",
334  "imageNorm",
335  false,
336  "float",
337  "Normalization term for the preprocessed images." );
338 
339  config.add( "parameters.modalNorm",
340  "",
341  "parameters.modalNorm",
342  argType::Required,
343  "parameters",
344  "modalNorm",
345  false,
346  "float",
347  "Normalization term for the modal coefficients." );
348 
349  config.add( "parameters.channel",
350  "",
351  "parameters.channel",
352  argType::Required,
353  "parameters",
354  "channel",
355  false,
356  "string",
357  "The output channel." );
358 
359  config.add( "parameters.m_pupPix",
360  "",
361  "parameters.m_pupPix",
362  argType::Required,
363  "parameters",
364  "m_pupPix",
365  false,
366  "int",
367  "Number of pixels across a PWFS pupil." );
368 
369  config.add( "parameters.pup_offset1_x",
370  "",
371  "parameters.pup_offset1_x",
372  argType::Required,
373  "parameters",
374  "pup_offset1_x",
375  false,
376  "int",
377  "Horizontal offset to the top left of the closest set op PWFS pupils." );
378 
379  config.add( "parameters.pup_offset1_y",
380  "",
381  "parameters.pup_offset1_y",
382  argType::Required,
383  "parameters",
384  "pup_offset1_y",
385  false,
386  "int",
387  "Vertical offset to the top left of the closest set op PWFS pupils." );
388 
389  config.add( "parameters.pup_offset2_x",
390  "",
391  "parameters.pup_offset2_x",
392  argType::Required,
393  "parameters",
394  "pup_offset2_x",
395  false,
396  "int",
397  "Horizontal offset to the top left of the furthest set op PWFS pupils." );
398 
399  config.add( "parameters.pup_offset2_y",
400  "",
401  "parameters.pup_offset2_y",
402  argType::Required,
403  "parameters",
404  "pup_offset2_y",
405  false,
406  "int",
407  "Vertical offset to the top left of the furthest set op PWFS pupils." );
408 
409 }
410 
411 inline int nnReconstructor::loadConfigImpl( mx::app::appConfigurator &_config )
412 {
413  std::cout << "loadConfigImpl()" << std::endl;
414  shmimMonitorT::loadConfig( config );
415 
416  _config( dataDirs, "parameters.dataDirs" );
417  _config( engineDirs, "parameters.engineDirs" );
418  _config( onnxFileName, "parameters.onnxFileName" );
419  _config( engineName, "parameters.engineName" );
420  _config( rebuildEngine, "parameters.rebuildEngine" );
421 
422  _config( imageNorm, "parameters.imageNorm" );
423  _config( modalNorm, "parameters.modalNorm" );
424  _config( m_modevalChannel, "parameters.channel");
425 
426  _config( m_pupPix, "parameters.m_pupPix" );
427  _config( pup_offset1_x, "parameters.pup_offset1_x" );
428  _config( pup_offset1_y, "parameters.pup_offset1_y" );
429  _config( pup_offset2_x, "parameters.pup_offset2_x" );
430  _config( pup_offset2_y, "parameters.pup_offset2_y" );
431 
432  if( true )
433  {
434  std::cout << "Debug configuration loading: " << std::endl;
435  std::cout << "dataDirs: " << dataDirs << std::endl;
436  std::cout << "engineDirs: " << engineDirs << std::endl;
437  std::cout << "onnxFileName: " << onnxFileName << std::endl;
438  std::cout << "engineName: " << engineName << std::endl;
439  std::cout << "rebuildEngine: " << rebuildEngine << std::endl;
440  std::cout << "imageNorm: " << imageNorm << std::endl;
441  std::cout << "modalNorm: " << modalNorm << std::endl;
442  std::cout << "modeval Channel: " << m_modevalChannel << std::endl;
443 
444  std::cout << "m_pupPix: " << m_pupPix << std::endl;
445  std::cout << "pup_offset1_x: " << pup_offset1_x << std::endl;
446  std::cout << "pup_offset1_y: " << pup_offset1_y << std::endl;
447  std::cout << "pup_offset2_x: " << pup_offset2_x << std::endl;
448  std::cout << "pup_offset2_y: " << pup_offset2_y << std::endl;
449  }
450 
451  return 0;
452 }
453 
{
    // Delegate to loadConfigImpl() with the app-global configurator.
    loadConfigImpl( config );
}
458 
{
    // Start the shmimMonitor thread; a failure here is fatal.
    if( shmimMonitorT::appStartup() < 0 )
    {
        return log<software_error, -1>( { __FILE__, __LINE__ } );
    }

    // Load the serialized TensorRT engine from disk.
    std::string full_filepath = engineDirs + "/" + engineName;
    std::cout << "file: " << full_filepath << std::endl;

    load_engine(full_filepath);
    // NOTE(review): lines appear to have been lost in extraction here --
    // presumably calls to create_engine_context() / prepare_engine_memory().
    // Confirm against the original source.

    // state(stateCodes::READY);
    return 0;
}
477 
{
    // Run the shmimMonitor state machine first; a failure there is fatal.
    if( shmimMonitorT::appLogic() < 0 )
    {
        return log<software_error, -1>( { __FILE__, __LINE__ } );
    }

    // Serialize INDI property updates with the INDI communication mutex.
    std::unique_lock<std::mutex> lock( m_indiMutex );

    if( shmimMonitorT::updateINDI() < 0 )
    {
        log<software_error>( { __FILE__, __LINE__ } ); // non-fatal: log and continue
    }

    return 0;
}
494 
{

    // Free the host-side preprocessing buffer allocated in allocate().
    if( pp_image )
    {
        delete[] pp_image;
    }
    // NOTE(review): lines appear to have been lost in extraction here -- likely
    // device/engine cleanup (cleanup_engine_memory()/cleanup_engine_context())
    // and/or freeing `modeval`. Also pp_image is not reset to nullptr; flag for
    // a null-assignment in case shutdown paths can run twice.

    return 0;
}
509 
/// shmimMonitor callback: (re)allocate host buffers and open the output shmim.
/** Called when the monitored input stream (re)connects.
 *  \returns 0 on success, -1 if the output channel could not be opened.
 */
inline int nnReconstructor::allocate( const dev::shmimT &dummy )
{
    std::cout << "allocate()" << std::endl;
    static_cast<void>( dummy ); // be unused

    // Wavefront sensor setup
    // NOTE(review): lines appear to have been lost in extraction here --
    // presumably assignments of m_pwfsWidth/m_pwfsHeight from the monitored
    // stream and the computation of pixels_per_quadrant. Confirm.
    std::cout << "Width: " << m_pwfsWidth << " Height: " << m_pwfsHeight << std::endl;

    std::cout << "Pixels: " << pixels_per_quadrant << std::endl;
    // Host staging buffer: Npup quadrants of pixels_per_quadrant floats each.
    // NOTE(review): if allocate() can run more than once (stream restart), the
    // previous pp_image is leaked here -- flag for a delete[] before the new.
    pp_image = new float[Npup * pixels_per_quadrant];
    memset( pp_image, 0, sizeof( float ) * Npup * pixels_per_quadrant );

    std::cout << "Close shmims" << std::endl;
    // Allocate the DM shmim interface
    if(m_modevalOpened){
        ImageStreamIO_closeIm(&m_modevalStream);
    }

    std::cout << "Open shmims" << std::endl;
    m_modevalOpened = false;
    m_modevalRestart = false; //Set this up front, since we're about to restart.

    // Open the output stream; require at least 10 semaphores (presumably so
    // enough clients can wait on it -- TODO confirm the rationale).
    if( ImageStreamIO_openIm(&m_modevalStream, m_modevalChannel.c_str()) == 0){
        if(m_modevalStream.md[0].sem < 10){
            ImageStreamIO_closeIm(&m_modevalStream);
        }else{
            m_modevalOpened = true;
        }
    }

    std::cout << "Done!" << std::endl;
    if(!m_modevalOpened){
        log<text_log>( m_modevalChannel + " not opened.", logPrio::LOG_NOTICE);
        return -1;
    }else{
        m_modevalWidth = m_modevalStream.md->size[0];
        m_modevalHeight = m_modevalStream.md->size[1];

        m_modevalDataType = m_modevalStream.md->datatype;
        // NOTE(review): type size is hard-coded to float regardless of the
        // stream's reported datatype -- confirm the channel is always float32.
        m_modevalTypeSize = sizeof(float);

        log<text_log>( "Opened " + m_modevalChannel + " " + std::to_string(m_modevalWidth) + " x " + std::to_string(m_modevalHeight) + " with data type: " + std::to_string(m_modevalDataType), logPrio::LOG_NOTICE);
    }


    return 0;
}
560 
/// shmimMonitor callback: preprocess one WFS frame into pp_image.
/** Extracts the four pupil quadrants, scales by imageNorm, and stores them
 *  quadrant-major in pp_image. The TensorRT inference and output publishing
 *  are currently commented out (work in progress).
 *  \returns 0 always.
 */
inline int nnReconstructor::processImage( void *curr_src, const dev::shmimT &dummy )
{
    static_cast<void>( dummy ); // be unused

    // aol_imwfs2 is reference and dark subtracted and is power normalized.
    // NOTE(review): the raw frame is interpreted as unsigned short regardless
    // of m_pwfsDataType -- confirm the WFS stream really is uint16.
    Eigen::Map<eigenImage<unsigned short>> pwfsIm(static_cast<unsigned short *>( curr_src ), m_pwfsHeight, m_pwfsWidth );

    // Split up the four pupils for the Neural Network.
    // pp_image layout: quadrant q occupies [q*pixels_per_quadrant, (q+1)*pixels_per_quadrant),
    // filled column by column; ki is the within-quadrant pixel index.
    int ki = 0;

    for( int col_i = 0; col_i < m_pupPix; ++col_i )
    {
        for( int row_i = 0; row_i < m_pupPix; ++row_i )
        {
            // The four quadrants are the four (offset1/offset2) x/y combinations.
            pp_image[ki] = imageNorm * (realT)pwfsIm(pup_offset1_y + row_i, pup_offset1_x + col_i );
            pp_image[ki + pixels_per_quadrant] = imageNorm * (realT)pwfsIm( pup_offset1_y + row_i, pup_offset2_x + col_i );
            pp_image[ki + 2 * pixels_per_quadrant] = imageNorm * (realT)pwfsIm( pup_offset2_y + row_i, pup_offset1_x + col_i );
            pp_image[ki + 3 * pixels_per_quadrant] = imageNorm * (realT)pwfsIm( pup_offset2_y + row_i, pup_offset2_x + col_i );

            ++ki;
        }
    }

    // Inference pipeline -- currently disabled:
    // Copy input data to device
    // cudaMemcpy(d_input, pp_image, inputSize * sizeof(float), cudaMemcpyHostToDevice);

    // Run inference
    // void* buffers[] = {d_input, d_output};
    // context->executeV2(buffers);

    // Copy output data back to host
    // cudaMemcpy(modeval, d_output, outputSize * sizeof(float), cudaMemcpyDeviceToHost);

    // Debug heartbeat, printed once every 2000 frames.
    if(frame_counter % 2000 == 0)
        std::cout << "HOWDY" << std::endl;

    // Send modal coefficients to the correct stream
    // send_to_shmim();

    frame_counter++;

    return 0;
}
604 
605 } // namespace app
606 } // namespace MagAOX
607 
608 #endif // nnReconstructor_hpp
void log(Severity severity, const char *msg) noexcept override
The base-class for MagAO-X applications.
Definition: MagAOXApp.hpp:73
stateCodes::stateCodeT state()
Get the current state code.
Definition: MagAOXApp.hpp:2297
static int log(const typename logT::messageT &msg, logPrioT level=logPrio::LOG_DEFAULT)
Make a log entry.
Definition: MagAOXApp.hpp:1804
std::mutex m_indiMutex
Mutex for locking INDI communications.
Definition: MagAOXApp.hpp:545
uint32_t m_width
The width of the images in the stream.
int setupConfig(mx::app::appConfigurator &config)
Setup the configuration system.
int updateINDI()
Update the INDI properties for this device controller.
int appLogic()
Checks the shmimMonitor thread.
uint32_t m_height
The height of the images in the stream.
int appShutdown()
Shuts down the shmimMonitor thread.
int loadConfig(mx::app::appConfigurator &config)
load the configuration system results
float realT
Floating point type in which to do all calculations.
size_t m_pwfsWidth
The width of the image.
virtual int appShutdown()
Shutdown the app.
size_t m_pwfsHeight
The height of the image.
int loadConfigImpl(mx::app::appConfigurator &_config)
Implementation of loadConfig logic, separated for testing.
int allocate(const dev::shmimT &dummy)
uint32_t m_modevalHeight
The height of the shmim.
virtual int appStartup()
Startup function.
size_t m_modevalTypeSize
The size of the type, in bytes.
virtual int appLogic()
Implementation of the FSM for nnReconstructor.
void load_engine(const std::string filename)
int processImage(void *curr_src, const dev::shmimT &dummy)
uint8_t m_modevalDataType
The ImageStreamIO type code.
std::vector< char > engineData
~nnReconstructor() noexcept
D'tor, declared and defined for noexcept.
uint32_t m_modevalWidth
The width of the shmim.
dev::shmimMonitor< nnReconstructor > shmimMonitorT
@ OPERATING
The device is operating, other than homing.
Definition: stateCodes.hpp:55
std::ostream & cout()
std::stringstream msg
std::unique_lock< std::mutex > lock(m_indiMutex)
Definition: dm.hpp:24
constexpr static logPrioT LOG_NOTICE
A normal but significant condition.
Definition: logPriority.hpp:46
Software ERR log entry.