#include <driver_types.h>
const char8_t* const modelContent,
uint32_t const modelContentSize,
const char8_t* const modelContent,
uint32_t const modelContentSize,
NVIDIA DriveWorks API: Core Methods
NVIDIA DriveWorks: DNN Plugin Interface
NVIDIA DriveWorks API: Data Conditioner Methods
dwStatus
Status definition.
NVIDIA DriveWorks API: Core Status Methods
NVIDIA DriveWorks API: DNNTensor Structures and Methods
float float32_t
Specifies POD types.
struct dwContextObject * dwContextHandle_t
Context handle.
dwDataConditionerParams dataConditionerParams
DataConditioner parameters for running this network.
struct dwDNNObject * dwDNNHandle_t
Handles representing Deep Neural Network interface.
DW_API_PUBLIC dwStatus dwDNN_infer(dwDNNTensorHandle_t *const outputTensors, uint32_t const outputTensorCount, dwConstDNNTensorHandle_t *const inputTensors, uint32_t const inputTensorCount, dwDNNHandle_t const network)
Runs inference pipeline on the given input.
DW_API_PUBLIC dwStatus dwDNN_getInputBlobCount(uint32_t *const count, dwDNNHandle_t const network)
Gets the input blob count.
DW_API_PUBLIC dwStatus dwDNN_getCUDAStream(cudaStream_t *const stream, dwDNNHandle_t const network)
Gets the CUDA stream used by the given network for inference operations.
DW_API_PUBLIC dwStatus dwDNN_inferRaw(float32_t *const *const dOutput, const float32_t *const *const dInput, uint32_t const batchsize, dwDNNHandle_t const network)
Forwards pass from all input blobs to all output blobs.
DW_API_PUBLIC dwStatus dwDNN_getOutputTensorProperties(dwDNNTensorProperties *const tensorProps, uint32_t const blobIndex, dwDNNHandle_t const network)
Gets the output tensor properties at blobIndex.
DW_API_PUBLIC dwStatus dwDNN_initializeTensorRTFromFile(dwDNNHandle_t *const network, const char8_t *const modelFilename, const dwDNNPluginConfiguration *const pluginConfiguration, dwProcessorType const processorType, dwContextHandle_t const context)
Creates and initializes a TensorRT Network from file.
DW_API_PUBLIC dwStatus dwDNN_initializeTensorRTFromMemory(dwDNNHandle_t *const network, const char8_t *const modelContent, uint32_t const modelContentSize, const dwDNNPluginConfiguration *const pluginConfiguration, dwProcessorType const processorType, dwContextHandle_t const context)
Creates and initializes a TensorRT Network from memory.
DW_API_PUBLIC dwStatus dwDNN_inferSIO(float32_t *const dOutput, const float32_t *const dInput, uint32_t const batchsize, dwDNNHandle_t const network)
Forwards pass from the first input blob to the first output blob (a shortcut for a single input - single output network).
DW_API_PUBLIC dwStatus dwDNN_setCUDAStream(cudaStream_t const stream, dwDNNHandle_t const network)
Sets the CUDA stream for infer operations.
DW_API_PUBLIC dwStatus dwDNN_getInputTensorProperties(dwDNNTensorProperties *const tensorProps, uint32_t const blobIndex, dwDNNHandle_t const network)
Gets the input tensor properties at blobIndex.
DW_API_PUBLIC dwStatus dwDNN_getOutputSize(dwBlobSize *const blobSize, uint32_t const blobIndex, dwDNNHandle_t const network)
Gets the output blob size at blobIndex.
DW_API_PUBLIC dwStatus dwDNN_getMetaData(dwDNNMetaData *const metaData, dwDNNHandle_t const network)
Returns the metadata for the associated network model.
DW_API_PUBLIC dwStatus dwDNN_getOutputBlobCount(uint32_t *const count, dwDNNHandle_t const network)
Gets the output blob count.
DW_API_PUBLIC dwStatus dwDNN_getInputSize(dwBlobSize *const blobSize, uint32_t const blobIndex, dwDNNHandle_t const network)
Gets the input blob size at blobIndex.
DW_API_PUBLIC dwStatus dwDNN_initializeTensorRTFromFileWithEngineId(dwDNNHandle_t *const network, const char8_t *const modelFilename, const dwDNNPluginConfiguration *const pluginConfiguration, dwProcessorType const processorType, uint32_t engineId, dwContextHandle_t const context)
Creates and initializes a TensorRT Network from file with DLA Engine ID.
DW_API_PUBLIC dwStatus dwDNN_initializeTensorRTFromMemoryWithEngineId(dwDNNHandle_t *const network, const char8_t *const modelContent, uint32_t const modelContentSize, const dwDNNPluginConfiguration *const pluginConfiguration, dwProcessorType const processorType, uint32_t engineId, dwContextHandle_t const context)
Creates and initializes a TensorRT Network from memory with DLA Engine ID.
DW_API_PUBLIC dwStatus dwDNN_getOutputIndex(uint32_t *const blobIndex, const char8_t *const blobName, dwDNNHandle_t const network)
Gets the index of an output blob with a given blob name.
struct dwDNNObject const * dwConstDNNHandle_t
DW_API_PUBLIC dwStatus dwDNN_reset(dwDNNHandle_t const network)
Resets a given network.
DW_API_PUBLIC dwStatus dwDNN_getInputIndex(uint32_t *const blobIndex, const char8_t *const blobName, dwDNNHandle_t const network)
Gets the index of an input blob with a given blob name.
DW_API_PUBLIC dwStatus dwDNN_release(dwDNNHandle_t const network)
Releases a given network.
Specified plugin configuration.
struct dwDNNTensorObject * dwDNNTensorHandle_t
Handle representing a DNN tensor object.
struct dwDNNTensorObject const * dwConstDNNTensorHandle_t
Specifies DNNTensor properties.