TensorRT  7.2.1.6
NVIDIA TensorRT
sample::anonymous_namespace{sampleInference.cpp}::Iteration Class Reference

Inference iteration and streams management. More...

Collaboration diagram for sample::anonymous_namespace{sampleInference.cpp}::Iteration:

Public Member Functions

 Iteration (int id, const InferenceOptions &inference, nvinfer1::IExecutionContext &context, Bindings &bindings)
 
void query (bool skipTransfers)
 
float sync (const TimePoint &cpuStart, const TrtCudaEvent &gpuStart, std::vector< InferenceTrace > &trace, bool skipTransfers)
 
void syncAll (const TimePoint &cpuStart, const TrtCudaEvent &gpuStart, std::vector< InferenceTrace > &trace, bool skipTransfers)
 
void wait (TrtCudaEvent &gpuStart)
 
void setInputData ()
 
void fetchOutputData ()
 

Private Member Functions

void moveNext ()
 
TrtCudaStream & getStream (StreamType t)
 
TrtCudaEvent & getEvent (EventType t)
 
void record (EventType e, StreamType s)
 
void recordEnqueueTime ()
 
TimePoint getEnqueueTime (bool start)
 
void wait (EventType e, StreamType s)
 
InferenceTrace getTrace (const TimePoint &cpuStart, const TrtCudaEvent &gpuStart, bool skipTransfers)
 
void createEnqueueFunction (const InferenceOptions &inference, nvinfer1::IExecutionContext &context, Bindings &bindings)
 

Private Attributes

Bindings & mBindings
 
TrtCudaGraph mGraph
 
EnqueueFunction mEnqueue
 
int mStreamId {0}
 
int mNext {0}
 
int mDepth {2}
 
std::vector< bool > mActive
 
MultiStream mStream
 
std::vector< MultiEvent > mEvents
 
int enqueueStart {0}
 
std::vector< EnqueueTimes > mEnqueueTimes
 

Detailed Description

Inference iteration and streams management.

Constructor & Destructor Documentation

◆ Iteration()

sample::anonymous_namespace{sampleInference.cpp}::Iteration::Iteration ( int  id,
const InferenceOptions &  inference,
nvinfer1::IExecutionContext &  context,
Bindings &  bindings 
)
inline

Member Function Documentation

◆ query()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::query ( bool  skipTransfers)
inline

◆ sync()

float sample::anonymous_namespace{sampleInference.cpp}::Iteration::sync ( const TimePoint &  cpuStart,
const TrtCudaEvent &  gpuStart,
std::vector< InferenceTrace > &  trace,
bool  skipTransfers 
)
inline

◆ syncAll()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::syncAll ( const TimePoint &  cpuStart,
const TrtCudaEvent &  gpuStart,
std::vector< InferenceTrace > &  trace,
bool  skipTransfers 
)
inline

◆ wait() [1/2]

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::wait ( TrtCudaEvent &  gpuStart)
inline

◆ setInputData()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::setInputData ( )
inline
Here is the caller graph for this function:

◆ fetchOutputData()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::fetchOutputData ( )
inline

◆ moveNext()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::moveNext ( )
inlineprivate

◆ getStream()

TrtCudaStream& sample::anonymous_namespace{sampleInference.cpp}::Iteration::getStream ( StreamType  t)
inlineprivate

◆ getEvent()

TrtCudaEvent& sample::anonymous_namespace{sampleInference.cpp}::Iteration::getEvent ( EventType  t)
inlineprivate

◆ record()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::record ( EventType  e,
StreamType  s 
)
inlineprivate

◆ recordEnqueueTime()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::recordEnqueueTime ( )
inlineprivate

◆ getEnqueueTime()

TimePoint sample::anonymous_namespace{sampleInference.cpp}::Iteration::getEnqueueTime ( bool  start)
inlineprivate

◆ wait() [2/2]

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::wait ( EventType  e,
StreamType  s 
)
inlineprivate

◆ getTrace()

InferenceTrace sample::anonymous_namespace{sampleInference.cpp}::Iteration::getTrace ( const TimePoint &  cpuStart,
const TrtCudaEvent &  gpuStart,
bool  skipTransfers 
)
inlineprivate

◆ createEnqueueFunction()

void sample::anonymous_namespace{sampleInference.cpp}::Iteration::createEnqueueFunction ( const InferenceOptions &  inference,
nvinfer1::IExecutionContext &  context,
Bindings &  bindings 
)
inlineprivate

Member Data Documentation

◆ mBindings

Bindings& sample::anonymous_namespace{sampleInference.cpp}::Iteration::mBindings
private

◆ mGraph

TrtCudaGraph sample::anonymous_namespace{sampleInference.cpp}::Iteration::mGraph
private

◆ mEnqueue

EnqueueFunction sample::anonymous_namespace{sampleInference.cpp}::Iteration::mEnqueue
private

◆ mStreamId

int sample::anonymous_namespace{sampleInference.cpp}::Iteration::mStreamId {0}
private

◆ mNext

int sample::anonymous_namespace{sampleInference.cpp}::Iteration::mNext {0}
private

◆ mDepth

int sample::anonymous_namespace{sampleInference.cpp}::Iteration::mDepth {2}
private

◆ mActive

std::vector<bool> sample::anonymous_namespace{sampleInference.cpp}::Iteration::mActive
private

◆ mStream

MultiStream sample::anonymous_namespace{sampleInference.cpp}::Iteration::mStream
private

◆ mEvents

std::vector<MultiEvent> sample::anonymous_namespace{sampleInference.cpp}::Iteration::mEvents
private

◆ enqueueStart

int sample::anonymous_namespace{sampleInference.cpp}::Iteration::enqueueStart {0}
private

◆ mEnqueueTimes

std::vector<EnqueueTimes> sample::anonymous_namespace{sampleInference.cpp}::Iteration::mEnqueueTimes
private

The documentation for this class was generated from the following file: