First, the device and the image processing engine are configured.
If a format is needed that is not supported by Impact Acquire, the application should select the closest matching format and then perform the final conversion itself.
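FFmpeg's libswscale is one way to perform such a conversion. The following sketch is not part of the example below; it assumes a packed BGR source buffer and a YUV422 planar destination, and the function name and parameters are hypothetical:

#include <apps/Common/FFmpegIncludePrologue.h>
#include <libswscale/swscale.h>
#include <apps/Common/FFmpegIncludeEpilogue.h>

// Hypothetical fallback conversion done by the application itself: packed BGR -> planar YUV422.
void convertToYUV422P( const uint8_t* pSrc, int srcStride, int width, int height,
                       uint8_t* const dstPlanes[], const int dstStrides[] )
{
    SwsContext* pCtx = sws_getContext( width, height, AV_PIX_FMT_BGR24,
                                       width, height, AV_PIX_FMT_YUV422P,
                                       SWS_BILINEAR, nullptr, nullptr, nullptr );
    const uint8_t* srcPlanes[4] = { pSrc, nullptr, nullptr, nullptr };
    const int srcStrides[4] = { srcStride, 0, 0, 0 };
    sws_scale( pCtx, srcPlanes, srcStrides, 0, height, dstPlanes, dstStrides );
    sws_freeContext( pCtx );
}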
Then AVIHelper::startRecordingEngine() is called to configure the encoder and the file access.
Once a frame has been acquired, it is appended to the file by AVIHelper::writeFrame().
Finally, the file is closed and all allocated resources are freed by calling AVIHelper::stopRecordingEngine() or by the destructor of the AVIHelper class.
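In outline, the AVIHelper class implemented below is used like this (a schematic sketch only; imageWidth, imageHeight, pRequest and frameNr stand for values provided by the surrounding acquisition code, error handling and the acquisition loop are omitted):

AVIHelper aviHelper;
// configure the encoder, the container format and the output file
if( aviHelper.startRecordingEngine( imageWidth, imageHeight, AV_CODEC_ID_H264, 23 /* crf */, 6000 /* bitrate */, "myRecording" ) == 0 )
{
    // append every captured frame to the file (done inside 'myThreadCallback' in the example)
    aviHelper.writeFrame( pRequest, frameNr );
    // ...
    // flush the encoder, write the trailer, close the file and free all resources
    aviHelper.stopRecordingEngine();
}

The complete source code of the example follows.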
#include <algorithm>
#include <cassert>
#include <chrono>
#include <cstring>
#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <apps/Common/FFmpegIncludePrologue.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <apps/Common/FFmpegIncludeEpilogue.h>
#if defined(LIBAVCODEC_VERSION_MAJOR) && (LIBAVCODEC_VERSION_MAJOR < 57)
# error Outdated libavcodec header package detected! We need at least a 3.x release of the FFmpeg package in order for this code to compile!
#endif
#if defined(LIBAVFORMAT_VERSION_MAJOR) && (LIBAVFORMAT_VERSION_MAJOR < 57)
# error Outdated libavformat header package detected! We need at least a 3.x release of the FFmpeg package in order for this code to compile!
#endif
#if defined(LIBAVUTIL_VERSION_MAJOR) && (LIBAVUTIL_VERSION_MAJOR < 55)
# error Outdated libavutil header package detected! We need at least a 3.x release of the FFmpeg package in order for this code to compile!
#endif
#include <apps/Common/exampleHelper.h>
#include <mvIMPACT_CPP/mvIMPACT_acquire.h>
#include <mvIMPACT_CPP/mvIMPACT_acquire_GenICam.h>
#include <mvIMPACT_CPP/mvIMPACT_acquire_helper.h>
#ifdef _WIN32
# include <mvDisplay/Include/mvIMPACT_acquire_display.h>
using namespace mvIMPACT::acquire::display;
# define USE_DISPLAY
#endif
using namespace mvIMPACT::acquire;
using namespace std;
typedef map<string, AVCodecID> StringToCodecMap;
class AVIHelper
{
private:
static StringToCodecMap s_supportedCodecs_;
AVPacket* pPkt_;
AVFrame* pFrame_;
AVCodecContext* pC_;
AVPixelFormat pixelFormat_;
AVFormatContext* pFC_;
bool boMustOpenAndCloseFile_;
int64_type firstTimestamp_us_;
string getErrorMessageFromCode( int code )
{
char buf[AV_ERROR_MAX_STRING_SIZE];
av_strerror( code, buf, sizeof( buf ) );
return buf;
}
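// Encodes a single frame using the send/receive API introduced with FFmpeg 3.x:
// avcodec_send_frame() hands the raw frame to the encoder, avcodec_receive_packet()
// drains every packet that is ready and av_interleaved_write_frame() muxes it into
// the output file. Passing a null frame flushes the encoder at the end of the recording.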
void encode( AVCodecContext* pEncCtx, AVFrame* pFrame )
{
if( !pFrame )
{
cout << "Flushing encoder stream (not gotten any frame)" << endl;
}
int ret = avcodec_send_frame( pEncCtx, pFrame );
if( ret < 0 )
{
cout << "Error sending a frame for encoding" << endl;
exit( 1 );
}
while( ret >= 0 )
{
av_init_packet( pPkt_ );
ret = avcodec_receive_packet( pEncCtx, pPkt_ );
if( ( ret == AVERROR( EAGAIN ) ) || ( ret == AVERROR_EOF ) )
{
return;
}
else if( ret < 0 )
{
cout << "Error during encoding" << endl;
exit( 1 );
}
if( ( pPkt_->pts % 100 ) == 0 )
{
cout << "Writing encoded frame " << pPkt_->pts << " (size=" << pPkt_->size << ")" << endl;
}
pPkt_->stream_index = 0;
ret = av_interleaved_write_frame( pFC_, pPkt_ );
if( ret < 0 )
{
cout << "Error while writing video frame" << endl;
}
av_packet_unref( pPkt_ );
}
}
int64_type getOptimalBitRateValue( int64_type width, int64_type height ) const
{
// the simple bit rate heuristic below is only valid for the fixed frame rate used by this example
if( ( pC_->framerate.num != FRAME_RATE ) || ( pC_->framerate.den != 1 ) )
{
cout << "Unexpected codec frame rate! Unable to continue! Press [ENTER] to end the application" << endl;
cin.get();
exit( 1 );
}
return width * height * 3;
}
static void populateCodecMap( void )
{
if( s_supportedCodecs_.empty() )
{
s_supportedCodecs_["MPEG2VIDEO"] = AV_CODEC_ID_MPEG2VIDEO;
s_supportedCodecs_["H264"] = AV_CODEC_ID_H264;
s_supportedCodecs_["H265"] = AV_CODEC_ID_H265;
}
}
public:
explicit AVIHelper() : pPkt_( nullptr ), pFrame_( nullptr ), pC_( nullptr ), pixelFormat_( AV_PIX_FMT_NONE ),
pFC_( nullptr ), boMustOpenAndCloseFile_( false ), firstTimestamp_us_( 0 )
{
#if (LIBAVCODEC_VERSION_MAJOR < 58)
avcodec_register_all();
#endif
populateCodecMap();
}
~AVIHelper()
{
stopRecordingEngine();
}
enum
{
FRAME_RATE = 25,
DEFAULT_TIMESCALE = 1000000
};
static bool isCodecSupported( const string& codec )
{
populateCodecMap();
return s_supportedCodecs_.find( codec ) != s_supportedCodecs_.end();
}
static const StringToCodecMap& getSupportedCodecs( void )
{
populateCodecMap();
return s_supportedCodecs_;
}
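// Sets up the complete output pipeline: the container format is guessed from the file
// extension, a video stream is created, the codec context is configured (CRF/preset for
// H.264/H.265, the bit rate parameter for MPEG-2), the codec and the output file are
// opened and the file header is written. Returns 0 on success.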
int startRecordingEngine( int64_type width, int64_type height, AVCodecID codecToUse, int crf, int bitrate, const string& fileName )
{
string crf_str = std::to_string( crf );
firstTimestamp_us_ = 0;
string fullFileName( fileName );
int timescale = DEFAULT_TIMESCALE;
pixelFormat_ = AV_PIX_FMT_YUV422P;
switch( codecToUse )
{
case AV_CODEC_ID_MPEG2VIDEO:
fullFileName += ".m2v";
timescale = FRAME_RATE;
break;
case AV_CODEC_ID_H264:
case AV_CODEC_ID_H265:
fullFileName += ".mp4";
break;
default:
cout << "Codec not found!" << endl;
return 1;
}
AVOutputFormat* pOFormat = av_guess_format( NULL, fullFileName.c_str(), NULL );
if( ( avformat_alloc_output_context2( &pFC_, pOFormat, NULL, fullFileName.c_str() ) ) < 0 )
{
return 1;
}
const AVCodec* pCodec = avcodec_find_encoder( codecToUse );
if( !pCodec )
{
cout << "Codec not found" << endl;
return 1;
}
pOFormat->video_codec = codecToUse;
AVStream* pVideoStream = avformat_new_stream( pFC_, pCodec );
pVideoStream->time_base = AVRational {1, timescale};
pVideoStream->avg_frame_rate = { FRAME_RATE, 1 };
pVideoStream->codecpar->codec_id = codecToUse;
pVideoStream->id = pFC_->nb_streams - 1;
pVideoStream->codecpar->codec_id = pOFormat->video_codec;
pVideoStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
pVideoStream->codecpar->width = static_cast<int>( width );
pVideoStream->codecpar->height = static_cast<int>( height );
pVideoStream->codecpar->format = pixelFormat_;
if( codecToUse == AV_CODEC_ID_MPEG2VIDEO )
{
pVideoStream->codecpar->bit_rate = bitrate;
}
pC_ = avcodec_alloc_context3( pCodec );
if( !pC_ )
{
cout << "Could not allocate video codec context!" << endl;
return 1;
}
pC_->max_b_frames = 1;
AVDictionary* pOptParams = nullptr;
pC_->time_base = pVideoStream->time_base;
pC_->gop_size = 10;
pC_->pkt_timebase = pC_->time_base;
switch( pVideoStream->codecpar->codec_id )
{
case AV_CODEC_ID_H264:
av_opt_set( pC_->priv_data, "crf", crf_str.c_str(), AV_OPT_SEARCH_CHILDREN );
av_opt_set( pC_->priv_data, "preset", "slow", AV_OPT_SEARCH_CHILDREN );
break;
case AV_CODEC_ID_H265:
av_opt_set( pC_->priv_data, "crf", crf_str.c_str(), AV_OPT_SEARCH_CHILDREN );
av_opt_set( pC_->priv_data, "preset", "fast", AV_OPT_SEARCH_CHILDREN );
break;
default:
break;
}
int ret = avcodec_parameters_to_context( pC_, pVideoStream->codecpar );
if( ret != 0 )
{
cout << "Could not pass stream parameters to codec context: " << getErrorMessageFromCode( ret ) << ", ret: " << ret << endl;
}
boMustOpenAndCloseFile_ = !( pOFormat->flags & AVFMT_NOFILE );
if( boMustOpenAndCloseFile_ )
{
avio_open( &pFC_->pb, fullFileName.c_str(), AVIO_FLAG_WRITE );
}
av_dump_format( pFC_, 0, fullFileName.c_str(), 1 );
pPkt_ = av_packet_alloc();
if( !pPkt_ )
{
cout << "Could not allocate AVPacket structure!" << endl;
return 1;
}
pC_->width = pVideoStream->codecpar->width;
pC_->height = pVideoStream->codecpar->height;
pC_->framerate = AVRational {FRAME_RATE, 1};
pC_->time_base = pVideoStream->time_base;
pC_->gop_size = 10;
pC_->pix_fmt = pixelFormat_;
pC_->bit_rate = getOptimalBitRateValue( width, height );
pC_->strict_std_compliance = FF_COMPLIANCE_UNOFFICIAL;
ret = avcodec_open2( pC_, pCodec, NULL );
if( ret < 0 )
{
cout << "Could not open codec: " << getErrorMessageFromCode( ret ) << ", ret: " << ret << endl;
return 1;
}
pFrame_ = av_frame_alloc();
if( !pFrame_ )
{
cout << "Could not allocate video frame" << endl;
return 1;
}
pFrame_->format = pC_->pix_fmt;
pFrame_->width = pC_->width;
pFrame_->height = pC_->height;
ret = av_frame_get_buffer( pFrame_, 32 );
if( ret < 0 )
{
cout << "Could not allocate the video frame data" << endl;
return 1;
}
const int result = avformat_write_header( pFC_, &pOptParams );
if( pOptParams )
{
av_dict_free( &pOptParams );
pOptParams = nullptr;
}
if( result != 0 )
{
cout << "Could not write file header!" << endl;
return 1;
}
return 0;
}
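// Converts the captured Impact Acquire buffer into the planar layout of the AVFrame,
// derives the presentation timestamp (device timestamp for H.264/H.265, frame counter
// otherwise) and passes the frame to the encoder.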
void writeFrame( shared_ptr<Request> pReq, int cnt )
{
const ImageBuffer* pIB = pReq->getImageBufferDesc().getBuffer();
fflush( stdout );
av_frame_make_writable( pFrame_ );
switch( pIB->pixelFormat )
{
case ibpfYUV422Packed:
switch( pixelFormat_ )
{
case AV_PIX_FMT_YUV422P:
for( int y = 0; y < pIB->iHeight; y++ )
{
uint8_t* pDstY = pFrame_->data[0] + y * pFrame_->linesize[0];
uint8_t* pDstU = pFrame_->data[1] + y * pFrame_->linesize[1];
uint8_t* pDstV = pFrame_->data[2] + y * pFrame_->linesize[2];
// assumption: line-wise source pointer into the packed YUV422 buffer (omitted in the original listing)
const uint8_t* pSrc = static_cast<const uint8_t*>( pIB->vpData ) + y * pIB->pChannels[0].iLinePitch;
for( int x = 0; x < pIB->iWidth / 2; x++ )
{
*pDstY++ = *pSrc++;
*pDstU++ = *pSrc++;
*pDstY++ = *pSrc++;
*pDstV++ = *pSrc++;
}
}
break;
case AV_PIX_FMT_YUV420P:
for( int y = 0; y < pIB->iHeight; y++ )
{
uint8_t* pDstY = pFrame_->data[0] + y * pFrame_->linesize[0];
uint8_t* pDstU = pFrame_->data[1] + ( y / 2 ) * pFrame_->linesize[1];
uint8_t* pDstV = pFrame_->data[2] + ( y / 2 ) * pFrame_->linesize[2];
// assumption: line-wise source pointer into the packed YUV422 buffer (omitted in the original listing)
const uint8_t* pSrc = static_cast<const uint8_t*>( pIB->vpData ) + y * pIB->pChannels[0].iLinePitch;
for( int x = 0; x < pIB->iWidth / 2; x++ )
{
if( ( y % 2 ) == 0 )
{
*pDstY++ = *pSrc++;
*pDstU++ = *pSrc++;
*pDstY++ = *pSrc++;
*pDstV++ = *pSrc++;
}
else
{
*pDstY++ = pSrc[0];
*pDstY++ = pSrc[2];
pSrc += 4;
}
}
}
break;
default:
assert( !"Unhandled FFmpeg pixel format detected!" );
break;
}
break;
case ibpfYUV422Planar:
switch( pixelFormat_ )
{
case AV_PIX_FMT_YUV422P:
for( int y = 0; y < pIB->iHeight; y++ )
{
for( int channel = 0; channel < 3; channel++ )
{
// assumption (the copy itself was omitted in the original listing): copy one line of this plane
memcpy( pFrame_->data[channel] + y * pFrame_->linesize[channel], static_cast<const uint8_t*>( pIB->vpData ) + pIB->pChannels[channel].iChannelOffset + y * pIB->pChannels[channel].iLinePitch, ( channel == 0 ) ? pIB->iWidth : pIB->iWidth / 2 );
}
}
break;
case AV_PIX_FMT_YUV420P:
for( int y = 0; y < pIB->iHeight; y++ )
{
for( int channel = 0; channel < 3; channel++ )
{
if( channel == 0 )
{
// assumption (copy omitted in the original listing): the luminance plane is copied for every line
memcpy( pFrame_->data[0] + y * pFrame_->linesize[0], static_cast<const uint8_t*>( pIB->vpData ) + pIB->pChannels[0].iChannelOffset + y * pIB->pChannels[0].iLinePitch, pIB->iWidth );
}
else if( ( y % 2 ) == 0 )
{
// assumption (copy omitted in the original listing): 4:2:0 output keeps the chrominance of every second line only
memcpy( pFrame_->data[channel] + ( y / 2 ) * pFrame_->linesize[channel], static_cast<const uint8_t*>( pIB->vpData ) + pIB->pChannels[channel].iChannelOffset + y * pIB->pChannels[channel].iLinePitch, pIB->iWidth / 2 );
}
}
}
break;
default:
assert( !"Unhandled FFmpeg pixel format detected!" );
break;
}
break;
default:
assert( !"Unhandled " PRODUCT_NAME " pixel format detected!" );
break;
}
if( firstTimestamp_us_ == 0 )
{
firstTimestamp_us_ = pReq->infoTimeStamp_us.read();
}
switch( pC_->codec_id )
{
case AV_CODEC_ID_H264:
case AV_CODEC_ID_H265:
pFrame_->pts = pReq->infoTimeStamp_us.read() - firstTimestamp_us_;
break;
default:
pFrame_->pts = cnt;
break;
}
encode( pC_, pFrame_ );
}
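// Flushes the encoder by sending a null frame, writes the container trailer, closes the
// output file if it was opened by this class and frees all FFmpeg resources. Safe to call
// more than once; the destructor calls it as well.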
void stopRecordingEngine( void )
{
if( pC_ && pFC_ )
{
encode( pC_, nullptr );
av_write_trailer( pFC_ );
if( boMustOpenAndCloseFile_ )
{
avio_close( pFC_->pb );
boMustOpenAndCloseFile_ = false;
}
}
if( pC_ )
{
avcodec_free_context( &pC_ );
pC_ = nullptr;
}
if( pFC_ )
{
avformat_free_context( pFC_ );
pFC_ = nullptr;
}
if( pFrame_ )
{
av_frame_free( &pFrame_ );
pFrame_ = nullptr;
}
if( pPkt_ )
{
av_packet_free( &pPkt_ );
pPkt_ = nullptr;
}
}
};
StringToCodecMap AVIHelper::s_supportedCodecs_ = StringToCodecMap();
struct ThreadParameter
{
Device* pDev_;
AVIHelper aviHelper_;
unsigned int requestsCaptured_;
Statistics statistics_;
#ifdef USE_DISPLAY
ImageDisplayWindow displayWindow_;
#endif
explicit ThreadParameter( Device* pDev ) : pDev_( pDev ), aviHelper_(), requestsCaptured_( 0 ), statistics_( pDev )
#ifdef USE_DISPLAY
, displayWindow_( "mvIMPACT_acquire sample, Device " + pDev_->serial.read() )
#endif
{}
ThreadParameter( const ThreadParameter& src ) = delete;
ThreadParameter& operator=( const ThreadParameter& rhs ) = delete;
};
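// Called by the RequestProvider for every captured request: prints statistics every 100 frames,
// displays the image (Windows only) or prints its geometry and appends the frame to the video file.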
void myThreadCallback( shared_ptr<Request> pRequest, ThreadParameter& threadParameter )
{
++threadParameter.requestsCaptured_;
if( threadParameter.requestsCaptured_ % 100 == 0 )
{
const Statistics& s = threadParameter.statistics_;
cout << "Info from " << threadParameter.pDev_->serial.read()
}
if( pRequest->isOK() )
{
#ifdef USE_DISPLAY
threadParameter.displayWindow_.GetImageDisplay().SetImage( pRequest );
threadParameter.displayWindow_.GetImageDisplay().Update();
#else
cout << "Image captured: " << pRequest->imageOffsetX.read() << "x" << pRequest->imageOffsetY.read() << "@" << pRequest->imageWidth.read() << "x" << pRequest->imageHeight.read() << endl;
#endif
threadParameter.aviHelper_.writeFrame( pRequest, threadParameter.requestsCaptured_ );
}
else
{
cout << "Error: " << pRequest->requestResult.readS() << endl;
}
}
void displayCommandLineOptions( void )
{
cout << "Available parameters:" << endl
<< " 'serial' or 's' to specify the serial number of the device to use" << endl
<< " 'codec' or 'c' to specify the name of the codec to use" << endl
<< " 'recordingTime' or 'rt' to specify the recording time in ms. If not specified pressing ENTER will terminate the recording" << endl
<< " Only H.264/H.265 codec:" << endl
<< " 'constantRateFactor' or 'crf' to specify the ratio between encoding speed and quality (default: 23, lower value results in bigger files)." << endl
<< " Only MPEG2 codec:" << endl
<< " 'bitrate' or 'b' to specify the maximum average bitrate in kBit/s of the MPEG2 video." << endl
<< endl
<< "USAGE EXAMPLE:" << endl
<< " ContinuousCaptureFFmpeg s=VD* codec=H264 rt=1000 crf=18" << endl << endl;
}
AVCodecID selectCodecFromUserInput( void )
{
const StringToCodecMap& validCodecs = AVIHelper::getSupportedCodecs();
StringToCodecMap::size_type index = 0;
cout << "Codecs currently supported by this class (FFmpeg supports a lot more, contributions welcome):" << endl;
for( const auto& codec : validCodecs )
{
cout << " [" << index++ << "]: " << codec.first << endl;
}
cout << endl;
cout << "Please select a codec: ";
StringToCodecMap::size_type codecNr = 0;
cin >> codecNr;
cin.get();
if( codecNr >= validCodecs.size() )
{
return AV_CODEC_ID_NONE;
}
StringToCodecMap::const_iterator it = validCodecs.begin();
for( StringToCodecMap::size_type i = 0; i < codecNr; i++ )
{
++it;
}
return it->second;
}
void selectQualityFromUserInput( AVCodecID codec, unsigned int& crf, unsigned int& bitrate )
{
switch( codec )
{
case AV_CODEC_ID_H264:
case AV_CODEC_ID_H265:
cout << endl
<< "Please select a constant rate factor (crf) (0-51) for encoding quality (default: 23)" << endl
<< "Lower values will result in better quality, faster encoding but bigger file size: ";
cin >> crf;
cin.get();
if( crf > 51 )
{
cout << "CRF out of range (0-51), using 28..." << endl;
crf = 28;
}
break;
case AV_CODEC_ID_MPEG2VIDEO:
cout << endl
<< "Please select a average bitrate in kBit/s for encoding quality (default=6000): ";
cin >> bitrate;
cin.get();
if( bitrate > 50000 )
{
cout << "Bitrate out of range (0-50.000), using 6000 kBit/s" << endl;
bitrate = 6000;
}
break;
default:
cout << "Unsupported video codec: " << codec << "! Cannot obtain quality parameters since I don't know what to do with it." << endl;
break;
}
}
int main( int argc, char* argv[] )
{
DeviceManager devMgr;
Device* pDev = nullptr;
AVCodecID avCodec = AV_CODEC_ID_NONE;
unsigned int recordingTime = 0;
unsigned int crf = 23;
unsigned int bitrate = 6000;
if( argc > 1 )
{
bool boInvalidCommandLineParameterDetected = false;
for( int i = 1; i < argc; i++ )
{
const string param( argv[i] );
const string::size_type keyEnd = param.find_first_of( "=" );
if( ( keyEnd == string::npos ) || ( keyEnd == param.length() - 1 ) )
{
cout << "Invalid command-line parameter: '" << param << "' (ignored)." << endl;
boInvalidCommandLineParameterDetected = true;
}
else
{
const string key = param.substr( 0, keyEnd );
const string value = param.substr( keyEnd + 1 );
if( ( key == "serial" ) || ( key == "s" ) )
{
pDev = devMgr.getDeviceBySerial( value );
if( pDev && pDev->interfaceLayout.isValid() )
{
// assumption: switch GenICam compliant devices to the 'GenICam' interface layout for full feature access
conditionalSetProperty( pDev->interfaceLayout, dilGenICam, true );
}
}
else if( ( key == "codec" ) || ( key == "c" ) )
{
const StringToCodecMap::const_iterator itCodec = AVIHelper::getSupportedCodecs().find( value );
if( itCodec != AVIHelper::getSupportedCodecs().end() )
{
avCodec = itCodec->second;
cout << "Using codec: " << itCodec->first << endl;
}
}
else if( ( key == "recordingTime" ) || ( key == "rt" ) )
{
recordingTime = static_cast<unsigned int>( atoi( value.c_str() ) );
}
else if( ( key == "constantRateFactor" ) || ( key == "crf" ) )
{
crf = static_cast<unsigned int>( atoi( value.c_str() ) );
}
else if( ( key == "bitrate" ) || ( key == "b" ) )
{
bitrate = static_cast<unsigned int>( atoi( value.c_str() ) );
}
else
{
cout << "Invalid command-line parameter: '" << param << "' (ignored)." << endl;
boInvalidCommandLineParameterDetected = true;
}
}
}
if( boInvalidCommandLineParameterDetected )
{
displayCommandLineOptions();
}
}
else
{
cout << "No command-line parameters specified." << endl;
displayCommandLineOptions();
}
if( pDev == nullptr )
{
pDev = getDeviceFromUserInput( devMgr );
}
if( pDev == nullptr )
{
cout << "Unable to continue! Press [ENTER] to end the application" << endl;
cin.get();
return 1;
}
if( avCodec == AV_CODEC_ID_NONE )
{
avCodec = selectCodecFromUserInput();
selectQualityFromUserInput( avCodec, crf, bitrate );
}
cout << "Initialising the device '" << pDev->serial.read() << "'. This might take some time..." << endl;
try
{
pDev->open();
}
catch( const ImpactAcquireException& e )
{
cout << "An error occurred while opening the device " << pDev->serial.read()
<< " (error code: " << e.getErrorCodeAsString() << ")." << endl;
return 1;
}
int64_type imageWidth = {0};
int64_type imageHeight = {0};
try
{
// assumption: devices operated in the GenICam interface layout are configured via the GenICam property classes
if( pDev->interfaceLayout.isValid() && ( pDev->interfaceLayout.read() == dilGenICam ) )
{
GenICam::UserSetControl usc( pDev );
GenICam::ImageFormatControl ifc( pDev );
GenICam::AcquisitionControl acq( pDev );
if( usc.userSetSelector.isValid() && usc.userSetSelector.isWriteable() && usc.userSetLoad.isValid() )
{
usc.userSetSelector.writeS( "Default" );
usc.userSetLoad.call();
}
if( ifc.width.isValid() && ifc.height.isValid() )
{
imageWidth = ifc.width.read();
imageHeight = ifc.height.read();
}
if( acq.acquisitionFrameRateEnable.isValid() && acq.acquisitionFrameRateEnable.isWriteable() )
{
acq.acquisitionFrameRateEnable.write( bTrue );
acq.acquisitionFrameRate.write( static_cast<double>( AVIHelper::FRAME_RATE ) );
}
}
else
{
// assumption: devices without a GenICam interface layout are configured via the device specific settings class
CameraSettingsBase bds( pDev );
if( bds.aoiWidth.isValid() && bds.aoiHeight.isValid() )
{
imageWidth = bds.aoiWidth.read();
imageHeight = bds.aoiHeight.read();
}
}
ImageDestination id( pDev );
// request YUV422 planar data from the driver as this matches the encoder input format used by this example
id.pixelFormat.write( idpfYUV422Planar );
}
catch( const ImpactAcquireException& e )
{
cout << "An error occurred while configuring the device " << pDev->serial.read()
<< " (error code: " << e.getErrorCodeAsString() << ")." << endl
<< "Press [ENTER] to end the application..." << endl;
cin.get();
return 1;
}
ThreadParameter threadParam( pDev );
ostringstream oss;
oss << pDev->serial.readS() << "@" << imageWidth << "x" << imageHeight;
threadParam.aviHelper_.startRecordingEngine( imageWidth, imageHeight, avCodec, crf, bitrate, oss.str() );
helper::RequestProvider requestProvider( pDev );
requestProvider.acquisitionStart( myThreadCallback, ref( threadParam ) );
if( recordingTime == 0 )
{
cout << "Press [ENTER] to stop the acquisition thread" << endl;
cin.get();
}
else
{
cout << "Recording for " << recordingTime << "ms now." << endl;
this_thread::sleep_for( chrono::milliseconds( recordingTime ) );
}
requestProvider.acquisitionStop();
threadParam.aviHelper_.stopRecordingEngine();
return 0;
}