bool configureDevice( Device* pDev )
{
ImageFormatControl ifc( pDev );
if( ifc.componentSelector.isValid() && ifc.componentSelector.isWriteable() )
{
if( !supportsEnumStringValue( ifc.componentSelector, "Intensity" ) )
{
return false;
}
ifc.componentSelector.writeS( "Intensity" );
ifc.componentEnable.write( TBoolean::bTrue );
if( !supportsEnumStringValue( ifc.componentSelector, "Disparity" ) )
{
return false;
}
ifc.componentSelector.writeS( "Disparity" );
ifc.componentEnable.write( TBoolean::bTrue );
}
else
{
return false;
}
AcquisitionControl acq( pDev );
if( !acq.exposureAuto.isValid() ||
!acq.exposureAuto.isWriteable() ||
!acq.acquisitionMultiPartMode.isValid() ||
!acq.acquisitionMultiPartMode.isWriteable() )
{
return false;
}
if( !supportsEnumStringValue( acq.exposureAuto, "Continuous" ) ||
!supportsEnumStringValue( acq.acquisitionMultiPartMode, "SynchronizedComponents" ) )
{
return false;
}
acq.exposureAuto.writeS( "Continuous" );
acq.acquisitionMultiPartMode.writeS( "SynchronizedComponents" );
ChunkDataControl cdc( pDev );
if( !cdc.chunkModeActive.isValid() || !cdc.chunkModeActive.isWriteable() )
{
return false;
}
cdc.chunkModeActive.write( TBoolean::bTrue );
DepthControl dctl( pDev );
if( !dctl.depthAcquisitionMode.isValid() ||
!dctl.depthAcquisitionMode.isWriteable() ||
!dctl.depthExposureAdaptTimeout.isValid() ||
!dctl.depthExposureAdaptTimeout.isWriteable() ||
!dctl.depthQuality.isValid() ||
!dctl.depthQuality.isWriteable() ||
!dctl.depthMinDepth.isValid() ||
!dctl.depthMinDepth.isWriteable() ||
!dctl.depthMaxDepth.isValid() ||
!dctl.depthMaxDepth.isWriteable() )
{
return false;
}
if( !supportsEnumStringValue( dctl.depthAcquisitionMode, "SingleFrameOut1" ) ||
!supportsEnumStringValue( dctl.depthQuality, "Medium" ) ||
!supportsValue( dctl.depthExposureAdaptTimeout, 0.0 ) ||
!supportsValue( dctl.depthMinDepth, 1.0 ) ||
!supportsValue( dctl.depthMaxDepth, 3.0 ) )
{
return false;
}
dctl.depthAcquisitionMode.writeS( "SingleFrameOut1" );
dctl.depthQuality.writeS( "Medium" );
dctl.depthExposureAdaptTimeout.write( 0.0 );
dctl.depthMinDepth.write( 1.0 );
dctl.depthMaxDepth.write( 3.0 );
return true;
}
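// Callback executed for every request delivered by the acquisition helper: it triggers the
// next depth acquisition, prints statistics every 100 requests and, when display support is
// compiled in, shows the 2D and disparity buffer parts and optionally converts the disparity
// data into a point cloud.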
void myThreadCallback( shared_ptr<Request> pRequest, ThreadParameter& threadParameter )
{
threadParameter.dctl_.depthAcquisitionTrigger.call();
threadParameter.requestsCaptured_++;
if( threadParameter.requestsCaptured_ % 100 == 0 )
{
const Statistics& s = threadParameter.statistics_;
cout << "Info from " << threadParameter.pDev_->serial.read()
<< ": " << s.framesPerSecond.name() << ": " << s.framesPerSecond.readS()
<< ", " << s.errorCount.name() << ": " << s.errorCount.readS()
<< ", " << s.captureTime_s.name() << ": " << s.captureTime_s.readS() << endl;
}
if( pRequest->isOK() )
{
const unsigned int bufferPartCount = pRequest->getBufferPartCount();
if( bufferPartCount > 0 )
{
for( unsigned int i = 0; i < bufferPartCount; i++ )
{
const BufferPart& bufferPart( pRequest->getBufferPart( i ) );
#ifdef USE_DISPLAY
const TBufferPartDataType bufferDataType = bufferPart.dataType.read();
cout << "Image captured: " << bufferPart.width.read() << "x" << bufferPart.height.read() << " buffer contains: " << bufferPart.dataType.readS() << " data" << endl;
if( bufferDataType == bpdt2DImage )
{
threadParameter.displayWindowPrimary_.GetImageDisplay().SetDisplayMode( TDisplayMode::DM_Default );
threadParameter.displayWindowPrimary_.GetImageDisplay().SetImage( bufferPart.getImageBufferDesc().getBuffer() );
threadParameter.displayWindowPrimary_.GetImageDisplay().Update();
}
else if( bufferDataType == bpdt3DImage )
{
threadParameter.displayWindowSecondary_.GetImageDisplay().SetDisplayMode( TDisplayMode::DM_Default );
threadParameter.displayWindowSecondary_.GetImageDisplay().SetImage( bufferPart.getImageBufferDesc().getBuffer() );
threadParameter.displayWindowSecondary_.GetImageDisplay().Update();
if( threadParameter.pointCloudCalculationAllowed )
{
disparityToPointCloud( pRequest, threadParameter.pDev_ );
}
}
else
{
cout << "The data type of buffer part " << i << " of the current request is reported as " << bufferPart.dataType.readS() << ", which will NOT be handled by this example application" << endl;
}
#else
cout << "Image captured: " << bufferPart.width.read() << "x" << bufferPart.height.read() << "buffer contains: " << bufferPart.dataType.readS() << " data" << endl;
#endif
}
}
}
else
{
cout << "Error: " << pRequest->requestResult.readS() << endl;
}
}
#include <apps/Common/exampleHelper.h>
#include <common/crt/mvstdlib.h>
#include <fstream>
#include <string>
#include <iostream>
#include <functional>
#include <map>
#include <mvIMPACT_CPP/mvIMPACT_acquire_helper.h>
#include <mvIMPACT_CPP/mvIMPACT_acquire.h>
#include <mvIMPACT_CPP/mvIMPACT_acquire_GenICam.h>
#ifdef _WIN32
# include <mvDisplay/Include/mvIMPACT_acquire_display.h>
# define USE_DISPLAY
#endif
using namespace std;
using namespace mvIMPACT::acquire;
using namespace mvIMPACT::acquire::GenICam;
#ifdef USE_DISPLAY
using namespace mvIMPACT::acquire::display;
#endif
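// Returns true if the file specified by 'fullPath' exists and can be opened for reading.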
bool checkFileExists( const string& fullPath )
{
ifstream file( fullPath.c_str() );
return file.good();
}
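// Small wrapper around mv_getenv_s: returns 1 if the environment variable 'name' exists
// (storing its value in '*pVal' when a destination string is supplied), 0 otherwise.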
inline int getenv(
const string& name,
string* pVal = 0 )
{
size_t bufSize = 0;
int result = mv_getenv_s( &bufSize, 0, 0, name.c_str() );
if( result == 0 )
{
if( bufSize > 0 )
{
auto_array_ptr<char> buf( bufSize );
result = mv_getenv_s( &bufSize, buf.get(), buf.parCnt(), name.c_str() );
if( ( result == 0 ) && pVal )
{
*pVal = string( buf.get() );
}
return 1;
}
}
return 0;
}
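// Searches every directory listed in the GenTL producer path environment variable
// (GENICAM_GENTL64_PATH or GENICAM_GENTL32_PATH) for the SGM producer library bvs_sgm_producer.cti.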
bool isSGMProducerAvailable( void )
{
static const string s_pathVariable( ( sizeof( void* ) == 8 ) ? "GENICAM_GENTL64_PATH" : "GENICAM_GENTL32_PATH" );
#if defined(linux) || defined(__linux) || defined(__linux__) || defined(__APPLE__)
const static string PATH_SEPARATOR( ":" );
#elif defined(_WIN32) || defined(WIN32) || defined(__WIN32__)
const static string PATH_SEPARATOR( ";" );
#else
# error Unsupported target platform
#endif
string pathValue;
if( getenv( s_pathVariable, &pathValue ) > 0 )
{
cout << "Checking for SGM Producer presence. This might take some time..." << endl;
const static string s_libName = "bvs_sgm_producer.cti";
if( !pathValue.empty() )
{
string::size_type posStart = 0;
string::size_type posEnd = 0;
while( ( posStart = pathValue.find( PATH_SEPARATOR, posStart ) ) != string::npos )
{
const string filePath = pathValue.substr( posEnd, posStart - posEnd );
if( checkFileExists( filePath + "/" + s_libName ) )
{
return true;
}
posEnd = posStart + PATH_SEPARATOR.length();
posStart += 1;
}
if( checkFileExists( pathValue.substr( posEnd ) + "/" + s_libName ) )
{
return true;
}
cout << "Error: " << s_libName << " not found" << endl;
}
else
{
cout << "Unable to continue! No " << s_pathVariable << " environment variable is empty. Please follow manual or ask " << COMPANY_NAME << " technical support for advice";
}
}
else
{
cout << "Unable to continue! No " << s_pathVariable << " environment variable found. Please follow manual or ask " << COMPANY_NAME << " technical support for advice";
}
return false;
}
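// Asks the user whether a point cloud shall be calculated for every received disparity image.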
bool isPointCloudCalculationAllowed( void )
{
cout << "Do you want to calculate the point cloud for each received image (please note, this will consume quite some CPU resources)? (Y/N): ";
char pointCloudSelection;
cin >> pointCloudSelection;
cin.get();
return tolower( pointCloudSelection ) == 'y';
}
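// Converts the disparity data of the current request into 3D points using the stereo
// reprojection x = (u + 0.5 - cu) * b/d, y = (v + 0.5 - cv) * b/d, z = f * b/d, where d is the
// scaled disparity, b the baseline, f the focal length and (cu, cv) the principal point.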
void disparityToPointCloud( shared_ptr<Request> pRequest,
Device* pDev )
{
// Scan3d calibration parameters (principal point, baseline, coordinate scale and focal
// length) are assumed to be exposed via the device's GenICam Scan3dControl category.
Scan3dControl s3dc( pDev );
const double scan3DPrincipalPointU = s3dc.scan3dPrincipalPointU.read();
const double scan3DPrincipalPointV = s3dc.scan3dPrincipalPointV.read();
const double baseline = s3dc.scan3dBaseline.read();
const double scan3dCoordScale = s3dc.scan3dCoordinateScale.read();
const double scan3DFocalLength = s3dc.scan3dFocalLength.read();
map<double, pair<double, double>> pointCloudData;
for( int y = 0; y < pRequest->imageHeight.read(); y++ )
{
unsigned short* p = reinterpret_cast< unsigned short* >( ( char* )pRequest->imageData.read() + y * pRequest->imageLinePitch.read() );
for( int x = 0; x < pRequest->imageWidth.read(); x++ )
{
const double dik = *p++ * scan3dCoordScale;
const double px = ( x + 0.5 - scan3DPrincipalPointU ) * ( baseline / dik );
const double py = ( y + 0.5 - scan3DPrincipalPointV ) * ( baseline / dik );
const double pz = ( scan3DFocalLength * ( baseline / dik ) );
pointCloudData.emplace( pz, make_pair( px, py ) );
}
}
}
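// Optionally limits the device link throughput, which is required when the camera is connected
// via a single GigE link instead of 2.5 GigE.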
int setBandwidth(
Device* pDev )
{
cout << "Do you want to limit your network bandwidth (required if you use a single GigE connection instead of 2.5 GigE)? (Y/N): ";
char selection;
cin >> selection;
if( tolower( selection ) == 'y' )
{
// The throughput limit features are assumed to be exposed via the GenICam DeviceControl category.
DeviceControl dc( pDev );
dc.deviceLinkThroughputLimitMode.write( TBoolean::bTrue );
dc.deviceLinkThroughputLimit.write( 90000000 );
}
cin.get();
return 0;
}
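// Data shared with the acquisition callback: the device, a request counter, driver statistics,
// access to the depth control features, the user's point cloud choice and, when display support
// is compiled in, one window for the 2D image and one for the disparity image.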
struct ThreadParameter
{
Device* pDev_;
unsigned int requestsCaptured_;
Statistics statistics_;
DepthControl dctl_;
const bool pointCloudCalculationAllowed;
#ifdef USE_DISPLAY
ImageDisplayWindow displayWindowPrimary_;
ImageDisplayWindow displayWindowSecondary_;
#endif
explicit ThreadParameter( Device* pDev ) : pDev_( pDev ), requestsCaptured_( 0 ), statistics_( pDev ), dctl_( pDev ), pointCloudCalculationAllowed{ isPointCloudCalculationAllowed() }
#ifdef USE_DISPLAY
, displayWindowPrimary_( "mvIMPACT_acquire sample, Device " + pDev_->serial.read() + " 2D Image" )
, displayWindowSecondary_( "mvIMPACT_acquire sample, Device " + pDev_->serial.read() + " Disparity Image" )
#endif
{}
ThreadParameter( const ThreadParameter& src ) = delete;
ThreadParameter& operator=( const ThreadParameter& rhs ) = delete;
};
void myThreadCallback( shared_ptr<Request> pRequest, ThreadParameter& threadParameter )
{
threadParameter.dctl_.depthAcquisitionTrigger.call();
threadParameter.requestsCaptured_++;
if( threadParameter.requestsCaptured_ % 100 == 0 )
{
const Statistics& s = threadParameter.statistics_;
cout << "Info from " << threadParameter.pDev_->serial.read()
}
if( pRequest->isOK() )
{
const unsigned int bufferPartCount = pRequest->getBufferPartCount();
if( bufferPartCount > 0 )
{
for( unsigned int i = 0; i < bufferPartCount; i++ )
{
const BufferPart& bufferPart( pRequest->getBufferPart( i ) );
#ifdef USE_DISPLAY
cout << "Image captured: " << bufferPart.width.read() << "x" << bufferPart.height.read() << " buffer contains: " << bufferPart.dataType.readS() << " data" << endl;
if( bufferDataType == bpdt2DImage )
{
threadParameter.displayWindowPrimary_.GetImageDisplay().SetDisplayMode( TDisplayMode::DM_Default );
threadParameter.displayWindowPrimary_.GetImageDisplay().SetImage( bufferPart.getImageBufferDesc().getBuffer() );
threadParameter.displayWindowPrimary_.GetImageDisplay().Update();
}
else if( bufferDataType == bpdt3DImage )
{
threadParameter.displayWindowSecondary_.GetImageDisplay().SetDisplayMode( TDisplayMode::DM_Default );
threadParameter.displayWindowSecondary_.GetImageDisplay().SetImage( bufferPart.getImageBufferDesc().getBuffer() );
threadParameter.displayWindowSecondary_.GetImageDisplay().Update();
if( threadParameter.pointCloudCalculationAllowed )
{
disparityToPointCloud( pRequest, threadParameter.pDev_ );
}
}
else
{
cout << "The data type of buffer part " << i << " of the current request is reported as " << bufferPart.dataType.readS() << ", which will NOT be handled by this example application" << endl;
}
#else
cout << "Image captured: " << bufferPart.width.read() << "x" << bufferPart.height.read() << "buffer contains: " << bufferPart.dataType.readS() << " data" << endl;
#endif
}
}
}
else
{
cout << "Error: " << pRequest->requestResult.readS() << endl;
}
}
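// This sample only works with BVS 3D-RV1 devices, so restrict the device selection accordingly.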
bool isDeviceSupportedBySample( const Device* const pDev )
{
const string product( pDev->product.read() );
return( product.find( "BVS 3D-RV1" ) != string::npos );
}
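// Enables the 'Intensity' and 'Disparity' components, continuous auto exposure, synchronized
// multi-part acquisition and chunk data, and configures the depth acquisition for single frame
// mode with medium quality and a depth range of 1.0 to 3.0. Returns false if any required
// feature or value is not supported.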
bool configureDevice( Device* pDev )
{
ImageFormatControl ifc( pDev );
if( ifc.componentSelector.isValid() && ifc.componentSelector.isWriteable() )
{
if( !supportsEnumStringValue( ifc.componentSelector, "Intensity" ) )
{
return false;
}
ifc.componentSelector.writeS( "Intensity" );
ifc.componentEnable.write( TBoolean::bTrue );
if( !supportsEnumStringValue( ifc.componentSelector, "Disparity" ) )
{
return false;
}
ifc.componentSelector.writeS( "Disparity" );
ifc.componentEnable.write( TBoolean::bTrue );
}
else
{
return false;
}
AcquisitionControl acq( pDev );
if( !acq.exposureAuto.isValid() ||
!acq.exposureAuto.isWriteable() ||
!acq.acquisitionMultiPartMode.isValid() ||
!acq.acquisitionMultiPartMode.isWriteable() )
{
return false;
}
if( !supportsEnumStringValue( acq.exposureAuto, "Continuous" ) ||
!supportsEnumStringValue( acq.acquisitionMultiPartMode, "SynchronizedComponents" ) )
{
return false;
}
acq.exposureAuto.writeS( "Continuous" );
acq.acquisitionMultiPartMode.writeS( "SynchronizedComponents" );
ChunkDataControl cdc( pDev );
if( !cdc.chunkModeActive.isValid() || !cdc.chunkModeActive.isWriteable() )
{
return false;
}
cdc.chunkModeActive.write( TBoolean::bTrue );
DepthControl dctl( pDev );
if( !dctl.depthAcquisitionMode.isValid() ||
!dctl.depthAcquisitionMode.isWriteable() ||
!dctl.depthExposureAdaptTimeout.isValid() ||
!dctl.depthExposureAdaptTimeout.isWriteable() ||
!dctl.depthQuality.isValid() ||
!dctl.depthQuality.isWriteable() ||
!dctl.depthMinDepth.isValid() ||
!dctl.depthMinDepth.isWriteable() ||
!dctl.depthMaxDepth.isValid() ||
!dctl.depthMaxDepth.isWriteable() )
{
return false;
}
if( !supportsEnumStringValue( dctl.depthAcquisitionMode, "SingleFrameOut1" ) ||
!supportsEnumStringValue( dctl.depthQuality, "Medium" ) ||
!supportsValue( dctl.depthExposureAdaptTimeout, 0.0 ) ||
!supportsValue( dctl.depthMinDepth, 1.0 ) ||
!supportsValue( dctl.depthMaxDepth, 3.0 ) )
{
return false;
}
dctl.depthAcquisitionMode.writeS( "SingleFrameOut1" );
dctl.depthQuality.writeS( "Medium" );
dctl.depthExposureAdaptTimeout.write( 0.0 );
dctl.depthMinDepth.write( 1.0 );
dctl.depthMaxDepth.write( 3.0 );
return true;
}
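// Checks that the SGM producer is installed, lets the user pick a supported device, opens and
// configures it and then runs a continuous acquisition until [ENTER] is pressed.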
int main( void )
{
if( !isSGMProducerAvailable() )
{
cout << "Couldn't locate bvs_sgm_producer.cti file on your system. Please follow manual or ask " << COMPANY_NAME << " technical support for advice" << endl;
return 1;
}
DeviceManager devMgr;
Device* pDev = getDeviceFromUserInput( devMgr, isDeviceSupportedBySample );
if( pDev == nullptr )
{
cout << "Unable to continue! Press [ENTER] to end the application" << endl;
cin.get();
return 1;
}
try
{
// Assumed typical setup for this sample: select the GenICam interface layout and user
// controlled acquisition start/stop behaviour, then open the device.
pDev->interfaceLayout.write( dilGenICam );
pDev->acquisitionStartStopBehaviour.write( assbUser );
pDev->open();
}
catch( const ImpactAcquireException& e )
{
cout << "An error occurred while opening the device (error code: " << e.getErrorCode() << ")." << endl
<< "Press [ENTER] to end the application" << endl;
cin.get();
return 1;
}
cout << "Initialising the device. This might take some time..." << endl;
if( !configureDevice( pDev ) )
{
cout << "Unable to continue! The selected device does not support some of the required features. Press [ENTER] to end the application" << endl;
cin.get();
return 1;
}
setBandwidth( pDev );
ThreadParameter threadParam( pDev );
cout << "Press [ENTER] to stop the acquisition thread" << endl;
threadParam.dctl_.depthAcquisitionTrigger.call();
helper::RequestProvider requestProvider( pDev );
requestProvider.acquisitionStart( myThreadCallback, ref( threadParam ) );
cin.get();
requestProvider.acquisitionStop();
return 0;
}