This article collects typical usage examples of the C++ method MyWindow::callback. If you have been wondering how to use MyWindow::callback in C++, or what real code that calls it looks like, the hand-picked examples below may help. You can also explore further usage examples of the enclosing MyWindow class.
Two code examples of the MyWindow::callback method are shown below.
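Before diving into the full samples, the underlying pattern is simply to hand a free function to a display window via its callback method, so that the function is invoked when the user closes the window. The following is a minimal, self-contained sketch of that pattern. The MyWindow shown here is a hypothetical stand-in written purely for illustration; the real class ships with the mvIMPACT Acquire samples, and every name and signature below is an assumption, not taken from the SDK.

// Hypothetical sketch of the registration pattern used in the samples below.
// The real MyWindow class ships with the mvIMPACT Acquire samples; all names
// and signatures here are assumptions made purely for illustration.
#include <iostream>

typedef void ( *WindowCallbackFn )( void* pUserData );

class MyWindow
{
    WindowCallbackFn callback_;
    void* userData_;
public:
    MyWindow() : callback_( 0 ), userData_( 0 ) {}
    // register a function that the window will invoke once it gets closed
    void callback( WindowCallbackFn fn, void* pUserData = 0 )
    {
        callback_ = fn;
        userData_ = pUserData;
    }
    void show( void ) {}
    // called by the (imaginary) window system when the user closes the window
    void close( void )
    {
        if( callback_ )
        {
            callback_( userData_ );
        }
    }
};

static void onWindowClosed( void* /*pUserData*/ )
{
    std::cout << "window closed, shutting down" << std::endl;
}

int main( void )
{
    MyWindow window;
    window.show();
    window.callback( onWindowClosed ); // same registration call as in the samples below
    window.close();                    // simulate the user closing the window
    return 0;
}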
Example 1: main
//......... part of the code has been omitted here .........
cout << "Invalid command line parameter: '" << param << "' (ignored)." << endl;
}
}
}
}
else
{
cout << "No command line parameters specified. Available parameters:" << endl
<< " 'frameRate' or 'fr' to specify the frame rate(frames per second per sensor head) of the resulting data stream" << endl
<< " 'exposureTime' or 'et' to specifiy the exposure time per frame in us" << endl
<< " 'width' or 'w' to specifiy the width of the AOI" << endl
<< " 'height' or 'h' to specifiy the height of the AOI" << endl
<< endl
<< "USAGE EXAMPLE:" << endl
<< " SynchronousCaptureMultipleInputs et=5000 frameRate=5" << endl << endl;
}
    Device* pDev = getDeviceFromUserInput( devMgr, isDeviceSupportedBySample );
    if( !pDev )
    {
        cout << "Unable to continue!" << endl;
        cout << "Press [ENTER] to end the application" << endl;
        cin.get();
        return 0;
    }
    try
    {
        cout << "Please note, that this sample (depending on the selected frame rate and resolution) might require a lot" << endl
             << "of network bandwidth, thus to achieve optimal results, it's crucial to have" << endl
             << " - a good, reliable network controller (we recommend the Intel PRO/1000 series)" << endl
             << " - the latest driver for the network controller installed" << endl
             << " - jumbo frames enabled and the receive and transmit buffer for the network controller set to max. values" << endl
             << " - the InterfaceMTU on the mvBlueLYNX-M7 set to its maximum value" << endl
             << endl
             << "In case of 'rrFrameIncomplete' errors the reason is most certainly to be found in one of "
             << "the requirements listed above not being met."
             << endl;
        cout << "Will try to synchronize sensor heads (" << frameRate_Hz << " fps per head with " << exposureTime_us << "us exposure time per frame) now" << endl
             << "During this operation the device will be initialised. This might take some time..." << endl;
        Connector connector( pDev );
        CameraSettingsBlueCOUGAR cs( pDev );
        try
        {
            if( width != -1 )
            {
                cs.aoiWidth.write( width );
            }
            if( height != -1 )
            {
                cs.aoiHeight.write( height );
            }
        }
        catch( const ImpactAcquireException& e )
        {
            cout << "Failed to set up AOI: " << e.getErrorString() << "(" << e.getErrorCodeAsString() << ")" << endl;
        }
        const int SENSOR_HEAD_COUNT = connector.videoChannel.read( plMaxValue ) + 1;
        setupHRTC( pDev, frameRate_Hz, exposureTime_us, SENSOR_HEAD_COUNT );
        // initialise display windows
        // IMPORTANT: It's NOT safe to create multiple display windows in multiple threads!!!
        ThreadParameter threadParam( pDev, SENSOR_HEAD_COUNT );
        DeviceComponentLocator locator( pDev, dltSystemSettings );
        PropertyI64 gevStreamChannelSelector;
        locator.bindComponent( gevStreamChannelSelector, "GevStreamChannelSelector" );
        PropertyI64 gevSCPD;
        locator.bindComponent( gevSCPD, "GevSCPD" );
        for( int i = 0; i < SENSOR_HEAD_COUNT; i++ )
        {
            gevStreamChannelSelector.write( i );
            gevSCPD.write( interPacketDelay );
            MyWindow* p = createWindow( cs.aoiWidth.read(), cs.aoiHeight.read() );
            p->show();
            p->callback( windowCallback );
            threadParam.displayData.push_back( new DisplayInfo( p ) );
        }
        // start the execution of the 'live' thread.
        cout << "Close any of the display windows to end the application" << endl;
        liveLoop( &threadParam );
        vector<DisplayInfo*>::size_type displayCount = threadParam.displayData.size();
        for( vector<DisplayInfo*>::size_type i = 0; i < displayCount; i++ )
        {
            delete threadParam.displayData[i]->pDisp;
            delete threadParam.displayData[i];
            threadParam.displayData[i] = 0;
        }
    }
    catch( const ImpactAcquireException& e )
    {
        // this e.g. might happen if the same device is already opened in another process...
        cout << "An error occurred while configuring device " << pDev->serial.read()
             << "(error code: " << e.getErrorCodeAsString() << "). Press [ENTER] to end the application..." << endl;
        cin.get();
    }
    return 0;
}
Example 2: liveLoop
//-----------------------------------------------------------------------------
unsigned int liveLoop( Device* pDev, bool boStoreFrames, const string& settingName, int iWidth, int iHeight, bool boSingleShotMode )
//-----------------------------------------------------------------------------
{
    cout << " == " << __FUNCTION__ << " - establish access to the statistic properties...." << endl;
    // establish access to the statistic properties
    Statistics statistics( pDev );
    cout << " == " << __FUNCTION__ << " - create an interface to the device found...." << endl;
    // create an interface to the device found
    FunctionInterface fi( pDev );
    if( !settingName.empty() )
    {
        cout << "Trying to load setting " << settingName << "..." << endl;
        int result = fi.loadSetting( settingName );
        if( result != DMR_NO_ERROR )
        {
            cout << "loadSetting( \"" << settingName << "\" ); call failed: " << ImpactAcquireException::getErrorCodeAsString( result ) << endl;
        }
    }
    // depending on the device and its sensor, we set an appropriate output format for displaying
    mvIMPACT::acquire::ImageDestination id( pDev );
    if( !std::string( "mvBlueFOX" ).compare( pDev->family.readS() ) )
    {
        mvIMPACT::acquire::InfoBlueFOX ibf( pDev );
        if( !std::string( "BayerMosaic" ).compare( ibf.sensorColorMode.readS() ) )
        {
            id.pixelFormat.writeS( "BGR888Packed" );
        }
    }
    else if( !std::string( "mvBlueCOUGAR" ).compare( pDev->family.readS() ) )
    {
        mvIMPACT::acquire::GenICam::ImageFormatControl ifc( pDev );
        if( s_boGreySensor )
        {
            ifc.pixelFormat.writeS( "Mono8" );
        }
        else
        {
            GenICamDeviceSetColorPixelFormat( pDev );
            id.pixelFormat.writeS( "BGR888Packed" );
        }
    }
    else if( !std::string( "mvBlueLYNX" ).compare( pDev->family.readS() ) )
    {
        mvIMPACT::acquire::GenICam::ImageFormatControl ifc( pDev );
        if( s_boGreySensor )
        {
            ifc.pixelFormat.writeS( "Mono8" );
        }
        else
        {
            GenICamDeviceSetColorPixelFormat( pDev );
            id.pixelFormat.writeS( "BGR888Packed" );
        }
    }
    // Pre-fill the capture queue with ALL buffers currently available. In case the acquisition engine is operated
    // manually, buffers can only be queued when they have been queued before the acquisition engine is started as well.
    // Even though there can be more than 1, for this sample we will work with the default capture queue
    int requestResult = DMR_NO_ERROR;
    int requestCount = 0;
    if( boSingleShotMode )
    {
        fi.imageRequestSingle();
        ++requestCount;
    }
    else
    {
        while( ( requestResult = fi.imageRequestSingle() ) == DMR_NO_ERROR )
        {
            ++requestCount;
        }
    }
    if( requestResult != DEV_NO_FREE_REQUEST_AVAILABLE )
    {
        cout << "Last result: " << requestResult << "(" << ImpactAcquireException::getErrorCodeAsString( requestResult ) << "), ";
    }
    cout << requestCount << " buffers requested";
    SystemSettings ss( pDev );
    if( ss.requestCount.hasMaxValue() )
    {
        cout << ", max request count: " << ss.requestCount.getMaxValue();
    }
    cout << endl;
    cout << "Press <<ENTER>> to end the application!!" << endl;
    MyWindow* pWindow = createWindow( iWidth, iHeight );
    pWindow->show();
    pWindow->callback( windowCallback );
    manuallyStartAcquisitionIfNeeded( pDev, fi );
    // run thread loop
    const Request* pRequest = 0;
    const unsigned int timeout_ms = 8000; // USB 1.1 on an embedded system needs a large timeout for the first image
//......... part of the code has been omitted here .........
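Both examples register a free function named windowCallback with each display window, but its definition lies outside the excerpts shown above. A plausible minimal implementation, assuming the callback merely raises a termination flag that the acquisition loop polls, might look like the sketch below; the flag name, the signature and this behaviour are assumptions for illustration, not taken from the original sample.

// Hypothetical sketch only: the real windowCallback belongs to the sample code
// omitted above. All names and signatures below are assumptions.
static volatile bool s_boTerminated = false; // assumed flag polled by the acquisition loop

static void windowCallback( void* /*pUserData*/ )
{
    // ask the acquisition loop to stop so the application can shut down cleanly
    s_boTerminated = true;
}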