本文整理汇总了C++中CameraPtr类的典型用法代码示例。如果您正苦于以下问题：C++ CameraPtr类的具体用法？C++ CameraPtr怎么用？C++ CameraPtr使用的例子？那么，这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了CameraPtr类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: SetSubsplatStencil
void MultiResLayer::SetSubsplatStencil(CameraPtr const & vp_camera)
{
	// Feed the depth-derivative and normal-cone textures to the stencil
	// technique, then mark the sub-splat stencil on every multi-res level.
	*subsplat_depth_deriv_tex_param_ = depth_deriative_tex_;
	*subsplat_normal_cone_tex_param_ = normal_cone_tex_;

	float const far_plane = vp_camera->FarPlane();
	*subsplat_depth_normal_threshold_param_ = float2(0.001f * far_plane, 0.77f);
	*subsplat_far_plane_param_ = float2(far_plane, 1.0f / far_plane);

	RenderEngine& render_engine = Context::Instance().RenderFactoryInstance().RenderEngineInstance();
	size_t const num_levels = multi_res_fbs_.size();
	for (size_t level = 0; level < num_levels; ++ level)
	{
		render_engine.BindFrameBuffer(multi_res_fbs_[level]);
		// Reset the whole framebuffer; stencil starts at 128.
		multi_res_fbs_[level]->Clear(FrameBuffer::CBM_Color | FrameBuffer::CBM_Depth | FrameBuffer::CBM_Stencil, Color(0, 0, 0, 0), 0.0f, 128);

		// Tell the shader which mip pair it works on and whether neighbours exist.
		*subsplat_cur_lower_level_param_ = int2(static_cast<int>(level), static_cast<int>(level + 1));
		*subsplat_is_not_first_last_level_param_ = int2(level > 0, level < num_levels - 1);

		render_engine.Render(*subsplat_stencil_tech_, *rl_quad_);
	}
}
示例2: _setDefaultCamera
void _setDefaultCamera()
{
ScenePtr scene = _engine->getScene();
CameraPtr camera = _engine->getCamera();
FrameBufferPtr frameBuffer = _engine->getFrameBuffer();
const Vector2i& frameSize = frameBuffer->getSize();
const Boxf& worldBounds = scene->getWorldBounds();
const Vector3f& target = worldBounds.getCenter();
const Vector3f& diag = worldBounds.getSize();
Vector3f position = target;
position.z() -= diag.z();
const Vector3f up = Vector3f( 0.f, 1.f, 0.f );
camera->setInitialState( position, target, up );
camera->setAspectRatio(
static_cast< float >( frameSize.x()) /
static_cast< float >( frameSize.y()));
}
示例3: PrepareForLightPass
// Prepares pipeline state for the deferred light pass: binds the quad
// geometry, uploads a WVP matrix that places a camera-facing quad, uploads
// camera data for the pixel shader, and binds the SSAO/depth resources.
// NOTE(review): "AVOID" is presumably a project macro expanding to void — confirm.
AVOID DeferredRenderer::PrepareForLightPass(CameraPtr pCamera)
{
//set vertex buffer with positions
m_pVertices->Set(0, 0);
//set vertex buffer with texture data
m_pTexCoords->Set(1, 0);
//bind matrix constant buffer to the pipeline
Mat4x4 trans;
//trans.CreateTranslation(pCamera->GetLookAt() + 10000 * pCamera->GetDir());
// Place the quad 500 units in front of the camera along its view direction.
trans.CreateTranslation(pCamera->GetPosition() + 500* pCamera->GetDir());
Mat4x4 rot;
rot = rot.CreateRollPitchYaw(pCamera->GetRoll(), pCamera->GetPitch(), pCamera->GetYaw());
// rotation * translation * view * orthographic projection; transposed before
// upload (presumably for the shader's matrix layout — confirm).
Mat4x4 WVP = rot * trans * pCamera->GetView() * CreateOrthoProjectionLH(SCREEN_WIDTH, SCREEN_HEIGHT, 1.0f, 1000.0f);
WVP.Transpose();
m_pMatrixBuffer->UpdateSubresource(0, NULL, &WVP, 0, 0);
m_pMatrixBuffer->Set(0, ST_Vertex);
// Per-camera constant-buffer layout (currently unused — see note below).
struct CameraBuffer
{
Mat4x4 inverseViewProjection;
Vec pos;
};
// NOTE(review): cameraBuffer is fully populated below but never uploaded —
// the UpdateSubresource call that would use it is commented out, and only
// the raw camera position is uploaded instead. Confirm which layout the
// pixel shader actually reads; if it expects CameraBuffer, this is a bug.
CameraBuffer cameraBuffer;
Mat4x4 inverseViewProjection = pCamera->GetViewProjection();
inverseViewProjection.Inverse();
cameraBuffer.pos = pCamera->GetPosition();
cameraBuffer.inverseViewProjection = inverseViewProjection;
cameraBuffer.inverseViewProjection.Transpose();
// NOTE(review): takes the address of GetPosition()'s return value — only
// well-formed if GetPosition returns a reference; verify.
m_pcbCameraPos->UpdateSubresource(0, nullptr, &pCamera->GetPosition(), 0, 0);
//m_pcbCameraPos->UpdateSubresource(0, NULL, &cameraBuffer, 0, 0);
m_pcbCameraPos->Set(0, ST_Pixel);
//pCamera->SetViewport();
SetGlobalViewport();
//set shader resources
m_pSSAOBlurredSRV->Set(6, ST_Pixel);
m_pDepthSRV->Set(8, ST_Pixel);
//set blending functionality
//this->BlendLightPass()->Set(nullptr);
}
示例4: processCamera
//---------------------------------------------------------------------
/// Builds a Camera from a camera XML node (clip planes, viewport rect,
/// position, look-at target and move speed) and installs it as the
/// engine's active camera.
void SceneResource::processCamera(const Util::XmlNode * cameraNode) const
{
	CameraPtr camera;
	/// params
	{
		Util::String nearClipStr(mXmlReader->getAttribute(cameraNode, "near_clip"));
		Util::real nearClip = boost::lexical_cast<Util::real>(nearClipStr);

		Util::String farClipStr(mXmlReader->getAttribute(cameraNode, "far_clip"));
		Util::real farClip = boost::lexical_cast<Util::real>(farClipStr);

		// viewport_rect packs four integers; the first pair and second pair
		// are forwarded as two UintPairs (presumably origin and size — confirm
		// against the Camera constructor).
		Util::String viewportRectStr(mXmlReader->getAttribute(cameraNode, "viewport_rect"));
		XMVECTOR viewportRect = Util::StringToVector(viewportRectStr, 4);

		camera = boost::make_shared<Camera>(nearClip, farClip,
			Util::UintPair(XMVectorGetIntX(viewportRect), XMVectorGetIntY(viewportRect)),
			Util::UintPair(XMVectorGetIntZ(viewportRect), XMVectorGetIntW(viewportRect)));

		// Fixed typo in the error message ("creat" -> "create").
		IF_NULL_EXCEPTION(camera, "Camera create failed!");
	}
	/// position
	{
		Util::String posStr(mXmlReader->getAttribute(cameraNode, "position"));
		camera->setPosition(Util::StringToVector(posStr, 3));
	}
	/// lookat
	{
		Util::String str(mXmlReader->getAttribute(cameraNode, "lookat"));
		camera->lookAt(Util::StringToVector(str, 3));
	}
	/// movespeed
	{
		Util::String str(mXmlReader->getAttribute(cameraNode, "move_speed"));
		camera->setMoveSpeed(boost::lexical_cast<Util::real>(str));
	}

	EngineManager::getSingleton().setCamera(camera);
}
示例5: draw
void Character::draw(ALLEGRO_DISPLAY *display, CameraPtr camera) {
	// Shift the character's rect into screen space by the camera offset
	// and fill it with solid red.
	const Rect screenRect = OffsetRect(getRect(), camera->getOffset());
	al_draw_filled_rectangle(
		screenRect.getLeft(), screenRect.getTop(),
		screenRect.getRight(), screenRect.getBottom(),
		al_map_rgb(255, 0, 0));
}
示例6: mouse_movement_callback
void mouse_movement_callback(GLFWwindow *, double xpos, double ypos){
	// Do nothing unless the scene has a main camera that tracks the mouse.
	if(!Director::getScene()->hasMainCamera()){
		return;
	}
	CameraPtr cam = Director::getScene()->getCamera();
	if(cam->fixedMouse()){
		return;
	}

	double dx = (xpos - lastX)*mouse_sensitivity;
	double dy = (ypos - lastY)*mouse_sensitivity;
	lastX = xpos;
	lastY = ypos;

	// Convert pixel deltas to degrees relative to the window dimensions.
	float theta = cam->getTheta() + (float)dx * 360.0f / Global::ScreenWidth;
	float phi = cam->getPhi() - (float)dy * 360.0f / Global::ScreenHeight;

	// Clamp pitch so the camera cannot flip over the poles.
	if (phi > 80.0f) phi = 80.0f;
	else if (phi < -80.0f) phi = -80.0f;

	cam->setAngles(theta, phi);
}
示例7: diff
Point diff(const PointPair &pp, CameraPtr view1, CameraPtr view2, const Model &) {
	// Cast a ray through each observed image point and measure how close
	// the two rays pass to each other in space.
	const Point &obs1 = pp.first;
	const Point &obs2 = pp.second;
	Ray3d ray1 = view1->unproject(obs1[0], obs1[1]);
	Ray3d ray2 = view2->unproject(obs2[0], obs2[1]);

	Ray3d::Point closest1, closest2;
	ray1.closestPoints(ray2, closest1, closest2);

	Point result(1);
	// Distance between the two closest points is the raw error metric.
	result[0] = (closest1 - closest2).norm();

	// Scale the 3D gap by focal length over depth in each view to get an
	// approximate image-space distance, and sum both views' contributions.
	Vector3d midpoint = (closest1 + closest2) * 0.5;
	Vector3d local1 = view1->fromGlobalToLocal(midpoint);
	Vector3d local2 = view2->fromGlobalToLocal(midpoint);
	const double gap = result[0];
	const double err1 = (0.5 * view1->K()(0, 0) * gap) / local1.z();
	const double err2 = (0.5 * view2->K()(0, 0) * gap) / local2.z();
	result[0] = err1 + err2;
	return result;
}
示例8: while
// Drains the camera image input port, converts the most recent frame into
// an Image according to its pixel format, and posts it asynchronously to
// the associated camera object.
void CameraImageInput::read()
{
if(port.isNew()){
// Drain the port so only the newest frame is kept.
do {
port.read();
} while(port.isNew());
std::shared_ptr<Image> image;
auto srcImage = timedCameraImage.data.image;
switch(srcImage.format){
case Img::CF_RGB:
image = createImageFromRawData(srcImage, 3); // 3 channels (RGB)
break;
case Img::CF_GRAY:
image = createImageFromRawData(srcImage, 1); // single channel
break;
#ifndef USE_BUILTIN_CAMERA_IMAGE_IDL
case Img::CF_JPEG:
case Img::CF_PNG:
#else
case Img::CF_GRAY_JPEG:
case Img::CF_RGB_JPEG:
#endif
// Compressed formats go through the format-aware decoder.
image = createImageFromImageFormat(srcImage);
break;
default:
// Unknown format: leave image null so nothing is published.
break;
}
if(image){
// Copy the camera pointer so the deferred lambda does not depend on
// this object's lifetime; the update runs later via callLater
// (presumably on the main thread — confirm callLater semantics).
CameraPtr tmpCamera = camera;
callLater([tmpCamera, image]() mutable {
tmpCamera->setImage(image);
tmpCamera->notifyStateChange();
});
}
}
}
示例9: Camera
void IECoreArnold::RendererImplementation::camera( const std::string &name, const IECore::CompoundDataMap ¶meters )
{
CameraPtr cortexCamera = new Camera( name, 0, new CompoundData( parameters ) );
cortexCamera->addStandardParameters();
AtNode *arnoldCamera = CameraAlgo::convert( cortexCamera.get() );
string nodeName = boost::str( boost::format( "ieCoreArnold:camera:%s" ) % name );
AiNodeSetStr( arnoldCamera, "name", nodeName.c_str() );
AtNode *options = AiUniverseGetOptions();
AiNodeSetPtr( options, "camera", arnoldCamera );
applyTransformToNode( arnoldCamera );
const V2iData *resolution = cortexCamera->parametersData()->member<V2iData>( "resolution" );
AiNodeSetInt( options, "xres", resolution->readable().x );
AiNodeSetInt( options, "yres", resolution->readable().y );
const FloatData *pixelAspectRatio = cortexCamera->parametersData()->member<FloatData>( "pixelAspectRatio" );
AiNodeSetFlt( options, "aspect_ratio", 1.0f / pixelAspectRatio->readable() ); // arnold is y/x, we're x/y
}
示例10: Draw
//----------------------------------------------------------------------------
void ConvexRegionManager::Draw (Renderer& rkRenderer)
{
    // Find the convex region containing the eye point and draw from there;
    // fall back to the outside scene when the eye is in no region.
    CameraPtr spCamera = rkRenderer.GetCamera();
    Vector3f kEyePoint = spCamera->GetLocation();
    if ( m_bUseEyePlusNear )
    {
        // Nudge the query point to the near plane along the view direction.
        kEyePoint += spCamera->GetFrustumNear()*spCamera->GetDirection();
    }

    ConvexRegion* pkContaining = GetContainingRegion(kEyePoint);
    if ( !pkContaining )
    {
        // outside the set of regions, draw the outside scene (if it exists)
        if ( GetOutside() )
            GetOutside()->Draw(rkRenderer);
        return;
    }

    // inside the set of regions, start drawing with region of camera
    pkContaining->Draw(rkRenderer);
}
示例11: camera
IECore::CameraPtr GafferScene::camera( const ScenePlug *scene, const IECore::CompoundObject *globals )
{
	// When no globals are supplied, compute them from the scene and keep
	// them alive for the rest of the call.
	ConstCompoundObjectPtr computedGlobals;
	if( !globals )
	{
		computedGlobals = scene->globalsPlug()->getValue();
		globals = computedGlobals.get();
	}

	const StringData *cameraPathData = globals->member<StringData>( "option:render:camera" );
	if( !cameraPathData )
	{
		// No render camera named in the options: synthesise a default
		// camera configured from the globals.
		CameraPtr defaultCamera = new IECore::Camera();
		applyCameraGlobals( defaultCamera.get(), globals );
		return defaultCamera;
	}

	// Delegate to the path-based overload for the named camera.
	ScenePlug::ScenePath cameraPath;
	ScenePlug::stringToPath( cameraPathData->readable(), cameraPath );
	return camera( scene, cameraPath, globals );
}
示例12: attachCamera
// Attaches an FPS-style camera to this object and syncs its target,
// position and rotation to the object's current transform.
// Fix: the original dereferenced mCamera and mFPSCamera (setTarget /
// mFPSCamera->target) BEFORE the `if (mCamera && mFPSCamera)` null check,
// making the check useless; all dereferences now sit inside the guard.
void SceneObject::attachCamera( CameraPtr cam )
{
	AssertMsg( cam->getFunctionality() == Camera::kFunc_FPS, "Cannot attach non-FPS cameras" );
	AssertMsg( mFPSCamera, "missing information about how to attach the camera" );

	mCamera = cam;
	if (mCamera && mFPSCamera)
	{
		mCamera->setTarget(mFPSCamera->target + getPosition());
		mFPSCamera->UpdatePosition(this, mCamera, getPosition());
		mFPSCamera->UpdateRotation(this, mCamera, getRotation());
	}
}
示例13: kmlAltitudeModeFromString
void OGRLIBKMLLayer::SetCamera( const char* pszCameraLongitude,
const char* pszCameraLatitude,
const char* pszCameraAltitude,
const char* pszCameraHeading,
const char* pszCameraTilt,
const char* pszCameraRoll,
const char* pszCameraAltitudeMode )
{
int isGX = FALSE;
int iAltitudeMode = kmlAltitudeModeFromString(pszCameraAltitudeMode, isGX);
if( isGX == FALSE && iAltitudeMode == kmldom::ALTITUDEMODE_CLAMPTOGROUND )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Camera altitudeMode should be different from %s",
pszCameraAltitudeMode);
return;
}
KmlFactory *poKmlFactory = m_poOgrDS->GetKmlFactory();
CameraPtr camera = poKmlFactory->CreateCamera();
camera->set_latitude(CPLAtof(pszCameraLatitude));
camera->set_longitude(CPLAtof(pszCameraLongitude));
camera->set_altitude(CPLAtof(pszCameraAltitude));
if( pszCameraHeading != NULL )
camera->set_heading(CPLAtof(pszCameraHeading));
if( pszCameraTilt != NULL )
{
double dfTilt = CPLAtof(pszCameraTilt);
if( dfTilt >= 0 && dfTilt <= 90 )
camera->set_tilt(dfTilt);
else
CPLError(CE_Warning, CPLE_AppDefined, "Invalid value for tilt: %s",
pszCameraTilt);
}
if( pszCameraRoll != NULL )
camera->set_roll(CPLAtof(pszCameraRoll));
if( isGX )
camera->set_gx_altitudemode(iAltitudeMode);
else
camera->set_altitudemode(iAltitudeMode);
m_poKmlLayer->set_abstractview(camera);
}
示例14:
void
RenderPipelineLayer::assignLight(RenderScenePtr scene, CameraPtr camera) noexcept
{
	assert(camera && scene);

	// Rebuild this frame's light list from the lights visible inside the
	// camera's view-projection frustum.
	auto& lightQueue = _renderQueue[RenderQueue::RQ_LIGHTING][RenderPass::RP_LIGHTS];
	lightQueue.clear();

	scene->computVisiableLight(camera->getViewProject(),
		[this](LightPtr light)
		{
			this->addRenderData(RenderQueue::RQ_LIGHTING, RenderPass::RP_LIGHTS, light);
		});
}
示例15: debug
void uvctest::testExposure() {
	// End-to-end exposure test: grab the first camera, expose on the default
	// CCD, write the image to test.fits, and for CCD index 2 additionally
	// attempt a demosaic and write the color result.
	debug(LOG_DEBUG, DEBUG_LOG, 0, "get the first camera device");
	CameraPtr camera = locator->getCamera(0);
	int ccdindex = default_ccdid;
	debug(LOG_DEBUG, DEBUG_LOG, 0, "get the CCD no %d", ccdindex);
	CcdPtr ccd = camera->getCcd(ccdindex);

	Exposure exposure(ccd->getInfo().getFrame(),
		default_exposuretime);
	debug(LOG_DEBUG, DEBUG_LOG, 0, "start an exposure: %s",
		exposure.toString().c_str());
	ccd->startExposure(exposure);
	ccd->exposureStatus();

	// Take the last image of a short sequence.
	debug(LOG_DEBUG, DEBUG_LOG, 0, "retrieve an image");
	ImageSequence sequence = ccd->getImageSequence(2);
	ImagePtr image = sequence[sequence.size() - 1];
	debug(LOG_DEBUG, DEBUG_LOG, 0, "image retrieved");

	// write the image to a file
	unlink("test.fits");
	FITSout file("test.fits");
	file.write(image);

	if (ccdindex != 2) {
		return;
	}

	// For CCD index 2 the image is expected to be an 8-bit mosaic; attempt
	// a bilinear demosaic and save the color result as well.
	DemosaicBilinear<unsigned char> demosaicer;
	Image<unsigned char> *mosaicimg
		= dynamic_cast<Image<unsigned char> *>(&*image);
	if (NULL == mosaicimg) {
		debug(LOG_ERR, DEBUG_LOG, 0, "not a mosaic image");
		return;
	}
	Image<RGB<unsigned char> > *demosaiced = demosaicer(*mosaicimg);
	ImagePtr demosaicedptr(demosaiced);
	unlink("test-demosaiced.fits");
	FITSout demosaicedfile("test-demosaiced.fits");
	demosaicedfile.write(demosaicedptr);
}