This article collects typical usage examples of the C++ function ovr_GetTimeInSeconds. If you have been wondering what exactly ovr_GetTimeInSeconds does and how to call it, the curated code examples below should help.
In what follows, 15 code examples of ovr_GetTimeInSeconds are presented, sorted by popularity by default.
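Before the numbered examples, here is a minimal self-contained sketch of the function in isolation: ovr_GetTimeInSeconds() returns an absolute, high-resolution time in seconds, and the usual pattern is to difference two calls. The initialization below assumes an SDK 0.5-style ovr_Initialize(NULL); older 0.4 headers declare ovr_Initialize() with no arguments.

#include <cstdio>
#include "OVR_CAPI.h"   // LibOVR C API

int main()
{
    ovr_Initialize(NULL);                       // start the SDK so the timer is valid

    double prev = ovr_GetTimeInSeconds();       // absolute seconds, high resolution
    for (int frame = 0; frame < 10; ++frame)
    {
        // ... per-frame work would go here ...
        const double now = ovr_GetTimeInSeconds();
        printf("frame %d took %.6f s\n", frame, now - prev);
        prev = now;
    }

    ovr_Shutdown();
    return 0;
}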
Example 1: OVR_ASSERT
bool HSWDisplay::Dismiss()
{
    #if HSWDISPLAY_DEBUGGING
        if(GetKeyState(VK_SCROLL) & 0x0001) // If the scroll lock key is toggled on...
            return false;                   // Make it so that the display doesn't dismiss, so we can debug this.
    #endif

    // If dismissal is not requested yet, mark it as such.
    bool newlyRequested = false;

    if(!DismissRequested)
    {
        DismissRequested = true;
        newlyRequested   = true;
    }

    // If displayed and time has elapsed, do the dismissal.
    OVR_ASSERT(DismissibleTime <= (ovr_GetTimeInSeconds() + HSWDISPLAY_FIRST_DISMISSAL_TIME)); // Make sure the dismissal time is sane.
    if (Displayed && (ovr_GetTimeInSeconds() >= DismissibleTime))
    {
        DismissInternal();
        Displayed        = false;
        DismissRequested = false;
        SDKRendered      = false;
        return true;
    }

    if(newlyRequested)
        { HSWDISPLAY_LOG(("[HSWDisplay] Dismiss(): Not permitted yet. Queued for timeout in %.1f seconds.", DismissibleTime - ovr_GetTimeInSeconds())); }

    return false; // Cannot dismiss yet.
}
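Note the two-phase behavior this example relies on: the first call to Dismiss() only records the request and returns false; the dismissal itself happens on a later call, once ovr_GetTimeInSeconds() passes DismissibleTime, so callers are expected to keep polling Dismiss() each frame.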
Example 2: FlushGpuAndWaitTillTime
void DistortionRenderer::EndFrame(bool swapBuffers)
{
    ///QUESTION : Clear the screen?
    ///QUESTION : Ensure the screen is the render target

    // Don't spin if we are explicitly asked not to
    if (RState.DistortionCaps & ovrDistortionCap_TimeWarp &&
        !(RState.DistortionCaps & ovrDistortionCap_ProfileNoTimewarpSpinWaits))
    {
        if (!TimeManager.NeedDistortionTimeMeasurement())
        {
            // Wait for timewarp distortion if it is time and Gpu idle
            FlushGpuAndWaitTillTime(TimeManager.GetFrameTiming().TimewarpPointTime);
            renderEndFrame();
        }
        else
        {
            // If needed, measure distortion time so that TimeManager can better estimate
            // latency-reducing time-warp wait timing.
            WaitUntilGpuIdle();
            double distortionStartTime = ovr_GetTimeInSeconds();

            renderEndFrame();

            WaitUntilGpuIdle();
            TimeManager.AddDistortionTimeMeasurement(ovr_GetTimeInSeconds() - distortionStartTime);
        }
    }
    else
    {
        renderEndFrame();
    }

    if (LatencyTestActive)
    {
        renderLatencyQuad(LatencyTestDrawColor);
    }

    if (swapBuffers)
    {
        if (SwapChain)
        {
            SwapChain->Present(NULL, NULL, NULL, NULL, 0);
        }
        else
        {
            Device->Present( NULL, NULL, NULL, NULL );
        }

        // Force GPU to flush the scene, resulting in the lowest possible latency.
        // It's critical that this flush is *after* present.
        // Doesn't need to be done if running through the Oculus driver.
        if (RState.OurHMDInfo.InCompatibilityMode &&
            !(RState.DistortionCaps & ovrDistortionCap_ProfileNoTimewarpSpinWaits))
        {
            WaitUntilGpuIdle();
        }
    }
}
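The TimewarpPointTime wait above reduces to blocking until an absolute point on the ovr_GetTimeInSeconds() timeline. A minimal sketch of just that waiting half follows; the GPU flush that FlushGpuAndWaitTillTime also performs is omitted, and the real SDK implementation differs.

// Spin until an absolute time (in the ovr_GetTimeInSeconds timebase) is reached.
static void WaitTillTime(double absTimeSeconds)
{
    while (ovr_GetTimeInSeconds() < absTimeSeconds)
    {
        // Busy-wait. A production implementation would yield or sleep briefly to
        // avoid burning a core - which is what ovrDistortionCap_ProfileNoTimewarpSpinWaits
        // lets profiling builds opt out of entirely.
    }
}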
Example 3: PlayMovie
void MoviePlayerView::ScrubBarClicked( const float progress )
{
    // If we're rw/ff'ing, then stop and resume playback.
    if ( SeekSpeed != 0 )
    {
        SeekSpeed = 0;
        PlayMovie();
        SetSeekIcon( SeekSpeed );
        NextSeekTime = 0;
    }

    // Choke off the number of position changes we send to the media player.
    const double now = ovr_GetTimeInSeconds();
    if ( now <= NextSeekTime )
    {
        return;
    }

    int position = Cinema.SceneMgr.MovieDuration * progress;
    Native::SetPosition( Cinema.app, position );

    ScrubBar.SetProgress( progress );

    NextSeekTime = ovr_GetTimeInSeconds() + 0.1;
}
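The NextSeekTime bookkeeping above is a general throttling pattern: record the earliest time the next action is allowed, and drop anything that arrives sooner. As an illustrative standalone helper (not part of the Cinema code):

// Allows an action at most once per 'intervalSeconds'; extra attempts are dropped.
class RateLimiter
{
public:
    explicit RateLimiter(double intervalSeconds)
        : Interval(intervalSeconds), NextAllowedTime(0.0) {}

    bool TryFire()
    {
        const double now = ovr_GetTimeInSeconds();
        if (now <= NextAllowedTime)
            return false;                    // still throttled
        NextAllowedTime = now + Interval;
        return true;
    }

private:
    double Interval;
    double NextAllowedTime;
};

With a member RateLimiter seek(0.1), the throttle in ScrubBarClicked would collapse to if (!seek.TryFire()) return;.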
Example 4: OVR_UNUSED
void DistortionRenderer::EndFrame(bool swapBuffers,
                                  unsigned char* latencyTesterDrawColor, unsigned char* latencyTester2DrawColor)
{
    OVR_UNUSED(swapBuffers);
    OVR_UNUSED(latencyTesterDrawColor);

    ///QUESTION : Should I be clearing the screen?
    ///QUESTION : Should I be ensuring the screen is the render target

    if (!TimeManager.NeedDistortionTimeMeasurement())
    {
        if (RState.DistortionCaps & ovrDistortionCap_TimeWarp)
        {
            // Wait for timewarp distortion if it is time and Gpu idle
            WaitTillTimeAndFlushGpu(TimeManager.GetFrameTiming().TimewarpPointTime);
        }

        RenderBothDistortionMeshes();
    }
    else
    {
        // If needed, measure distortion time so that TimeManager can better estimate
        // latency-reducing time-warp wait timing.
        WaitUntilGpuIdle();
        double distortionStartTime = ovr_GetTimeInSeconds();

        RenderBothDistortionMeshes();

        WaitUntilGpuIdle();
        TimeManager.AddDistortionTimeMeasurement(ovr_GetTimeInSeconds() - distortionStartTime);
    }

    if(latencyTesterDrawColor)
    {
        ///QUESTION : Is this still to be supported?
        ///renderLatencyQuad(latencyTesterDrawColor);
    }

    if(latencyTester2DrawColor)
    {
        // TODO:
    }

    if (swapBuffers)
    {
        if (swapChain)
        {
            swapChain->Present(NULL, NULL, NULL, NULL, 0);
        }
        else
        {
            device->Present( NULL, NULL, NULL, NULL );
        }

        // Force GPU to flush the scene, resulting in the lowest possible latency.
        // It's critical that this flush is *after* present.
        WaitUntilGpuIdle();
    }
}
Example 5: LOG
void MoviePlayerView::OneTimeInit( const char * launchIntent )
{
    LOG( "MoviePlayerView::OneTimeInit" );

    const double start = ovr_GetTimeInSeconds();

    GazeUserId = Cinema.app->GetGazeCursor().GenerateUserId();

    CreateMenu( Cinema.app, Cinema.app->GetVRMenuMgr(), Cinema.app->GetDefaultFont() );

    LOG( "MoviePlayerView::OneTimeInit: %3.1f seconds", ovr_GetTimeInSeconds() - start );
}
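The start/LOG pair bracketing the function body is easy to factor into a small RAII timer. An illustrative sketch follows; LOG is assumed to be the same logging macro used above.

// Logs the wall-clock duration of the enclosing scope on destruction.
class ScopeTimer
{
public:
    explicit ScopeTimer(const char* label)
        : Label(label), Start(ovr_GetTimeInSeconds()) {}

    ~ScopeTimer()
    {
        LOG("%s: %3.1f seconds", Label, ovr_GetTimeInSeconds() - Start);
    }

private:
    const char* Label;
    double      Start;
};

Declaring ScopeTimer timer("MoviePlayerView::OneTimeInit"); at the top of the function would then replace the manual start/end bookkeeping.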
Example 6: ovr_GetTimeInSeconds
void OculusWorldDemoApp::ApplyDynamicResolutionScaling()
{
    if (!DynamicRezScalingEnabled)
    {
        // Restore the viewport rectangle in case dynamic res scaling was enabled before.
        EyeTexture[0].Header.RenderViewport.Size = EyeRenderSize[0];
        EyeTexture[1].Header.RenderViewport.Size = EyeRenderSize[1];
        return;
    }

    // Demonstrate dynamic-resolution rendering.
    // This demo is too simple to actually have a framerate that varies that much, so we'll
    // just pretend this is trying to cope with a highly dynamic rendering load.
    float dynamicRezScale = 1.0f;

    {
        // Hacky stuff to make up a scaling...
        // This produces a value oscillating as follows: 0 -> 1 -> 0.
        static double dynamicRezStartTime = ovr_GetTimeInSeconds();
        float dynamicRezPhase = float ( ovr_GetTimeInSeconds() - dynamicRezStartTime );
        const float dynamicRezTimeScale = 4.0f;

        dynamicRezPhase /= dynamicRezTimeScale;

        if ( dynamicRezPhase < 1.0f )
        {
            dynamicRezScale = dynamicRezPhase;
        }
        else if ( dynamicRezPhase < 2.0f )
        {
            dynamicRezScale = 2.0f - dynamicRezPhase;
        }
        else
        {
            // Reset it to prevent creep.
            dynamicRezStartTime = ovr_GetTimeInSeconds();
            dynamicRezScale     = 0.0f;
        }

        // Map the oscillation to the range 0.5 -> 1.0 -> 0.5.
        dynamicRezScale = dynamicRezScale * 0.5f + 0.5f;
    }

    Sizei sizeLeft  = EyeRenderSize[0];
    Sizei sizeRight = EyeRenderSize[1];

    // This viewport is used for rendering and passed into ovrHmd_EndEyeRender.
    EyeTexture[0].Header.RenderViewport.Size = Sizei(int(sizeLeft.w  * dynamicRezScale),
                                                     int(sizeLeft.h  * dynamicRezScale));
    EyeTexture[1].Header.RenderViewport.Size = Sizei(int(sizeRight.w * dynamicRezScale),
                                                     int(sizeRight.h * dynamicRezScale));
}
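The oscillation logic can be restated as a pure function of the phase, which makes the shape of the signal easier to see (illustrative only; the demo keeps the stateful version so it can reset dynamicRezStartTime):

// phase in [0, 2) yields a triangle wave 0 -> 1 -> 0, remapped into [0.5, 1.0].
static float DynamicRezScaleForPhase(float phase)
{
    const float tri = (phase < 1.0f) ? phase : 2.0f - phase;  // 0 -> 1 -> 0
    return tri * 0.5f + 0.5f;                                 // 0.5 -> 1.0 -> 0.5
}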
Example 7: defined
void GVRInterface::idle() {
#if defined(ANDROID) && defined(HAVE_LIBOVR)
    if (!_inVRMode && ovr_IsHeadsetDocked()) {
        qDebug() << "The headset just got docked - enter VR mode.";
        enterVRMode();
    } else if (_inVRMode) {

        if (ovr_IsHeadsetDocked()) {
            static int counter = 0;

            // Get the latest head tracking state, predicted ahead to the midpoint of the time
            // it will be displayed. It will always be corrected to the real values by
            // time warp, but the closer we get, the less black will be pulled in at the edges.
            const double now = ovr_GetTimeInSeconds();
            static double prev;  // zero-initialized; the first (huge) delta is clamped below
            const double rawDelta = now - prev;
            prev = now;
            const double clampedPrediction = std::min( 0.1, rawDelta * 2);
            ovrSensorState sensor = ovrHmd_GetSensorState(OvrHmd, now + clampedPrediction, true );

            auto ovrOrientation = sensor.Predicted.Pose.Orientation;
            glm::quat newOrientation(ovrOrientation.w, ovrOrientation.x, ovrOrientation.y, ovrOrientation.z);
            _client->setOrientation(newOrientation);

            if (counter++ % 100000 == 0) {
                qDebug() << "GetSensorState in frame" << counter << "-"
                         << ovrOrientation.x << ovrOrientation.y << ovrOrientation.z << ovrOrientation.w;
            }
        } else {
            qDebug() << "The headset was undocked - leaving VR mode.";
            leaveVRMode();
        }
    }

    OVR::KeyState& backKeyState = _mainWindow->getBackKeyState();
    auto backEvent = backKeyState.Update(ovr_GetTimeInSeconds());

    if (backEvent == OVR::KeyState::KEY_EVENT_LONG_PRESS) {
        qDebug() << "Attempting to start the Platform UI Activity.";
        ovr_StartPackageActivity(_ovr, PUI_CLASS_NAME, PUI_GLOBAL_MENU);
    } else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP || backEvent == OVR::KeyState::KEY_EVENT_SHORT_PRESS) {
        qDebug() << "Got an event we should cancel for!";
        // (Unreachable here: KEY_EVENT_DOUBLE_TAP is already handled by this branch.)
        // } else if (backEvent == OVR::KeyState::KEY_EVENT_DOUBLE_TAP) {
        //     qDebug() << "The button is down!";
    }
#endif
}
Example 8: ovr_GetTimeInSeconds
//================================
// OvrAnimComponent::Play
void OvrAnimComponent::Play()
{
    AnimState = ANIMSTATE_PLAYING;
    BaseTime = ovr_GetTimeInSeconds();
    // On a play we offset the base frame to the current frame so a resume from pause doesn't restart.
    BaseFrame = CurFrame;
}
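For context, here is one plausible way the animation update might consume BaseTime and BaseFrame. This is an assumption for illustration only; the real frame-advance code of OvrAnimComponent is not part of this snippet.

// Hypothetical frame computation: time elapsed since Play() advances the animation
// starting from the frame at which it was paused.
int ComputeCurrentFrame(double baseTime, int baseFrame, float framesPerSecond, int numFrames)
{
    const double elapsed = ovr_GetTimeInSeconds() - baseTime;
    return (baseFrame + static_cast<int>(elapsed * framesPerSecond)) % numFrames;
}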
Example 9: ovrHmd_GetHSWDisplayState
void OculusInterface::oculusDisplayWarning()
{
    // Health and Safety Warning display state.
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(m_hmd, &hswDisplayState);

    if (hswDisplayState.Displayed)
    {
        // Dismiss the warning if the user pressed the appropriate key, or if the user
        // is tapping the side of the HMD.
        // If the user has requested to dismiss the warning via keyboard or controller input...
        if (m_warningOff)
            ovrHmd_DismissHSWDisplay(m_hmd);
        else
        {
            // Detect a moderate tap on the side of the HMD.
            ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmd, ovr_GetTimeInSeconds());

            if (ts.StatusFlags & ovrStatus_OrientationTracked)
            {
                const OVR::Vector3f v(ts.RawSensorData.Accelerometer.x,
                                      ts.RawSensorData.Accelerometer.y,
                                      ts.RawSensorData.Accelerometer.z);

                // Arbitrary value, representing a moderate tap on the side of the DK2 Rift.
                if (v.LengthSq() > 250.f)
                    ovrHmd_DismissHSWDisplay(m_hmd);
            }
        }
    }
}
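For scale: v.LengthSq() > 250.f accepts acceleration magnitudes above √250 ≈ 15.8 m/s², roughly 1.6 g. The raw accelerometer reads about 1 g (≈ 96 in squared units) when the headset is at rest, so ordinary head motion stays well under the threshold while a deliberate tap spikes past it.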
Example 10: ProcessShortcutButton
bool OptionSelectionMenu::OnGamepad(UInt32 buttonMask)
{
    // Check global shortcuts first.
    String s = ProcessShortcutButton(buttonMask);
    if (!s.IsEmpty())
    {
        PopupMessage = s;
        PopupMessageTimeout = ovr_GetTimeInSeconds() + 4.0f;
        return true;
    }

    if (GetSubmenu() != NULL)
    {
        return GetSubmenu()->OnGamepad(buttonMask);
    }

    if (ToggleShortcut.MatchGamepadButton(buttonMask))
        return true;

    if (DisplayState == Display_None)
        return false;

    for (int i = 0; i < Nav_LAST; i++)
    {
        if (NavShortcuts[i].MatchGamepadButton(buttonMask))
            return true;
    }

    // Let the caller process the button press.
    return false;
}
Example 11: UpdateText
//==============================
// OvrSliderComponent::OnFrameUpdate
eMsgStatus OvrSliderComponent::OnFrameUpdate( App * app, VrFrame const & vrFrame, OvrVRMenuMgr & menuMgr,
        VRMenuObject * self, VRMenuEvent const & event )
{
    if ( TouchDown )
    {
        UpdateText( menuMgr, self, BubbleId );
        UpdateText( menuMgr, self, TextId );
    }

    if ( BubbleFadeOutTime > 0.0 )
    {
        if ( ovr_GetTimeInSeconds() >= BubbleFadeOutTime )
        {
            BubbleFadeOutTime = -1.0;
            BubbleFader.StartFadeOut();
        }
    }

    VRMenuObject * bubble = menuMgr.ToObject( self->ChildHandleForId( menuMgr, BubbleId ) );
    if ( bubble != NULL )
    {
        float const fadeTime = 0.5f;
        float const fadeRate = 1.0f / fadeTime;
        BubbleFader.Update( fadeRate, vrFrame.DeltaSeconds );

        Vector4f color = bubble->GetColor();
        color.w = BubbleFader.GetFinalAlpha();
        bubble->SetColor( color );

        Vector4f textColor = bubble->GetTextColor();
        textColor.w = color.w;
        bubble->SetTextColor( textColor );
    }

    return MSG_STATUS_ALIVE;
}
Example 12: ovr_GetTimeInSeconds
FrameTimeManager::Timing FrameTimeManager::GetFrameTiming(unsigned frameIndex)
{
    Timing frameTiming = LocklessTiming.GetState();

    if (frameTiming.ThisFrameTime == 0.0)
    {
        // If timing hasn't been initialized, starting based on "now" is the best guess.
        frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
                                         ovr_GetTimeInSeconds(), frameIndex);
    }
    else if (frameIndex > frameTiming.FrameIndex)
    {
        unsigned frameDelta    = frameIndex - frameTiming.FrameIndex;
        double   thisFrameTime = frameTiming.NextFrameTime +
                                 double(frameDelta-1) * frameTiming.Inputs.FrameDelta;
        // Don't run away too far into the future beyond rendering.
        OVR_ASSERT(frameDelta < 6);

        frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
                                         thisFrameTime, frameIndex);
    }

    return frameTiming;
}
Example 13: FrameInfo
bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    _currentRenderFrameInfo = FrameInfo();
    _currentRenderFrameInfo.sensorSampleTime = ovr_GetTimeInSeconds();
    _currentRenderFrameInfo.predictedDisplayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
    auto trackingState = ovr_GetTrackingState(_session, _currentRenderFrameInfo.predictedDisplayTime, ovrTrue);
    _currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
    _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;

    std::array<glm::mat4, 2> handPoses;
    // Make controller poses available to the presentation thread.
    ovr_for_each_hand([&](ovrHandType hand) {
        // Both flags must be set, so the required mask is built with bitwise OR.
        static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked | ovrStatus_PositionTracked;
        if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
            return;
        }

        auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
        static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
        handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
    });

    withRenderThreadLock([&] {
        _uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
        _handPoses = handPoses;
        _frameInfos[frameIndex] = _currentRenderFrameInfo;
    });
    return Parent::beginFrameRender(frameIndex);
}
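Unlike the preceding examples, which target the 0.x-era ovrHmd_* API, this one uses the later SDK interface: tracking state comes from ovr_GetTrackingState on an ovrSession, and the prediction timestamp comes from ovr_GetPredictedDisplayTime rather than being computed by hand from ovr_GetTimeInSeconds.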
Example 14: printf
void OVRScene::RenderForOneEye(const float* pMview, const float* pPersp) const
{
    printf("Rendering in OVRScene!\n");  // per-frame debug output

    if (m_bDraw == false)
        return;
    if (pMview == NULL)
        return;
    if (pPersp == NULL)
        return;

    const glm::mat4 modelview  = glm::make_mat4(pMview);
    const glm::mat4 projection = glm::make_mat4(pPersp);

    // Assemble the modelview matrix to lock the camera in with real-world geometry:
    // We still have to use the assembled HMD stereo modelview matrices from OVRSDK05AppSkeleton,
    // but we undo the effects of chassis yaw and position so the frustum follows the viewer.
    if (m_pHmd != NULL)
    {
        const ovrTrackingState ts = ovrHmd_GetTrackingState(m_pHmd, ovr_GetTimeInSeconds());
        const ovrPosef& cp = ts.CameraPose;

        OVR::Matrix4f camMtx = OVR::Matrix4f();
        camMtx *= OVR::Matrix4f::Translation(cp.Position)
                  * OVR::Matrix4f(OVR::Quatf(cp.Orientation));

        glm::mat4 ogmat = glm::make_mat4(&camMtx.Transposed().M[0][0]);
        DrawScene(modelview * ogmat, projection);
    }
}
Example 15: VR_OVR_InitSensor
ovrBool VR_OVR_InitSensor()
{
    unsigned int sensorCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection;

    if (sensorEnabled)
    {
        sensorEnabled = 0;
    }

    sensorCaps |= ovrTrackingCap_Position;

    sensorEnabled = ovrHmd_ConfigureTracking(hmd, sensorCaps, ovrTrackingCap_Orientation);

    if (sensorEnabled)
    {
        ovrTrackingState ss;
        ss = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());

        Com_Printf("VR_OVR: Successfully initialized sensors!\n");

        if (ss.StatusFlags & ovrStatus_PositionConnected)
            Com_Printf("...sensor has position tracking support\n");
        if (ss.StatusFlags & ovrStatus_OrientationTracked)
            Com_Printf("...orientation tracking enabled\n");
        if (ss.StatusFlags & ovrStatus_PositionTracked)
            Com_Printf("...position tracking enabled\n");
    }

    return sensorEnabled;
}