本文整理汇总了C#中OVREye类的典型用法代码示例。如果您正苦于以下问题:C# OVREye类的具体用法?C# OVREye怎么用?C# OVREye使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
OVREye类属于命名空间,在下文中一共展示了OVREye类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: ConfigureCamera
private Camera ConfigureCamera(OVREye eye)
{
    // Resolve the anchor transform for the requested eye and grab its camera.
    Transform eyeAnchor;
    if (eye == OVREye.Left)
        eyeAnchor = leftEyeAnchor;
    else
        eyeAnchor = rightEyeAnchor;
    Camera eyeCam = eyeAnchor.GetComponent<Camera>();

    // Drive FOV and aspect from the display's per-eye render description.
    OVRDisplay.EyeRenderDesc renderDesc = OVRManager.display.GetEyeRenderDesc(eye);
    eyeCam.fieldOfView = renderDesc.fov.y;
    eyeCam.aspect = renderDesc.resolution.x / renderDesc.resolution.y;

    // Render into a scaled sub-rect of the eye texture.
    float viewportScale = OVRManager.instance.virtualTextureScale;
    eyeCam.rect = new Rect(0f, 0f, viewportScale, viewportScale);
    eyeCam.targetTexture = OVRManager.display.GetEyeTexture(eye);

    // AA is documented to have no effect in deferred, but it causes black screens.
    if (eyeCam.actualRenderingPath == RenderingPath.DeferredLighting)
        QualitySettings.antiAliasing = 0;

#if !UNITY_ANDROID || UNITY_EDITOR
#if OVR_USE_PROJ_MATRIX
    eyeCam.projectionMatrix = OVRManager.display.GetProjection((int)eye, eyeCam.nearClipPlane, eyeCam.farClipPlane);
#endif
#endif
    return eyeCam;
}
示例2: ConfigureEyeAnchor
private Transform ConfigureEyeAnchor(OVREye eye)
{
    // Look for an existing anchor under this transform using the current
    // naming convention, fall back to the legacy "Camera<Eye>" name, and
    // finally create a new child object if neither is found.
    string name = eye.ToString() + "EyeAnchor";
    Transform anchor = transform.Find(name);
    if (anchor == null)
        anchor = transform.Find("Camera" + eye.ToString());
    if (anchor == null)
        anchor = new GameObject(name).transform;

    // Normalize the anchor: parent it here and reset its local pose.
    anchor.parent = transform;
    anchor.localScale = Vector3.one;
    anchor.localPosition = Vector3.zero;
    anchor.localRotation = Quaternion.identity;
    return anchor;
}
示例3: EndEye
public static void EndEye(OVREye eye)
{
#if UNITY_ANDROID && !UNITY_EDITOR
    // Notify the native plugin that rendering for this eye has finished,
    // passing the native ID of the texture that was just rendered.
    RenderEventType eventType;
    if (eye == OVREye.Left)
        eventType = RenderEventType.LeftEyeEndFrame;
    else
        eventType = RenderEventType.RightEyeEndFrame;
    OVRPluginEvent.IssueWithData(eventType, display.GetEyeTextureId(eye));
#endif
}
示例4: ConfigureCamera
private Camera ConfigureCamera(OVREye eye)
{
    // Resolve the per-eye anchor and the camera attached to it.
    Transform eyeAnchor = (eye == OVREye.Left) ? leftEyeAnchor : rightEyeAnchor;
    Camera eyeCam = eyeAnchor.GetComponent<Camera>();

    // Projection parameters come from the display's render description.
    OVRDisplay.EyeRenderDesc renderDesc = OVRManager.display.GetEyeRenderDesc(eye);
    eyeCam.fieldOfView = renderDesc.fov.y;
    eyeCam.aspect = renderDesc.resolution.x / renderDesc.resolution.y;

    float viewportScale = OVRManager.instance.virtualTextureScale;
    eyeCam.rect = new Rect(0f, 0f, viewportScale, viewportScale);
    eyeCam.targetTexture = OVRManager.display.GetEyeTexture(eye);
    eyeCam.hdr = OVRManager.instance.hdr;

#if UNITY_ANDROID && !UNITY_EDITOR
    // Enforce camera render order
    if (eye == OVREye.Left)
        eyeCam.depth = (int)RenderEventType.LeftEyeEndFrame;
    else
        eyeCam.depth = (int)RenderEventType.RightEyeEndFrame;

    // If we don't clear the color buffer with a glClear, tiling GPUs
    // will be forced to do an "unresolve" and read back the color buffer information.
    // The clear is free on PowerVR, and possibly Mali, but it is a performance cost
    // on Adreno, and we would be better off if we had the ability to discard/invalidate
    // the color buffer instead of clearing.

    // NOTE: The color buffer is not being invalidated in skybox mode, forcing an additional,
    // wasted color buffer read before the skybox is drawn.
    // (Must inspect the current clearFlags BEFORE overwriting them below.)
    bool usesSkybox = (eyeCam.clearFlags == CameraClearFlags.Skybox) &&
        ((eyeCam.gameObject.GetComponent<Skybox>() != null) || (RenderSettings.skybox != null));
    eyeCam.clearFlags = usesSkybox ? CameraClearFlags.Skybox : CameraClearFlags.SolidColor;
#endif

    // When rendering monoscopic, we will use the left camera render for both eyes.
    if (eye == OVREye.Right)
        eyeCam.enabled = !OVRManager.instance.monoscopic;

    // AA is documented to have no effect in deferred, but it causes black screens.
    if (eyeCam.actualRenderingPath == RenderingPath.DeferredLighting)
        QualitySettings.antiAliasing = 0;

#if !UNITY_ANDROID || UNITY_EDITOR
#if OVR_USE_PROJ_MATRIX
    eyeCam.projectionMatrix = OVRManager.display.GetProjection((int)eye, eyeCam.nearClipPlane, eyeCam.farClipPlane);
#endif
#endif
    return eyeCam;
}
示例5: ConfigureEyeAnchor
private Transform ConfigureEyeAnchor(Transform root, OVREye eye)
{
    // Search order: the anchor nested under the given root, a loose anchor
    // with the current name, then the legacy name; create one if all fail.
    string name = eye.ToString() + eyeAnchorName;

    Transform anchor = transform.Find(root.name + "/" + name);
    if (anchor == null)
        anchor = transform.Find(name);
    if (anchor == null)
        anchor = transform.Find(legacyEyeAnchorName + eye.ToString());
    if (anchor == null)
        anchor = new GameObject(name).transform;

    // Normalize: current name, parented under root, identity local pose.
    anchor.name = name;
    anchor.parent = root;
    anchor.localScale = Vector3.one;
    anchor.localPosition = Vector3.zero;
    anchor.localRotation = Quaternion.identity;
    return anchor;
}
示例6: ConfigureEyeTexture
private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye)
{
    // Each buffer set holds one texture per eye; offset into it by eye index.
    int slot = eyeBufferIndex + (int)eye;
    EyeRenderDesc desc = eyeDescs[(int)eye];

    RenderTexture texture = new RenderTexture(
        (int)desc.resolution.x,
        (int)desc.resolution.y,
        (int)OVRManager.instance.eyeTextureDepth,
        OVRManager.instance.eyeTextureFormat);
    texture.antiAliasing = (int)OVRManager.instance.eyeTextureAntiAliasing;
    texture.Create();

    eyeTextures[slot] = texture;
    // Cache the native texture ID so the plugin can reference it directly.
    eyeTextureIds[slot] = texture.GetNativeTextureID();
}
示例7: ConfigureEyeDesc
private void ConfigureEyeDesc(OVREye eye)
{
    Vector2 texSize = Vector2.zero;
    Vector2 fovSize = Vector2.zero;
#if !UNITY_ANDROID || UNITY_EDITOR
    // Symmetrize the HMD's default FOV so both half-angle tangents match per axis.
    FovPort fovPort = OVRManager.capiHmd.GetDesc().DefaultEyeFov[(int)eye];
    float horizTan = Mathf.Max(fovPort.LeftTan, fovPort.RightTan);
    float vertTan = Mathf.Max(fovPort.UpTan, fovPort.DownTan);
    fovPort.LeftTan = fovPort.RightTan = horizTan;
    fovPort.UpTan = fovPort.DownTan = vertTan;

    texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fovPort, OVRManager.instance.nativeTextureScale).ToVector2();
    // Recover full FOV angles (degrees) from the half-angle tangents.
    fovSize = new Vector2(
        2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.LeftTan),
        2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.UpTan));
#else
    // No CAPI on Android: fixed base texture size scaled by the native scale,
    // with a 90-degree FOV on both axes.
    texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
    fovSize = new Vector2(90, 90);
#endif
    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = texSize,
        fov = fovSize
    };
}
示例8: GetEyeTextureId
/// <summary>
/// Gets the currently active render texture's native ID for the given eye.
/// </summary>
public int GetEyeTextureId(OVREye eye)
{
    // Offset from the currently active buffer set by the eye's index.
    int index = currEyeTextureIdx + (int)eye;
    return eyeTextureIds[index];
}
示例9: GetEyeTexture
/// <summary>
/// Gets the currently active render texture for the given eye.
/// </summary>
public RenderTexture GetEyeTexture(OVREye eye)
{
    // Offset from the currently active buffer set by the eye's index.
    int index = currEyeTextureIdx + (int)eye;
    return eyeTextures[index];
}
示例10: GetEyeRenderDesc
/// <summary>
/// Gets the resolution and field of view for the given eye.
/// </summary>
public EyeRenderDesc GetEyeRenderDesc(OVREye eye)
{
    // Descriptors are indexed directly by the eye's enum value.
    EyeRenderDesc desc = eyeDescs[(int)eye];
    return desc;
}
示例11: GetEyePose
/// <summary>
/// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
/// </summary>
public OVRPose GetEyePose(OVREye eye)
{
#if !UNITY_ANDROID || UNITY_EDITOR
// PC path: fetch a fresh render pose from the native runtime unless time warp
// is frozen, in which case the previously cached pose is returned unchanged.
bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
if (updateEyePose)
{
eyePoses[(int)eye] = OVR_GetRenderPose(frameCount, (int)eye).ToPose();
}
return eyePoses[(int)eye];
#else
// Android path: the sensor is sampled only on the left-eye call; the shared
// w/x/y/z/fov fields are then reused when the right eye is requested.
if (eye == OVREye.Left)
OVR_GetSensorState(
OVRManager.instance.monoscopic,
ref w,
ref x,
ref y,
ref z,
ref fov,
ref OVRManager.timeWarpViewNumber);
// Component sign flips adapt the plugin's quaternion to Unity's convention.
// NOTE(review): inferred from usage — confirm against the native plugin docs.
Quaternion rot = new Quaternion(-x, -y, z, w);
// Each eye sits half an IPD from center: left negative, right positive on X.
float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;
// Head-and-neck model: the eyes rotate about a pivot below and behind them,
// built from the profile's eye/neck heights and eye depth.
float neckToEyeHeight = OVRManager.profile.eyeHeight - OVRManager.profile.neckHeight;
Vector3 headNeckModel = new Vector3(0.0f, neckToEyeHeight, OVRManager.profile.eyeDepth);
Vector3 pos = rot * (new Vector3(eyeOffsetX, 0.0f, 0.0f) + headNeckModel);
// Subtract the HNM pivot to avoid translating the camera when level
pos -= headNeckModel;
return new OVRPose
{
position = pos,
orientation = rot,
};
#endif
}
示例12: ConfigureEyeTexture
private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye, float scale)
{
    // Locate this eye's slot within the given texture buffer set.
    int slot = eyeBufferIndex + (int)eye;
    EyeRenderDesc desc = eyeDescs[(int)eye];

    // Scale the nominal per-eye resolution by the requested factor.
    int width = (int)(desc.resolution.x * scale);
    int height = (int)(desc.resolution.y * scale);

    RenderTexture texture = new RenderTexture(width, height, OVRManager.instance.eyeTextureDepth, OVRManager.instance.eyeTextureFormat);
    // Mirror the project's AA setting; RenderTexture needs at least 1 sample.
    texture.antiAliasing = (QualitySettings.antiAliasing == 0) ? 1 : QualitySettings.antiAliasing;
    texture.Create();

    eyeTextures[slot] = texture;
    // Cache the native texture ID for direct use by the plugin.
    eyeTextureIds[slot] = texture.GetNativeTextureID();
}
示例13: ConfigureEyeDesc
private void ConfigureEyeDesc(OVREye eye)
{
#if !UNITY_ANDROID || UNITY_EDITOR
    // Symmetrize the HMD's default per-eye FOV on both axes.
    HmdDesc hmdDesc = OVRManager.capiHmd.GetDesc();
    FovPort fov = hmdDesc.DefaultEyeFov[(int)eye];
    fov.LeftTan = fov.RightTan = Mathf.Max(fov.LeftTan, fov.RightTan);
    fov.UpTan = fov.DownTan = Mathf.Max(fov.UpTan, fov.DownTan);

    // Configure Stereo settings. Default pixel density is one texel per pixel.
    float desiredPixelDensity = 1f;
    Sizei texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fov, desiredPixelDensity);

    // Recover full FOV angles (degrees) from the half-angle tangents.
    Vector2 fovDegrees = new Vector2(
        2f * Mathf.Rad2Deg * Mathf.Atan(fov.LeftTan),
        2f * Mathf.Rad2Deg * Mathf.Atan(fov.UpTan));

    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = texSize.ToVector2(),
        fov = fovDegrees
    };
#else
    // Android fallback: fixed 1024x1024 textures with a 90-degree FOV.
    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = new Vector2(1024, 1024),
        fov = new Vector2(90, 90)
    };
#endif
}
示例14: GetEyePose
/// <summary>
/// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
/// </summary>
public OVRPose GetEyePose(OVREye eye)
{
#if !UNITY_ANDROID || UNITY_EDITOR
// PC path: fetch a fresh render pose from the native runtime unless time warp
// is frozen, in which case the previously cached pose is returned unchanged.
bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
if (updateEyePose)
{
eyePoses[(int)eye] = OVR_GetRenderPose(frameCount, (int)eye).ToPose();
}
return eyePoses[(int)eye];
#else
// Android path: the sensor is sampled only on the left-eye call; the shared
// w/x/y/z/fov fields are then reused when the right eye is requested.
// (Unlike the other GetEyePose variant in this file, monoscopic is hard-coded false here.)
if (eye == OVREye.Left)
OVR_GetSensorState(
false,
ref w,
ref x,
ref y,
ref z,
ref fov,
ref OVRManager.timeWarpViewNumber);
// Component sign flips adapt the plugin's quaternion to Unity's convention.
// NOTE(review): inferred from usage — confirm against the native plugin docs.
Quaternion rot = new Quaternion(-x, -y, z, w);
// Each eye sits half an IPD from center: left negative, right positive on X.
// No head-and-neck model in this variant — the pose is pure IPD offset.
float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;
Vector3 pos = rot * new Vector3(eyeOffsetX, 0.0f, 0.0f);
return new OVRPose
{
position = pos,
orientation = rot,
};
#endif
}
示例15: ConfigureEyeDesc
private void ConfigureEyeDesc(OVREye eye)
{
    Vector2 resolution = Vector2.zero;
    Vector2 fieldOfView = Vector2.zero;
#if !UNITY_ANDROID || UNITY_EDITOR
    // Nothing to query without a VR device; this eye's descriptor keeps its
    // previous/default value in that case.
    if (!OVRManager.instance.isVRPresent)
        return;

    OVRPlugin.Sizei size = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
    OVRPlugin.Frustumf frustum = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
    resolution = new Vector2(size.w, size.h);
    // The frustum FOV is reported in radians; convert to degrees.
    fieldOfView = Mathf.Rad2Deg * new Vector2(frustum.fovX, frustum.fovY);
#else
    // Android fallback: fixed base size scaled by the native texture scale,
    // with a 90-degree FOV on both axes.
    resolution = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
    fieldOfView = new Vector2(90, 90);
#endif
    eyeDescs[(int)eye] = new EyeRenderDesc()
    {
        resolution = resolution,
        fov = fieldOfView
    };
}