本文整理汇总了C#中Valve.VR.VRTextureBounds_t类的典型用法代码示例。如果您正苦于以下问题:C# VRTextureBounds_t类的具体用法?C# VRTextureBounds_t怎么用?C# VRTextureBounds_t使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
VRTextureBounds_t类属于Valve.VR命名空间,在下文中一共展示了VRTextureBounds_t类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: SteamVR
// Initializes the SteamVR wrapper from the modern OpenVR static interfaces:
// grabs system/compositor/overlay, derives per-eye projection and texture
// bounds, sizes the scene render target, and subscribes to lifecycle events.
private SteamVR()
{
    hmd = OpenVR.System;
    Debug.Log("Connected to " + hmd_TrackingSystemName + ":" + hmd_SerialNumber);

    compositor = OpenVR.Compositor;
    overlay = OpenVR.Overlay;

    // Recommended per-eye render target size reported by the runtime.
    uint recommendedWidth = 0, recommendedHeight = 0;
    hmd.GetRecommendedRenderTargetSize(ref recommendedWidth, ref recommendedHeight);
    sceneWidth = (float)recommendedWidth;
    sceneHeight = (float)recommendedHeight;

    // Raw projection extents (tangents) for each eye's asymmetric frustum.
    float leftL = 0.0f, leftR = 0.0f, leftT = 0.0f, leftB = 0.0f;
    hmd.GetProjectionRaw(EVREye.Eye_Left, ref leftL, ref leftR, ref leftT, ref leftB);
    float rightL = 0.0f, rightR = 0.0f, rightT = 0.0f, rightB = 0.0f;
    hmd.GetProjectionRaw(EVREye.Eye_Right, ref rightL, ref rightR, ref rightT, ref rightB);

    // The widest tangents across both eyes define a shared symmetric frustum.
    tanHalfFov = new Vector2(
        Mathf.Max(-leftL, leftR, -rightL, rightR),
        Mathf.Max(-leftT, leftB, -rightT, rightB));

    // Map each eye's asymmetric frustum into UV bounds of the symmetric texture.
    textureBounds = new VRTextureBounds_t[2];
    textureBounds[0].uMin = 0.5f + 0.5f * leftL / tanHalfFov.x;
    textureBounds[0].uMax = 0.5f + 0.5f * leftR / tanHalfFov.x;
    textureBounds[0].vMin = 0.5f - 0.5f * leftB / tanHalfFov.y;
    textureBounds[0].vMax = 0.5f - 0.5f * leftT / tanHalfFov.y;
    textureBounds[1].uMin = 0.5f + 0.5f * rightL / tanHalfFov.x;
    textureBounds[1].uMax = 0.5f + 0.5f * rightR / tanHalfFov.x;
    textureBounds[1].vMin = 0.5f - 0.5f * rightB / tanHalfFov.y;
    textureBounds[1].vMax = 0.5f - 0.5f * rightT / tanHalfFov.y;

    // Grow the recommended size to account for the overlapping fov.
    sceneWidth = sceneWidth / Mathf.Max(textureBounds[0].uMax - textureBounds[0].uMin, textureBounds[1].uMax - textureBounds[1].uMin);
    sceneHeight = sceneHeight / Mathf.Max(textureBounds[0].vMax - textureBounds[0].vMin, textureBounds[1].vMax - textureBounds[1].vMin);

    aspect = tanHalfFov.x / tanHalfFov.y;
    fieldOfView = 2.0f * Mathf.Atan(tanHalfFov.y) * Mathf.Rad2Deg;

    // Eye-to-head offsets used to position the per-eye cameras.
    eyes = new SteamVR_Utils.RigidTransform[] {
        new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(EVREye.Eye_Left)),
        new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(EVREye.Eye_Right)) };

    // Pick the submit convention from Unity's reported graphics device string.
    graphicsAPI = SystemInfo.graphicsDeviceVersion.StartsWith("OpenGL")
        ? EGraphicsAPIConvention.API_OpenGL
        : EGraphicsAPIConvention.API_DirectX;

    SteamVR_Utils.Event.Listen("initializing", OnInitializing);
    SteamVR_Utils.Event.Listen("calibrating", OnCalibrating);
    SteamVR_Utils.Event.Listen("out_of_range", OnOutOfRange);
    SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
    SteamVR_Utils.Event.Listen("new_poses", OnNewPoses);
}
示例2: VR_IVRCompositor_Submit
internal static extern VRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, Hmd_Eye eEye, GraphicsAPIConvention eTextureType, IntPtr pTexture, ref VRTextureBounds_t pBounds, VRSubmitFlags_t nSubmitFlags);
示例3: SteamVR
// Legacy-pointer constructor: wraps interface pointers handed in from native
// code, logs any pending compositor error, then derives the same render
// parameters (scene size, fov, per-eye texture bounds) as the other overloads
// and pushes the bounds to the native Unity plugin.
private SteamVR(System.IntPtr pHmd, System.IntPtr pCompositor, System.IntPtr pOverlay)
{
    hmd = new CVRSystem(pHmd);
    Debug.Log("Connected to " + hmd_TrackingSystemName + ":" + hmd_SerialNumber);

    compositor = new CVRCompositor(pCompositor);
    overlay = new CVROverlay(pOverlay);

    // First call sizes the error buffer; second call fetches the message.
    var errorLength = compositor.GetLastError(null, 0);
    if (errorLength > 1)
    {
        var message = new System.Text.StringBuilder((int)errorLength);
        compositor.GetLastError(message, errorLength);
        Debug.Log("Compositor - " + message);
    }

    // Recommended per-eye render target size reported by the runtime.
    uint recW = 0, recH = 0;
    hmd.GetRecommendedRenderTargetSize(ref recW, ref recH);
    sceneWidth = (float)recW;
    sceneHeight = (float)recH;

    // Raw projection extents (tangents) for each eye's asymmetric frustum.
    float lL = 0.0f, lR = 0.0f, lT = 0.0f, lB = 0.0f;
    hmd.GetProjectionRaw(Hmd_Eye.Eye_Left, ref lL, ref lR, ref lT, ref lB);
    float rL = 0.0f, rR = 0.0f, rT = 0.0f, rB = 0.0f;
    hmd.GetProjectionRaw(Hmd_Eye.Eye_Right, ref rL, ref rR, ref rT, ref rB);

    // The widest tangents across both eyes define a shared symmetric frustum.
    tanHalfFov = new Vector2(
        Mathf.Max(-lL, lR, -rL, rR),
        Mathf.Max(-lT, lB, -rT, rB));

    // Map each eye's asymmetric frustum into UV bounds of the symmetric texture.
    textureBounds = new VRTextureBounds_t[2];
    textureBounds[0].uMin = 0.5f + 0.5f * lL / tanHalfFov.x;
    textureBounds[0].uMax = 0.5f + 0.5f * lR / tanHalfFov.x;
    textureBounds[0].vMin = 0.5f - 0.5f * lB / tanHalfFov.y;
    textureBounds[0].vMax = 0.5f - 0.5f * lT / tanHalfFov.y;
    textureBounds[1].uMin = 0.5f + 0.5f * rL / tanHalfFov.x;
    textureBounds[1].uMax = 0.5f + 0.5f * rR / tanHalfFov.x;
    textureBounds[1].vMin = 0.5f - 0.5f * rB / tanHalfFov.y;
    textureBounds[1].vMax = 0.5f - 0.5f * rT / tanHalfFov.y;

    // Hand the bounds to the native Unity plugin for use at submit time.
    Unity.SetSubmitParams(textureBounds[0], textureBounds[1], VRSubmitFlags_t.Submit_Default);

    // Grow the recommended size to account for the overlapping fov.
    sceneWidth = sceneWidth / Mathf.Max(textureBounds[0].uMax - textureBounds[0].uMin, textureBounds[1].uMax - textureBounds[1].uMin);
    sceneHeight = sceneHeight / Mathf.Max(textureBounds[0].vMax - textureBounds[0].vMin, textureBounds[1].vMax - textureBounds[1].vMin);

    aspect = tanHalfFov.x / tanHalfFov.y;
    fieldOfView = 2.0f * Mathf.Atan(tanHalfFov.y) * Mathf.Rad2Deg;

    // Eye-to-head offsets used to position the per-eye cameras.
    eyes = new SteamVR_Utils.RigidTransform[] {
        new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Left)),
        new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Right)) };

    // Pick the submit convention from Unity's reported graphics device string.
    graphicsAPI = SystemInfo.graphicsDeviceVersion.StartsWith("OpenGL")
        ? GraphicsAPIConvention.API_OpenGL
        : GraphicsAPIConvention.API_DirectX;

    SteamVR_Utils.Event.Listen("initializing", OnInitializing);
    SteamVR_Utils.Event.Listen("calibrating", OnCalibrating);
    SteamVR_Utils.Event.Listen("out_of_range", OnOutOfRange);
    SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
    SteamVR_Utils.Event.Listen("new_poses", OnNewPoses);
}
示例4: SteamVR
// Oldest constructor variant: besides the standard render-parameter setup it
// registers Unity's active graphics device with the compositor and wires up
// native reset/render-thread callbacks.
private SteamVR(System.IntPtr pHmd, System.IntPtr pCompositor, System.IntPtr pOverlay)
{
hmd = new CVRSystem(pHmd);
Debug.Log("Connected to " + hmd_TrackingSystemName + ":" + hmd_SerialNumber);
compositor = new CVRCompositor(pCompositor);
overlay = new CVROverlay(pOverlay);
// Hand the active Unity rendering device to the compositor; only D3D11 and
// OpenGL are supported here — anything else aborts construction.
var device = new UnityGraphicsDevice();
GetUnityGraphicsDevice(ref device);
switch (device.type)
{
case GfxDeviceRenderer.kGfxRendererD3D11:
compositor.SetGraphicsDevice(Compositor_DeviceType.D3D11, device.ptr);
break;
case GfxDeviceRenderer.kGfxRendererOpenGL:
compositor.SetGraphicsDevice(Compositor_DeviceType.OpenGL, device.ptr);
break;
default:
throw new System.Exception("Unsupported device type.");
}
// First call sizes the error buffer; second call fetches the message.
var capacity = compositor.GetLastError(null, 0);
if (capacity > 1)
{
var result = new System.Text.StringBuilder((int)capacity);
compositor.GetLastError(result, capacity);
Debug.Log("Compositor - " + result);
}
// Register for a callback if our graphics device goes away, so we can properly clean up.
// The GCHandle pins the delegate's lifetime so the native side never calls a
// collected delegate.
var resetDelegate = new UnityResetDelegate(SteamVR.SafeDispose);
callbackHandle = GCHandle.Alloc(resetDelegate);
SetUnityResetCallback(Marshal.GetFunctionPointerForDelegate(resetDelegate));
// Hook up the render thread present event just in case we wind up needing to use this.
var error = HmdError.None;
SetUnityRenderCallback(OpenVR.GetGenericInterface(IVRHmdDistortPresent_Version, ref error));
// Setup render values
uint w = 0, h = 0;
hmd.GetRecommendedRenderTargetSize(ref w, ref h);
sceneWidth = (float)w;
sceneHeight = (float)h;
// Raw projection extents (tangents) for each eye's asymmetric frustum.
float l_left = 0.0f, l_right = 0.0f, l_top = 0.0f, l_bottom = 0.0f;
hmd.GetProjectionRaw(Hmd_Eye.Eye_Left, ref l_left, ref l_right, ref l_top, ref l_bottom);
float r_left = 0.0f, r_right = 0.0f, r_top = 0.0f, r_bottom = 0.0f;
hmd.GetProjectionRaw(Hmd_Eye.Eye_Right, ref r_left, ref r_right, ref r_top, ref r_bottom);
// The widest tangents across both eyes define a shared symmetric frustum.
tanHalfFov = new Vector2(
Mathf.Max(-l_left, l_right, -r_left, r_right),
Mathf.Max(-l_top, l_bottom, -r_top, r_bottom));
// Map each eye's asymmetric frustum into UV bounds of the symmetric texture.
textureBounds = new VRTextureBounds_t[2];
textureBounds[0].uMin = 0.5f + 0.5f * l_left / tanHalfFov.x;
textureBounds[0].uMax = 0.5f + 0.5f * l_right / tanHalfFov.x;
textureBounds[0].vMin = 0.5f - 0.5f * l_bottom / tanHalfFov.y;
textureBounds[0].vMax = 0.5f - 0.5f * l_top / tanHalfFov.y;
textureBounds[1].uMin = 0.5f + 0.5f * r_left / tanHalfFov.x;
textureBounds[1].uMax = 0.5f + 0.5f * r_right / tanHalfFov.x;
textureBounds[1].vMin = 0.5f - 0.5f * r_bottom / tanHalfFov.y;
textureBounds[1].vMax = 0.5f - 0.5f * r_top / tanHalfFov.y;
// Grow the recommended size to account for the overlapping fov
sceneWidth = sceneWidth / Mathf.Max(textureBounds[0].uMax - textureBounds[0].uMin, textureBounds[1].uMax - textureBounds[1].uMin);
sceneHeight = sceneHeight / Mathf.Max(textureBounds[0].vMax - textureBounds[0].vMin, textureBounds[1].vMax - textureBounds[1].vMin);
aspect = tanHalfFov.x / tanHalfFov.y;
fieldOfView = 2.0f * Mathf.Atan(tanHalfFov.y) * Mathf.Rad2Deg;
// Eye-to-head offsets used to position the per-eye cameras.
eyes = new SteamVR_Utils.RigidTransform[] {
new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Left)),
new SteamVR_Utils.RigidTransform(hmd.GetEyeToHeadTransform(Hmd_Eye.Eye_Right)) };
SteamVR_Utils.Event.Listen("initializing", OnInitializing);
SteamVR_Utils.Event.Listen("calibrating", OnCalibrating);
SteamVR_Utils.Event.Listen("out_of_range", OnOutOfRange);
SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
SteamVR_Utils.Event.Listen("new_poses", OnNewPoses);
}
示例5: GetOverlayTextureBounds
public abstract EVROverlayError GetOverlayTextureBounds(ulong ulOverlayHandle,ref VRTextureBounds_t pOverlayTextureBounds);
示例6: SetSubmitParams
public static extern void SetSubmitParams(VRTextureBounds_t boundsL, VRTextureBounds_t boundsR, EVRSubmitFlags nSubmitFlags);
示例7: VR_IVRCompositor_Submit
internal static extern EVRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags);
示例8: Submit
// Forwards a frame-submit request to the native IVRCompositor after verifying
// this wrapper still points at a usable interface.
public override EVRCompositorError Submit(EVREye eEye,ref Texture_t pTexture,ref VRTextureBounds_t pBounds,EVRSubmitFlags nSubmitFlags)
{
    CheckIfUsable();
    return VRNativeEntrypoints.VR_IVRCompositor_Submit(m_pVRCompositor, eEye, ref pTexture, ref pBounds, nSubmitFlags);
}
示例9: Submit
// Thin dispatch through the native function table: hands the eye, texture and
// UV bounds straight to the runtime's Submit entry.
public EVRCompositorError Submit(EVREye eEye, ref Texture_t pTexture, ref VRTextureBounds_t pBounds, EVRSubmitFlags nSubmitFlags)
{
    var status = FnTable.Submit(eEye, ref pTexture, ref pBounds, nSubmitFlags);
    return status;
}
示例10: GetVideoStreamTextureSize
// Queries the tracked camera's frame texture size and valid UV bounds for the
// given device/frame type. The ref width/height are zeroed up front so callers
// see deterministic values even when the native call fails.
public EVRTrackedCameraError GetVideoStreamTextureSize(uint nDeviceIndex,EVRTrackedCameraFrameType eFrameType,ref VRTextureBounds_t pTextureBounds,ref uint pnWidth,ref uint pnHeight)
{
    pnWidth = 0;
    pnHeight = 0;
    return FnTable.GetVideoStreamTextureSize(nDeviceIndex, eFrameType, ref pTextureBounds, ref pnWidth, ref pnHeight);
}
示例11: Submit
public abstract VRCompositorError Submit(Hmd_Eye eEye,IntPtr pTexture,ref VRTextureBounds_t pBounds);
示例12: VR_IVRCompositor_Submit
internal static extern VRCompositorError VR_IVRCompositor_Submit(IntPtr instancePtr, Hmd_Eye eEye, IntPtr pTexture, ref VRTextureBounds_t pBounds);
示例13: UpdateOverlay
// Pushes this overlay's full state (texture, bounds, transform, appearance and
// input settings) to the OpenVR overlay interface each call; hides the overlay
// when no texture is assigned.
public void UpdateOverlay(SteamVR vr)
{
if (texture != null)
{
// Show first; if the cached handle has gone stale, re-resolve it by key.
var error = vr.overlay.ShowOverlay(handle);
if (error == VROverlayError.InvalidHandle || error == VROverlayError.UnknownOverlay)
{
if (vr.overlay.FindOverlay(key, ref handle) != VROverlayError.None)
return;
}
vr.overlay.SetOverlayTexture(handle, texture.GetNativeTexturePtr());
vr.overlay.SetOverlayAlpha(handle, alpha);
vr.overlay.SetOverlayGamma(handle, gamma);
vr.overlay.SetOverlayWidthInMeters(handle, scale);
// uvOffset packs a UV offset in (x, y) and a UV scale in (z, w). Note vMin
// starts from 1 and vMax from 0 — presumably a V-flip for Unity's bottom-up
// texture origin; confirm against rendered output before changing.
var textureBounds = new VRTextureBounds_t();
textureBounds.uMin = (0 + uvOffset.x) * uvOffset.z;
textureBounds.vMin = (1 + uvOffset.y) * uvOffset.w;
textureBounds.uMax = (1 + uvOffset.x) * uvOffset.z;
textureBounds.vMax = (0 + uvOffset.y) * uvOffset.w;
vr.overlay.SetOverlayTextureBounds(handle, ref textureBounds);
var vecMouseScale = new HmdVector2_t();
vecMouseScale.v = new float[] { mouseScale.x, mouseScale.y };
vr.overlay.SetOverlayMouseScale(handle, ref vecMouseScale);
// Position relative to the topmost VR camera's origin, dividing out the
// origin's local scale and pushing the overlay out by 'distance' along z.
var vrcam = SteamVR_Render.Top();
if (vrcam != null && vrcam.origin != null)
{
var offset = new SteamVR_Utils.RigidTransform(vrcam.origin, transform);
offset.pos.x /= vrcam.origin.localScale.x;
offset.pos.y /= vrcam.origin.localScale.y;
offset.pos.z /= vrcam.origin.localScale.z;
offset.pos.z += distance;
var t = offset.ToHmdMatrix34();
vr.overlay.SetOverlayTransformAbsolute(handle, SteamVR_Render.instance.trackingSpace, ref t);
}
vr.overlay.SetOverlayVisibility(handle, visibility);
vr.overlay.SetOverlayInputMethod(handle, inputMethod);
// Curved or antialiased rendering requires the single high-quality slot.
if (curved || antialias)
highquality = true;
if (highquality)
{
vr.overlay.SetHighQualityOverlay(handle);
vr.overlay.SetOverlayFlag(handle, VROverlayFlags.Curved, curved);
vr.overlay.SetOverlayFlag(handle, VROverlayFlags.RGSS4X, antialias);
}
else if (vr.overlay.GetHighQualityOverlay() == handle)
{
// Release the high-quality slot if this overlay previously held it.
vr.overlay.SetHighQualityOverlay(OpenVR.k_ulOverlayHandleInvalid);
}
}
else
{
// No texture assigned: nothing to display.
vr.overlay.HideOverlay(handle);
}
}
示例14: CreateHiddenAreaMesh
// Builds a Unity mesh covering the HMD's hidden (never-visible) area. The
// native vertex data arrives in [0,1] UV space and is remapped into the
// [-1,1] rectangle described by the eye's texture bounds; a 12-vertex border
// skirt is appended to mask everything outside those bounds. Returns null
// when the runtime reports no hidden-area triangles.
public static Mesh CreateHiddenAreaMesh(HiddenAreaMesh_t src, VRTextureBounds_t bounds)
{
    if (src.unTriangleCount == 0)
        return null;

    // Unpack the native buffer: unTriangleCount * 3 vertices, 2 floats each.
    var raw = new float[src.unTriangleCount * 3 * 2]; //HmdVector2_t
    Marshal.Copy(src.pVertexData, raw, 0, raw.Length);

    var meshVertCount = (int)src.unTriangleCount * 3;
    var vertices = new Vector3[meshVertCount + 12]; // +12 border vertices
    var indices = new int[meshVertCount + 24];      // +24 border indices (8 triangles)

    // Texture-bound rectangle remapped from [0,1] into [-1,1].
    var x0 = 2.0f * bounds.uMin - 1.0f;
    var x1 = 2.0f * bounds.uMax - 1.0f;
    var y0 = 2.0f * bounds.vMin - 1.0f;
    var y1 = 2.0f * bounds.vMax - 1.0f;

    // Remap each hidden-area vertex; triangles are already in draw order.
    for (int i = 0, j = 0; i < meshVertCount; i++)
    {
        var px = Lerp(x0, x1, raw[j++]);
        var py = Lerp(y0, y1, raw[j++]);
        vertices[i] = new Vector3(px, py, 0.0f);
        indices[i] = i;
    }

    // Border skirt vertices: corners of the full [-1,1] quad plus the corners
    // of the texture-bound rectangle.
    var skirtBase = meshVertCount;
    var skirt = new Vector3[]
    {
        new Vector3(-1, -1, 0), new Vector3(x0, -1, 0),
        new Vector3(-1,  1, 0), new Vector3(x0,  1, 0),
        new Vector3(x1, -1, 0), new Vector3( 1, -1, 0),
        new Vector3(x1,  1, 0), new Vector3( 1,  1, 0),
        new Vector3(x0, y0, 0), new Vector3(x1, y0, 0),
        new Vector3(x0, y1, 0), new Vector3(x1, y1, 0),
    };
    skirt.CopyTo(vertices, skirtBase);

    // Eight border triangles, expressed as offsets into the skirt vertices.
    int[] skirtTris =
    {
        0, 1, 2,    2, 1, 3,
        4, 5, 6,    6, 5, 7,
        1, 4, 8,    8, 4, 9,
        10, 11, 3,  3, 11, 6,
    };
    for (int k = 0; k < skirtTris.Length; k++)
        indices[skirtBase + k] = skirtBase + skirtTris[k];

    var mesh = new Mesh();
    mesh.vertices = vertices;
    mesh.triangles = indices;
    // Prevent frustum culling from culling this mesh.
    mesh.bounds = new Bounds(Vector3.zero, new Vector3(float.MaxValue, float.MaxValue, float.MaxValue));
    return mesh;
}
示例15: Update
// Per-frame poll of the tracked-camera video stream: acquires the latest frame
// texture via the OpenGL or D3D11 path, then on first success creates the
// external Unity texture and caches the stream's valid UV bounds; afterwards
// it just repoints the existing texture.
void Update()
{
// Do the work at most once per rendered frame.
if (Time.frameCount == prevFrameCount)
return;
prevFrameCount = Time.frameCount;
if (videostream.handle == 0)
return;
var vr = SteamVR.instance;
if (vr == null)
return;
var trackedCamera = OpenVR.TrackedCamera;
if (trackedCamera == null)
return;
var nativeTex = System.IntPtr.Zero;
// NOTE(review): allocates a throwaway Texture2D(2, 2) on every frame until
// _texture exists (only the D3D11 branch actually uses it) — consider caching.
var deviceTexture = (_texture != null) ? _texture : new Texture2D(2, 2);
var headerSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(header.GetType());
if (vr.graphicsAPI == EGraphicsAPIConvention.API_OpenGL)
{
// Release the previously acquired GL texture before requesting the next one.
if (glTextureId != 0)
trackedCamera.ReleaseVideoStreamTextureGL(videostream.handle, glTextureId);
if (trackedCamera.GetVideoStreamTextureGL(videostream.handle, frameType, ref glTextureId, ref header, headerSize) != EVRTrackedCameraError.None)
return;
nativeTex = (System.IntPtr)glTextureId;
}
else
{
if (trackedCamera.GetVideoStreamTextureD3D11(videostream.handle, frameType, deviceTexture.GetNativeTexturePtr(), ref nativeTex, ref header, headerSize) != EVRTrackedCameraError.None)
return;
}
if (_texture == null)
{
// First successful frame: wrap the native texture and cache the UV bounds.
_texture = Texture2D.CreateExternalTexture((int)header.nWidth, (int)header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);
uint width = 0, height = 0;
var frameBounds = new VRTextureBounds_t();
if (trackedCamera.GetVideoStreamTextureSize(deviceIndex, frameType, ref frameBounds, ref width, ref height) == EVRTrackedCameraError.None)
{
// Account for textures being upside-down in Unity.
// (Each V is flipped without swapping min/max, so vMin may exceed vMax.)
frameBounds.vMin = 1.0f - frameBounds.vMin;
frameBounds.vMax = 1.0f - frameBounds.vMax;
this.frameBounds = frameBounds;
}
}
else
{
// Subsequent frames: repoint the existing external texture at the new frame.
_texture.UpdateExternalTexture(nativeTex);
}
}