本文整理汇总了C#中OVRCameraRig类的典型用法代码示例。如果您正苦于以下问题:C# OVRCameraRig类的具体用法?C# OVRCameraRig怎么用?C# OVRCameraRig使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
OVRCameraRig类所属命名空间在原文中未注明。在下文中一共展示了OVRCameraRig类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Awake
void Awake()
{
    // Cache the CharacterController used for locomotion; warn so a
    // misconfigured prefab is easy to diagnose in the console.
    Controller = gameObject.GetComponent<CharacterController>();
    if (Controller == null)
        Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

    // We use OVRCameraRig to set rotations to cameras,
    // and to be influenced by rotation. Exactly one rig child is expected.
    OVRCameraRig[] cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
    if (cameraControllers.Length == 0)
        Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
    else if (cameraControllers.Length > 1)
        Debug.LogWarning("OVRPlayerController: More than 1 OVRCameraRig attached.");
    else
        CameraController = cameraControllers[0];

    // Optional child transform that defines the forward direction of movement.
    DirXform = transform.Find("ForwardDirection");
    if (DirXform == null)
        Debug.LogWarning("OVRPlayerController: ForwardDirection game object not found. Do not use.");

#if UNITY_ANDROID && !UNITY_EDITOR
    // On device, re-align the player when the HMD pose is recentered.
    OVRManager.display.RecenteredPose += ResetOrientation;
#endif
}
示例2: Start
// Use this for initialization
// Initialization: cache scene references and start the periodic UI callback.
void Start()
{
    endIt = true;
    controller = GameObject.Find("GM");
    ovrRig = GetComponent<OVRCameraRig>();
    // Invoke "StartUI" every second, starting one second from now.
    InvokeRepeating("StartUI", 1, 1);
#if UNITY_EDITOR
    // Editor-only default value for power.
    power = 50;
#endif
}
示例3: Awake
void Awake()
{
    // Locate the camera rig so we can use it to get the current camera
    // transform each frame. Exactly one OVRCameraRig child is expected.
    OVRCameraRig[] cameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
    if (cameraRigs.Length == 0)
        Debug.LogWarning("OVRCamParent: No OVRCameraRig attached.");
    else if (cameraRigs.Length > 1)
        Debug.LogWarning("OVRCamParent: More than 1 OVRCameraRig attached.");
    else
        CameraRig = cameraRigs[0];
}
示例4: Awake
void Awake()
{
    // We use OVRCameraRig to set rotations to cameras,
    // and to be influenced by rotation.
    // Note: searches this GameObject only (GetComponents), not its children.
    OVRCameraRig[] cameraControllers = gameObject.GetComponents<OVRCameraRig>();
    if (cameraControllers.Length == 0)
        Debug.LogWarning("PlayerController : No OVRCameraRig attached.");
    else if (cameraControllers.Length > 1)
        Debug.LogWarning("PlayerController : More than 1 OVRCameraRig attached.");
    else
        CameraController = cameraControllers[0];

    // Remember the initial yaw so later rotation is relative to it.
    YRotation = transform.rotation.eulerAngles.y;
}
示例5: Awake
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// Find camera controller
OVRCameraRig[] cameraControllers;
cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
if (cameraControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
}
else if (cameraControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
}
else
{
cameraController = cameraControllers[0];
}
// Find player controller
OVRPlayerController[] playerControllers;
playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();
if (playerControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
}
else if (playerControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
}
else
{
playerController = playerControllers[0];
}
StartCoroutine(hideGUIOnTime());
}
示例6: Awake
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// Find camera controller
OVRCameraRig[] CameraControllers;
CameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
if(CameraControllers.Length == 0)
Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
else if (CameraControllers.Length > 1)
Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
else{
CameraController = CameraControllers[0];
#if USE_NEW_GUI
OVRUGUI.CameraController = CameraController;
#endif
}
// Find player controller
OVRPlayerController[] PlayerControllers;
PlayerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();
if(PlayerControllers.Length == 0)
Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
else if (PlayerControllers.Length > 1)
Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
else{
PlayerController = PlayerControllers[0];
#if USE_NEW_GUI
OVRUGUI.PlayerController = PlayerController;
#endif
}
#if USE_NEW_GUI
// Create canvas for using new GUI
NewGUIObject = new GameObject();
NewGUIObject.name = "OVRGUIMain";
NewGUIObject.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
RectTransform r = NewGUIObject.AddComponent<RectTransform>();
r.sizeDelta = new Vector2(100f, 100f);
r.localScale = new Vector3(0.001f, 0.001f, 0.001f);
r.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
r.localEulerAngles = Vector3.zero;
Canvas c = NewGUIObject.AddComponent<Canvas>();
c.renderMode = RenderMode.World;
c.pixelPerfect = false;
#endif
}
示例7: Start
// Use this for initialization
void Start () {
ovrRig = GetComponent<OVRCameraRig> ();
}
示例8: UpdateTransform
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
/// </summary>
public void UpdateTransform(OVRCameraRig rig)
{
Transform root = CameraRig.trackingSpace;
Transform centerEye = CameraRig.centerEyeAnchor;
Vector3 euler = transform.rotation.eulerAngles;
float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;
euler.y += rotateInfluence;
if (HmdRotatesY)
{
Vector3 prevPos = root.position;
Quaternion prevRot = root.rotation;
print("X: " + centerEye.rotation.eulerAngles.x + " Y: " + centerEye.rotation.eulerAngles.y + " Z: " + centerEye.rotation.eulerAngles.z);
if ((centerEye.rotation.eulerAngles.y > 20.0f && centerEye.rotation.eulerAngles.y < 160.0f))
{
//transform.rotation = Quaternion.Euler(euler);
//transform.rotation = Quaternion.Euler(0.0f, 0.05f, 0.0f);
}
else
{
//transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);
}
root.position = prevPos;
root.rotation = prevRot;
}
//if (HmdRotatesY)
//{
// Vector3 prevPos = root.position;
// Quaternion prevRot = root.rotation;
// transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);
// root.position = prevPos;
// root.rotation = prevRot;
//}
}
示例9: OnInspectorGUI
public override void OnInspectorGUI()
{
serializedObject.Update();
EditorGUILayout.PropertyField(defaultPosition, new GUIContent("Default Position (meters)", "Head position before tracking starts"));
//EditorGUILayout.PropertyField(skeletonManager, new GUIContent("skeletonManager", "Can be None"));
if(serializedObject.targetObject is RUISTracker)
{
trackerScript = (RUISTracker) serializedObject.targetObject;
if(trackerScript)
ovrCameraRig = trackerScript.gameObject.GetComponentInChildren<OVRCameraRig>();
if(ovrCameraRig)
{
riftFound = true;
}
else
{
riftFound = false;
}
}
if(!riftFound)
{
EditorGUILayout.PropertyField(pickRotationSource, new GUIContent( "Pick Rotation Source", "If disabled, then the Rotation "
+ "Tracker is same as Position Tracker"));
}
EditorGUILayout.Space();
EditorGUILayout.PropertyField(headPositionInput, new GUIContent("Position Tracker", "Device that tracks the head position"));
EditorGUI.indentLevel += 2;
switch (headPositionInput.enumValueIndex)
{
case (int)RUISTracker.HeadPositionSource.OculusDK2:
EditorGUILayout.PropertyField(positionOffsetOculus, new GUIContent("Position Offset (meters)", "Adds an position offset to Oculus Rift's "
+ "tracked position. This should be zero when using Oculus Rift positional "
+ "tracking together with Kinect skeleton tracking."));
break;
case (int)RUISTracker.HeadPositionSource.Kinect1:
case (int)RUISTracker.HeadPositionSource.Kinect2:
positionPlayerID.intValue = Mathf.Clamp(positionPlayerID.intValue, 0, maxKinectSkeletons - 1);
if(positionNoiseCovarianceKinect.floatValue < minNoiseCovariance)
positionNoiseCovarianceKinect.floatValue = minNoiseCovariance;
EditorGUILayout.PropertyField(positionPlayerID, new GUIContent("Kinect Player Id", "Between 0 and 3"));
EditorGUILayout.PropertyField(positionJoint, new GUIContent("Joint", "Head is the best joint for tracking head position"));
EditorGUILayout.PropertyField(positionOffsetKinect, new GUIContent("Position Offset (meters)", "Kinect joint's position in "
+ "the tracked object's local coordinate system. Set these values "
+ "according to the joint's offset from the tracked object's "
+ "origin (head etc.). When using Kinect for head tracking, then zero "
+ "vector is the best choice if head is the position Joint."));
EditorGUILayout.PropertyField(filterPositionKinect, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
+ "tracking. Recommended for Kinect."));
if(filterPositionKinect.boolValue)
EditorGUILayout.PropertyField(positionNoiseCovarianceKinect, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: "
+ "a bigger value means smoother results but a slower "
+ "response to changes."));
break;
case (int)RUISTracker.HeadPositionSource.PSMove:
positionPSMoveID.intValue = Mathf.Clamp(positionPSMoveID.intValue, 0, maxPSMoveControllers - 1);
if(positionNoiseCovariancePSMove.floatValue < minNoiseCovariance)
positionNoiseCovariancePSMove.floatValue = minNoiseCovariance;
EditorGUILayout.PropertyField(positionPSMoveID, new GUIContent("PS Move ID", "Between 0 and 3"));
EditorGUILayout.PropertyField(positionOffsetPSMove, new GUIContent("Position Offset (meters)", "PS Move controller's position in "
+ "the tracked object's local coordinate system. Set these values "
+ "according to the controller's offset from the tracked object's "
+ "origin (head etc.)."));
EditorGUILayout.PropertyField(filterPositionPSMove, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
+ "tracking. Best left disabled for PS Move."));
if(filterPositionPSMove.boolValue)
EditorGUILayout.PropertyField(positionNoiseCovariancePSMove, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: "
+ "a bigger value means smoother results but a slower "
+ "response to changes."));
break;
case (int)RUISTracker.HeadPositionSource.RazerHydra:
if(positionNoiseCovarianceHydra.floatValue < minNoiseCovariance)
positionNoiseCovarianceHydra.floatValue = minNoiseCovariance;
EditorGUILayout.PropertyField(isRazerBaseMobile, new GUIContent("Moving Base Station", "Enable this if the Razer Hydra base station is "
+ "attached to something that is moving (e.g. Kinect tracked player's belt)"));
EditorGUILayout.PropertyField(positionRazerID, new GUIContent("Razer Hydra ID", "Either LEFT or RIGHT"));
EditorGUILayout.PropertyField(positionOffsetHydra, new GUIContent("Position Offset (meters)", "Razer Hydra controller's position in "
+ "the tracked object's local coordinate system. Set these values "
+ "according to the controller's offset from the tracked object's "
+ "origin (head etc.)."));
EditorGUILayout.PropertyField(filterPositionHydra, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
+ "tracking. Best left disabled for Razer Hydra."));
if(filterPositionHydra.boolValue)
EditorGUILayout.PropertyField(positionNoiseCovarianceHydra, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: "
+ "a bigger value means smoother results but a slower "
+ "response to changes."));
break;
case (int)RUISTracker.HeadPositionSource.InputTransform:
if(positionNoiseCovarianceTransform.floatValue < minNoiseCovariance)
positionNoiseCovarianceTransform.floatValue = minNoiseCovariance;
EditorGUILayout.PropertyField(positionInput, new GUIContent("Input Transform", "All other position trackers are supported "
+ "through this transform. Drag and drop here a transform "
//.........这里部分代码省略.........
示例10: UpdateTransform
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
/// </summary>
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
/// </summary>
public void UpdateTransform(OVRCameraRig rig)
{
    if (!HmdRotatesY)
        return;

    Transform trackingRoot = CameraRig.trackingSpace;
    Transform eyeCenter = CameraRig.centerEyeAnchor;

    // Save the tracking-space pose, rotate the player body to the HMD's yaw
    // only, then restore the tracking space so it is unaffected by the turn.
    Vector3 savedPosition = trackingRoot.position;
    Quaternion savedRotation = trackingRoot.rotation;

    float hmdYaw = eyeCenter.rotation.eulerAngles.y;
    transform.rotation = Quaternion.Euler(0.0f, hmdYaw, 0.0f);

    trackingRoot.position = savedPosition;
    trackingRoot.rotation = savedRotation;
}
示例11: SetOVRCameraController
/// <summary>
/// Sets the OVR camera controller.
/// </summary>
/// <param name="cameraController">Camera controller.</param>
public void SetOVRCameraController(ref OVRCameraRig cameraController)
{
CameraController = cameraController;
UIAnchor = CameraController.centerEyeAnchor;
}
示例12: Start
// Initialization: set the starting position index and cache the door-game
// manager plus the local camera rig.
void Start()
{
    posI = 2;
    gm = GameObject.Find("GM").GetComponent<GM_Doors>();
    ovrRig = this.GetComponent<OVRCameraRig>();
}
示例13: Awake
// Initializes tracker state: locates the RUIS coordinate system, constructs the
// Kalman filters, applies the default pose, detects an Oculus Rift, and — when
// rotation source tracking is not picked separately — mirrors the position
// source settings onto the rotation source.
void Awake()
{
    // Scene-wide coordinate system used by RUIS; may be null if none exists.
    coordinateSystem = MonoBehaviour.FindObjectOfType(typeof(RUISCoordinateSystem)) as RUISCoordinateSystem;
    // Start from an identity local pose.
    localPosition = Vector3.zero;
    localRotation = Quaternion.identity;
    rawRotation = Quaternion.identity;
    // Kalman filter for head position (3-dimensional state/measurement).
    filterPos = new KalmanFilter();
    filterPos.initialize(3,3);
    filterPos.skipIdenticalMeasurements = true;
    // filterRot = new KalmanFilter();
    // filterRot.initialize(4,4);
    // Mobile Razer Hydra base filtering
    hydraBaseFilterPos = new KalmanFilter();
    hydraBaseFilterPos.initialize(3,3);
    hydraBaseFilterPos.skipIdenticalMeasurements = true;
    // hydraBaseFilterRot = new KalmanFilter();
    // hydraBaseFilterRot.initialize(4,4);
    // NOTE(review): filterRot's construction is commented out above; this line
    // throws NullReferenceException unless filterRot is assigned elsewhere — verify.
    filterRot.skipIdenticalMeasurements = true;
    // Yaw Drift Corrector invocations in Awake()
    filterDrift = new KalmanFilter();
    filterDrift.initialize(2,2);
    // Apply the default pose until tracking data arrives.
    transform.localPosition = defaultPosition;
    eyeCenterPosition = defaultPosition;
    measuredHeadPosition = defaultPosition;
    hydraBasePosition = new Vector3(0, 0, 0);
    hydraBaseRotation = Quaternion.identity;
    // Use Oculus Rift rotation only when a rig exists and an HMD is present.
    ovrCameraRig = GetComponentInChildren<OVRCameraRig>();
    if(ovrCameraRig != null && OVRManager.display != null && OVRManager.display.isPresent)
    {
        useOculusRiftRotation = true;
    }
    else
    {
        useOculusRiftRotation = false;
    }
    // Enforce rotation settings if rotation source is set to be same as position source
    if (!pickRotationSource)
    {
        switch (headPositionInput)
        {
            case HeadPositionSource.Kinect1:
            {
                headRotationInput = HeadRotationSource.Kinect1;
                rotationPlayerID = positionPlayerID;
                rotationJoint = positionJoint;
                break;
            }
            case HeadPositionSource.Kinect2:
            {
                headRotationInput = HeadRotationSource.Kinect2;
                rotationPlayerID = positionPlayerID;
                rotationJoint = positionJoint;
                break;
            }
            case HeadPositionSource.PSMove:
            {
                headRotationInput = HeadRotationSource.PSMove;
                rotationPSMoveID = positionPSMoveID;
                break;
            }
            case HeadPositionSource.RazerHydra:
            {
                headRotationInput = HeadRotationSource.RazerHydra;
                rotationRazerID = positionRazerID;
                break;
            }
            case HeadPositionSource.InputTransform:
            {
                headRotationInput = HeadRotationSource.InputTransform;
                rotationInput = positionInput;
                break;
            }
            case HeadPositionSource.None:
            {
                headRotationInput = HeadRotationSource.None;
                break;
            }
        }
    }
    // Position filtering starts disabled; presumably enabled later by settings — verify.
    filterPosition = false;
}
示例14: Awake
void Awake()
{
    networkView = gameObject.GetComponent<NetworkView>();
    bullet = Resources.Load<HomingBullet>(bulletPath);
    Controller = gameObject.GetComponent<CharacterController>();
    if (Controller == null)
        Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

    // We use OVRCameraRig to set rotations to cameras,
    // and to be influenced by rotation.
    // Guard the indexing: the original indexed [0] unconditionally and threw
    // IndexOutOfRangeException when no rig was attached.
    OVRCameraRig[] cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
    if (cameraControllers.Length == 0)
        Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
    else
        CameraController = cameraControllers[0]; // use the first rig when several exist

    // Forward direction is this object's own transform (no child lookup here).
    DirXform = transform;

    // Ask every peer (including ourselves) to respawn this player.
    networkView.RPC("Respawn", RPCMode.All, Network.player);

#if UNITY_ANDROID && !UNITY_EDITOR
    // On device, re-align the player when the HMD pose is recentered.
    OVRManager.display.RecenteredPose += ResetOrientation;
#endif
}
示例15: Start
// Use this for initialization
void Start()
{
Application.targetFrameRate = TargetFrameRate;
if(OculusCamera==null) //Try to find OVRCameraRig component
OculusCamera = GameObject.FindObjectOfType<OVRCameraRig> ();
if (Configuration == null)
Configuration = gameObject.AddComponent<TELUBeeConfiguration>();
if(TargetMaterial!=null)
Init();
RobotConnector.AddDependencyNode (this);
GStreamerCore.Ref ();
}