

C# Size.ToString Method Code Examples

This article collects typical usage examples of the C# Size.ToString method. If you have been wondering what Size.ToString does, how to call it, or what idiomatic usage looks like, the curated examples below should help. You can also explore further usage examples of the Size types these methods belong to.


The following presents 14 code examples of the Size.ToString method, sorted by popularity by default.

Example 1: GetBounds

    public static bool GetBounds( Size size, ref HmdQuad_t pRect )
    {
        if (size == Size.Calibrated)
        {
            var initOpenVR = (!SteamVR.active && !SteamVR.usingNativeSupport);
            if (initOpenVR)
            {
                var error = EVRInitError.None;
                OpenVR.Init(ref error, EVRApplicationType.VRApplication_Other);
            }

            var chaperone = OpenVR.Chaperone;
            bool success = (chaperone != null) && chaperone.GetPlayAreaRect(ref pRect);
            if (!success)
                Debug.LogWarning("Failed to get Calibrated Play Area bounds!  Make sure you have tracking first, and that your space is calibrated.");

            if (initOpenVR)
                OpenVR.Shutdown();

            return success;
        }
        else
        {
            try
            {
                var str = size.ToString().Substring(1);
                var arr = str.Split(new char[] {'x'}, 2);

                // convert to half size in meters (from cm)
                var x = float.Parse(arr[0]) / 200;
                var z = float.Parse(arr[1]) / 200;

                pRect.vCorners0.v0 =  x;
                pRect.vCorners0.v1 =  0;
                pRect.vCorners0.v2 =  z;

                pRect.vCorners1.v0 =  x;
                pRect.vCorners1.v1 =  0;
                pRect.vCorners1.v2 = -z;

                pRect.vCorners2.v0 = -x;
                pRect.vCorners2.v1 =  0;
                pRect.vCorners2.v2 = -z;

                pRect.vCorners3.v0 = -x;
                pRect.vCorners3.v1 =  0;
                pRect.vCorners3.v2 =  z;

                return true;
            }
            catch {}
        }

        return false;
    }
Developer: CAOakleyII, Project: vive-project, Lines: 55, Source: SteamVR_PlayArea.cs
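
The non-calibrated branch above leans entirely on the enum's naming convention: fixed play-area sizes are named like _400x300, i.e. width x depth in centimeters, so ToString() recovers the dimensions once the leading underscore is stripped. A minimal standalone sketch of that trick (the stand-in enum below mirrors SteamVR_PlayArea.Size but is illustrative only):

    using System;

    enum Size { Calibrated, _400x300, _300x225, _200x150 }

    static class PlayAreaParseDemo
    {
        static void Main()
        {
            var size = Size._400x300;
            // ToString() yields "_400x300"; Substring(1) drops the underscore.
            var str = size.ToString().Substring(1);
            var arr = str.Split(new char[] { 'x' }, 2);
            // Convert centimeters to half-size in meters, as in GetBounds.
            var x = float.Parse(arr[0]) / 200;   // 2.0
            var z = float.Parse(arr[1]) / 200;   // 1.5
            Console.WriteLine($"half-extents: {x} m x {z} m");
        }
    }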

Example 2: GetBounds

	public static bool GetBounds( Size size, ref HmdQuad_t pRect )
	{
		if (size == Size.Calibrated)
		{
			var error = EVRInitError.None;
			if (!SteamVR.active)
			{
				OpenVR.Init(ref error, EVRApplicationType.VRApplication_Other);
				if (error != EVRInitError.None)
					return false;
			}

			var pChaperone = OpenVR.GetGenericInterface(OpenVR.IVRChaperone_Version, ref error);
			if (pChaperone == System.IntPtr.Zero || error != EVRInitError.None)
			{
				if (!SteamVR.active)
					OpenVR.Shutdown();
				return false;
			}

			var chaperone = new CVRChaperone(pChaperone);

			bool success = chaperone.GetPlayAreaRect( ref pRect );
			if (!success)
				Debug.LogWarning("Failed to get Calibrated Play Area bounds!  Make sure you have tracking first, and that your space is calibrated.");

			if (!SteamVR.active)
				OpenVR.Shutdown();

			return success;
		}
		else
		{
			try
			{
				var str = size.ToString().Substring(1);
				var arr = str.Split(new char[] {'x'}, 2);
				// convert to half size in meters (from cm)
				var x = float.Parse(arr[0]) / 200;
				var z = float.Parse(arr[1]) / 200;
				pRect.vCorners = new HmdVector3_t[ 4 ];
				pRect.vCorners[ 0 ].v = new float[ 3 ] { x, 0, z };
				pRect.vCorners[ 1 ].v = new float[ 3 ] { x, 0, -z };
				pRect.vCorners[ 2 ].v = new float[ 3 ] { -x, 0, -z };
				pRect.vCorners[ 3 ].v = new float[ 3 ] { -x, 0, z };
				return true;
			}
			catch {}
		}

		return false;
	}
Developer: Black4Blade, Project: NewtonVR, Lines: 52, Source: SteamVR_PlayArea.cs

Example 3: GenerateUrl

        private static string GenerateUrl(string question, Size size, int? maxImages)
        {
            var url = baseUrl;

            if (!string.IsNullOrEmpty(question))
            {
                url += "&q=" + question.Replace(" ", "%20");
            }

            if (!string.IsNullOrEmpty(size.ToString()))
            {
                url += "&imgsz=" + size;
            }

            if (maxImages != null)
            {
                url += "&rsz=" + maxImages;
            }

            return url;
        }
Developer: georgimanov, Project: WPF, Lines: 21, Source: DataPersister.cs
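
Worth noting: for an enum value, size.ToString() always returns a non-empty member name, so the IsNullOrEmpty guard above never skips the parameter; the method also evidently assumes baseUrl already carries a query string, since every parameter is appended with '&'. A minimal sketch of the implicit enum-to-string concatenation (ImgSize and the base URL are hypothetical stand-ins, not the sample's actual types):

    using System;

    enum ImgSize { Small, Medium, Large }

    static class UrlDemo
    {
        static void Main()
        {
            string url = "https://example.invalid/search?v=1.0";
            url += "&imgsz=" + ImgSize.Medium; // concatenation calls ToString()
            Console.WriteLine(url);            // ...&imgsz=Medium
        }
    }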

Example 4: SizeToStringAndFromString

 public void SizeToStringAndFromString()
 {
     var s = new Size(2.23f, 3.45f);
     string sizeString = s.ToString();
     Assert.AreEqual(s, new Size(sizeString));
     Assert.Throws<Size.InvalidNumberOfComponents>(() => new Size("10"));
     Assert.Throws<Size.InvalidNumberOfComponents>(() => new Size("abc"));
 }
Developer: hillwhite, Project: DeltaEngine, Lines: 8, Source: SizeTests.cs

Example 5: SizeToString

 public void SizeToString()
 {
     var s = new Size(2.23f, 3.45f);
     Assert.AreEqual("(2.23, 3.45)", s.ToString());
 }
Developer: hillwhite, Project: DeltaEngine, Lines: 5, Source: SizeTests.cs
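
Together, Examples 4 and 5 pin down a round-trip contract for DeltaEngine's Size: ToString emits "(w, h)" and the string constructor parses exactly that shape, throwing when it does not find two components. A minimal stand-in illustrating the same pattern (Size2 is a hypothetical struct, not DeltaEngine's actual type):

    using System;
    using System.Globalization;

    readonly struct Size2
    {
        public readonly float Width, Height;

        public Size2(float width, float height) { Width = width; Height = height; }

        // Parses the same "(w, h)" shape that ToString produces.
        public Size2(string text)
        {
            var parts = text.Trim('(', ')').Split(',');
            if (parts.Length != 2)
                throw new FormatException("expected exactly two components");
            Width = float.Parse(parts[0], CultureInfo.InvariantCulture);
            Height = float.Parse(parts[1], CultureInfo.InvariantCulture);
        }

        public override string ToString() =>
            string.Format(CultureInfo.InvariantCulture, "({0}, {1})", Width, Height);
    }

    static class RoundTripDemo
    {
        static void Main()
        {
            var s = new Size2(2.23f, 3.45f);
            Console.WriteLine(new Size2(s.ToString())); // (2.23, 3.45)
        }
    }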

Example 6: init


//......... some code omitted here .........



										//set cameraparam
										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
										Debug.Log ("camMatrix " + camMatrix.dump ());

										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());

										//calibration camera
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];


										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);

										//Adjust Unity Camera FOV
										for (int i = 0; i < ARCamera.Length; i++) {
												ARCamera [i].fieldOfView = (float)fovy [0];
										}
										

										
					
										markerDetector = new MarkerDetector (camMatrix, distCoeffs);


										//Marker Coordinate Initial Matrix
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());

										//OpenGL to Unity Coordinate System Convert Matrix
										//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
										Debug.Log ("invertZM " + invertZM.ToString ());


					
										initDone = true;
					
Developer: prlosana, Project: MarkerBasedARSample, Lines: 66, Source: WebCamTextureMarkerBasedARSample.cs
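
All of the OpenCV-based samples in this article construct the intrinsic matrix the same way: focal length f = max(width, height) in pixels and principal point at the image center. Under that centered-principal-point assumption, the vertical field of view reported by Calib3d.calibrationMatrixValues reduces to 2 * atan(h / (2f)). A dependency-free sketch of the arithmetic:

    using System;

    static class FovDemo
    {
        static void Main()
        {
            int width = 640, height = 480;       // assumed frame size
            double f = Math.Max(width, height);  // focal length in pixels, as in camMatrix above
            double fovyDeg = 2 * Math.Atan(height / (2 * f)) * 180 / Math.PI;
            Console.WriteLine($"fovy = {fovyDeg:F2} deg"); // 41.11 for 640x480
        }
    }

This is the value the samples then assign to ARCamera.fieldOfView.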

Example 7: GetCurrentValueCore


//......... some code omitted here .........
                case AnimationType.To: 

                    from = defaultOriginValue; 
                    to = _keyValues[0]; 

                    validateOrigin = true; 

                    break;

                case AnimationType.By: 

                    // According to the SMIL specification, a By animation is 
                    // always additive.  But we don't force this so that a 
                    // user can re-use a By animation and have it replace the
                    // animations that precede it in the list without having 
                    // to manually set the From value to the base value.

                    to          = _keyValues[0];
                    foundation  = defaultOriginValue; 

                    validateOrigin = true; 
 
                    break;
 
                case AnimationType.FromTo:

                    from    = _keyValues[0];
                    to      = _keyValues[1]; 

                    if (IsAdditive) 
                    { 
                        foundation = defaultOriginValue;
                        validateOrigin = true; 
                    }

                    break;
 
                case AnimationType.FromBy:
 
                    from    = _keyValues[0]; 
                    to      = AnimatedTypeHelpers.AddSize(_keyValues[0], _keyValues[1]);
 
                    if (IsAdditive)
                    {
                        foundation = defaultOriginValue;
                        validateOrigin = true; 
                    }
 
                    break; 

                default: 

                    Debug.Fail("Unknown animation type.");

                    break; 
            }
 
            if (validateOrigin 
                && !AnimatedTypeHelpers.IsValidAnimationValueSize(defaultOriginValue))
            { 
                throw new InvalidOperationException(
                    SR.Get(
                        SRID.Animation_Invalid_DefaultValue,
                        this.GetType(), 
                        "origin",
                        defaultOriginValue.ToString(CultureInfo.InvariantCulture))); 
            } 

            if (validateDestination 
                && !AnimatedTypeHelpers.IsValidAnimationValueSize(defaultDestinationValue))
            {
                throw new InvalidOperationException(
                    SR.Get( 
                        SRID.Animation_Invalid_DefaultValue,
                        this.GetType(), 
                        "destination", 
                        defaultDestinationValue.ToString(CultureInfo.InvariantCulture)));
            } 


            if (IsCumulative)
            { 
                double currentRepeat = (double)(animationClock.CurrentIteration - 1);
 
                if (currentRepeat > 0.0) 
                {
                    Size accumulator = AnimatedTypeHelpers.SubtractSize(to, from); 

                    accumulated = AnimatedTypeHelpers.ScaleSize(accumulator, currentRepeat);
                }
            } 

            // return foundation + accumulated + from + ((to - from) * progress) 
 
            return AnimatedTypeHelpers.AddSize(
                foundation, 
                AnimatedTypeHelpers.AddSize(
                    accumulated,
                    AnimatedTypeHelpers.InterpolateSize(from, to, progress)));
        } 
Developer: sjyanxin, Project: WPFSource, Lines: 101, Source: SizeAnimation.cs
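
The final return statement composes the result exactly as the comment says: foundation + accumulated + from + ((to - from) * progress). A scalar sketch of that composition, with doubles standing in for Size and names mirroring the locals above (not a real WPF API):

    using System;

    static class InterpolationDemo
    {
        static double GetCurrentValue(double foundation, double accumulated,
                                      double from, double to, double progress)
            => foundation + accumulated + from + (to - from) * progress;

        static void Main()
        {
            // Halfway through a FromTo animation from 10 to 20, no additive terms:
            Console.WriteLine(GetCurrentValue(0, 0, 10, 20, 0.5)); // 15
        }
    }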

Example 8: Start

				// Use this for initialization
				void Start ()
				{

						gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
						Camera.main.orthographicSize = imgTexture.height / 2;

		
						Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
		
						Utils.texture2DToMat (imgTexture, imgMat);
						Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

						//set cameraparam
						int max_d = Mathf.Max (imgMat.rows (), imgMat.cols ());
						Mat camMatrix = new Mat (3, 3, CvType.CV_64FC1);
						camMatrix.put (0, 0, max_d);
						camMatrix.put (0, 1, 0);
						camMatrix.put (0, 2, imgMat.cols () / 2.0f);
						camMatrix.put (1, 0, 0);
						camMatrix.put (1, 1, max_d);
						camMatrix.put (1, 2, imgMat.rows () / 2.0f);
						camMatrix.put (2, 0, 0);
						camMatrix.put (2, 1, 0);
						camMatrix.put (2, 2, 1.0f);
						Debug.Log ("camMatrix " + camMatrix.dump ());

						MatOfDouble distCoeffs = new MatOfDouble (0, 0, 0, 0);
						Debug.Log ("distCoeffs " + distCoeffs.dump ());


						//calibration camera
						Size imageSize = new Size (imgMat.cols (), imgMat.rows ());
						double apertureWidth = 0;
						double apertureHeight = 0;
						double[] fovx = new double[1];
						double[] fovy = new double[1];
						double[] focalLength = new double[1];
						Point principalPoint = new Point ();
						double[] aspectratio = new double[1];
		
						Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
		
						Debug.Log ("imageSize " + imageSize.ToString ());
						Debug.Log ("apertureWidth " + apertureWidth);
						Debug.Log ("apertureHeight " + apertureHeight);
						Debug.Log ("fovx " + fovx [0]);
						Debug.Log ("fovy " + fovy [0]);
						Debug.Log ("focalLength " + focalLength [0]);
						Debug.Log ("principalPoint " + principalPoint.ToString ());
						Debug.Log ("aspectratio " + aspectratio [0]);

						//Adjust Unity Camera FOV
						ARCamera.fieldOfView = (float)fovy [0];

//			ARCamera.projectionMatrix = ARCamera.projectionMatrix * Matrix4x4.Scale(new Vector3(-1, -1, 1));
//			gameObject.transform.localScale = new Vector3 (-1 * gameObject.transform.localScale.x, -1 * gameObject.transform.localScale.y, 1);


		 
						MarkerDetector markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);

						markerDetector.processFrame (imgMat, 1);


						//Marker Coordinate Initial Matrix
						Matrix4x4 lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
						Debug.Log ("lookAt " + lookAtM.ToString ());

						//Marker to Camera Coordinate System Convert Matrix
						if (markerDetector.getTransformations ().Count > 0) {
								Matrix4x4 transformationM = markerDetector.getTransformations () [0];
								Debug.Log ("transformationM " + transformationM.ToString ());

								//OpenGL to Unity Coordinate System Convert Matrix
								//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis. 
								Matrix4x4 invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
								Debug.Log ("invertZM " + invertZM.ToString ());

								Matrix4x4 worldToCameraM = lookAtM * transformationM * invertZM;
								Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());

								ARCamera.worldToCameraMatrix = worldToCameraM;
		
						} else {
								Debug.LogWarning ("Marker is not detected");
						}



						Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
		
						Utils.matToTexture2D (imgMat, texture);
		
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
				}
Developer: wlstks7, Project: MarkerBasedARSample, Lines: 96, Source: Texture2DMarkerBasedARSample.cs
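
Examples 6, 8, and 11 all map a detected marker pose into Unity with the same sandwich, worldToCameraM = lookAtM * transformationM * invertZM. The invertZM factor is just a Z-axis scale: OpenCV/OpenGL camera space looks down the negative Z axis, while Unity's forward is positive Z (per the worldToCameraMatrix docs linked in the comments). A minimal sketch of the composition; lookAtM and transformationM are identity placeholders here, since the samples compute them with a custom getLookAtMatrix helper and MarkerDetector:

    using UnityEngine;

    public class MarkerPoseDemo : MonoBehaviour
    {
        // Stand-ins for the samples' getLookAtMatrix result and detected pose.
        Matrix4x4 lookAtM = Matrix4x4.identity;
        Matrix4x4 transformationM = Matrix4x4.identity;

        void Start()
        {
            // Z-axis flip between OpenGL-style camera space (forward = -Z)
            // and Unity's convention (forward = +Z).
            Matrix4x4 invertZM = Matrix4x4.TRS(
                Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            Camera.main.worldToCameraMatrix = lookAtM * transformationM * invertZM;
        }
    }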

Example 9: ToStringTest

 public void ToStringTest()
 {
     Size sz = new Size(0, 0);
     Assert.Equal(string.Format(CultureInfo.CurrentCulture, "{{Width={0}, Height={1}}}", sz.Width, sz.Height), sz.ToString());
 }
Developer: jemmy655, Project: corefx, Lines: 5, Source: SizeTests.cs
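
The corefx test builds the expected string with the current culture, but for System.Drawing.Size the shape is always the brace-delimited pair {Width=w, Height=h}. A quick standalone check (assumes a System.Drawing.Primitives reference):

    using System.Drawing;

    static class DrawingSizeDemo
    {
        static void Main()
        {
            var sz = new Size(640, 480);
            System.Console.WriteLine(sz); // {Width=640, Height=480}
        }
    }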

Example 10: init


//......... some code omitted here .........

            //										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        Camera.main.orthographicSize = webCamTexture.height / 2;

                                        int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
                                        camMatrix = new Mat (3, 3, CvType.CV_64FC1);
                                        camMatrix.put (0, 0, max_d);
                                        camMatrix.put (0, 1, 0);
                                        camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
                                        camMatrix.put (1, 0, 0);
                                        camMatrix.put (1, 1, max_d);
                                        camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
                                        camMatrix.put (2, 0, 0);
                                        camMatrix.put (2, 1, 0);
                                        camMatrix.put (2, 2, 1.0f);

                                        Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
                                        double apertureWidth = 0;
                                        double apertureHeight = 0;
                                        double[] fovx = new double[1];
                                        double[] fovy = new double[1];
                                        double[] focalLength = new double[1];
                                        Point principalPoint = new Point ();
                                        double[] aspectratio = new double[1];

                                        Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                                        Debug.Log ("imageSize " + imageSize.ToString ());
                                        Debug.Log ("apertureWidth " + apertureWidth);
                                        Debug.Log ("apertureHeight " + apertureHeight);
                                        Debug.Log ("fovx " + fovx [0]);
                                        Debug.Log ("fovy " + fovy [0]);
                                        Debug.Log ("focalLength " + focalLength [0]);
                                        Debug.Log ("principalPoint " + principalPoint.ToString ());
                                        Debug.Log ("aspectratio " + aspectratio [0]);

                                        ARCamera.fieldOfView = (float)fovy [0];

                                        Debug.Log ("camMatrix " + camMatrix.dump ());

                                        distCoeffs = new MatOfDouble (0, 0, 0, 0);
                                        Debug.Log ("distCoeffs " + distCoeffs.dump ());

                                        lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
                                        Debug.Log ("lookAt " + lookAtM.ToString ());

                                        invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
Developer: behiever, Project: FaceTrackerSample, Lines: 67, Source: FaceTrackerARSample.cs

Example 11: init


//......... some code omitted here .........
						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
										#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
					
										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();


										//set cameraparam
										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
										Debug.Log ("camMatrix " + camMatrix.dump ());

										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());

										//calibration camera
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];


										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);

										//Adjust Unity Camera FOV
										for (int i = 0; i < ARCamera.Length; i++) {
												ARCamera [i].fieldOfView = (float)fovy [0];
										}
										

										
					
										markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);


										//Marker Coordinate Initial Matrix
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());

										//OpenGL to Unity Coordinate System Convert Matrix
										//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
										Debug.Log ("invertZM " + invertZM.ToString ());


										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
Developer: wlstks7, Project: MarkerBasedARSample, Lines: 101, Source: WebCamTextureMarkerBasedARSample.cs

Example 12: init


//......... some code omitted here .........
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

            // Start the camera
            webCamTexture.Play();
            while (true)
            {
                // On iOS, if you want to use webcamTexture.width and webcamTexture.height, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16.
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                    if (webCamTexture.didUpdateThisFrame)
                    {
                    #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                        while (webCamTexture.width <= 16)
                        {
                        webCamTexture.GetPixels32 ();
                        yield return new WaitForEndOfFrame ();
                        }
                    #endif
                #endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

                    updateLayout();

                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                    }

                    int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
                    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                    camMatrix.put(0, 0, max_d);
                    camMatrix.put(0, 1, 0);
                    camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
                    camMatrix.put(1, 0, 0);
                    camMatrix.put(1, 1, max_d);
                    camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
                    camMatrix.put(2, 0, 0);
                    camMatrix.put(2, 1, 0);
                    camMatrix.put(2, 2, 1.0f);

                    Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
                    double apertureWidth = 0;
                    double apertureHeight = 0;
                    double[] fovx = new double[1];
                    double[] fovy = new double[1];
                    double[] focalLength = new double[1];
                    Point principalPoint = new Point(); // principal point
                    double[] aspectratio = new double[1];

                    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                    Debug.Log("imageSize " + imageSize.ToString());
                    Debug.Log("apertureWidth " + apertureWidth);
                    Debug.Log("apertureHeight " + apertureHeight);
                    Debug.Log("fovx " + fovx[0]);
                    Debug.Log("fovy " + fovy[0]);
                    Debug.Log("focalLength " + focalLength[0]);
                    Debug.Log("--------------------------principalPoint");
                    Debug.Log("principalPoint " + principalPoint.ToString());
                    Debug.Log("--------------------------principalPoint");

                    Debug.Log("aspectratio " + aspectratio[0]);

                    ARCamera.fieldOfView = (float)fovy[0];

                    Debug.Log("camMatrix " + camMatrix.dump());

                    distCoeffs = new MatOfDouble(0, 0, 0, 0);
                    Debug.Log("distCoeffs " + distCoeffs.dump());

                    lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
                    Debug.Log("lookAt " + lookAtM.ToString());

                    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

                    screenOrientation = Screen.orientation;
                    initDone = true;
                    break;
                    }
                    else
                    {
                        yield return 0;
                    }
                }
            }
Developer: muripoLife, Project: EyesAuthentication, Lines: 101, Source: Eye.cs

Example 13: OnWebCamTextureToMatHelperInited

				/// <summary>
				/// Raises the web cam texture to mat helper inited event.
				/// </summary>
				public void OnWebCamTextureToMatHelperInited ()
				{
						Debug.Log ("OnWebCamTextureToMatHelperInited");
			
						Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
			
						colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
						texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);



						gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
			
						Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
			
						float width = 0;
						float height = 0;
			
						width = gameObject.transform.localScale.x;
						height = gameObject.transform.localScale.y;

						float imageScale = 1.0f;
						float widthScale = (float)Screen.width / width;
						float heightScale = (float)Screen.height / height;
						if (widthScale < heightScale) {
								Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
								imageScale = (float)Screen.height / (float)Screen.width;
						} else {
								Camera.main.orthographicSize = height / 2;
						}
			
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;




						grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
									
						cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
						if (cascade.empty ()) {
								Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
						}
									
									
						int max_d = Mathf.Max (webCamTextureMat.rows (), webCamTextureMat.cols ());
						camMatrix = new Mat (3, 3, CvType.CV_64FC1);
						camMatrix.put (0, 0, max_d);
						camMatrix.put (0, 1, 0);
						camMatrix.put (0, 2, webCamTextureMat.cols () / 2.0f);
						camMatrix.put (1, 0, 0);
						camMatrix.put (1, 1, max_d);
						camMatrix.put (1, 2, webCamTextureMat.rows () / 2.0f);
						camMatrix.put (2, 0, 0);
						camMatrix.put (2, 1, 0);
						camMatrix.put (2, 2, 1.0f);
									
						Size imageSize = new Size (webCamTextureMat.cols () * imageScale, webCamTextureMat.rows () * imageScale);
						double apertureWidth = 0;
						double apertureHeight = 0;
						double[] fovx = new double[1];
						double[] fovy = new double[1];
						double[] focalLength = new double[1];
						Point principalPoint = new Point ();
						double[] aspectratio = new double[1];
									
									
									
									
						Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
									
						Debug.Log ("imageSize " + imageSize.ToString ());
						Debug.Log ("apertureWidth " + apertureWidth);
						Debug.Log ("apertureHeight " + apertureHeight);
						Debug.Log ("fovx " + fovx [0]);
						Debug.Log ("fovy " + fovy [0]);
						Debug.Log ("focalLength " + focalLength [0]);
						Debug.Log ("principalPoint " + principalPoint.ToString ());
						Debug.Log ("aspectratio " + aspectratio [0]);
									
									
						if (Screen.height > Screen.width) {
								ARCamera.fieldOfView = (float)fovx [0];
						} else {
								ARCamera.fieldOfView = (float)fovy [0];
						}

									
						Debug.Log ("camMatrix " + camMatrix.dump ());
									
									
						distCoeffs = new MatOfDouble (0, 0, 0, 0);
						Debug.Log ("distCoeffs " + distCoeffs.dump ());
									
									
									
						lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
						Debug.Log ("lookAt " + lookAtM.ToString ());
//......... some code omitted here .........
Developer: mosnyder, Project: facerace, Lines: 101, Source: FaceTrackerARSample.cs

Example 14: init


//......... some code omitted here .........
			
			
						// Starts the camera
						webCamTexture.Play ();


						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
								#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();

										cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										if (cascade.empty ()) {
												Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
										}


										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
					
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];
					
					
					
					
										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
					
										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);
					
					
										ARCamera.fieldOfView = (float)fovy [0];
					
										Debug.Log ("camMatrix " + camMatrix.dump ());
					
					
										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());
					
					
					
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());
					
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));


										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
Developer: Thecontrarian, Project: unity-blink-detection, Lines: 101, Source: FaceTrackerARSample.cs


Note: The Size.ToString method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and redistribution or use should follow each project's license. Do not reproduce without permission.