

C# Mat.channels Method Code Examples

This article collects typical usage examples of the Mat.channels method in C#. If you are wondering what C# Mat.channels does, how to call it, or want to see it used in real code, the curated examples below may help. You can also explore further usage examples of the Mat class.


Eight code examples of the Mat.channels method are shown below, sorted by popularity by default.
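
Before walking through the examples, the sketch below shows what Mat.channels () actually reports: the number of interleaved channels per pixel, which is encoded in the Mat's type. This is a minimal illustrative snippet, assuming the OpenCVForUnity package is imported; the ChannelsDemo class is made up for this article and does not come from any of the projects below.

using OpenCVForUnity;
using UnityEngine;

public class ChannelsDemo : MonoBehaviour
{
    void Start ()
    {
        Mat gray = new Mat (480, 640, CvType.CV_8UC1);  // 1 channel
        Mat rgb  = new Mat (480, 640, CvType.CV_8UC3);  // 3 channels
        Mat rgba = new Mat (480, 640, CvType.CV_8UC4);  // 4 channels

        Debug.Log (gray.channels ()); // 1
        Debug.Log (rgb.channels ());  // 3
        Debug.Log (rgba.channels ()); // 4

        // The pattern used throughout the examples below: size a managed
        // pixel buffer as cols * rows * channels before calling Mat.get.
        byte[] buffer = new byte[rgba.cols () * rgba.rows () * rgba.channels ()];
        rgba.get (0, 0, buffer);
    }
}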

Example 1: convert_image
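From a face-tracking project: this helper branches on Mat.channels () to turn grayscale or RGB input into a single-channel CV_32F image, then applies a log transform.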

Mat convert_image (Mat im)
{
    Mat I = null;
    if (im.channels () == 1) {
        if (im.type () != CvType.CV_32F) {
            I = new Mat ();
            im.convertTo (I, CvType.CV_32F);
        } else {
            I = im;
        }
    } else if (im.channels () == 3) {
        Mat img = new Mat ();
        // 3-channel input, so use COLOR_RGB2GRAY (COLOR_RGBA2GRAY expects a 4-channel source).
        Imgproc.cvtColor (im, img, Imgproc.COLOR_RGB2GRAY);
        if (img.type () != CvType.CV_32F) {
            I = new Mat ();
            img.convertTo (I, CvType.CV_32F);
        } else {
            I = img;
        }
    } else {
        Debug.Log ("Unsupported image type!");
        return null; // bail out instead of passing a null Mat to Core.add below
    }
    // Log transform: I = log (I + 1).
    Core.add (I, new Scalar (1.0), I);
    Core.log (I, I);
    return I;
}
Author: mosnyder, Project: facerace, Lines: 28, Source: PatchModel.cs

Example 2: Start
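A HoughLinesP sample: each detected segment is stored as four integers (x1, y1, x2, y2), so lines.channels () is part of sizing the array that receives the results.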

// Use this for initialization
void Start ()
{
    Texture2D imgTexture = Resources.Load ("chessboard") as Texture2D;

    Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC3);

    Utils.texture2DToMat (imgTexture, imgMat);
    Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

    Mat grayMat = new Mat ();
    Imgproc.cvtColor (imgMat, grayMat, Imgproc.COLOR_RGB2GRAY);

    Imgproc.Canny (grayMat, grayMat, 50, 200);

    Mat lines = new Mat ();

    Imgproc.HoughLinesP (grayMat, lines, 1, Mathf.PI / 180, 50, 50, 10);

    // Debug.Log ("lines toString " + lines.ToString ());
    // Debug.Log ("lines dump " + lines.dump ());

    // Each HoughLinesP result is a 4-channel element (x1, y1, x2, y2),
    // so the receiving buffer is sized cols * rows * channels.
    int[] linesArray = new int[lines.cols () * lines.rows () * lines.channels ()];
    lines.get (0, 0, linesArray);

    for (int i = 0; i < linesArray.Length; i = i + 4) {
        Core.line (imgMat, new Point (linesArray [i + 0], linesArray [i + 1]), new Point (linesArray [i + 2], linesArray [i + 3]), new Scalar (255, 0, 0), 2);
    }

    Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
    Utils.matToTexture2D (imgMat, texture);

    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
}
Author: prlosana, Project: OpenCVForUnity, Lines: 34, Source: HoughLinesPSample.cs

Example 3: detect
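A cascade face detector: Mat.channels () decides whether the input still needs a grayscale conversion before histogram equalization and detectMultiScale.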

public List<Point[]> detect (Mat im, float scaleFactor, int minNeighbours, OpenCVForUnity.Size minSize)
{
    // Convert image to grayscale.
    Mat gray = null;
    if (im.channels () == 1) {
        gray = im;
    } else {
        gray = new Mat ();
        Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY);
    }

    using (Mat equalizeHistMat = new Mat ())
    using (MatOfRect faces = new MatOfRect ()) {
        Imgproc.equalizeHist (gray, equalizeHistMat);

        detector.detectMultiScale (equalizeHistMat, faces, scaleFactor, minNeighbours, 0
            | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
            | Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size ());

        if (faces.rows () < 1) {
            return new List<Point[]> ();
        }
        return convertMatOfRectToPoints (faces);
    }
}
Author: mosnyder, Project: facerace, Lines: 29, Source: FaceDetector.cs

Example 4: init
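A webcam-initialization coroutine from a comic-filter sample; Mat.channels () sizes the grayPixels and maskPixels byte buffers used later when processing frames.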

private IEnumerator init ()
{
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
        grayMat.Dispose ();
        lineMat.Dispose ();
        maskMat.Dispose ();
        bgMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera.
    webCamTexture.Play ();

    while (true) {
        // On iOS you have to wait until webCamTexture.didUpdateThisFrame is true before using webCamTexture.width and webCamTexture.height; otherwise both values equal 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
            #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
            while (webCamTexture.width <= 16) {
                webCamTexture.GetPixels32 ();
                yield return new WaitForEndOfFrame ();
            }
            #endif
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];

            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            lineMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            maskMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            // Create a striped background.
            bgMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1, new Scalar (255));
            for (int i = 0; i < bgMat.rows () * 2.5f; i = i + 4) {
                Core.line (bgMat, new Point (0, 0 + i), new Point (bgMat.cols (), -bgMat.cols () + i), new Scalar (0), 1);
            }

            dstMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            // Buffer length = cols * rows * channels.
            grayPixels = new byte[grayMat.cols () * grayMat.rows () * grayMat.channels ()];
            maskPixels = new byte[maskMat.cols () * maskMat.rows () * maskMat.channels ()];

            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            updateLayout ();

            screenOrientation = Screen.orientation;
            initDone = true;

            break;
        } else {
            yield return 0;
        }
    }
}
Author: chaffman, Project: OpenCVForUnity, Lines: 100, Source: ComicFilterSample.cs

Example 5: OnWebCamTextureToMatHelperInited
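The same comic-filter setup as Example 4, built on a WebCamTextureToMatHelper; again Mat.channels () determines the pixel-buffer lengths.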

/// <summary>
/// Raises the web cam texture to mat helper inited event.
/// </summary>
public void OnWebCamTextureToMatHelperInited ()
{
    Debug.Log ("OnWebCamTextureToMatHelperInited");

    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

    colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
    texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

    grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
    lineMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
    maskMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);

    // Create a striped background.
    bgMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1, new Scalar (255));
    for (int i = 0; i < bgMat.rows () * 2.5f; i = i + 4) {
        Imgproc.line (bgMat, new Point (0, 0 + i), new Point (bgMat.cols (), -bgMat.cols () + i), new Scalar (0), 1);
    }

    dstMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);

    grayPixels = new byte[grayMat.cols () * grayMat.rows () * grayMat.channels ()];
    maskPixels = new byte[maskMat.cols () * maskMat.rows () * maskMat.channels ()];

    gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);

    Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    float width = gameObject.transform.localScale.x;
    float height = gameObject.transform.localScale.y;

    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale) {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    } else {
        Camera.main.orthographicSize = height / 2;
    }

    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
}
Author: ygx2011, Project: OpenCVForUnity, Lines: 47, Source: ComicFilterSample.cs

Example 6: track
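A face-tracker update step: Mat.channels () gates the grayscale conversion before the shape model is fitted at each pyramid level.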

public bool track (Mat im, FaceTrackerParams p)
{
    if (points.Count <= 0)
        return false;

    // Convert image to grayscale.
    Mat gray = null;
    if (im.channels () == 1) {
        gray = im;
    } else {
        gray = new Mat ();
        Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY);
    }

    // Initialise.
    // if (!tracking)
    //     points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);

    for (int i = 0; i < points.Count; i++) {
        if (points [i].Length != smodel.npts ())
            return false;

        // Fit.
        for (int level = 0; level < p.ssize.Count; level++) {
            points [i] = fit (gray, points [i], p.ssize [level], p.robust, p.itol, p.ftol);
        }
    }

    return true;
}
Author: mosnyder, Project: facerace, Lines: 31, Source: FaceTracker.cs

Example 7: init
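Another variant of the comic-filter initialization coroutine from Example 4; the tail of the method is truncated in the source listing.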

private IEnumerator init ()
{
    if (webCamTexture != null) {
        webCamTexture.Stop ();
        initDone = false;

        rgbaMat.Dispose ();
        grayMat.Dispose ();
        lineMat.Dispose ();
        maskMat.Dispose ();
        bgMat.Dispose ();
    }

    // Checks how many and which cameras are available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null) {
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
    }

    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera.
    webCamTexture.Play ();

    while (true) {
        // On iOS you have to wait until webCamTexture.didUpdateThisFrame is true before using webCamTexture.width and webCamTexture.height; otherwise both values equal 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        #else
        if (webCamTexture.didUpdateThisFrame) {
        #endif

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];

            rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            lineMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
            maskMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            // Create a striped background.
            bgMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1, new Scalar (255));
            for (int i = 0; i < bgMat.rows () * 2.5f; i = i + 4) {
                Core.line (bgMat, new Point (0, 0 + i), new Point (bgMat.cols (), -bgMat.cols () + i), new Scalar (0), 1);
            }

            dstMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

            // Buffer length = cols * rows * channels.
            grayPixels = new byte[grayMat.cols () * grayMat.rows () * grayMat.channels ()];
            maskPixels = new byte[maskMat.cols () * maskMat.rows () * maskMat.channels ()];

            texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
            #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
            #endif
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // if (webCamTexture.videoRotationAngle == 270)
            //     scaleY = -1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
// ......... the rest of the method is omitted in the source listing .........
Author: NotYours180, Project: OpenCVForUnity, Lines: 101, Source: ComicFilterSample.cs

Example 8: Start
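A Kinect color-stream sample that sets up sepia, pixelize, and comic filter resources; Mat.channels () sizes the comic filter's gray and mask buffers.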

void Start ()
{
    sensor = KinectSensor.GetDefault ();

    if (sensor != null) {
        reader = sensor.ColorFrameSource.OpenReader ();

        FrameDescription frameDesc = sensor.ColorFrameSource.CreateFrameDescription (ColorImageFormat.Rgba);

        texture = new Texture2D (frameDesc.Width, frameDesc.Height, TextureFormat.RGBA32, false);
        data = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];

        if (!sensor.IsOpen) {
            sensor.Open ();
        }

        rgbaMat = new Mat (texture.height, texture.width, CvType.CV_8UC4);

        Debug.Log ("rgbaMat " + rgbaMat.ToString ());

        gameObject.transform.localScale = new Vector3 (texture.width, texture.height, 1);
        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        Camera.main.orthographicSize = texture.height / 2;

        // Sepia
        sepiaKernel = new Mat (4, 4, CvType.CV_32F);
        sepiaKernel.put (0, 0, /* R */ 0.189f, 0.769f, 0.393f, 0f);
        sepiaKernel.put (1, 0, /* G */ 0.168f, 0.686f, 0.349f, 0f);
        sepiaKernel.put (2, 0, /* B */ 0.131f, 0.534f, 0.272f, 0f);
        sepiaKernel.put (3, 0, /* A */ 0.000f, 0.000f, 0.000f, 1f);

        // Pixelize
        pixelizeIntermediateMat = new Mat ();
        pixelizeSize0 = new Size ();

        // Comic
        comicGrayMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);
        comicLineMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);
        comicMaskMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);

        // Create a striped background.
        comicBgMat = new Mat (texture.height, texture.width, CvType.CV_8UC1, new Scalar (255));
        for (int i = 0; i < comicBgMat.rows () * 2.5f; i = i + 4) {
            Core.line (comicBgMat, new Point (0, 0 + i), new Point (comicBgMat.cols (), -comicBgMat.cols () + i), new Scalar (0), 1);
        }

        comicDstMat = new Mat (texture.height, texture.width, CvType.CV_8UC1);

        comicGrayPixels = new byte[comicGrayMat.cols () * comicGrayMat.rows () * comicGrayMat.channels ()];
        comicMaskPixels = new byte[comicMaskMat.cols () * comicMaskMat.rows () * comicMaskMat.channels ()];
    } else {
        UnityEngine.Debug.LogError ("No ready Kinect found!");
    }
}
Author: ly774508966, Project: KinectWithOpenCVForUnitySample, Lines: 63, Source: ColorFrameSample.cs
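
A pattern that recurs in Examples 4, 5, and 8 is pairing channels () with Mat.get and Mat.put to shuttle pixels through a managed byte array. The helper below consolidates that pattern as a hypothetical utility: the MatBufferUtil and InvertInPlace names are invented for illustration, and the byte[] overloads of get/put assume an 8-bit (CV_8UCn) Mat.

using OpenCVForUnity;

public static class MatBufferUtil
{
    // Round-trips a Mat through a managed byte array sized
    // cols * rows * channels, exactly like the grayPixels and
    // maskPixels allocations in the examples above.
    public static void InvertInPlace (Mat m)
    {
        byte[] pixels = new byte[m.cols () * m.rows () * m.channels ()];
        m.get (0, 0, pixels);  // Mat -> managed array
        for (int i = 0; i < pixels.Length; i++) {
            pixels [i] = (byte)(255 - pixels [i]);
        }
        m.put (0, 0, pixels);  // managed array -> Mat
    }
}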


Note: The Mat.channels method examples in this article were compiled by 纯净天空 from GitHub/MSDocs and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by many developers; copyright in the source code remains with the original authors. Please consult each project's license before redistributing or using the code. Do not reproduce this page without permission.