本文整理汇总了C#中IInputArray类的典型用法代码示例。如果您正苦于以下问题:C# IInputArray类的具体用法?C# IInputArray怎么用?C# IInputArray使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
IInputArray类属于命名空间,在下文中一共展示了IInputArray类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: StopSignDetector
/// <summary>
/// Create a stop sign detector from the given model image.
/// </summary>
/// <param name="stopSignModel">The image of the stop sign to be used as the matching model</param>
/// <exception cref="ArgumentException">Thrown when no SURF feature can be extracted from the red mask of the model image</exception>
public StopSignDetector(IInputArray stopSignModel)
{
    _detector = new SURF(500);

    // Extract features from the red pixels of the model only, so the matcher is
    // trained on the sign itself rather than on the background.
    using (Mat redMask = new Mat())
    {
        GetRedPixelMask(stopSignModel, redMask);
        _modelKeypoints = new VectorOfKeyPoint();
        _modelDescriptors = new Mat();
        _detector.DetectAndCompute(redMask, null, _modelKeypoints, _modelDescriptors, false);
        if (_modelKeypoints.Size == 0)
            // ArgumentException (not bare Exception): the failure is caused by the
            // caller-supplied model image, and callers catching Exception still match.
            throw new ArgumentException("No image feature has been found in the stop sign model", nameof(stopSignModel));
    }

    _modelDescriptorMatcher = new BFMatcher(DistanceType.L2);
    _modelDescriptorMatcher.Add(_modelDescriptors);

    // Reference octagon contour (unit-ish coordinates) used to verify that a
    // detected candidate contour has a stop-sign shape.
    _octagon = new VectorOfPoint(
        new Point[]
        {
            new Point(1, 0),
            new Point(2, 0),
            new Point(3, 1),
            new Point(3, 2),
            new Point(2, 3),
            new Point(1, 3),
            new Point(0, 2),
            new Point(0, 1)
        });
}
示例2: WarpAffine
/// <summary>
/// Applies an affine transformation to an image.
/// </summary>
/// <param name="src">Source image</param>
/// <param name="dst">Destination image</param>
/// <param name="mapMatrix">2x3 transformation matrix</param>
/// <param name="dsize">Size of the output image.</param>
/// <param name="interpMethod">Interpolation method</param>
/// <param name="warpMethod">Warp method</param>
/// <param name="borderMode">Pixel extrapolation method</param>
/// <param name="borderValue">A value used to fill outliers</param>
public static void WarpAffine(IInputArray src, IOutputArray dst, IInputArray mapMatrix, Size dsize, CvEnum.Inter interpMethod = CvEnum.Inter.Linear, CvEnum.Warp warpMethod = CvEnum.Warp.Default, CvEnum.BorderType borderMode = CvEnum.BorderType.Constant, MCvScalar borderValue = new MCvScalar())
{
    // Interpolation and warp options are OR-ed into the single flags word the
    // native API expects.
    int flags = (int)interpMethod | (int)warpMethod;
    using (InputArray iaSource = src.GetInputArray())
    using (OutputArray oaDestination = dst.GetOutputArray())
    using (InputArray iaTransform = mapMatrix.GetInputArray())
    {
        cveWarpAffine(iaSource, oaDestination, iaTransform, ref dsize, flags, borderMode, ref borderValue);
    }
}
示例3: FindStereoCorrespondence
/// <summary>
/// Computes disparity map for the input rectified stereo pair.
/// </summary>
/// <param name="left">The left single-channel, 8-bit image</param>
/// <param name="right">The right image of the same size and the same type</param>
/// <param name="disparity">The disparity map</param>
/// <param name="stream">Use a Stream to call the function asynchronously (non-blocking) or null to call the function synchronously (blocking).</param>
public void FindStereoCorrespondence(IInputArray left, IInputArray right, IOutputArray disparity, Stream stream = null)
{
// Wrap the managed arrays so their native handles can be handed to the unmanaged
// layer; the using chain releases the temporary wrappers even on exception.
using (InputArray iaLeft = left.GetInputArray())
using (InputArray iaRight = right.GetInputArray())
using (OutputArray oaDisparity = disparity.GetOutputArray())
// NOTE(review): 'stream' is passed directly here, while other wrappers in this file
// pass 'stream == null ? IntPtr.Zero : stream.Ptr' -- presumably the extern accepts
// a Stream (or an implicit conversion exists); confirm against the native declaration.
CudaInvoke.cudaStereoBMFindStereoCorrespondence(_ptr, iaLeft, iaRight, oaDisparity, stream);
}
示例4: Detect
/// <summary>
/// Find the good features to track
/// </summary>
/// <param name="image">The image on which corners are detected</param>
/// <param name="corners">Output buffer receiving the detected corners</param>
/// <param name="mask">Optional mask; null means no mask is applied</param>
/// <param name="stream">Use a Stream to call the function asynchronously (non-blocking) or null to call the function synchronously (blocking).</param>
public void Detect(IInputArray image, IOutputArray corners, IInputArray mask = null, Stream stream = null)
{
    using (InputArray iaImage = image.GetInputArray())
    using (OutputArray oaCorners = corners.GetOutputArray())
    // Bug fix: the original ternary was inverted -- it called mask.GetInputArray()
    // exactly when mask was null (NullReferenceException) and used the empty array
    // when a real mask was supplied. An empty native array stands in for "no mask".
    using (InputArray iaMask = (mask == null ? InputArray.GetEmpty() : mask.GetInputArray()))
        CudaInvoke.cudaCornersDetectorDetect(_ptr, iaImage, oaCorners, iaMask, stream);
}
示例5: Computer3DPointsFromStereoPair
/// <summary>
/// Given the left and right image, computer the disparity map and the 3D point cloud.
/// </summary>
/// <param name="left">The left image</param>
/// <param name="right">The right image</param>
/// <param name="outputDisparityMap">The left disparity map</param>
/// <param name="points">The 3D point cloud within a [-0.5, 0.5] cube</param>
private static void Computer3DPointsFromStereoPair(IInputArray left, IInputArray right, Mat outputDisparityMap, Mat points)
{
    // The image size drives the reprojection matrix below.
    Size imageSize;
    using (InputArray iaLeft = left.GetInputArray())
    {
        imageSize = iaLeft.GetSize();
    }

    using (StereoBM stereoSolver = new StereoBM())
    {
        stereoSolver.Compute(left, right, outputDisparityMap);

        float scale = Math.Max(imageSize.Width, imageSize.Height);

        // Construct a simple Q matrix; if you have a matrix from cvStereoRectify,
        // you should use that instead.
        double[,] qValues =
        {
            {1.0, 0.0, 0.0, -imageSize.Width/2},  // shift the x origin to image center
            {0.0, -1.0, 0.0, imageSize.Height/2}, // shift the y origin to image center and flip it upside down
            {0.0, 0.0, -1.0, 0.0},                // multiply the z value by -1.0
            {0.0, 0.0, 0.0, scale}                // scale the object's coordinates to within a [-0.5, 0.5] cube
        };
        using (Matrix<double> reprojection = new Matrix<double>(qValues))
        {
            CvInvoke.ReprojectImageTo3D(outputDisparityMap, points, reprojection, false, DepthType.Cv32F);
        }
    }
}
示例6: DetectMultiScale
/// <summary>
/// Detects objects of different sizes in the input image.
/// </summary>
/// <param name="image">Matrix of type CV_8U containing an image where objects should be detected.</param>
/// <param name="objects">Buffer to store detected objects (rectangles).</param>
/// <param name="stream">Use a Stream to call the function asynchronously (non-blocking) or null to call the function synchronously (blocking).</param>
public void DetectMultiScale(IInputArray image, IOutputArray objects, Stream stream = null)
{
    // A null stream means "run synchronously": the native side takes a zero handle.
    IntPtr streamPtr = (stream == null) ? IntPtr.Zero : stream.Ptr;
    using (InputArray iaImage = image.GetInputArray())
    using (OutputArray oaObjects = objects.GetOutputArray())
    {
        CudaInvoke.cudaCascadeClassifierDetectMultiScale(_ptr, iaImage, oaObjects, streamPtr);
    }
}
示例7: Compute
/// <summary>
/// Computes disparity map for the specified stereo pair
/// </summary>
/// <param name="matcher">The stereo matcher</param>
/// <param name="left">Left 8-bit single-channel image.</param>
/// <param name="right">Right image of the same size and the same type as the left one.</param>
/// <param name="disparity">Output disparity map. It has the same size as the input images. Some algorithms, like StereoBM or StereoSGBM compute 16-bit fixed-point disparity map (where each disparity value has 4 fractional bits), whereas other algorithms output 32-bit floating-point disparity map</param>
public static void Compute(this IStereoMatcher matcher, IInputArray left, IInputArray right, IOutputArray disparity)
{
    // Wrap each managed array so the native layer receives raw handles; the using
    // chain disposes the temporary wrappers deterministically.
    using (InputArray iaLeftImage = left.GetInputArray())
    {
        using (InputArray iaRightImage = right.GetInputArray())
        using (OutputArray oaDisparityMap = disparity.GetOutputArray())
        {
            CvStereoMatcherCompute(matcher.StereoMatcherPtr, iaLeftImage, iaRightImage, oaDisparityMap);
        }
    }
}
示例8: SetSVMDetector
/// <summary>
/// Set the SVM detector
/// </summary>
/// <param name="detector">The SVM detector</param>
public void SetSVMDetector(IInputArray detector)
{
    // Hand the coefficient array to the native HOG object; the wrapper is
    // disposed as soon as the call returns.
    using (InputArray iaCoefficients = detector.GetInputArray())
        CudaInvoke.cudaHOGSetSVMDetector(_ptr, iaCoefficients);
}
示例9: Imwrite
/// <summary>
/// Saves the image to the specified file. The image format is chosen depending on the filename extension, see cvLoadImage. Only 8-bit single-channel or 3-channel (with 'BGR' channel order) images can be saved using this function. If the format, depth or channel order is different, use cvCvtScale and cvCvtColor to convert it before saving, or use universal cvSave to save the image to XML or YAML format
/// </summary>
/// <param name="filename">The name of the file to be saved to</param>
/// <param name="image">The image to be saved</param>
/// <param name="parameters">The parameters</param>
/// <returns>true if success</returns>
public static bool Imwrite(String filename, IInputArray image, params int[] parameters)
{
using (Util.VectorOfInt vec = new Util.VectorOfInt())
{
// Forward any encoder parameters (e.g. compression level) to the native call.
if (parameters.Length > 0)
vec.Push(parameters);
using (CvString s = new CvString(filename))
using (InputArray iaImage = image.GetInputArray())
{
#if !(__IOS__ || __ANDROID__ || NETFX_CORE)
// Heuristic: presumably CvString's length is a byte count of the converted
// native string, so a mismatch with the UTF-16 character count indicates the
// file name contains non-ASCII characters -- TODO confirm CvString semantics.
bool containsUnicode = (s.Length != filename.Length);
if (containsUnicode &&
(Emgu.Util.Platform.OperationSystem != OS.MacOSX) &&
(Emgu.Util.Platform.OperationSystem != OS.Linux))
{
//Handle unicode in Windows platform
//Work around for Open CV ticket:
//https://github.com/Itseez/opencv/issues/4292
//https://github.com/Itseez/opencv/issues/4866
// Encode the image in memory and write the bytes from managed code, so the
// native layer never has to open the unicode path itself.
System.IO.FileInfo fi = new System.IO.FileInfo(filename);
using (VectorOfByte vb = new VectorOfByte())
{
CvInvoke.Imencode(fi.Extension, image, vb, parameters);
byte[] arr = vb.ToArray();
System.IO.File.WriteAllBytes(filename, arr);
return true;
}
}
else
#endif
return cveImwrite(s, iaImage, vec);
}
}
}
示例10: Inpaint
/// <summary>
/// Reconstructs the selected image area from the pixel near the area boundary. The function may be used to remove dust and scratches from a scanned photo, or to remove undesirable objects from still images or video.
/// </summary>
/// <param name="src">The input 8-bit 1-channel or 3-channel image</param>
/// <param name="mask">The inpainting mask, 8-bit 1-channel image. Non-zero pixels indicate the area that needs to be inpainted</param>
/// <param name="dst">The output image of the same format and the same size as input</param>
/// <param name="inpaintRadius">The radius of circular neighborhood of each point inpainted that is considered by the algorithm</param>
/// <param name="flags">The inpainting method</param>
public static void Inpaint(IInputArray src, IInputArray mask, IOutputArray dst, double inpaintRadius, CvEnum.InpaintType flags)
{
    // Convert the managed arrays into native wrappers before crossing into
    // unmanaged code; all wrappers are disposed when the call completes.
    using (InputArray inputImage = src.GetInputArray())
    {
        using (InputArray inpaintMask = mask.GetInputArray())
        using (OutputArray outputImage = dst.GetOutputArray())
        {
            cveInpaint(inputImage, inpaintMask, outputImage, inpaintRadius, flags);
        }
    }
}
示例11: Calc
/// <summary>
/// Calculates a dense optical flow using the given CUDA dense optical flow object.
/// </summary>
/// <param name="denseFlow">The CUDA dense optical flow object</param>
/// <param name="i0">First input image</param>
/// <param name="i1">Second input image of the same size and the same type as i0</param>
/// <param name="flow">Computed flow image</param>
/// <param name="stream">Use a Stream to call the function asynchronously (non-blocking) or null to call the function synchronously (blocking).</param>
public static void Calc(this ICudaDenseOpticalFlow denseFlow, IInputArray i0, IInputArray i1, IInputOutputArray flow, Stream stream = null)
{
    // A null stream means "run synchronously": the native side takes a zero handle.
    IntPtr streamPtr = (stream == null) ? IntPtr.Zero : stream.Ptr;
    using (InputArray iaFirst = i0.GetInputArray())
    using (InputArray iaSecond = i1.GetInputArray())
    using (InputOutputArray ioaFlow = flow.GetInputOutputArray())
    {
        cudaDenseOpticalFlowCalc(denseFlow.DenseOpticalFlowPtr, iaFirst, iaSecond, ioaFlow, streamPtr);
    }
}
示例12: Index
/*
/// <summary>
/// Create an auto-tuned flann index
/// </summary>
/// <param name="values">A row by row matrix of descriptors</param>
/// <param name="targetPrecision">Precision desired, use 0.9 if not sure</param>
/// <param name="buildWeight">build tree time weighting factor, use 0.01 if not sure</param>
/// <param name="memoryWeight">index memory weighting factor, use 0 if not sure</param>
/// <param name="sampleFraction">what fraction of the dataset to use for autotuning, use 0.1 if not sure</param>
public Index(IInputArray values, float targetPrecision, float buildWeight, float memoryWeight, float sampleFraction)
{
using (InputArray iaValues = values.GetInputArray())
_ptr = CvFlannIndexCreateAutotuned(iaValues, targetPrecision, buildWeight, memoryWeight, sampleFraction);
}*/
#endregion
/// <summary>
/// Perform k-nearest-neighbours (KNN) search
/// </summary>
/// <param name="queries">A row by row matrix of descriptors to be query for nearest neighbours</param>
/// <param name="indices">The result of the indices of the k-nearest neighbours</param>
/// <param name="squareDistances">The square of the Euclidean distance between the neighbours</param>
/// <param name="knn">Number of nearest neighbors to search for</param>
/// <param name="checks">The number of times the tree(s) in the index should be recursively traversed. A
/// higher value for this parameter would give better search precision, but also take more
/// time. If automatic configuration was used when the index was created, the number of
/// checks required to achieve the specified precision was also computed, in which case
/// this parameter is ignored </param>
public void KnnSearch(IInputArray queries, IOutputArray indices, IOutputArray squareDistances, int knn, int checks)
{
    // Wrap query/result arrays for the native FLANN call; wrappers are released
    // deterministically by the using blocks.
    using (InputArray iaQueryDescriptors = queries.GetInputArray())
    {
        using (OutputArray oaNeighbourIndices = indices.GetOutputArray())
        using (OutputArray oaNeighbourDistances = squareDistances.GetOutputArray())
        {
            CvFlannIndexKnnSearch(_ptr, iaQueryDescriptors, oaNeighbourIndices, oaNeighbourDistances, knn, checks);
        }
    }
}
示例13: Calc
/// <summary>
/// Calculates an optical flow.
/// </summary>
/// <param name="opticalFlow">The dense optical flow object</param>
/// <param name="i0">First 8-bit single-channel input image.</param>
/// <param name="i1">Second input image of the same size and the same type as prev.</param>
/// <param name="flow">Computed flow image that has the same size as prev and type CV_32FC2 </param>
public static void Calc(this IDenseOpticalFlow opticalFlow, IInputArray i0, IInputArray i1, IInputOutputArray flow)
{
    // Wrap the two frames and the flow buffer, then delegate to the native
    // dense-optical-flow implementation behind the interface pointer.
    using (InputArray iaPrevFrame = i0.GetInputArray())
    using (InputArray iaNextFrame = i1.GetInputArray())
    {
        using (InputOutputArray ioaFlowField = flow.GetInputOutputArray())
            CvInvoke.cveDenseOpticalFlowCalc(opticalFlow.DenseOpticalFlowPtr, iaPrevFrame, iaNextFrame, ioaFlowField);
    }
}
示例14: KnnMatch
/// <summary>
/// Find the k-nearest match
/// </summary>
/// <param name="queryDescriptors">An n x m matrix of descriptors to be query for nearest neighbors. n is the number of descriptor and m is the size of the descriptor</param>
/// <param name="trainDescriptors">The matrix of descriptors to be matched against (the train set)</param>
/// <param name="matches">Matches. Each matches[i] is k or less matches for the same query descriptor.</param>
/// <param name="k">Number of nearest neighbors to search for</param>
/// <param name="mask">Can be null if not needed. An n x 1 matrix. If 0, the query descriptor in the corresponding row will be ignored.</param>
/// <param name="compactResult">Passed through to the native matcher; presumably OpenCV's compactResult flag (when true, fully masked-out queries produce no entry) -- confirm against the native declaration</param>
public void KnnMatch(IInputArray queryDescriptors, IInputArray trainDescriptors, VectorOfVectorOfDMatch matches, int k, IInputArray mask = null, bool compactResult = false)
{
    using (InputArray iaQuery = queryDescriptors.GetInputArray())
    using (InputArray iaTrain = trainDescriptors.GetInputArray())
    {
        // An empty native array stands in for "no mask".
        using (InputArray iaMaskOrEmpty = (mask != null ? mask.GetInputArray() : InputArray.GetEmpty()))
            CudaInvoke.cveCudaDescriptorMatcherKnnMatch(_ptr, iaQuery, iaTrain, matches, k, iaMaskOrEmpty, compactResult);
    }
}
示例15: GetRedPixelMask
/// <summary>
/// Compute the red pixel mask for the given image.
/// A red pixel is a pixel where: (hue < 20 OR hue > 160) AND saturation > 10
/// </summary>
/// <param name="image">The color image to find red mask from</param>
/// <param name="mask">The red pixel mask</param>
private static void GetRedPixelMask(IInputArray image, IInputOutputArray mask)
{
// Allocate the scratch images as UMat when the destination is a UMat, so the
// whole pipeline stays on the same (e.g. OpenCL-backed) storage type.
bool useUMat;
using (InputOutputArray ia = mask.GetInputOutputArray())
useUMat = ia.IsUMat;
using (IImage hsv = useUMat ? (IImage)new UMat() : (IImage)new Mat())
using (IImage s = useUMat ? (IImage)new UMat() : (IImage)new Mat())
{
CvInvoke.CvtColor(image, hsv, ColorConversion.Bgr2Hsv);
// Channel 0 = hue (written into mask), channel 1 = saturation (written into s).
CvInvoke.ExtractChannel(hsv, mask, 0);
CvInvoke.ExtractChannel(hsv, s, 1);
//the mask for hue less than 20 or larger than 160
// InRange selects 20 <= hue <= 160 (the NON-red hues); the BitwiseNot below
// inverts that, keeping hue < 20 or hue > 160 -- red wraps around 0/180 in HSV.
using (ScalarArray lower = new ScalarArray(20))
using (ScalarArray upper = new ScalarArray(160))
CvInvoke.InRange(mask, lower, upper, mask);
CvInvoke.BitwiseNot(mask, mask);
//s is the mask for saturation of at least 10, this is mainly used to filter out white pixels
CvInvoke.Threshold(s, s, 10, 255, ThresholdType.Binary);
CvInvoke.BitwiseAnd(mask, s, mask, null);
}
}