本文整理汇总了C#中OpenCvSharp.CvMemStorage.Clear方法的典型用法代码示例。如果您正苦于以下问题:C# CvMemStorage.Clear方法的具体用法?C# CvMemStorage.Clear怎么用?C# CvMemStorage.Clear使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类OpenCvSharp.CvMemStorage
的用法示例。
在下文中一共展示了CvMemStorage.Clear方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: FaceDetect
public FaceDetect()
{
CheckMemoryLeak();
// CvHaarClassifierCascade, cvHaarDetectObjects
CvColor[] colors = new CvColor[]{
new CvColor(0,0,255),
new CvColor(0,128,255),
new CvColor(0,255,255),
new CvColor(0,255,0),
new CvColor(255,128,0),
new CvColor(255,255,0),
new CvColor(255,0,0),
new CvColor(255,0,255),
};
const double Scale = 1.14;
const double ScaleFactor = 1.0850;
const int MinNeighbors = 2;
using (IplImage img = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
{
using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
{
Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
}
using (var cascade = CvHaarClassifierCascade.FromFile(FilePath.Text.HaarCascade))
using (var storage = new CvMemStorage())
{
storage.Clear();
// 顔の検出
Stopwatch watch = Stopwatch.StartNew();
CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
watch.Stop();
Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);
// 検出した箇所にまるをつける
for (int i = 0; i < faces.Total; i++)
{
CvRect r = faces[i].Value.Rect;
CvPoint center = new CvPoint
{
X = Cv.Round((r.X + r.Width * 0.5) * Scale),
Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
};
int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
}
}
// ウィンドウに表示
CvWindow.ShowImages(img);
}
}
示例2: Squares
public Squares()
{
// create memory storage that will contain all the dynamic data
CvMemStorage storage = new CvMemStorage(0);
for (int i = 0; i < _names.Length; i++)
{
// load i-th image
using (IplImage img = new IplImage(_names[i], LoadMode.Color))
{
// create window and a trackbar (slider) with parent "image" and set callback
// (the slider regulates upper threshold, passed to Canny edge detector)
Cv.NamedWindow(WindowName, WindowMode.AutoSize);
// find and draw the squares
DrawSquares(img, FindSquares4(img, storage));
}
// clear memory storage - reset free space position
storage.Clear();
// wait for key.
// Also the function cvWaitKey takes care of event processing
int c = Cv.WaitKey(0);
if ((char)c == 27)
break;
}
Cv.DestroyWindow(WindowName);
}
示例3: FaceDetect
public System.Drawing.Bitmap FaceDetect(IplImage src)
{
// CvHaarClassifierCascade, cvHaarDetectObjects
// 얼굴을 검출하기 위해서 Haar 분류기의 캐스케이드를 이용한다
CvColor[] colors = new CvColor[]{
new CvColor(0,0,255),
new CvColor(0,128,255),
new CvColor(0,255,255),
new CvColor(0,255,0),
new CvColor(255,128,0),
new CvColor(255,255,0),
new CvColor(255,0,0),
new CvColor(255,0,255),
};
const double scale = 1.04;
const double scaleFactor = 1.139;
const int minNeighbors = 1;
using (IplImage img = src.Clone())
using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
{
// 얼굴 검출을 위한 화상을 생성한다.
using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
{
Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
}
using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Environment.CurrentDirectory + "\\" + "haarcascade_frontalface_alt.xml"))
using (CvMemStorage storage = new CvMemStorage())
{
storage.Clear();
// 얼굴을 검출한다.
CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(20, 20));
// 검출한 얼굴에 검은색 원을 덮어씌운다.
for (int i = 0; i < faces.Total; i++)
{
CvRect r = faces[i].Value.Rect;
CvPoint center = new CvPoint
{
X = Cv.Round((r.X + r.Width * 0.5) * scale),
Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
};
int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
img.Circle(center, radius, new CvColor(0, 0, 0), -1, LineType.Link8, 0);
}
}
FindFace = img.Clone();
//생성한 IplImage 화상을 비트맵으로 변환해 반환한다.
return FindFace.ToBitmap(System.Drawing.Imaging.PixelFormat.Format24bppRgb);
}
}
示例4: Update
// Update is called once per frame
void Update()
{
IplImage frame = Cv.QueryFrame(capture);
using (IplImage img = Cv.CloneImage(frame))
using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
{
using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
{
Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
}
using (CvMemStorage storage = new CvMemStorage())
{
storage.Clear();
CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(64, 64));
for (int i = 0; i < faces.Total; i++)
{
CvRect r = faces[i].Value.Rect;
CvPoint center = new CvPoint
{
X = Cv.Round((r.X + r.Width * 0.5) * Scale),
Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
};
int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
}
if (faces.Total > 0)
{
CvRect r = faces[0].Value.Rect;
facepos = new Vector2((r.X + r.Width / 2.0f) / CAPTURE_WIDTH, (r.Y + r.Height / 2.0f) / CAPTURE_HEIGHT);
}
else
{
facepos = Vector2.zero;
}
if(facepos.x >= 0.2 && facepos.x <= 0.7 && facepos.y >= 0.2 && facepos.x <= 0.7)
{
isFaceInCapture = true;
}
else
{
isFaceInCapture = false;
}
}
Cv.ShowImage("FaceDetect", img);
}
}
示例5: FaceDetect
public void FaceDetect()
{
// CvHaarClassifierCascade, cvHaarDetectObjects
// 顔を検出するためにHaar分類器のカスケードを用いる
CvColor[] colors = new CvColor[]{
new CvColor(0,0,255),
new CvColor(0,128,255),
new CvColor(0,255,255),
new CvColor(0,255,0),
new CvColor(255,128,0),
new CvColor(255,255,0),
new CvColor(255,0,0),
new CvColor(255,0,255),
};
const double Scale = 1.14;
const double ScaleFactor = 1.0850;
const int MinNeighbors = 2;
using (IplImage img = new IplImage(Application.dataPath + TestImageName, LoadMode.Color))
using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
{
// 顔検出用の画像の生成
using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
{
Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
}
//using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade)) // どっちでも可
using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Application.dataPath + TestTextName)) //
using (CvMemStorage storage = new CvMemStorage())
{
storage.Clear();
// 顔の検出
Stopwatch watch = Stopwatch.StartNew();
CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
watch.Stop();
// Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);
UnityEngine.Debug.Log("detection time = " + watch.ElapsedMilliseconds + " ms");
int i=0;
for (i = 0; i < faces.Total; i++)
{
CvRect r = faces[i].Value.Rect;
CvPoint center = new CvPoint
{
X = Cv.Round((r.X + r.Width * 0.5) * Scale),
Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
};
int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
}
}
// ウィンドウに表示
CvWindow.ShowImages(img);
}
}
示例6: EyeDetect
public EyeDetect()
{
CvColor[] colors = new CvColor[]{
new CvColor(0,0,255),
new CvColor(0,128,255),
new CvColor(0,255,255),
new CvColor(0,255,0),
new CvColor(255,128,0),
new CvColor(255,255,0),
new CvColor(255,0,0),
new CvColor(255,0,255),
};
const double Scale = 1.25;
const double ScaleFactor = 2.5;
const int MinNeighbors = 2;
using (CvCapture cap = CvCapture.FromCamera(1))
using (CvWindow w = new CvWindow("Eye Tracker"))
{
while (CvWindow.WaitKey(10) < 0)
{
using (IplImage img = cap.QueryFrame())
using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
{
using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
{
Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
}
using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml"))
using (CvMemStorage storage = new CvMemStorage())
{
storage.Clear();
Stopwatch watch = Stopwatch.StartNew();
CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
watch.Stop();
//Console.WriteLine("detection time = {0}msn", watch.ElapsedMilliseconds);
for (int i = 0; i < eyes.Total; i++)
{
CvRect r = eyes[i].Value.Rect;
CvPoint center = new CvPoint
{
X = Cv.Round((r.X + r.Width * 0.5) * Scale),
Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
};
int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
}
}
w.Image = img;
}
}
}
}
示例7: FaceDe
public static IplImage FaceDe(IplImage src)
{
CvColor[] colors = new CvColor[]{
new CvColor(0,0,255),
new CvColor(0,128,255),
new CvColor(0,255,255),
new CvColor(0,255,0),
new CvColor(255,128,0),
new CvColor(255,255,0),
new CvColor(255,0,0),
new CvColor(255,0,255),
};
const double Scale = 1.04;
const double ScaleFactor = 1.139;
const int MinNeighbors = 2;
//CvArr waraiotoko = Cv.LoadImage("j");
IplImage warai = Cv.LoadImage("C:\\Users\\tamago\\Documents\\Visual Studio 2010\\project\\facematch_sample\\facematch_sample\\warai_flat.png");
using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(src.Width / Scale), Cv.Round(src.Height / Scale)), BitDepth.U8, 1))
{
// 顔検出用の画像の生成
using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1))
{
Cv.CvtColor(src, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
}
//using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade)) // どっちでも可
using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Users\\tamago\\Documents\\Visual Studio 2010\\project\\facematch_sample\\facematch_sample\\haarcascade_frontalface_alt.xml"))
using (CvMemStorage storage = new CvMemStorage())
{
storage.Clear();
// 顔の検出
CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
// モザイク処理
for (int d = 0; d < faces.Total; d++)
{
CvRect r = faces[d].Value.Rect;
CvSize size = new CvSize(r.Width + 30, r.Height + 30);
using (IplImage img_laugh_resized = new IplImage(size, warai.Depth, warai.NChannels))
{
Cv.Resize(warai, img_laugh_resized, Interpolation.NearestNeighbor);
int i_max = (((r.X + img_laugh_resized.Width) > src.Width) ? src.Width - r.X : img_laugh_resized.Width);
int j_max = (((r.Y + img_laugh_resized.Height) > src.Height) ? src.Height - r.Y : img_laugh_resized.Height);
for (int j = 0; j < img_laugh_resized.Width; ++j)
{
for (int i = 0; i < img_laugh_resized.Height; ++i)
{
CvColor color = img_laugh_resized[i, j];
if (img_laugh_resized[i, j].Val1 != 0) src[r.Y + i, r.X + j] = color;//img_laugh_resized[i, j];
}
}
}
}
return src;
}
}
}
示例8: MarkerRecog
public void MarkerRecog()
{
IplImage img_gray = new IplImage(imgSrc.Size, BitDepth.U8, 1);
IplImage img_bin = new IplImage(img_gray.Size, BitDepth.U8, 1);
Cv.CvtColor(imgSrc, img_gray, ColorConversion.BgrToGray);
//추가부분 히스토그램 평활화
Cv.EqualizeHist(img_gray, img_gray);
//컬러영상을 흑백영상으로
Cv.Copy(img_gray, img_bin);
//트래시홀드
Cv.AdaptiveThreshold(img_bin, img_bin, 255, AdaptiveThresholdType.MeanC, ThresholdType.BinaryInv, 11, 2);
//Cv.AdaptiveThreshold(img_bin, img_bin, 255, AdaptiveThresholdType.MeanC, ThresholdType.BinaryInv, 71, 45);
//Cv.AdaptiveThreshold(img_bin, img_bin, 255, AdaptiveThresholdType.MeanC, ThresholdType.BinaryInv, 301, 45);
//개발 참고부분 - 이진영상 보기
/*
IplImage timg = new IplImage(new CvSize(800, 600), BitDepth.U8, 1);
Cv.Resize(img_bin, timg);
Cv.ShowImage("binimg", timg);
timg.ReleaseData();
*/
CvMemStorage storage = new CvMemStorage(0);
CvMemStorage storage2 = new CvMemStorage(0);
storage.Clear();
CvSeq<CvPoint> contours;
CvSeq<CvPoint> approxcontours;
int ncontours = Cv.FindContours(img_bin, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple);
TargetList.Clear();
try
{
if (ncontours > 0)
{
//검출된 contours 단순화하기
approxcontours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage2, ApproxPolyMethod.DP, 1.0, true);
//개발자 참고용
//tmpDrawContour(ref img_gray, ref approxcontours);
if (approxcontours != null)
{
FindMarkerInContour(ref approxcontours, ref storage2);
//tmpDrawBoxes(ref img_gray);
GetMarkerCode(ref img_gray);
contours.ClearSeq();
approxcontours.ClearSeq();
}
}
}
catch (Exception ee)
{
string msg = ee.Message;
}
Cv.ReleaseMemStorage(storage);
Cv.ReleaseMemStorage(storage2);
//storage.Dispose();
img_gray.ReleaseData();
img_bin.ReleaseData();
}