本文整理汇总了C#中System.Image.CopyBlank方法的典型用法代码示例。如果您正苦于以下问题:C# Image.CopyBlank方法的具体用法?C# Image.CopyBlank怎么用?C# Image.CopyBlank使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类System.Image的用法示例。
在下文中一共展示了Image.CopyBlank方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Update
/// <summary>
/// Update the motion history with the specific image and the specific timestamp
/// </summary>
/// <param name="foregroundMask">The foreground of the image to be added to history</param>
/// <param name="timestamp">The time when the image is captured</param>
public void Update(Image<Gray, Byte> foregroundMask, DateTime timestamp)
{
_lastTime = timestamp;
TimeSpan ts = _lastTime.Subtract(_initTime);
_foregroundMask = foregroundMask;
if (_mhi == null) _mhi = new Image<Gray, float>(foregroundMask.Size);
if (_mask == null) _mask = foregroundMask.CopyBlank();
if (_orientation == null) _orientation = new Image<Gray, float>(foregroundMask.Size);
CvInvoke.cvUpdateMotionHistory(foregroundMask.Ptr, _mhi, ts.TotalSeconds, _mhiDuration);
double scale = 255.0 / _mhiDuration;
CvInvoke.cvConvertScale(_mhi.Ptr, _mask.Ptr, scale, (_mhiDuration - ts.TotalSeconds) * scale);
CvInvoke.cvCalcMotionGradient(_mhi.Ptr, _mask.Ptr, _orientation.Ptr, _maxTimeDelta, _minTimeDelta, 3);
}
示例2: Update
/// <summary>
/// Update the motion history with the specified image and timestamp.
/// The silhouette is derived as the thresholded absolute difference between
/// the incoming frame and the oldest frame retained in the ring buffer.
/// </summary>
/// <param name="image">The image to be added to history</param>
/// <param name="timestamp">The time when the image is captured</param>
public void Update(Image<Gray, Byte> image, DateTime timestamp)
{
    _lastTime = timestamp;
    // Elapsed time since initialization, in seconds, for cvUpdateMotionHistory.
    TimeSpan ts = _lastTime.Subtract(_initTime);
    // Keep at most _bufferMax frames: drop the oldest before enqueuing the new one.
    if (_buffer.Count == _bufferMax)
    {
        _buffer.Dequeue();
    }
    // NOTE(review): the queue stores a reference, not a clone — if the caller
    // reuses the image buffer between frames the history is corrupted; confirm.
    _buffer.Enqueue(image);
    // Lazily allocate the working images on the first frame, sized to the input.
    if (_silh == null) _silh = image.CopyBlank();
    if (_mhi == null) _mhi = new Image<Gray, float>(image.Size);
    if (_mask == null) _mask = image.CopyBlank();
    if (_orientation == null) _orientation = new Image<Gray, float>(image.Size);
    // Silhouette = |current - oldest buffered frame|. On the very first call the
    // queue contains only the frame just enqueued, so the diff is all zeros.
    CvInvoke.cvAbsDiff(image.Ptr, _buffer.Peek().Ptr, _silh.Ptr);
    // Binarize to {0, 1}: any pixel whose change exceeds _diffThresh counts as motion.
    CvInvoke.cvThreshold(_silh.Ptr, _silh.Ptr, _diffThresh, 1, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY);
    // Fold the silhouette into the motion-history image (MHI).
    CvInvoke.cvUpdateMotionHistory(_silh.Ptr, _mhi, ts.TotalSeconds, _mhiDuration);
    // Linearly map MHI timestamps into [0..255] and write the result into _mask
    // (oldest surviving motion -> 0, most recent motion -> 255).
    double scale = 255.0 / _mhiDuration;
    CvInvoke.cvConvertScale(_mhi.Ptr, _mask.Ptr, scale, (_mhiDuration - ts.TotalSeconds) * scale);
    // Compute per-pixel gradient orientation of the MHI (aperture size 3).
    // NOTE(review): the mask argument is an OUTPUT of cvCalcMotionGradient per
    // OpenCV docs, so _mask is overwritten here — verify intended usage.
    CvInvoke.cvCalcMotionGradient(_mhi.Ptr, _mask.Ptr, _orientation.Ptr, _maxTimeDelta, _minTimeDelta, 3);
}
示例3: RetainImportantQuantizedOrientations
/// <summary>
/// Take only those orientations that have at least <paramref name="minSameOrientations"/>
/// same-valued pixels in their 3x3 neighborhood. Performs angle transformation into
/// binary (one-hot) form ([0..7] -> [1, 2, 4, 8, ..., 128]) as well.
/// </summary>
/// <param name="qunatizedOrientionImg">Quantized orientation image where angles are represented by labels [0..GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS] (invalid orientation label included).</param>
/// <param name="minSameOrientations">Minimal number of same orientations for the 3x3 neighborhood. The range is: [0..9] (3x3 neighborhood).</param>
/// <returns>New image in which each surviving pixel holds the one-hot bit mask (1 &lt;&lt; dominantAngle); all other pixels stay 0.</returns>
private static Image<Gray, Byte> RetainImportantQuantizedOrientations(Image<Gray, Byte> qunatizedOrientionImg, int minSameOrientations)
{
    if (minSameOrientations < 0 || minSameOrientations > 9 /*3x3 neighborhood*/)
        throw new Exception("Minimal number of same orientations should be in: [0..9].");
    //debugImg = new Image<Hsv, byte>(orientDegImg.Width, orientDegImg.Height);
    //debugImg = null;
    int qOrinetStride = qunatizedOrientionImg.Stride;
    // Row padding in bytes. NOTE(review): assumes 1 byte per pixel (Gray/byte),
    // so Stride - Width is exactly the alignment gap — confirm image format.
    int qOrinetAllign = qunatizedOrientionImg.Stride - qunatizedOrientionImg.Width;
    // Start both pointers at pixel (1,1): the 3x3 window needs a 1-pixel border.
    byte* qOrinetUnfilteredPtr = (byte*)qunatizedOrientionImg.ImageData + qOrinetStride + 1;
    Image<Gray, Byte> quantizedFilteredOrient = qunatizedOrientionImg.CopyBlank();
    byte* qOrinetFilteredPtr = (byte*)quantizedFilteredOrient.ImageData + qOrinetStride + 1;
    // Both pointers advance in lockstep, which requires identical strides.
    //Debug.Assert(qunatizedOrientionImg.Stride == quantizedFilteredOrient.Stride);
    int imgWidth = qunatizedOrientionImg.Width;
    int imgHeight = qunatizedOrientionImg.Height;
    // Iterate the interior only; the 1-pixel border of the output stays 0.
    for (int j = 1; j < imgHeight - 1; j++)
    {
        for (int i = 1; i < imgWidth - 1; i++)
        {
            if (*qOrinetUnfilteredPtr != INVALID_QUANTIZED_ORIENTATION)
            {
                // Vote histogram over the 3x3 neighborhood (one bin per label,
                // including the invalid-orientation label).
                byte[] histogram = new byte[INVALID_QUANTIZED_ORIENTATION + 1];
                histogram[qOrinetUnfilteredPtr[-qOrinetStride - 1]]++; histogram[qOrinetUnfilteredPtr[-qOrinetStride + 0]]++; histogram[qOrinetUnfilteredPtr[-qOrinetStride + 1]]++;
                histogram[qOrinetUnfilteredPtr[-1]]++; histogram[qOrinetUnfilteredPtr[0]]++; histogram[qOrinetUnfilteredPtr[+1]]++;
                histogram[qOrinetUnfilteredPtr[+qOrinetStride - 1]]++; histogram[qOrinetUnfilteredPtr[+qOrinetStride + 0]]++; histogram[qOrinetUnfilteredPtr[+qOrinetStride + 1]]++;
                // Find the dominant valid orientation (the invalid label's bin is
                // deliberately excluded from the scan).
                int maxBinVotes = 0; byte quantizedAngle = 0;
                for (byte histBinIdx = 0; histBinIdx < GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS /*discard invalid orientation*/; histBinIdx++)
                {
                    if (histogram[histBinIdx] > maxBinVotes)
                    {
                        maxBinVotes = histogram[histBinIdx];
                        quantizedAngle = histBinIdx;
                    }
                }
                // Keep the pixel only if the dominant orientation has enough
                // support; encode it as a one-hot bit for fast OR-based spreading.
                if (maxBinVotes >= minSameOrientations)
                    *qOrinetFilteredPtr = (byte)(1 << quantizedAngle); //[1,2,4,8...128] (8 orientations)
                //*qOrinetFilteredPtr = (byte)(1 << *qOrinetUnfilteredPtr); //[1,2,4,8...128] (8 orientations)
                //debugImg[j, i] = new Hsv((*qOrinetFilteredPtr-1) * 35, 100, 100);
            }
            qOrinetUnfilteredPtr++;
            qOrinetFilteredPtr++;
        }
        // Skip the last pixel of this row, the alignment padding, and the first
        // pixel of the next row (the loop only visits the interior columns).
        qOrinetUnfilteredPtr += 1 + qOrinetAllign + 1;
        qOrinetFilteredPtr += 1 + qOrinetAllign + 1;
    }
    //magnitudeImg.Save("magnitude.bmp");
    //quantizedFilteredOrient.Save("quantizedImg.bmp");
    return quantizedFilteredOrient;
}
示例4: SpreadOrientations
/// <summary>
/// Spreads one-hot orientation bits over a neighborhood: the output pixel is the
/// bitwise OR of the source pixels in a <paramref name="neghborhood"/> x
/// <paramref name="neghborhood"/> window (offsets [0..neghborhood) in each axis).
/// NOTE(review): relies on OrImageBits (defined elsewhere) OR-ing a shifted
/// sub-image of src into dest — confirm its exact semantics.
/// </summary>
/// <param name="quantizedOrientationImage">Source image whose pixels are one-hot orientation bit masks.</param>
/// <param name="neghborhood">Spreading window size in pixels.</param>
/// <returns>New image containing the spread orientation bit masks.</returns>
private static Image<Gray, Byte> SpreadOrientations(Image<Gray, Byte> quantizedOrientationImage, int neghborhood)
{
    byte* srcImgPtr = (byte*)quantizedOrientationImage.ImageData;
    int imgStride = quantizedOrientationImage.Stride;
    Image<Gray, Byte> destImg = quantizedOrientationImage.CopyBlank();
    byte* destImgPtr = (byte*)destImg.ImageData;
    int imgHeight = destImg.Height;
    int imgWidth = destImg.Width;
    // For each (row, col) offset within the window, OR the source shifted by that
    // offset into the destination. The sub-image shrinks with the offset so the
    // shifted read never runs past the image bounds.
    for (int row = 0; row < neghborhood; row++)
    {
        int subImageHeight = imgHeight - row;
        for (int col = 0; col < neghborhood; col++)
        {
            OrImageBits(&srcImgPtr[col], destImgPtr,
                imgStride,
                imgWidth - col, subImageHeight);
        }
        // Advance the source base pointer one full row for the next row offset.
        srcImgPtr += imgStride;
    }
    return destImg;
}
示例5: Process
/// <summary>
/// Computes a disparity map for a stereo pair and publishes it via UpdateResult.
/// When the rig is calibrated, both frames are first rectified with the
/// calibration remap tables and the disparity is solved on the rectified pair;
/// otherwise the raw frames are used as-is.
/// </summary>
/// <param name="left">Left camera frame (grayscale).</param>
/// <param name="right">Right camera frame (grayscale).</param>
private void Process(Image<Gray, byte> left, Image<Gray, byte> right)
{
    if (configuration.CalibrationStatus == CalibrationStatus.Calibrated)
    {
        // FIX: the original computed the rectified images and then solved on the
        // raw inputs anyway, discarding the remap work (and leaking the
        // temporary images). Solve on the rectified pair and dispose the
        // temporaries once the disparity has been computed.
        using (var leftImage = left.CopyBlank())
        using (var rightImage = right.CopyBlank())
        {
            CvInvoke.Remap(left, leftImage, calibration.LeftMapX, calibration.LeftMapY,
                Inter.Linear);
            CvInvoke.Remap(right, rightImage, calibration.RightMapX, calibration.RightMapY,
                Inter.Linear);
            UpdateResult(disparity.Solve(leftImage, rightImage));
        }
        return;
    }
    var disparityMap = disparity.Solve(left, right);
    UpdateResult(disparityMap);
}