本文整理汇总了C++中cvAbsDiff函数的典型用法代码示例。如果您正苦于以下问题:C++ cvAbsDiff函数的具体用法?C++ cvAbsDiff怎么用?C++ cvAbsDiff使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了cvAbsDiff函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: CheckImage
// Compares `image` against the image stored at `file` and returns the number
// of pixels that differ (0 == identical). Returns 1 if the file cannot be read.
// Used as a regression check in a test harness (trsWrite is the test logger).
static int CheckImage(IplImage* image, char* file, char* /*funcname*/)
{
//printf("loading %s\n", file );
// Load the reference image as 3-channel color (flag 1 == CV_LOAD_IMAGE_COLOR).
IplImage* read = cvLoadImage( file, 1 );
if( !read )
{
trsWrite( ATS_CON | ATS_LST, "can't read image\n" );
return 1;
}
int err = 0;
// Disabled debug aid: visualizes the per-pixel difference in HighGUI windows.
#if 0
{
IplImage* temp = cvCloneImage( read );
cvAbsDiff( image, read, temp );
cvThreshold( temp, temp, 0, 255, CV_THRESH_BINARY );
cvvNamedWindow( "Original", 0 );
cvvNamedWindow( "Diff", 0 );
cvvShowImage( "Original", read );
cvvShowImage( "Diff", temp );
cvvWaitKey(0);
cvvDestroyWindow( "Original" );
cvvDestroyWindow( "Diff" );
}
#endif
// Reuse `read` in place: mark every differing channel value with 1,
// then count them. The L1 norm sums the 0/1 mask; dividing by 3 converts
// per-channel counts into (approximate) per-pixel counts.
cvAbsDiff( image, read, read );
cvThreshold( read, read, 0, 1, CV_THRESH_BINARY );
err = cvRound( cvNorm( read, 0, CV_L1 ))/3;
cvReleaseImage( &read );
return err;
}
示例2: cvAbsDiff
// Three-frame differencing: motion is where BOTH |f1-f2| and |f2-f3| exceed
// the threshold. Returns the binary (dilated) motion mask, or NULL until
// three frames have been captured. The returned image is owned by this object.
IplImage* CMotionDetector::calculus() {
    // Nothing to compute until the third frame has arrived.
    if (m_pImageTrois == NULL) {
        return NULL;
    }

    // Lazily allocate the scratch and result buffers from the first frame's
    // geometry (these guards are independent of each other).
    if (m_pImageResult == NULL) {
        m_pImageResult = cvCloneImage(m_pImageUn);
    }
    if (m_pImageDeuxTrois == NULL) {
        m_pImageDeuxTrois = cvCloneImage(m_pImageUn);
    }
    if (m_pImageUnDeux == NULL) {
        m_pImageUnDeux = cvCloneImage(m_pImageUn);
    }

    // Pairwise absolute differences, then their intersection.
    cvAbsDiff(m_pImageUn,   m_pImageDeux,  m_pImageUnDeux);
    cvAbsDiff(m_pImageDeux, m_pImageTrois, m_pImageDeuxTrois);
    cvAnd(m_pImageUnDeux, m_pImageDeuxTrois, m_pImageResult);

    // Binarize and thicken the detected regions.
    cvThreshold(m_pImageResult, m_pImageResult, 50, 255, CV_THRESH_BINARY);
    cvDilate(m_pImageResult, m_pImageResult, 0, 4);

    // Notify listeners (Qt signal) before handing the mask back.
    emit calculusNewImage(m_pImageResult);
    return m_pImageResult;
}
示例3: cvZero
/* Standard Deviation */
// Computes the per-pixel sample standard deviation of the stored frames
// relative to the background model and returns it as an 8-bit image
// (m_imgStandardDeviation, owned by this object).
// NOTE(review): assumes mFrameNumber > 1, as the original code did — with
// mFrameNumber == 1 the scale below divides by zero.
IplImage* motionDetection::getStandardDeviationFrame(void) {
    // Reset the 32F accumulator of squared differences.
    cvZero(mSum);
    for (int i = 0; i < mFrameNumber; ++i) {
        // mTmp8U <= | frame[i] - background model |
        cvAbsDiff(mpFrame[i], m_imgBackgroundModel, mTmp8U);
        // uchar -> float
        cvConvert(mTmp8U, mTmp);
        // mTmp = mTmp * mTmp
        cvPow(mTmp, mTmp, 2.0);
        // mSum += mTmp
        cvAdd(mSum, mTmp, mSum);
    }
    // Sample variance: mTmp <= mSum / (mFrameNumber - 1).
    // Replaces the former hand-written per-pixel loop: cvConvertScale performs
    // the same element-wise scaling in a single vectorized call.
    cvConvertScale(mSum, mTmp, 1.0 / (mFrameNumber - 1), 0);
    // standard deviation = sqrt(variance)
    cvPow(mTmp, mTmp, 0.5);
    // float -> uchar
    cvConvert(mTmp, m_imgStandardDeviation);
    return m_imgStandardDeviation;
}
示例4: detect_object
// Background-subtraction foreground detector: converts `image` to gray,
// diffs it against the running background model `pBkMat`, binarizes into
// `pFrImg`, cleans the mask morphologically, and updates the background
// with a running average. `nFrmNum` is a file-level frame counter.
void detect_object(IplImage *image, IplImage *pBkImg, IplImage *pFrImg, CvMat *pFrameMat, CvMat *pBkMat, CvMat *pFrMat,int thre_limit)
{
nFrmNum++;
cvCvtColor(image, pFrImg, CV_BGR2GRAY);
cvConvert(pFrImg, pFrameMat);
// Gaussian smoothing (3x3) to suppress sensor noise before differencing.
cvSmooth(pFrameMat, pFrameMat, CV_GAUSSIAN, 3, 0, 0);
// Absolute difference between current frame and background model.
cvAbsDiff(pFrameMat, pBkMat, pFrMat);
// Binarize the foreground mask at thre_limit.
cvThreshold(pFrMat, pFrImg,thre_limit, 255.0, CV_THRESH_BINARY);
/* Morphological filtering */
// (Disabled variant using an explicit 2x2 structuring element. NOTE(review):
// it frees the kernel with `delete` instead of cvReleaseStructuringElement —
// another reason it stays commented out.)
//IplConvKernel* element = cvCreateStructuringElementEx(2, 2, 0, 0, CV_SHAPE_RECT);
//cvErode(pFrImg, pFrImg,element, 1); // erode
//delete element;
//element = cvCreateStructuringElementEx(2, 2, 1, 1, CV_SHAPE_RECT);
//cvDilate(pFrImg, pFrImg, element, 1); // dilate
//delete element;
cvErode(pFrImg, pFrImg,0, 1); // erode: remove speckle noise
cvDilate(pFrImg, pFrImg,0, 1); // dilate: restore object size
// Update the background with a running average (learning rate 0.004).
cvRunningAvg(pFrameMat, pBkMat, 0.004, 0);
// Convert the background matrix back to image format for display.
cvConvert(pBkMat, pBkImg);
// NOTE(review): the window named "background" actually shows the foreground
// mask; the commented line below shows the real background instead.
cvShowImage("background", pFrImg);
// cvShowImage("background", pBkImg);
}
示例5: ofLogError
//--------------------------------------------------------------------------------
// Stores the per-pixel absolute difference |mom - dad| into this image.
// Both sources must be allocated; if this image is not yet allocated it is
// sized to match mom. All three ROIs must have matching dimensions.
void ofxCvGrayscaleImage::absDiff( ofxCvGrayscaleImage& mom,
                                   ofxCvGrayscaleImage& dad ) {
    // Validate sources in order: mom first, then dad (preserves which error
    // is reported when both are missing).
    if( !mom.bAllocated ){
        ofLogError("ofxCvGrayscaleImage") << "absDiff(): first source image (mom) not allocated";
        return;
    }
    if( !dad.bAllocated ){
        ofLogError("ofxCvGrayscaleImage") << "absDiff(): second source image (dad) not allocated";
        return;
    }
    if( !bAllocated ){
        ofLogNotice("ofxCvGrayscaleImage") << "absDiff(): allocating to match dimensions: "
            << mom.getWidth() << " " << mom.getHeight();
        allocate(mom.getWidth(), mom.getHeight());
    }

    const ofRectangle myRoi = getROI();
    const ofRectangle roiA  = mom.getROI();
    const ofRectangle roiB  = dad.getROI();
    const bool momMatches = (roiA.width == myRoi.width) && (roiA.height == myRoi.height);
    const bool dadMatches = (roiB.width == myRoi.width) && (roiB.height == myRoi.height);

    if( momMatches && dadMatches ){
        cvAbsDiff( mom.getCvImage(), dad.getCvImage(), cvImage );
        flagImageChanged();
    } else {
        ofLogError("ofxCvGrayscaleImage") << "absDiff(): source image size mismatch between first (mom) & second (dad) image";
    }
}
示例6: frame_dif
// Frame differencing: thresholds |image - image_pass| and ORs it with the
// previous mask `pFrImg` into `pFrame` (255 where either mask fired), then
// stores the current frame into `image_pass` for the next call.
// BUG FIX: the original indexed pixel rows with `i * width + j`, which is
// wrong whenever an image's rows are padded/aligned (widthStep != width).
// Rows are now addressed through each image's own widthStep.
void frame_dif(IplImage* image, IplImage* image_pass, IplImage* res,IplImage* res0, IplImage* pFrImg,IplImage* pFrame,int thre_limit)
{
    cvZero(pFrame);
    // Color difference, then grayscale + binarize into `res`.
    cvAbsDiff(image, image_pass, res0);
    cvCvtColor(res0, res, CV_RGB2GRAY);
    cvThreshold(res, res, thre_limit, 255, CV_THRESH_BINARY);

    const int width  = pFrame->width;
    const int height = pFrame->height;
    for (int i = 0; i < height; i++)
    {
        // Per-row base pointers honoring each image's row stride.
        const unsigned char* curRow  = (const unsigned char*)(res->imageData    + i * res->widthStep);
        const unsigned char* prevRow = (const unsigned char*)(pFrImg->imageData + i * pFrImg->widthStep);
        unsigned char*       outRow  = (unsigned char*)(pFrame->imageData       + i * pFrame->widthStep);
        for (int j = 0; j < width; j++)
        {
            // Union of the two binary masks.
            if (curRow[j] == 255 || prevRow[j] == 255)
                outRow[j] = 255;
        }
    }
    // Keep the current frame as the reference for the next call.
    cvCopy(image, image_pass, NULL);
}
示例7: x
// Displays the absolute difference between the hue planes of img1 and img2
// in the "video" window. `imgsize` only supplies the buffer dimensions.
// BUG FIX: the original crossed its sources — it converted img1 into hsv2
// (cloned from img2) and copied that into imggray2, and img2 into hsv1 ->
// imggray1, so each buffer held the OTHER image's hue. It also performed two
// RGB2GRAY conversions whose results were immediately overwritten (dead
// work, removed here). The pairing is now img1 -> hsv1 -> imggray1 and
// img2 -> hsv2 -> imggray2.
void x(IplImage *img1, IplImage *img2, IplImage *imgsize)
{
    IplImage *imggray1;
    IplImage *imggray2;
    IplImage *imggray3;
    // single-channel buffers: hue of img1, hue of img2, and their difference
    imggray1 = cvCreateImage( cvSize( imgsize->width, imgsize->height ), IPL_DEPTH_8U, 1);
    imggray2 = cvCreateImage( cvSize( imgsize->width, imgsize->height ), IPL_DEPTH_8U, 1);
    imggray3 = cvCreateImage( cvSize( imgsize->width, imgsize->height ), IPL_DEPTH_8U, 1);
    // working copies for the HSV conversions
    IplImage *hsv1 = cvCloneImage(img1);
    IplImage *hsv2 = cvCloneImage(img2);
    // extract the hue plane of img1 (channel-of-interest 1 = H)
    cvCvtColor(img1, hsv1, CV_BGR2HSV);
    cvSetImageCOI(hsv1, 1);
    cvCopy(hsv1, imggray1, 0);
    // extract the hue plane of img2
    cvCvtColor(img2, hsv2, CV_BGR2HSV);
    cvSetImageCOI(hsv2, 1);
    cvCopy(hsv2, imggray2, 0);
    // compute and show the hue difference
    cvAbsDiff( imggray1, imggray2, imggray3 );
    cvShowImage( "video", imggray3 );
    cvReleaseImage(&imggray1);
    cvReleaseImage(&imggray2);
    cvReleaseImage(&imggray3);
    cvReleaseImage(&hsv1);
    cvReleaseImage(&hsv2);
}
示例8: gst_motiondetect_apply
// Returns TRUE if any pixel inside the mask differs from the reference by
// more than the (inverted, 0..1) noise threshold after a 3x3 ellipse erosion.
// NOTE: the reference image is overwritten in place and used as the scratch
// difference buffer.
static gboolean gst_motiondetect_apply (
    IplImage * cvReferenceImage, const IplImage * cvCurrentImage,
    const IplImage * cvMaskImage, float noiseThreshold)
{
  /* Map noiseThreshold in [0,1] onto an 8-bit binarization threshold:
   * a higher noise threshold tolerates larger pixel differences. */
  const int binThreshold = (int) ((1 - noiseThreshold) * 255);
  /* Reuse the reference buffer in place for the difference image. */
  IplImage *diff = cvReferenceImage;
  IplConvKernel *ellipse = cvCreateStructuringElementEx (3, 3, 1, 1,
      CV_SHAPE_ELLIPSE, NULL);
  double maxVal = -1.0;

  cvAbsDiff (cvReferenceImage, cvCurrentImage, diff);
  cvThreshold (diff, diff, binThreshold, 255, CV_THRESH_BINARY);
  /* Erode to discard isolated noise pixels before looking for motion. */
  cvErode (diff, diff, ellipse, 1);
  cvReleaseStructuringElement (&ellipse);
  cvMinMaxLoc (diff, NULL, &maxVal, NULL, NULL, cvMaskImage);

  /* Any surviving white pixel inside the mask means motion. */
  return (maxVal > 0) ? TRUE : FALSE;
}
示例9: main
int main ( int argc, char **argv )
{
// use first camera attached to computer
// image data structures
IplImage *img1;
IplImage *img2;
IplImage *imggray1;
IplImage *imggray2;
IplImage *imggray3;
// load image one
img1 = cvLoadImage( argv[1] );
// grayscale buffers
imggray1 = cvCreateImage( cvGetSize( img1 ), IPL_DEPTH_8U, 1);
imggray2 = cvCreateImage( cvGetSize( img1 ), IPL_DEPTH_8U, 1);
imggray3 = cvCreateImage( cvGetSize( img1 ), IPL_DEPTH_8U, 1);
// convert rgb to grayscale
cvCvtColor( img1, imggray1, CV_RGB2GRAY );
// load image two
img2 = cvLoadImage( argv[2] );
// convert rgb to grayscale
cvCvtColor( img2, imggray2, CV_RGB2GRAY );
// compute difference
cvAbsDiff( imggray1, imggray2, imggray3 );
cvSaveImage( argv[3], imggray3 );
return 0;
}
示例10: ofLog
//--------------------------------------------------------------------------------
// Stores the per-pixel absolute difference |mom - dad| into this image.
// Both sources must be allocated; if this image is not yet allocated it is
// sized to match mom. All three ROIs must have matching dimensions.
// (Older openFrameworks logging style; same contract as the ofLogError<<
// variant of this method.)
void ofxCvGrayscaleImage::absDiff( ofxCvGrayscaleImage& mom,
                   ofxCvGrayscaleImage& dad ) {
    // mom is validated before dad, so mom's error is the one reported
    // when both sources are missing.
    if( !mom.bAllocated ){
        ofLog(OF_LOG_ERROR, "in absDiff, mom needs to be allocated");
        return;
    }
    if( !dad.bAllocated ){
        ofLog(OF_LOG_ERROR, "in absDiff, dad needs to be allocated");
        return;
    }
    if( !bAllocated ){
        ofLog(OF_LOG_NOTICE, "in absDiff, allocating to match dimensions");
        allocate(mom.getWidth(), mom.getHeight());
    }
    // All three ROIs must agree in size for cvAbsDiff to be valid.
    ofRectangle roi = getROI();
    ofRectangle momRoi = mom.getROI();
    ofRectangle dadRoi = dad.getROI();
    if( (momRoi.width == roi.width && momRoi.height == roi.height ) &&
        (dadRoi.width == roi.width && dadRoi.height == roi.height ) )
    {
        cvAbsDiff( mom.getCvImage(), dad.getCvImage(), cvImage );
        flagImageChanged();
    } else {
        ofLog(OF_LOG_ERROR, "in absDiff, images are different sizes");
    }
}
示例11: detect_motion
// Frame-differencing motion detector over an ffmpeg decode loop.
// Converts `frame` to GRAY8, diffs it against the previous frame, and
// returns 1 when more than 1% of pixels changed beyond the threshold.
static int detect_motion(struct motion_detection *md, AVFrame *frame) {
    IplImage *tmp;
    AVPicture pict;
    // Swap cur/prev so last call's image becomes the reference.
    tmp = md->cur;
    md->cur = md->prev;
    md->prev = tmp;
    // Scale/convert the decoded frame into a GRAY8 picture over md->buffer.
    avpicture_fill(&pict, md->buffer, PIX_FMT_GRAY8, md->cam->codec->width, md->cam->codec->height);
    sws_scale(md->img_convert_ctx, (const uint8_t* const*)frame->data, frame->linesize, 0, md->cam->codec->height, (uint8_t* const*)pict.data, pict.linesize);
    // Copy the gray plane into the IplImage and adopt ffmpeg's row stride.
    // NOTE(review): assumes imageSize is consistent with linesize[0]*height —
    // verify against how md->cur was allocated.
    memcpy(md->cur->imageData, pict.data[0], md->cur->imageSize);
    md->cur->widthStep = pict.linesize[0];
    // Threshold the inter-frame difference into a binary silhouette.
    cvAbsDiff(md->cur, md->prev, md->silh);
    cvThreshold(md->silh, md->silh, md->cam->threshold, 250, CV_THRESH_BINARY);
    // Count changed pixels (rows addressed via widthStep).
    int density = 0;
    for(int i=0; i < md->silh->height; i++) {
        uint8_t* ptr = (uint8_t*)md->silh->imageData + i * md->silh->widthStep;
        for(int j=0; j < md->silh->width; j++)
            if(*(ptr+j) > 0)
                density += 1;
    }
    // Motion if more than 1% of the frame changed.
    if((float)density / (float)(md->silh->height * md->silh->width) > 0.01) {
        return 1;
    } else {
        return 0;
    }
}
示例12: main
// NOTE(review): this example snippet is TRUNCATED by the page it was scraped
// from — the function body is cut off after the `final` allocation and has
// no closing brace. Kept verbatim; do not compile as-is.
// Intent so far: load an image, Canny-edge its red and green channels, and
// take the absolute difference of the two edge maps.
int main( int argc, char** argv )
{
char* filename = argc == 2 ? argv[1] : (char*)"1-small.jpg";
if( (image = cvLoadImage( filename, CV_LOAD_IMAGE_COLOR)) == 0 )
return -1;
//cvNamedWindow("orig", CV_WINDOW_AUTOSIZE);
//cvShowImage("orig", image);
// Extract red channel of image
red = cvCreateImage(cvSize(image->width,image->height), IPL_DEPTH_8U, 1);
green = cvCreateImage(cvSize(image->width,image->height), IPL_DEPTH_8U, 1);
cvSplit(image, NULL, green, red, NULL);
// Edge maps of the two channels, then their absolute difference.
red_edge = cvCreateImage(cvSize(image->width,image->height), IPL_DEPTH_8U, 1);
green_edge = cvCreateImage(cvSize(image->width,image->height), IPL_DEPTH_8U, 1);
cvCanny(red, red_edge, low, high, 3);
cvCanny(green, green_edge, low, high, 3);
edge = cvCreateImage(cvSize(image->width,image->height), IPL_DEPTH_8U, 1);
cvAbsDiff(red_edge, green_edge, edge);
// Half-resolution output buffer (`&-2` forces even dimensions).
// ... remainder of the example is missing from the source page.
final = cvCreateImage(cvSize((image->width&-2)/2,(image->height&-2)/2), IPL_DEPTH_8U, 1);
示例13: cvCreateImage
/// ****************************************************
///
/// CARTOON FILTER
///
/// ****************************************************
// Cartoon-style filter: posterizes `src` with pyramid mean-shift filtering,
// subtracts its Canny edges (drawn dark), and writes the result to `dst`.
// `w`/`h` must match the dimensions of both images. Always returns true.
// BUG FIX: the three temporary IplImages were leaked on every call; they
// are now released before returning.
bool testApp::cvFilterCartoon(ofxCvColorImage &src, ofxCvColorImage &dst, int w, int h)
{
    // Temporary storage.
    IplImage* pyr = cvCreateImage( cvSize(w,h), IPL_DEPTH_8U, 3 );
    IplImage* edges = cvCreateImage( cvSize(w,h), IPL_DEPTH_8U, 1 );
    IplImage* edgesRgb = cvCreateImage( cvSize(w,h), IPL_DEPTH_8U, 3 );
    // Grayscale copy of the source for edge detection.
    ofxCvGrayscaleImage tempGrayImg;
    tempGrayImg.allocate(w, h);
    tempGrayImg.setFromColorImage(src);
    //------------------------------
    // Posterize: mean-shift segmentation flattens color regions.
    cvPyrMeanShiftFiltering(src.getCvImage(), pyr, 10, 10);
    // Edge map, promoted to 3 channels so it can be subtracted from pyr.
    cvCanny(tempGrayImg.getCvImage(), edges, 150,150);
    cvCvtColor(edges, edgesRgb, CV_GRAY2RGB);
    // Darken edge pixels in the posterized image (cartoon outlines).
    cvAbsDiff(pyr, edgesRgb, pyr);
    dst.setFromPixels((unsigned char *)pyr->imageData, w, h);
    // Release the temporaries (previously leaked).
    cvReleaseImage(&pyr);
    cvReleaseImage(&edges);
    cvReleaseImage(&edgesRgb);
    return true;
}
示例14: ObtenerMaximo
// Returns (heap-allocated; caller must delete) the MAXIMUM of the
// background-normalized distance map |I(p)-u(p)| / sigma(p), restricted to
// the foreground mask FrameData->FG. `Roi` is accepted for interface
// compatibility but unused, as in the original.
// BUG FIXES vs. original:
//  - cvMinMaxLoc was called with a NULL output pointer in the *min_val*
//    slot (UB/crash: OpenCV writes through it) and never received a
//    max_val pointer at all, so the "Maximo" it returned was NULL.
//  - Both temporary images were leaked on every call.
//  - The lazy-(re)allocation guard was dead code: IDif/peso were always
//    NULL locals, so the branch always ran; it is now a plain allocation.
double* ObtenerMaximo(IplImage* Imagen, STFrame* FrameData, CvRect Roi) {
    // Obtain the matrix of distances normalized against the background.
    if (SHOW_VALIDATION_DATA == 1)
        printf(" \n\n Busqueda del máximo umbral...");
    CvSize modelSize = cvSize(FrameData->BGModel->width,
                              FrameData->BGModel->height);
    // |I(p) - u(p)| difference image.
    IplImage* IDif = cvCreateImage(modelSize, IPL_DEPTH_8U, 1);
    // Resulting weights w_i = |I(p)-u(p)| / sigma(p).
    IplImage* peso = cvCreateImage(modelSize, IPL_DEPTH_32F, 1);
    cvZero(IDif);
    cvZero(peso);
    // |I(p)-u(p)| / sigma(p)
    cvAbsDiff(Imagen, FrameData->BGModel, IDif);
    cvDiv(IDif, FrameData->IDesvf, peso);
    // Find the maximum inside the foreground mask.
    double minVal = 0.0;
    double* Maximo = new double(0.0);
    cvMinMaxLoc(peso, &minVal, Maximo, 0, 0, FrameData->FG);
    cvReleaseImage(&IDif);
    cvReleaseImage(&peso);
    return Maximo;
}
示例15: cvReleaseCapture
// Runs a live background-subtraction loop on camera 0 until ESC is pressed:
// builds a running-average background (pBkMat) and displays the binarized
// foreground mask each frame. All pFrame/pBkImg/pFrMat/... are class
// members; `lowThreshold` and `alpha` are member parameters (alpha is the
// background learning rate driven by alpha_slider).
void MainWindow::BackgroundDiff()
{
    ui->alpha_slider->setEnabled(true);
    // Restart the capture from camera 0.
    cvReleaseCapture(&pCapture);
    pCapture=cvCaptureFromCAM(0);
    // IplImage* pFrame=NULL;
    nFrameNum=0;
    while(pFrame = cvQueryFrame( pCapture ))
    {
        nFrameNum++;
        // First frame: allocate buffers and seed the background model.
        if(nFrameNum == 1)
        {
            pBkImg = cvCreateImage(cvSize(pFrame->width, pFrame->height),IPL_DEPTH_8U,1);
            pFrImg = cvCreateImage(cvSize(pFrame->width, pFrame->height), IPL_DEPTH_8U,1);
            pBkMat = cvCreateMat(pFrame->height, pFrame->width, CV_32FC1);
            pFrMat = cvCreateMat(pFrame->height, pFrame->width, CV_32FC1);
            pFrameMat = cvCreateMat(pFrame->height, pFrame->width, CV_32FC1);
            // Convert to single-channel grayscale before processing.
            cvCvtColor(pFrame, pBkImg, CV_BGR2GRAY);
            cvCvtColor(pFrame, pFrImg, CV_BGR2GRAY);
            cvConvert(pFrImg, pFrameMat);
            cvConvert(pFrImg, pFrMat);
            cvConvert(pFrImg, pBkMat);
        }
        else
        {
            cvCvtColor(pFrame, pFrImg, CV_BGR2GRAY);
            cvConvert(pFrImg, pFrameMat);
            // Gaussian smoothing first, to stabilize the difference.
            cvSmooth(pFrameMat, pFrameMat, CV_GAUSSIAN, 3, 0, 0);
            // Subtract the background model from the current frame.
            cvAbsDiff(pFrameMat, pBkMat, pFrMat);
            // Morphological close on the float difference map, then binarize.
            // NOTE(review): dilate/erode run BEFORE thresholding here —
            // confirm this ordering is intended rather than cleanup of the
            // binary mask afterwards.
            cvDilate(pFrMat,pFrMat);
            cvErode(pFrMat,pFrMat);
            cvThreshold(pFrMat, pFrImg, lowThreshold, 255.0, CV_THRESH_BINARY);
            // Update the background with a running average (rate = alpha).
            cvRunningAvg(pFrameMat, pBkMat, alpha,0);
            // Convert the background model back to an image for display.
            cvConvert(pBkMat, pBkImg);
            // Flip origin so Qt display draws the frames right side up.
            pFrame->origin = IPL_ORIGIN_BL;
            pFrImg->origin = IPL_ORIGIN_BL;
            pBkImg->origin = IPL_ORIGIN_BL;
        }
        // ESC (27) exits; ~30 fps pacing.
        if(27==cvWaitKey(33))
            break;
        MainWindow::Display(pFrame,pBkImg,pFrImg);
    }
}