This article collects typical usage examples of the C++ method cv::Mat::setTo. If you have been wondering how exactly to use Mat::setTo in C++, or what it looks like in real code, the curated samples below may help. You can also read further about the enclosing class, cv::Mat.
The following presents 15 code examples of Mat::setTo, sorted by popularity by default. Upvote the examples you like or find useful; your ratings help the system recommend better C++ samples.
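Before diving into the examples, here is a minimal sketch of the two setTo overloads: filling an entire matrix with a value, and filling only where a mask is nonzero. The sizes and values below are arbitrary illustrations, not taken from any example on this page.

#include <opencv2/core.hpp>

int main() {
    // Overload 1: fill the whole matrix with one value.
    cv::Mat img(4, 4, CV_8UC3);
    img.setTo(cv::Scalar(255, 255, 255));       // every pixel becomes white

    // Overload 2: fill only where the 8-bit mask is nonzero.
    cv::Mat gray(4, 4, CV_8UC1, cv::Scalar(7));
    cv::Mat mask = gray > 5;                    // comparison yields a CV_8U mask
    gray.setTo(cv::Scalar(0), mask);            // zero out only the masked pixels
    return 0;
}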
Example 1: mouse_events
void mouse_events(int event, int x, int y, int flags, void* userdata) {
    if (event == cv::EVENT_LBUTTONDOWN) {
        cv::Point center = cv::Point(x, y);
        pthread_mutex_lock(&render_buffer_mutex);
        movement_buffer.setTo(cv::Scalar::all(0));
        cv::circle(movement_buffer, center, 50, cv::Scalar(255, 255, 255),
                   CV_FILLED);
        tracking_object = false;
        step_tracking();
        pthread_mutex_unlock(&render_buffer_mutex);
    } else if (event == cv::EVENT_RBUTTONDOWN) {
        pthread_mutex_lock(&render_buffer_mutex);
        movement_buffer.setTo(cv::Scalar::all(0));
        tracking_object = false;
        features_current.clear();
        pthread_mutex_unlock(&render_buffer_mutex);
    } else if (event == cv::EVENT_MOUSEMOVE) {
        cursor_pos = cv::Point(x, y);
    }
}
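A handler like this is registered with cv::setMouseCallback once the window exists; a minimal wiring sketch (the window name "tracker" is a hypothetical placeholder):

cv::namedWindow("tracker");
cv::setMouseCallback("tracker", mouse_events, nullptr);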
Example 2: normalize_residuals
void normalize_residuals(cv::Mat& residual, cv::Mat& counts)
{
    // Avoid division by zero: bins with no samples get a count of 1.
    cv::Mat mask = counts == 0;
    counts.setTo(cv::Scalar(1.0f), mask);
    residual /= counts;
    // Discard residuals backed by fewer than two samples as unreliable.
    cv::Mat mask_certainty = counts < 2;
    residual.setTo(cv::Scalar(0.0f), mask_certainty);
}
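A hypothetical call, assuming float accumulation buffers of the same size:

cv::Mat residual(3, 3, CV_32FC1, cv::Scalar(6.0f));
cv::Mat counts = (cv::Mat_<float>(3, 3) << 0, 1, 2, 3, 0, 2, 1, 4, 0);
normalize_residuals(residual, counts);
// Zero-count bins divide by 1 instead of 0, then every bin with a
// count below 2 (including the former zero-count bins) is zeroed.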
Example 3: flood
void flood(const cv::Mat &edges, const cv::Point &anchor,
           const unsigned short label, cv::Mat &labels,
           const uchar edge, const unsigned int threshold)
{
    assert(label > 0);
    assert(edges.type() == CV_8UC1);
    assert(labels.type() == CV_16UC1);
    assert(edges.rows == labels.rows);
    assert(edges.cols == labels.cols);
    cv::Mat mask(edges.rows, edges.cols, CV_8UC1, cv::Scalar::all(FLOOD_MASK_DEFAULT));
    if(edges.at<uchar>(anchor.y, anchor.x) == edge)
        return;

    std::deque<cv::Point> Q;
    Q.push_back(anchor);
    static const int N_X[] = {-1, 0, 0, 1};
    static const int N_Y[] = { 0,-1, 1, 0};
    unsigned int size = 0;
    while(!Q.empty()) {
        cv::Point ca = Q.front();
        Q.pop_front();
        /// ALREADY VISITED
        if(labels.at<unsigned short>(ca.y, ca.x) != FLOOD_LABEL_DEFAULT ||
           mask.at<uchar>(ca.y, ca.x) != FLOOD_MASK_DEFAULT)
            continue;
        /// NOT ALREADY VISITED
        mask.at<uchar>(ca.y, ca.x) = FLOOD_MASK_SET;
        ++size;
        for(int n = 0; n < 4; ++n) {
            cv::Point neighbour(ca.x + N_X[n], ca.y + N_Y[n]);
            if(neighbour.x < 0 || neighbour.x >= edges.cols ||
               neighbour.y < 0 || neighbour.y >= edges.rows)
                continue;
            if(edges.at<uchar>(neighbour.y, neighbour.x) != edge) {
                Q.push_back(neighbour);
            }
        }
    }
    if(size > threshold) {
        labels.setTo(cv::Scalar::all(label), mask);
    } else {
        labels.setTo(cv::Scalar::all(FLOOD_LABEL_IMPLODE), mask);
    }
}
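The FLOOD_* constants are defined outside this excerpt. One plausible set of definitions consistent with the logic above (zero defaults, a nonzero "visited" marker, and a sentinel for regions below the size threshold) might be; the actual values in the original project may differ:

static const uchar FLOOD_MASK_DEFAULT = 0;
static const uchar FLOOD_MASK_SET = 255;
static const unsigned short FLOOD_LABEL_DEFAULT = 0;       // consistent with assert(label > 0)
static const unsigned short FLOOD_LABEL_IMPLODE = 0xFFFF;  // hypothetical sentinel for discarded regions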
Example 4: morphology_skeleton
/**
* @brief create skeleton by morphology operations
* @param input : input image
* @param output : output image
 * @param kernel : structuring element for the morphology operations
*/
void morphology_skeleton(cv::Mat &input, cv::Mat &output, cv::Mat const &kernel)
{
    if(input.type() != CV_8U){
        throw std::runtime_error(COMMON_DEBUG_MESSAGE + "input.type() != CV_8U");
    }
    if(input.data == output.data){
        output = cv::Mat::zeros(input.size(), CV_8U);
    }else{
        output.create(input.size(), CV_8U);
        output.setTo(0);
    }
    cv::Mat temp;
    cv::Mat eroded;
    bool done;
    do
    {
        cv::erode(input, eroded, kernel);
        cv::dilate(eroded, temp, kernel); // temp = open(img)
        cv::subtract(input, temp, temp);  // top-hat: pixels removed by the opening
        cv::bitwise_or(output, temp, output);
        eroded.copyTo(input);
        done = (cv::countNonZero(input) == 0);
    } while (!done);
    input = output;
}
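A minimal call sketch, assuming a binarized 8-bit input; the file name "shape.png" is a hypothetical placeholder, and the 3x3 cross is the classic structuring element for morphological skeletons. Note that the function modifies input, which ends up holding the skeleton as well:

cv::Mat input = cv::imread("shape.png", cv::IMREAD_GRAYSCALE);
cv::threshold(input, input, 127, 255, cv::THRESH_BINARY);
cv::Mat kernel = cv::getStructuringElement(cv::MORPH_CROSS, cv::Size(3, 3));
cv::Mat skeleton;
morphology_skeleton(input, skeleton, kernel);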
Example 5: DataProcessing
DataProcessing()
{
    // Initialize size of image
    rows = 800;
    cols = 800;
    // Initialize image (cv::Mat::zeros takes rows first, then cols)
    image = cv::Mat::zeros(rows, cols, CV_8UC3);
    image.setTo(cv::Scalar(255, 255, 255));
    // Initialize location of rover
    rover_rows = rows/2;
    rover_cols = cols/2;
    // Initialize compass
    compassValue = 0;
    compassStartPoint = -1;
    // Initialize maximum point difference
    maxPointDifference = 20;
    // Publishers and subscribers
    turnPub = nh.advertise<geometry_msgs::PoseArray>("/data_processing/clusters", 2);
    straightPub = nh.advertise<geometry_msgs::PoseArray>("/data_processing/clusters", 4);
    switchRowsPub = nh.advertise<geometry_msgs::PoseArray>("/data_processing/points", 2);
    // NOTE: each assignment to `sub` replaces the previous subscription;
    // distinct subscriber members are needed to keep all three callbacks alive.
    sub = nh.subscribe("/lidar_data/points_data", 360, &DataProcessing::ProcessingCallback, this);
    sub = nh.subscribe("/arduino/compass_value", 1, &DataProcessing::CompassCallback, this);
    sub = nh.subscribe("/arduino/compass_start_point", 1, &DataProcessing::CompassStartPointCallback, this);
    cv::namedWindow(OPENCV_WINDOW);
}
Example 6: extractCandidateShadowPixels
void SrTextureShadRem::extractCandidateShadowPixels(const cv::Mat& grayFrame, const ConnCompGroup& fg,
                                                    const cv::Mat& grayBg, cv::Mat& candidateShadows) {
    candidateShadows.create(grayFrame.size(), CV_8U);
    candidateShadows.setTo(cv::Scalar(0));
    for (int cc = 0; cc < (int) fg.comps.size(); ++cc) {
        const ConnComp& object = fg.comps[cc];
        for (int p = 0; p < (int) object.pixels.size(); ++p) {
            int x = object.pixels[p].x;
            int y = object.pixels[p].y;
            double frVal = grayFrame.at<uchar>(y, x);
            double bgVal = grayBg.at<uchar>(y, x);
            double gain = 0;
            if (frVal < bgVal) {
                gain = 1 - (frVal / bgVal) / (bgVal - frVal);
            }
            if (gain > params.gainThreshold) {
                candidateShadows.at<uchar>(y, x) = 255;
            }
        }
    }
}
Example 7: edgeDetectionFct
// determine the edge detection function g
void edgeDetectionFct(cv::Mat I_Vec3b, cv::Mat &g)
{
    // convert the image from the 0..255 range to 0..1
    cv::Mat I = cv::Mat(I_Vec3b.size(), CV_32FC3);
    I_Vec3b.convertTo(I, CV_32FC3, 1/255.0f, 0); // NOTE: 32F images that contain negative values must be converted first
    // compute the grayscale image and the edge detection function
    cv::Mat grayI(I.rows, I.cols, CV_32FC1); // float 0..1, 1 channel
    grayI.setTo(0);
    g.create(I.rows, I.cols, CV_32FC1);
    g.setTo(0);
    for(int r = 0; r < I.rows; r++){
        for(int c = 0; c < I.cols; c++){
            float channel_r = I.at<cv::Vec3f>(r,c)[2];
            float channel_g = I.at<cv::Vec3f>(r,c)[1];
            float channel_b = I.at<cv::Vec3f>(r,c)[0];
            // gray value = Euclidean norm of the RGB vector
            grayI.at<float>(r,c) = sqrtf(channel_r * channel_r + channel_g * channel_g + channel_b * channel_b);
        }
    }
    cv::blur(grayI, grayI, cv::Size(3,3));
    // forward differences; g = exp(-5 * |grad I|) is small near strong edges
    float norm_gradI;
    for(int r = 0; r < I.rows - 1; r++){
        for(int c = 0; c < I.cols - 1; c++){
            float dx = grayI.at<float>(r, c+1) - grayI.at<float>(r, c);
            float dy = grayI.at<float>(r+1, c) - grayI.at<float>(r, c);
            norm_gradI = sqrtf(dx * dx + dy * dy);
            g.at<float>(r,c) = expf(-5 * norm_gradI);
        }
    }
}
Example 8: drawLineImage
void drawLineImage(cv::Mat& img, int cols, int rows, int w_cnt, int h_cnt, int line_width){
    img = cv::Mat(rows, cols, CV_8UC3);
    img.setTo(0);
    // horizontal border lines (blue) plus h_cnt-1 inner lines (white)
    int dh = rows/h_cnt;
    int pos_y = dh;
    cv::line(img, cv::Point(0, line_width/2), cv::Point(cols, line_width/2), cv::Scalar(255,0,0), line_width);
    cv::line(img, cv::Point(0, rows - line_width/2), cv::Point(cols, rows - line_width/2), cv::Scalar(255,0,0), line_width);
    for (int i = 1; i < h_cnt; ++i, pos_y += dh){
        cv::line(img, cv::Point(0, pos_y), cv::Point(cols, pos_y), cv::Scalar(255,255,255), line_width);
    }
    // vertical border lines plus w_cnt-1 inner lines
    int dw = cols/w_cnt;
    int pos_x = dw;
    cv::line(img, cv::Point(line_width/2, 0), cv::Point(line_width/2, rows), cv::Scalar(255,0,0), line_width); // endpoint y fixed: was `cols`
    cv::line(img, cv::Point(cols - line_width/2 - 5, 0), cv::Point(cols - line_width/2 - 5, rows), cv::Scalar(255,0,0), line_width);
    for (int i = 1; i < w_cnt; ++i, pos_x += dw){
        cv::line(img, cv::Point(pos_x, 0), cv::Point(pos_x, rows), cv::Scalar(255,255,255), line_width);
    }
}
Example 9: getSigmas
void PixelEnvironmentModel::getSigmas(cv::Mat& vars, bool normalize){
    if (vars.rows != height_ || vars.cols != width_ || vars.type() != CV_32FC1){
        ROS_INFO("new sigma image");
        vars = cv::Mat(height_, width_, CV_32FC1);
    }
    vars.setTo(0);
    for (int y = 0; y < height_; ++y){
        for (int x = 0; x < width_; ++x){
            if (mask_set && mask_.at<uchar>(y,x) == 0) continue;
            if (gaussians[y][x].initialized)
                vars.at<float>(y,x) = gaussians[y][x].sigma();
        }
    }
    if (normalize){
        double min_val, max_val;
        cv::minMaxLoc(vars, &min_val, &max_val, NULL, NULL, mask_);
        ROS_INFO("normalizing: %f %f", min_val, max_val);
        vars = (vars - min_val) / (max_val - min_val);
    }
}
Example 10: getForeground_prob
/// a point is foreground if it lies in front of the model mean and outside N sigma
void PixelEnvironmentModel::getForeground_prob(const Cloud& cloud, float N, cv::Mat& foreground){
    if (foreground.rows != height_ || foreground.cols != width_ || foreground.type() != CV_8UC1){
        foreground = cv::Mat(height_, width_, CV_8UC1);
    }
    foreground.setTo(0);
    for (int y = 0; y < height_; ++y){
        for (int x = 0; x < width_; ++x){
            if (mask_set && mask_.at<uchar>(y,x) == 0) continue;
            bool inited = gaussians[y][x].initialized;
            float current = norm(cloud.at(x,y));
            if (current < 0) continue; // NaN point
            if (!inited || (current < gaussians[y][x].mean && !gaussians[y][x].isWithinNSigma(current, N))){
                foreground.at<uchar>(y,x) = 255;
            }
        }
    }
    // remove salt-and-pepper noise from the foreground mask
    cv::medianBlur(foreground, foreground, 3);
    // cv::erode(foreground, foreground, cv::Mat(), cv::Point(-1,-1), 2);
    // cv::dilate(foreground, foreground, cv::Mat(), cv::Point(-1,-1), 2);
}
Example 11: showMask
void Background_substraction::showMask(const std::vector<cv::Point2i>& mask, cv::Mat& img){
    img.setTo(0);
    for (uint i = 0; i < mask.size(); ++i){
        img.at<uchar>(mask[i].y, mask[i].x) = 255;
    }
}
Example 12: segmentHand
void segmentHand(cv::Mat &mask, Rect &region, const cv::Mat &depth)
{
    CV_Assert(mask.type() == CV_8UC1);
    CV_Assert(depth.type() == CV_16UC1);
    CV_Assert(mask.rows == depth.rows);
    CV_Assert(mask.cols == depth.cols);
    mask.setTo(EMPTY);
    pair<int, int> current = searchNearestPixel(depth, region);
    if (current.first < 0){
        return;
    }
    int rowcount = region.height, colcount = region.width;
    Mat visited(depth.rows, depth.cols, CV_8U, Scalar(0));
    double mean = depth.at<unsigned short>(current.first, current.second);
    int minx = depth.cols, miny = depth.rows, maxx = 0, maxy = 0;
    unsigned short dv = 0;
    int pixelcount = 1;
    _pixels.push(current);
    // breadth-first region growing from the nearest pixel
    while(!_pixels.empty() && pixelcount < _maxObjectSize)
    {
        current = _pixels.front();
        _pixels.pop();
        dv = depth.at<unsigned short>(current.first, current.second);
        if (current.first < minx) minx = current.first;
        else if (current.first > maxx) maxx = current.first;
        if (current.second < miny) miny = current.second;
        else if (current.second > maxy) maxy = current.second;
        if (current.first + 1 < rowcount + region.y && visited.at<uchar>(current.first+1, current.second) == 0){
            visited.at<uchar>(current.first+1, current.second) = 255;
            processNeighbor(pixelcount, mean, mask, current.first + 1, current.second, depth);
        }
        if (current.first - 1 > -1 + region.y && visited.at<uchar>(current.first-1, current.second) == 0){
            visited.at<uchar>(current.first-1, current.second) = 255;
            processNeighbor(pixelcount, mean, mask, current.first - 1, current.second, depth);
        }
        if (current.second + 1 < colcount + region.x && visited.at<uchar>(current.first, current.second+1) == 0){
            visited.at<uchar>(current.first, current.second+1) = 255;
            processNeighbor(pixelcount, mean, mask, current.first, current.second + 1, depth);
        }
        if (current.second - 1 > -1 + region.x && visited.at<uchar>(current.first, current.second-1) == 0){
            visited.at<uchar>(current.first, current.second-1) = 255;
            processNeighbor(pixelcount, mean, mask, current.first, current.second - 1, depth);
        }
    }
}
Example 13: dot
// computes the dot-product of each column in src with each basis vector
inline void dot(const cv::Mat &src, cv::Mat &dst) const
{
    dst.create(basis.size(), src.cols, CV_64FC1);
    dst.setTo(0);
    for (size_t i = 0; i < basis.size(); i++) {
        basis[i].dot(src, dst.row(i));
    }
}
Example 14: buildObtainedMat
/**
* @brief QueryImage::buildObtainedMat Build the mat representing the obtained labeling
* @param result Mat in which the function will print the result
*/
void QueryImage::buildObtainedMat(cv::Mat &result, set<int> *usedLabels){
    result.create(image.getImage()->size(), CV_8UC1);
    result.setTo(cv::Scalar::all(0));
    if(usedLabels != NULL) usedLabels->clear();
    for(uint i = 0; i < superPixelList.size(); ++i){
        if(usedLabels != NULL) usedLabels->insert(superPixelList[i]->getLabel());
        superPixelList[i]->printToMat(result);
    }
}
Example 15: _tmain
int _tmain(int argc, _TCHAR* argv[])
{
    int display_x, display_y;
    display_x = 480;
    display_y = 480;
    img.create(display_x, display_y, CV_8UC3);
    img.setTo(cv::Scalar(255,255,255));
    point eye(eye_x, eye_y, eye_z);
    camera cam(eye, display_x, display_y);
    scenes scene;
    ball light1(point(20,240,26), 5, color(255,255,255));
    light1.set(0.2,0.2,8,0.3,0.2,0.2,0.2,true);
    //ball light2(point(360,340,20),5,color(255,255,255));
    //light2.set(0.2,0.2,8,0.3,0.2,0.2,0.2,true);
    scene.lights.push_back(&light1);
    //scene.lights.push_back(&light2);
    ball ball1(point(360,370,100), 30, color(0,0,0)); // transparent
    ball1.set(0.05,0.05,8,0.05,1,1,0.1,false);
    poly4 po1(point(150,150,100), point(200,100,100), point(200,200,200), point(220,200,80), color(50,205,50));
    po1.set(0.2,0.2,8,0.3,0.4,0.5,0.2,false);
    ball ball2(point(360,290,100), 30, color(255,0,0));
    ball2.set(0.2,0.2,8,0.3,0.2,0.1,0.2,false);
    cuboid cub1(point(400,100,30), point(420,140,50), point(400,160,30), point(380,120,10),
                point(340,100,90), point(360,140,110), point(340,160,90), point(320,120,70), color(0,155,255));
    cub1.set(0.2,0.2,8,0.3,0.4,0.5,0.2,false);
    scene.objects.push_back(&po1);
    Floor floor1(430);
    floor1.set(0.2,0.2,8,0.3,0.2,0.2,0.2,false);
    scene.objects.push_back(&ball2);
    scene.objects.push_back(&ball1);
    //scene.objects.push_back(&light2);
    scene.objects.push_back(&light1);
    scene.objects.push_back(&cub1);
    scene.objects.push_back(&floor1);
    // trace 9 offset rays per pixel and average them (3x3 supersampling)
#pragma omp parallel for
    for(int i = 0; i < display_x; i++)
        for(int j = 0; j < display_y; j++)
        {
            *Triangle::fout2 << "i=" << i << " j=" << j;
            //paint(i,j,scene.trace(ray(eye,vector3(i-eye_x,j-eye_y,-eye_z)),depth));
            //paint(i,j,scene.trace(ray(point(i,j,0),vector3(0,0,1)),3));
            paint(i, j, (scene.trace(ray(eye, vector3(i-0.5-eye_x, j-0.5-eye_y, -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i-0.5-eye_x, j+0.5-eye_y, -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i-0.5-eye_x, j-eye_y,     -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i-eye_x,     j+0.5-eye_y, -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i+0.5-eye_x, j-eye_y,     -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i-eye_x,     j-0.5-eye_y, -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i-eye_x,     j-eye_y,     -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i+0.5-eye_x, j-0.5-eye_y, -eye_z)), depth)
                       + scene.trace(ray(eye, vector3(i+0.5-eye_x, j+0.5-eye_y, -eye_z)), depth)) * (1.0/9));
            *Triangle::fout2 << endl;
        }
    cv::imshow("show", img);
    cv::waitKey(0);
    return 0;
}