This article collects typical usage examples of the C++ UMat::getMat method. If you have been wondering how exactly UMat::getMat is used in C++, or want to see it in real-world code, the examples selected below may help. You can also read further about the UMat class that the method belongs to.
Seven code examples of UMat::getMat are shown below, sorted by popularity by default.
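Before the examples, a minimal sketch of the basic round trip between UMat and Mat may be useful. This fragment was written for this page rather than taken from the snippets below; the function name roundTrip and the matrix sizes are illustrative:

#include <opencv2/core.hpp>

void roundTrip()
{
    cv::UMat u(480, 640, CV_8UC1, cv::Scalar(0)); // may be backed by OpenCL device memory
    {
        // getMat maps the data into host memory; the UMat should not be
        // used while the returned Mat is alive, since the buffer stays locked.
        cv::Mat m = u.getMat(cv::ACCESS_RW);
        m.at<uchar>(0, 0) = 255;                  // ordinary CPU access
    }                                             // mapping is released when m goes out of scope
    cv::Mat host(480, 640, CV_8UC1, cv::Scalar(0));
    cv::UMat view = host.getUMat(cv::ACCESS_READ); // the reverse direction: wrap a Mat as a UMat
}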
Example 1: Mat
TEST_P(UMatBasicTests, GetUMat)
{
    if (useRoi)
    {
        a = Mat(a, roi);
        ua = UMat(ua, roi);
    }
    {
        // Mat -> UMat: the UMat view must match the original UMat.
        UMat ub = a.getUMat(ACCESS_RW);
        EXPECT_MAT_NEAR(ub, ua, 0);
    }
    {
        // Round trip Mat -> UMat -> Mat.
        UMat u = a.getUMat(ACCESS_RW);
        {
            Mat b = u.getMat(ACCESS_RW);
            EXPECT_MAT_NEAR(b, a, 0);
        }
    }
    {
        // UMat -> Mat: the host view must match the original Mat.
        Mat b = ua.getMat(ACCESS_RW);
        EXPECT_MAT_NEAR(b, a, 0);
    }
    {
        // Round trip UMat -> Mat -> UMat.
        Mat m = ua.getMat(ACCESS_RW);
        {
            UMat ub = m.getUMat(ACCESS_RW);
            EXPECT_MAT_NEAR(ub, ua, 0);
        }
    }
}
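Note the extra brace scopes in this test: the Mat or UMat header obtained via getMat/getUMat keeps the underlying buffer mapped, so it is released promptly before the original object is touched again. This mirrors the documented rule that a UMat should not be used while a Mat returned by its getMat is still alive.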
Example 2: downloadKeypoints
void SURF_OCL::downloadKeypoints(const UMat &keypointsGPU, std::vector<KeyPoint> &keypoints)
{
    const int nFeatures = keypointsGPU.cols;
    if (nFeatures == 0)
        keypoints.clear();
    else
    {
        CV_Assert(keypointsGPU.type() == CV_32FC1 && keypointsGPU.rows == ROWS_COUNT);

        // Map the GPU keypoint matrix into host memory; each row stores
        // one attribute (x, y, laplacian, ...) for all nFeatures keypoints.
        Mat keypointsCPU = keypointsGPU.getMat(ACCESS_READ);

        keypoints.resize(nFeatures);
        float *kp_x = keypointsCPU.ptr<float>(SURF_OCL::X_ROW);
        float *kp_y = keypointsCPU.ptr<float>(SURF_OCL::Y_ROW);
        int *kp_laplacian = keypointsCPU.ptr<int>(SURF_OCL::LAPLACIAN_ROW);
        int *kp_octave = keypointsCPU.ptr<int>(SURF_OCL::OCTAVE_ROW);
        float *kp_size = keypointsCPU.ptr<float>(SURF_OCL::SIZE_ROW);
        float *kp_dir = keypointsCPU.ptr<float>(SURF_OCL::ANGLE_ROW);
        float *kp_hessian = keypointsCPU.ptr<float>(SURF_OCL::HESSIAN_ROW);
        for (int i = 0; i < nFeatures; ++i)
        {
            KeyPoint &kp = keypoints[i];
            kp.pt.x = kp_x[i];
            kp.pt.y = kp_y[i];
            kp.class_id = kp_laplacian[i];
            kp.octave = kp_octave[i];
            kp.size = kp_size[i];
            kp.angle = kp_dir[i];
            kp.response = kp_hessian[i];
        }
    }
}
Example 3: drawSquares
// the function draws all the squares in the image
static void drawSquares( UMat& _image, const vector<vector<Point> >& squares )
{
    Mat image = _image.getMat(ACCESS_WRITE);
    for( size_t i = 0; i < squares.size(); i++ )
    {
        const Point* p = &squares[i][0];
        int n = (int)squares[i].size();
        polylines(image, &p, &n, 1, true, Scalar(0,255,0), 3, LINE_AA);
    }
}
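Here getMat(ACCESS_WRITE) hands the CPU drawing routine polylines a writable host view of the UMat, so the squares end up in the UMat's buffer once the temporary Mat is released at the end of the function.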
Example 4: s
OCL_PERF_TEST_P(BufferPoolFixture, BufferPool_UMatIntegral10, Bool())
{
    BufferPoolState s(cv::ocl::getOpenCLAllocator()->getBufferPoolController(), GetParam());
    Size sz(1920, 1080);
    OCL_TEST_CYCLE()
    {
        for (int i = 0; i < 10; i++)
        {
            UMat src(sz, CV_32FC1);
            UMat dst;
            integral(src, dst);
            dst.getMat(ACCESS_READ); // complete async operations
        }
    }
    SANITY_CHECK_NOTHING();
}
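The dst.getMat(ACCESS_READ) call doubles as a synchronization point: mapping the result into host memory cannot complete until the queued OpenCL work behind integral() has finished, so each timed iteration measures the full computation rather than just kernel enqueueing.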
Example 5: drawArrows
static void drawArrows(UMat& _frame, const vector<Point2f>& prevPts, const vector<Point2f>& nextPts, const vector<uchar>& status,
                       Scalar line_color = Scalar(0, 0, 255))
{
    Mat frame = _frame.getMat(ACCESS_WRITE);
    for (size_t i = 0; i < prevPts.size(); ++i)
    {
        if (status[i])
        {
            int line_thickness = 1;
            Point p = prevPts[i];
            Point q = nextPts[i];
            double angle = atan2((double) p.y - q.y, (double) p.x - q.x);
            double hypotenuse = sqrt( (double)(p.y - q.y)*(p.y - q.y) + (double)(p.x - q.x)*(p.x - q.x) );
            if (hypotenuse < 1.0)
                continue;
            // Here we lengthen the arrow by a factor of three.
            q.x = (int) (p.x - 3 * hypotenuse * cos(angle));
            q.y = (int) (p.y - 3 * hypotenuse * sin(angle));
            // Now we draw the main line of the arrow.
            line(frame, p, q, line_color, line_thickness);
            // Now draw the tips of the arrow. I do some scaling so that the
            // tips look proportional to the main line of the arrow.
            p.x = (int) (q.x + 9 * cos(angle + CV_PI / 4));
            p.y = (int) (q.y + 9 * sin(angle + CV_PI / 4));
            line(frame, p, q, line_color, line_thickness);
            p.x = (int) (q.x + 9 * cos(angle - CV_PI / 4));
            p.y = (int) (q.y + 9 * sin(angle - CV_PI / 4));
            line(frame, p, q, line_color, line_thickness);
        }
    }
}
Example 6: get
static const Mat get(const UMat& m) { return m.getMat(ACCESS_READ); }
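An accessor like this is typically used so templated test helpers can obtain a host-readable view regardless of the matrix type. A companion overload for plain Mat (an assumption for illustration, not shown in the source) would make the pattern symmetric:

static const Mat get(const Mat& m) { return m; } // hypothetical overload: a Mat is already host-accessible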
Example 7: detectAndRecog
bool OCRTess::detectAndRecog() {
    UMat grey = UMat::zeros(this->img.rows + 2, this->img.cols + 2, CV_8UC1);
    cvtColor(this->img.clone(), grey, COLOR_RGB2GRAY);
    vector<UMat> channels;
    channels.clear();
    channels.push_back(grey);
    Mat m = 255 - grey.getMat(ACCESS_READ | ACCESS_WRITE);
    channels.push_back(m.getUMat(ACCESS_READ));
    vector<vector<ERStat>> regions(2);
    regions[0].clear();
    regions[1].clear();
    switch (this->REGION) {
        case REG_CSER: {
            parallel_for_(Range(0, (int) channels.size()), Parallel_extractCSER(channels, regions, this->erf1, this->erf2));
            break;
        }
        case REG_MSER: {
            vector<vector<Point> > contours;
            vector<Rect> bboxes;
            Ptr<MSER> mser = MSER::create(21, (int) (0.00002 * grey.cols * grey.rows), (int) (0.05 * grey.cols * grey.rows), 1, 0.7);
            mser->detectRegions(grey, contours, bboxes);
            if (contours.size() > 0)
                MSERsToERStats(grey, contours, regions);
            break;
        }
        default: {
            break;
        }
    }
    /* Text Recognition (OCR) */
    vector<vector<Vec2i> > nm_region_groups;
    vector<Rect> nm_boxes;
    switch (this->GROUP) {
        case 0:
            erGrouping(this->img, channels, regions, nm_region_groups, nm_boxes, ERGROUPING_ORIENTATION_HORIZ);
            break;
        case 1:
        default:
            erGrouping(this->img, channels, regions, nm_region_groups, nm_boxes, ERGROUPING_ORIENTATION_ANY, DIR + TR_GRP, 0.5);
            break;
    }
    if (!nm_boxes.size() || nm_boxes.size() > 1) return false;
    vector<string> words_detection;
    float min_confidence1 = 51.f, min_confidence2 = 60.f;
    vector<UMat> detections;
    for (int i = 0; i < (int) nm_boxes.size(); i++) {
        // rectangle(this->out, nm_boxes[i].tl(), nm_boxes[i].br(), Scalar(255, 255, 0), 3);
        UMat group_img = UMat::zeros(this->img.rows + 2, this->img.cols + 2, CV_8UC1);
        er_draw(channels, regions, nm_region_groups[i], group_img);
        group_img = group_img(nm_boxes[i]);
        copyMakeBorder(group_img.clone(), group_img, 15, 15, 15, 15, BORDER_CONSTANT, Scalar(0));
        detections.push_back(group_img);
    }
    vector<string> outputs((int) detections.size());
    vector<vector<Rect> > boxes((int) detections.size());
    vector<vector<string> > words((int) detections.size());
    vector<vector<float> > confidences((int) detections.size());
    if (!detections.size() || detections.size() > 1) return false;
    for (int i = 0; i < (int) detections.size(); i = i + this->num) {
        Range r;
        if (i + this->num <= (int) detections.size()) r = Range(i, i + this->num);
        else r = Range(i, (int) detections.size());
        parallel_for_(r, Parallel_OCR<OCRTesseract>(detections, outputs, boxes, words, confidences, this->ocrs));
    }
    for (int i = 0; i < (int) detections.size(); i++) {
        outputs[i].erase(remove(outputs[i].begin(), outputs[i].end(), '\n'), outputs[i].end());
        if (outputs[i].size() < 3) {
            continue;
        }
        for (int j = 0; j < (int) boxes[i].size(); j++) {
            boxes[i][j].x += nm_boxes[i].x - 15;
            boxes[i][j].y += nm_boxes[i].y - 15;
            if ((words[i][j].size() < 2) || (confidences[i][j] < min_confidence1) ||
                ((words[i][j].size() == 2) && (words[i][j][0] == words[i][j][1])) ||
                ((words[i][j].size() < 4) && (confidences[i][j] < min_confidence2)) ||
                isRepetitive(words[i][j]))
                continue;
            words_detection.push_back(words[i][j]);
            // rectangle(this->out, boxes[i][j].tl(), boxes[i][j].br(), Scalar(255, 0, 255), 3);
            // Size word_size = getTextSize(words[i][j], FONT_HERSHEY_SIMPLEX, (double) scale_font, (int) (3 * scale_font), NULL);
            // rectangle(this->out, boxes[i][j].tl() - Point(3, word_size.height + 3), boxes[i][j].tl() + Point(word_size.width, 0), Scalar(255, 0, 255), -1);
            // putText(this->out, words[i][j], boxes[i][j].tl() - Point(1, 1), FONT_HERSHEY_SIMPLEX, scale_font, Scalar(255, 255, 255), (int) (3 * scale_font));
        }
    }
    if (!words_detection.size() || words_detection.size() > 1) return false;
    return (words_detection[0].compare(WORD) == 0);
}
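Taken together, the three early returns (!nm_boxes.size() || nm_boxes.size() > 1, plus the analogous checks on detections and words_detection) mean the function succeeds only when exactly one text region survives grouping, OCR, and filtering, and the single recognized word equals WORD.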