This article collects typical usage examples of the C++ method Net::connect. If you are wondering what exactly C++ Net::connect does, how to use it, or what real calls look like, the curated code samples below may help. You can also explore further usage examples of the Net class that this method belongs to.
Six code examples of the Net::connect method are shown below, sorted by popularity by default.
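All examples use the OpenCV dnn module, where Net::connect(int outLayerId, int outNum, int inpLayerId, int inpNum) wires output port outNum of one layer to input port inpNum of another, and layer id 0 denotes the network's implicit input layer. As a primer, here is a minimal self-contained sketch (the ReLU layer, names, and shapes are illustrative assumptions, not taken from the examples below):

#include <opencv2/dnn.hpp>
using namespace cv;
using namespace cv::dnn;

int main()
{
    Net net;

    LayerParams relu;
    relu.type = "ReLU";
    relu.name = "relu1";
    int reluId = net.addLayer(relu.name, relu.type, relu);

    // Wire the network input (layer id 0, output port 0) to relu1's input port 0.
    net.connect(0, 0, reluId, 0);

    Mat input({1, 3, 4, 4}, CV_32F);
    randu(input, -1.0f, 1.0f);
    net.setInput(input);
    Mat out = net.forward("relu1");
    return 0;
}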
Example 1: weights
TEST_P(Concat, Accuracy)
{
    Vec3i inSize = get<0>(GetParam());
    Vec3i numChannels = get<1>(GetParam());

    Net net;
    std::vector<int> convLayerIds;
    convLayerIds.reserve(numChannels.channels);
    for (int i = 0, n = numChannels.channels; i < n; ++i)
    {
        if (!numChannels[i])
            break;

        // 1x1 convolution with random weights and no bias term.
        Mat weights({numChannels[i], inSize[0], 1, 1}, CV_32F);
        randu(weights, -1.0f, 1.0f);

        LayerParams convParam;
        convParam.set("kernel_w", 1);
        convParam.set("kernel_h", 1);
        convParam.set("num_output", numChannels[i]);
        convParam.set("bias_term", false);
        convParam.type = "Convolution";
        std::ostringstream ss;
        ss << "convLayer" << i;
        convParam.name = ss.str();
        convParam.blobs.push_back(weights);

        int layerId = net.addLayer(convParam.name, convParam.type, convParam);
        convLayerIds.push_back(layerId);
        // Feed every convolution from the network input (layer id 0).
        net.connect(0, 0, layerId, 0);
    }

    LayerParams concatParam;
    concatParam.type = "Concat";
    concatParam.name = "testLayer";
    int concatId = net.addLayer(concatParam.name, concatParam.type, concatParam);
    // The Concat layer gets the raw input on port 0 and each
    // convolution's output on the following ports.
    net.connect(0, 0, concatId, 0);
    for (size_t i = 0; i < convLayerIds.size(); ++i)
    {
        net.connect(convLayerIds[i], 0, concatId, (int)i + 1);
    }

    Mat input({1, inSize[0], inSize[1], inSize[2]}, CV_32F);
    randu(input, -1.0f, 1.0f);
    net.setInput(input);
    Mat outputDefault = net.forward(concatParam.name).clone();

    net.setPreferableBackend(DNN_BACKEND_HALIDE);
    Mat outputHalide = net.forward(concatParam.name).clone();
    normAssert(outputDefault, outputHalide);
}
Example 2: testInPlaceActivation
void testInPlaceActivation(LayerParams& lp)
{
    EXPECT_FALSE(lp.name.empty());

    // 2x2 average pooling placed in front of the activation under test.
    LayerParams pool;
    pool.set("pool", "ave");
    pool.set("kernel_w", 2);
    pool.set("kernel_h", 2);
    pool.set("stride_w", 2);
    pool.set("stride_h", 2);
    pool.type = "Pooling";

    Net net;
    int poolId = net.addLayer(pool.name, pool.type, pool);
    net.connect(0, 0, poolId, 0);
    // Attach the activation directly after the pooling layer (in-place).
    net.addLayerToPrev(lp.name, lp.type, lp);

    Mat input({1, kNumChannels, 10, 10}, CV_32F);
    randu(input, -1.0f, 1.0f);
    net.setInput(input);
    Mat outputDefault = net.forward(lp.name).clone();

    net.setInput(input);
    net.setPreferableBackend(DNN_BACKEND_HALIDE);
    Mat outputHalide = net.forward(lp.name).clone();
    normAssert(outputDefault, outputHalide);
}
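For context, callers of this helper typically look like the following sketch (the activation type and parameter values are assumptions for illustration):

TEST(ReLU, Accuracy)
{
    LayerParams lp;
    lp.set("negative_slope", 0.25f);  // assumed leaky-ReLU slope
    lp.type = "ReLU";
    lp.name = "testLayer";
    testInPlaceActivation(lp);
}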
Example 3: input
//////////////////////////////////////////////////////////////////////////////
// Max pooling - unpooling
//////////////////////////////////////////////////////////////////////////////
TEST(MaxPoolUnpool_Halide, Accuracy)
{
    LayerParams pool;
    pool.set("pool", "max");
    pool.set("kernel_w", 2);
    pool.set("kernel_h", 2);
    pool.set("stride_w", 2);
    pool.set("stride_h", 2);
    pool.set("pad_w", 0);
    pool.set("pad_h", 0);
    pool.type = "Pooling";
    pool.name = "testPool";

    LayerParams unpool;
    unpool.set("pool_k_w", 2);
    unpool.set("pool_k_h", 2);
    unpool.set("pool_stride_w", 2);
    unpool.set("pool_stride_h", 2);
    unpool.set("pool_pad_w", 0);
    unpool.set("pool_pad_h", 0);
    unpool.type = "MaxUnpool";
    unpool.name = "testUnpool";

    Net net;
    int poolId = net.addLayer(pool.name, pool.type, pool);
    net.connect(0, 0, poolId, 0);
    int unpoolId = net.addLayer(unpool.name, unpool.type, unpool);
    // Port 0 carries the pooled values, port 1 the max-element indices
    // that MaxUnpool needs to restore the original spatial layout.
    net.connect(poolId, 0, unpoolId, 0);
    net.connect(poolId, 1, unpoolId, 1);

    Mat input({1, 1, 4, 4}, CV_32F);
    randu(input, -1.0f, 1.0f);
    net.setInput(input);
    Mat outputDefault = net.forward("testUnpool").clone();

    net.setPreferableBackend(DNN_BACKEND_HALIDE);
    net.setInput(input);
    Mat outputHalide = net.forward("testUnpool").clone();
    normAssert(outputDefault, outputHalide);
}
Example 4: test
static void test(LayerParams& params, Mat& input)
{
    randu(input, -1.0f, 1.0f);

    Net net;
    int lid = net.addLayer(params.name, params.type, params);
    net.connect(0, 0, lid, 0);

    net.setInput(input);
    Mat outputDefault = net.forward(params.name).clone();

    net.setPreferableBackend(DNN_BACKEND_HALIDE);
    Mat outputHalide = net.forward(params.name).clone();
    normAssert(outputDefault, outputHalide);
}
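A caller fills in the LayerParams of the layer under test and supplies an input blob, along these lines (a hypothetical sketch; the LRN parameters and input shape are assumptions):

LayerParams lrn;
lrn.type = "LRN";
lrn.name = "testLRN";
lrn.set("local_size", 5);

Mat input({1, 3, 8, 8}, CV_32F);  // shape chosen for illustration
test(lrn, input);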
Example 5: addInput
void addInput(const std::string &name, int layerId, int inNum, Net &dstNet)
{
    // Search the registered blobs from the most recently added one backwards,
    // so the latest producer of a name wins.
    int idx;
    for (idx = (int)addedBlobs.size() - 1; idx >= 0; idx--)
    {
        if (addedBlobs[idx].name == name)
            break;
    }

    if (idx < 0)
    {
        CV_Error(Error::StsObjectNotFound, "Can't find output blob \"" + name + "\"");
        return;
    }

    dstNet.connect(addedBlobs[idx].layerId, addedBlobs[idx].outNum, layerId, inNum);
}
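Besides the id-based overload used throughout these examples, Net::connect also has a string-based overload, connect(String outPin, String inpPin), where a pin is a layer name optionally followed by ".portNumber". A minimal sketch (the layer names here are assumptions):

Net net;
// ... after adding layers named "conv1" and "concat" ...
net.connect("conv1", "concat");  // conv1's output 0 -> concat's input 0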
Example 6: populateNet
//......... some code omitted here .........
                    constBlobs.insert(std::make_pair(layerParams.name, out));
                    continue;
                }
                replaceLayerParam(layerParams, "shape", "dim");
            }
        }
        else if (layer_type == "Pad")
        {
            layerParams.type = "Padding";
        }
        else if (layer_type == "Shape")
        {
            CV_Assert(node_proto.input_size() == 1);
            shapeIt = outShapes.find(node_proto.input(0));
            CV_Assert(shapeIt != outShapes.end());
            MatShape inpShape = shapeIt->second;

            // Materialize the input's shape as a 1-D CV_32S constant blob.
            Mat shapeMat(inpShape.size(), 1, CV_32S);
            for (int j = 0; j < inpShape.size(); ++j)
                shapeMat.at<int>(j) = inpShape[j];
            shapeMat.dims = 1;

            constBlobs.insert(std::make_pair(layerParams.name, shapeMat));
            continue;
        }
        else if (layer_type == "Gather")
        {
            CV_Assert(node_proto.input_size() == 2);
            CV_Assert(layerParams.has("axis"));
            Mat input = getBlob(node_proto, constBlobs, 0);
            Mat indexMat = getBlob(node_proto, constBlobs, 1);
            CV_Assert_N(indexMat.type() == CV_32S, indexMat.total() == 1);
            int index = indexMat.at<int>(0);
            int axis = layerParams.get<int>("axis");

            // Slice out a single entry along the requested axis.
            std::vector<cv::Range> ranges(input.dims, Range::all());
            ranges[axis] = Range(index, index + 1);
            Mat out = input(ranges);

            constBlobs.insert(std::make_pair(layerParams.name, out));
            continue;
        }
        else if (layer_type == "Concat")
        {
            bool hasVariableInps = false;
            for (int i = 0; i < node_proto.input_size(); ++i)
            {
                if (layer_id.find(node_proto.input(i)) != layer_id.end())
                {
                    hasVariableInps = true;
                    break;
                }
            }

            if (!hasVariableInps)
            {
                // All inputs are constants: fold the concatenation at import time.
                std::vector<Mat> inputs(node_proto.input_size()), concatenated;
                for (size_t i = 0; i < inputs.size(); ++i)
                {
                    inputs[i] = getBlob(node_proto, constBlobs, i);
                }
                Ptr<Layer> concat = ConcatLayer::create(layerParams);
                runLayer(concat, inputs, concatenated);
                CV_Assert(concatenated.size() == 1);

                constBlobs.insert(std::make_pair(layerParams.name, concatenated[0]));
                continue;
            }
        }
        else
        {
            // Inputs that are not outputs of other layers become the layer's blobs (weights).
            for (int j = 0; j < node_proto.input_size(); j++) {
                if (layer_id.find(node_proto.input(j)) == layer_id.end())
                    layerParams.blobs.push_back(getBlob(node_proto, constBlobs, j));
            }
        }

        int id = dstNet.addLayer(layerParams.name, layerParams.type, layerParams);
        layer_id.insert(std::make_pair(layerParams.name, LayerInfo(id, 0)));

        std::vector<MatShape> layerInpShapes, layerOutShapes, layerInternalShapes;
        for (int j = 0; j < node_proto.input_size(); j++) {
            layerId = layer_id.find(node_proto.input(j));
            if (layerId != layer_id.end()) {
                dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, j);
                // Collect input shapes.
                shapeIt = outShapes.find(node_proto.input(j));
                CV_Assert(shapeIt != outShapes.end());
                layerInpShapes.push_back(shapeIt->second);
            }
        }

        // Compute shape of output blob for this layer.
        Ptr<Layer> layer = dstNet.getLayer(id);
        layer->getMemoryShapes(layerInpShapes, 0, layerOutShapes, layerInternalShapes);
        CV_Assert(!layerOutShapes.empty());
        outShapes[layerParams.name] = layerOutShapes[0];
    }
}