Commit 60a841c7 authored by Lubov Batanina, committed by Alexander Alekhin

Merge pull request #14255 from l-bat:networks_visualization

* Add networks visualization

* Disable CXX11

* Fixed multi-input support

* Added output shapes

* Added color for DLIE/CPU

* Fixed graph colors
Parent a246d8d8
......@@ -385,6 +385,16 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
/** Returns true if there are no layers in the network. */
CV_WRAP bool empty() const;
/** @brief Dump net to String.
* @returns String with structure, hyperparameters, backend, target and fusion.
* Call this method after forward() to see the correct backend, target and fusion.
*/
CV_WRAP String dump();
/** @brief Dump net structure, hyperparameters, backend, target and fusion to a dot file.
* @param path path to the output file with .dot extension
* @see dump()
*/
CV_WRAP void dumpToFile(const String& path);
/** @brief Adds new layer to the net.
* @param name unique name of the adding layer.
* @param type typename of the adding layer (type must be registered in LayerRegister).
......
......@@ -47,6 +47,7 @@
#include <algorithm>
#include <iostream>
#include <sstream>
#include <fstream>
#include <iterator>
#include <numeric>
#include <opencv2/dnn/shape_utils.hpp>
......@@ -2921,6 +2922,205 @@ int Net::getLayerId(const String &layer)
return impl->getLayerId(layer);
}
// Render a human-readable description of the network in Graphviz "dot"
// format: one node per layer, or one record-shaped "cluster_N" node per
// group of layers fused/skipped into a single backend node, plus edges
// that follow each layer's consumers.
String Net::dump()
{
CV_Assert(!empty());
std::ostringstream out;
std::map<int, LayerData>& map = impl->layers;
int prefBackend = impl->preferableBackend;
// skippedLayers[k] holds the (sorted) layer ids merged into cluster k.
// allLayers[id] == -1 for a standalone node, otherwise the cluster index.
std::vector<std::vector<int> > skippedLayers;
std::vector<int> skipId;
std::vector<int> allLayers(map.size(), -1);
int idPrev = -1;
Ptr<BackendNode> prevNode;
// Pass 1: walk layers in reverse id order and group consecutive layers
// that share the same backend node (or are marked 'skip') into clusters.
for (std::map<int, LayerData>::reverse_iterator rit = map.rbegin(); rit != map.rend(); ++rit)
{
std::map<int, Ptr<BackendNode> >::iterator itBackend = rit->second.backendNodes.find(prefBackend);
if (prefBackend == DNN_BACKEND_OPENCV || itBackend == rit->second.backendNodes.end() ||
itBackend->second.empty())
{
// No backend node here: either this layer was skipped by fusion
// (keep accumulating) or it terminates the current run of skips.
if (rit->second.skip)
skipId.push_back(rit->first);
else if (!skipId.empty())
{
// Close the cluster, adding the layer the skipped ones were
// fused into: this layer itself, or the previous backend layer.
if (prefBackend == DNN_BACKEND_OPENCV || prevNode.empty())
skipId.push_back(rit->first);
else if (idPrev != -1)
skipId.push_back(idPrev);
std::sort(skipId.begin(), skipId.end());
for (int i = 0; i < skipId.size(); i++) {
allLayers[skipId[i]] = skippedLayers.size();
}
skippedLayers.push_back(skipId);
skipId.clear();
}
}
else
{
// Layer owns a backend node; layers that share one node were fused.
if (itBackend->second == prevNode)
skipId.push_back(idPrev);
else if (!skipId.empty())
{
skipId.push_back(idPrev);
std::sort(skipId.begin(), skipId.end());
for (int i = 0; i < skipId.size(); i++) {
allLayers[skipId[i]] = skippedLayers.size();
}
skippedLayers.push_back(skipId);
skipId.clear();
}
idPrev = rit->first;
prevNode = itBackend->second;
}
}
// Node fill colors, indexed by colorId selected from backend/target below.
String colors[] = {"#ffffb3", "#fccde5", "#8dd3c7", "#bebada", "#80b1d3", "#fdb462"};
String backend;
switch (prefBackend) {
case DNN_BACKEND_DEFAULT: backend = "DEFAULT/"; break;
case DNN_BACKEND_HALIDE: backend = "HALIDE/"; break;
case DNN_BACKEND_INFERENCE_ENGINE: backend = "DLIE/"; break;
case DNN_BACKEND_OPENCV: backend = "OCV/"; break;
}
out << "digraph G {" << '\n';
// Pass 2: emit one dot node per standalone layer, or one record node per
// cluster (emitted when its first member layer is visited).
for (std::map<int, LayerData>::iterator it = map.begin(); it != map.end(); ++it)
{
String name = it->second.params.name;
if (allLayers[it->first] == -1 && !name.empty()) {
out << " " << "\"" << name << "\"" << " [label=\"";
skipId.clear();
skipId.push_back(it->first);
}
else if (name.empty() || it->first != skippedLayers[allLayers[it->first]][0])
continue;
else { // first node in cluster : it->first == skippedLayers[allLayers[it->first]][0]
int cluster = allLayers[it->first];
out << " " << "\"" << "cluster_" << cluster << "\"" << " [label=\"{";
skipId = skippedLayers[allLayers[it->first]]; // vertices in current cluster
}
// Print name, type and common hyperparameters for each layer in the
// node ('|' separates the fields of a dot record shape).
for (int i = 0; i < skipId.size(); i++)
{
LayerParams& lp = map[skipId[i]].params;
if (!lp.name.empty()) {
if (i > 0) {
out << " | ";
}
out << lp.name << "\\n" << lp.type << "\\n";
// For each geometry parameter, prefer the single combined key and
// fall back to the separate _h/_w (or per-side) keys.
if (lp.has("kernel_size")) {
DictValue size = lp.get("kernel_size");
out << "kernel (HxW): " << size << " x " << size << "\\l";
} else if (lp.has("kernel_h") && lp.has("kernel_w")) {
DictValue h = lp.get("kernel_h");
DictValue w = lp.get("kernel_w");
out << "kernel (HxW): " << h << " x " << w << "\\l";
}
if (lp.has("stride")) {
DictValue stride = lp.get("stride");
out << "stride (HxW): " << stride << " x " << stride << "\\l";
} else if (lp.has("stride_h") && lp.has("stride_w")) {
DictValue h = lp.get("stride_h");
DictValue w = lp.get("stride_w");
out << "stride (HxW): " << h << " x " << w << "\\l";
}
if (lp.has("dilation")) {
DictValue dilation = lp.get("dilation");
out << "dilation (HxW): " << dilation << " x " << dilation << "\\l";
} else if (lp.has("dilation_h") && lp.has("dilation_w")) {
DictValue h = lp.get("dilation_h");
DictValue w = lp.get("dilation_w");
out << "dilation (HxW): " << h << " x " << w << "\\l";
}
if (lp.has("pad")) {
DictValue pad = lp.get("pad");
out << "pad (LxTxRxB): " << pad << " x " << pad << " x " << pad << " x " << pad << "\\l";
} else if (lp.has("pad_l") && lp.has("pad_t") && lp.has("pad_r") && lp.has("pad_b")) {
DictValue l = lp.get("pad_l");
DictValue t = lp.get("pad_t");
DictValue r = lp.get("pad_r");
DictValue b = lp.get("pad_b");
out << "pad (LxTxRxB): " << l << " x " << t << " x " << r << " x " << b << "\\l";
}
// NOTE(review): this branch prints pooled output size but labels it
// "pad"; looks like it should read "pooled (HxW)" — confirm intent.
// NOTE(review): the condition uses '||' but both keys are fetched
// unconditionally below; if only one of pooled_w/pooled_h is set,
// the get() of the missing key will fail — confirm both are always
// present together.
else if (lp.has("pooled_w") || lp.has("pooled_h")) {
DictValue h = lp.get("pooled_h");
DictValue w = lp.get("pooled_w");
out << "pad (HxW): " << h << " x " << w << "\\l";
}
if (lp.has("pool")) {
out << "pool: " << lp.get("pool") << "\\l";
}
if (lp.has("global_pooling")) {
out << "global_pooling: " << lp.get("global_pooling") << "\\l";
}
if (lp.has("group")) {
out << "group: " << lp.get("group") << "\\l";
}
}
}
// Shape of the first output blob, if already known (set after forward()).
if (!it->second.outputBlobs.empty())
out << "output: " << it->second.outputBlobs[0].size << "\\l";
// NOTE(review): operator[] default-inserts an empty Ptr when this layer
// has no node for prefBackend — intended here only as a read; confirm.
Ptr<BackendNode> layerBackend = it->second.backendNodes[prefBackend];
out << (!layerBackend.empty() ? backend : "OCV/");
// Pick the target label and the fill color (CPU color depends on
// whether the layer actually runs through the preferred backend).
int colorId = 0;
switch (it->second.layerInstance->preferableTarget) {
case DNN_TARGET_CPU: out << "CPU\\n"; colorId = layerBackend.empty() ? 0 : 5; break;
case DNN_TARGET_OPENCL: out << "OCL\\n"; colorId = 1; break;
case DNN_TARGET_OPENCL_FP16: out << "OCL_FP16\\n"; colorId = 2; break;
case DNN_TARGET_MYRIAD: out << "MYRIAD\\n"; colorId = 3; break;
case DNN_TARGET_FPGA: out << "FPGA\\n"; colorId = 4; break;
}
// Close the label: plain box for a single layer, '{...}' record otherwise.
out << ((skipId.size() == 1)? "\" " : " }\" ");
out << "fillcolor=\"" << colors[colorId] << "\" ";
out << "style=filled ";
out << "shape=" << ((skipId.size() == 1)? "box" : "record") << "]" << '\n';
}
out << '\n';
// Pass 3: emit edges. Edges inside a cluster are implicit; edges leave a
// cluster only from its last member layer.
int inputsSize = impl->netInputLayer->outNames.size();
for (std::map<int, LayerData>::iterator it = map.begin(); it != map.end(); ++it)
{
if (allLayers[it->first] == -1) // node
{
for (int i = 0; i < it->second.consumers.size(); i++)
{
int outId = it->second.consumers[i].lid;
// The input layer gets one suffixed node per input when the net
// has multiple inputs.
if (it == map.begin() && inputsSize > 1)
out << " " << "\"" << it->second.name << "_" << i << "\"" << " -> ";
else
out << " " << "\"" << it->second.name << "\"" << " -> ";
if (allLayers[outId] == -1) // node
out << "\"" << map[outId].name << "\"" << '\n';
else // cluster
out << "\"" << "cluster_" << allLayers[outId] << "\"" << '\n';
}
}
else if (it->first == skippedLayers[allLayers[it->first]].back()) // edges from last layer in cluster
{
for (int i = 0; i < it->second.consumers.size(); i++)
{
int outId = it->second.consumers[i].lid;
if (allLayers[outId] == -1) { // node
out << " " << "\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
out << "\"" << map[outId].name << "\"" << '\n';
}
else if (allLayers[outId] != allLayers[it->first]) { // another cluster
out << " " << "\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
out << "\"" << "cluster_" << allLayers[outId] << "\"" << '\n';
}
}
}
}
out << "}";
return out.str();
}
void Net::dumpToFile(const String& path) {
std::ofstream file(path.c_str());
file << dump();
file.close();
}
Ptr<Layer> Net::getLayer(LayerId layerId)
{
LayerData &ld = impl->getLayerData(layerId);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register.