@@ -124,7 +124,7 @@ namespace tk { namespace dnn {
}

tk::dnn::Network *darknetAddNet(darknetFields_t &fields) {
- std::cout<<"Add Net: "<<fields.type<<"\n";
+ // std::cout<<"Add Net: "<<fields.type<<"\n";
dataDim_t dim(1, fields.channels, fields.height, fields.width);
return new tk::dnn::Network(dim);
}
@@ -138,10 +138,10 @@ namespace tk { namespace dnn {
if (f.pad == 1) {
f.padding_x = f.padding_y = f.size_x/2;
}
- std::cout<<"Add layer: "<<f.type<<"\n";
+ // std::cout<<"Add layer: "<<f.type<<"\n";
if (f.type == "convolutional") {
std::string wgs = wgs_path + "/c" + std::to_string(netLayers.size()) + ".bin";
- printf("%d (%d,%d) (%d,%d) (%d,%d) %s %d %d\n", f.filters, f.size_x, f.size_y, f.stride_x, f.stride_y, f.padding_x, f.padding_y, wgs.c_str(), f.batch_normalize, f.groups);
+ // printf("%d (%d,%d) (%d,%d) (%d,%d) %s %d %d\n", f.filters, f.size_x, f.size_y, f.stride_x, f.stride_y, f.padding_x, f.padding_y, wgs.c_str(), f.batch_normalize, f.groups);
tk::dnn::Conv2d *l = new tk::dnn::Conv2d(net, f.filters, f.size_x, f.size_y, f.stride_x,
f.stride_y, f.padding_x, f.padding_y, wgs, f.batch_normalize, false, f.groups);
netLayers.push_back(l);
@@ -163,7 +163,7 @@ namespace tk { namespace dnn {
if (layerIdx < 0)
layerIdx = netLayers.size() + layerIdx;
if (layerIdx < 0 || layerIdx >= netLayers.size()) FatalError("impossible to shortcut\n");
- std::cout<<"shortcut to "<<layerIdx<<" "<<netLayers[layerIdx]->getLayerName()<<"\n";
+ // std::cout<<"shortcut to "<<layerIdx<<" "<<netLayers[layerIdx]->getLayerName()<<"\n";
netLayers.push_back(new tk::dnn::Shortcut(net, netLayers[layerIdx]));

} else if (f.type == "upsample") {
@@ -177,7 +177,7 @@ namespace tk { namespace dnn {
if (layerIdx < 0)
layerIdx = netLayers.size() + layerIdx;
if (layerIdx < 0 || layerIdx >= netLayers.size()) FatalError("impossible to route\n");
- std::cout<<"Route to "<<layerIdx<<" "<<netLayers[layerIdx]->getLayerName()<<"\n";
+ // std::cout<<"Route to "<<layerIdx<<" "<<netLayers[layerIdx]->getLayerName()<<"\n";
layers.push_back(netLayers[layerIdx]);
}
netLayers.push_back(new tk::dnn::Route(net, layers.data(), layers.size()));
@@ -190,7 +190,7 @@ namespace tk { namespace dnn {

} else if (f.type == "yolo") {
std::string wgs = wgs_path + "/g" + std::to_string(netLayers.size()) + ".bin";
- printf("%d %d %s %d %f\n", f.classes, f.num/f.n_mask, wgs.c_str(), f.n_mask, f.scale_xy);
+ // printf("%d %d %s %d %f\n", f.classes, f.num/f.n_mask, wgs.c_str(), f.n_mask, f.scale_xy);
tk::dnn::Yolo *l = new tk::dnn::Yolo(net, f.classes, f.num/f.n_mask, wgs, f.n_mask, f.scale_xy);
if (names.size() != f.classes)
FatalError("Mismatch between number of classes and names");