Skip to content

Commit 7736500

Browse files
committed
Fixed broken commit in mnist-learn example: KasperskyLab#188
1 parent 1ccb977 commit 7736500

2 files changed

Lines changed: 25 additions & 46 deletions

File tree

examples/mnist-learn/inference.h

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,10 @@
2626
#include <knp/framework/projection/wta.h>
2727
#include <knp/framework/tags/name.h>
2828

29-
#include <map>
30-
#include <memory>
3129
#include <filesystem>
3230
#include <fstream>
31+
#include <map>
32+
#include <memory>
3333
#include <string>
3434
#include <utility>
3535
#include <vector>
@@ -42,11 +42,12 @@
4242
/**
4343
* @brief Run inference on a neural network and record spike activity.
4444
*
45-
* @details This function executes the inference process on a trained neural network, processing input data through the network
46-
* and recording spike messages for analysis. It configures the network with appropriate input and output channels, sets up
47-
* WTA mechanisms, initializes logging for spike monitoring, and executes the simulation.
45+
* @details This function executes the inference process on a trained neural network, processing input data through the
46+
* network and recording spike messages for analysis. It configures the network with appropriate input and output
47+
* channels, sets up WTA mechanisms, initializes logging for spike monitoring, and executes the simulation.
4848
*
4949
* @tparam Neuron neuron type template parameter for neuron model specification.
50+
*
5051
* @param backend shared pointer to the computational backend for execution.
5152
* @param network annotated network structure containing the network and its annotations.
5253
* @param model_desc model description containing configuration parameters and paths.
@@ -87,7 +88,7 @@ std::vector<knp::core::messaging::SpikeMessage> infer_network(
8788
// Connect rasterized image projections to the input channel.
8889
for (auto image_proj_uid : network.data_.projections_from_raster_)
8990
model.add_input_channel(input_image_channel_uid, image_proj_uid);
90-
91+
9192
// Initialize model executor with backend and channel mappings.
9293
// Online Help link: https://click.kaspersky.com/?hl=en-US&version=2.0&pid=KNP&link=online_help&helpid=251296
9394
knp::framework::ModelExecutor model_executor(model, backend, std::move(channel_map));
@@ -143,8 +144,7 @@ std::vector<knp::core::messaging::SpikeMessage> infer_network(
143144
for (const auto& message : messages)
144145
{
145146
const auto name_iter = pop_names.find(message.header_.sender_uid_);
146-
const std::string sender_name =
147-
name_iter == pop_names.end() ? "UNKNOWN" : name_iter->second;
147+
const std::string sender_name = name_iter == pop_names.end() ? "UNKNOWN" : name_iter->second;
148148

149149
// Log each neuron index from the spike message.
150150
for (const auto neuron_index : message.neuron_indexes_)
@@ -179,7 +179,7 @@ std::vector<knp::core::messaging::SpikeMessage> infer_network(
179179
if (step % 20 == 0) std::cout << "Inference step: " << step << std::endl;
180180
return step != dataset.get_steps_amount_for_inference();
181181
});
182-
182+
183183
// Retrieve final spike results from output channel.
184184
auto spikes = out_channel.update();
185185

@@ -193,12 +193,12 @@ std::vector<knp::core::messaging::SpikeMessage> infer_network(
193193

194194
/**
195195
* @brief Run inference on a model and record spike activity.
196-
*
197-
* @details This function provides a high-level interface for running inference on a trained model, handling backend loading and
198-
* delegation to the lower-level inference function.
199-
*
196+
*
197+
* @details This function provides a high-level interface for running inference on a trained model, handling backend
198+
* loading and delegation to the lower-level inference function.
199+
*
200200
* @tparam Neuron neuron type template parameter for neuron model specification.
201-
*
201+
*
202202
* @param model_desc model description containing configuration parameters and paths.
203203
* @param dataset dataset with inference data.
204204
* @param network annotated network structure containing the network and its annotations.

examples/mnist-learn/main.cpp

Lines changed: 11 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -19,44 +19,31 @@
1919
* limitations under the License.
2020
*/
2121

22-
#include <knp/framework/network_validator.h>
23-
2422
#include <iostream>
2523

2624
#include "dataset.h"
2725
#include "evaluate_results.h"
2826
#include "inference.h"
27+
#include "network_validation.h"
2928
#include "parse_arguments.h"
3029
#include "save_network.h"
3130
#include "training.h"
3231

3332

34-
/**
35-
* @brief Execute complete model pipeline for specified neuron type.
36-
*
37-
* @details This template function orchestrates the entire machine learning pipeline for neural networks, including
38-
* dataset processing, network construction, training, inference, and evaluation. It serves as the core execution
39-
* engine for both AltAI and BLIFAT neuron models.
40-
*
41-
* @tparam Neuron neuron type for neuron model specification.
42-
*
43-
* @param model_desc model description containing configuration parameters and paths.
44-
*/
33+
// Execute complete model pipeline for specified neuron type.
34+
// This template function orchestrates the entire machine learning pipeline for neural networks, including dataset
35+
// processing, network construction, training, inference, and evaluation. It serves as the core execution engine
36+
// for both AltAI and BLIFAT neuron models.
4537
template <typename Neuron>
4638
void run_model(const ModelDescription& model_desc)
4739
{
4840
Dataset dataset = process_dataset(model_desc);
4941

5042
AnnotatedNetwork network = construct_network<Neuron>(model_desc);
5143

52-
knp::framework::NetworkValidator validator;
53-
validator.add_validator(knp::framework::network_validators::Connectivity());
54-
bool validation_result = validator.run_validators(network.network_);
55-
if (!validation_result)
56-
{
57-
throw std::runtime_error("Network validation failed.");
58-
}
44+
validate_network(network.network_);
5945

46+
// Create backend loader for training and inference.
6047
// Online Help link: https://click.kaspersky.com/?hl=en-US&version=2.0&pid=KNP&link=online_help&helpid=243548
6148
knp::framework::BackendLoader backend_loader;
6249

@@ -74,17 +61,10 @@ void run_model(const ModelDescription& model_desc)
7461
}
7562

7663

77-
/**
78-
* @brief Main application entry point.
79-
*
80-
* @details This function serves as the primary execution point for the MNIST neural network learning application.
81-
* It handles command-line argument parsing, configuration validation, user interaction, and routes execution to
82-
* the appropriate neuron model.
83-
*
84-
* @param argc argument count.
85-
* @param argv arguments values.
86-
* @return ret code.
87-
*/
64+
// Main application entry point.
65+
// This function serves as the primary execution point for the MNIST neural network learning application.
66+
// It handles command-line argument parsing, configuration validation, user interaction, and routes execution to
67+
// the appropriate neuron model.
8868
int main(int argc, char** argv)
8969
{
9070
// Parse command-line arguments and validate configuration.
@@ -103,7 +83,6 @@ int main(int argc, char** argv)
10383
{
10484
case SupportedModelType::BLIFAT:
10585
{
106-
// cppcheck-suppress throwInEntryPoint
10786
run_model<knp::neuron_traits::BLIFATNeuron>(model_desc);
10887
break;
10988
}

0 commit comments

Comments (0)