Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions examples/mnist-learn/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
* limitations under the License.
*/

#include <knp/framework/inference_evaluation/classification.h>
#include <knp/framework/inference_evaluation/classification/processor.h>

#include <filesystem>
#include <fstream>
Expand Down Expand Up @@ -80,7 +80,7 @@ int main(int argc, char** argv)
std::cout << get_time_string() << ": inference finished -- output spike count is " << spikes.size() << std::endl;

// Evaluate results.
inference_evaluation::InferenceResultForClass::InferenceResultsProcessor inference_processor;
inference_evaluation::InferenceResultsProcessor inference_processor;
inference_processor.process_inference_results(spikes, dataset);

inference_processor.write_inference_results_to_stream_as_csv(std::cout);
Expand Down
2 changes: 1 addition & 1 deletion knp/base-framework/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,8 @@ knp_add_library("${PROJECT_NAME}-core"
impl/sonata/types/additive_delta_synapse.cpp
impl/data_processing/classification/dataset.cpp
impl/data_processing/classification/image.cpp
impl/inference_evaluation/classification.cpp
impl/inference_evaluation/perfomance_metrics.cpp
impl/inference_evaluation/classification/processor.cpp
impl/observer.cpp
${${PROJECT_NAME}_headers}
ALIAS KNP::BaseFramework::Core
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
*/

#include <knp/core/messaging/messaging.h>
#include <knp/framework/inference_evaluation/classification.h>
#include <knp/framework/inference_evaluation/classification/processor.h>
#include <knp/framework/inference_evaluation/perfomance_metrics.h>

#include <algorithm>
Expand All @@ -30,14 +30,16 @@
namespace knp::framework::inference_evaluation::classification
{

class InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper
class EvaluationHelper
{
public:
explicit EvaluationHelper(const knp::framework::data_processing::classification::Dataset &dataset);
explicit EvaluationHelper(
const knp::framework::data_processing::classification::Dataset &dataset,
std::vector<InferenceResult> &inference_results);

void process_spikes(const knp::core::messaging::SpikeData &firing_neuron_indices, size_t step);

[[nodiscard]] std::vector<InferenceResultForClass> process_inference_predictions() const;
[[nodiscard]] std::vector<InferenceResult> process_inference_predictions() const;

private:
struct Prediction
Expand All @@ -53,18 +55,19 @@ class InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper
std::vector<size_t> class_votes_;

const knp::framework::data_processing::classification::Dataset &dataset_;
std::vector<InferenceResult> &inference_results_;
};


InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper::EvaluationHelper(
const knp::framework::data_processing::classification::Dataset &dataset)
: class_votes_(dataset.get_amount_of_classes(), 0), dataset_(dataset)
// Constructor: sizes one vote counter per dataset class (all initialized to zero) and
// stores references to the dataset and to the caller-owned vector of per-class results.
EvaluationHelper::EvaluationHelper(
const knp::framework::data_processing::classification::Dataset &dataset,
std::vector<InferenceResult> &inference_results)
: class_votes_(dataset.get_amount_of_classes(), 0), dataset_(dataset), inference_results_(inference_results)
{
}


void InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper::process_spikes(
const knp::core::messaging::SpikeData &firing_neuron_indices, size_t step)
void EvaluationHelper::process_spikes(const knp::core::messaging::SpikeData &firing_neuron_indices, size_t step)
{
for (auto i : firing_neuron_indices) ++class_votes_[i % dataset_.get_amount_of_classes()];
if (!((step + 1) % dataset_.get_steps_per_frame()))
Expand All @@ -86,10 +89,9 @@ void InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper::proce
}


std::vector<InferenceResultForClass>
InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper::process_inference_predictions() const
std::vector<InferenceResult> EvaluationHelper::process_inference_predictions() const
{
std::vector<InferenceResultForClass> prediction_results(dataset_.get_amount_of_classes());
std::vector<InferenceResult> prediction_results(dataset_.get_amount_of_classes());
for (size_t i = 0; i < predictions_.size(); ++i)
{
auto const &prediction = predictions_[i];
Expand All @@ -113,11 +115,11 @@ InferenceResultForClass::InferenceResultsProcessor::EvaluationHelper::process_in
}


void InferenceResultForClass::InferenceResultsProcessor::process_inference_results(
void InferenceResultsProcessor::process_inference_results(
const std::vector<knp::core::messaging::SpikeMessage> &spikes,
knp::framework::data_processing::classification::Dataset const &dataset)
{
EvaluationHelper helper(dataset);
EvaluationHelper helper(dataset, inference_results_);
knp::core::messaging::SpikeData firing_neuron_indices;
auto spikes_iter = spikes.begin();

Expand All @@ -137,11 +139,10 @@ void InferenceResultForClass::InferenceResultsProcessor::process_inference_resul
}


void InferenceResultForClass::InferenceResultsProcessor::write_inference_results_to_stream_as_csv(
std::ostream &results_stream)
void InferenceResultsProcessor::write_inference_results_to_stream_as_csv(std::ostream &results_stream)
{
results_stream << "CLASS,TOTAL_VOTES,TRUE_POSITIVES,FALSE_NEGATIVES,FALSE_POSITIVES,TRUE_NEGATIVES,PRECISION,"
"RECALL,PREVALENCE,ACCURACY,F_MEASURE\n";
"RECALL,PREVALENCE,ACCURACY,F_SCORE\n";
for (size_t label = 0; label < inference_results_.size(); ++label)
{
auto const &prediction = inference_results_[label];
Expand All @@ -153,12 +154,12 @@ void InferenceResultForClass::InferenceResultsProcessor::write_inference_results
const float accuracy = get_accuracy(
prediction.true_positives_, prediction.false_negatives_, prediction.false_positives_,
prediction.true_negatives_);
const float f_measure = get_f_measure(precision, recall);
const float f_score = get_f_score(precision, recall);

results_stream << label << ',' << prediction.get_total_votes() << ',' << prediction.true_positives_ << ','
<< prediction.false_negatives_ << ',' << prediction.false_positives_ << ','
<< prediction.true_negatives_ << ',' << precision << ',' << recall << ',' << prevalence << ','
<< accuracy << ',' << f_measure << std::endl;
<< accuracy << ',' << f_score << std::endl;
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ float get_accuracy(size_t true_positives, size_t false_negatives, size_t false_p
}


float get_f_measure(float precision, float recall)
float get_f_score(float precision, float recall)
{
if (precision * recall == 0) return 0.F;
return 2.F * precision * recall / (precision + recall);
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/**
* @file processor.h
* @brief Processing inference results.
* @kaspersky_support D. Postnikov
* @date 05.09.2025
* @license Apache 2.0
* @copyright © 2025 AO Kaspersky Lab
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#pragma once

#include <knp/core/messaging/messaging.h>
#include <knp/framework/data_processing/classification/image.h>

#include <ostream>
#include <vector>

#include "result.h"

Comment thread
artiomn marked this conversation as resolved.

namespace knp::framework::inference_evaluation::classification
{

/**
 * @brief A class to process inference results of classification models.
 * @details Accumulates per-class inference results from spike messages and can
 * export them as CSV or expose them for further processing.
 */
class KNP_DECLSPEC InferenceResultsProcessor
{
public:
    /**
     * @brief Process inference results. Suited for classification models.
     * @param spikes All spikes from inference.
     * @param dataset Dataset the inference was run on.
     */
    void process_inference_results(
        const std::vector<knp::core::messaging::SpikeMessage> &spikes,
        const knp::framework::data_processing::classification::Dataset &dataset);

    /**
     * @brief Put inference results for each class to a stream in form of CSV.
     * @param results_stream Stream for output.
     */
    void write_inference_results_to_stream_as_csv(std::ostream &results_stream);

    /**
     * @brief Get inference results.
     * @return Inference results, one entry per class.
     */
    [[nodiscard]] const std::vector<InferenceResult> &get_inference_results() const { return inference_results_; }

private:
    /**
     * @brief Processed inference results (filled by process_inference_results).
     */
    std::vector<InferenceResult> inference_results_;
};

} // namespace knp::framework::inference_evaluation::classification
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
/**
* @file result.h
* @brief Structure to hold inference results.
* @kaspersky_support D. Postnikov
* @date 16.07.2025
* @license Apache 2.0
* @copyright © 2025 AO Kaspersky Lab
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#pragma once

#include <cstddef>

#include <knp/core/impexp.h>

namespace knp::framework::inference_evaluation::classification
{

/**
* @brief Processed inference result for single class.
*/
struct KNP_DECLSPEC InferenceResult
{
/**
* @brief Amount of times model, that is supposed to predict dog, predicted dog when it is a dog.
Comment thread
artiomn marked this conversation as resolved.
*/
size_t true_positives_ = 0;
Comment thread
artiomn marked this conversation as resolved.

/**
* @brief Amount of times model, that is supposed to predict dog, predicted not a dog when it is a dog.
*/
size_t false_negatives_ = 0;

/**
* @brief Amount of times model, that is supposed to predict dog, predicted dog when it is not a dog.
*/
size_t false_positives_ = 0;

/**
* @brief Amount of times model, that is supposed to predict dog, predicted not a dog when it is a not a dog.
*/
size_t true_negatives_ = 0;

/**
* @brief Shortcut for getting total votes.
* @return Total votes.
*/
[[nodiscard]] size_t get_total_votes() const { return true_positives_ + false_negatives_ + false_positives_; }
};

} // namespace knp::framework::inference_evaluation::classification
Loading
Loading