Skip to content
Merged
Show file tree
Hide file tree
Changes from 39 commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
5971cb0
fixed file documentation: #83
DavidIkov Jul 3, 2025
cf12309
moved wta outside: #83
DavidIkov Jul 4, 2025
163cea6
fix for issue: #69
DavidIkov Jul 4, 2025
ccb62a8
Fix for issue: #84
DavidIkov Jul 4, 2025
f09d5c4
Added borders as parameter for wta: #83
DavidIkov Jul 4, 2025
418dccb
Moved dataset processing to framework: #83
DavidIkov Jul 15, 2025
02d9f57
Made so data processing use filestreams instead of paths, more useful…
DavidIkov Jul 15, 2025
368681e
Added comments to image_classification.h: #83
DavidIkov Jul 15, 2025
72437e8
Added more comments to fix build: #83
DavidIkov Jul 15, 2025
3fdef1a
Changed ifstreams to istreams in image_classification: #83
DavidIkov Jul 16, 2025
02ef0bc
Moved evaluation to framework: #83
DavidIkov Jul 16, 2025
91193bb
Fixed comments: #83
DavidIkov Jul 16, 2025
667def6
Changed file descriptions: #83
DavidIkov Jul 21, 2025
9b1870f
Moved generators: #83
DavidIkov Jul 21, 2025
96e3c07
Refactored comments: #83
DavidIkov Jul 21, 2025
3d0bdca
Fixed comments issue: #83
DavidIkov Jul 21, 2025
0d3a505
Fixed comments: #83
DavidIkov Jul 21, 2025
4618a3b
Added data processing test and fixed bug with dataset split: #83
DavidIkov Jul 21, 2025
cd7875b
Remade classification dataset in the name of scalability: #83
DavidIkov Jul 21, 2025
366a1fd
Fixed mnist learn example: #83
DavidIkov Jul 21, 2025
2f2a4b0
Added inference evaluation test. Some refactoring: #83
DavidIkov Jul 21, 2025
66bc91c
Added projections creators for new generators: #83
DavidIkov Jul 22, 2025
9c11cdd
Added tests for new generators and refactored old tests: #83
DavidIkov Jul 22, 2025
d4cea4c
Fixed comment issue: #83
DavidIkov Jul 23, 2025
9962d61
Added wta test: #83
DavidIkov Jul 23, 2025
36da668
Merge remote-tracking branch 'upstream/master'
DavidIkov Jul 23, 2025
9947acc
Removed debug code: #83
DavidIkov Jul 23, 2025
06f6eea
Fix for build: #83
DavidIkov Jul 23, 2025
a249828
Fix for build: #83
DavidIkov Jul 23, 2025
7905e45
Changed lambda captures: #88
DavidIkov Jul 24, 2025
8f4ebf8
Fixed simple errors: #88
DavidIkov Jul 24, 2025
decc860
Removed repeated empty lines: #88
DavidIkov Jul 24, 2025
7bba233
Some changes: #88
DavidIkov Jul 24, 2025
31fd0d3
Some fixes for pull request: #88
DavidIkov Jul 25, 2025
fc6b403
Fixed some comments: #88
DavidIkov Jul 25, 2025
83a4c83
Fix for inference wta: #88
DavidIkov Jul 28, 2025
0eb5e0b
Remade data processing and inference evaluation architecture: #88
DavidIkov Jul 31, 2025
5fcd008
Some refactoring and fixes: #88
DavidIkov Jul 31, 2025
ca13893
Fixed bug with log path default value: #88
DavidIkov Aug 1, 2025
82533c3
Some refactoring: #88
DavidIkov Aug 1, 2025
1d2579b
Remade inference evaluation metrics: #88
DavidIkov Aug 1, 2025
72ef8dc
Quick test fix: #88
DavidIkov Aug 1, 2025
d100876
Added Frame logic: #88
DavidIkov Aug 1, 2025
c9fb17e
Added comments for dataset: #88
DavidIkov Aug 4, 2025
06a8e78
Added consts and made image dataset not inheritable: #88
DavidIkov Aug 4, 2025
403aefe
Added consts: #88
DavidIkov Aug 4, 2025
49d7ab7
Fix for pre commit: #88
DavidIkov Aug 4, 2025
398afba
Fix for precommit: #88
DavidIkov Aug 4, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion examples/mnist-learn/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,5 @@ if (NOT TARGET KNP::BaseFramework::Core)
find_package(knp-base-framework REQUIRED)
endif()

add_executable(mnist_learn_example main.cpp data_read.cpp construct_network.cpp evaluation.cpp train.cpp inference.cpp wta.cpp time_string.cpp)
add_executable(mnist_learn_example main.cpp construct_network.cpp train.cpp inference.cpp time_string.cpp)
target_link_libraries(mnist_learn_example PRIVATE KNP::BaseFramework::Core ${Boost_LIBRARIES})
113 changes: 36 additions & 77 deletions examples/mnist-learn/construct_network.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,51 +21,21 @@

#include "construct_network.h"

#include <knp/core/population.h>
#include <knp/core/projection.h>
#include <knp/framework/sonata/network_io.h>
#include <knp/neuron-traits/all_traits.h>
#include <knp/synapse-traits/all_traits.h>
#include "shared_network.h"

#include "generators.h"

// A list of short type names to make reading easier.
using DeltaSynapseData = knp::synapse_traits::synapse_parameters<knp::synapse_traits::DeltaSynapse>;
using DeltaProjection = knp::core::Projection<knp::synapse_traits::DeltaSynapse>;
using ResourceSynapse = knp::synapse_traits::SynapticResourceSTDPDeltaSynapse;
using ResourceDeltaProjection = knp::core::Projection<knp::synapse_traits::SynapticResourceSTDPDeltaSynapse>;
using ResourceSynapseData = ResourceDeltaProjection::Synapse;
using ResourceSynapseParams = knp::synapse_traits::synapse_parameters<ResourceSynapse>;
using BlifatPopulation = knp::core::Population<knp::neuron_traits::BLIFATNeuron>;
using ResourceBlifatPopulation = knp::core::Population<knp::neuron_traits::SynapticResourceSTDPBLIFATNeuron>;
using ResourceNeuron = knp::neuron_traits::SynapticResourceSTDPBLIFATNeuron;
using ResourceNeuronData = knp::neuron_traits::neuron_parameters<ResourceNeuron>;

// Network hyperparameters. You may want to fine-tune these.
constexpr float default_threshold = 8.571F;
constexpr float min_synaptic_weight = -0.7;
constexpr float max_synaptic_weight = 0.864249F;
constexpr float base_weight_value = 0.000F;
constexpr int neuron_dopamine_period = 10;
constexpr int synapse_dopamine_period = 10;
constexpr float l_neuron_potential_decay = 1.0 - 1.0 / 3.0;
constexpr float dopamine_parameter = 0.042F;
constexpr float dopamine_value = dopamine_parameter;
constexpr float threshold_weight_coeff = 0.023817F;

//
// Network geometry.
//

// Number of neurons reserved per a single digit.
constexpr int neurons_per_column = 15;

// Ten possible digits, one column per each.
constexpr int num_possible_labels = classes_in_mnist;

// All columns are a part of the same population.
constexpr int num_input_neurons = neurons_per_column * num_possible_labels;

// Number of pixels for a single MNIST image.
constexpr int input_size = 28 * 28;

// Dense input projection from 28 * 28 image to population of 150 neurons.
constexpr int input_projection_size = input_size * num_input_neurons;


// Intermediate population neurons.
template <class Neuron>
Expand Down Expand Up @@ -148,7 +118,6 @@ auto add_subnetwork_populations(AnnotatedNetwork &result)
return std::make_pair(population_uids, pop_data);
}


// Create network for MNIST.
AnnotatedNetwork create_example_network(int num_compound_networks)
{
Expand All @@ -169,89 +138,79 @@ AnnotatedNetwork create_example_network(int num_compound_networks)
afferent_synapse.rule_.w_max_ = max_synaptic_weight;

// 1. Trainable input projection.
ResourceDeltaProjection input_projection{
knp::core::UID{false}, population_uids[INPUT], make_dense_generator(input_size, afferent_synapse),
input_projection_size};
ResourceDeltaProjection input_projection = knp::framework::projection::creators::all_to_all<ResourceSynapse>(
knp::core::UID{false}, population_uids[INPUT], input_size, num_input_neurons,
[&afferent_synapse](size_t, size_t) { return afferent_synapse; });
result.data_.projections_from_raster_.push_back(input_projection.get_uid());
input_projection.unlock_weights(); // Trainable
result.network_.add_projection(input_projection);
result.data_.inference_internal_projection_.insert(input_projection.get_uid());

default_synapse.weight_ = 9;
Comment thread
artiomn marked this conversation as resolved.

// 2. Activating projection. It sends signals from labels to dopamine population.
const DeltaSynapseData default_activating_synapse{1, 1, knp::synapse_traits::OutputType::BLOCKING};
DeltaProjection projection_2{
knp::core::UID{false}, population_uids[DOPAMINE],
make_aligned_generator(pop_data[INPUT].pd_.size_, pop_data[DOPAMINE].pd_.size_, default_activating_synapse),
pop_data[INPUT].pd_.size_};
DeltaProjection projection_2 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
knp::core::UID{false}, population_uids[DOPAMINE], pop_data[INPUT].pd_.size_, pop_data[DOPAMINE].pd_.size_,
[&default_activating_synapse](size_t, size_t) { return default_activating_synapse; });
result.network_.add_projection(projection_2);
result.data_.wta_data_[i].second.push_back(projection_2.get_uid());

// 3. Dopamine projection, it goes from dopamine population to input population.
const DeltaSynapseData default_dopamine_synapse{dopamine_value, 1, knp::synapse_traits::OutputType::DOPAMINE};
DeltaProjection projection_3{
population_uids[DOPAMINE], population_uids[INPUT],
make_aligned_generator(pop_data[DOPAMINE].pd_.size_, pop_data[INPUT].pd_.size_, default_dopamine_synapse),
pop_data[INPUT].pd_.size_};

DeltaProjection projection_3 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
population_uids[DOPAMINE], population_uids[INPUT], pop_data[DOPAMINE].pd_.size_, pop_data[INPUT].pd_.size_,
[&default_dopamine_synapse](size_t, size_t) { return default_dopamine_synapse; });
result.network_.add_projection(projection_3);
result.data_.inference_internal_projection_.insert(projection_3.get_uid());

// 4. Strong excitatory projection going to output neurons.
DeltaProjection projection_4{
knp::core::UID{false}, population_uids[OUTPUT],
make_aligned_generator(pop_data[INPUT].pd_.size_, pop_data[OUTPUT].pd_.size_, default_synapse),
pop_data[INPUT].pd_.size_};
default_synapse.weight_ = 9;
Comment thread
artiomn marked this conversation as resolved.
DeltaProjection projection_4 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
knp::core::UID{false}, population_uids[OUTPUT], pop_data[INPUT].pd_.size_, pop_data[OUTPUT].pd_.size_,
[&default_synapse](size_t, size_t) { return default_synapse; });
result.data_.wta_data_[i].second.push_back(projection_4.get_uid());

result.network_.add_projection(projection_4);
result.data_.inference_internal_projection_.insert(projection_4.get_uid());

// 5. Blocking projection.
const DeltaSynapseData default_blocking_synapse{-20, 1, knp::synapse_traits::OutputType::BLOCKING};
DeltaProjection projection_5{
population_uids[OUTPUT], population_uids[GATE],
make_aligned_generator(pop_data[OUTPUT].pd_.size_, pop_data[GATE].pd_.size_, default_blocking_synapse),
num_possible_labels};
DeltaProjection projection_5 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
population_uids[OUTPUT], population_uids[GATE], pop_data[OUTPUT].pd_.size_, pop_data[GATE].pd_.size_,
[&default_blocking_synapse](size_t, size_t) { return default_blocking_synapse; });
result.network_.add_projection(projection_5);
result.data_.inference_internal_projection_.insert(projection_5.get_uid());

// 6. Strong excitatory projection going from ground truth classes.
DeltaProjection projection_6{
knp::core::UID{false}, population_uids[DOPAMINE],
make_aligned_generator(num_possible_labels, pop_data[DOPAMINE].pd_.size_, default_synapse),
pop_data[DOPAMINE].pd_.size_};
DeltaProjection projection_6 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
knp::core::UID{false}, population_uids[DOPAMINE], num_possible_labels, pop_data[DOPAMINE].pd_.size_,
[&default_synapse](size_t, size_t) { return default_synapse; });
result.network_.add_projection(projection_6);
result.data_.projections_from_classes_.push_back(projection_6.get_uid());

// 7. Strong slow excitatory projection going from ground truth classes.
auto slow_synapse = default_synapse;
slow_synapse.delay_ = 10;
DeltaProjection projection_7{
knp::core::UID{false}, population_uids[GATE],
make_aligned_generator(num_possible_labels, pop_data[GATE].pd_.size_, slow_synapse),
pop_data[GATE].pd_.size_};
DeltaProjection projection_7 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
knp::core::UID{false}, population_uids[GATE], num_possible_labels, pop_data[GATE].pd_.size_,
[&slow_synapse](size_t, size_t) { return slow_synapse; });
result.network_.add_projection(projection_7);
result.data_.projections_from_classes_.push_back(projection_7.get_uid());

// 8. Strong inhibitory projection from ground truth input.
auto inhibitory_synapse = default_synapse;
inhibitory_synapse.weight_ = -30;
DeltaProjection projection_8{
knp::core::UID{false}, population_uids[GATE],
make_exclusive_generator(num_possible_labels, inhibitory_synapse),
num_possible_labels * (pop_data[GATE].pd_.size_ - 1)};
DeltaProjection projection_8 =
knp::framework::projection::creators::exclusive<knp::synapse_traits::DeltaSynapse>(
knp::core::UID{false}, population_uids[GATE], num_possible_labels,
[&inhibitory_synapse](size_t, size_t) { return inhibitory_synapse; });
result.data_.projections_from_classes_.push_back(projection_8.get_uid());
result.network_.add_projection(projection_8);

// 9. Weak excitatory projection.
auto weak_excitatory_synapse = default_synapse;
weak_excitatory_synapse.weight_ = 3;
DeltaProjection projection_9{
population_uids[GATE], population_uids[INPUT],
make_aligned_generator(pop_data[GATE].pd_.size_, pop_data[INPUT].pd_.size_, weak_excitatory_synapse),
pop_data[INPUT].pd_.size_};
DeltaProjection projection_9 = knp::framework::projection::creators::aligned<knp::synapse_traits::DeltaSynapse>(
population_uids[GATE], population_uids[INPUT], pop_data[GATE].pd_.size_, pop_data[INPUT].pd_.size_,
[&weak_excitatory_synapse](size_t, size_t) { return weak_excitatory_synapse; });
result.network_.add_projection(projection_9);
result.data_.inference_internal_projection_.insert(projection_9.get_uid());
}
Comment thread
artiomn marked this conversation as resolved.
Expand Down
6 changes: 0 additions & 6 deletions examples/mnist-learn/construct_network.h
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,6 @@
#include <vector>


/// How many steps to use for learning. 20 steps are used for a single image.
constexpr int learning_period = 200000;
/// Classes in MNIST.
constexpr int classes_in_mnist = 10;
Comment thread
artiomn marked this conversation as resolved.


struct AnnotatedNetwork
{
knp::framework::Network network_;
Expand Down
141 changes: 0 additions & 141 deletions examples/mnist-learn/data_read.cpp

This file was deleted.

Loading
Loading