From 89532564ea91dd2cc9c9c239f9669af83fb8cef4 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 17 Oct 2024 11:08:09 +0200 Subject: [PATCH 01/68] add explicit output parameters to spiking output port --- .../neuromodulated_stdp_synapse.nestml | 2 +- models/synapses/noisy_synapse.nestml | 2 +- models/synapses/static_synapse.nestml | 2 +- .../stdp_nn_pre_centered_synapse.nestml | 2 +- .../stdp_nn_restr_symm_synapse.nestml | 2 +- models/synapses/stdp_nn_symm_synapse.nestml | 2 +- models/synapses/stdp_synapse.nestml | 2 +- models/synapses/stdp_triplet_synapse.nestml | 2 +- .../co_co_output_port_defined_if_emit_call.py | 51 +- .../codegeneration/printers/nestml_printer.py | 8 + pynestml/generated/PyNestMLLexer.py | 506 +++---- pynestml/generated/PyNestMLParser.py | 1313 +++++++++-------- pynestml/generated/PyNestMLParserVisitor.py | 4 +- pynestml/grammars/PyNestMLLexer.g4 | 5 +- pynestml/grammars/PyNestMLParser.g4 | 9 +- pynestml/meta_model/ast_function_call.py | 7 +- pynestml/meta_model/ast_node_factory.py | 7 +- pynestml/meta_model/ast_output_block.py | 24 +- pynestml/utils/messages.py | 9 + pynestml/visitors/ast_builder_visitor.py | 12 +- .../CoCoOutputPortTypeIfEmitCall-2.nestml | 37 + .../CoCoOutputPortTypeIfEmitCall-3.nestml | 37 + .../CoCoOutputPortTypeIfEmitCall.nestml | 37 + tests/test_cocos.py | 26 +- 24 files changed, 1230 insertions(+), 878 deletions(-) create mode 100644 tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml create mode 100644 tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml create mode 100644 tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml diff --git a/models/synapses/neuromodulated_stdp_synapse.nestml b/models/synapses/neuromodulated_stdp_synapse.nestml index 4fe77d56c..236968417 100644 --- a/models/synapses/neuromodulated_stdp_synapse.nestml +++ b/models/synapses/neuromodulated_stdp_synapse.nestml @@ -56,7 +56,7 @@ model neuromodulated_stdp_synapse: mod_spikes <- spike output: - spike + spike(weight real, delay ms) 
onReceive(mod_spikes): n += 1. / tau_n diff --git a/models/synapses/noisy_synapse.nestml b/models/synapses/noisy_synapse.nestml index 8a1d0bbfa..3f776fc1d 100644 --- a/models/synapses/noisy_synapse.nestml +++ b/models/synapses/noisy_synapse.nestml @@ -17,7 +17,7 @@ model noisy_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): # temporary variable for the "weight" that will be transmitted diff --git a/models/synapses/static_synapse.nestml b/models/synapses/static_synapse.nestml index 097dcae6b..043a9463f 100644 --- a/models/synapses/static_synapse.nestml +++ b/models/synapses/static_synapse.nestml @@ -15,7 +15,7 @@ model static_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/models/synapses/stdp_nn_pre_centered_synapse.nestml b/models/synapses/stdp_nn_pre_centered_synapse.nestml index 2787d26ed..1d51bb481 100644 --- a/models/synapses/stdp_nn_pre_centered_synapse.nestml +++ b/models/synapses/stdp_nn_pre_centered_synapse.nestml @@ -79,7 +79,7 @@ model stdp_nn_pre_centered_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): post_trace = 1 diff --git a/models/synapses/stdp_nn_restr_symm_synapse.nestml b/models/synapses/stdp_nn_restr_symm_synapse.nestml index 86efefbff..9ca9e6360 100644 --- a/models/synapses/stdp_nn_restr_symm_synapse.nestml +++ b/models/synapses/stdp_nn_restr_symm_synapse.nestml @@ -72,7 +72,7 @@ model stdp_nn_restr_symm_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): post_trace = 1 diff --git a/models/synapses/stdp_nn_symm_synapse.nestml b/models/synapses/stdp_nn_symm_synapse.nestml index 22cb9d565..55bc413a5 100644 --- a/models/synapses/stdp_nn_symm_synapse.nestml +++ b/models/synapses/stdp_nn_symm_synapse.nestml @@ -76,7 +76,7 @@ model stdp_nn_symm_synapse: post_spikes <- spike output: - spike + spike(weight real, delay 
ms) onReceive(post_spikes): post_trace = 1 diff --git a/models/synapses/stdp_synapse.nestml b/models/synapses/stdp_synapse.nestml index f04b4b971..da2893391 100644 --- a/models/synapses/stdp_synapse.nestml +++ b/models/synapses/stdp_synapse.nestml @@ -59,7 +59,7 @@ model stdp_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): post_trace += 1 diff --git a/models/synapses/stdp_triplet_synapse.nestml b/models/synapses/stdp_triplet_synapse.nestml index dd7ca2bd1..bef9c8c54 100644 --- a/models/synapses/stdp_triplet_synapse.nestml +++ b/models/synapses/stdp_triplet_synapse.nestml @@ -56,7 +56,7 @@ model stdp_triplet_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): # potentiate synapse diff --git a/pynestml/cocos/co_co_output_port_defined_if_emit_call.py b/pynestml/cocos/co_co_output_port_defined_if_emit_call.py index 9157115ac..24bdd13af 100644 --- a/pynestml/cocos/co_co_output_port_defined_if_emit_call.py +++ b/pynestml/cocos/co_co_output_port_defined_if_emit_call.py @@ -24,6 +24,8 @@ from pynestml.cocos.co_co import CoCo from pynestml.meta_model.ast_function_call import ASTFunctionCall from pynestml.meta_model.ast_model import ASTModel +from pynestml.symbols.predefined_functions import PredefinedFunctions +from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import Logger, LoggingLevel from pynestml.utils.messages import Messages from pynestml.visitors.ast_visitor import ASTVisitor @@ -60,22 +62,55 @@ def visit_function_call(self, node: ASTFunctionCall): """ assert self.neuron is not None func_name = node.get_name() - if func_name == 'emit_spike': + if func_name == PredefinedFunctions.EMIT_SPIKE: output_blocks = self.neuron.get_output_blocks() - if not output_blocks: + + # exactly one output block should be defined + if len(output_blocks) == 0: code, message = Messages.get_block_not_defined_correctly('output', missing=True) 
Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, code=code, message=message) return - spike_output_exists = False - for output_block in output_blocks: - if output_block.is_spike(): - spike_output_exists = True - break + if len(output_blocks) > 1: + code, message = Messages.get_block_not_defined_correctly('output', missing=False) + Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, + code=code, message=message) + return - if not spike_output_exists: + assert len(output_blocks) == 1 + + if not output_blocks[0].is_spike(): code, message = Messages.get_emit_spike_function_but_no_output_port() Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, error_position=node.get_source_position()) return + + # check types + if len(node.get_args()) != len(output_blocks[0].get_attributes()): + code, message = Messages.get_output_port_type_differs() + Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, + error_position=node.get_source_position()) + return + + for emit_spike_arg, output_block_attr in zip(node.get_args(), output_blocks[0].get_attributes()): + + emit_spike_arg_type_sym = emit_spike_arg.type + output_block_attr_type_sym = output_block_attr.get_data_type().get_type_symbol() + + if emit_spike_arg_type_sym.equals(output_block_attr_type_sym): + continue + + if emit_spike_arg_type_sym.is_castable_to(output_block_attr_type_sym): + # types are not equal, but castable + code, message = Messages.get_implicit_cast_rhs_to_lhs(output_block_attr_type_sym.print_symbol(), + emit_spike_arg_type_sym.print_symbol()) + Logger.log_message(error_position=node.get_source_position(), + code=code, message=message, log_level=LoggingLevel.WARNING) + continue + else: + # types are not equal and not castable + code, message = Messages.get_output_port_type_differs() + Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, + 
error_position=node.get_source_position()) + return diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index f03d9931d..481fc9a6a 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -431,6 +431,14 @@ def print_output_block(self, node: ASTOutputBlock) -> str: ret += print_n_spaces(self.indent) + "output:\n" ret += print_n_spaces(self.indent + 4) ret += "spike" if node.is_spike() else "continuous" + if node.get_attributes(): + ret += "(" + for i, attr in enumerate(node.get_attributes()): + ret += self.print(attr) + if i < len(node.get_attributes()) - 1: + ret += ", " + + ret += ")" ret += print_sl_comment(node.in_comment) ret += "\n" return ret diff --git a/pynestml/generated/PyNestMLLexer.py b/pynestml/generated/PyNestMLLexer.py index d430d8556..e1fda7af4 100644 --- a/pynestml/generated/PyNestMLLexer.py +++ b/pynestml/generated/PyNestMLLexer.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLLexer.g4 by ANTLR 4.13.1 +# Generated from PyNestMLLexer.g4 by ANTLR 4.10.1 from antlr4 import * from io import StringIO import sys @@ -8,14 +8,14 @@ from typing.io import TextIO -if "." in __name__: +if __name__ is not None and "." 
in __name__: from .PyNestMLLexerBase import PyNestMLLexerBase else: from PyNestMLLexerBase import PyNestMLLexerBase def serializedATN(): return [ - 4,0,90,702,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, + 4,0,91,707,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, 2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2, 13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7, 19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2, @@ -29,243 +29,244 @@ def serializedATN(): 71,2,72,7,72,2,73,7,73,2,74,7,74,2,75,7,75,2,76,7,76,2,77,7,77,2, 78,7,78,2,79,7,79,2,80,7,80,2,81,7,81,2,82,7,82,2,83,7,83,2,84,7, 84,2,85,7,85,2,86,7,86,2,87,7,87,2,88,7,88,2,89,7,89,2,90,7,90,2, - 91,7,91,1,0,1,0,1,0,1,0,1,1,3,1,191,8,1,1,1,1,1,1,2,1,2,1,2,3,2, - 198,8,2,1,3,4,3,201,8,3,11,3,12,3,202,1,3,1,3,1,4,1,4,1,4,1,4,1, - 4,1,5,1,5,5,5,214,8,5,10,5,12,5,217,9,5,1,5,1,5,4,5,221,8,5,11,5, - 12,5,222,1,5,1,5,1,6,1,6,5,6,229,8,6,10,6,12,6,232,9,6,1,6,1,6,1, - 7,1,7,1,7,3,7,239,8,7,1,7,1,7,1,7,3,7,244,8,7,3,7,246,8,7,1,7,1, - 7,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9,1,10,1,10, - 1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,11,1,11,1,11, - 1,12,1,12,1,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13, - 1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15, - 1,15,1,15,1,16,1,16,1,16,1,17,1,17,1,17,1,17,1,17,1,18,1,18,1,18, - 1,18,1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20,1,20,1,21, - 1,21,1,21,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24, - 1,24,1,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27, - 1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,28,1,28, - 1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30, - 1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,32,1,32, - 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33, - 1,33,1,33,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,35, - 
1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,37, - 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39, - 1,39,1,39,1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,1,41, - 1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42, - 1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1,43, - 1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44,1,44,1,44, - 1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,45,1,45,1,46, - 1,46,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,49,1,50,1,50,1,51,1,51, - 1,52,1,52,1,53,1,53,1,54,1,54,1,55,1,55,1,55,1,56,1,56,1,57,1,57, - 1,57,1,58,1,58,1,58,1,59,1,59,1,59,1,60,1,60,1,60,1,61,1,61,1,62, - 1,62,1,63,1,63,1,63,1,64,1,64,1,64,1,65,1,65,1,65,1,66,1,66,1,66, - 1,67,1,67,1,67,1,68,1,68,1,68,1,69,1,69,1,69,1,70,1,70,1,70,1,71, - 1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,75,1,75,1,76,1,76,1,76, - 1,77,1,77,1,78,1,78,1,79,1,79,1,80,1,80,1,81,1,81,1,81,1,82,1,82, - 1,83,1,83,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84, - 1,84,1,84,1,84,1,84,1,84,1,84,1,84,3,84,636,8,84,1,85,1,85,1,85, - 4,85,641,8,85,11,85,12,85,642,1,85,3,85,646,8,85,1,85,3,85,649,8, - 85,1,85,3,85,652,8,85,1,85,5,85,655,8,85,10,85,12,85,658,9,85,1, - 85,1,85,1,86,3,86,663,8,86,1,86,5,86,666,8,86,10,86,12,86,669,9, - 86,1,87,4,87,672,8,87,11,87,12,87,673,1,88,1,88,3,88,678,8,88,1, - 89,3,89,681,8,89,1,89,1,89,1,89,1,89,1,89,3,89,688,8,89,1,90,1,90, - 3,90,692,8,90,1,90,1,90,1,90,1,91,1,91,3,91,699,8,91,1,91,1,91,2, - 215,222,0,92,1,3,3,0,5,4,7,5,9,6,11,7,13,8,15,9,17,10,19,11,21,12, - 23,13,25,14,27,15,29,16,31,17,33,18,35,19,37,20,39,21,41,22,43,23, - 45,24,47,25,49,26,51,27,53,28,55,29,57,30,59,31,61,32,63,33,65,34, - 67,35,69,36,71,37,73,38,75,39,77,40,79,41,81,42,83,43,85,44,87,45, - 89,46,91,47,93,48,95,49,97,50,99,51,101,52,103,53,105,54,107,55, - 109,56,111,57,113,58,115,59,117,60,119,61,121,62,123,63,125,64,127, - 
65,129,66,131,67,133,68,135,69,137,70,139,71,141,72,143,73,145,74, - 147,75,149,76,151,77,153,78,155,79,157,80,159,81,161,82,163,83,165, - 84,167,85,169,86,171,87,173,88,175,89,177,90,179,0,181,0,183,0,1, - 0,7,2,0,9,9,32,32,2,0,10,10,13,13,4,0,10,10,13,13,34,34,92,92,4, - 0,36,36,65,90,95,95,97,122,5,0,36,36,48,57,65,90,95,95,97,122,1, - 0,48,57,2,0,69,69,101,101,723,0,1,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0, - 0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0, - 0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0, - 0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0, - 0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0, - 0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0,57,1,0,0, - 0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0, - 0,0,69,1,0,0,0,0,71,1,0,0,0,0,73,1,0,0,0,0,75,1,0,0,0,0,77,1,0,0, - 0,0,79,1,0,0,0,0,81,1,0,0,0,0,83,1,0,0,0,0,85,1,0,0,0,0,87,1,0,0, - 0,0,89,1,0,0,0,0,91,1,0,0,0,0,93,1,0,0,0,0,95,1,0,0,0,0,97,1,0,0, - 0,0,99,1,0,0,0,0,101,1,0,0,0,0,103,1,0,0,0,0,105,1,0,0,0,0,107,1, - 0,0,0,0,109,1,0,0,0,0,111,1,0,0,0,0,113,1,0,0,0,0,115,1,0,0,0,0, - 117,1,0,0,0,0,119,1,0,0,0,0,121,1,0,0,0,0,123,1,0,0,0,0,125,1,0, - 0,0,0,127,1,0,0,0,0,129,1,0,0,0,0,131,1,0,0,0,0,133,1,0,0,0,0,135, - 1,0,0,0,0,137,1,0,0,0,0,139,1,0,0,0,0,141,1,0,0,0,0,143,1,0,0,0, - 0,145,1,0,0,0,0,147,1,0,0,0,0,149,1,0,0,0,0,151,1,0,0,0,0,153,1, - 0,0,0,0,155,1,0,0,0,0,157,1,0,0,0,0,159,1,0,0,0,0,161,1,0,0,0,0, - 163,1,0,0,0,0,165,1,0,0,0,0,167,1,0,0,0,0,169,1,0,0,0,0,171,1,0, - 0,0,0,173,1,0,0,0,0,175,1,0,0,0,0,177,1,0,0,0,1,185,1,0,0,0,3,190, - 1,0,0,0,5,194,1,0,0,0,7,200,1,0,0,0,9,206,1,0,0,0,11,211,1,0,0,0, - 13,226,1,0,0,0,15,245,1,0,0,0,17,249,1,0,0,0,19,257,1,0,0,0,21,262, - 1,0,0,0,23,269,1,0,0,0,25,277,1,0,0,0,27,282,1,0,0,0,29,291,1,0, - 0,0,31,298,1,0,0,0,33,305,1,0,0,0,35,308,1,0,0,0,37,313,1,0,0,0, - 39,318,1,0,0,0,41,322,1,0,0,0,43,328,1,0,0,0,45,331,1,0,0,0,47,336, - 
1,0,0,0,49,340,1,0,0,0,51,344,1,0,0,0,53,347,1,0,0,0,55,351,1,0, - 0,0,57,362,1,0,0,0,59,369,1,0,0,0,61,375,1,0,0,0,63,381,1,0,0,0, - 65,392,1,0,0,0,67,402,1,0,0,0,69,409,1,0,0,0,71,419,1,0,0,0,73,425, - 1,0,0,0,75,432,1,0,0,0,77,443,1,0,0,0,79,453,1,0,0,0,81,465,1,0, - 0,0,83,471,1,0,0,0,85,482,1,0,0,0,87,493,1,0,0,0,89,506,1,0,0,0, - 91,521,1,0,0,0,93,523,1,0,0,0,95,527,1,0,0,0,97,529,1,0,0,0,99,531, - 1,0,0,0,101,533,1,0,0,0,103,535,1,0,0,0,105,537,1,0,0,0,107,539, - 1,0,0,0,109,541,1,0,0,0,111,543,1,0,0,0,113,546,1,0,0,0,115,548, - 1,0,0,0,117,551,1,0,0,0,119,554,1,0,0,0,121,557,1,0,0,0,123,560, - 1,0,0,0,125,562,1,0,0,0,127,564,1,0,0,0,129,567,1,0,0,0,131,570, - 1,0,0,0,133,573,1,0,0,0,135,576,1,0,0,0,137,579,1,0,0,0,139,582, - 1,0,0,0,141,585,1,0,0,0,143,588,1,0,0,0,145,591,1,0,0,0,147,593, - 1,0,0,0,149,595,1,0,0,0,151,597,1,0,0,0,153,599,1,0,0,0,155,602, - 1,0,0,0,157,604,1,0,0,0,159,606,1,0,0,0,161,608,1,0,0,0,163,610, - 1,0,0,0,165,613,1,0,0,0,167,615,1,0,0,0,169,635,1,0,0,0,171,637, - 1,0,0,0,173,662,1,0,0,0,175,671,1,0,0,0,177,677,1,0,0,0,179,687, - 1,0,0,0,181,691,1,0,0,0,183,698,1,0,0,0,185,186,5,34,0,0,186,187, - 5,34,0,0,187,188,5,34,0,0,188,2,1,0,0,0,189,191,5,13,0,0,190,189, - 1,0,0,0,190,191,1,0,0,0,191,192,1,0,0,0,192,193,5,10,0,0,193,4,1, - 0,0,0,194,195,3,145,72,0,195,197,3,3,1,0,196,198,3,7,3,0,197,196, - 1,0,0,0,197,198,1,0,0,0,198,6,1,0,0,0,199,201,7,0,0,0,200,199,1, - 0,0,0,201,202,1,0,0,0,202,200,1,0,0,0,202,203,1,0,0,0,203,204,1, - 0,0,0,204,205,6,3,0,0,205,8,1,0,0,0,206,207,5,92,0,0,207,208,3,3, - 1,0,208,209,1,0,0,0,209,210,6,4,0,0,210,10,1,0,0,0,211,215,3,1,0, - 0,212,214,9,0,0,0,213,212,1,0,0,0,214,217,1,0,0,0,215,216,1,0,0, - 0,215,213,1,0,0,0,216,218,1,0,0,0,217,215,1,0,0,0,218,220,3,1,0, - 0,219,221,3,3,1,0,220,219,1,0,0,0,221,222,1,0,0,0,222,223,1,0,0, - 0,222,220,1,0,0,0,223,224,1,0,0,0,224,225,6,5,1,0,225,12,1,0,0,0, - 226,230,5,35,0,0,227,229,8,1,0,0,228,227,1,0,0,0,229,232,1,0,0,0, - 
230,228,1,0,0,0,230,231,1,0,0,0,231,233,1,0,0,0,232,230,1,0,0,0, - 233,234,6,6,1,0,234,14,1,0,0,0,235,236,4,7,0,0,236,246,3,7,3,0,237, - 239,5,13,0,0,238,237,1,0,0,0,238,239,1,0,0,0,239,240,1,0,0,0,240, - 241,5,10,0,0,241,243,1,0,0,0,242,244,3,7,3,0,243,242,1,0,0,0,243, - 244,1,0,0,0,244,246,1,0,0,0,245,235,1,0,0,0,245,238,1,0,0,0,246, - 247,1,0,0,0,247,248,6,7,2,0,248,16,1,0,0,0,249,250,5,105,0,0,250, - 251,5,110,0,0,251,252,5,116,0,0,252,253,5,101,0,0,253,254,5,103, - 0,0,254,255,5,101,0,0,255,256,5,114,0,0,256,18,1,0,0,0,257,258,5, - 114,0,0,258,259,5,101,0,0,259,260,5,97,0,0,260,261,5,108,0,0,261, - 20,1,0,0,0,262,263,5,115,0,0,263,264,5,116,0,0,264,265,5,114,0,0, - 265,266,5,105,0,0,266,267,5,110,0,0,267,268,5,103,0,0,268,22,1,0, - 0,0,269,270,5,98,0,0,270,271,5,111,0,0,271,272,5,111,0,0,272,273, - 5,108,0,0,273,274,5,101,0,0,274,275,5,97,0,0,275,276,5,110,0,0,276, - 24,1,0,0,0,277,278,5,118,0,0,278,279,5,111,0,0,279,280,5,105,0,0, - 280,281,5,100,0,0,281,26,1,0,0,0,282,283,5,102,0,0,283,284,5,117, - 0,0,284,285,5,110,0,0,285,286,5,99,0,0,286,287,5,116,0,0,287,288, - 5,105,0,0,288,289,5,111,0,0,289,290,5,110,0,0,290,28,1,0,0,0,291, - 292,5,105,0,0,292,293,5,110,0,0,293,294,5,108,0,0,294,295,5,105, - 0,0,295,296,5,110,0,0,296,297,5,101,0,0,297,30,1,0,0,0,298,299,5, - 114,0,0,299,300,5,101,0,0,300,301,5,116,0,0,301,302,5,117,0,0,302, - 303,5,114,0,0,303,304,5,110,0,0,304,32,1,0,0,0,305,306,5,105,0,0, - 306,307,5,102,0,0,307,34,1,0,0,0,308,309,5,101,0,0,309,310,5,108, - 0,0,310,311,5,105,0,0,311,312,5,102,0,0,312,36,1,0,0,0,313,314,5, - 101,0,0,314,315,5,108,0,0,315,316,5,115,0,0,316,317,5,101,0,0,317, - 38,1,0,0,0,318,319,5,102,0,0,319,320,5,111,0,0,320,321,5,114,0,0, - 321,40,1,0,0,0,322,323,5,119,0,0,323,324,5,104,0,0,324,325,5,105, - 0,0,325,326,5,108,0,0,326,327,5,101,0,0,327,42,1,0,0,0,328,329,5, - 105,0,0,329,330,5,110,0,0,330,44,1,0,0,0,331,332,5,115,0,0,332,333, - 5,116,0,0,333,334,5,101,0,0,334,335,5,112,0,0,335,46,1,0,0,0,336, - 
337,5,105,0,0,337,338,5,110,0,0,338,339,5,102,0,0,339,48,1,0,0,0, - 340,341,5,97,0,0,341,342,5,110,0,0,342,343,5,100,0,0,343,50,1,0, - 0,0,344,345,5,111,0,0,345,346,5,114,0,0,346,52,1,0,0,0,347,348,5, - 110,0,0,348,349,5,111,0,0,349,350,5,116,0,0,350,54,1,0,0,0,351,352, - 5,114,0,0,352,353,5,101,0,0,353,354,5,99,0,0,354,355,5,111,0,0,355, - 356,5,114,0,0,356,357,5,100,0,0,357,358,5,97,0,0,358,359,5,98,0, - 0,359,360,5,108,0,0,360,361,5,101,0,0,361,56,1,0,0,0,362,363,5,107, - 0,0,363,364,5,101,0,0,364,365,5,114,0,0,365,366,5,110,0,0,366,367, - 5,101,0,0,367,368,5,108,0,0,368,58,1,0,0,0,369,370,5,109,0,0,370, - 371,5,111,0,0,371,372,5,100,0,0,372,373,5,101,0,0,373,374,5,108, - 0,0,374,60,1,0,0,0,375,376,5,115,0,0,376,377,5,116,0,0,377,378,5, - 97,0,0,378,379,5,116,0,0,379,380,5,101,0,0,380,62,1,0,0,0,381,382, - 5,112,0,0,382,383,5,97,0,0,383,384,5,114,0,0,384,385,5,97,0,0,385, - 386,5,109,0,0,386,387,5,101,0,0,387,388,5,116,0,0,388,389,5,101, - 0,0,389,390,5,114,0,0,390,391,5,115,0,0,391,64,1,0,0,0,392,393,5, - 105,0,0,393,394,5,110,0,0,394,395,5,116,0,0,395,396,5,101,0,0,396, - 397,5,114,0,0,397,398,5,110,0,0,398,399,5,97,0,0,399,400,5,108,0, - 0,400,401,5,115,0,0,401,66,1,0,0,0,402,403,5,117,0,0,403,404,5,112, - 0,0,404,405,5,100,0,0,405,406,5,97,0,0,406,407,5,116,0,0,407,408, - 5,101,0,0,408,68,1,0,0,0,409,410,5,101,0,0,410,411,5,113,0,0,411, - 412,5,117,0,0,412,413,5,97,0,0,413,414,5,116,0,0,414,415,5,105,0, - 0,415,416,5,111,0,0,416,417,5,110,0,0,417,418,5,115,0,0,418,70,1, - 0,0,0,419,420,5,105,0,0,420,421,5,110,0,0,421,422,5,112,0,0,422, - 423,5,117,0,0,423,424,5,116,0,0,424,72,1,0,0,0,425,426,5,111,0,0, - 426,427,5,117,0,0,427,428,5,116,0,0,428,429,5,112,0,0,429,430,5, - 117,0,0,430,431,5,116,0,0,431,74,1,0,0,0,432,433,5,99,0,0,433,434, - 5,111,0,0,434,435,5,110,0,0,435,436,5,116,0,0,436,437,5,105,0,0, - 437,438,5,110,0,0,438,439,5,117,0,0,439,440,5,111,0,0,440,441,5, - 117,0,0,441,442,5,115,0,0,442,76,1,0,0,0,443,444,5,111,0,0,444,445, - 
5,110,0,0,445,446,5,82,0,0,446,447,5,101,0,0,447,448,5,99,0,0,448, - 449,5,101,0,0,449,450,5,105,0,0,450,451,5,118,0,0,451,452,5,101, - 0,0,452,78,1,0,0,0,453,454,5,111,0,0,454,455,5,110,0,0,455,456,5, - 67,0,0,456,457,5,111,0,0,457,458,5,110,0,0,458,459,5,100,0,0,459, - 460,5,105,0,0,460,461,5,116,0,0,461,462,5,105,0,0,462,463,5,111, - 0,0,463,464,5,110,0,0,464,80,1,0,0,0,465,466,5,115,0,0,466,467,5, - 112,0,0,467,468,5,105,0,0,468,469,5,107,0,0,469,470,5,101,0,0,470, - 82,1,0,0,0,471,472,5,105,0,0,472,473,5,110,0,0,473,474,5,104,0,0, - 474,475,5,105,0,0,475,476,5,98,0,0,476,477,5,105,0,0,477,478,5,116, - 0,0,478,479,5,111,0,0,479,480,5,114,0,0,480,481,5,121,0,0,481,84, - 1,0,0,0,482,483,5,101,0,0,483,484,5,120,0,0,484,485,5,99,0,0,485, - 486,5,105,0,0,486,487,5,116,0,0,487,488,5,97,0,0,488,489,5,116,0, - 0,489,490,5,111,0,0,490,491,5,114,0,0,491,492,5,121,0,0,492,86,1, - 0,0,0,493,494,5,64,0,0,494,495,5,104,0,0,495,496,5,111,0,0,496,497, - 5,109,0,0,497,498,5,111,0,0,498,499,5,103,0,0,499,500,5,101,0,0, - 500,501,5,110,0,0,501,502,5,101,0,0,502,503,5,111,0,0,503,504,5, - 117,0,0,504,505,5,115,0,0,505,88,1,0,0,0,506,507,5,64,0,0,507,508, - 5,104,0,0,508,509,5,101,0,0,509,510,5,116,0,0,510,511,5,101,0,0, - 511,512,5,114,0,0,512,513,5,111,0,0,513,514,5,103,0,0,514,515,5, - 101,0,0,515,516,5,110,0,0,516,517,5,101,0,0,517,518,5,111,0,0,518, - 519,5,117,0,0,519,520,5,115,0,0,520,90,1,0,0,0,521,522,5,64,0,0, - 522,92,1,0,0,0,523,524,5,46,0,0,524,525,5,46,0,0,525,526,5,46,0, - 0,526,94,1,0,0,0,527,528,5,40,0,0,528,96,1,0,0,0,529,530,5,41,0, - 0,530,98,1,0,0,0,531,532,5,43,0,0,532,100,1,0,0,0,533,534,5,126, - 0,0,534,102,1,0,0,0,535,536,5,124,0,0,536,104,1,0,0,0,537,538,5, - 94,0,0,538,106,1,0,0,0,539,540,5,38,0,0,540,108,1,0,0,0,541,542, - 5,91,0,0,542,110,1,0,0,0,543,544,5,60,0,0,544,545,5,45,0,0,545,112, - 1,0,0,0,546,547,5,93,0,0,547,114,1,0,0,0,548,549,5,91,0,0,549,550, - 5,91,0,0,550,116,1,0,0,0,551,552,5,93,0,0,552,553,5,93,0,0,553,118, - 
1,0,0,0,554,555,5,60,0,0,555,556,5,60,0,0,556,120,1,0,0,0,557,558, - 5,62,0,0,558,559,5,62,0,0,559,122,1,0,0,0,560,561,5,60,0,0,561,124, - 1,0,0,0,562,563,5,62,0,0,563,126,1,0,0,0,564,565,5,60,0,0,565,566, - 5,61,0,0,566,128,1,0,0,0,567,568,5,43,0,0,568,569,5,61,0,0,569,130, - 1,0,0,0,570,571,5,45,0,0,571,572,5,61,0,0,572,132,1,0,0,0,573,574, - 5,42,0,0,574,575,5,61,0,0,575,134,1,0,0,0,576,577,5,47,0,0,577,578, - 5,61,0,0,578,136,1,0,0,0,579,580,5,61,0,0,580,581,5,61,0,0,581,138, - 1,0,0,0,582,583,5,33,0,0,583,584,5,61,0,0,584,140,1,0,0,0,585,586, - 5,60,0,0,586,587,5,62,0,0,587,142,1,0,0,0,588,589,5,62,0,0,589,590, - 5,61,0,0,590,144,1,0,0,0,591,592,5,44,0,0,592,146,1,0,0,0,593,594, - 5,45,0,0,594,148,1,0,0,0,595,596,5,61,0,0,596,150,1,0,0,0,597,598, - 5,42,0,0,598,152,1,0,0,0,599,600,5,42,0,0,600,601,5,42,0,0,601,154, - 1,0,0,0,602,603,5,47,0,0,603,156,1,0,0,0,604,605,5,37,0,0,605,158, - 1,0,0,0,606,607,5,63,0,0,607,160,1,0,0,0,608,609,5,58,0,0,609,162, - 1,0,0,0,610,611,5,58,0,0,611,612,5,58,0,0,612,164,1,0,0,0,613,614, - 5,59,0,0,614,166,1,0,0,0,615,616,5,39,0,0,616,168,1,0,0,0,617,618, - 5,116,0,0,618,619,5,114,0,0,619,620,5,117,0,0,620,636,5,101,0,0, - 621,622,5,84,0,0,622,623,5,114,0,0,623,624,5,117,0,0,624,636,5,101, - 0,0,625,626,5,102,0,0,626,627,5,97,0,0,627,628,5,108,0,0,628,629, - 5,115,0,0,629,636,5,101,0,0,630,631,5,70,0,0,631,632,5,97,0,0,632, - 633,5,108,0,0,633,634,5,115,0,0,634,636,5,101,0,0,635,617,1,0,0, - 0,635,621,1,0,0,0,635,625,1,0,0,0,635,630,1,0,0,0,636,170,1,0,0, - 0,637,656,5,34,0,0,638,651,5,92,0,0,639,641,7,0,0,0,640,639,1,0, - 0,0,641,642,1,0,0,0,642,640,1,0,0,0,642,643,1,0,0,0,643,648,1,0, - 0,0,644,646,5,13,0,0,645,644,1,0,0,0,645,646,1,0,0,0,646,647,1,0, - 0,0,647,649,5,10,0,0,648,645,1,0,0,0,648,649,1,0,0,0,649,652,1,0, - 0,0,650,652,9,0,0,0,651,640,1,0,0,0,651,650,1,0,0,0,652,655,1,0, - 0,0,653,655,8,2,0,0,654,638,1,0,0,0,654,653,1,0,0,0,655,658,1,0, - 0,0,656,654,1,0,0,0,656,657,1,0,0,0,657,659,1,0,0,0,658,656,1,0, - 
0,0,659,660,5,34,0,0,660,172,1,0,0,0,661,663,7,3,0,0,662,661,1,0, - 0,0,663,667,1,0,0,0,664,666,7,4,0,0,665,664,1,0,0,0,666,669,1,0, - 0,0,667,665,1,0,0,0,667,668,1,0,0,0,668,174,1,0,0,0,669,667,1,0, - 0,0,670,672,7,5,0,0,671,670,1,0,0,0,672,673,1,0,0,0,673,671,1,0, - 0,0,673,674,1,0,0,0,674,176,1,0,0,0,675,678,3,179,89,0,676,678,3, - 181,90,0,677,675,1,0,0,0,677,676,1,0,0,0,678,178,1,0,0,0,679,681, - 3,175,87,0,680,679,1,0,0,0,680,681,1,0,0,0,681,682,1,0,0,0,682,683, - 5,46,0,0,683,688,3,175,87,0,684,685,3,175,87,0,685,686,5,46,0,0, - 686,688,1,0,0,0,687,680,1,0,0,0,687,684,1,0,0,0,688,180,1,0,0,0, - 689,692,3,175,87,0,690,692,3,179,89,0,691,689,1,0,0,0,691,690,1, - 0,0,0,692,693,1,0,0,0,693,694,7,6,0,0,694,695,3,183,91,0,695,182, - 1,0,0,0,696,699,3,99,49,0,697,699,3,147,73,0,698,696,1,0,0,0,698, - 697,1,0,0,0,698,699,1,0,0,0,699,700,1,0,0,0,700,701,3,175,87,0,701, - 184,1,0,0,0,26,0,190,197,202,215,222,230,238,243,245,635,642,645, - 648,651,654,656,662,665,667,673,677,680,687,691,698,3,0,1,0,0,2, - 0,1,7,0 + 91,7,91,2,92,7,92,1,0,1,0,1,0,1,0,1,1,3,1,193,8,1,1,1,1,1,1,2,1, + 2,1,2,3,2,200,8,2,1,3,4,3,203,8,3,11,3,12,3,204,1,3,1,3,1,4,1,4, + 1,4,1,4,1,4,1,5,1,5,5,5,216,8,5,10,5,12,5,219,9,5,1,5,1,5,4,5,223, + 8,5,11,5,12,5,224,1,5,1,5,1,6,1,6,5,6,231,8,6,10,6,12,6,234,9,6, + 1,6,1,6,1,7,1,7,1,7,3,7,241,8,7,1,7,1,7,1,7,3,7,246,8,7,3,7,248, + 8,7,1,7,1,7,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9, + 1,10,1,10,1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,11, + 1,11,1,11,1,12,1,12,1,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,15, + 1,15,1,15,1,15,1,15,1,16,1,16,1,16,1,17,1,17,1,17,1,17,1,17,1,18, + 1,18,1,18,1,18,1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20, + 1,20,1,21,1,21,1,21,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23, + 1,24,1,24,1,24,1,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,27,1,27, + 1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28, + 
1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30, + 1,30,1,30,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31, + 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,33,1,33,1,33, + 1,33,1,33,1,33,1,33,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34, + 1,34,1,35,1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,1,36, + 1,36,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,38, + 1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,39,1,39,1,39,1,39, + 1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40, + 1,40,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,42, + 1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,43, + 1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44, + 1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,45, + 1,45,1,46,1,46,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,49,1,50,1,50, + 1,51,1,51,1,52,1,52,1,53,1,53,1,54,1,54,1,55,1,55,1,55,1,56,1,56, + 1,57,1,57,1,57,1,58,1,58,1,58,1,59,1,59,1,59,1,60,1,60,1,60,1,61, + 1,61,1,62,1,62,1,63,1,63,1,63,1,64,1,64,1,64,1,65,1,65,1,65,1,66, + 1,66,1,66,1,67,1,67,1,67,1,68,1,68,1,68,1,69,1,69,1,69,1,70,1,70, + 1,70,1,71,1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,75,1,75,1,76, + 1,76,1,76,1,77,1,77,1,78,1,78,1,79,1,79,1,80,1,80,1,81,1,81,1,81, + 1,82,1,82,1,83,1,83,1,84,1,84,1,85,1,85,1,85,1,85,1,85,1,85,1,85, + 1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,3,85,640, + 8,85,1,86,1,86,1,86,4,86,645,8,86,11,86,12,86,646,1,86,3,86,650, + 8,86,1,86,3,86,653,8,86,1,86,3,86,656,8,86,1,86,5,86,659,8,86,10, + 86,12,86,662,9,86,1,86,1,86,1,87,3,87,667,8,87,1,87,5,87,670,8,87, + 10,87,12,87,673,9,87,1,88,4,88,676,8,88,11,88,12,88,677,1,89,1,89, + 3,89,682,8,89,1,90,3,90,685,8,90,1,90,1,90,1,90,1,90,1,90,1,90,3, + 90,693,8,90,1,91,1,91,3,91,697,8,91,1,91,1,91,1,91,1,92,1,92,3,92, + 704,8,92,1,92,1,92,2,217,224,0,93,1,3,3,0,5,4,7,5,9,6,11,7,13,8, + 15,9,17,10,19,11,21,12,23,13,25,14,27,15,29,16,31,17,33,18,35,19, + 
37,20,39,21,41,22,43,23,45,24,47,25,49,26,51,27,53,28,55,29,57,30, + 59,31,61,32,63,33,65,34,67,35,69,36,71,37,73,38,75,39,77,40,79,41, + 81,42,83,43,85,44,87,45,89,46,91,47,93,48,95,49,97,50,99,51,101, + 52,103,53,105,54,107,55,109,56,111,57,113,58,115,59,117,60,119,61, + 121,62,123,63,125,64,127,65,129,66,131,67,133,68,135,69,137,70,139, + 71,141,72,143,73,145,74,147,75,149,76,151,77,153,78,155,79,157,80, + 159,81,161,82,163,83,165,84,167,85,169,86,171,87,173,88,175,89,177, + 90,179,91,181,0,183,0,185,0,1,0,7,2,0,9,9,32,32,2,0,10,10,13,13, + 4,0,10,10,13,13,34,34,92,92,4,0,36,36,65,90,95,95,97,122,5,0,36, + 36,48,57,65,90,95,95,97,122,1,0,48,57,2,0,69,69,101,101,728,0,1, + 1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1, + 0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1, + 0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1, + 0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1, + 0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1, + 0,0,0,0,55,1,0,0,0,0,57,1,0,0,0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1, + 0,0,0,0,65,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,71,1,0,0,0,0,73,1, + 0,0,0,0,75,1,0,0,0,0,77,1,0,0,0,0,79,1,0,0,0,0,81,1,0,0,0,0,83,1, + 0,0,0,0,85,1,0,0,0,0,87,1,0,0,0,0,89,1,0,0,0,0,91,1,0,0,0,0,93,1, + 0,0,0,0,95,1,0,0,0,0,97,1,0,0,0,0,99,1,0,0,0,0,101,1,0,0,0,0,103, + 1,0,0,0,0,105,1,0,0,0,0,107,1,0,0,0,0,109,1,0,0,0,0,111,1,0,0,0, + 0,113,1,0,0,0,0,115,1,0,0,0,0,117,1,0,0,0,0,119,1,0,0,0,0,121,1, + 0,0,0,0,123,1,0,0,0,0,125,1,0,0,0,0,127,1,0,0,0,0,129,1,0,0,0,0, + 131,1,0,0,0,0,133,1,0,0,0,0,135,1,0,0,0,0,137,1,0,0,0,0,139,1,0, + 0,0,0,141,1,0,0,0,0,143,1,0,0,0,0,145,1,0,0,0,0,147,1,0,0,0,0,149, + 1,0,0,0,0,151,1,0,0,0,0,153,1,0,0,0,0,155,1,0,0,0,0,157,1,0,0,0, + 0,159,1,0,0,0,0,161,1,0,0,0,0,163,1,0,0,0,0,165,1,0,0,0,0,167,1, + 0,0,0,0,169,1,0,0,0,0,171,1,0,0,0,0,173,1,0,0,0,0,175,1,0,0,0,0, + 177,1,0,0,0,0,179,1,0,0,0,1,187,1,0,0,0,3,192,1,0,0,0,5,196,1,0, + 
0,0,7,202,1,0,0,0,9,208,1,0,0,0,11,213,1,0,0,0,13,228,1,0,0,0,15, + 247,1,0,0,0,17,251,1,0,0,0,19,259,1,0,0,0,21,264,1,0,0,0,23,271, + 1,0,0,0,25,279,1,0,0,0,27,284,1,0,0,0,29,293,1,0,0,0,31,300,1,0, + 0,0,33,307,1,0,0,0,35,310,1,0,0,0,37,315,1,0,0,0,39,320,1,0,0,0, + 41,324,1,0,0,0,43,330,1,0,0,0,45,333,1,0,0,0,47,338,1,0,0,0,49,342, + 1,0,0,0,51,346,1,0,0,0,53,349,1,0,0,0,55,353,1,0,0,0,57,364,1,0, + 0,0,59,371,1,0,0,0,61,377,1,0,0,0,63,383,1,0,0,0,65,394,1,0,0,0, + 67,404,1,0,0,0,69,411,1,0,0,0,71,421,1,0,0,0,73,427,1,0,0,0,75,434, + 1,0,0,0,77,445,1,0,0,0,79,455,1,0,0,0,81,467,1,0,0,0,83,473,1,0, + 0,0,85,484,1,0,0,0,87,495,1,0,0,0,89,508,1,0,0,0,91,523,1,0,0,0, + 93,525,1,0,0,0,95,529,1,0,0,0,97,531,1,0,0,0,99,533,1,0,0,0,101, + 535,1,0,0,0,103,537,1,0,0,0,105,539,1,0,0,0,107,541,1,0,0,0,109, + 543,1,0,0,0,111,545,1,0,0,0,113,548,1,0,0,0,115,550,1,0,0,0,117, + 553,1,0,0,0,119,556,1,0,0,0,121,559,1,0,0,0,123,562,1,0,0,0,125, + 564,1,0,0,0,127,566,1,0,0,0,129,569,1,0,0,0,131,572,1,0,0,0,133, + 575,1,0,0,0,135,578,1,0,0,0,137,581,1,0,0,0,139,584,1,0,0,0,141, + 587,1,0,0,0,143,590,1,0,0,0,145,593,1,0,0,0,147,595,1,0,0,0,149, + 597,1,0,0,0,151,599,1,0,0,0,153,601,1,0,0,0,155,604,1,0,0,0,157, + 606,1,0,0,0,159,608,1,0,0,0,161,610,1,0,0,0,163,612,1,0,0,0,165, + 615,1,0,0,0,167,617,1,0,0,0,169,619,1,0,0,0,171,639,1,0,0,0,173, + 641,1,0,0,0,175,666,1,0,0,0,177,675,1,0,0,0,179,681,1,0,0,0,181, + 692,1,0,0,0,183,696,1,0,0,0,185,703,1,0,0,0,187,188,5,34,0,0,188, + 189,5,34,0,0,189,190,5,34,0,0,190,2,1,0,0,0,191,193,5,13,0,0,192, + 191,1,0,0,0,192,193,1,0,0,0,193,194,1,0,0,0,194,195,5,10,0,0,195, + 4,1,0,0,0,196,197,3,145,72,0,197,199,3,3,1,0,198,200,3,7,3,0,199, + 198,1,0,0,0,199,200,1,0,0,0,200,6,1,0,0,0,201,203,7,0,0,0,202,201, + 1,0,0,0,203,204,1,0,0,0,204,202,1,0,0,0,204,205,1,0,0,0,205,206, + 1,0,0,0,206,207,6,3,0,0,207,8,1,0,0,0,208,209,5,92,0,0,209,210,3, + 3,1,0,210,211,1,0,0,0,211,212,6,4,0,0,212,10,1,0,0,0,213,217,3,1, + 
0,0,214,216,9,0,0,0,215,214,1,0,0,0,216,219,1,0,0,0,217,218,1,0, + 0,0,217,215,1,0,0,0,218,220,1,0,0,0,219,217,1,0,0,0,220,222,3,1, + 0,0,221,223,3,3,1,0,222,221,1,0,0,0,223,224,1,0,0,0,224,225,1,0, + 0,0,224,222,1,0,0,0,225,226,1,0,0,0,226,227,6,5,1,0,227,12,1,0,0, + 0,228,232,5,35,0,0,229,231,8,1,0,0,230,229,1,0,0,0,231,234,1,0,0, + 0,232,230,1,0,0,0,232,233,1,0,0,0,233,235,1,0,0,0,234,232,1,0,0, + 0,235,236,6,6,1,0,236,14,1,0,0,0,237,238,4,7,0,0,238,248,3,7,3,0, + 239,241,5,13,0,0,240,239,1,0,0,0,240,241,1,0,0,0,241,242,1,0,0,0, + 242,243,5,10,0,0,243,245,1,0,0,0,244,246,3,7,3,0,245,244,1,0,0,0, + 245,246,1,0,0,0,246,248,1,0,0,0,247,237,1,0,0,0,247,240,1,0,0,0, + 248,249,1,0,0,0,249,250,6,7,2,0,250,16,1,0,0,0,251,252,5,105,0,0, + 252,253,5,110,0,0,253,254,5,116,0,0,254,255,5,101,0,0,255,256,5, + 103,0,0,256,257,5,101,0,0,257,258,5,114,0,0,258,18,1,0,0,0,259,260, + 5,114,0,0,260,261,5,101,0,0,261,262,5,97,0,0,262,263,5,108,0,0,263, + 20,1,0,0,0,264,265,5,115,0,0,265,266,5,116,0,0,266,267,5,114,0,0, + 267,268,5,105,0,0,268,269,5,110,0,0,269,270,5,103,0,0,270,22,1,0, + 0,0,271,272,5,98,0,0,272,273,5,111,0,0,273,274,5,111,0,0,274,275, + 5,108,0,0,275,276,5,101,0,0,276,277,5,97,0,0,277,278,5,110,0,0,278, + 24,1,0,0,0,279,280,5,118,0,0,280,281,5,111,0,0,281,282,5,105,0,0, + 282,283,5,100,0,0,283,26,1,0,0,0,284,285,5,102,0,0,285,286,5,117, + 0,0,286,287,5,110,0,0,287,288,5,99,0,0,288,289,5,116,0,0,289,290, + 5,105,0,0,290,291,5,111,0,0,291,292,5,110,0,0,292,28,1,0,0,0,293, + 294,5,105,0,0,294,295,5,110,0,0,295,296,5,108,0,0,296,297,5,105, + 0,0,297,298,5,110,0,0,298,299,5,101,0,0,299,30,1,0,0,0,300,301,5, + 114,0,0,301,302,5,101,0,0,302,303,5,116,0,0,303,304,5,117,0,0,304, + 305,5,114,0,0,305,306,5,110,0,0,306,32,1,0,0,0,307,308,5,105,0,0, + 308,309,5,102,0,0,309,34,1,0,0,0,310,311,5,101,0,0,311,312,5,108, + 0,0,312,313,5,105,0,0,313,314,5,102,0,0,314,36,1,0,0,0,315,316,5, + 101,0,0,316,317,5,108,0,0,317,318,5,115,0,0,318,319,5,101,0,0,319, + 
38,1,0,0,0,320,321,5,102,0,0,321,322,5,111,0,0,322,323,5,114,0,0, + 323,40,1,0,0,0,324,325,5,119,0,0,325,326,5,104,0,0,326,327,5,105, + 0,0,327,328,5,108,0,0,328,329,5,101,0,0,329,42,1,0,0,0,330,331,5, + 105,0,0,331,332,5,110,0,0,332,44,1,0,0,0,333,334,5,115,0,0,334,335, + 5,116,0,0,335,336,5,101,0,0,336,337,5,112,0,0,337,46,1,0,0,0,338, + 339,5,105,0,0,339,340,5,110,0,0,340,341,5,102,0,0,341,48,1,0,0,0, + 342,343,5,97,0,0,343,344,5,110,0,0,344,345,5,100,0,0,345,50,1,0, + 0,0,346,347,5,111,0,0,347,348,5,114,0,0,348,52,1,0,0,0,349,350,5, + 110,0,0,350,351,5,111,0,0,351,352,5,116,0,0,352,54,1,0,0,0,353,354, + 5,114,0,0,354,355,5,101,0,0,355,356,5,99,0,0,356,357,5,111,0,0,357, + 358,5,114,0,0,358,359,5,100,0,0,359,360,5,97,0,0,360,361,5,98,0, + 0,361,362,5,108,0,0,362,363,5,101,0,0,363,56,1,0,0,0,364,365,5,107, + 0,0,365,366,5,101,0,0,366,367,5,114,0,0,367,368,5,110,0,0,368,369, + 5,101,0,0,369,370,5,108,0,0,370,58,1,0,0,0,371,372,5,109,0,0,372, + 373,5,111,0,0,373,374,5,100,0,0,374,375,5,101,0,0,375,376,5,108, + 0,0,376,60,1,0,0,0,377,378,5,115,0,0,378,379,5,116,0,0,379,380,5, + 97,0,0,380,381,5,116,0,0,381,382,5,101,0,0,382,62,1,0,0,0,383,384, + 5,112,0,0,384,385,5,97,0,0,385,386,5,114,0,0,386,387,5,97,0,0,387, + 388,5,109,0,0,388,389,5,101,0,0,389,390,5,116,0,0,390,391,5,101, + 0,0,391,392,5,114,0,0,392,393,5,115,0,0,393,64,1,0,0,0,394,395,5, + 105,0,0,395,396,5,110,0,0,396,397,5,116,0,0,397,398,5,101,0,0,398, + 399,5,114,0,0,399,400,5,110,0,0,400,401,5,97,0,0,401,402,5,108,0, + 0,402,403,5,115,0,0,403,66,1,0,0,0,404,405,5,117,0,0,405,406,5,112, + 0,0,406,407,5,100,0,0,407,408,5,97,0,0,408,409,5,116,0,0,409,410, + 5,101,0,0,410,68,1,0,0,0,411,412,5,101,0,0,412,413,5,113,0,0,413, + 414,5,117,0,0,414,415,5,97,0,0,415,416,5,116,0,0,416,417,5,105,0, + 0,417,418,5,111,0,0,418,419,5,110,0,0,419,420,5,115,0,0,420,70,1, + 0,0,0,421,422,5,105,0,0,422,423,5,110,0,0,423,424,5,112,0,0,424, + 425,5,117,0,0,425,426,5,116,0,0,426,72,1,0,0,0,427,428,5,111,0,0, + 
428,429,5,117,0,0,429,430,5,116,0,0,430,431,5,112,0,0,431,432,5, + 117,0,0,432,433,5,116,0,0,433,74,1,0,0,0,434,435,5,99,0,0,435,436, + 5,111,0,0,436,437,5,110,0,0,437,438,5,116,0,0,438,439,5,105,0,0, + 439,440,5,110,0,0,440,441,5,117,0,0,441,442,5,111,0,0,442,443,5, + 117,0,0,443,444,5,115,0,0,444,76,1,0,0,0,445,446,5,111,0,0,446,447, + 5,110,0,0,447,448,5,82,0,0,448,449,5,101,0,0,449,450,5,99,0,0,450, + 451,5,101,0,0,451,452,5,105,0,0,452,453,5,118,0,0,453,454,5,101, + 0,0,454,78,1,0,0,0,455,456,5,111,0,0,456,457,5,110,0,0,457,458,5, + 67,0,0,458,459,5,111,0,0,459,460,5,110,0,0,460,461,5,100,0,0,461, + 462,5,105,0,0,462,463,5,116,0,0,463,464,5,105,0,0,464,465,5,111, + 0,0,465,466,5,110,0,0,466,80,1,0,0,0,467,468,5,115,0,0,468,469,5, + 112,0,0,469,470,5,105,0,0,470,471,5,107,0,0,471,472,5,101,0,0,472, + 82,1,0,0,0,473,474,5,105,0,0,474,475,5,110,0,0,475,476,5,104,0,0, + 476,477,5,105,0,0,477,478,5,98,0,0,478,479,5,105,0,0,479,480,5,116, + 0,0,480,481,5,111,0,0,481,482,5,114,0,0,482,483,5,121,0,0,483,84, + 1,0,0,0,484,485,5,101,0,0,485,486,5,120,0,0,486,487,5,99,0,0,487, + 488,5,105,0,0,488,489,5,116,0,0,489,490,5,97,0,0,490,491,5,116,0, + 0,491,492,5,111,0,0,492,493,5,114,0,0,493,494,5,121,0,0,494,86,1, + 0,0,0,495,496,5,64,0,0,496,497,5,104,0,0,497,498,5,111,0,0,498,499, + 5,109,0,0,499,500,5,111,0,0,500,501,5,103,0,0,501,502,5,101,0,0, + 502,503,5,110,0,0,503,504,5,101,0,0,504,505,5,111,0,0,505,506,5, + 117,0,0,506,507,5,115,0,0,507,88,1,0,0,0,508,509,5,64,0,0,509,510, + 5,104,0,0,510,511,5,101,0,0,511,512,5,116,0,0,512,513,5,101,0,0, + 513,514,5,114,0,0,514,515,5,111,0,0,515,516,5,103,0,0,516,517,5, + 101,0,0,517,518,5,110,0,0,518,519,5,101,0,0,519,520,5,111,0,0,520, + 521,5,117,0,0,521,522,5,115,0,0,522,90,1,0,0,0,523,524,5,64,0,0, + 524,92,1,0,0,0,525,526,5,46,0,0,526,527,5,46,0,0,527,528,5,46,0, + 0,528,94,1,0,0,0,529,530,5,40,0,0,530,96,1,0,0,0,531,532,5,41,0, + 0,532,98,1,0,0,0,533,534,5,43,0,0,534,100,1,0,0,0,535,536,5,126, + 
0,0,536,102,1,0,0,0,537,538,5,124,0,0,538,104,1,0,0,0,539,540,5, + 94,0,0,540,106,1,0,0,0,541,542,5,38,0,0,542,108,1,0,0,0,543,544, + 5,91,0,0,544,110,1,0,0,0,545,546,5,60,0,0,546,547,5,45,0,0,547,112, + 1,0,0,0,548,549,5,93,0,0,549,114,1,0,0,0,550,551,5,91,0,0,551,552, + 5,91,0,0,552,116,1,0,0,0,553,554,5,93,0,0,554,555,5,93,0,0,555,118, + 1,0,0,0,556,557,5,60,0,0,557,558,5,60,0,0,558,120,1,0,0,0,559,560, + 5,62,0,0,560,561,5,62,0,0,561,122,1,0,0,0,562,563,5,60,0,0,563,124, + 1,0,0,0,564,565,5,62,0,0,565,126,1,0,0,0,566,567,5,60,0,0,567,568, + 5,61,0,0,568,128,1,0,0,0,569,570,5,43,0,0,570,571,5,61,0,0,571,130, + 1,0,0,0,572,573,5,45,0,0,573,574,5,61,0,0,574,132,1,0,0,0,575,576, + 5,42,0,0,576,577,5,61,0,0,577,134,1,0,0,0,578,579,5,47,0,0,579,580, + 5,61,0,0,580,136,1,0,0,0,581,582,5,61,0,0,582,583,5,61,0,0,583,138, + 1,0,0,0,584,585,5,33,0,0,585,586,5,61,0,0,586,140,1,0,0,0,587,588, + 5,60,0,0,588,589,5,62,0,0,589,142,1,0,0,0,590,591,5,62,0,0,591,592, + 5,61,0,0,592,144,1,0,0,0,593,594,5,44,0,0,594,146,1,0,0,0,595,596, + 5,45,0,0,596,148,1,0,0,0,597,598,5,61,0,0,598,150,1,0,0,0,599,600, + 5,42,0,0,600,152,1,0,0,0,601,602,5,42,0,0,602,603,5,42,0,0,603,154, + 1,0,0,0,604,605,5,47,0,0,605,156,1,0,0,0,606,607,5,37,0,0,607,158, + 1,0,0,0,608,609,5,63,0,0,609,160,1,0,0,0,610,611,5,58,0,0,611,162, + 1,0,0,0,612,613,5,58,0,0,613,614,5,58,0,0,614,164,1,0,0,0,615,616, + 5,59,0,0,616,166,1,0,0,0,617,618,5,39,0,0,618,168,1,0,0,0,619,620, + 5,46,0,0,620,170,1,0,0,0,621,622,5,116,0,0,622,623,5,114,0,0,623, + 624,5,117,0,0,624,640,5,101,0,0,625,626,5,84,0,0,626,627,5,114,0, + 0,627,628,5,117,0,0,628,640,5,101,0,0,629,630,5,102,0,0,630,631, + 5,97,0,0,631,632,5,108,0,0,632,633,5,115,0,0,633,640,5,101,0,0,634, + 635,5,70,0,0,635,636,5,97,0,0,636,637,5,108,0,0,637,638,5,115,0, + 0,638,640,5,101,0,0,639,621,1,0,0,0,639,625,1,0,0,0,639,629,1,0, + 0,0,639,634,1,0,0,0,640,172,1,0,0,0,641,660,5,34,0,0,642,655,5,92, + 0,0,643,645,7,0,0,0,644,643,1,0,0,0,645,646,1,0,0,0,646,644,1,0, + 
0,0,646,647,1,0,0,0,647,652,1,0,0,0,648,650,5,13,0,0,649,648,1,0, + 0,0,649,650,1,0,0,0,650,651,1,0,0,0,651,653,5,10,0,0,652,649,1,0, + 0,0,652,653,1,0,0,0,653,656,1,0,0,0,654,656,9,0,0,0,655,644,1,0, + 0,0,655,654,1,0,0,0,656,659,1,0,0,0,657,659,8,2,0,0,658,642,1,0, + 0,0,658,657,1,0,0,0,659,662,1,0,0,0,660,658,1,0,0,0,660,661,1,0, + 0,0,661,663,1,0,0,0,662,660,1,0,0,0,663,664,5,34,0,0,664,174,1,0, + 0,0,665,667,7,3,0,0,666,665,1,0,0,0,667,671,1,0,0,0,668,670,7,4, + 0,0,669,668,1,0,0,0,670,673,1,0,0,0,671,669,1,0,0,0,671,672,1,0, + 0,0,672,176,1,0,0,0,673,671,1,0,0,0,674,676,7,5,0,0,675,674,1,0, + 0,0,676,677,1,0,0,0,677,675,1,0,0,0,677,678,1,0,0,0,678,178,1,0, + 0,0,679,682,3,181,90,0,680,682,3,183,91,0,681,679,1,0,0,0,681,680, + 1,0,0,0,682,180,1,0,0,0,683,685,3,177,88,0,684,683,1,0,0,0,684,685, + 1,0,0,0,685,686,1,0,0,0,686,687,3,169,84,0,687,688,3,177,88,0,688, + 693,1,0,0,0,689,690,3,177,88,0,690,691,3,169,84,0,691,693,1,0,0, + 0,692,684,1,0,0,0,692,689,1,0,0,0,693,182,1,0,0,0,694,697,3,177, + 88,0,695,697,3,181,90,0,696,694,1,0,0,0,696,695,1,0,0,0,697,698, + 1,0,0,0,698,699,7,6,0,0,699,700,3,185,92,0,700,184,1,0,0,0,701,704, + 3,99,49,0,702,704,3,147,73,0,703,701,1,0,0,0,703,702,1,0,0,0,703, + 704,1,0,0,0,704,705,1,0,0,0,705,706,3,177,88,0,706,186,1,0,0,0,26, + 0,192,199,204,217,224,232,240,245,247,639,646,649,652,655,658,660, + 666,669,671,677,681,684,692,696,703,3,0,1,0,0,2,0,1,7,0 ] class PyNestMLLexer(PyNestMLLexerBase): @@ -361,11 +362,12 @@ class PyNestMLLexer(PyNestMLLexerBase): DOUBLE_COLON = 83 SEMICOLON = 84 DIFFERENTIAL_ORDER = 85 - BOOLEAN_LITERAL = 86 - STRING_LITERAL = 87 - NAME = 88 - UNSIGNED_INTEGER = 89 - FLOAT = 90 + FULLSTOP = 86 + BOOLEAN_LITERAL = 87 + STRING_LITERAL = 88 + NAME = 89 + UNSIGNED_INTEGER = 90 + FLOAT = 91 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN", u"COMMENT" ] @@ -383,7 +385,7 @@ class PyNestMLLexer(PyNestMLLexerBase): "'|'", "'^'", "'&'", "'['", "'<-'", "']'", "'[['", "']]'", "'<<'", "'>>'", "'<'", "'>'", 
"'<='", "'+='", "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", "','", "'-'", "'='", "'*'", - "'**'", "'/'", "'%'", "'?'", "':'", "'::'", "';'", "'''" ] + "'**'", "'/'", "'%'", "'?'", "':'", "'::'", "';'", "'''", "'.'" ] symbolicNames = [ "", "INDENT", "DEDENT", "DOCSTRING_TRIPLEQUOTE", "KERNEL_JOINING", @@ -406,8 +408,8 @@ class PyNestMLLexer(PyNestMLLexerBase): "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", "DOUBLE_COLON", - "SEMICOLON", "DIFFERENTIAL_ORDER", "BOOLEAN_LITERAL", "STRING_LITERAL", - "NAME", "UNSIGNED_INTEGER", "FLOAT" ] + "SEMICOLON", "DIFFERENTIAL_ORDER", "FULLSTOP", "BOOLEAN_LITERAL", + "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", "FLOAT" ] ruleNames = [ "DOCSTRING_TRIPLEQUOTE", "NEWLINE_FRAG", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "DOCSTRING", "SL_COMMENT", "NEWLINE", @@ -431,15 +433,15 @@ class PyNestMLLexer(PyNestMLLexerBase): "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", - "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", "BOOLEAN_LITERAL", - "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", "FLOAT", - "POINT_FLOAT", "EXPONENT_FLOAT", "EXPONENT" ] + "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", "FULLSTOP", + "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", + "FLOAT", "POINT_FLOAT", "EXPONENT_FLOAT", "EXPONENT" ] grammarFileName = "PyNestMLLexer.g4" def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.13.1") + self.checkVersion("4.10.1") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 
ecb08158a..ccb7bfe00 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.13.1 +# Generated from PyNestMLParser.g4 by ANTLR 4.10.1 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,90,598,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,91,632,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -27,209 +27,223 @@ def serializedATN(): 1,4,3,4,194,8,4,1,5,1,5,1,5,3,5,199,8,5,1,6,1,6,1,6,1,6,1,6,3,6, 206,8,6,1,7,1,7,1,7,1,7,1,7,1,7,1,7,3,7,215,8,7,1,8,1,8,3,8,219, 8,8,1,9,1,9,1,9,1,9,1,9,3,9,226,8,9,1,9,5,9,229,8,9,10,9,12,9,232, - 9,9,1,10,1,10,1,10,1,10,1,10,5,10,239,8,10,10,10,12,10,242,9,10, - 3,10,244,8,10,1,10,1,10,1,11,3,11,249,8,11,1,11,1,11,1,11,1,11,1, - 11,1,11,3,11,257,8,11,1,11,5,11,260,8,11,10,11,12,11,263,9,11,1, - 11,1,11,1,12,1,12,1,12,1,12,3,12,271,8,12,1,12,5,12,274,8,12,10, - 12,12,12,277,9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1, - 13,1,13,5,13,290,8,13,10,13,12,13,293,9,13,1,13,3,13,296,8,13,1, - 13,1,13,1,14,1,14,1,14,4,14,303,8,14,11,14,12,14,304,1,14,1,14,1, - 15,1,15,3,15,311,8,15,1,16,1,16,1,16,3,16,316,8,16,1,17,1,17,1,17, - 1,17,3,17,322,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,18,3,18, - 332,8,18,1,18,1,18,1,19,3,19,337,8,19,1,19,3,19,340,8,19,1,19,1, - 19,1,19,5,19,345,8,19,10,19,12,19,348,9,19,1,19,1,19,1,19,3,19,353, - 8,19,1,19,1,19,1,19,1,19,3,19,359,8,19,1,19,5,19,362,8,19,10,19, - 12,19,365,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21, - 3,21,377,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,385,8,24,1,25,1, - 25,5,25,389,8,25,10,25,12,25,392,9,25,1,25,3,25,395,8,25,1,26,1, - 
26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1, - 29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,419,8,29,1,29,1,29,1, - 29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,432,8,31,11,31,12, - 31,433,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1, - 33,1,33,1,33,1,33,1,33,1,33,4,33,453,8,33,11,33,12,33,454,1,33,1, - 33,1,34,1,34,1,34,1,34,1,34,5,34,464,8,34,10,34,12,34,467,9,34,1, - 34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,478,8,35,10,35,12, - 35,481,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,492, - 8,36,11,36,12,36,493,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,4,38,509,8,38,11,38,12,38,510,1,38,1,38,1,39, - 1,39,1,39,1,39,1,39,1,39,4,39,521,8,39,11,39,12,39,522,1,39,1,39, - 1,40,1,40,1,40,1,40,1,40,3,40,532,8,40,1,40,1,40,5,40,536,8,40,10, - 40,12,40,539,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,549, - 8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,558,8,42,1,43,1,43, - 1,43,1,43,1,43,1,43,3,43,566,8,43,1,43,1,43,1,43,1,44,1,44,1,44, - 1,44,1,44,1,44,5,44,577,8,44,10,44,12,44,580,9,44,3,44,582,8,44, - 1,44,1,44,3,44,586,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, - 1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, - 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, - 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,89,90,1,0, - 32,34,3,0,25,25,86,87,89,90,653,0,100,1,0,0,0,2,111,1,0,0,0,4,128, - 1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0, - 0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22, - 248,1,0,0,0,24,266,1,0,0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310, - 1,0,0,0,32,315,1,0,0,0,34,321,1,0,0,0,36,325,1,0,0,0,38,336,1,0, - 0,0,40,366,1,0,0,0,42,376,1,0,0,0,44,378,1,0,0,0,46,380,1,0,0,0, - 48,382,1,0,0,0,50,386,1,0,0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406, - 1,0,0,0,58,410,1,0,0,0,60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0, - 0,0,66,441,1,0,0,0,68,458,1,0,0,0,70,472,1,0,0,0,72,486,1,0,0,0, - 
74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,526,1,0,0,0,82,543, - 1,0,0,0,84,557,1,0,0,0,86,559,1,0,0,0,88,570,1,0,0,0,90,590,1,0, - 0,0,92,593,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, - 0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0, - 100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100, - 99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49,0,0,104,105, - 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,89,0,0,108,109, - 5,79,0,0,109,112,3,2,1,2,110,112,5,88,0,0,111,102,1,0,0,0,111,107, - 1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10,3,0,0,114,117, - 5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115,1,0,0,0,117,118, - 1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121,5,78,0,0,121,123, - 3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126,1,0,0,0,124,122, - 1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1,0,0,0,127,129,7, - 0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1,0,0,0,130,131,5, - 89,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, - 6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138,3,10,5,0,138,139, - 3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144,3,6,3,4,142,144, - 3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140,1,0,0,0,143,142, - 1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147,5,78,0,0,147,180, - 3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150,153,5,79,0,0,151, - 153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152,151,1,0,0,0,153, - 154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156,159,5,51,0,0,157, - 159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159,160,1,0,0,0,160, - 180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163,164,3,6,3,7,164, - 180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167,168,3,6,3,6,168, - 180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171,172,3,6,3,4,172, - 180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175,176,3,6,3,0,176, - 177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179,145,1,0,0,0,179, - 148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179,165,1,0,0,0,179, - 
169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181,179,1,0,0,0,181, - 182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194,3,20,10,0,185, - 194,5,86,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, - 189,1,0,0,0,189,194,1,0,0,0,190,194,5,87,0,0,191,194,5,25,0,0,192, - 194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193,186,1,0,0,0,193, - 190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194,9,1,0,0,0,195,199, - 5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198,195,1,0,0,0,198,196, - 1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206,5,55,0,0,201,206, - 5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204,206,5,62,0,0,205, - 200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205,203,1,0,0,0,205, - 204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208,215,5,65,0,0,209, - 215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0,212,215,5,73,0,0, - 213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0,0,214,209,1,0,0,0, - 214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0,0,214,213,1,0,0,0, - 215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0,0,218,216,1,0,0,0, - 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,88,0,0,221,222,5,56,0,0, - 222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0,0,0,225,221,1,0,0,0, - 225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85,0,0,228,227,1,0,0,0, - 229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0,0,0,231,19,1,0,0,0,232, - 230,1,0,0,0,233,234,5,88,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, - 237,5,74,0,0,237,239,3,6,3,0,238,236,1,0,0,0,239,242,1,0,0,0,240, - 238,1,0,0,0,240,241,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,243, - 235,1,0,0,0,243,244,1,0,0,0,244,245,1,0,0,0,245,246,5,50,0,0,246, - 21,1,0,0,0,247,249,5,29,0,0,248,247,1,0,0,0,248,249,1,0,0,0,249, - 250,1,0,0,0,250,251,5,16,0,0,251,252,5,88,0,0,252,253,3,0,0,0,253, - 254,5,76,0,0,254,256,3,6,3,0,255,257,5,84,0,0,256,255,1,0,0,0,256, - 257,1,0,0,0,257,261,1,0,0,0,258,260,3,42,21,0,259,258,1,0,0,0,260, - 263,1,0,0,0,261,259,1,0,0,0,261,262,1,0,0,0,262,264,1,0,0,0,263, - 261,1,0,0,0,264,265,5,9,0,0,265,23,1,0,0,0,266,267,3,18,9,0,267, - 
268,5,76,0,0,268,270,3,6,3,0,269,271,5,84,0,0,270,269,1,0,0,0,270, - 271,1,0,0,0,271,275,1,0,0,0,272,274,3,42,21,0,273,272,1,0,0,0,274, - 277,1,0,0,0,275,273,1,0,0,0,275,276,1,0,0,0,276,278,1,0,0,0,277, - 275,1,0,0,0,278,279,5,9,0,0,279,25,1,0,0,0,280,281,5,30,0,0,281, - 282,3,18,9,0,282,283,5,76,0,0,283,291,3,6,3,0,284,285,5,4,0,0,285, - 286,3,18,9,0,286,287,5,76,0,0,287,288,3,6,3,0,288,290,1,0,0,0,289, - 284,1,0,0,0,290,293,1,0,0,0,291,289,1,0,0,0,291,292,1,0,0,0,292, - 295,1,0,0,0,293,291,1,0,0,0,294,296,5,84,0,0,295,294,1,0,0,0,295, - 296,1,0,0,0,296,297,1,0,0,0,297,298,5,9,0,0,298,27,1,0,0,0,299,300, - 5,9,0,0,300,302,5,1,0,0,301,303,3,30,15,0,302,301,1,0,0,0,303,304, - 1,0,0,0,304,302,1,0,0,0,304,305,1,0,0,0,305,306,1,0,0,0,306,307, - 5,2,0,0,307,29,1,0,0,0,308,311,3,34,17,0,309,311,3,32,16,0,310,308, - 1,0,0,0,310,309,1,0,0,0,311,31,1,0,0,0,312,316,3,50,25,0,313,316, - 3,58,29,0,314,316,3,60,30,0,315,312,1,0,0,0,315,313,1,0,0,0,315, - 314,1,0,0,0,316,33,1,0,0,0,317,322,3,36,18,0,318,322,3,20,10,0,319, - 322,3,38,19,0,320,322,3,48,24,0,321,317,1,0,0,0,321,318,1,0,0,0, - 321,319,1,0,0,0,321,320,1,0,0,0,322,323,1,0,0,0,323,324,5,9,0,0, - 324,35,1,0,0,0,325,331,3,18,9,0,326,332,5,76,0,0,327,332,5,66,0, - 0,328,332,5,67,0,0,329,332,5,68,0,0,330,332,5,69,0,0,331,326,1,0, - 0,0,331,327,1,0,0,0,331,328,1,0,0,0,331,329,1,0,0,0,331,330,1,0, - 0,0,332,333,1,0,0,0,333,334,3,6,3,0,334,37,1,0,0,0,335,337,5,29, - 0,0,336,335,1,0,0,0,336,337,1,0,0,0,337,339,1,0,0,0,338,340,5,16, - 0,0,339,338,1,0,0,0,339,340,1,0,0,0,340,341,1,0,0,0,341,346,3,18, - 9,0,342,343,5,74,0,0,343,345,3,18,9,0,344,342,1,0,0,0,345,348,1, - 0,0,0,346,344,1,0,0,0,346,347,1,0,0,0,347,349,1,0,0,0,348,346,1, - 0,0,0,349,352,3,0,0,0,350,351,5,76,0,0,351,353,3,6,3,0,352,350,1, - 0,0,0,352,353,1,0,0,0,353,358,1,0,0,0,354,355,5,59,0,0,355,356,3, - 6,3,0,356,357,5,60,0,0,357,359,1,0,0,0,358,354,1,0,0,0,358,359,1, - 0,0,0,359,363,1,0,0,0,360,362,3,42,21,0,361,360,1,0,0,0,362,365, - 
1,0,0,0,363,361,1,0,0,0,363,364,1,0,0,0,364,39,1,0,0,0,365,363,1, - 0,0,0,366,367,3,38,19,0,367,368,5,9,0,0,368,41,1,0,0,0,369,377,5, - 45,0,0,370,377,5,46,0,0,371,372,5,47,0,0,372,373,3,44,22,0,373,374, - 5,83,0,0,374,375,3,46,23,0,375,377,1,0,0,0,376,369,1,0,0,0,376,370, - 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,88,0,0,379,45,1, - 0,0,0,380,381,5,88,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, - 6,3,0,384,383,1,0,0,0,384,385,1,0,0,0,385,49,1,0,0,0,386,390,3,52, - 26,0,387,389,3,54,27,0,388,387,1,0,0,0,389,392,1,0,0,0,390,388,1, - 0,0,0,390,391,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,393,395,3, - 56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,51,1,0,0,0,396,397,5, - 18,0,0,397,398,3,6,3,0,398,399,5,82,0,0,399,400,3,28,14,0,400,53, - 1,0,0,0,401,402,5,19,0,0,402,403,3,6,3,0,403,404,5,82,0,0,404,405, - 3,28,14,0,405,55,1,0,0,0,406,407,5,20,0,0,407,408,5,82,0,0,408,409, - 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,88,0,0,412,413, - 5,23,0,0,413,414,3,6,3,0,414,415,5,48,0,0,415,416,3,6,3,0,416,418, - 5,24,0,0,417,419,5,75,0,0,418,417,1,0,0,0,418,419,1,0,0,0,419,420, - 1,0,0,0,420,421,7,1,0,0,421,422,5,82,0,0,422,423,3,28,14,0,423,59, - 1,0,0,0,424,425,5,22,0,0,425,426,3,6,3,0,426,427,5,82,0,0,427,428, - 3,28,14,0,428,61,1,0,0,0,429,432,3,64,32,0,430,432,5,9,0,0,431,429, - 1,0,0,0,431,430,1,0,0,0,432,433,1,0,0,0,433,431,1,0,0,0,433,434, - 1,0,0,0,434,435,1,0,0,0,435,436,5,0,0,1,436,63,1,0,0,0,437,438,5, - 31,0,0,438,439,5,88,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, - 5,82,0,0,442,443,5,9,0,0,443,452,5,1,0,0,444,453,3,72,36,0,445,453, - 3,76,38,0,446,453,3,78,39,0,447,453,3,86,43,0,448,453,3,88,44,0, - 449,453,3,68,34,0,450,453,3,70,35,0,451,453,3,74,37,0,452,444,1, - 0,0,0,452,445,1,0,0,0,452,446,1,0,0,0,452,447,1,0,0,0,452,448,1, - 0,0,0,452,449,1,0,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1, - 0,0,0,454,452,1,0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5, - 2,0,0,457,67,1,0,0,0,458,459,5,40,0,0,459,460,5,49,0,0,460,465,5, - 
88,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, - 1,0,0,0,465,463,1,0,0,0,465,466,1,0,0,0,466,468,1,0,0,0,467,465, - 1,0,0,0,468,469,5,50,0,0,469,470,5,82,0,0,470,471,3,28,14,0,471, - 69,1,0,0,0,472,473,5,41,0,0,473,474,5,49,0,0,474,479,3,6,3,0,475, - 476,5,74,0,0,476,478,3,92,46,0,477,475,1,0,0,0,478,481,1,0,0,0,479, - 477,1,0,0,0,479,480,1,0,0,0,480,482,1,0,0,0,481,479,1,0,0,0,482, - 483,5,50,0,0,483,484,5,82,0,0,484,485,3,28,14,0,485,71,1,0,0,0,486, - 487,7,2,0,0,487,488,5,82,0,0,488,489,5,9,0,0,489,491,5,1,0,0,490, - 492,3,40,20,0,491,490,1,0,0,0,492,493,1,0,0,0,493,491,1,0,0,0,493, - 494,1,0,0,0,494,495,1,0,0,0,495,496,5,2,0,0,496,73,1,0,0,0,497,498, - 5,35,0,0,498,499,5,82,0,0,499,500,3,28,14,0,500,75,1,0,0,0,501,502, - 5,36,0,0,502,503,5,82,0,0,503,504,5,9,0,0,504,508,5,1,0,0,505,509, - 3,22,11,0,506,509,3,24,12,0,507,509,3,26,13,0,508,505,1,0,0,0,508, - 506,1,0,0,0,508,507,1,0,0,0,509,510,1,0,0,0,510,508,1,0,0,0,510, - 511,1,0,0,0,511,512,1,0,0,0,512,513,5,2,0,0,513,77,1,0,0,0,514,515, - 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,520,5,1,0,0,518,521, - 3,80,40,0,519,521,3,82,41,0,520,518,1,0,0,0,520,519,1,0,0,0,521, - 522,1,0,0,0,522,520,1,0,0,0,522,523,1,0,0,0,523,524,1,0,0,0,524, - 525,5,2,0,0,525,79,1,0,0,0,526,531,5,88,0,0,527,528,5,56,0,0,528, - 529,3,6,3,0,529,530,5,58,0,0,530,532,1,0,0,0,531,527,1,0,0,0,531, - 532,1,0,0,0,532,533,1,0,0,0,533,537,5,57,0,0,534,536,3,84,42,0,535, - 534,1,0,0,0,536,539,1,0,0,0,537,535,1,0,0,0,537,538,1,0,0,0,538, - 540,1,0,0,0,539,537,1,0,0,0,540,541,5,42,0,0,541,542,5,9,0,0,542, - 81,1,0,0,0,543,548,5,88,0,0,544,545,5,56,0,0,545,546,3,6,3,0,546, - 547,5,58,0,0,547,549,1,0,0,0,548,544,1,0,0,0,548,549,1,0,0,0,549, - 550,1,0,0,0,550,551,3,0,0,0,551,552,5,57,0,0,552,553,5,39,0,0,553, - 554,5,9,0,0,554,83,1,0,0,0,555,558,5,43,0,0,556,558,5,44,0,0,557, - 555,1,0,0,0,557,556,1,0,0,0,558,85,1,0,0,0,559,560,5,38,0,0,560, - 561,5,82,0,0,561,562,5,9,0,0,562,565,5,1,0,0,563,566,5,42,0,0,564, - 
566,5,39,0,0,565,563,1,0,0,0,565,564,1,0,0,0,566,567,1,0,0,0,567, - 568,5,9,0,0,568,569,5,2,0,0,569,87,1,0,0,0,570,571,5,15,0,0,571, - 572,5,88,0,0,572,581,5,49,0,0,573,578,3,90,45,0,574,575,5,74,0,0, - 575,577,3,90,45,0,576,574,1,0,0,0,577,580,1,0,0,0,578,576,1,0,0, - 0,578,579,1,0,0,0,579,582,1,0,0,0,580,578,1,0,0,0,581,573,1,0,0, - 0,581,582,1,0,0,0,582,583,1,0,0,0,583,585,5,50,0,0,584,586,3,0,0, - 0,585,584,1,0,0,0,585,586,1,0,0,0,586,587,1,0,0,0,587,588,5,82,0, - 0,588,589,3,28,14,0,589,89,1,0,0,0,590,591,5,88,0,0,591,592,3,0, - 0,0,592,91,1,0,0,0,593,594,5,88,0,0,594,595,5,76,0,0,595,596,7,3, - 0,0,596,93,1,0,0,0,63,100,111,116,122,124,128,143,152,158,179,181, - 188,193,198,205,214,218,225,230,240,243,248,256,261,270,275,291, - 295,304,310,315,321,331,336,339,346,352,358,363,376,384,390,394, - 418,431,433,452,454,465,479,493,508,510,520,522,531,537,548,557, - 565,578,581,585 + 9,9,1,9,1,9,3,9,236,8,9,1,10,1,10,1,10,1,10,1,10,5,10,243,8,10,10, + 10,12,10,246,9,10,3,10,248,8,10,1,10,1,10,1,11,3,11,253,8,11,1,11, + 1,11,1,11,1,11,1,11,1,11,3,11,261,8,11,1,11,5,11,264,8,11,10,11, + 12,11,267,9,11,1,11,1,11,1,12,1,12,1,12,1,12,3,12,275,8,12,1,12, + 5,12,278,8,12,10,12,12,12,281,9,12,1,12,1,12,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,13,1,13,5,13,294,8,13,10,13,12,13,297,9,13,1,13, + 3,13,300,8,13,1,13,1,13,1,14,1,14,1,14,4,14,307,8,14,11,14,12,14, + 308,1,14,1,14,1,15,1,15,3,15,315,8,15,1,16,1,16,1,16,3,16,320,8, + 16,1,17,1,17,1,17,1,17,3,17,326,8,17,1,17,1,17,1,18,1,18,1,18,1, + 18,1,18,1,18,3,18,336,8,18,1,18,1,18,1,19,3,19,341,8,19,1,19,3,19, + 344,8,19,1,19,1,19,1,19,5,19,349,8,19,10,19,12,19,352,9,19,1,19, + 1,19,1,19,3,19,357,8,19,1,19,1,19,1,19,1,19,3,19,363,8,19,1,19,5, + 19,366,8,19,10,19,12,19,369,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1, + 21,1,21,1,21,1,21,3,21,381,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3, + 24,389,8,24,1,25,1,25,5,25,393,8,25,10,25,12,25,396,9,25,1,25,3, + 25,399,8,25,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1, + 
28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,423, + 8,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31, + 436,8,31,11,31,12,31,437,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33, + 1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,457,8,33,11,33, + 12,33,458,1,33,1,33,1,34,1,34,1,34,1,34,1,34,5,34,468,8,34,10,34, + 12,34,471,9,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35, + 482,8,35,10,35,12,35,485,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36, + 1,36,1,36,4,36,496,8,36,11,36,12,36,497,1,36,1,36,1,37,1,37,1,37, + 1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,513,8,38,11,38,12,38, + 514,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,3,39,525,8,39,1,39,1, + 39,1,39,1,39,5,39,531,8,39,10,39,12,39,534,9,39,3,39,536,8,39,1, + 39,3,39,539,8,39,4,39,541,8,39,11,39,12,39,542,1,39,1,39,1,40,1, + 40,1,40,1,40,1,40,3,40,552,8,40,1,40,1,40,5,40,556,8,40,10,40,12, + 40,559,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,569,8,41, + 1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,578,8,42,1,43,1,43,1,43, + 1,43,1,43,1,43,3,43,586,8,43,1,43,1,43,1,43,1,43,5,43,592,8,43,10, + 43,12,43,595,9,43,3,43,597,8,43,1,43,3,43,600,8,43,1,43,1,43,1,43, + 1,44,1,44,1,44,1,44,1,44,1,44,5,44,611,8,44,10,44,12,44,614,9,44, + 3,44,616,8,44,1,44,1,44,3,44,620,8,44,1,44,1,44,1,44,1,45,1,45,1, + 45,1,46,1,46,1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18, + 20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62, + 64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75, + 1,0,90,91,1,0,32,34,3,0,25,25,87,88,90,91,694,0,100,1,0,0,0,2,111, + 1,0,0,0,4,128,1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0, + 12,205,1,0,0,0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,237, + 1,0,0,0,22,252,1,0,0,0,24,270,1,0,0,0,26,284,1,0,0,0,28,303,1,0, + 0,0,30,314,1,0,0,0,32,319,1,0,0,0,34,325,1,0,0,0,36,329,1,0,0,0, + 38,340,1,0,0,0,40,370,1,0,0,0,42,380,1,0,0,0,44,382,1,0,0,0,46,384, + 1,0,0,0,48,386,1,0,0,0,50,390,1,0,0,0,52,400,1,0,0,0,54,405,1,0, + 
0,0,56,410,1,0,0,0,58,414,1,0,0,0,60,428,1,0,0,0,62,435,1,0,0,0, + 64,441,1,0,0,0,66,445,1,0,0,0,68,462,1,0,0,0,70,476,1,0,0,0,72,490, + 1,0,0,0,74,501,1,0,0,0,76,505,1,0,0,0,78,518,1,0,0,0,80,546,1,0, + 0,0,82,563,1,0,0,0,84,577,1,0,0,0,86,579,1,0,0,0,88,604,1,0,0,0, + 90,624,1,0,0,0,92,627,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96, + 101,5,12,0,0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100, + 94,1,0,0,0,100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1, + 0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49, + 0,0,104,105,3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,90, + 0,0,108,109,5,79,0,0,109,112,3,2,1,2,110,112,5,89,0,0,111,102,1, + 0,0,0,111,107,1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10, + 3,0,0,114,117,5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115, + 1,0,0,0,117,118,1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121, + 5,78,0,0,121,123,3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126, + 1,0,0,0,124,122,1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1, + 0,0,0,127,129,7,0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1, + 0,0,0,130,131,5,90,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5, + 49,0,0,134,135,3,6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138, + 3,10,5,0,138,139,3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144, + 3,6,3,4,142,144,3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140, + 1,0,0,0,143,142,1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147, + 5,78,0,0,147,180,3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150, + 153,5,79,0,0,151,153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152, + 151,1,0,0,0,153,154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156, + 159,5,51,0,0,157,159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159, + 160,1,0,0,0,160,180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163, + 164,3,6,3,7,164,180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167, + 168,3,6,3,6,168,180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171, + 172,3,6,3,4,172,180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175, + 
176,3,6,3,0,176,177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179, + 145,1,0,0,0,179,148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179, + 165,1,0,0,0,179,169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181, + 179,1,0,0,0,181,182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194, + 3,20,10,0,185,194,5,87,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188, + 187,1,0,0,0,188,189,1,0,0,0,189,194,1,0,0,0,190,194,5,88,0,0,191, + 194,5,25,0,0,192,194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193, + 186,1,0,0,0,193,190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194, + 9,1,0,0,0,195,199,5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198, + 195,1,0,0,0,198,196,1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206, + 5,55,0,0,201,206,5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204, + 206,5,62,0,0,205,200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205, + 203,1,0,0,0,205,204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208, + 215,5,65,0,0,209,215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0, + 212,215,5,73,0,0,213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0, + 0,214,209,1,0,0,0,214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0, + 0,214,213,1,0,0,0,215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0, + 0,218,216,1,0,0,0,218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,89,0, + 0,221,222,5,56,0,0,222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0, + 0,0,225,221,1,0,0,0,225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85, + 0,0,228,227,1,0,0,0,229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0, + 0,0,231,235,1,0,0,0,232,230,1,0,0,0,233,234,5,86,0,0,234,236,3,18, + 9,0,235,233,1,0,0,0,235,236,1,0,0,0,236,19,1,0,0,0,237,238,5,89, + 0,0,238,247,5,49,0,0,239,244,3,6,3,0,240,241,5,74,0,0,241,243,3, + 6,3,0,242,240,1,0,0,0,243,246,1,0,0,0,244,242,1,0,0,0,244,245,1, + 0,0,0,245,248,1,0,0,0,246,244,1,0,0,0,247,239,1,0,0,0,247,248,1, + 0,0,0,248,249,1,0,0,0,249,250,5,50,0,0,250,21,1,0,0,0,251,253,5, + 29,0,0,252,251,1,0,0,0,252,253,1,0,0,0,253,254,1,0,0,0,254,255,5, + 16,0,0,255,256,5,89,0,0,256,257,3,0,0,0,257,258,5,76,0,0,258,260, + 
3,6,3,0,259,261,5,84,0,0,260,259,1,0,0,0,260,261,1,0,0,0,261,265, + 1,0,0,0,262,264,3,42,21,0,263,262,1,0,0,0,264,267,1,0,0,0,265,263, + 1,0,0,0,265,266,1,0,0,0,266,268,1,0,0,0,267,265,1,0,0,0,268,269, + 5,9,0,0,269,23,1,0,0,0,270,271,3,18,9,0,271,272,5,76,0,0,272,274, + 3,6,3,0,273,275,5,84,0,0,274,273,1,0,0,0,274,275,1,0,0,0,275,279, + 1,0,0,0,276,278,3,42,21,0,277,276,1,0,0,0,278,281,1,0,0,0,279,277, + 1,0,0,0,279,280,1,0,0,0,280,282,1,0,0,0,281,279,1,0,0,0,282,283, + 5,9,0,0,283,25,1,0,0,0,284,285,5,30,0,0,285,286,3,18,9,0,286,287, + 5,76,0,0,287,295,3,6,3,0,288,289,5,4,0,0,289,290,3,18,9,0,290,291, + 5,76,0,0,291,292,3,6,3,0,292,294,1,0,0,0,293,288,1,0,0,0,294,297, + 1,0,0,0,295,293,1,0,0,0,295,296,1,0,0,0,296,299,1,0,0,0,297,295, + 1,0,0,0,298,300,5,84,0,0,299,298,1,0,0,0,299,300,1,0,0,0,300,301, + 1,0,0,0,301,302,5,9,0,0,302,27,1,0,0,0,303,304,5,9,0,0,304,306,5, + 1,0,0,305,307,3,30,15,0,306,305,1,0,0,0,307,308,1,0,0,0,308,306, + 1,0,0,0,308,309,1,0,0,0,309,310,1,0,0,0,310,311,5,2,0,0,311,29,1, + 0,0,0,312,315,3,34,17,0,313,315,3,32,16,0,314,312,1,0,0,0,314,313, + 1,0,0,0,315,31,1,0,0,0,316,320,3,50,25,0,317,320,3,58,29,0,318,320, + 3,60,30,0,319,316,1,0,0,0,319,317,1,0,0,0,319,318,1,0,0,0,320,33, + 1,0,0,0,321,326,3,36,18,0,322,326,3,20,10,0,323,326,3,38,19,0,324, + 326,3,48,24,0,325,321,1,0,0,0,325,322,1,0,0,0,325,323,1,0,0,0,325, + 324,1,0,0,0,326,327,1,0,0,0,327,328,5,9,0,0,328,35,1,0,0,0,329,335, + 3,18,9,0,330,336,5,76,0,0,331,336,5,66,0,0,332,336,5,67,0,0,333, + 336,5,68,0,0,334,336,5,69,0,0,335,330,1,0,0,0,335,331,1,0,0,0,335, + 332,1,0,0,0,335,333,1,0,0,0,335,334,1,0,0,0,336,337,1,0,0,0,337, + 338,3,6,3,0,338,37,1,0,0,0,339,341,5,29,0,0,340,339,1,0,0,0,340, + 341,1,0,0,0,341,343,1,0,0,0,342,344,5,16,0,0,343,342,1,0,0,0,343, + 344,1,0,0,0,344,345,1,0,0,0,345,350,3,18,9,0,346,347,5,74,0,0,347, + 349,3,18,9,0,348,346,1,0,0,0,349,352,1,0,0,0,350,348,1,0,0,0,350, + 351,1,0,0,0,351,353,1,0,0,0,352,350,1,0,0,0,353,356,3,0,0,0,354, + 
355,5,76,0,0,355,357,3,6,3,0,356,354,1,0,0,0,356,357,1,0,0,0,357, + 362,1,0,0,0,358,359,5,59,0,0,359,360,3,6,3,0,360,361,5,60,0,0,361, + 363,1,0,0,0,362,358,1,0,0,0,362,363,1,0,0,0,363,367,1,0,0,0,364, + 366,3,42,21,0,365,364,1,0,0,0,366,369,1,0,0,0,367,365,1,0,0,0,367, + 368,1,0,0,0,368,39,1,0,0,0,369,367,1,0,0,0,370,371,3,38,19,0,371, + 372,5,9,0,0,372,41,1,0,0,0,373,381,5,45,0,0,374,381,5,46,0,0,375, + 376,5,47,0,0,376,377,3,44,22,0,377,378,5,83,0,0,378,379,3,46,23, + 0,379,381,1,0,0,0,380,373,1,0,0,0,380,374,1,0,0,0,380,375,1,0,0, + 0,381,43,1,0,0,0,382,383,5,89,0,0,383,45,1,0,0,0,384,385,5,89,0, + 0,385,47,1,0,0,0,386,388,5,17,0,0,387,389,3,6,3,0,388,387,1,0,0, + 0,388,389,1,0,0,0,389,49,1,0,0,0,390,394,3,52,26,0,391,393,3,54, + 27,0,392,391,1,0,0,0,393,396,1,0,0,0,394,392,1,0,0,0,394,395,1,0, + 0,0,395,398,1,0,0,0,396,394,1,0,0,0,397,399,3,56,28,0,398,397,1, + 0,0,0,398,399,1,0,0,0,399,51,1,0,0,0,400,401,5,18,0,0,401,402,3, + 6,3,0,402,403,5,82,0,0,403,404,3,28,14,0,404,53,1,0,0,0,405,406, + 5,19,0,0,406,407,3,6,3,0,407,408,5,82,0,0,408,409,3,28,14,0,409, + 55,1,0,0,0,410,411,5,20,0,0,411,412,5,82,0,0,412,413,3,28,14,0,413, + 57,1,0,0,0,414,415,5,21,0,0,415,416,5,89,0,0,416,417,5,23,0,0,417, + 418,3,6,3,0,418,419,5,48,0,0,419,420,3,6,3,0,420,422,5,24,0,0,421, + 423,5,75,0,0,422,421,1,0,0,0,422,423,1,0,0,0,423,424,1,0,0,0,424, + 425,7,1,0,0,425,426,5,82,0,0,426,427,3,28,14,0,427,59,1,0,0,0,428, + 429,5,22,0,0,429,430,3,6,3,0,430,431,5,82,0,0,431,432,3,28,14,0, + 432,61,1,0,0,0,433,436,3,64,32,0,434,436,5,9,0,0,435,433,1,0,0,0, + 435,434,1,0,0,0,436,437,1,0,0,0,437,435,1,0,0,0,437,438,1,0,0,0, + 438,439,1,0,0,0,439,440,5,0,0,1,440,63,1,0,0,0,441,442,5,31,0,0, + 442,443,5,89,0,0,443,444,3,66,33,0,444,65,1,0,0,0,445,446,5,82,0, + 0,446,447,5,9,0,0,447,456,5,1,0,0,448,457,3,72,36,0,449,457,3,76, + 38,0,450,457,3,78,39,0,451,457,3,86,43,0,452,457,3,88,44,0,453,457, + 3,68,34,0,454,457,3,70,35,0,455,457,3,74,37,0,456,448,1,0,0,0,456, + 
449,1,0,0,0,456,450,1,0,0,0,456,451,1,0,0,0,456,452,1,0,0,0,456, + 453,1,0,0,0,456,454,1,0,0,0,456,455,1,0,0,0,457,458,1,0,0,0,458, + 456,1,0,0,0,458,459,1,0,0,0,459,460,1,0,0,0,460,461,5,2,0,0,461, + 67,1,0,0,0,462,463,5,40,0,0,463,464,5,49,0,0,464,469,5,89,0,0,465, + 466,5,74,0,0,466,468,3,92,46,0,467,465,1,0,0,0,468,471,1,0,0,0,469, + 467,1,0,0,0,469,470,1,0,0,0,470,472,1,0,0,0,471,469,1,0,0,0,472, + 473,5,50,0,0,473,474,5,82,0,0,474,475,3,28,14,0,475,69,1,0,0,0,476, + 477,5,41,0,0,477,478,5,49,0,0,478,483,3,6,3,0,479,480,5,74,0,0,480, + 482,3,92,46,0,481,479,1,0,0,0,482,485,1,0,0,0,483,481,1,0,0,0,483, + 484,1,0,0,0,484,486,1,0,0,0,485,483,1,0,0,0,486,487,5,50,0,0,487, + 488,5,82,0,0,488,489,3,28,14,0,489,71,1,0,0,0,490,491,7,2,0,0,491, + 492,5,82,0,0,492,493,5,9,0,0,493,495,5,1,0,0,494,496,3,40,20,0,495, + 494,1,0,0,0,496,497,1,0,0,0,497,495,1,0,0,0,497,498,1,0,0,0,498, + 499,1,0,0,0,499,500,5,2,0,0,500,73,1,0,0,0,501,502,5,35,0,0,502, + 503,5,82,0,0,503,504,3,28,14,0,504,75,1,0,0,0,505,506,5,36,0,0,506, + 507,5,82,0,0,507,508,5,9,0,0,508,512,5,1,0,0,509,513,3,22,11,0,510, + 513,3,24,12,0,511,513,3,26,13,0,512,509,1,0,0,0,512,510,1,0,0,0, + 512,511,1,0,0,0,513,514,1,0,0,0,514,512,1,0,0,0,514,515,1,0,0,0, + 515,516,1,0,0,0,516,517,5,2,0,0,517,77,1,0,0,0,518,519,5,37,0,0, + 519,520,5,82,0,0,520,521,5,9,0,0,521,540,5,1,0,0,522,525,3,80,40, + 0,523,525,3,82,41,0,524,522,1,0,0,0,524,523,1,0,0,0,525,538,1,0, + 0,0,526,535,5,49,0,0,527,532,3,90,45,0,528,529,5,74,0,0,529,531, + 3,90,45,0,530,528,1,0,0,0,531,534,1,0,0,0,532,530,1,0,0,0,532,533, + 1,0,0,0,533,536,1,0,0,0,534,532,1,0,0,0,535,527,1,0,0,0,535,536, + 1,0,0,0,536,537,1,0,0,0,537,539,5,50,0,0,538,526,1,0,0,0,538,539, + 1,0,0,0,539,541,1,0,0,0,540,524,1,0,0,0,541,542,1,0,0,0,542,540, + 1,0,0,0,542,543,1,0,0,0,543,544,1,0,0,0,544,545,5,2,0,0,545,79,1, + 0,0,0,546,551,5,89,0,0,547,548,5,56,0,0,548,549,3,6,3,0,549,550, + 5,58,0,0,550,552,1,0,0,0,551,547,1,0,0,0,551,552,1,0,0,0,552,553, + 
1,0,0,0,553,557,5,57,0,0,554,556,3,84,42,0,555,554,1,0,0,0,556,559, + 1,0,0,0,557,555,1,0,0,0,557,558,1,0,0,0,558,560,1,0,0,0,559,557, + 1,0,0,0,560,561,5,42,0,0,561,562,5,9,0,0,562,81,1,0,0,0,563,568, + 5,89,0,0,564,565,5,56,0,0,565,566,3,6,3,0,566,567,5,58,0,0,567,569, + 1,0,0,0,568,564,1,0,0,0,568,569,1,0,0,0,569,570,1,0,0,0,570,571, + 3,0,0,0,571,572,5,57,0,0,572,573,5,39,0,0,573,574,5,9,0,0,574,83, + 1,0,0,0,575,578,5,43,0,0,576,578,5,44,0,0,577,575,1,0,0,0,577,576, + 1,0,0,0,578,85,1,0,0,0,579,580,5,38,0,0,580,581,5,82,0,0,581,582, + 5,9,0,0,582,585,5,1,0,0,583,586,5,42,0,0,584,586,5,39,0,0,585,583, + 1,0,0,0,585,584,1,0,0,0,586,599,1,0,0,0,587,596,5,49,0,0,588,593, + 3,90,45,0,589,590,5,74,0,0,590,592,3,90,45,0,591,589,1,0,0,0,592, + 595,1,0,0,0,593,591,1,0,0,0,593,594,1,0,0,0,594,597,1,0,0,0,595, + 593,1,0,0,0,596,588,1,0,0,0,596,597,1,0,0,0,597,598,1,0,0,0,598, + 600,5,50,0,0,599,587,1,0,0,0,599,600,1,0,0,0,600,601,1,0,0,0,601, + 602,5,9,0,0,602,603,5,2,0,0,603,87,1,0,0,0,604,605,5,15,0,0,605, + 606,5,89,0,0,606,615,5,49,0,0,607,612,3,90,45,0,608,609,5,74,0,0, + 609,611,3,90,45,0,610,608,1,0,0,0,611,614,1,0,0,0,612,610,1,0,0, + 0,612,613,1,0,0,0,613,616,1,0,0,0,614,612,1,0,0,0,615,607,1,0,0, + 0,615,616,1,0,0,0,616,617,1,0,0,0,617,619,5,50,0,0,618,620,3,0,0, + 0,619,618,1,0,0,0,619,620,1,0,0,0,620,621,1,0,0,0,621,622,5,82,0, + 0,622,623,3,28,14,0,623,89,1,0,0,0,624,625,5,89,0,0,625,626,3,0, + 0,0,626,91,1,0,0,0,627,628,5,89,0,0,628,629,5,76,0,0,629,630,7,3, + 0,0,630,93,1,0,0,0,70,100,111,116,122,124,128,143,152,158,179,181, + 188,193,198,205,214,218,225,230,235,244,247,252,260,265,274,279, + 295,299,308,314,319,325,335,340,343,350,356,362,367,380,388,394, + 398,422,435,437,456,458,469,483,497,512,514,524,532,535,538,542, + 551,557,568,577,585,593,596,599,612,615,619 ] class PyNestMLParser ( Parser ): @@ -257,7 +271,7 @@ class PyNestMLParser ( Parser ): "'<<'", "'>>'", "'<'", "'>'", "'<='", "'+='", "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", 
"','", "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", "':'", - "'::'", "';'", "'''" ] + "'::'", "';'", "'''", "'.'" ] symbolicNames = [ "", "INDENT", "DEDENT", "DOCSTRING_TRIPLEQUOTE", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "DOCSTRING", @@ -283,8 +297,8 @@ class PyNestMLParser ( Parser ): "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", - "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", - "FLOAT" ] + "FULLSTOP", "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", + "UNSIGNED_INTEGER", "FLOAT" ] RULE_dataType = 0 RULE_unitType = 1 @@ -433,15 +447,16 @@ class PyNestMLParser ( Parser ): DOUBLE_COLON=83 SEMICOLON=84 DIFFERENTIAL_ORDER=85 - BOOLEAN_LITERAL=86 - STRING_LITERAL=87 - NAME=88 - UNSIGNED_INTEGER=89 - FLOAT=90 + FULLSTOP=86 + BOOLEAN_LITERAL=87 + STRING_LITERAL=88 + NAME=89 + UNSIGNED_INTEGER=90 + FLOAT=91 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.13.1") + self.checkVersion("4.10.1") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) self._predicates = None @@ -500,32 +515,32 @@ def dataType(self): self.state = 100 self._errHandler.sync(self) token = self._input.LA(1) - if token in [10]: + if token in [PyNestMLParser.INTEGER_KEYWORD]: self.enterOuterAlt(localctx, 1) self.state = 94 localctx.isInt = self.match(PyNestMLParser.INTEGER_KEYWORD) pass - elif token in [11]: + elif token in [PyNestMLParser.REAL_KEYWORD]: self.enterOuterAlt(localctx, 2) self.state = 95 localctx.isReal = self.match(PyNestMLParser.REAL_KEYWORD) pass - elif token in [12]: + elif token in [PyNestMLParser.STRING_KEYWORD]: self.enterOuterAlt(localctx, 3) self.state = 96 localctx.isString = self.match(PyNestMLParser.STRING_KEYWORD) pass - elif token in [13]: + elif token in [PyNestMLParser.BOOLEAN_KEYWORD]: self.enterOuterAlt(localctx, 
4) self.state = 97 localctx.isBool = self.match(PyNestMLParser.BOOLEAN_KEYWORD) pass - elif token in [14]: + elif token in [PyNestMLParser.VOID_KEYWORD]: self.enterOuterAlt(localctx, 5) self.state = 98 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass - elif token in [49, 88, 89]: + elif token in [PyNestMLParser.LEFT_PAREN, PyNestMLParser.NAME, PyNestMLParser.UNSIGNED_INTEGER]: self.enterOuterAlt(localctx, 6) self.state = 99 localctx.unit = self.unitType(0) @@ -616,7 +631,7 @@ def unitType(self, _p:int=0): self.state = 111 self._errHandler.sync(self) token = self._input.LA(1) - if token in [49]: + if token in [PyNestMLParser.LEFT_PAREN]: self.state = 103 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) self.state = 104 @@ -624,7 +639,7 @@ def unitType(self, _p:int=0): self.state = 105 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [89]: + elif token in [PyNestMLParser.UNSIGNED_INTEGER]: self.state = 107 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) self.state = 108 @@ -632,7 +647,7 @@ def unitType(self, _p:int=0): self.state = 109 localctx.right = self.unitType(2) pass - elif token in [88]: + elif token in [PyNestMLParser.NAME]: self.state = 110 localctx.unit = self.match(PyNestMLParser.NAME) pass @@ -662,11 +677,11 @@ def unitType(self, _p:int=0): self.state = 116 self._errHandler.sync(self) token = self._input.LA(1) - if token in [77]: + if token in [PyNestMLParser.STAR]: self.state = 114 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [79]: + elif token in [PyNestMLParser.FORWARD_SLASH]: self.state = 115 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass @@ -743,10 +758,10 @@ def unitTypeExponent(self): self.state = 128 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==51 or _la==75: + if _la==PyNestMLParser.PLUS or _la==PyNestMLParser.MINUS: self.state = 127 _la = self._input.LA(1) - if not(_la==51 or _la==75): + if 
not(_la==PyNestMLParser.PLUS or _la==PyNestMLParser.MINUS): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -869,7 +884,7 @@ def expression(self, _p:int=0): self.state = 143 self._errHandler.sync(self) token = self._input.LA(1) - if token in [49]: + if token in [PyNestMLParser.LEFT_PAREN]: self.state = 133 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) self.state = 134 @@ -877,19 +892,19 @@ def expression(self, _p:int=0): self.state = 135 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [51, 52, 75]: + elif token in [PyNestMLParser.PLUS, PyNestMLParser.TILDE, PyNestMLParser.MINUS]: self.state = 137 self.unaryOperator() self.state = 138 localctx.term = self.expression(9) pass - elif token in [28]: + elif token in [PyNestMLParser.NOT_KEYWORD]: self.state = 140 localctx.logicalNot = self.match(PyNestMLParser.NOT_KEYWORD) self.state = 141 localctx.term = self.expression(4) pass - elif token in [25, 86, 87, 88, 89, 90]: + elif token in [PyNestMLParser.INF_KEYWORD, PyNestMLParser.BOOLEAN_LITERAL, PyNestMLParser.STRING_LITERAL, PyNestMLParser.NAME, PyNestMLParser.UNSIGNED_INTEGER, PyNestMLParser.FLOAT]: self.state = 142 self.simpleExpression() pass @@ -933,15 +948,15 @@ def expression(self, _p:int=0): self.state = 152 self._errHandler.sync(self) token = self._input.LA(1) - if token in [77]: + if token in [PyNestMLParser.STAR]: self.state = 149 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [79]: + elif token in [PyNestMLParser.FORWARD_SLASH]: self.state = 150 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass - elif token in [80]: + elif token in [PyNestMLParser.PERCENT]: self.state = 151 localctx.moduloOp = self.match(PyNestMLParser.PERCENT) pass @@ -963,11 +978,11 @@ def expression(self, _p:int=0): self.state = 158 self._errHandler.sync(self) token = self._input.LA(1) - if token in [51]: + if token in [PyNestMLParser.PLUS]: self.state = 156 
localctx.plusOp = self.match(PyNestMLParser.PLUS) pass - elif token in [75]: + elif token in [PyNestMLParser.MINUS]: self.state = 157 localctx.minusOp = self.match(PyNestMLParser.MINUS) pass @@ -1121,7 +1136,7 @@ def simpleExpression(self): self.enterOuterAlt(localctx, 3) self.state = 186 _la = self._input.LA(1) - if not(_la==89 or _la==90): + if not(_la==PyNestMLParser.UNSIGNED_INTEGER or _la==PyNestMLParser.FLOAT): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1204,15 +1219,15 @@ def unaryOperator(self): self.state = 198 self._errHandler.sync(self) token = self._input.LA(1) - if token in [51]: + if token in [PyNestMLParser.PLUS]: self.state = 195 localctx.unaryPlus = self.match(PyNestMLParser.PLUS) pass - elif token in [75]: + elif token in [PyNestMLParser.MINUS]: self.state = 196 localctx.unaryMinus = self.match(PyNestMLParser.MINUS) pass - elif token in [52]: + elif token in [PyNestMLParser.TILDE]: self.state = 197 localctx.unaryTilde = self.match(PyNestMLParser.TILDE) pass @@ -1276,23 +1291,23 @@ def bitOperator(self): self.state = 205 self._errHandler.sync(self) token = self._input.LA(1) - if token in [55]: + if token in [PyNestMLParser.AMPERSAND]: self.state = 200 localctx.bitAnd = self.match(PyNestMLParser.AMPERSAND) pass - elif token in [54]: + elif token in [PyNestMLParser.CARET]: self.state = 201 localctx.bitXor = self.match(PyNestMLParser.CARET) pass - elif token in [53]: + elif token in [PyNestMLParser.PIPE]: self.state = 202 localctx.bitOr = self.match(PyNestMLParser.PIPE) pass - elif token in [61]: + elif token in [PyNestMLParser.LEFT_LEFT_ANGLE]: self.state = 203 localctx.bitShiftLeft = self.match(PyNestMLParser.LEFT_LEFT_ANGLE) pass - elif token in [62]: + elif token in [PyNestMLParser.RIGHT_RIGHT_ANGLE]: self.state = 204 localctx.bitShiftRight = self.match(PyNestMLParser.RIGHT_RIGHT_ANGLE) pass @@ -1364,31 +1379,31 @@ def comparisonOperator(self): self.state = 214 self._errHandler.sync(self) token = 
self._input.LA(1) - if token in [63]: + if token in [PyNestMLParser.LEFT_ANGLE]: self.state = 207 localctx.lt = self.match(PyNestMLParser.LEFT_ANGLE) pass - elif token in [65]: + elif token in [PyNestMLParser.LEFT_ANGLE_EQUALS]: self.state = 208 localctx.le = self.match(PyNestMLParser.LEFT_ANGLE_EQUALS) pass - elif token in [70]: + elif token in [PyNestMLParser.EQUALS_EQUALS]: self.state = 209 localctx.eq = self.match(PyNestMLParser.EQUALS_EQUALS) pass - elif token in [71]: + elif token in [PyNestMLParser.EXCLAMATION_EQUALS]: self.state = 210 localctx.ne = self.match(PyNestMLParser.EXCLAMATION_EQUALS) pass - elif token in [72]: + elif token in [PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE]: self.state = 211 localctx.ne2 = self.match(PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE) pass - elif token in [73]: + elif token in [PyNestMLParser.RIGHT_ANGLE_EQUALS]: self.state = 212 localctx.ge = self.match(PyNestMLParser.RIGHT_ANGLE_EQUALS) pass - elif token in [64]: + elif token in [PyNestMLParser.RIGHT_ANGLE]: self.state = 213 localctx.gt = self.match(PyNestMLParser.RIGHT_ANGLE) pass @@ -1440,11 +1455,11 @@ def logicalOperator(self): self.state = 218 self._errHandler.sync(self) token = self._input.LA(1) - if token in [26]: + if token in [PyNestMLParser.AND_KEYWORD]: self.state = 216 localctx.logicalAnd = self.match(PyNestMLParser.AND_KEYWORD) pass - elif token in [27]: + elif token in [PyNestMLParser.OR_KEYWORD]: self.state = 217 localctx.logicalOr = self.match(PyNestMLParser.OR_KEYWORD) pass @@ -1468,6 +1483,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.parser = parser self.name = None # Token self.vectorParameter = None # ExpressionContext + self.attribute = None # VariableContext def NAME(self): return self.getToken(PyNestMLParser.NAME, 0) @@ -1484,10 +1500,17 @@ def DIFFERENTIAL_ORDER(self, i:int=None): else: return self.getToken(PyNestMLParser.DIFFERENTIAL_ORDER, i) + def FULLSTOP(self): + return self.getToken(PyNestMLParser.FULLSTOP, 
0) + def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def variable(self): + return self.getTypedRuleContext(PyNestMLParser.VariableContext,0) + + def getRuleIndex(self): return PyNestMLParser.RULE_variable @@ -1531,6 +1554,16 @@ def variable(self): self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) + self.state = 235 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,19,self._ctx) + if la_ == 1: + self.state = 233 + self.match(PyNestMLParser.FULLSTOP) + self.state = 234 + localctx.attribute = self.variable() + + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1589,31 +1622,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 233 + self.state = 237 localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 234 + self.state = 238 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 243 + self.state = 247 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 63489) != 0): - self.state = 235 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INF_KEYWORD) | (1 << PyNestMLParser.NOT_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN) | (1 << PyNestMLParser.PLUS) | (1 << PyNestMLParser.TILDE))) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (PyNestMLParser.MINUS - 75)) | (1 << (PyNestMLParser.BOOLEAN_LITERAL - 75)) | (1 << (PyNestMLParser.STRING_LITERAL - 75)) | (1 << (PyNestMLParser.NAME - 75)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 75)) | (1 << (PyNestMLParser.FLOAT - 75)))) != 0): + self.state = 239 self.expression(0) - self.state = 240 + self.state = 244 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 236 + while _la==PyNestMLParser.COMMA: + self.state 
= 240 self.match(PyNestMLParser.COMMA) - self.state = 237 + self.state = 241 self.expression(0) - self.state = 242 + self.state = 246 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 245 + self.state = 249 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ -1686,43 +1719,43 @@ def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 248 + self.state = 252 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==29: - self.state = 247 + if _la==PyNestMLParser.RECORDABLE_KEYWORD: + self.state = 251 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 250 + self.state = 254 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 251 + self.state = 255 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 252 + self.state = 256 self.dataType() - self.state = 253 + self.state = 257 self.match(PyNestMLParser.EQUALS) - self.state = 254 + self.state = 258 self.expression(0) - self.state = 256 + self.state = 260 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==84: - self.state = 255 + if _la==PyNestMLParser.SEMICOLON: + self.state = 259 self.match(PyNestMLParser.SEMICOLON) - self.state = 261 + self.state = 265 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 258 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + self.state = 262 localctx.decorator = self.anyDecorator() - self.state = 263 + self.state = 267 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 264 + self.state = 268 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1786,31 +1819,31 @@ def odeEquation(self): self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 266 + self.state = 270 localctx.lhs = self.variable() - self.state = 267 + self.state = 271 self.match(PyNestMLParser.EQUALS) - self.state = 268 + self.state = 272 localctx.rhs = self.expression(0) - self.state = 270 + self.state = 274 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==84: - self.state = 269 + if _la==PyNestMLParser.SEMICOLON: + self.state = 273 self.match(PyNestMLParser.SEMICOLON) - self.state = 275 + self.state = 279 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 272 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + self.state = 276 localctx.decorator = self.anyDecorator() - self.state = 277 + self.state = 281 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 278 + self.state = 282 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1882,39 +1915,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 280 + self.state = 284 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 281 + self.state = 285 self.variable() - self.state = 282 + self.state = 286 self.match(PyNestMLParser.EQUALS) - self.state = 283 + self.state = 287 self.expression(0) - self.state = 291 + self.state = 295 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==4: - self.state = 284 + while _la==PyNestMLParser.KERNEL_JOINING: + self.state = 288 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 285 + self.state = 289 self.variable() - self.state = 286 + self.state = 290 self.match(PyNestMLParser.EQUALS) - self.state = 287 + self.state = 291 self.expression(0) - self.state = 293 + self.state = 297 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 295 + 
self.state = 299 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==84: - self.state = 294 + if _la==PyNestMLParser.SEMICOLON: + self.state = 298 self.match(PyNestMLParser.SEMICOLON) - self.state = 297 + self.state = 301 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1967,23 +2000,23 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 299 + self.state = 303 self.match(PyNestMLParser.NEWLINE) - self.state = 300 + self.state = 304 self.match(PyNestMLParser.INDENT) - self.state = 302 + self.state = 306 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 301 + self.state = 305 self.stmt() - self.state = 304 + self.state = 308 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==88): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INLINE_KEYWORD) | (1 << PyNestMLParser.RETURN_KEYWORD) | (1 << PyNestMLParser.IF_KEYWORD) | (1 << PyNestMLParser.FOR_KEYWORD) | (1 << PyNestMLParser.WHILE_KEYWORD) | (1 << PyNestMLParser.RECORDABLE_KEYWORD))) != 0) or _la==PyNestMLParser.NAME): break - self.state = 306 + self.state = 310 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2026,17 +2059,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_stmt) try: - self.state = 310 + self.state = 314 self._errHandler.sync(self) token = self._input.LA(1) - if token in [16, 17, 29, 88]: + if token in [PyNestMLParser.INLINE_KEYWORD, PyNestMLParser.RETURN_KEYWORD, PyNestMLParser.RECORDABLE_KEYWORD, PyNestMLParser.NAME]: self.enterOuterAlt(localctx, 1) - self.state = 308 + self.state = 312 self.smallStmt() pass - elif token in [18, 21, 22]: + elif token in [PyNestMLParser.IF_KEYWORD, PyNestMLParser.FOR_KEYWORD, PyNestMLParser.WHILE_KEYWORD]: 
self.enterOuterAlt(localctx, 2) - self.state = 309 + self.state = 313 self.compoundStmt() pass else: @@ -2087,22 +2120,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_compoundStmt) try: - self.state = 315 + self.state = 319 self._errHandler.sync(self) token = self._input.LA(1) - if token in [18]: + if token in [PyNestMLParser.IF_KEYWORD]: self.enterOuterAlt(localctx, 1) - self.state = 312 + self.state = 316 self.ifStmt() pass - elif token in [21]: + elif token in [PyNestMLParser.FOR_KEYWORD]: self.enterOuterAlt(localctx, 2) - self.state = 313 + self.state = 317 self.forStmt() pass - elif token in [22]: + elif token in [PyNestMLParser.WHILE_KEYWORD]: self.enterOuterAlt(localctx, 3) - self.state = 314 + self.state = 318 self.whileStmt() pass else: @@ -2161,31 +2194,31 @@ def smallStmt(self): self.enterRule(localctx, 34, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 321 + self.state = 325 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,31,self._ctx) + la_ = self._interp.adaptivePredict(self._input,32,self._ctx) if la_ == 1: - self.state = 317 + self.state = 321 self.assignment() pass elif la_ == 2: - self.state = 318 + self.state = 322 self.functionCall() pass elif la_ == 3: - self.state = 319 + self.state = 323 self.declaration() pass elif la_ == 4: - self.state = 320 + self.state = 324 self.returnStmt() pass - self.state = 323 + self.state = 327 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2250,35 +2283,35 @@ def assignment(self): self.enterRule(localctx, 36, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 325 + self.state = 329 localctx.lhs_variable = self.variable() - self.state = 331 + self.state = 335 self._errHandler.sync(self) token = self._input.LA(1) - if token in [76]: - self.state = 326 + if token in [PyNestMLParser.EQUALS]: + 
self.state = 330 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass - elif token in [66]: - self.state = 327 + elif token in [PyNestMLParser.PLUS_EQUALS]: + self.state = 331 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass - elif token in [67]: - self.state = 328 + elif token in [PyNestMLParser.MINUS_EQUALS]: + self.state = 332 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass - elif token in [68]: - self.state = 329 + elif token in [PyNestMLParser.STAR_EQUALS]: + self.state = 333 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass - elif token in [69]: - self.state = 330 + elif token in [PyNestMLParser.FORWARD_SLASH_EQUALS]: + self.state = 334 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass else: raise NoViableAltException(self) - self.state = 333 + self.state = 337 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2366,67 +2399,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 336 + self.state = 340 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==29: - self.state = 335 + if _la==PyNestMLParser.RECORDABLE_KEYWORD: + self.state = 339 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 339 + self.state = 343 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==16: - self.state = 338 + if _la==PyNestMLParser.INLINE_KEYWORD: + self.state = 342 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 341 + self.state = 345 self.variable() - self.state = 346 + self.state = 350 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 342 + while _la==PyNestMLParser.COMMA: + self.state = 346 self.match(PyNestMLParser.COMMA) - self.state = 343 + self.state = 347 self.variable() - self.state = 348 + self.state = 352 self._errHandler.sync(self) _la = self._input.LA(1) - 
self.state = 349 + self.state = 353 self.dataType() - self.state = 352 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==76: - self.state = 350 + if _la==PyNestMLParser.EQUALS: + self.state = 354 self.match(PyNestMLParser.EQUALS) - self.state = 351 + self.state = 355 localctx.rhs = self.expression(0) - self.state = 358 + self.state = 362 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==59: - self.state = 354 + if _la==PyNestMLParser.LEFT_LEFT_SQUARE: + self.state = 358 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 355 + self.state = 359 localctx.invariant = self.expression(0) - self.state = 356 + self.state = 360 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 363 + self.state = 367 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 360 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + self.state = 364 localctx.decorator = self.anyDecorator() - self.state = 365 + self.state = 369 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2471,9 +2504,9 @@ def declaration_newline(self): self.enterRule(localctx, 40, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 366 + self.state = 370 self.declaration() - self.state = 367 + self.state = 371 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2528,28 +2561,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_anyDecorator) try: - self.state = 376 + self.state = 380 self._errHandler.sync(self) token = self._input.LA(1) - if token in [45]: + if token in [PyNestMLParser.DECORATOR_HOMOGENEOUS]: self.enterOuterAlt(localctx, 1) - self.state = 369 + self.state = 373 
self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass - elif token in [46]: + elif token in [PyNestMLParser.DECORATOR_HETEROGENEOUS]: self.enterOuterAlt(localctx, 2) - self.state = 370 + self.state = 374 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass - elif token in [47]: + elif token in [PyNestMLParser.AT]: self.enterOuterAlt(localctx, 3) - self.state = 371 + self.state = 375 self.match(PyNestMLParser.AT) - self.state = 372 + self.state = 376 self.namespaceDecoratorNamespace() - self.state = 373 + self.state = 377 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 374 + self.state = 378 self.namespaceDecoratorName() pass else: @@ -2593,7 +2626,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 44, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 378 + self.state = 382 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2633,7 +2666,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 46, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 380 + self.state = 384 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2677,13 +2710,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 382 + self.state = 386 self.match(PyNestMLParser.RETURN_KEYWORD) - self.state = 384 + self.state = 388 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 63489) != 0): - self.state = 383 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INF_KEYWORD) | (1 << PyNestMLParser.NOT_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN) | (1 << PyNestMLParser.PLUS) | (1 << PyNestMLParser.TILDE))) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << 
(PyNestMLParser.MINUS - 75)) | (1 << (PyNestMLParser.BOOLEAN_LITERAL - 75)) | (1 << (PyNestMLParser.STRING_LITERAL - 75)) | (1 << (PyNestMLParser.NAME - 75)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 75)) | (1 << (PyNestMLParser.FLOAT - 75)))) != 0): + self.state = 387 self.expression(0) @@ -2737,23 +2770,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 386 - self.ifClause() self.state = 390 + self.ifClause() + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==19: - self.state = 387 + while _la==PyNestMLParser.ELIF_KEYWORD: + self.state = 391 self.elifClause() - self.state = 392 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 394 + self.state = 398 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==20: - self.state = 393 + if _la==PyNestMLParser.ELSE_KEYWORD: + self.state = 397 self.elseClause() @@ -2805,13 +2838,13 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 396 + self.state = 400 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 397 + self.state = 401 self.expression(0) - self.state = 398 + self.state = 402 self.match(PyNestMLParser.COLON) - self.state = 399 + self.state = 403 self.block() except RecognitionException as re: localctx.exception = re @@ -2861,13 +2894,13 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 401 + self.state = 405 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 402 + self.state = 406 self.expression(0) - self.state = 403 + self.state = 407 self.match(PyNestMLParser.COLON) - self.state = 404 + self.state = 408 self.block() except RecognitionException as re: localctx.exception = re @@ -2913,11 +2946,11 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: self.enterOuterAlt(localctx, 1) - self.state = 406 + 
self.state = 410 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 407 + self.state = 411 self.match(PyNestMLParser.COLON) - self.state = 408 + self.state = 412 self.block() except RecognitionException as re: localctx.exception = re @@ -2996,39 +3029,39 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 410 + self.state = 414 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 411 + self.state = 415 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 412 + self.state = 416 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 413 + self.state = 417 localctx.start_from = self.expression(0) - self.state = 414 + self.state = 418 self.match(PyNestMLParser.ELLIPSIS) - self.state = 415 + self.state = 419 localctx.end_at = self.expression(0) - self.state = 416 + self.state = 420 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 418 + self.state = 422 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==75: - self.state = 417 + if _la==PyNestMLParser.MINUS: + self.state = 421 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 420 + self.state = 424 _la = self._input.LA(1) - if not(_la==89 or _la==90): + if not(_la==PyNestMLParser.UNSIGNED_INTEGER or _la==PyNestMLParser.FLOAT): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 421 + self.state = 425 self.match(PyNestMLParser.COLON) - self.state = 422 + self.state = 426 self.block() except RecognitionException as re: localctx.exception = re @@ -3078,13 +3111,13 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 424 + self.state = 428 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 425 + self.state = 429 self.expression(0) - self.state = 426 + self.state = 430 self.match(PyNestMLParser.COLON) - self.state = 427 + self.state = 431 self.block() except RecognitionException as re: 
localctx.exception = re @@ -3137,31 +3170,31 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 431 + self.state = 435 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 431 + self.state = 435 self._errHandler.sync(self) token = self._input.LA(1) - if token in [31]: - self.state = 429 + if token in [PyNestMLParser.MODEL_KEYWORD]: + self.state = 433 self.model() pass - elif token in [9]: - self.state = 430 + elif token in [PyNestMLParser.NEWLINE]: + self.state = 434 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 433 + self.state = 437 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==9 or _la==31): + if not (_la==PyNestMLParser.NEWLINE or _la==PyNestMLParser.MODEL_KEYWORD): break - self.state = 435 + self.state = 439 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3207,11 +3240,11 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 437 + self.state = 441 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 438 + self.state = 442 self.match(PyNestMLParser.NAME) - self.state = 439 + self.state = 443 self.modelBody() except RecognitionException as re: localctx.exception = re @@ -3316,61 +3349,61 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 441 + self.state = 445 self.match(PyNestMLParser.COLON) - self.state = 442 + self.state = 446 self.match(PyNestMLParser.NEWLINE) - self.state = 443 + self.state = 447 self.match(PyNestMLParser.INDENT) - self.state = 452 + self.state = 456 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 452 + self.state = 456 self._errHandler.sync(self) token = self._input.LA(1) - if token in [32, 33, 34]: - self.state = 444 + if token in [PyNestMLParser.STATE_KEYWORD, 
PyNestMLParser.PARAMETERS_KEYWORD, PyNestMLParser.INTERNALS_KEYWORD]: + self.state = 448 self.blockWithVariables() pass - elif token in [36]: - self.state = 445 + elif token in [PyNestMLParser.EQUATIONS_KEYWORD]: + self.state = 449 self.equationsBlock() pass - elif token in [37]: - self.state = 446 + elif token in [PyNestMLParser.INPUT_KEYWORD]: + self.state = 450 self.inputBlock() pass - elif token in [38]: - self.state = 447 + elif token in [PyNestMLParser.OUTPUT_KEYWORD]: + self.state = 451 self.outputBlock() pass - elif token in [15]: - self.state = 448 + elif token in [PyNestMLParser.FUNCTION_KEYWORD]: + self.state = 452 self.function() pass - elif token in [40]: - self.state = 449 + elif token in [PyNestMLParser.ON_RECEIVE_KEYWORD]: + self.state = 453 self.onReceiveBlock() pass - elif token in [41]: - self.state = 450 + elif token in [PyNestMLParser.ON_CONDITION_KEYWORD]: + self.state = 454 self.onConditionBlock() pass - elif token in [35]: - self.state = 451 + elif token in [PyNestMLParser.UPDATE_KEYWORD]: + self.state = 455 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 454 + self.state = 458 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.FUNCTION_KEYWORD) | (1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD) | (1 << PyNestMLParser.UPDATE_KEYWORD) | (1 << PyNestMLParser.EQUATIONS_KEYWORD) | (1 << PyNestMLParser.INPUT_KEYWORD) | (1 << PyNestMLParser.OUTPUT_KEYWORD) | (1 << PyNestMLParser.ON_RECEIVE_KEYWORD) | (1 << PyNestMLParser.ON_CONDITION_KEYWORD))) != 0)): break - self.state = 456 + self.state = 460 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3440,29 +3473,29 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - 
self.state = 458 + self.state = 462 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 459 + self.state = 463 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 460 + self.state = 464 localctx.inputPortName = self.match(PyNestMLParser.NAME) - self.state = 465 + self.state = 469 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 461 + while _la==PyNestMLParser.COMMA: + self.state = 465 self.match(PyNestMLParser.COMMA) - self.state = 462 + self.state = 466 self.constParameter() - self.state = 467 + self.state = 471 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 468 + self.state = 472 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 469 + self.state = 473 self.match(PyNestMLParser.COLON) - self.state = 470 + self.state = 474 self.block() except RecognitionException as re: localctx.exception = re @@ -3533,29 +3566,29 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 472 + self.state = 476 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 473 + self.state = 477 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 474 + self.state = 478 localctx.condition = self.expression(0) - self.state = 479 + self.state = 483 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 475 + while _la==PyNestMLParser.COMMA: + self.state = 479 self.match(PyNestMLParser.COMMA) - self.state = 476 + self.state = 480 self.constParameter() - self.state = 481 + self.state = 485 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 482 + self.state = 486 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 483 + self.state = 487 self.match(PyNestMLParser.COLON) - self.state = 484 + self.state = 488 self.block() except RecognitionException as re: localctx.exception = re @@ -3621,33 +3654,33 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 486 + self.state 
= 490 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD))) != 0)): localctx.blockType = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 487 + self.state = 491 self.match(PyNestMLParser.COLON) - self.state = 488 + self.state = 492 self.match(PyNestMLParser.NEWLINE) - self.state = 489 + self.state = 493 self.match(PyNestMLParser.INDENT) - self.state = 491 + self.state = 495 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 490 + self.state = 494 self.declaration_newline() - self.state = 493 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==16 or _la==29 or _la==88): + if not (_la==PyNestMLParser.INLINE_KEYWORD or _la==PyNestMLParser.RECORDABLE_KEYWORD or _la==PyNestMLParser.NAME): break - self.state = 495 + self.state = 499 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3693,11 +3726,11 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 497 + self.state = 501 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 498 + self.state = 502 self.match(PyNestMLParser.COLON) - self.state = 499 + self.state = 503 self.block() except RecognitionException as re: localctx.exception = re @@ -3770,43 +3803,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 501 + self.state = 505 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 502 + self.state = 506 self.match(PyNestMLParser.COLON) - self.state = 503 + self.state = 507 self.match(PyNestMLParser.NEWLINE) - self.state = 504 + self.state = 508 
self.match(PyNestMLParser.INDENT) - self.state = 508 + self.state = 512 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 508 + self.state = 512 self._errHandler.sync(self) token = self._input.LA(1) - if token in [16, 29]: - self.state = 505 + if token in [PyNestMLParser.INLINE_KEYWORD, PyNestMLParser.RECORDABLE_KEYWORD]: + self.state = 509 self.inlineExpression() pass - elif token in [88]: - self.state = 506 + elif token in [PyNestMLParser.NAME]: + self.state = 510 self.odeEquation() pass - elif token in [30]: - self.state = 507 + elif token in [PyNestMLParser.KERNEL_KEYWORD]: + self.state = 511 self.kernel() pass else: raise NoViableAltException(self) - self.state = 510 + self.state = 514 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==88): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INLINE_KEYWORD) | (1 << PyNestMLParser.RECORDABLE_KEYWORD) | (1 << PyNestMLParser.KERNEL_KEYWORD))) != 0) or _la==PyNestMLParser.NAME): break - self.state = 512 + self.state = 516 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3853,6 +3886,31 @@ def continuousInputPort(self, i:int=None): return self.getTypedRuleContext(PyNestMLParser.ContinuousInputPortContext,i) + def LEFT_PAREN(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.LEFT_PAREN) + else: + return self.getToken(PyNestMLParser.LEFT_PAREN, i) + + def RIGHT_PAREN(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.RIGHT_PAREN) + else: + return self.getToken(PyNestMLParser.RIGHT_PAREN, i) + + def parameter(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return 
self.getToken(PyNestMLParser.COMMA, i) + def getRuleIndex(self): return PyNestMLParser.RULE_inputBlock @@ -3872,39 +3930,69 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 514 + self.state = 518 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 515 + self.state = 519 self.match(PyNestMLParser.COLON) - self.state = 516 + self.state = 520 self.match(PyNestMLParser.NEWLINE) - self.state = 517 + self.state = 521 self.match(PyNestMLParser.INDENT) - self.state = 520 + self.state = 540 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 520 + self.state = 524 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,53,self._ctx) + la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: - self.state = 518 + self.state = 522 self.spikeInputPort() pass elif la_ == 2: - self.state = 519 + self.state = 523 self.continuousInputPort() pass - self.state = 522 + self.state = 538 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==88): + if _la==PyNestMLParser.LEFT_PAREN: + self.state = 526 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 535 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PyNestMLParser.NAME: + self.state = 527 + self.parameter() + self.state = 532 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PyNestMLParser.COMMA: + self.state = 528 + self.match(PyNestMLParser.COMMA) + self.state = 529 + self.parameter() + self.state = 534 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 537 + self.match(PyNestMLParser.RIGHT_PAREN) + + + self.state = 542 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not (_la==PyNestMLParser.NAME): break - self.state = 524 + self.state = 544 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3972,35 +4060,35 @@ def spikeInputPort(self): self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 526 + self.state = 546 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 531 + self.state = 551 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==56: - self.state = 527 + if _la==PyNestMLParser.LEFT_SQUARE_BRACKET: + self.state = 547 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 528 + self.state = 548 localctx.sizeParameter = self.expression(0) - self.state = 529 + self.state = 549 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 533 + self.state = 553 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 537 + self.state = 557 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==43 or _la==44: - self.state = 534 + while _la==PyNestMLParser.INHIBITORY_KEYWORD or _la==PyNestMLParser.EXCITATORY_KEYWORD: + self.state = 554 self.inputQualifier() - self.state = 539 + self.state = 559 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 540 + self.state = 560 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 541 + self.state = 561 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4065,27 +4153,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 543 + self.state = 563 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 548 + self.state = 568 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==56: - self.state = 544 + if _la==PyNestMLParser.LEFT_SQUARE_BRACKET: + self.state = 564 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 545 + self.state = 565 localctx.sizeParameter = self.expression(0) - self.state = 546 + self.state = 566 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 550 + self.state = 570 self.dataType() - self.state = 551 + self.state = 571 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 552 + self.state = 572 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) 
- self.state = 553 + self.state = 573 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4129,15 +4217,15 @@ def inputQualifier(self): self.enterRule(localctx, 84, self.RULE_inputQualifier) try: self.enterOuterAlt(localctx, 1) - self.state = 557 + self.state = 577 self._errHandler.sync(self) token = self._input.LA(1) - if token in [43]: - self.state = 555 + if token in [PyNestMLParser.INHIBITORY_KEYWORD]: + self.state = 575 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass - elif token in [44]: - self.state = 556 + elif token in [PyNestMLParser.EXCITATORY_KEYWORD]: + self.state = 576 localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass else: @@ -4160,6 +4248,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.parser = parser self.isSpike = None # Token self.isContinuous = None # Token + self.attribute = None # ParameterContext def OUTPUT_KEYWORD(self): return self.getToken(PyNestMLParser.OUTPUT_KEYWORD, 0) @@ -4185,6 +4274,25 @@ def SPIKE_KEYWORD(self): def CONTINUOUS_KEYWORD(self): return self.getToken(PyNestMLParser.CONTINUOUS_KEYWORD, 0) + def LEFT_PAREN(self): + return self.getToken(PyNestMLParser.LEFT_PAREN, 0) + + def RIGHT_PAREN(self): + return self.getToken(PyNestMLParser.RIGHT_PAREN, 0) + + def parameter(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return self.getToken(PyNestMLParser.COMMA, i) + def getRuleIndex(self): return PyNestMLParser.RULE_outputBlock @@ -4201,33 +4309,64 @@ def outputBlock(self): localctx = PyNestMLParser.OutputBlockContext(self, self._ctx, self.state) self.enterRule(localctx, 86, self.RULE_outputBlock) + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 
1) - self.state = 559 + self.state = 579 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 560 + self.state = 580 self.match(PyNestMLParser.COLON) - self.state = 561 + self.state = 581 self.match(PyNestMLParser.NEWLINE) - self.state = 562 + self.state = 582 self.match(PyNestMLParser.INDENT) - self.state = 565 + self.state = 585 self._errHandler.sync(self) token = self._input.LA(1) - if token in [42]: - self.state = 563 + if token in [PyNestMLParser.SPIKE_KEYWORD]: + self.state = 583 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass - elif token in [39]: - self.state = 564 + elif token in [PyNestMLParser.CONTINUOUS_KEYWORD]: + self.state = 584 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 567 + self.state = 599 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PyNestMLParser.LEFT_PAREN: + self.state = 587 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 596 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PyNestMLParser.NAME: + self.state = 588 + localctx.attribute = self.parameter() + self.state = 593 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PyNestMLParser.COMMA: + self.state = 589 + self.match(PyNestMLParser.COMMA) + self.state = 590 + localctx.attribute = self.parameter() + self.state = 595 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 598 + self.match(PyNestMLParser.RIGHT_PAREN) + + + self.state = 601 self.match(PyNestMLParser.NEWLINE) - self.state = 568 + self.state = 602 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4301,45 +4440,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 570 + self.state = 604 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 571 + self.state = 605 self.match(PyNestMLParser.NAME) - self.state = 572 + self.state = 606 
self.match(PyNestMLParser.LEFT_PAREN) - self.state = 581 + self.state = 615 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==88: - self.state = 573 + if _la==PyNestMLParser.NAME: + self.state = 607 self.parameter() - self.state = 578 + self.state = 612 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 574 + while _la==PyNestMLParser.COMMA: + self.state = 608 self.match(PyNestMLParser.COMMA) - self.state = 575 + self.state = 609 self.parameter() - self.state = 580 + self.state = 614 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 583 + self.state = 617 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 585 + self.state = 619 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==88 or _la==89: - self.state = 584 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INTEGER_KEYWORD) | (1 << PyNestMLParser.REAL_KEYWORD) | (1 << PyNestMLParser.STRING_KEYWORD) | (1 << PyNestMLParser.BOOLEAN_KEYWORD) | (1 << PyNestMLParser.VOID_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN))) != 0) or _la==PyNestMLParser.NAME or _la==PyNestMLParser.UNSIGNED_INTEGER: + self.state = 618 localctx.returnType = self.dataType() - self.state = 587 + self.state = 621 self.match(PyNestMLParser.COLON) - self.state = 588 + self.state = 622 self.block() except RecognitionException as re: localctx.exception = re @@ -4382,9 +4521,9 @@ def parameter(self): self.enterRule(localctx, 90, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 590 + self.state = 624 self.match(PyNestMLParser.NAME) - self.state = 591 + self.state = 625 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4444,14 +4583,14 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 593 + self.state = 627 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 594 + self.state = 628 
self.match(PyNestMLParser.EQUALS) - self.state = 595 + self.state = 629 localctx.value = self._input.LT(1) _la = self._input.LA(1) - if not(_la==25 or ((((_la - 86)) & ~0x3f) == 0 and ((1 << (_la - 86)) & 27) != 0)): + if not(_la==PyNestMLParser.INF_KEYWORD or ((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & ((1 << (PyNestMLParser.BOOLEAN_LITERAL - 87)) | (1 << (PyNestMLParser.STRING_LITERAL - 87)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 87)) | (1 << (PyNestMLParser.FLOAT - 87)))) != 0)): localctx.value = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index 4f9cbf508..b66a05306 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -1,6 +1,6 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.13.1 +# Generated from PyNestMLParser.g4 by ANTLR 4.10.1 from antlr4 import * -if "." in __name__: +if __name__ is not None and "." in __name__: from .PyNestMLParser import PyNestMLParser else: from PyNestMLParser import PyNestMLParser diff --git a/pynestml/grammars/PyNestMLLexer.g4 b/pynestml/grammars/PyNestMLLexer.g4 index c7255e807..b5e36be4f 100644 --- a/pynestml/grammars/PyNestMLLexer.g4 +++ b/pynestml/grammars/PyNestMLLexer.g4 @@ -138,6 +138,7 @@ lexer grammar PyNestMLLexer; DOUBLE_COLON : '::'; SEMICOLON : ';'; DIFFERENTIAL_ORDER : '\''; + FULLSTOP : '.'; /** @@ -174,8 +175,8 @@ lexer grammar PyNestMLLexer; FLOAT : POINT_FLOAT | EXPONENT_FLOAT; - fragment POINT_FLOAT : UNSIGNED_INTEGER? '.' UNSIGNED_INTEGER - | UNSIGNED_INTEGER '.' + fragment POINT_FLOAT : UNSIGNED_INTEGER? 
FULLSTOP UNSIGNED_INTEGER + | UNSIGNED_INTEGER FULLSTOP ; fragment EXPONENT_FLOAT: ( UNSIGNED_INTEGER | POINT_FLOAT ) [eE] EXPONENT ; diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 5d2af2d50..9a0618561 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -108,7 +108,8 @@ parser grammar PyNestMLParser; */ variable : name=NAME (LEFT_SQUARE_BRACKET vectorParameter=expression RIGHT_SQUARE_BRACKET)? - (DIFFERENTIAL_ORDER)*; + (DIFFERENTIAL_ORDER)* + (FULLSTOP attribute=variable)?; /** ASTFunctionCall Represents a function call, e.g. myFun("a", "b"). @@ -284,7 +285,7 @@ parser grammar PyNestMLParser; @attribute inputPort: A list of input ports. */ inputBlock: INPUT_KEYWORD COLON - NEWLINE INDENT (spikeInputPort | continuousInputPort)+ DEDENT; + NEWLINE INDENT ((spikeInputPort | continuousInputPort) (LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN)?)+ DEDENT; /** ASTInputPort represents a single input port, e.g.: spike_in[3] <- excitatory spike @@ -322,7 +323,9 @@ parser grammar PyNestMLParser; @attribute isContinuous: true if and only if the neuron has a continuous-time output. */ outputBlock: OUTPUT_KEYWORD COLON - NEWLINE INDENT (isSpike=SPIKE_KEYWORD | isContinuous=CONTINUOUS_KEYWORD) NEWLINE DEDENT; + NEWLINE INDENT (isSpike=SPIKE_KEYWORD | isContinuous=CONTINUOUS_KEYWORD) + (LEFT_PAREN (attribute=parameter (COMMA attribute=parameter)*)? RIGHT_PAREN)? 
+ NEWLINE DEDENT; /** ASTFunction A single declaration of a user-defined function definition: function set_V_m(v mV): diff --git a/pynestml/meta_model/ast_function_call.py b/pynestml/meta_model/ast_function_call.py index a078e188a..a07c3483a 100644 --- a/pynestml/meta_model/ast_function_call.py +++ b/pynestml/meta_model/ast_function_call.py @@ -22,6 +22,7 @@ from typing import List from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter class ASTFunctionCall(ASTNode): @@ -91,12 +92,14 @@ def has_args(self): """ return (self.args is not None) and len(self.args) > 0 - def get_args(self): + def get_args(self) -> List[ASTParameter]: """ Returns the list of arguments. :return: the list of arguments. - :rtype: list(ASTExpression) """ + if self.args is None: + return [] + return self.args def get_children(self) -> List[ASTNode]: diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index da3986be9..781d75f9e 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -19,7 +19,7 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
-from typing import Optional, Union +from typing import List, Optional, Union from pynestml.meta_model.ast_arithmetic_operator import ASTArithmeticOperator from pynestml.meta_model.ast_assignment import ASTAssignment @@ -295,9 +295,8 @@ def create_ast_kernel(cls, variables=None, expressions=None, source_position=Non return ASTKernel(variables, expressions, source_position=source_position) @classmethod - def create_ast_output_block(cls, s_type, source_position): - # type: (PortSignalType,ASTSourceLocation) -> ASTOutputBlock - return ASTOutputBlock(s_type, source_position=source_position) + def create_ast_output_block(cls, s_type: PortSignalType, attributes: Optional[List[ASTParameter]] = None, source_position: ASTSourceLocation = None) -> ASTOutputBlock: + return ASTOutputBlock(s_type, attributes=attributes, source_position=source_position) @classmethod def create_ast_parameter(cls, name, data_type, source_position): diff --git a/pynestml/meta_model/ast_output_block.py b/pynestml/meta_model/ast_output_block.py index 66a61f71d..33cf33245 100644 --- a/pynestml/meta_model/ast_output_block.py +++ b/pynestml/meta_model/ast_output_block.py @@ -19,9 +19,10 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -from typing import List +from typing import List, Optional from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.utils.port_signal_type import PortSignalType @@ -39,7 +40,7 @@ class ASTOutputBlock(ASTNode): type = None """ - def __init__(self, o_type, *args, **kwargs): + def __init__(self, o_type, attributes: Optional[List[ASTParameter]], *args, **kwargs): """ Standard constructor. 
@@ -51,6 +52,7 @@ def __init__(self, o_type, *args, **kwargs): assert isinstance(o_type, PortSignalType) super(ASTOutputBlock, self).__init__(*args, **kwargs) self.type = o_type + self.attributes = attributes def clone(self): """ @@ -60,6 +62,7 @@ def clone(self): :rtype: ASTOutputBlock """ dup = ASTOutputBlock(o_type=self.type, + attributes=self.attributes, # ASTNode common attributes: source_position=self.source_position, scope=self.scope, @@ -84,6 +87,16 @@ def is_continuous(self) -> bool: """ return self.type is PortSignalType.CONTINUOUS + def get_attributes(self) -> List[ASTParameter]: + r""" + Returns the attributes of this node, if any. + :return: List of attributes of this node. + """ + if self.attributes is None: + return [] + + return self.attributes + def get_children(self) -> List[ASTNode]: r""" Returns the children of this node, if any. @@ -98,4 +111,11 @@ def equals(self, other: ASTNode) -> bool: if not isinstance(other, ASTOutputBlock): return False + if len(self.get_attributes()) != len(other.get_attributes()): + return False + + for attribute_self, attribute_other in zip(self.get_attributes(), other.get_attributes()): + if not attribute_self.equals(attribute_other): + return False + return self.is_spike() == other.is_spike() and self.is_continuous() == other.is_continuous() diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index bb8914dd6..22f9242cd 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -139,6 +139,7 @@ class MessageCode(Enum): TIMESTEP_FUNCTION_LEGALLY_USED = 113 RANDOM_FUNCTIONS_LEGALLY_USED = 113 EXPONENT_MUST_BE_INTEGER = 114 + EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS = 115 class Messages: @@ -1073,6 +1074,14 @@ def get_emit_spike_function_but_no_output_port(cls): message = 'emit_spike() function was called, but no spiking output port has been defined!' 
return MessageCode.EMIT_SPIKE_FUNCTION_BUT_NO_OUTPUT_PORT, message + @classmethod + def get_output_port_type_differs(cls) -> Tuple[MessageCode, str]: + """ + Indicates that an emit_spike() function was called, but with different parameter types than the output port was defined with. + """ + message = 'emit_spike() function was called, but with different parameter types than the output port was defined with!' + return MessageCode.EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS, message + @classmethod def get_kernel_wrong_type(cls, kernel_name: str, diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index bfc4dd902..02a9bd396 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -644,13 +644,21 @@ def visitInputQualifier(self, ctx): # Visit a parse tree produced by PyNESTMLParser#outputBuffer. def visitOutputBlock(self, ctx): source_pos = create_source_pos(ctx) + attributes: List[ASTParameter] = [] + if ctx.parameter() is not None: + if type(ctx.parameter()) is list: + for par in ctx.parameter(): + attributes.append(self.visit(par)) + else: + attributes.append(self.visit(ctx.parameter())) + if ctx.isSpike is not None: - ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.SPIKE, source_position=source_pos) + ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.SPIKE, attributes=attributes, source_position=source_pos) update_node_comments(ret, self.__comments.visit(ctx)) return ret if ctx.isContinuous is not None: - ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.CONTINUOUS, source_position=source_pos) + ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.CONTINUOUS, attributes=attributes, source_position=source_pos) update_node_comments(ret, self.__comments.visit(ctx)) return ret diff --git a/tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml b/tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml new file mode 100644 index 
000000000..c8ddcbdc6 --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml @@ -0,0 +1,37 @@ +""" +CoCoOutputPortTypeIfEmitCall.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model output_port_types_neuron: + output: + spike(foo real) + + update: + emit_spike() diff --git a/tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml b/tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml new file mode 100644 index 000000000..0e3c5a26f --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml @@ -0,0 +1,37 @@ +""" +CoCoOutputPortTypeIfEmitCall.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. 
+ +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model output_port_types_neuron: + output: + spike(foo real) + + update: + emit_spike(1 ms) diff --git a/tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml b/tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml new file mode 100644 index 000000000..d9fc459f0 --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml @@ -0,0 +1,37 @@ +""" +CoCoOutputPortTypeIfEmitCall.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" +model output_port_types_neuron: + output: + spike + + update: + emit_spike(1 ms) diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 731fb8d8a..20b8cd369 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -115,7 +115,7 @@ def test_valid_function_unique_and_defined(self): def test_invalid_inline_expressions_have_rhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionHasNoRhs.nestml')) - assert model is None + assert model is None # parse error def test_valid_inline_expressions_have_rhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionHasNoRhs.nestml')) @@ -123,7 +123,7 @@ def test_valid_inline_expressions_have_rhs(self): def test_invalid_inline_expression_has_several_lhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) - assert model is None + assert model is None # parse error def test_valid_inline_expression_has_several_lhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) @@ -324,6 +324,23 @@ def test_valid_output_port_defined_if_emit_call(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_output_port_type_if_emit_call(self): + """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_invalid_output_port_type_if_emit_call_2(self): + """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-2.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_valid_output_port_type_if_emit_call(self): + """test that no error (but a warning) is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-3.nestml')) + assert model is not None + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 + def test_valid_coco_kernel_type(self): """ Test the functionality of CoCoKernelType. @@ -388,10 +405,7 @@ def _parse_and_validate_model(self, fname: str) -> Optional[str]: Logger.init_logger(LoggingLevel.DEBUG) - try: - generate_target(input_path=fname, target_platform="NONE", logging_level="DEBUG") - except BaseException: - return None + generate_target(input_path=fname, target_platform="NONE", logging_level="DEBUG") ast_compilation_unit = ModelParser.parse_file(fname) if ast_compilation_unit is None or len(ast_compilation_unit.get_model_list()) == 0: From cd490c740476d170eb9e25bdc3f801c77989d5ec Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 17 Oct 2024 11:32:41 +0200 Subject: [PATCH 02/68] add explicit output parameters to spiking output port --- pynestml/generated/PyNestMLLexer.py | 6 +- pynestml/generated/PyNestMLParser.py | 228 ++++++++++---------- pynestml/generated/PyNestMLParserVisitor.py | 4 +- 3 files changed, 119 insertions(+), 119 deletions(-) diff --git a/pynestml/generated/PyNestMLLexer.py b/pynestml/generated/PyNestMLLexer.py index e1fda7af4..fe99b1dd7 100644 --- a/pynestml/generated/PyNestMLLexer.py +++ b/pynestml/generated/PyNestMLLexer.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLLexer.g4 by ANTLR 4.10.1 +# Generated from PyNestMLLexer.g4 by ANTLR 4.13.2 from antlr4 import * from io import StringIO import sys @@ -8,7 +8,7 @@ from typing.io import TextIO -if __name__ is not None and "." in __name__: +if "." in __name__: from .PyNestMLLexerBase import PyNestMLLexerBase else: from PyNestMLLexerBase import PyNestMLLexerBase @@ -441,7 +441,7 @@ class PyNestMLLexer(PyNestMLLexerBase): def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.10.1") + self.checkVersion("4.13.2") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index ccb7bfe00..9fb849705 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.10.1 +# Generated from PyNestMLParser.g4 by ANTLR 4.13.2 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -456,7 +456,7 @@ class PyNestMLParser ( Parser ): def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.10.1") + self.checkVersion("4.13.2") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) 
self._predicates = None @@ -515,32 +515,32 @@ def dataType(self): self.state = 100 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.INTEGER_KEYWORD]: + if token in [10]: self.enterOuterAlt(localctx, 1) self.state = 94 localctx.isInt = self.match(PyNestMLParser.INTEGER_KEYWORD) pass - elif token in [PyNestMLParser.REAL_KEYWORD]: + elif token in [11]: self.enterOuterAlt(localctx, 2) self.state = 95 localctx.isReal = self.match(PyNestMLParser.REAL_KEYWORD) pass - elif token in [PyNestMLParser.STRING_KEYWORD]: + elif token in [12]: self.enterOuterAlt(localctx, 3) self.state = 96 localctx.isString = self.match(PyNestMLParser.STRING_KEYWORD) pass - elif token in [PyNestMLParser.BOOLEAN_KEYWORD]: + elif token in [13]: self.enterOuterAlt(localctx, 4) self.state = 97 localctx.isBool = self.match(PyNestMLParser.BOOLEAN_KEYWORD) pass - elif token in [PyNestMLParser.VOID_KEYWORD]: + elif token in [14]: self.enterOuterAlt(localctx, 5) self.state = 98 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass - elif token in [PyNestMLParser.LEFT_PAREN, PyNestMLParser.NAME, PyNestMLParser.UNSIGNED_INTEGER]: + elif token in [49, 89, 90]: self.enterOuterAlt(localctx, 6) self.state = 99 localctx.unit = self.unitType(0) @@ -631,7 +631,7 @@ def unitType(self, _p:int=0): self.state = 111 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.LEFT_PAREN]: + if token in [49]: self.state = 103 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) self.state = 104 @@ -639,7 +639,7 @@ def unitType(self, _p:int=0): self.state = 105 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [PyNestMLParser.UNSIGNED_INTEGER]: + elif token in [90]: self.state = 107 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) self.state = 108 @@ -647,7 +647,7 @@ def unitType(self, _p:int=0): self.state = 109 localctx.right = self.unitType(2) pass - elif token in 
[PyNestMLParser.NAME]: + elif token in [89]: self.state = 110 localctx.unit = self.match(PyNestMLParser.NAME) pass @@ -677,11 +677,11 @@ def unitType(self, _p:int=0): self.state = 116 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.STAR]: + if token in [77]: self.state = 114 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [PyNestMLParser.FORWARD_SLASH]: + elif token in [79]: self.state = 115 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass @@ -758,10 +758,10 @@ def unitTypeExponent(self): self.state = 128 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.PLUS or _la==PyNestMLParser.MINUS: + if _la==51 or _la==75: self.state = 127 _la = self._input.LA(1) - if not(_la==PyNestMLParser.PLUS or _la==PyNestMLParser.MINUS): + if not(_la==51 or _la==75): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -884,7 +884,7 @@ def expression(self, _p:int=0): self.state = 143 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.LEFT_PAREN]: + if token in [49]: self.state = 133 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) self.state = 134 @@ -892,19 +892,19 @@ def expression(self, _p:int=0): self.state = 135 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [PyNestMLParser.PLUS, PyNestMLParser.TILDE, PyNestMLParser.MINUS]: + elif token in [51, 52, 75]: self.state = 137 self.unaryOperator() self.state = 138 localctx.term = self.expression(9) pass - elif token in [PyNestMLParser.NOT_KEYWORD]: + elif token in [28]: self.state = 140 localctx.logicalNot = self.match(PyNestMLParser.NOT_KEYWORD) self.state = 141 localctx.term = self.expression(4) pass - elif token in [PyNestMLParser.INF_KEYWORD, PyNestMLParser.BOOLEAN_LITERAL, PyNestMLParser.STRING_LITERAL, PyNestMLParser.NAME, PyNestMLParser.UNSIGNED_INTEGER, PyNestMLParser.FLOAT]: + elif token in [25, 87, 88, 89, 90, 
91]: self.state = 142 self.simpleExpression() pass @@ -948,15 +948,15 @@ def expression(self, _p:int=0): self.state = 152 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.STAR]: + if token in [77]: self.state = 149 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [PyNestMLParser.FORWARD_SLASH]: + elif token in [79]: self.state = 150 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass - elif token in [PyNestMLParser.PERCENT]: + elif token in [80]: self.state = 151 localctx.moduloOp = self.match(PyNestMLParser.PERCENT) pass @@ -978,11 +978,11 @@ def expression(self, _p:int=0): self.state = 158 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.PLUS]: + if token in [51]: self.state = 156 localctx.plusOp = self.match(PyNestMLParser.PLUS) pass - elif token in [PyNestMLParser.MINUS]: + elif token in [75]: self.state = 157 localctx.minusOp = self.match(PyNestMLParser.MINUS) pass @@ -1136,7 +1136,7 @@ def simpleExpression(self): self.enterOuterAlt(localctx, 3) self.state = 186 _la = self._input.LA(1) - if not(_la==PyNestMLParser.UNSIGNED_INTEGER or _la==PyNestMLParser.FLOAT): + if not(_la==90 or _la==91): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1219,15 +1219,15 @@ def unaryOperator(self): self.state = 198 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.PLUS]: + if token in [51]: self.state = 195 localctx.unaryPlus = self.match(PyNestMLParser.PLUS) pass - elif token in [PyNestMLParser.MINUS]: + elif token in [75]: self.state = 196 localctx.unaryMinus = self.match(PyNestMLParser.MINUS) pass - elif token in [PyNestMLParser.TILDE]: + elif token in [52]: self.state = 197 localctx.unaryTilde = self.match(PyNestMLParser.TILDE) pass @@ -1291,23 +1291,23 @@ def bitOperator(self): self.state = 205 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.AMPERSAND]: + if token in 
[55]: self.state = 200 localctx.bitAnd = self.match(PyNestMLParser.AMPERSAND) pass - elif token in [PyNestMLParser.CARET]: + elif token in [54]: self.state = 201 localctx.bitXor = self.match(PyNestMLParser.CARET) pass - elif token in [PyNestMLParser.PIPE]: + elif token in [53]: self.state = 202 localctx.bitOr = self.match(PyNestMLParser.PIPE) pass - elif token in [PyNestMLParser.LEFT_LEFT_ANGLE]: + elif token in [61]: self.state = 203 localctx.bitShiftLeft = self.match(PyNestMLParser.LEFT_LEFT_ANGLE) pass - elif token in [PyNestMLParser.RIGHT_RIGHT_ANGLE]: + elif token in [62]: self.state = 204 localctx.bitShiftRight = self.match(PyNestMLParser.RIGHT_RIGHT_ANGLE) pass @@ -1379,31 +1379,31 @@ def comparisonOperator(self): self.state = 214 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.LEFT_ANGLE]: + if token in [63]: self.state = 207 localctx.lt = self.match(PyNestMLParser.LEFT_ANGLE) pass - elif token in [PyNestMLParser.LEFT_ANGLE_EQUALS]: + elif token in [65]: self.state = 208 localctx.le = self.match(PyNestMLParser.LEFT_ANGLE_EQUALS) pass - elif token in [PyNestMLParser.EQUALS_EQUALS]: + elif token in [70]: self.state = 209 localctx.eq = self.match(PyNestMLParser.EQUALS_EQUALS) pass - elif token in [PyNestMLParser.EXCLAMATION_EQUALS]: + elif token in [71]: self.state = 210 localctx.ne = self.match(PyNestMLParser.EXCLAMATION_EQUALS) pass - elif token in [PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE]: + elif token in [72]: self.state = 211 localctx.ne2 = self.match(PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE) pass - elif token in [PyNestMLParser.RIGHT_ANGLE_EQUALS]: + elif token in [73]: self.state = 212 localctx.ge = self.match(PyNestMLParser.RIGHT_ANGLE_EQUALS) pass - elif token in [PyNestMLParser.RIGHT_ANGLE]: + elif token in [64]: self.state = 213 localctx.gt = self.match(PyNestMLParser.RIGHT_ANGLE) pass @@ -1455,11 +1455,11 @@ def logicalOperator(self): self.state = 218 self._errHandler.sync(self) token = self._input.LA(1) - if token in 
[PyNestMLParser.AND_KEYWORD]: + if token in [26]: self.state = 216 localctx.logicalAnd = self.match(PyNestMLParser.AND_KEYWORD) pass - elif token in [PyNestMLParser.OR_KEYWORD]: + elif token in [27]: self.state = 217 localctx.logicalOr = self.match(PyNestMLParser.OR_KEYWORD) pass @@ -1629,13 +1629,13 @@ def functionCall(self): self.state = 247 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INF_KEYWORD) | (1 << PyNestMLParser.NOT_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN) | (1 << PyNestMLParser.PLUS) | (1 << PyNestMLParser.TILDE))) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (PyNestMLParser.MINUS - 75)) | (1 << (PyNestMLParser.BOOLEAN_LITERAL - 75)) | (1 << (PyNestMLParser.STRING_LITERAL - 75)) | (1 << (PyNestMLParser.NAME - 75)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 75)) | (1 << (PyNestMLParser.FLOAT - 75)))) != 0): + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): self.state = 239 self.expression(0) self.state = 244 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 240 self.match(PyNestMLParser.COMMA) self.state = 241 @@ -1722,7 +1722,7 @@ def inlineExpression(self): self.state = 252 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.RECORDABLE_KEYWORD: + if _la==29: self.state = 251 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) @@ -1740,7 +1740,7 @@ def inlineExpression(self): self.state = 260 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.SEMICOLON: + if _la==84: self.state = 259 self.match(PyNestMLParser.SEMICOLON) @@ -1748,7 +1748,7 @@ def inlineExpression(self): self.state = 265 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 
<< PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): self.state = 262 localctx.decorator = self.anyDecorator() self.state = 267 @@ -1828,7 +1828,7 @@ def odeEquation(self): self.state = 274 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.SEMICOLON: + if _la==84: self.state = 273 self.match(PyNestMLParser.SEMICOLON) @@ -1836,7 +1836,7 @@ def odeEquation(self): self.state = 279 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): self.state = 276 localctx.decorator = self.anyDecorator() self.state = 281 @@ -1926,7 +1926,7 @@ def kernel(self): self.state = 295 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.KERNEL_JOINING: + while _la==4: self.state = 288 self.match(PyNestMLParser.KERNEL_JOINING) self.state = 289 @@ -1942,7 +1942,7 @@ def kernel(self): self.state = 299 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.SEMICOLON: + if _la==84: self.state = 298 self.match(PyNestMLParser.SEMICOLON) @@ -2013,7 +2013,7 @@ def block(self): self.state = 308 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INLINE_KEYWORD) | (1 << PyNestMLParser.RETURN_KEYWORD) | (1 << PyNestMLParser.IF_KEYWORD) | (1 << PyNestMLParser.FOR_KEYWORD) | (1 << PyNestMLParser.WHILE_KEYWORD) | (1 << PyNestMLParser.RECORDABLE_KEYWORD))) != 0) or _la==PyNestMLParser.NAME): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==89): break self.state = 310 @@ -2062,12 +2062,12 @@ def stmt(self): self.state = 314 self._errHandler.sync(self) token = self._input.LA(1) - if token in 
[PyNestMLParser.INLINE_KEYWORD, PyNestMLParser.RETURN_KEYWORD, PyNestMLParser.RECORDABLE_KEYWORD, PyNestMLParser.NAME]: + if token in [16, 17, 29, 89]: self.enterOuterAlt(localctx, 1) self.state = 312 self.smallStmt() pass - elif token in [PyNestMLParser.IF_KEYWORD, PyNestMLParser.FOR_KEYWORD, PyNestMLParser.WHILE_KEYWORD]: + elif token in [18, 21, 22]: self.enterOuterAlt(localctx, 2) self.state = 313 self.compoundStmt() @@ -2123,17 +2123,17 @@ def compoundStmt(self): self.state = 319 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.IF_KEYWORD]: + if token in [18]: self.enterOuterAlt(localctx, 1) self.state = 316 self.ifStmt() pass - elif token in [PyNestMLParser.FOR_KEYWORD]: + elif token in [21]: self.enterOuterAlt(localctx, 2) self.state = 317 self.forStmt() pass - elif token in [PyNestMLParser.WHILE_KEYWORD]: + elif token in [22]: self.enterOuterAlt(localctx, 3) self.state = 318 self.whileStmt() @@ -2288,23 +2288,23 @@ def assignment(self): self.state = 335 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.EQUALS]: + if token in [76]: self.state = 330 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass - elif token in [PyNestMLParser.PLUS_EQUALS]: + elif token in [66]: self.state = 331 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass - elif token in [PyNestMLParser.MINUS_EQUALS]: + elif token in [67]: self.state = 332 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass - elif token in [PyNestMLParser.STAR_EQUALS]: + elif token in [68]: self.state = 333 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass - elif token in [PyNestMLParser.FORWARD_SLASH_EQUALS]: + elif token in [69]: self.state = 334 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass @@ -2402,7 +2402,7 @@ def declaration(self): self.state = 340 self._errHandler.sync(self) _la = self._input.LA(1) - if 
_la==PyNestMLParser.RECORDABLE_KEYWORD: + if _la==29: self.state = 339 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) @@ -2410,7 +2410,7 @@ def declaration(self): self.state = 343 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.INLINE_KEYWORD: + if _la==16: self.state = 342 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) @@ -2420,7 +2420,7 @@ def declaration(self): self.state = 350 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 346 self.match(PyNestMLParser.COMMA) self.state = 347 @@ -2434,7 +2434,7 @@ def declaration(self): self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.EQUALS: + if _la==76: self.state = 354 self.match(PyNestMLParser.EQUALS) self.state = 355 @@ -2444,7 +2444,7 @@ def declaration(self): self.state = 362 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.LEFT_LEFT_SQUARE: + if _la==59: self.state = 358 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) self.state = 359 @@ -2456,7 +2456,7 @@ def declaration(self): self.state = 367 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.DECORATOR_HOMOGENEOUS) | (1 << PyNestMLParser.DECORATOR_HETEROGENEOUS) | (1 << PyNestMLParser.AT))) != 0): + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): self.state = 364 localctx.decorator = self.anyDecorator() self.state = 369 @@ -2564,17 +2564,17 @@ def anyDecorator(self): self.state = 380 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.DECORATOR_HOMOGENEOUS]: + if token in [45]: self.enterOuterAlt(localctx, 1) self.state = 373 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass - elif token in [PyNestMLParser.DECORATOR_HETEROGENEOUS]: + elif token in [46]: self.enterOuterAlt(localctx, 2) self.state = 374 
self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass - elif token in [PyNestMLParser.AT]: + elif token in [47]: self.enterOuterAlt(localctx, 3) self.state = 375 self.match(PyNestMLParser.AT) @@ -2715,7 +2715,7 @@ def returnStmt(self): self.state = 388 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INF_KEYWORD) | (1 << PyNestMLParser.NOT_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN) | (1 << PyNestMLParser.PLUS) | (1 << PyNestMLParser.TILDE))) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & ((1 << (PyNestMLParser.MINUS - 75)) | (1 << (PyNestMLParser.BOOLEAN_LITERAL - 75)) | (1 << (PyNestMLParser.STRING_LITERAL - 75)) | (1 << (PyNestMLParser.NAME - 75)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 75)) | (1 << (PyNestMLParser.FLOAT - 75)))) != 0): + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): self.state = 387 self.expression(0) @@ -2775,7 +2775,7 @@ def ifStmt(self): self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.ELIF_KEYWORD: + while _la==19: self.state = 391 self.elifClause() self.state = 396 @@ -2785,7 +2785,7 @@ def ifStmt(self): self.state = 398 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.ELSE_KEYWORD: + if _la==20: self.state = 397 self.elseClause() @@ -3047,14 +3047,14 @@ def forStmt(self): self.state = 422 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.MINUS: + if _la==75: self.state = 421 localctx.negative = self.match(PyNestMLParser.MINUS) self.state = 424 _la = self._input.LA(1) - if not(_la==PyNestMLParser.UNSIGNED_INTEGER or _la==PyNestMLParser.FLOAT): + if not(_la==90 or _la==91): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -3177,11 +3177,11 @@ def nestMLCompilationUnit(self): self.state = 435 self._errHandler.sync(self) token = 
self._input.LA(1) - if token in [PyNestMLParser.MODEL_KEYWORD]: + if token in [31]: self.state = 433 self.model() pass - elif token in [PyNestMLParser.NEWLINE]: + elif token in [9]: self.state = 434 self.match(PyNestMLParser.NEWLINE) pass @@ -3191,7 +3191,7 @@ def nestMLCompilationUnit(self): self.state = 437 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==PyNestMLParser.NEWLINE or _la==PyNestMLParser.MODEL_KEYWORD): + if not (_la==9 or _la==31): break self.state = 439 @@ -3362,35 +3362,35 @@ def modelBody(self): self.state = 456 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.STATE_KEYWORD, PyNestMLParser.PARAMETERS_KEYWORD, PyNestMLParser.INTERNALS_KEYWORD]: + if token in [32, 33, 34]: self.state = 448 self.blockWithVariables() pass - elif token in [PyNestMLParser.EQUATIONS_KEYWORD]: + elif token in [36]: self.state = 449 self.equationsBlock() pass - elif token in [PyNestMLParser.INPUT_KEYWORD]: + elif token in [37]: self.state = 450 self.inputBlock() pass - elif token in [PyNestMLParser.OUTPUT_KEYWORD]: + elif token in [38]: self.state = 451 self.outputBlock() pass - elif token in [PyNestMLParser.FUNCTION_KEYWORD]: + elif token in [15]: self.state = 452 self.function() pass - elif token in [PyNestMLParser.ON_RECEIVE_KEYWORD]: + elif token in [40]: self.state = 453 self.onReceiveBlock() pass - elif token in [PyNestMLParser.ON_CONDITION_KEYWORD]: + elif token in [41]: self.state = 454 self.onConditionBlock() pass - elif token in [PyNestMLParser.UPDATE_KEYWORD]: + elif token in [35]: self.state = 455 self.updateBlock() pass @@ -3400,7 +3400,7 @@ def modelBody(self): self.state = 458 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.FUNCTION_KEYWORD) | (1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD) | (1 << PyNestMLParser.UPDATE_KEYWORD) | (1 << 
PyNestMLParser.EQUATIONS_KEYWORD) | (1 << PyNestMLParser.INPUT_KEYWORD) | (1 << PyNestMLParser.OUTPUT_KEYWORD) | (1 << PyNestMLParser.ON_RECEIVE_KEYWORD) | (1 << PyNestMLParser.ON_CONDITION_KEYWORD))) != 0)): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): break self.state = 460 @@ -3482,7 +3482,7 @@ def onReceiveBlock(self): self.state = 469 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 465 self.match(PyNestMLParser.COMMA) self.state = 466 @@ -3575,7 +3575,7 @@ def onConditionBlock(self): self.state = 483 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 479 self.match(PyNestMLParser.COMMA) self.state = 480 @@ -3657,7 +3657,7 @@ def blockWithVariables(self): self.state = 490 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.STATE_KEYWORD) | (1 << PyNestMLParser.PARAMETERS_KEYWORD) | (1 << PyNestMLParser.INTERNALS_KEYWORD))) != 0)): + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): localctx.blockType = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -3677,7 +3677,7 @@ def blockWithVariables(self): self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==PyNestMLParser.INLINE_KEYWORD or _la==PyNestMLParser.RECORDABLE_KEYWORD or _la==PyNestMLParser.NAME): + if not (_la==16 or _la==29 or _la==89): break self.state = 499 @@ -3818,15 +3818,15 @@ def equationsBlock(self): self.state = 512 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.INLINE_KEYWORD, PyNestMLParser.RECORDABLE_KEYWORD]: + if token in [16, 29]: self.state = 509 self.inlineExpression() pass - elif token in [PyNestMLParser.NAME]: + elif token in [89]: self.state = 510 self.odeEquation() pass - elif token in [PyNestMLParser.KERNEL_KEYWORD]: + elif 
token in [30]: self.state = 511 self.kernel() pass @@ -3836,7 +3836,7 @@ def equationsBlock(self): self.state = 514 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INLINE_KEYWORD) | (1 << PyNestMLParser.RECORDABLE_KEYWORD) | (1 << PyNestMLParser.KERNEL_KEYWORD))) != 0) or _la==PyNestMLParser.NAME): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==89): break self.state = 516 @@ -3959,19 +3959,19 @@ def inputBlock(self): self.state = 538 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.LEFT_PAREN: + if _la==49: self.state = 526 self.match(PyNestMLParser.LEFT_PAREN) self.state = 535 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.NAME: + if _la==89: self.state = 527 self.parameter() self.state = 532 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 528 self.match(PyNestMLParser.COMMA) self.state = 529 @@ -3989,7 +3989,7 @@ def inputBlock(self): self.state = 542 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==PyNestMLParser.NAME): + if not (_la==89): break self.state = 544 @@ -4065,7 +4065,7 @@ def spikeInputPort(self): self.state = 551 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.LEFT_SQUARE_BRACKET: + if _la==56: self.state = 547 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) self.state = 548 @@ -4079,7 +4079,7 @@ def spikeInputPort(self): self.state = 557 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.INHIBITORY_KEYWORD or _la==PyNestMLParser.EXCITATORY_KEYWORD: + while _la==43 or _la==44: self.state = 554 self.inputQualifier() self.state = 559 @@ -4158,7 +4158,7 @@ def continuousInputPort(self): self.state = 568 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.LEFT_SQUARE_BRACKET: + if _la==56: self.state = 564 
self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) self.state = 565 @@ -4220,11 +4220,11 @@ def inputQualifier(self): self.state = 577 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.INHIBITORY_KEYWORD]: + if token in [43]: self.state = 575 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass - elif token in [PyNestMLParser.EXCITATORY_KEYWORD]: + elif token in [44]: self.state = 576 localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass @@ -4323,11 +4323,11 @@ def outputBlock(self): self.state = 585 self._errHandler.sync(self) token = self._input.LA(1) - if token in [PyNestMLParser.SPIKE_KEYWORD]: + if token in [42]: self.state = 583 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass - elif token in [PyNestMLParser.CONTINUOUS_KEYWORD]: + elif token in [39]: self.state = 584 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass @@ -4337,19 +4337,19 @@ def outputBlock(self): self.state = 599 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.LEFT_PAREN: + if _la==49: self.state = 587 self.match(PyNestMLParser.LEFT_PAREN) self.state = 596 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.NAME: + if _la==89: self.state = 588 localctx.attribute = self.parameter() self.state = 593 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 589 self.match(PyNestMLParser.COMMA) self.state = 590 @@ -4449,13 +4449,13 @@ def function(self): self.state = 615 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PyNestMLParser.NAME: + if _la==89: self.state = 607 self.parameter() self.state = 612 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PyNestMLParser.COMMA: + while _la==74: self.state = 608 self.match(PyNestMLParser.COMMA) self.state = 609 @@ -4471,7 +4471,7 @@ def function(self): self.state = 619 self._errHandler.sync(self) _la = 
self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PyNestMLParser.INTEGER_KEYWORD) | (1 << PyNestMLParser.REAL_KEYWORD) | (1 << PyNestMLParser.STRING_KEYWORD) | (1 << PyNestMLParser.BOOLEAN_KEYWORD) | (1 << PyNestMLParser.VOID_KEYWORD) | (1 << PyNestMLParser.LEFT_PAREN))) != 0) or _la==PyNestMLParser.NAME or _la==PyNestMLParser.UNSIGNED_INTEGER: + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==89 or _la==90: self.state = 618 localctx.returnType = self.dataType() @@ -4590,7 +4590,7 @@ def constParameter(self): self.state = 629 localctx.value = self._input.LT(1) _la = self._input.LA(1) - if not(_la==PyNestMLParser.INF_KEYWORD or ((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & ((1 << (PyNestMLParser.BOOLEAN_LITERAL - 87)) | (1 << (PyNestMLParser.STRING_LITERAL - 87)) | (1 << (PyNestMLParser.UNSIGNED_INTEGER - 87)) | (1 << (PyNestMLParser.FLOAT - 87)))) != 0)): + if not(_la==25 or ((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & 27) != 0)): localctx.value = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index b66a05306..dc4499256 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -1,6 +1,6 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.10.1 +# Generated from PyNestMLParser.g4 by ANTLR 4.13.2 from antlr4 import * -if __name__ is not None and "." in __name__: +if "." in __name__: from .PyNestMLParser import PyNestMLParser else: from PyNestMLParser import PyNestMLParser From ae9ffa76abfccd020a919034a9061bc3e6087e72 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sun, 20 Oct 2024 01:12:47 +0200 Subject: [PATCH 03/68] add explicit output parameters to spiking output port --- .../nestml_language_concepts.rst | 22 ++++++++ doc/running/running_nest.rst | 8 +++ .../co_co_no_attributes_on_continuous_port.py | 56 +++++++++++++++++++ .../co_co_output_port_defined_if_emit_call.py | 9 ++- pynestml/cocos/co_cos_manager.py | 10 ++++ pynestml/utils/messages.py | 6 ++ .../CoCoOutputPortTypeContinuous.nestml | 34 +++++++++++ .../delay_test_assigned_delay2_synapse.nestml | 2 +- .../delay_test_assigned_delay_synapse.nestml | 2 +- .../delay_test_assigned_synapse.nestml | 2 +- .../delay_test_plastic_synapse.nestml | 2 +- .../resources/delay_test_synapse.nestml | 2 +- .../dopa_second_order_synapse.nestml | 2 +- .../homogeneous_parameters_synapse.nestml | 2 +- .../random_functions_illegal_synapse.nestml | 2 +- .../test_plasticity_dynamics_synapse.nestml | 2 +- .../weight_test_assigned_synapse.nestml | 2 +- .../weight_test_plastic_synapse.nestml | 2 +- tests/test_cocos.py | 6 ++ 19 files changed, 157 insertions(+), 16 deletions(-) create mode 100644 pynestml/cocos/co_co_no_attributes_on_continuous_port.py create mode 100644 tests/invalid/CoCoOutputPortTypeContinuous.nestml diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index a76e08bc8..826842136 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -907,6 +907,28 @@ Each model can only send a single type of event. The type of the event has to be Calling the ``emit_spike()`` function in the ``update`` block results in firing a spike to all target neurons and devices time stamped with the simulation time at the end of the time interval ``t + timestep()``. +Event attributes +~~~~~~~~~~~~~~~~ + +Each spiking output event can be parameterised by one or more attributes. 
For example, a synapse could assign a weight (as a real number) and delay (in milliseconds) to its spike events by including these values in the call to ``emit_spike()``: + +.. code-block:: nestml + + parameters: + weight real = 10. + + update: + emit_spike(w, 1 ms) + +If spike event attributes are used, their names and types must be given as part the output port specification. The names are only used externally, for other models making reference to this one (it is allowed to have a state variable called ``weight`` and an output port attribute by the same name). + +.. code-block:: nestml + + output: + spike(weight real, delay ms) + +Specific code generators may support a specific set of attributes; please check the documentation of each individual code generator for more details. + Equations --------- diff --git a/doc/running/running_nest.rst b/doc/running/running_nest.rst index bb40a63aa..f6d53dfc9 100644 --- a/doc/running/running_nest.rst +++ b/doc/running/running_nest.rst @@ -182,6 +182,14 @@ For a full example, please see `iaf_psc_exp_multisynapse_vectors.nestml . + +from typing import Optional + +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_function_call import ASTFunctionCall +from pynestml.meta_model.ast_model import ASTModel +from pynestml.symbols.predefined_functions import PredefinedFunctions +from pynestml.utils.ast_utils import ASTUtils +from pynestml.utils.logger import Logger, LoggingLevel +from pynestml.utils.messages import Messages +from pynestml.visitors.ast_visitor import ASTVisitor + + +class CoCoNoAttributesOnContinuousPort(CoCo): + """ + This context condition checker ensures that no attributes are defined on continuous time output ports. + """ + + @classmethod + def check_co_co(cls, neuron: ASTModel): + """ + Checks the coco for the handed over neuron. + :param neuron: a single neuron instance. 
+ """ + output_blocks = neuron.get_output_blocks() + if not len(output_blocks) == 1: + # too few or too many output blocks; this will be checked elsewhere + return + + output_block = output_blocks[0] + + if output_block.is_continuous() and output_block.get_attributes(): + code, message = Messages.get_continuous_output_port_cannot_have_attributes() + Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, + error_position=output_block.get_source_position()) + return diff --git a/pynestml/cocos/co_co_output_port_defined_if_emit_call.py b/pynestml/cocos/co_co_output_port_defined_if_emit_call.py index 24bdd13af..e3c1cd10b 100644 --- a/pynestml/cocos/co_co_output_port_defined_if_emit_call.py +++ b/pynestml/cocos/co_co_output_port_defined_if_emit_call.py @@ -83,18 +83,17 @@ def visit_function_call(self, node: ASTFunctionCall): if not output_blocks[0].is_spike(): code, message = Messages.get_emit_spike_function_but_no_output_port() Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, - error_position=node.get_source_position()) + error_position=output_blocks[0].get_source_position()) return # check types if len(node.get_args()) != len(output_blocks[0].get_attributes()): code, message = Messages.get_output_port_type_differs() Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, - error_position=node.get_source_position()) + error_position=output_blocks[0].get_source_position()) return for emit_spike_arg, output_block_attr in zip(node.get_args(), output_blocks[0].get_attributes()): - emit_spike_arg_type_sym = emit_spike_arg.type output_block_attr_type_sym = output_block_attr.get_data_type().get_type_symbol() @@ -105,12 +104,12 @@ def visit_function_call(self, node: ASTFunctionCall): # types are not equal, but castable code, message = Messages.get_implicit_cast_rhs_to_lhs(output_block_attr_type_sym.print_symbol(), emit_spike_arg_type_sym.print_symbol()) - 
Logger.log_message(error_position=node.get_source_position(), + Logger.log_message(error_position=output_blocks[0].get_source_position(), code=code, message=message, log_level=LoggingLevel.WARNING) continue else: # types are not equal and not castable code, message = Messages.get_output_port_type_differs() Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, - error_position=node.get_source_position()) + error_position=output_blocks[0].get_source_position()) return diff --git a/pynestml/cocos/co_cos_manager.py b/pynestml/cocos/co_cos_manager.py index 6858151f0..b7a05d484 100644 --- a/pynestml/cocos/co_cos_manager.py +++ b/pynestml/cocos/co_cos_manager.py @@ -49,6 +49,7 @@ from pynestml.cocos.co_co_kernel_type import CoCoKernelType from pynestml.cocos.co_co_model_name_unique import CoCoModelNameUnique from pynestml.cocos.co_co_nest_random_functions_legally_used import CoCoNestRandomFunctionsLegallyUsed +from pynestml.cocos.co_co_no_attributes_on_continuous_port import CoCoNoAttributesOnContinuousPort from pynestml.cocos.co_co_no_kernels_except_in_convolve import CoCoNoKernelsExceptInConvolve from pynestml.cocos.co_co_no_nest_name_space_collision import CoCoNoNestNameSpaceCollision from pynestml.cocos.co_co_no_duplicate_compilation_unit_names import CoCoNoDuplicateCompilationUnitNames @@ -427,6 +428,14 @@ def check_co_co_nest_random_functions_legally_used(cls, model: ASTModel): """ CoCoNestRandomFunctionsLegallyUsed.check_co_co(model) + @classmethod + def check_co_co_no_attributes_on_continuous_port(cls, model: ASTModel): + """ + Checks that no attributes are defined on continuous time output ports. + :param model: a single model object. 
+ """ + CoCoNoAttributesOnContinuousPort.check_co_co(model) + @classmethod def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): """ @@ -482,5 +491,6 @@ def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): cls.check_resolution_func_legally_used(model) cls.check_input_port_size_type(model) cls.check_timestep_func_legally_used(model) + cls.check_co_co_no_attributes_on_continuous_port(model) Logger.set_current_node(None) diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index 22f9242cd..c26f6d657 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -140,6 +140,7 @@ class MessageCode(Enum): RANDOM_FUNCTIONS_LEGALLY_USED = 113 EXPONENT_MUST_BE_INTEGER = 114 EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS = 115 + CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES = 116 class Messages: @@ -1395,3 +1396,8 @@ def get_non_constant_exponent(cls) -> Tuple[MessageCode, str]: def get_random_functions_legally_used(cls, name): message = "The function '" + name + "' can only be used in the update, onReceive, or onCondition blocks." return MessageCode.RANDOM_FUNCTIONS_LEGALLY_USED, message + + @classmethod + def get_continuous_output_port_cannot_have_attributes(cls): + message = "continuous time output port may not have attributes." + return MessageCode.CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES, message diff --git a/tests/invalid/CoCoOutputPortTypeContinuous.nestml b/tests/invalid/CoCoOutputPortTypeContinuous.nestml new file mode 100644 index 000000000..d5486a730 --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeContinuous.nestml @@ -0,0 +1,34 @@ +""" +CoCoOutputPortTypeContinuous.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when a continous-time output port is defined as having attributes. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. 
+ +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model output_port_types_neuron: + output: + continuous(foo real) diff --git a/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml b/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml index bb382c0dc..6373a19a9 100644 --- a/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml @@ -11,7 +11,7 @@ model delay_test_assigned_delay2_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml b/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml index 23d1d4929..3c7866319 100644 --- a/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml @@ -11,7 +11,7 @@ model delay_test_assigned_delay_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): d = 2 ms # not allowed! 
diff --git a/tests/nest_tests/resources/delay_test_assigned_synapse.nestml b/tests/nest_tests/resources/delay_test_assigned_synapse.nestml index 3774d79e9..04e451613 100644 --- a/tests/nest_tests/resources/delay_test_assigned_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_assigned_synapse.nestml @@ -11,7 +11,7 @@ model delay_test_assigned_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): w = 2 diff --git a/tests/nest_tests/resources/delay_test_plastic_synapse.nestml b/tests/nest_tests/resources/delay_test_plastic_synapse.nestml index b639827cc..f7ba1ff2d 100644 --- a/tests/nest_tests/resources/delay_test_plastic_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_plastic_synapse.nestml @@ -12,7 +12,7 @@ model delay_test_plastic_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/nest_tests/resources/delay_test_synapse.nestml b/tests/nest_tests/resources/delay_test_synapse.nestml index 3663d76ab..a403c650b 100644 --- a/tests/nest_tests/resources/delay_test_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_synapse.nestml @@ -14,7 +14,7 @@ model delay_test_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/nest_tests/resources/dopa_second_order_synapse.nestml b/tests/nest_tests/resources/dopa_second_order_synapse.nestml index 662465217..794248f6f 100644 --- a/tests/nest_tests/resources/dopa_second_order_synapse.nestml +++ b/tests/nest_tests/resources/dopa_second_order_synapse.nestml @@ -49,7 +49,7 @@ model dopa_second_order_synapse: dopa_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(dopa_spikes): dopa_rate_d += 1. 
/ tau_dopa diff --git a/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml b/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml index 4b6ead791..e535fdf37 100644 --- a/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml +++ b/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml @@ -18,7 +18,7 @@ model static_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(3.18E-3 * a * b * w, d) diff --git a/tests/nest_tests/resources/random_functions_illegal_synapse.nestml b/tests/nest_tests/resources/random_functions_illegal_synapse.nestml index 473791ec7..7790640e4 100644 --- a/tests/nest_tests/resources/random_functions_illegal_synapse.nestml +++ b/tests/nest_tests/resources/random_functions_illegal_synapse.nestml @@ -50,7 +50,7 @@ model random_functions_illegal_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): post_trace += 1 diff --git a/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml b/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml index 251dcdd8f..b32786f24 100644 --- a/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml +++ b/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml @@ -41,7 +41,7 @@ model test_plasticity_dynamics_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): if t_last_pre_spike < t and t <= (t_last_pre_spike + 1*ms): diff --git a/tests/nest_tests/resources/weight_test_assigned_synapse.nestml b/tests/nest_tests/resources/weight_test_assigned_synapse.nestml index 1737c4c57..9271adaf4 100644 --- a/tests/nest_tests/resources/weight_test_assigned_synapse.nestml +++ b/tests/nest_tests/resources/weight_test_assigned_synapse.nestml @@ -11,7 +11,7 @@ model weight_test_assigned_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): w = 2 diff 
--git a/tests/nest_tests/resources/weight_test_plastic_synapse.nestml b/tests/nest_tests/resources/weight_test_plastic_synapse.nestml index 8860a472c..bededd562 100644 --- a/tests/nest_tests/resources/weight_test_plastic_synapse.nestml +++ b/tests/nest_tests/resources/weight_test_plastic_synapse.nestml @@ -12,7 +12,7 @@ model weight_test_plastic_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 20b8cd369..676924319 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -341,6 +341,12 @@ def test_valid_output_port_type_if_emit_call(self): assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 + def test_valid_output_port_type_continuous(self): + """test that an error is raised when a continous-time output port is defined as having attributes.""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeContinuous.nestml')) + assert model is not None + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + def test_valid_coco_kernel_type(self): """ Test the functionality of CoCoKernelType. From 07dcfc670f3624de74a01a45d90809551852c155 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sun, 20 Oct 2024 09:02:17 +0200 Subject: [PATCH 04/68] add explicit output parameters to spiking output port --- tests/resources/synapse_event_priority_test.nestml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/resources/synapse_event_priority_test.nestml b/tests/resources/synapse_event_priority_test.nestml index e8aae17b2..ac11a4657 100644 --- a/tests/resources/synapse_event_priority_test.nestml +++ b/tests/resources/synapse_event_priority_test.nestml @@ -42,7 +42,7 @@ model event_priority_test_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes, priority=1): tr += 1. From 90a4aac34ee5daba8fb1cefda64e0789c2caa573 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Sun, 20 Oct 2024 09:07:12 +0200 Subject: [PATCH 05/68] add explicit output parameters to spiking output port --- pynestml/generated/PyNestMLParser.py | 1022 +++++++++++++------------- pynestml/grammars/PyNestMLParser.g4 | 3 +- 2 files changed, 502 insertions(+), 523 deletions(-) diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 9fb849705..c27a0bce6 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,91,632,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,91,628,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -27,223 +27,221 @@ def serializedATN(): 1,4,3,4,194,8,4,1,5,1,5,1,5,3,5,199,8,5,1,6,1,6,1,6,1,6,1,6,3,6, 206,8,6,1,7,1,7,1,7,1,7,1,7,1,7,1,7,3,7,215,8,7,1,8,1,8,3,8,219, 8,8,1,9,1,9,1,9,1,9,1,9,3,9,226,8,9,1,9,5,9,229,8,9,10,9,12,9,232, - 9,9,1,9,1,9,3,9,236,8,9,1,10,1,10,1,10,1,10,1,10,5,10,243,8,10,10, - 
10,12,10,246,9,10,3,10,248,8,10,1,10,1,10,1,11,3,11,253,8,11,1,11, - 1,11,1,11,1,11,1,11,1,11,3,11,261,8,11,1,11,5,11,264,8,11,10,11, - 12,11,267,9,11,1,11,1,11,1,12,1,12,1,12,1,12,3,12,275,8,12,1,12, - 5,12,278,8,12,10,12,12,12,281,9,12,1,12,1,12,1,13,1,13,1,13,1,13, - 1,13,1,13,1,13,1,13,1,13,5,13,294,8,13,10,13,12,13,297,9,13,1,13, - 3,13,300,8,13,1,13,1,13,1,14,1,14,1,14,4,14,307,8,14,11,14,12,14, - 308,1,14,1,14,1,15,1,15,3,15,315,8,15,1,16,1,16,1,16,3,16,320,8, - 16,1,17,1,17,1,17,1,17,3,17,326,8,17,1,17,1,17,1,18,1,18,1,18,1, - 18,1,18,1,18,3,18,336,8,18,1,18,1,18,1,19,3,19,341,8,19,1,19,3,19, - 344,8,19,1,19,1,19,1,19,5,19,349,8,19,10,19,12,19,352,9,19,1,19, - 1,19,1,19,3,19,357,8,19,1,19,1,19,1,19,1,19,3,19,363,8,19,1,19,5, - 19,366,8,19,10,19,12,19,369,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1, - 21,1,21,1,21,1,21,3,21,381,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3, - 24,389,8,24,1,25,1,25,5,25,393,8,25,10,25,12,25,396,9,25,1,25,3, - 25,399,8,25,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1, - 28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,423, - 8,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31, - 436,8,31,11,31,12,31,437,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33, - 1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,457,8,33,11,33, - 12,33,458,1,33,1,33,1,34,1,34,1,34,1,34,1,34,5,34,468,8,34,10,34, - 12,34,471,9,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35, - 482,8,35,10,35,12,35,485,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36, - 1,36,1,36,4,36,496,8,36,11,36,12,36,497,1,36,1,36,1,37,1,37,1,37, - 1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,513,8,38,11,38,12,38, - 514,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,3,39,525,8,39,1,39,1, - 39,1,39,1,39,5,39,531,8,39,10,39,12,39,534,9,39,3,39,536,8,39,1, - 39,3,39,539,8,39,4,39,541,8,39,11,39,12,39,542,1,39,1,39,1,40,1, - 40,1,40,1,40,1,40,3,40,552,8,40,1,40,1,40,5,40,556,8,40,10,40,12, - 40,559,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,569,8,41, - 
1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,578,8,42,1,43,1,43,1,43, - 1,43,1,43,1,43,3,43,586,8,43,1,43,1,43,1,43,1,43,5,43,592,8,43,10, - 43,12,43,595,9,43,3,43,597,8,43,1,43,3,43,600,8,43,1,43,1,43,1,43, - 1,44,1,44,1,44,1,44,1,44,1,44,5,44,611,8,44,10,44,12,44,614,9,44, - 3,44,616,8,44,1,44,1,44,3,44,620,8,44,1,44,1,44,1,44,1,45,1,45,1, - 45,1,46,1,46,1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18, - 20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62, - 64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75, - 1,0,90,91,1,0,32,34,3,0,25,25,87,88,90,91,694,0,100,1,0,0,0,2,111, - 1,0,0,0,4,128,1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0, - 12,205,1,0,0,0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,237, - 1,0,0,0,22,252,1,0,0,0,24,270,1,0,0,0,26,284,1,0,0,0,28,303,1,0, - 0,0,30,314,1,0,0,0,32,319,1,0,0,0,34,325,1,0,0,0,36,329,1,0,0,0, - 38,340,1,0,0,0,40,370,1,0,0,0,42,380,1,0,0,0,44,382,1,0,0,0,46,384, - 1,0,0,0,48,386,1,0,0,0,50,390,1,0,0,0,52,400,1,0,0,0,54,405,1,0, - 0,0,56,410,1,0,0,0,58,414,1,0,0,0,60,428,1,0,0,0,62,435,1,0,0,0, - 64,441,1,0,0,0,66,445,1,0,0,0,68,462,1,0,0,0,70,476,1,0,0,0,72,490, - 1,0,0,0,74,501,1,0,0,0,76,505,1,0,0,0,78,518,1,0,0,0,80,546,1,0, - 0,0,82,563,1,0,0,0,84,577,1,0,0,0,86,579,1,0,0,0,88,604,1,0,0,0, - 90,624,1,0,0,0,92,627,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96, - 101,5,12,0,0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100, - 94,1,0,0,0,100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1, - 0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49, - 0,0,104,105,3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,90, - 0,0,108,109,5,79,0,0,109,112,3,2,1,2,110,112,5,89,0,0,111,102,1, - 0,0,0,111,107,1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10, - 3,0,0,114,117,5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115, - 1,0,0,0,117,118,1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121, - 5,78,0,0,121,123,3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126, - 
1,0,0,0,124,122,1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1, - 0,0,0,127,129,7,0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1, - 0,0,0,130,131,5,90,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5, - 49,0,0,134,135,3,6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138, - 3,10,5,0,138,139,3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144, - 3,6,3,4,142,144,3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140, - 1,0,0,0,143,142,1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147, - 5,78,0,0,147,180,3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150, - 153,5,79,0,0,151,153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152, - 151,1,0,0,0,153,154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156, - 159,5,51,0,0,157,159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159, - 160,1,0,0,0,160,180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163, - 164,3,6,3,7,164,180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167, - 168,3,6,3,6,168,180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171, - 172,3,6,3,4,172,180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175, - 176,3,6,3,0,176,177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179, - 145,1,0,0,0,179,148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179, - 165,1,0,0,0,179,169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181, - 179,1,0,0,0,181,182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194, - 3,20,10,0,185,194,5,87,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188, - 187,1,0,0,0,188,189,1,0,0,0,189,194,1,0,0,0,190,194,5,88,0,0,191, - 194,5,25,0,0,192,194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193, - 186,1,0,0,0,193,190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194, - 9,1,0,0,0,195,199,5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198, - 195,1,0,0,0,198,196,1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206, - 5,55,0,0,201,206,5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204, - 206,5,62,0,0,205,200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205, - 203,1,0,0,0,205,204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208, - 215,5,65,0,0,209,215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0, - 
212,215,5,73,0,0,213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0, - 0,214,209,1,0,0,0,214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0, - 0,214,213,1,0,0,0,215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0, - 0,218,216,1,0,0,0,218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,89,0, - 0,221,222,5,56,0,0,222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0, - 0,0,225,221,1,0,0,0,225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85, - 0,0,228,227,1,0,0,0,229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0, - 0,0,231,235,1,0,0,0,232,230,1,0,0,0,233,234,5,86,0,0,234,236,3,18, - 9,0,235,233,1,0,0,0,235,236,1,0,0,0,236,19,1,0,0,0,237,238,5,89, - 0,0,238,247,5,49,0,0,239,244,3,6,3,0,240,241,5,74,0,0,241,243,3, - 6,3,0,242,240,1,0,0,0,243,246,1,0,0,0,244,242,1,0,0,0,244,245,1, - 0,0,0,245,248,1,0,0,0,246,244,1,0,0,0,247,239,1,0,0,0,247,248,1, - 0,0,0,248,249,1,0,0,0,249,250,5,50,0,0,250,21,1,0,0,0,251,253,5, - 29,0,0,252,251,1,0,0,0,252,253,1,0,0,0,253,254,1,0,0,0,254,255,5, - 16,0,0,255,256,5,89,0,0,256,257,3,0,0,0,257,258,5,76,0,0,258,260, - 3,6,3,0,259,261,5,84,0,0,260,259,1,0,0,0,260,261,1,0,0,0,261,265, - 1,0,0,0,262,264,3,42,21,0,263,262,1,0,0,0,264,267,1,0,0,0,265,263, - 1,0,0,0,265,266,1,0,0,0,266,268,1,0,0,0,267,265,1,0,0,0,268,269, - 5,9,0,0,269,23,1,0,0,0,270,271,3,18,9,0,271,272,5,76,0,0,272,274, - 3,6,3,0,273,275,5,84,0,0,274,273,1,0,0,0,274,275,1,0,0,0,275,279, - 1,0,0,0,276,278,3,42,21,0,277,276,1,0,0,0,278,281,1,0,0,0,279,277, - 1,0,0,0,279,280,1,0,0,0,280,282,1,0,0,0,281,279,1,0,0,0,282,283, - 5,9,0,0,283,25,1,0,0,0,284,285,5,30,0,0,285,286,3,18,9,0,286,287, - 5,76,0,0,287,295,3,6,3,0,288,289,5,4,0,0,289,290,3,18,9,0,290,291, - 5,76,0,0,291,292,3,6,3,0,292,294,1,0,0,0,293,288,1,0,0,0,294,297, - 1,0,0,0,295,293,1,0,0,0,295,296,1,0,0,0,296,299,1,0,0,0,297,295, - 1,0,0,0,298,300,5,84,0,0,299,298,1,0,0,0,299,300,1,0,0,0,300,301, - 1,0,0,0,301,302,5,9,0,0,302,27,1,0,0,0,303,304,5,9,0,0,304,306,5, - 1,0,0,305,307,3,30,15,0,306,305,1,0,0,0,307,308,1,0,0,0,308,306, - 
1,0,0,0,308,309,1,0,0,0,309,310,1,0,0,0,310,311,5,2,0,0,311,29,1, - 0,0,0,312,315,3,34,17,0,313,315,3,32,16,0,314,312,1,0,0,0,314,313, - 1,0,0,0,315,31,1,0,0,0,316,320,3,50,25,0,317,320,3,58,29,0,318,320, - 3,60,30,0,319,316,1,0,0,0,319,317,1,0,0,0,319,318,1,0,0,0,320,33, - 1,0,0,0,321,326,3,36,18,0,322,326,3,20,10,0,323,326,3,38,19,0,324, - 326,3,48,24,0,325,321,1,0,0,0,325,322,1,0,0,0,325,323,1,0,0,0,325, - 324,1,0,0,0,326,327,1,0,0,0,327,328,5,9,0,0,328,35,1,0,0,0,329,335, - 3,18,9,0,330,336,5,76,0,0,331,336,5,66,0,0,332,336,5,67,0,0,333, - 336,5,68,0,0,334,336,5,69,0,0,335,330,1,0,0,0,335,331,1,0,0,0,335, - 332,1,0,0,0,335,333,1,0,0,0,335,334,1,0,0,0,336,337,1,0,0,0,337, - 338,3,6,3,0,338,37,1,0,0,0,339,341,5,29,0,0,340,339,1,0,0,0,340, - 341,1,0,0,0,341,343,1,0,0,0,342,344,5,16,0,0,343,342,1,0,0,0,343, - 344,1,0,0,0,344,345,1,0,0,0,345,350,3,18,9,0,346,347,5,74,0,0,347, - 349,3,18,9,0,348,346,1,0,0,0,349,352,1,0,0,0,350,348,1,0,0,0,350, - 351,1,0,0,0,351,353,1,0,0,0,352,350,1,0,0,0,353,356,3,0,0,0,354, - 355,5,76,0,0,355,357,3,6,3,0,356,354,1,0,0,0,356,357,1,0,0,0,357, - 362,1,0,0,0,358,359,5,59,0,0,359,360,3,6,3,0,360,361,5,60,0,0,361, - 363,1,0,0,0,362,358,1,0,0,0,362,363,1,0,0,0,363,367,1,0,0,0,364, - 366,3,42,21,0,365,364,1,0,0,0,366,369,1,0,0,0,367,365,1,0,0,0,367, - 368,1,0,0,0,368,39,1,0,0,0,369,367,1,0,0,0,370,371,3,38,19,0,371, - 372,5,9,0,0,372,41,1,0,0,0,373,381,5,45,0,0,374,381,5,46,0,0,375, - 376,5,47,0,0,376,377,3,44,22,0,377,378,5,83,0,0,378,379,3,46,23, - 0,379,381,1,0,0,0,380,373,1,0,0,0,380,374,1,0,0,0,380,375,1,0,0, - 0,381,43,1,0,0,0,382,383,5,89,0,0,383,45,1,0,0,0,384,385,5,89,0, - 0,385,47,1,0,0,0,386,388,5,17,0,0,387,389,3,6,3,0,388,387,1,0,0, - 0,388,389,1,0,0,0,389,49,1,0,0,0,390,394,3,52,26,0,391,393,3,54, - 27,0,392,391,1,0,0,0,393,396,1,0,0,0,394,392,1,0,0,0,394,395,1,0, - 0,0,395,398,1,0,0,0,396,394,1,0,0,0,397,399,3,56,28,0,398,397,1, - 0,0,0,398,399,1,0,0,0,399,51,1,0,0,0,400,401,5,18,0,0,401,402,3, - 
6,3,0,402,403,5,82,0,0,403,404,3,28,14,0,404,53,1,0,0,0,405,406, - 5,19,0,0,406,407,3,6,3,0,407,408,5,82,0,0,408,409,3,28,14,0,409, - 55,1,0,0,0,410,411,5,20,0,0,411,412,5,82,0,0,412,413,3,28,14,0,413, - 57,1,0,0,0,414,415,5,21,0,0,415,416,5,89,0,0,416,417,5,23,0,0,417, - 418,3,6,3,0,418,419,5,48,0,0,419,420,3,6,3,0,420,422,5,24,0,0,421, - 423,5,75,0,0,422,421,1,0,0,0,422,423,1,0,0,0,423,424,1,0,0,0,424, - 425,7,1,0,0,425,426,5,82,0,0,426,427,3,28,14,0,427,59,1,0,0,0,428, - 429,5,22,0,0,429,430,3,6,3,0,430,431,5,82,0,0,431,432,3,28,14,0, - 432,61,1,0,0,0,433,436,3,64,32,0,434,436,5,9,0,0,435,433,1,0,0,0, - 435,434,1,0,0,0,436,437,1,0,0,0,437,435,1,0,0,0,437,438,1,0,0,0, - 438,439,1,0,0,0,439,440,5,0,0,1,440,63,1,0,0,0,441,442,5,31,0,0, - 442,443,5,89,0,0,443,444,3,66,33,0,444,65,1,0,0,0,445,446,5,82,0, - 0,446,447,5,9,0,0,447,456,5,1,0,0,448,457,3,72,36,0,449,457,3,76, - 38,0,450,457,3,78,39,0,451,457,3,86,43,0,452,457,3,88,44,0,453,457, - 3,68,34,0,454,457,3,70,35,0,455,457,3,74,37,0,456,448,1,0,0,0,456, - 449,1,0,0,0,456,450,1,0,0,0,456,451,1,0,0,0,456,452,1,0,0,0,456, - 453,1,0,0,0,456,454,1,0,0,0,456,455,1,0,0,0,457,458,1,0,0,0,458, - 456,1,0,0,0,458,459,1,0,0,0,459,460,1,0,0,0,460,461,5,2,0,0,461, - 67,1,0,0,0,462,463,5,40,0,0,463,464,5,49,0,0,464,469,5,89,0,0,465, - 466,5,74,0,0,466,468,3,92,46,0,467,465,1,0,0,0,468,471,1,0,0,0,469, - 467,1,0,0,0,469,470,1,0,0,0,470,472,1,0,0,0,471,469,1,0,0,0,472, - 473,5,50,0,0,473,474,5,82,0,0,474,475,3,28,14,0,475,69,1,0,0,0,476, - 477,5,41,0,0,477,478,5,49,0,0,478,483,3,6,3,0,479,480,5,74,0,0,480, - 482,3,92,46,0,481,479,1,0,0,0,482,485,1,0,0,0,483,481,1,0,0,0,483, - 484,1,0,0,0,484,486,1,0,0,0,485,483,1,0,0,0,486,487,5,50,0,0,487, - 488,5,82,0,0,488,489,3,28,14,0,489,71,1,0,0,0,490,491,7,2,0,0,491, - 492,5,82,0,0,492,493,5,9,0,0,493,495,5,1,0,0,494,496,3,40,20,0,495, - 494,1,0,0,0,496,497,1,0,0,0,497,495,1,0,0,0,497,498,1,0,0,0,498, - 499,1,0,0,0,499,500,5,2,0,0,500,73,1,0,0,0,501,502,5,35,0,0,502, - 
503,5,82,0,0,503,504,3,28,14,0,504,75,1,0,0,0,505,506,5,36,0,0,506, - 507,5,82,0,0,507,508,5,9,0,0,508,512,5,1,0,0,509,513,3,22,11,0,510, - 513,3,24,12,0,511,513,3,26,13,0,512,509,1,0,0,0,512,510,1,0,0,0, - 512,511,1,0,0,0,513,514,1,0,0,0,514,512,1,0,0,0,514,515,1,0,0,0, - 515,516,1,0,0,0,516,517,5,2,0,0,517,77,1,0,0,0,518,519,5,37,0,0, - 519,520,5,82,0,0,520,521,5,9,0,0,521,540,5,1,0,0,522,525,3,80,40, - 0,523,525,3,82,41,0,524,522,1,0,0,0,524,523,1,0,0,0,525,538,1,0, - 0,0,526,535,5,49,0,0,527,532,3,90,45,0,528,529,5,74,0,0,529,531, - 3,90,45,0,530,528,1,0,0,0,531,534,1,0,0,0,532,530,1,0,0,0,532,533, - 1,0,0,0,533,536,1,0,0,0,534,532,1,0,0,0,535,527,1,0,0,0,535,536, - 1,0,0,0,536,537,1,0,0,0,537,539,5,50,0,0,538,526,1,0,0,0,538,539, - 1,0,0,0,539,541,1,0,0,0,540,524,1,0,0,0,541,542,1,0,0,0,542,540, - 1,0,0,0,542,543,1,0,0,0,543,544,1,0,0,0,544,545,5,2,0,0,545,79,1, - 0,0,0,546,551,5,89,0,0,547,548,5,56,0,0,548,549,3,6,3,0,549,550, - 5,58,0,0,550,552,1,0,0,0,551,547,1,0,0,0,551,552,1,0,0,0,552,553, - 1,0,0,0,553,557,5,57,0,0,554,556,3,84,42,0,555,554,1,0,0,0,556,559, - 1,0,0,0,557,555,1,0,0,0,557,558,1,0,0,0,558,560,1,0,0,0,559,557, - 1,0,0,0,560,561,5,42,0,0,561,562,5,9,0,0,562,81,1,0,0,0,563,568, - 5,89,0,0,564,565,5,56,0,0,565,566,3,6,3,0,566,567,5,58,0,0,567,569, - 1,0,0,0,568,564,1,0,0,0,568,569,1,0,0,0,569,570,1,0,0,0,570,571, - 3,0,0,0,571,572,5,57,0,0,572,573,5,39,0,0,573,574,5,9,0,0,574,83, - 1,0,0,0,575,578,5,43,0,0,576,578,5,44,0,0,577,575,1,0,0,0,577,576, - 1,0,0,0,578,85,1,0,0,0,579,580,5,38,0,0,580,581,5,82,0,0,581,582, - 5,9,0,0,582,585,5,1,0,0,583,586,5,42,0,0,584,586,5,39,0,0,585,583, - 1,0,0,0,585,584,1,0,0,0,586,599,1,0,0,0,587,596,5,49,0,0,588,593, - 3,90,45,0,589,590,5,74,0,0,590,592,3,90,45,0,591,589,1,0,0,0,592, - 595,1,0,0,0,593,591,1,0,0,0,593,594,1,0,0,0,594,597,1,0,0,0,595, - 593,1,0,0,0,596,588,1,0,0,0,596,597,1,0,0,0,597,598,1,0,0,0,598, - 600,5,50,0,0,599,587,1,0,0,0,599,600,1,0,0,0,600,601,1,0,0,0,601, - 
602,5,9,0,0,602,603,5,2,0,0,603,87,1,0,0,0,604,605,5,15,0,0,605, - 606,5,89,0,0,606,615,5,49,0,0,607,612,3,90,45,0,608,609,5,74,0,0, - 609,611,3,90,45,0,610,608,1,0,0,0,611,614,1,0,0,0,612,610,1,0,0, - 0,612,613,1,0,0,0,613,616,1,0,0,0,614,612,1,0,0,0,615,607,1,0,0, - 0,615,616,1,0,0,0,616,617,1,0,0,0,617,619,5,50,0,0,618,620,3,0,0, - 0,619,618,1,0,0,0,619,620,1,0,0,0,620,621,1,0,0,0,621,622,5,82,0, - 0,622,623,3,28,14,0,623,89,1,0,0,0,624,625,5,89,0,0,625,626,3,0, - 0,0,626,91,1,0,0,0,627,628,5,89,0,0,628,629,5,76,0,0,629,630,7,3, - 0,0,630,93,1,0,0,0,70,100,111,116,122,124,128,143,152,158,179,181, - 188,193,198,205,214,218,225,230,235,244,247,252,260,265,274,279, - 295,299,308,314,319,325,335,340,343,350,356,362,367,380,388,394, - 398,422,435,437,456,458,469,483,497,512,514,524,532,535,538,542, - 551,557,568,577,585,593,596,599,612,615,619 + 9,9,1,10,1,10,1,10,1,10,1,10,5,10,239,8,10,10,10,12,10,242,9,10, + 3,10,244,8,10,1,10,1,10,1,11,3,11,249,8,11,1,11,1,11,1,11,1,11,1, + 11,1,11,3,11,257,8,11,1,11,5,11,260,8,11,10,11,12,11,263,9,11,1, + 11,1,11,1,12,1,12,1,12,1,12,3,12,271,8,12,1,12,5,12,274,8,12,10, + 12,12,12,277,9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1, + 13,1,13,5,13,290,8,13,10,13,12,13,293,9,13,1,13,3,13,296,8,13,1, + 13,1,13,1,14,1,14,1,14,4,14,303,8,14,11,14,12,14,304,1,14,1,14,1, + 15,1,15,3,15,311,8,15,1,16,1,16,1,16,3,16,316,8,16,1,17,1,17,1,17, + 1,17,3,17,322,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,18,3,18, + 332,8,18,1,18,1,18,1,19,3,19,337,8,19,1,19,3,19,340,8,19,1,19,1, + 19,1,19,5,19,345,8,19,10,19,12,19,348,9,19,1,19,1,19,1,19,3,19,353, + 8,19,1,19,1,19,1,19,1,19,3,19,359,8,19,1,19,5,19,362,8,19,10,19, + 12,19,365,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21, + 3,21,377,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,385,8,24,1,25,1, + 25,5,25,389,8,25,10,25,12,25,392,9,25,1,25,3,25,395,8,25,1,26,1, + 26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1, + 
29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,419,8,29,1,29,1,29,1, + 29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,432,8,31,11,31,12, + 31,433,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1, + 33,1,33,1,33,1,33,1,33,1,33,4,33,453,8,33,11,33,12,33,454,1,33,1, + 33,1,34,1,34,1,34,1,34,1,34,5,34,464,8,34,10,34,12,34,467,9,34,1, + 34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,478,8,35,10,35,12, + 35,481,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,492, + 8,36,11,36,12,36,493,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38, + 1,38,1,38,1,38,1,38,4,38,509,8,38,11,38,12,38,510,1,38,1,38,1,39, + 1,39,1,39,1,39,1,39,1,39,3,39,521,8,39,1,39,1,39,1,39,1,39,5,39, + 527,8,39,10,39,12,39,530,9,39,3,39,532,8,39,1,39,3,39,535,8,39,4, + 39,537,8,39,11,39,12,39,538,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3, + 40,548,8,40,1,40,1,40,5,40,552,8,40,10,40,12,40,555,9,40,1,40,1, + 40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,565,8,41,1,41,1,41,1,41,1, + 41,1,41,1,42,1,42,3,42,574,8,42,1,43,1,43,1,43,1,43,1,43,1,43,3, + 43,582,8,43,1,43,1,43,1,43,1,43,5,43,588,8,43,10,43,12,43,591,9, + 43,3,43,593,8,43,1,43,3,43,596,8,43,1,43,1,43,1,43,1,44,1,44,1,44, + 1,44,1,44,1,44,5,44,607,8,44,10,44,12,44,610,9,44,3,44,612,8,44, + 1,44,1,44,3,44,616,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, + 1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, + 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, + 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,90,91,1,0, + 32,34,3,0,25,25,87,88,90,91,689,0,100,1,0,0,0,2,111,1,0,0,0,4,128, + 1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0, + 0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22, + 248,1,0,0,0,24,266,1,0,0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310, + 1,0,0,0,32,315,1,0,0,0,34,321,1,0,0,0,36,325,1,0,0,0,38,336,1,0, + 0,0,40,366,1,0,0,0,42,376,1,0,0,0,44,378,1,0,0,0,46,380,1,0,0,0, + 48,382,1,0,0,0,50,386,1,0,0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406, + 
1,0,0,0,58,410,1,0,0,0,60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0, + 0,0,66,441,1,0,0,0,68,458,1,0,0,0,70,472,1,0,0,0,72,486,1,0,0,0, + 74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,542,1,0,0,0,82,559, + 1,0,0,0,84,573,1,0,0,0,86,575,1,0,0,0,88,600,1,0,0,0,90,620,1,0, + 0,0,92,623,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, + 0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0, + 100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100, + 99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49,0,0,104,105, + 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,90,0,0,108,109, + 5,79,0,0,109,112,3,2,1,2,110,112,5,89,0,0,111,102,1,0,0,0,111,107, + 1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10,3,0,0,114,117, + 5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115,1,0,0,0,117,118, + 1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121,5,78,0,0,121,123, + 3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126,1,0,0,0,124,122, + 1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1,0,0,0,127,129,7, + 0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1,0,0,0,130,131,5, + 90,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, + 6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138,3,10,5,0,138,139, + 3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144,3,6,3,4,142,144, + 3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140,1,0,0,0,143,142, + 1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147,5,78,0,0,147,180, + 3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150,153,5,79,0,0,151, + 153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152,151,1,0,0,0,153, + 154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156,159,5,51,0,0,157, + 159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159,160,1,0,0,0,160, + 180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163,164,3,6,3,7,164, + 180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167,168,3,6,3,6,168, + 180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171,172,3,6,3,4,172, + 180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175,176,3,6,3,0,176, + 
177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179,145,1,0,0,0,179, + 148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179,165,1,0,0,0,179, + 169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181,179,1,0,0,0,181, + 182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194,3,20,10,0,185, + 194,5,87,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, + 189,1,0,0,0,189,194,1,0,0,0,190,194,5,88,0,0,191,194,5,25,0,0,192, + 194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193,186,1,0,0,0,193, + 190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194,9,1,0,0,0,195,199, + 5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198,195,1,0,0,0,198,196, + 1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206,5,55,0,0,201,206, + 5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204,206,5,62,0,0,205, + 200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205,203,1,0,0,0,205, + 204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208,215,5,65,0,0,209, + 215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0,212,215,5,73,0,0, + 213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0,0,214,209,1,0,0,0, + 214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0,0,214,213,1,0,0,0, + 215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0,0,218,216,1,0,0,0, + 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,89,0,0,221,222,5,56,0,0, + 222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0,0,0,225,221,1,0,0,0, + 225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85,0,0,228,227,1,0,0,0, + 229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0,0,0,231,19,1,0,0,0,232, + 230,1,0,0,0,233,234,5,89,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, + 237,5,74,0,0,237,239,3,6,3,0,238,236,1,0,0,0,239,242,1,0,0,0,240, + 238,1,0,0,0,240,241,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,243, + 235,1,0,0,0,243,244,1,0,0,0,244,245,1,0,0,0,245,246,5,50,0,0,246, + 21,1,0,0,0,247,249,5,29,0,0,248,247,1,0,0,0,248,249,1,0,0,0,249, + 250,1,0,0,0,250,251,5,16,0,0,251,252,5,89,0,0,252,253,3,0,0,0,253, + 254,5,76,0,0,254,256,3,6,3,0,255,257,5,84,0,0,256,255,1,0,0,0,256, + 257,1,0,0,0,257,261,1,0,0,0,258,260,3,42,21,0,259,258,1,0,0,0,260, + 
263,1,0,0,0,261,259,1,0,0,0,261,262,1,0,0,0,262,264,1,0,0,0,263, + 261,1,0,0,0,264,265,5,9,0,0,265,23,1,0,0,0,266,267,3,18,9,0,267, + 268,5,76,0,0,268,270,3,6,3,0,269,271,5,84,0,0,270,269,1,0,0,0,270, + 271,1,0,0,0,271,275,1,0,0,0,272,274,3,42,21,0,273,272,1,0,0,0,274, + 277,1,0,0,0,275,273,1,0,0,0,275,276,1,0,0,0,276,278,1,0,0,0,277, + 275,1,0,0,0,278,279,5,9,0,0,279,25,1,0,0,0,280,281,5,30,0,0,281, + 282,3,18,9,0,282,283,5,76,0,0,283,291,3,6,3,0,284,285,5,4,0,0,285, + 286,3,18,9,0,286,287,5,76,0,0,287,288,3,6,3,0,288,290,1,0,0,0,289, + 284,1,0,0,0,290,293,1,0,0,0,291,289,1,0,0,0,291,292,1,0,0,0,292, + 295,1,0,0,0,293,291,1,0,0,0,294,296,5,84,0,0,295,294,1,0,0,0,295, + 296,1,0,0,0,296,297,1,0,0,0,297,298,5,9,0,0,298,27,1,0,0,0,299,300, + 5,9,0,0,300,302,5,1,0,0,301,303,3,30,15,0,302,301,1,0,0,0,303,304, + 1,0,0,0,304,302,1,0,0,0,304,305,1,0,0,0,305,306,1,0,0,0,306,307, + 5,2,0,0,307,29,1,0,0,0,308,311,3,34,17,0,309,311,3,32,16,0,310,308, + 1,0,0,0,310,309,1,0,0,0,311,31,1,0,0,0,312,316,3,50,25,0,313,316, + 3,58,29,0,314,316,3,60,30,0,315,312,1,0,0,0,315,313,1,0,0,0,315, + 314,1,0,0,0,316,33,1,0,0,0,317,322,3,36,18,0,318,322,3,20,10,0,319, + 322,3,38,19,0,320,322,3,48,24,0,321,317,1,0,0,0,321,318,1,0,0,0, + 321,319,1,0,0,0,321,320,1,0,0,0,322,323,1,0,0,0,323,324,5,9,0,0, + 324,35,1,0,0,0,325,331,3,18,9,0,326,332,5,76,0,0,327,332,5,66,0, + 0,328,332,5,67,0,0,329,332,5,68,0,0,330,332,5,69,0,0,331,326,1,0, + 0,0,331,327,1,0,0,0,331,328,1,0,0,0,331,329,1,0,0,0,331,330,1,0, + 0,0,332,333,1,0,0,0,333,334,3,6,3,0,334,37,1,0,0,0,335,337,5,29, + 0,0,336,335,1,0,0,0,336,337,1,0,0,0,337,339,1,0,0,0,338,340,5,16, + 0,0,339,338,1,0,0,0,339,340,1,0,0,0,340,341,1,0,0,0,341,346,3,18, + 9,0,342,343,5,74,0,0,343,345,3,18,9,0,344,342,1,0,0,0,345,348,1, + 0,0,0,346,344,1,0,0,0,346,347,1,0,0,0,347,349,1,0,0,0,348,346,1, + 0,0,0,349,352,3,0,0,0,350,351,5,76,0,0,351,353,3,6,3,0,352,350,1, + 0,0,0,352,353,1,0,0,0,353,358,1,0,0,0,354,355,5,59,0,0,355,356,3, + 
6,3,0,356,357,5,60,0,0,357,359,1,0,0,0,358,354,1,0,0,0,358,359,1, + 0,0,0,359,363,1,0,0,0,360,362,3,42,21,0,361,360,1,0,0,0,362,365, + 1,0,0,0,363,361,1,0,0,0,363,364,1,0,0,0,364,39,1,0,0,0,365,363,1, + 0,0,0,366,367,3,38,19,0,367,368,5,9,0,0,368,41,1,0,0,0,369,377,5, + 45,0,0,370,377,5,46,0,0,371,372,5,47,0,0,372,373,3,44,22,0,373,374, + 5,83,0,0,374,375,3,46,23,0,375,377,1,0,0,0,376,369,1,0,0,0,376,370, + 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,89,0,0,379,45,1, + 0,0,0,380,381,5,89,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, + 6,3,0,384,383,1,0,0,0,384,385,1,0,0,0,385,49,1,0,0,0,386,390,3,52, + 26,0,387,389,3,54,27,0,388,387,1,0,0,0,389,392,1,0,0,0,390,388,1, + 0,0,0,390,391,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,393,395,3, + 56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,51,1,0,0,0,396,397,5, + 18,0,0,397,398,3,6,3,0,398,399,5,82,0,0,399,400,3,28,14,0,400,53, + 1,0,0,0,401,402,5,19,0,0,402,403,3,6,3,0,403,404,5,82,0,0,404,405, + 3,28,14,0,405,55,1,0,0,0,406,407,5,20,0,0,407,408,5,82,0,0,408,409, + 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,89,0,0,412,413, + 5,23,0,0,413,414,3,6,3,0,414,415,5,48,0,0,415,416,3,6,3,0,416,418, + 5,24,0,0,417,419,5,75,0,0,418,417,1,0,0,0,418,419,1,0,0,0,419,420, + 1,0,0,0,420,421,7,1,0,0,421,422,5,82,0,0,422,423,3,28,14,0,423,59, + 1,0,0,0,424,425,5,22,0,0,425,426,3,6,3,0,426,427,5,82,0,0,427,428, + 3,28,14,0,428,61,1,0,0,0,429,432,3,64,32,0,430,432,5,9,0,0,431,429, + 1,0,0,0,431,430,1,0,0,0,432,433,1,0,0,0,433,431,1,0,0,0,433,434, + 1,0,0,0,434,435,1,0,0,0,435,436,5,0,0,1,436,63,1,0,0,0,437,438,5, + 31,0,0,438,439,5,89,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, + 5,82,0,0,442,443,5,9,0,0,443,452,5,1,0,0,444,453,3,72,36,0,445,453, + 3,76,38,0,446,453,3,78,39,0,447,453,3,86,43,0,448,453,3,88,44,0, + 449,453,3,68,34,0,450,453,3,70,35,0,451,453,3,74,37,0,452,444,1, + 0,0,0,452,445,1,0,0,0,452,446,1,0,0,0,452,447,1,0,0,0,452,448,1, + 0,0,0,452,449,1,0,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1, + 
0,0,0,454,452,1,0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5, + 2,0,0,457,67,1,0,0,0,458,459,5,40,0,0,459,460,5,49,0,0,460,465,5, + 89,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, + 1,0,0,0,465,463,1,0,0,0,465,466,1,0,0,0,466,468,1,0,0,0,467,465, + 1,0,0,0,468,469,5,50,0,0,469,470,5,82,0,0,470,471,3,28,14,0,471, + 69,1,0,0,0,472,473,5,41,0,0,473,474,5,49,0,0,474,479,3,6,3,0,475, + 476,5,74,0,0,476,478,3,92,46,0,477,475,1,0,0,0,478,481,1,0,0,0,479, + 477,1,0,0,0,479,480,1,0,0,0,480,482,1,0,0,0,481,479,1,0,0,0,482, + 483,5,50,0,0,483,484,5,82,0,0,484,485,3,28,14,0,485,71,1,0,0,0,486, + 487,7,2,0,0,487,488,5,82,0,0,488,489,5,9,0,0,489,491,5,1,0,0,490, + 492,3,40,20,0,491,490,1,0,0,0,492,493,1,0,0,0,493,491,1,0,0,0,493, + 494,1,0,0,0,494,495,1,0,0,0,495,496,5,2,0,0,496,73,1,0,0,0,497,498, + 5,35,0,0,498,499,5,82,0,0,499,500,3,28,14,0,500,75,1,0,0,0,501,502, + 5,36,0,0,502,503,5,82,0,0,503,504,5,9,0,0,504,508,5,1,0,0,505,509, + 3,22,11,0,506,509,3,24,12,0,507,509,3,26,13,0,508,505,1,0,0,0,508, + 506,1,0,0,0,508,507,1,0,0,0,509,510,1,0,0,0,510,508,1,0,0,0,510, + 511,1,0,0,0,511,512,1,0,0,0,512,513,5,2,0,0,513,77,1,0,0,0,514,515, + 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,536,5,1,0,0,518,521, + 3,80,40,0,519,521,3,82,41,0,520,518,1,0,0,0,520,519,1,0,0,0,521, + 534,1,0,0,0,522,531,5,49,0,0,523,528,3,90,45,0,524,525,5,74,0,0, + 525,527,3,90,45,0,526,524,1,0,0,0,527,530,1,0,0,0,528,526,1,0,0, + 0,528,529,1,0,0,0,529,532,1,0,0,0,530,528,1,0,0,0,531,523,1,0,0, + 0,531,532,1,0,0,0,532,533,1,0,0,0,533,535,5,50,0,0,534,522,1,0,0, + 0,534,535,1,0,0,0,535,537,1,0,0,0,536,520,1,0,0,0,537,538,1,0,0, + 0,538,536,1,0,0,0,538,539,1,0,0,0,539,540,1,0,0,0,540,541,5,2,0, + 0,541,79,1,0,0,0,542,547,5,89,0,0,543,544,5,56,0,0,544,545,3,6,3, + 0,545,546,5,58,0,0,546,548,1,0,0,0,547,543,1,0,0,0,547,548,1,0,0, + 0,548,549,1,0,0,0,549,553,5,57,0,0,550,552,3,84,42,0,551,550,1,0, + 0,0,552,555,1,0,0,0,553,551,1,0,0,0,553,554,1,0,0,0,554,556,1,0, + 
0,0,555,553,1,0,0,0,556,557,5,42,0,0,557,558,5,9,0,0,558,81,1,0, + 0,0,559,564,5,89,0,0,560,561,5,56,0,0,561,562,3,6,3,0,562,563,5, + 58,0,0,563,565,1,0,0,0,564,560,1,0,0,0,564,565,1,0,0,0,565,566,1, + 0,0,0,566,567,3,0,0,0,567,568,5,57,0,0,568,569,5,39,0,0,569,570, + 5,9,0,0,570,83,1,0,0,0,571,574,5,43,0,0,572,574,5,44,0,0,573,571, + 1,0,0,0,573,572,1,0,0,0,574,85,1,0,0,0,575,576,5,38,0,0,576,577, + 5,82,0,0,577,578,5,9,0,0,578,581,5,1,0,0,579,582,5,42,0,0,580,582, + 5,39,0,0,581,579,1,0,0,0,581,580,1,0,0,0,582,595,1,0,0,0,583,592, + 5,49,0,0,584,589,3,90,45,0,585,586,5,74,0,0,586,588,3,90,45,0,587, + 585,1,0,0,0,588,591,1,0,0,0,589,587,1,0,0,0,589,590,1,0,0,0,590, + 593,1,0,0,0,591,589,1,0,0,0,592,584,1,0,0,0,592,593,1,0,0,0,593, + 594,1,0,0,0,594,596,5,50,0,0,595,583,1,0,0,0,595,596,1,0,0,0,596, + 597,1,0,0,0,597,598,5,9,0,0,598,599,5,2,0,0,599,87,1,0,0,0,600,601, + 5,15,0,0,601,602,5,89,0,0,602,611,5,49,0,0,603,608,3,90,45,0,604, + 605,5,74,0,0,605,607,3,90,45,0,606,604,1,0,0,0,607,610,1,0,0,0,608, + 606,1,0,0,0,608,609,1,0,0,0,609,612,1,0,0,0,610,608,1,0,0,0,611, + 603,1,0,0,0,611,612,1,0,0,0,612,613,1,0,0,0,613,615,5,50,0,0,614, + 616,3,0,0,0,615,614,1,0,0,0,615,616,1,0,0,0,616,617,1,0,0,0,617, + 618,5,82,0,0,618,619,3,28,14,0,619,89,1,0,0,0,620,621,5,89,0,0,621, + 622,3,0,0,0,622,91,1,0,0,0,623,624,5,89,0,0,624,625,5,76,0,0,625, + 626,7,3,0,0,626,93,1,0,0,0,69,100,111,116,122,124,128,143,152,158, + 179,181,188,193,198,205,214,218,225,230,240,243,248,256,261,270, + 275,291,295,304,310,315,321,331,336,339,346,352,358,363,376,384, + 390,394,418,431,433,452,454,465,479,493,508,510,520,528,531,534, + 538,547,553,564,573,581,589,592,595,608,611,615 ] class PyNestMLParser ( Parser ): @@ -1483,7 +1481,6 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.parser = parser self.name = None # Token self.vectorParameter = None # ExpressionContext - self.attribute = None # VariableContext def NAME(self): return 
self.getToken(PyNestMLParser.NAME, 0) @@ -1500,17 +1497,10 @@ def DIFFERENTIAL_ORDER(self, i:int=None): else: return self.getToken(PyNestMLParser.DIFFERENTIAL_ORDER, i) - def FULLSTOP(self): - return self.getToken(PyNestMLParser.FULLSTOP, 0) - def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) - def variable(self): - return self.getTypedRuleContext(PyNestMLParser.VariableContext,0) - - def getRuleIndex(self): return PyNestMLParser.RULE_variable @@ -1554,16 +1544,6 @@ def variable(self): self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) - self.state = 235 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,19,self._ctx) - if la_ == 1: - self.state = 233 - self.match(PyNestMLParser.FULLSTOP) - self.state = 234 - localctx.attribute = self.variable() - - except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1622,31 +1602,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 237 + self.state = 233 localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 238 + self.state = 234 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 247 + self.state = 243 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): - self.state = 239 + self.state = 235 self.expression(0) - self.state = 244 + self.state = 240 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 240 + self.state = 236 self.match(PyNestMLParser.COMMA) - self.state = 241 + self.state = 237 self.expression(0) - self.state = 246 + self.state = 242 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 249 + self.state = 245 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ 
-1719,43 +1699,43 @@ def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 252 + self.state = 248 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 251 + self.state = 247 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 254 + self.state = 250 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 255 + self.state = 251 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 256 + self.state = 252 self.dataType() - self.state = 257 + self.state = 253 self.match(PyNestMLParser.EQUALS) - self.state = 258 + self.state = 254 self.expression(0) - self.state = 260 + self.state = 256 self._errHandler.sync(self) _la = self._input.LA(1) if _la==84: - self.state = 259 + self.state = 255 self.match(PyNestMLParser.SEMICOLON) - self.state = 265 + self.state = 261 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 262 + self.state = 258 localctx.decorator = self.anyDecorator() - self.state = 267 + self.state = 263 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 268 + self.state = 264 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1819,31 +1799,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 270 + self.state = 266 localctx.lhs = self.variable() - self.state = 271 + self.state = 267 self.match(PyNestMLParser.EQUALS) - self.state = 272 + self.state = 268 localctx.rhs = self.expression(0) - self.state = 274 + self.state = 270 self._errHandler.sync(self) _la = self._input.LA(1) if _la==84: - self.state = 273 + self.state = 269 self.match(PyNestMLParser.SEMICOLON) - self.state = 279 + self.state = 275 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 276 + 
self.state = 272 localctx.decorator = self.anyDecorator() - self.state = 281 + self.state = 277 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 282 + self.state = 278 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1915,39 +1895,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 284 + self.state = 280 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 285 + self.state = 281 self.variable() - self.state = 286 + self.state = 282 self.match(PyNestMLParser.EQUALS) - self.state = 287 + self.state = 283 self.expression(0) - self.state = 295 + self.state = 291 self._errHandler.sync(self) _la = self._input.LA(1) while _la==4: - self.state = 288 + self.state = 284 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 289 + self.state = 285 self.variable() - self.state = 290 + self.state = 286 self.match(PyNestMLParser.EQUALS) - self.state = 291 + self.state = 287 self.expression(0) - self.state = 297 + self.state = 293 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 299 + self.state = 295 self._errHandler.sync(self) _la = self._input.LA(1) if _la==84: - self.state = 298 + self.state = 294 self.match(PyNestMLParser.SEMICOLON) - self.state = 301 + self.state = 297 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2000,23 +1980,23 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 303 + self.state = 299 self.match(PyNestMLParser.NEWLINE) - self.state = 304 + self.state = 300 self.match(PyNestMLParser.INDENT) - self.state = 306 + self.state = 302 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 305 + self.state = 301 self.stmt() - self.state = 308 + self.state = 304 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==89): break - 
self.state = 310 + self.state = 306 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2059,17 +2039,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_stmt) try: - self.state = 314 + self.state = 310 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 17, 29, 89]: self.enterOuterAlt(localctx, 1) - self.state = 312 + self.state = 308 self.smallStmt() pass elif token in [18, 21, 22]: self.enterOuterAlt(localctx, 2) - self.state = 313 + self.state = 309 self.compoundStmt() pass else: @@ -2120,22 +2100,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_compoundStmt) try: - self.state = 319 + self.state = 315 self._errHandler.sync(self) token = self._input.LA(1) if token in [18]: self.enterOuterAlt(localctx, 1) - self.state = 316 + self.state = 312 self.ifStmt() pass elif token in [21]: self.enterOuterAlt(localctx, 2) - self.state = 317 + self.state = 313 self.forStmt() pass elif token in [22]: self.enterOuterAlt(localctx, 3) - self.state = 318 + self.state = 314 self.whileStmt() pass else: @@ -2194,31 +2174,31 @@ def smallStmt(self): self.enterRule(localctx, 34, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 325 + self.state = 321 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,32,self._ctx) + la_ = self._interp.adaptivePredict(self._input,31,self._ctx) if la_ == 1: - self.state = 321 + self.state = 317 self.assignment() pass elif la_ == 2: - self.state = 322 + self.state = 318 self.functionCall() pass elif la_ == 3: - self.state = 323 + self.state = 319 self.declaration() pass elif la_ == 4: - self.state = 324 + self.state = 320 self.returnStmt() pass - self.state = 327 + self.state = 323 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re 
@@ -2283,35 +2263,35 @@ def assignment(self): self.enterRule(localctx, 36, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 329 + self.state = 325 localctx.lhs_variable = self.variable() - self.state = 335 + self.state = 331 self._errHandler.sync(self) token = self._input.LA(1) if token in [76]: - self.state = 330 + self.state = 326 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass elif token in [66]: - self.state = 331 + self.state = 327 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass elif token in [67]: - self.state = 332 + self.state = 328 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass elif token in [68]: - self.state = 333 + self.state = 329 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass elif token in [69]: - self.state = 334 + self.state = 330 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass else: raise NoViableAltException(self) - self.state = 337 + self.state = 333 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2399,67 +2379,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 340 + self.state = 336 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 339 + self.state = 335 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 343 + self.state = 339 self._errHandler.sync(self) _la = self._input.LA(1) if _la==16: - self.state = 342 + self.state = 338 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 345 + self.state = 341 self.variable() - self.state = 350 + self.state = 346 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 346 + self.state = 342 self.match(PyNestMLParser.COMMA) - self.state = 347 + self.state = 343 self.variable() - self.state = 352 + self.state = 348 self._errHandler.sync(self) _la = 
self._input.LA(1) - self.state = 353 + self.state = 349 self.dataType() - self.state = 356 + self.state = 352 self._errHandler.sync(self) _la = self._input.LA(1) if _la==76: - self.state = 354 + self.state = 350 self.match(PyNestMLParser.EQUALS) - self.state = 355 + self.state = 351 localctx.rhs = self.expression(0) - self.state = 362 + self.state = 358 self._errHandler.sync(self) _la = self._input.LA(1) if _la==59: - self.state = 358 + self.state = 354 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 359 + self.state = 355 localctx.invariant = self.expression(0) - self.state = 360 + self.state = 356 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 367 + self.state = 363 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 364 + self.state = 360 localctx.decorator = self.anyDecorator() - self.state = 369 + self.state = 365 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2504,9 +2484,9 @@ def declaration_newline(self): self.enterRule(localctx, 40, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 370 + self.state = 366 self.declaration() - self.state = 371 + self.state = 367 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2561,28 +2541,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_anyDecorator) try: - self.state = 380 + self.state = 376 self._errHandler.sync(self) token = self._input.LA(1) if token in [45]: self.enterOuterAlt(localctx, 1) - self.state = 373 + self.state = 369 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass elif token in [46]: self.enterOuterAlt(localctx, 2) - self.state = 374 + self.state = 370 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass elif token in [47]: self.enterOuterAlt(localctx, 3) - self.state = 375 + self.state = 371 
self.match(PyNestMLParser.AT) - self.state = 376 + self.state = 372 self.namespaceDecoratorNamespace() - self.state = 377 + self.state = 373 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 378 + self.state = 374 self.namespaceDecoratorName() pass else: @@ -2626,7 +2606,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 44, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 382 + self.state = 378 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2666,7 +2646,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 46, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 384 + self.state = 380 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2710,13 +2690,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 386 + self.state = 382 self.match(PyNestMLParser.RETURN_KEYWORD) - self.state = 388 + self.state = 384 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): - self.state = 387 + self.state = 383 self.expression(0) @@ -2770,23 +2750,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 390 + self.state = 386 self.ifClause() - self.state = 394 + self.state = 390 self._errHandler.sync(self) _la = self._input.LA(1) while _la==19: - self.state = 391 + self.state = 387 self.elifClause() - self.state = 396 + self.state = 392 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 398 + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) if _la==20: - self.state = 397 + self.state = 393 self.elseClause() @@ -2838,13 +2818,13 @@ def ifClause(self): self.enterRule(localctx, 52, 
self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 400 + self.state = 396 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 401 + self.state = 397 self.expression(0) - self.state = 402 + self.state = 398 self.match(PyNestMLParser.COLON) - self.state = 403 + self.state = 399 self.block() except RecognitionException as re: localctx.exception = re @@ -2894,13 +2874,13 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 405 + self.state = 401 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 406 + self.state = 402 self.expression(0) - self.state = 407 + self.state = 403 self.match(PyNestMLParser.COLON) - self.state = 408 + self.state = 404 self.block() except RecognitionException as re: localctx.exception = re @@ -2946,11 +2926,11 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: self.enterOuterAlt(localctx, 1) - self.state = 410 + self.state = 406 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 411 + self.state = 407 self.match(PyNestMLParser.COLON) - self.state = 412 + self.state = 408 self.block() except RecognitionException as re: localctx.exception = re @@ -3029,39 +3009,39 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 414 + self.state = 410 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 415 + self.state = 411 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 416 + self.state = 412 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 417 + self.state = 413 localctx.start_from = self.expression(0) - self.state = 418 + self.state = 414 self.match(PyNestMLParser.ELLIPSIS) - self.state = 419 + self.state = 415 localctx.end_at = self.expression(0) - self.state = 420 + self.state = 416 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 422 + self.state = 418 self._errHandler.sync(self) _la = self._input.LA(1) if _la==75: - self.state = 421 + self.state = 
417 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 424 + self.state = 420 _la = self._input.LA(1) if not(_la==90 or _la==91): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 425 + self.state = 421 self.match(PyNestMLParser.COLON) - self.state = 426 + self.state = 422 self.block() except RecognitionException as re: localctx.exception = re @@ -3111,13 +3091,13 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 428 + self.state = 424 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 429 + self.state = 425 self.expression(0) - self.state = 430 + self.state = 426 self.match(PyNestMLParser.COLON) - self.state = 431 + self.state = 427 self.block() except RecognitionException as re: localctx.exception = re @@ -3170,31 +3150,31 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 435 + self.state = 431 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 435 + self.state = 431 self._errHandler.sync(self) token = self._input.LA(1) if token in [31]: - self.state = 433 + self.state = 429 self.model() pass elif token in [9]: - self.state = 434 + self.state = 430 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 437 + self.state = 433 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==9 or _la==31): break - self.state = 439 + self.state = 435 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3240,11 +3220,11 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 441 + self.state = 437 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 442 + self.state = 438 self.match(PyNestMLParser.NAME) - self.state = 443 + self.state = 439 self.modelBody() except 
RecognitionException as re: localctx.exception = re @@ -3349,61 +3329,61 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 445 + self.state = 441 self.match(PyNestMLParser.COLON) - self.state = 446 + self.state = 442 self.match(PyNestMLParser.NEWLINE) - self.state = 447 + self.state = 443 self.match(PyNestMLParser.INDENT) - self.state = 456 + self.state = 452 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 456 + self.state = 452 self._errHandler.sync(self) token = self._input.LA(1) if token in [32, 33, 34]: - self.state = 448 + self.state = 444 self.blockWithVariables() pass elif token in [36]: - self.state = 449 + self.state = 445 self.equationsBlock() pass elif token in [37]: - self.state = 450 + self.state = 446 self.inputBlock() pass elif token in [38]: - self.state = 451 + self.state = 447 self.outputBlock() pass elif token in [15]: - self.state = 452 + self.state = 448 self.function() pass elif token in [40]: - self.state = 453 + self.state = 449 self.onReceiveBlock() pass elif token in [41]: - self.state = 454 + self.state = 450 self.onConditionBlock() pass elif token in [35]: - self.state = 455 + self.state = 451 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 458 + self.state = 454 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): break - self.state = 460 + self.state = 456 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3473,29 +3453,29 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 462 + self.state = 458 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 463 + self.state = 459 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 464 + self.state = 460 localctx.inputPortName = self.match(PyNestMLParser.NAME) - self.state = 469 + self.state = 465 
self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 465 + self.state = 461 self.match(PyNestMLParser.COMMA) - self.state = 466 + self.state = 462 self.constParameter() - self.state = 471 + self.state = 467 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 472 + self.state = 468 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 473 + self.state = 469 self.match(PyNestMLParser.COLON) - self.state = 474 + self.state = 470 self.block() except RecognitionException as re: localctx.exception = re @@ -3566,29 +3546,29 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 476 + self.state = 472 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 477 + self.state = 473 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 478 + self.state = 474 localctx.condition = self.expression(0) - self.state = 483 + self.state = 479 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 479 + self.state = 475 self.match(PyNestMLParser.COMMA) - self.state = 480 + self.state = 476 self.constParameter() - self.state = 485 + self.state = 481 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 486 + self.state = 482 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 487 + self.state = 483 self.match(PyNestMLParser.COLON) - self.state = 488 + self.state = 484 self.block() except RecognitionException as re: localctx.exception = re @@ -3654,7 +3634,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 490 + self.state = 486 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): @@ -3662,25 +3642,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 491 + self.state = 487 self.match(PyNestMLParser.COLON) - self.state = 492 + self.state = 488 
self.match(PyNestMLParser.NEWLINE) - self.state = 493 + self.state = 489 self.match(PyNestMLParser.INDENT) - self.state = 495 + self.state = 491 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 494 + self.state = 490 self.declaration_newline() - self.state = 497 + self.state = 493 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==16 or _la==29 or _la==89): break - self.state = 499 + self.state = 495 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3726,11 +3706,11 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 501 + self.state = 497 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 502 + self.state = 498 self.match(PyNestMLParser.COLON) - self.state = 503 + self.state = 499 self.block() except RecognitionException as re: localctx.exception = re @@ -3803,43 +3783,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 505 + self.state = 501 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 506 + self.state = 502 self.match(PyNestMLParser.COLON) - self.state = 507 + self.state = 503 self.match(PyNestMLParser.NEWLINE) - self.state = 508 + self.state = 504 self.match(PyNestMLParser.INDENT) - self.state = 512 + self.state = 508 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 512 + self.state = 508 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 29]: - self.state = 509 + self.state = 505 self.inlineExpression() pass elif token in [89]: - self.state = 510 + self.state = 506 self.odeEquation() pass elif token in [30]: - self.state = 511 + self.state = 507 self.kernel() pass else: raise NoViableAltException(self) - self.state = 514 + self.state = 510 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==89): 
break - self.state = 516 + self.state = 512 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3930,69 +3910,69 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 518 + self.state = 514 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 519 + self.state = 515 self.match(PyNestMLParser.COLON) - self.state = 520 + self.state = 516 self.match(PyNestMLParser.NEWLINE) - self.state = 521 + self.state = 517 self.match(PyNestMLParser.INDENT) - self.state = 540 + self.state = 536 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 524 + self.state = 520 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,54,self._ctx) + la_ = self._interp.adaptivePredict(self._input,53,self._ctx) if la_ == 1: - self.state = 522 + self.state = 518 self.spikeInputPort() pass elif la_ == 2: - self.state = 523 + self.state = 519 self.continuousInputPort() pass - self.state = 538 + self.state = 534 self._errHandler.sync(self) _la = self._input.LA(1) if _la==49: - self.state = 526 + self.state = 522 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 535 + self.state = 531 self._errHandler.sync(self) _la = self._input.LA(1) if _la==89: - self.state = 527 + self.state = 523 self.parameter() - self.state = 532 + self.state = 528 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 528 + self.state = 524 self.match(PyNestMLParser.COMMA) - self.state = 529 + self.state = 525 self.parameter() - self.state = 534 + self.state = 530 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 537 + self.state = 533 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 542 + self.state = 538 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==89): break - self.state = 544 + self.state = 540 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4060,35 +4040,35 @@ 
def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 546 + self.state = 542 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 551 + self.state = 547 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 547 + self.state = 543 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 548 + self.state = 544 localctx.sizeParameter = self.expression(0) - self.state = 549 + self.state = 545 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 553 + self.state = 549 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 557 + self.state = 553 self._errHandler.sync(self) _la = self._input.LA(1) while _la==43 or _la==44: - self.state = 554 + self.state = 550 self.inputQualifier() - self.state = 559 + self.state = 555 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 560 + self.state = 556 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 561 + self.state = 557 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4153,27 +4133,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 563 + self.state = 559 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 568 + self.state = 564 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 564 + self.state = 560 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 565 + self.state = 561 localctx.sizeParameter = self.expression(0) - self.state = 566 + self.state = 562 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 570 + self.state = 566 self.dataType() - self.state = 571 + self.state = 567 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 572 + self.state = 568 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 573 + self.state = 569 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re 
@@ -4217,15 +4197,15 @@ def inputQualifier(self): self.enterRule(localctx, 84, self.RULE_inputQualifier) try: self.enterOuterAlt(localctx, 1) - self.state = 577 + self.state = 573 self._errHandler.sync(self) token = self._input.LA(1) if token in [43]: - self.state = 575 + self.state = 571 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass elif token in [44]: - self.state = 576 + self.state = 572 localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass else: @@ -4312,61 +4292,61 @@ def outputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 579 + self.state = 575 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 580 + self.state = 576 self.match(PyNestMLParser.COLON) - self.state = 581 + self.state = 577 self.match(PyNestMLParser.NEWLINE) - self.state = 582 + self.state = 578 self.match(PyNestMLParser.INDENT) - self.state = 585 + self.state = 581 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 583 + self.state = 579 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [39]: - self.state = 584 + self.state = 580 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 599 + self.state = 595 self._errHandler.sync(self) _la = self._input.LA(1) if _la==49: - self.state = 587 + self.state = 583 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 596 + self.state = 592 self._errHandler.sync(self) _la = self._input.LA(1) if _la==89: - self.state = 588 + self.state = 584 localctx.attribute = self.parameter() - self.state = 593 + self.state = 589 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 589 + self.state = 585 self.match(PyNestMLParser.COMMA) - self.state = 590 + self.state = 586 localctx.attribute = self.parameter() - self.state = 595 + self.state = 591 self._errHandler.sync(self) _la = self._input.LA(1) - 
self.state = 598 + self.state = 594 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 601 + self.state = 597 self.match(PyNestMLParser.NEWLINE) - self.state = 602 + self.state = 598 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4440,45 +4420,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 604 + self.state = 600 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 605 + self.state = 601 self.match(PyNestMLParser.NAME) - self.state = 606 + self.state = 602 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 615 + self.state = 611 self._errHandler.sync(self) _la = self._input.LA(1) if _la==89: - self.state = 607 + self.state = 603 self.parameter() - self.state = 612 + self.state = 608 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 608 + self.state = 604 self.match(PyNestMLParser.COMMA) - self.state = 609 + self.state = 605 self.parameter() - self.state = 614 + self.state = 610 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 617 + self.state = 613 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 619 + self.state = 615 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==89 or _la==90: - self.state = 618 + self.state = 614 localctx.returnType = self.dataType() - self.state = 621 + self.state = 617 self.match(PyNestMLParser.COLON) - self.state = 622 + self.state = 618 self.block() except RecognitionException as re: localctx.exception = re @@ -4521,9 +4501,9 @@ def parameter(self): self.enterRule(localctx, 90, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 624 + self.state = 620 self.match(PyNestMLParser.NAME) - self.state = 625 + self.state = 621 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4583,11 +4563,11 @@ def constParameter(self): self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 627 + self.state = 623 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 628 + self.state = 624 self.match(PyNestMLParser.EQUALS) - self.state = 629 + self.state = 625 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==25 or ((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & 27) != 0)): diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 9a0618561..6f88f9577 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -108,8 +108,7 @@ parser grammar PyNestMLParser; */ variable : name=NAME (LEFT_SQUARE_BRACKET vectorParameter=expression RIGHT_SQUARE_BRACKET)? - (DIFFERENTIAL_ORDER)* - (FULLSTOP attribute=variable)?; + (DIFFERENTIAL_ORDER)*; /** ASTFunctionCall Represents a function call, e.g. myFun("a", "b"). From 224e0f363114fa585bb7131dbac45c3363f6e330 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Mon, 21 Oct 2024 10:17:16 +0200 Subject: [PATCH 06/68] add explicit attributes to spiking input port --- .../nest_declarations_helper.py | 1 - .../printers/cpp_variable_printer.py | 2 +- .../printers/gsl_variable_printer.py | 1 + pynestml/generated/PyNestMLParser.py | 1022 +++++++++-------- pynestml/grammars/PyNestMLParser.g4 | 3 +- pynestml/meta_model/ast_variable.py | 7 +- pynestml/visitors/ast_builder_visitor.py | 3 + 7 files changed, 533 insertions(+), 506 deletions(-) diff --git a/pynestml/codegeneration/nest_declarations_helper.py b/pynestml/codegeneration/nest_declarations_helper.py index 981c05b95..827ad6dcc 100644 --- a/pynestml/codegeneration/nest_declarations_helper.py +++ b/pynestml/codegeneration/nest_declarations_helper.py @@ -56,7 +56,6 @@ def print_variable_type(self, variable_symbol) -> str: :param variable_symbol: a single variable symbol :type variable_symbol: variable_symbol :return: a string presentation of the variable symbol's type - :rtype: str """ if 
variable_symbol.has_vector_parameter(): return 'std::vector< ' + self.type_symbol_printer.print(variable_symbol.get_type_symbol()) + \ diff --git a/pynestml/codegeneration/printers/cpp_variable_printer.py b/pynestml/codegeneration/printers/cpp_variable_printer.py index 2a6af847a..1e1039165 100644 --- a/pynestml/codegeneration/printers/cpp_variable_printer.py +++ b/pynestml/codegeneration/printers/cpp_variable_printer.py @@ -51,7 +51,7 @@ def print_variable(self, node: ASTVariable) -> str: if node.get_name() == PredefinedVariables.E_CONSTANT: return "2.718281828459045235360287471352" # not defined in C++11 stdlib - if node.get_name() == PredefinedVariables.E_CONSTANT: + if node.get_name() == PredefinedVariables.PI_CONSTANT: return "M_PI" # from return CppVariablePrinter._print_cpp_name(node.get_complete_name()) diff --git a/pynestml/codegeneration/printers/gsl_variable_printer.py b/pynestml/codegeneration/printers/gsl_variable_printer.py index 4de36651a..df87f443d 100644 --- a/pynestml/codegeneration/printers/gsl_variable_printer.py +++ b/pynestml/codegeneration/printers/gsl_variable_printer.py @@ -88,6 +88,7 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: var_name += "_0 + " + variable.get_vector_parameter().get_variable().get_name() else: var_name += "_" + str(variable.get_vector_parameter()) + return "spike_inputs_grid_sum_[node." 
+ var_name + " - node.MIN_SPIKE_RECEPTOR]" return variable_symbol.get_symbol_name() + '_grid_sum_' diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index c27a0bce6..9fb849705 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,91,628,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,91,632,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -27,221 +27,223 @@ def serializedATN(): 1,4,3,4,194,8,4,1,5,1,5,1,5,3,5,199,8,5,1,6,1,6,1,6,1,6,1,6,3,6, 206,8,6,1,7,1,7,1,7,1,7,1,7,1,7,1,7,3,7,215,8,7,1,8,1,8,3,8,219, 8,8,1,9,1,9,1,9,1,9,1,9,3,9,226,8,9,1,9,5,9,229,8,9,10,9,12,9,232, - 9,9,1,10,1,10,1,10,1,10,1,10,5,10,239,8,10,10,10,12,10,242,9,10, - 3,10,244,8,10,1,10,1,10,1,11,3,11,249,8,11,1,11,1,11,1,11,1,11,1, - 11,1,11,3,11,257,8,11,1,11,5,11,260,8,11,10,11,12,11,263,9,11,1, - 11,1,11,1,12,1,12,1,12,1,12,3,12,271,8,12,1,12,5,12,274,8,12,10, - 12,12,12,277,9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1, - 13,1,13,5,13,290,8,13,10,13,12,13,293,9,13,1,13,3,13,296,8,13,1, - 13,1,13,1,14,1,14,1,14,4,14,303,8,14,11,14,12,14,304,1,14,1,14,1, - 15,1,15,3,15,311,8,15,1,16,1,16,1,16,3,16,316,8,16,1,17,1,17,1,17, - 1,17,3,17,322,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,18,3,18, - 332,8,18,1,18,1,18,1,19,3,19,337,8,19,1,19,3,19,340,8,19,1,19,1, - 19,1,19,5,19,345,8,19,10,19,12,19,348,9,19,1,19,1,19,1,19,3,19,353, - 8,19,1,19,1,19,1,19,1,19,3,19,359,8,19,1,19,5,19,362,8,19,10,19, - 12,19,365,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21, - 3,21,377,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,385,8,24,1,25,1, - 25,5,25,389,8,25,10,25,12,25,392,9,25,1,25,3,25,395,8,25,1,26,1, - 
26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1, - 29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,419,8,29,1,29,1,29,1, - 29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,432,8,31,11,31,12, - 31,433,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1, - 33,1,33,1,33,1,33,1,33,1,33,4,33,453,8,33,11,33,12,33,454,1,33,1, - 33,1,34,1,34,1,34,1,34,1,34,5,34,464,8,34,10,34,12,34,467,9,34,1, - 34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,478,8,35,10,35,12, - 35,481,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,492, - 8,36,11,36,12,36,493,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,4,38,509,8,38,11,38,12,38,510,1,38,1,38,1,39, - 1,39,1,39,1,39,1,39,1,39,3,39,521,8,39,1,39,1,39,1,39,1,39,5,39, - 527,8,39,10,39,12,39,530,9,39,3,39,532,8,39,1,39,3,39,535,8,39,4, - 39,537,8,39,11,39,12,39,538,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3, - 40,548,8,40,1,40,1,40,5,40,552,8,40,10,40,12,40,555,9,40,1,40,1, - 40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,565,8,41,1,41,1,41,1,41,1, - 41,1,41,1,42,1,42,3,42,574,8,42,1,43,1,43,1,43,1,43,1,43,1,43,3, - 43,582,8,43,1,43,1,43,1,43,1,43,5,43,588,8,43,10,43,12,43,591,9, - 43,3,43,593,8,43,1,43,3,43,596,8,43,1,43,1,43,1,43,1,44,1,44,1,44, - 1,44,1,44,1,44,5,44,607,8,44,10,44,12,44,610,9,44,3,44,612,8,44, - 1,44,1,44,3,44,616,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, - 1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, - 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, - 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,90,91,1,0, - 32,34,3,0,25,25,87,88,90,91,689,0,100,1,0,0,0,2,111,1,0,0,0,4,128, - 1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0, - 0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22, - 248,1,0,0,0,24,266,1,0,0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310, - 1,0,0,0,32,315,1,0,0,0,34,321,1,0,0,0,36,325,1,0,0,0,38,336,1,0, - 0,0,40,366,1,0,0,0,42,376,1,0,0,0,44,378,1,0,0,0,46,380,1,0,0,0, - 
48,382,1,0,0,0,50,386,1,0,0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406, - 1,0,0,0,58,410,1,0,0,0,60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0, - 0,0,66,441,1,0,0,0,68,458,1,0,0,0,70,472,1,0,0,0,72,486,1,0,0,0, - 74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,542,1,0,0,0,82,559, - 1,0,0,0,84,573,1,0,0,0,86,575,1,0,0,0,88,600,1,0,0,0,90,620,1,0, - 0,0,92,623,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, - 0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0, - 100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100, - 99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49,0,0,104,105, - 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,90,0,0,108,109, - 5,79,0,0,109,112,3,2,1,2,110,112,5,89,0,0,111,102,1,0,0,0,111,107, - 1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10,3,0,0,114,117, - 5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115,1,0,0,0,117,118, - 1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121,5,78,0,0,121,123, - 3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126,1,0,0,0,124,122, - 1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1,0,0,0,127,129,7, - 0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1,0,0,0,130,131,5, - 90,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, - 6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138,3,10,5,0,138,139, - 3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144,3,6,3,4,142,144, - 3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140,1,0,0,0,143,142, - 1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147,5,78,0,0,147,180, - 3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150,153,5,79,0,0,151, - 153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152,151,1,0,0,0,153, - 154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156,159,5,51,0,0,157, - 159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159,160,1,0,0,0,160, - 180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163,164,3,6,3,7,164, - 180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167,168,3,6,3,6,168, - 180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171,172,3,6,3,4,172, - 
180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175,176,3,6,3,0,176, - 177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179,145,1,0,0,0,179, - 148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179,165,1,0,0,0,179, - 169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181,179,1,0,0,0,181, - 182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194,3,20,10,0,185, - 194,5,87,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, - 189,1,0,0,0,189,194,1,0,0,0,190,194,5,88,0,0,191,194,5,25,0,0,192, - 194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193,186,1,0,0,0,193, - 190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194,9,1,0,0,0,195,199, - 5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198,195,1,0,0,0,198,196, - 1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206,5,55,0,0,201,206, - 5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204,206,5,62,0,0,205, - 200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205,203,1,0,0,0,205, - 204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208,215,5,65,0,0,209, - 215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0,212,215,5,73,0,0, - 213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0,0,214,209,1,0,0,0, - 214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0,0,214,213,1,0,0,0, - 215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0,0,218,216,1,0,0,0, - 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,89,0,0,221,222,5,56,0,0, - 222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0,0,0,225,221,1,0,0,0, - 225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85,0,0,228,227,1,0,0,0, - 229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0,0,0,231,19,1,0,0,0,232, - 230,1,0,0,0,233,234,5,89,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, - 237,5,74,0,0,237,239,3,6,3,0,238,236,1,0,0,0,239,242,1,0,0,0,240, - 238,1,0,0,0,240,241,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,243, - 235,1,0,0,0,243,244,1,0,0,0,244,245,1,0,0,0,245,246,5,50,0,0,246, - 21,1,0,0,0,247,249,5,29,0,0,248,247,1,0,0,0,248,249,1,0,0,0,249, - 250,1,0,0,0,250,251,5,16,0,0,251,252,5,89,0,0,252,253,3,0,0,0,253, - 254,5,76,0,0,254,256,3,6,3,0,255,257,5,84,0,0,256,255,1,0,0,0,256, - 
257,1,0,0,0,257,261,1,0,0,0,258,260,3,42,21,0,259,258,1,0,0,0,260, - 263,1,0,0,0,261,259,1,0,0,0,261,262,1,0,0,0,262,264,1,0,0,0,263, - 261,1,0,0,0,264,265,5,9,0,0,265,23,1,0,0,0,266,267,3,18,9,0,267, - 268,5,76,0,0,268,270,3,6,3,0,269,271,5,84,0,0,270,269,1,0,0,0,270, - 271,1,0,0,0,271,275,1,0,0,0,272,274,3,42,21,0,273,272,1,0,0,0,274, - 277,1,0,0,0,275,273,1,0,0,0,275,276,1,0,0,0,276,278,1,0,0,0,277, - 275,1,0,0,0,278,279,5,9,0,0,279,25,1,0,0,0,280,281,5,30,0,0,281, - 282,3,18,9,0,282,283,5,76,0,0,283,291,3,6,3,0,284,285,5,4,0,0,285, - 286,3,18,9,0,286,287,5,76,0,0,287,288,3,6,3,0,288,290,1,0,0,0,289, - 284,1,0,0,0,290,293,1,0,0,0,291,289,1,0,0,0,291,292,1,0,0,0,292, - 295,1,0,0,0,293,291,1,0,0,0,294,296,5,84,0,0,295,294,1,0,0,0,295, - 296,1,0,0,0,296,297,1,0,0,0,297,298,5,9,0,0,298,27,1,0,0,0,299,300, - 5,9,0,0,300,302,5,1,0,0,301,303,3,30,15,0,302,301,1,0,0,0,303,304, - 1,0,0,0,304,302,1,0,0,0,304,305,1,0,0,0,305,306,1,0,0,0,306,307, - 5,2,0,0,307,29,1,0,0,0,308,311,3,34,17,0,309,311,3,32,16,0,310,308, - 1,0,0,0,310,309,1,0,0,0,311,31,1,0,0,0,312,316,3,50,25,0,313,316, - 3,58,29,0,314,316,3,60,30,0,315,312,1,0,0,0,315,313,1,0,0,0,315, - 314,1,0,0,0,316,33,1,0,0,0,317,322,3,36,18,0,318,322,3,20,10,0,319, - 322,3,38,19,0,320,322,3,48,24,0,321,317,1,0,0,0,321,318,1,0,0,0, - 321,319,1,0,0,0,321,320,1,0,0,0,322,323,1,0,0,0,323,324,5,9,0,0, - 324,35,1,0,0,0,325,331,3,18,9,0,326,332,5,76,0,0,327,332,5,66,0, - 0,328,332,5,67,0,0,329,332,5,68,0,0,330,332,5,69,0,0,331,326,1,0, - 0,0,331,327,1,0,0,0,331,328,1,0,0,0,331,329,1,0,0,0,331,330,1,0, - 0,0,332,333,1,0,0,0,333,334,3,6,3,0,334,37,1,0,0,0,335,337,5,29, - 0,0,336,335,1,0,0,0,336,337,1,0,0,0,337,339,1,0,0,0,338,340,5,16, - 0,0,339,338,1,0,0,0,339,340,1,0,0,0,340,341,1,0,0,0,341,346,3,18, - 9,0,342,343,5,74,0,0,343,345,3,18,9,0,344,342,1,0,0,0,345,348,1, - 0,0,0,346,344,1,0,0,0,346,347,1,0,0,0,347,349,1,0,0,0,348,346,1, - 0,0,0,349,352,3,0,0,0,350,351,5,76,0,0,351,353,3,6,3,0,352,350,1, - 
0,0,0,352,353,1,0,0,0,353,358,1,0,0,0,354,355,5,59,0,0,355,356,3, - 6,3,0,356,357,5,60,0,0,357,359,1,0,0,0,358,354,1,0,0,0,358,359,1, - 0,0,0,359,363,1,0,0,0,360,362,3,42,21,0,361,360,1,0,0,0,362,365, - 1,0,0,0,363,361,1,0,0,0,363,364,1,0,0,0,364,39,1,0,0,0,365,363,1, - 0,0,0,366,367,3,38,19,0,367,368,5,9,0,0,368,41,1,0,0,0,369,377,5, - 45,0,0,370,377,5,46,0,0,371,372,5,47,0,0,372,373,3,44,22,0,373,374, - 5,83,0,0,374,375,3,46,23,0,375,377,1,0,0,0,376,369,1,0,0,0,376,370, - 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,89,0,0,379,45,1, - 0,0,0,380,381,5,89,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, - 6,3,0,384,383,1,0,0,0,384,385,1,0,0,0,385,49,1,0,0,0,386,390,3,52, - 26,0,387,389,3,54,27,0,388,387,1,0,0,0,389,392,1,0,0,0,390,388,1, - 0,0,0,390,391,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,393,395,3, - 56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,51,1,0,0,0,396,397,5, - 18,0,0,397,398,3,6,3,0,398,399,5,82,0,0,399,400,3,28,14,0,400,53, - 1,0,0,0,401,402,5,19,0,0,402,403,3,6,3,0,403,404,5,82,0,0,404,405, - 3,28,14,0,405,55,1,0,0,0,406,407,5,20,0,0,407,408,5,82,0,0,408,409, - 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,89,0,0,412,413, - 5,23,0,0,413,414,3,6,3,0,414,415,5,48,0,0,415,416,3,6,3,0,416,418, - 5,24,0,0,417,419,5,75,0,0,418,417,1,0,0,0,418,419,1,0,0,0,419,420, - 1,0,0,0,420,421,7,1,0,0,421,422,5,82,0,0,422,423,3,28,14,0,423,59, - 1,0,0,0,424,425,5,22,0,0,425,426,3,6,3,0,426,427,5,82,0,0,427,428, - 3,28,14,0,428,61,1,0,0,0,429,432,3,64,32,0,430,432,5,9,0,0,431,429, - 1,0,0,0,431,430,1,0,0,0,432,433,1,0,0,0,433,431,1,0,0,0,433,434, - 1,0,0,0,434,435,1,0,0,0,435,436,5,0,0,1,436,63,1,0,0,0,437,438,5, - 31,0,0,438,439,5,89,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, - 5,82,0,0,442,443,5,9,0,0,443,452,5,1,0,0,444,453,3,72,36,0,445,453, - 3,76,38,0,446,453,3,78,39,0,447,453,3,86,43,0,448,453,3,88,44,0, - 449,453,3,68,34,0,450,453,3,70,35,0,451,453,3,74,37,0,452,444,1, - 0,0,0,452,445,1,0,0,0,452,446,1,0,0,0,452,447,1,0,0,0,452,448,1, - 
0,0,0,452,449,1,0,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1, - 0,0,0,454,452,1,0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5, - 2,0,0,457,67,1,0,0,0,458,459,5,40,0,0,459,460,5,49,0,0,460,465,5, - 89,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, - 1,0,0,0,465,463,1,0,0,0,465,466,1,0,0,0,466,468,1,0,0,0,467,465, - 1,0,0,0,468,469,5,50,0,0,469,470,5,82,0,0,470,471,3,28,14,0,471, - 69,1,0,0,0,472,473,5,41,0,0,473,474,5,49,0,0,474,479,3,6,3,0,475, - 476,5,74,0,0,476,478,3,92,46,0,477,475,1,0,0,0,478,481,1,0,0,0,479, - 477,1,0,0,0,479,480,1,0,0,0,480,482,1,0,0,0,481,479,1,0,0,0,482, - 483,5,50,0,0,483,484,5,82,0,0,484,485,3,28,14,0,485,71,1,0,0,0,486, - 487,7,2,0,0,487,488,5,82,0,0,488,489,5,9,0,0,489,491,5,1,0,0,490, - 492,3,40,20,0,491,490,1,0,0,0,492,493,1,0,0,0,493,491,1,0,0,0,493, - 494,1,0,0,0,494,495,1,0,0,0,495,496,5,2,0,0,496,73,1,0,0,0,497,498, - 5,35,0,0,498,499,5,82,0,0,499,500,3,28,14,0,500,75,1,0,0,0,501,502, - 5,36,0,0,502,503,5,82,0,0,503,504,5,9,0,0,504,508,5,1,0,0,505,509, - 3,22,11,0,506,509,3,24,12,0,507,509,3,26,13,0,508,505,1,0,0,0,508, - 506,1,0,0,0,508,507,1,0,0,0,509,510,1,0,0,0,510,508,1,0,0,0,510, - 511,1,0,0,0,511,512,1,0,0,0,512,513,5,2,0,0,513,77,1,0,0,0,514,515, - 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,536,5,1,0,0,518,521, - 3,80,40,0,519,521,3,82,41,0,520,518,1,0,0,0,520,519,1,0,0,0,521, - 534,1,0,0,0,522,531,5,49,0,0,523,528,3,90,45,0,524,525,5,74,0,0, - 525,527,3,90,45,0,526,524,1,0,0,0,527,530,1,0,0,0,528,526,1,0,0, - 0,528,529,1,0,0,0,529,532,1,0,0,0,530,528,1,0,0,0,531,523,1,0,0, - 0,531,532,1,0,0,0,532,533,1,0,0,0,533,535,5,50,0,0,534,522,1,0,0, - 0,534,535,1,0,0,0,535,537,1,0,0,0,536,520,1,0,0,0,537,538,1,0,0, - 0,538,536,1,0,0,0,538,539,1,0,0,0,539,540,1,0,0,0,540,541,5,2,0, - 0,541,79,1,0,0,0,542,547,5,89,0,0,543,544,5,56,0,0,544,545,3,6,3, - 0,545,546,5,58,0,0,546,548,1,0,0,0,547,543,1,0,0,0,547,548,1,0,0, - 0,548,549,1,0,0,0,549,553,5,57,0,0,550,552,3,84,42,0,551,550,1,0, - 
0,0,552,555,1,0,0,0,553,551,1,0,0,0,553,554,1,0,0,0,554,556,1,0, - 0,0,555,553,1,0,0,0,556,557,5,42,0,0,557,558,5,9,0,0,558,81,1,0, - 0,0,559,564,5,89,0,0,560,561,5,56,0,0,561,562,3,6,3,0,562,563,5, - 58,0,0,563,565,1,0,0,0,564,560,1,0,0,0,564,565,1,0,0,0,565,566,1, - 0,0,0,566,567,3,0,0,0,567,568,5,57,0,0,568,569,5,39,0,0,569,570, - 5,9,0,0,570,83,1,0,0,0,571,574,5,43,0,0,572,574,5,44,0,0,573,571, - 1,0,0,0,573,572,1,0,0,0,574,85,1,0,0,0,575,576,5,38,0,0,576,577, - 5,82,0,0,577,578,5,9,0,0,578,581,5,1,0,0,579,582,5,42,0,0,580,582, - 5,39,0,0,581,579,1,0,0,0,581,580,1,0,0,0,582,595,1,0,0,0,583,592, - 5,49,0,0,584,589,3,90,45,0,585,586,5,74,0,0,586,588,3,90,45,0,587, - 585,1,0,0,0,588,591,1,0,0,0,589,587,1,0,0,0,589,590,1,0,0,0,590, - 593,1,0,0,0,591,589,1,0,0,0,592,584,1,0,0,0,592,593,1,0,0,0,593, - 594,1,0,0,0,594,596,5,50,0,0,595,583,1,0,0,0,595,596,1,0,0,0,596, - 597,1,0,0,0,597,598,5,9,0,0,598,599,5,2,0,0,599,87,1,0,0,0,600,601, - 5,15,0,0,601,602,5,89,0,0,602,611,5,49,0,0,603,608,3,90,45,0,604, - 605,5,74,0,0,605,607,3,90,45,0,606,604,1,0,0,0,607,610,1,0,0,0,608, - 606,1,0,0,0,608,609,1,0,0,0,609,612,1,0,0,0,610,608,1,0,0,0,611, - 603,1,0,0,0,611,612,1,0,0,0,612,613,1,0,0,0,613,615,5,50,0,0,614, - 616,3,0,0,0,615,614,1,0,0,0,615,616,1,0,0,0,616,617,1,0,0,0,617, - 618,5,82,0,0,618,619,3,28,14,0,619,89,1,0,0,0,620,621,5,89,0,0,621, - 622,3,0,0,0,622,91,1,0,0,0,623,624,5,89,0,0,624,625,5,76,0,0,625, - 626,7,3,0,0,626,93,1,0,0,0,69,100,111,116,122,124,128,143,152,158, - 179,181,188,193,198,205,214,218,225,230,240,243,248,256,261,270, - 275,291,295,304,310,315,321,331,336,339,346,352,358,363,376,384, - 390,394,418,431,433,452,454,465,479,493,508,510,520,528,531,534, - 538,547,553,564,573,581,589,592,595,608,611,615 + 9,9,1,9,1,9,3,9,236,8,9,1,10,1,10,1,10,1,10,1,10,5,10,243,8,10,10, + 10,12,10,246,9,10,3,10,248,8,10,1,10,1,10,1,11,3,11,253,8,11,1,11, + 1,11,1,11,1,11,1,11,1,11,3,11,261,8,11,1,11,5,11,264,8,11,10,11, + 
12,11,267,9,11,1,11,1,11,1,12,1,12,1,12,1,12,3,12,275,8,12,1,12, + 5,12,278,8,12,10,12,12,12,281,9,12,1,12,1,12,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,13,1,13,5,13,294,8,13,10,13,12,13,297,9,13,1,13, + 3,13,300,8,13,1,13,1,13,1,14,1,14,1,14,4,14,307,8,14,11,14,12,14, + 308,1,14,1,14,1,15,1,15,3,15,315,8,15,1,16,1,16,1,16,3,16,320,8, + 16,1,17,1,17,1,17,1,17,3,17,326,8,17,1,17,1,17,1,18,1,18,1,18,1, + 18,1,18,1,18,3,18,336,8,18,1,18,1,18,1,19,3,19,341,8,19,1,19,3,19, + 344,8,19,1,19,1,19,1,19,5,19,349,8,19,10,19,12,19,352,9,19,1,19, + 1,19,1,19,3,19,357,8,19,1,19,1,19,1,19,1,19,3,19,363,8,19,1,19,5, + 19,366,8,19,10,19,12,19,369,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1, + 21,1,21,1,21,1,21,3,21,381,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3, + 24,389,8,24,1,25,1,25,5,25,393,8,25,10,25,12,25,396,9,25,1,25,3, + 25,399,8,25,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1, + 28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,423, + 8,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31, + 436,8,31,11,31,12,31,437,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33, + 1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,457,8,33,11,33, + 12,33,458,1,33,1,33,1,34,1,34,1,34,1,34,1,34,5,34,468,8,34,10,34, + 12,34,471,9,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35, + 482,8,35,10,35,12,35,485,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36, + 1,36,1,36,4,36,496,8,36,11,36,12,36,497,1,36,1,36,1,37,1,37,1,37, + 1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,513,8,38,11,38,12,38, + 514,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,3,39,525,8,39,1,39,1, + 39,1,39,1,39,5,39,531,8,39,10,39,12,39,534,9,39,3,39,536,8,39,1, + 39,3,39,539,8,39,4,39,541,8,39,11,39,12,39,542,1,39,1,39,1,40,1, + 40,1,40,1,40,1,40,3,40,552,8,40,1,40,1,40,5,40,556,8,40,10,40,12, + 40,559,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,569,8,41, + 1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,578,8,42,1,43,1,43,1,43, + 1,43,1,43,1,43,3,43,586,8,43,1,43,1,43,1,43,1,43,5,43,592,8,43,10, + 
43,12,43,595,9,43,3,43,597,8,43,1,43,3,43,600,8,43,1,43,1,43,1,43, + 1,44,1,44,1,44,1,44,1,44,1,44,5,44,611,8,44,10,44,12,44,614,9,44, + 3,44,616,8,44,1,44,1,44,3,44,620,8,44,1,44,1,44,1,44,1,45,1,45,1, + 45,1,46,1,46,1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18, + 20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62, + 64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75, + 1,0,90,91,1,0,32,34,3,0,25,25,87,88,90,91,694,0,100,1,0,0,0,2,111, + 1,0,0,0,4,128,1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0, + 12,205,1,0,0,0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,237, + 1,0,0,0,22,252,1,0,0,0,24,270,1,0,0,0,26,284,1,0,0,0,28,303,1,0, + 0,0,30,314,1,0,0,0,32,319,1,0,0,0,34,325,1,0,0,0,36,329,1,0,0,0, + 38,340,1,0,0,0,40,370,1,0,0,0,42,380,1,0,0,0,44,382,1,0,0,0,46,384, + 1,0,0,0,48,386,1,0,0,0,50,390,1,0,0,0,52,400,1,0,0,0,54,405,1,0, + 0,0,56,410,1,0,0,0,58,414,1,0,0,0,60,428,1,0,0,0,62,435,1,0,0,0, + 64,441,1,0,0,0,66,445,1,0,0,0,68,462,1,0,0,0,70,476,1,0,0,0,72,490, + 1,0,0,0,74,501,1,0,0,0,76,505,1,0,0,0,78,518,1,0,0,0,80,546,1,0, + 0,0,82,563,1,0,0,0,84,577,1,0,0,0,86,579,1,0,0,0,88,604,1,0,0,0, + 90,624,1,0,0,0,92,627,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96, + 101,5,12,0,0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100, + 94,1,0,0,0,100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1, + 0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49, + 0,0,104,105,3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,90, + 0,0,108,109,5,79,0,0,109,112,3,2,1,2,110,112,5,89,0,0,111,102,1, + 0,0,0,111,107,1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10, + 3,0,0,114,117,5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115, + 1,0,0,0,117,118,1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121, + 5,78,0,0,121,123,3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126, + 1,0,0,0,124,122,1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1, + 0,0,0,127,129,7,0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1, + 
0,0,0,130,131,5,90,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5, + 49,0,0,134,135,3,6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138, + 3,10,5,0,138,139,3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144, + 3,6,3,4,142,144,3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140, + 1,0,0,0,143,142,1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147, + 5,78,0,0,147,180,3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150, + 153,5,79,0,0,151,153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152, + 151,1,0,0,0,153,154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156, + 159,5,51,0,0,157,159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159, + 160,1,0,0,0,160,180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163, + 164,3,6,3,7,164,180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167, + 168,3,6,3,6,168,180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171, + 172,3,6,3,4,172,180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175, + 176,3,6,3,0,176,177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179, + 145,1,0,0,0,179,148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179, + 165,1,0,0,0,179,169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181, + 179,1,0,0,0,181,182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194, + 3,20,10,0,185,194,5,87,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188, + 187,1,0,0,0,188,189,1,0,0,0,189,194,1,0,0,0,190,194,5,88,0,0,191, + 194,5,25,0,0,192,194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193, + 186,1,0,0,0,193,190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194, + 9,1,0,0,0,195,199,5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198, + 195,1,0,0,0,198,196,1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206, + 5,55,0,0,201,206,5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204, + 206,5,62,0,0,205,200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205, + 203,1,0,0,0,205,204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208, + 215,5,65,0,0,209,215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0, + 212,215,5,73,0,0,213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0, + 0,214,209,1,0,0,0,214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0, + 
0,214,213,1,0,0,0,215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0, + 0,218,216,1,0,0,0,218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,89,0, + 0,221,222,5,56,0,0,222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0, + 0,0,225,221,1,0,0,0,225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85, + 0,0,228,227,1,0,0,0,229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0, + 0,0,231,235,1,0,0,0,232,230,1,0,0,0,233,234,5,86,0,0,234,236,3,18, + 9,0,235,233,1,0,0,0,235,236,1,0,0,0,236,19,1,0,0,0,237,238,5,89, + 0,0,238,247,5,49,0,0,239,244,3,6,3,0,240,241,5,74,0,0,241,243,3, + 6,3,0,242,240,1,0,0,0,243,246,1,0,0,0,244,242,1,0,0,0,244,245,1, + 0,0,0,245,248,1,0,0,0,246,244,1,0,0,0,247,239,1,0,0,0,247,248,1, + 0,0,0,248,249,1,0,0,0,249,250,5,50,0,0,250,21,1,0,0,0,251,253,5, + 29,0,0,252,251,1,0,0,0,252,253,1,0,0,0,253,254,1,0,0,0,254,255,5, + 16,0,0,255,256,5,89,0,0,256,257,3,0,0,0,257,258,5,76,0,0,258,260, + 3,6,3,0,259,261,5,84,0,0,260,259,1,0,0,0,260,261,1,0,0,0,261,265, + 1,0,0,0,262,264,3,42,21,0,263,262,1,0,0,0,264,267,1,0,0,0,265,263, + 1,0,0,0,265,266,1,0,0,0,266,268,1,0,0,0,267,265,1,0,0,0,268,269, + 5,9,0,0,269,23,1,0,0,0,270,271,3,18,9,0,271,272,5,76,0,0,272,274, + 3,6,3,0,273,275,5,84,0,0,274,273,1,0,0,0,274,275,1,0,0,0,275,279, + 1,0,0,0,276,278,3,42,21,0,277,276,1,0,0,0,278,281,1,0,0,0,279,277, + 1,0,0,0,279,280,1,0,0,0,280,282,1,0,0,0,281,279,1,0,0,0,282,283, + 5,9,0,0,283,25,1,0,0,0,284,285,5,30,0,0,285,286,3,18,9,0,286,287, + 5,76,0,0,287,295,3,6,3,0,288,289,5,4,0,0,289,290,3,18,9,0,290,291, + 5,76,0,0,291,292,3,6,3,0,292,294,1,0,0,0,293,288,1,0,0,0,294,297, + 1,0,0,0,295,293,1,0,0,0,295,296,1,0,0,0,296,299,1,0,0,0,297,295, + 1,0,0,0,298,300,5,84,0,0,299,298,1,0,0,0,299,300,1,0,0,0,300,301, + 1,0,0,0,301,302,5,9,0,0,302,27,1,0,0,0,303,304,5,9,0,0,304,306,5, + 1,0,0,305,307,3,30,15,0,306,305,1,0,0,0,307,308,1,0,0,0,308,306, + 1,0,0,0,308,309,1,0,0,0,309,310,1,0,0,0,310,311,5,2,0,0,311,29,1, + 0,0,0,312,315,3,34,17,0,313,315,3,32,16,0,314,312,1,0,0,0,314,313, + 
1,0,0,0,315,31,1,0,0,0,316,320,3,50,25,0,317,320,3,58,29,0,318,320, + 3,60,30,0,319,316,1,0,0,0,319,317,1,0,0,0,319,318,1,0,0,0,320,33, + 1,0,0,0,321,326,3,36,18,0,322,326,3,20,10,0,323,326,3,38,19,0,324, + 326,3,48,24,0,325,321,1,0,0,0,325,322,1,0,0,0,325,323,1,0,0,0,325, + 324,1,0,0,0,326,327,1,0,0,0,327,328,5,9,0,0,328,35,1,0,0,0,329,335, + 3,18,9,0,330,336,5,76,0,0,331,336,5,66,0,0,332,336,5,67,0,0,333, + 336,5,68,0,0,334,336,5,69,0,0,335,330,1,0,0,0,335,331,1,0,0,0,335, + 332,1,0,0,0,335,333,1,0,0,0,335,334,1,0,0,0,336,337,1,0,0,0,337, + 338,3,6,3,0,338,37,1,0,0,0,339,341,5,29,0,0,340,339,1,0,0,0,340, + 341,1,0,0,0,341,343,1,0,0,0,342,344,5,16,0,0,343,342,1,0,0,0,343, + 344,1,0,0,0,344,345,1,0,0,0,345,350,3,18,9,0,346,347,5,74,0,0,347, + 349,3,18,9,0,348,346,1,0,0,0,349,352,1,0,0,0,350,348,1,0,0,0,350, + 351,1,0,0,0,351,353,1,0,0,0,352,350,1,0,0,0,353,356,3,0,0,0,354, + 355,5,76,0,0,355,357,3,6,3,0,356,354,1,0,0,0,356,357,1,0,0,0,357, + 362,1,0,0,0,358,359,5,59,0,0,359,360,3,6,3,0,360,361,5,60,0,0,361, + 363,1,0,0,0,362,358,1,0,0,0,362,363,1,0,0,0,363,367,1,0,0,0,364, + 366,3,42,21,0,365,364,1,0,0,0,366,369,1,0,0,0,367,365,1,0,0,0,367, + 368,1,0,0,0,368,39,1,0,0,0,369,367,1,0,0,0,370,371,3,38,19,0,371, + 372,5,9,0,0,372,41,1,0,0,0,373,381,5,45,0,0,374,381,5,46,0,0,375, + 376,5,47,0,0,376,377,3,44,22,0,377,378,5,83,0,0,378,379,3,46,23, + 0,379,381,1,0,0,0,380,373,1,0,0,0,380,374,1,0,0,0,380,375,1,0,0, + 0,381,43,1,0,0,0,382,383,5,89,0,0,383,45,1,0,0,0,384,385,5,89,0, + 0,385,47,1,0,0,0,386,388,5,17,0,0,387,389,3,6,3,0,388,387,1,0,0, + 0,388,389,1,0,0,0,389,49,1,0,0,0,390,394,3,52,26,0,391,393,3,54, + 27,0,392,391,1,0,0,0,393,396,1,0,0,0,394,392,1,0,0,0,394,395,1,0, + 0,0,395,398,1,0,0,0,396,394,1,0,0,0,397,399,3,56,28,0,398,397,1, + 0,0,0,398,399,1,0,0,0,399,51,1,0,0,0,400,401,5,18,0,0,401,402,3, + 6,3,0,402,403,5,82,0,0,403,404,3,28,14,0,404,53,1,0,0,0,405,406, + 5,19,0,0,406,407,3,6,3,0,407,408,5,82,0,0,408,409,3,28,14,0,409, + 
55,1,0,0,0,410,411,5,20,0,0,411,412,5,82,0,0,412,413,3,28,14,0,413, + 57,1,0,0,0,414,415,5,21,0,0,415,416,5,89,0,0,416,417,5,23,0,0,417, + 418,3,6,3,0,418,419,5,48,0,0,419,420,3,6,3,0,420,422,5,24,0,0,421, + 423,5,75,0,0,422,421,1,0,0,0,422,423,1,0,0,0,423,424,1,0,0,0,424, + 425,7,1,0,0,425,426,5,82,0,0,426,427,3,28,14,0,427,59,1,0,0,0,428, + 429,5,22,0,0,429,430,3,6,3,0,430,431,5,82,0,0,431,432,3,28,14,0, + 432,61,1,0,0,0,433,436,3,64,32,0,434,436,5,9,0,0,435,433,1,0,0,0, + 435,434,1,0,0,0,436,437,1,0,0,0,437,435,1,0,0,0,437,438,1,0,0,0, + 438,439,1,0,0,0,439,440,5,0,0,1,440,63,1,0,0,0,441,442,5,31,0,0, + 442,443,5,89,0,0,443,444,3,66,33,0,444,65,1,0,0,0,445,446,5,82,0, + 0,446,447,5,9,0,0,447,456,5,1,0,0,448,457,3,72,36,0,449,457,3,76, + 38,0,450,457,3,78,39,0,451,457,3,86,43,0,452,457,3,88,44,0,453,457, + 3,68,34,0,454,457,3,70,35,0,455,457,3,74,37,0,456,448,1,0,0,0,456, + 449,1,0,0,0,456,450,1,0,0,0,456,451,1,0,0,0,456,452,1,0,0,0,456, + 453,1,0,0,0,456,454,1,0,0,0,456,455,1,0,0,0,457,458,1,0,0,0,458, + 456,1,0,0,0,458,459,1,0,0,0,459,460,1,0,0,0,460,461,5,2,0,0,461, + 67,1,0,0,0,462,463,5,40,0,0,463,464,5,49,0,0,464,469,5,89,0,0,465, + 466,5,74,0,0,466,468,3,92,46,0,467,465,1,0,0,0,468,471,1,0,0,0,469, + 467,1,0,0,0,469,470,1,0,0,0,470,472,1,0,0,0,471,469,1,0,0,0,472, + 473,5,50,0,0,473,474,5,82,0,0,474,475,3,28,14,0,475,69,1,0,0,0,476, + 477,5,41,0,0,477,478,5,49,0,0,478,483,3,6,3,0,479,480,5,74,0,0,480, + 482,3,92,46,0,481,479,1,0,0,0,482,485,1,0,0,0,483,481,1,0,0,0,483, + 484,1,0,0,0,484,486,1,0,0,0,485,483,1,0,0,0,486,487,5,50,0,0,487, + 488,5,82,0,0,488,489,3,28,14,0,489,71,1,0,0,0,490,491,7,2,0,0,491, + 492,5,82,0,0,492,493,5,9,0,0,493,495,5,1,0,0,494,496,3,40,20,0,495, + 494,1,0,0,0,496,497,1,0,0,0,497,495,1,0,0,0,497,498,1,0,0,0,498, + 499,1,0,0,0,499,500,5,2,0,0,500,73,1,0,0,0,501,502,5,35,0,0,502, + 503,5,82,0,0,503,504,3,28,14,0,504,75,1,0,0,0,505,506,5,36,0,0,506, + 507,5,82,0,0,507,508,5,9,0,0,508,512,5,1,0,0,509,513,3,22,11,0,510, + 
513,3,24,12,0,511,513,3,26,13,0,512,509,1,0,0,0,512,510,1,0,0,0, + 512,511,1,0,0,0,513,514,1,0,0,0,514,512,1,0,0,0,514,515,1,0,0,0, + 515,516,1,0,0,0,516,517,5,2,0,0,517,77,1,0,0,0,518,519,5,37,0,0, + 519,520,5,82,0,0,520,521,5,9,0,0,521,540,5,1,0,0,522,525,3,80,40, + 0,523,525,3,82,41,0,524,522,1,0,0,0,524,523,1,0,0,0,525,538,1,0, + 0,0,526,535,5,49,0,0,527,532,3,90,45,0,528,529,5,74,0,0,529,531, + 3,90,45,0,530,528,1,0,0,0,531,534,1,0,0,0,532,530,1,0,0,0,532,533, + 1,0,0,0,533,536,1,0,0,0,534,532,1,0,0,0,535,527,1,0,0,0,535,536, + 1,0,0,0,536,537,1,0,0,0,537,539,5,50,0,0,538,526,1,0,0,0,538,539, + 1,0,0,0,539,541,1,0,0,0,540,524,1,0,0,0,541,542,1,0,0,0,542,540, + 1,0,0,0,542,543,1,0,0,0,543,544,1,0,0,0,544,545,5,2,0,0,545,79,1, + 0,0,0,546,551,5,89,0,0,547,548,5,56,0,0,548,549,3,6,3,0,549,550, + 5,58,0,0,550,552,1,0,0,0,551,547,1,0,0,0,551,552,1,0,0,0,552,553, + 1,0,0,0,553,557,5,57,0,0,554,556,3,84,42,0,555,554,1,0,0,0,556,559, + 1,0,0,0,557,555,1,0,0,0,557,558,1,0,0,0,558,560,1,0,0,0,559,557, + 1,0,0,0,560,561,5,42,0,0,561,562,5,9,0,0,562,81,1,0,0,0,563,568, + 5,89,0,0,564,565,5,56,0,0,565,566,3,6,3,0,566,567,5,58,0,0,567,569, + 1,0,0,0,568,564,1,0,0,0,568,569,1,0,0,0,569,570,1,0,0,0,570,571, + 3,0,0,0,571,572,5,57,0,0,572,573,5,39,0,0,573,574,5,9,0,0,574,83, + 1,0,0,0,575,578,5,43,0,0,576,578,5,44,0,0,577,575,1,0,0,0,577,576, + 1,0,0,0,578,85,1,0,0,0,579,580,5,38,0,0,580,581,5,82,0,0,581,582, + 5,9,0,0,582,585,5,1,0,0,583,586,5,42,0,0,584,586,5,39,0,0,585,583, + 1,0,0,0,585,584,1,0,0,0,586,599,1,0,0,0,587,596,5,49,0,0,588,593, + 3,90,45,0,589,590,5,74,0,0,590,592,3,90,45,0,591,589,1,0,0,0,592, + 595,1,0,0,0,593,591,1,0,0,0,593,594,1,0,0,0,594,597,1,0,0,0,595, + 593,1,0,0,0,596,588,1,0,0,0,596,597,1,0,0,0,597,598,1,0,0,0,598, + 600,5,50,0,0,599,587,1,0,0,0,599,600,1,0,0,0,600,601,1,0,0,0,601, + 602,5,9,0,0,602,603,5,2,0,0,603,87,1,0,0,0,604,605,5,15,0,0,605, + 606,5,89,0,0,606,615,5,49,0,0,607,612,3,90,45,0,608,609,5,74,0,0, + 
609,611,3,90,45,0,610,608,1,0,0,0,611,614,1,0,0,0,612,610,1,0,0, + 0,612,613,1,0,0,0,613,616,1,0,0,0,614,612,1,0,0,0,615,607,1,0,0, + 0,615,616,1,0,0,0,616,617,1,0,0,0,617,619,5,50,0,0,618,620,3,0,0, + 0,619,618,1,0,0,0,619,620,1,0,0,0,620,621,1,0,0,0,621,622,5,82,0, + 0,622,623,3,28,14,0,623,89,1,0,0,0,624,625,5,89,0,0,625,626,3,0, + 0,0,626,91,1,0,0,0,627,628,5,89,0,0,628,629,5,76,0,0,629,630,7,3, + 0,0,630,93,1,0,0,0,70,100,111,116,122,124,128,143,152,158,179,181, + 188,193,198,205,214,218,225,230,235,244,247,252,260,265,274,279, + 295,299,308,314,319,325,335,340,343,350,356,362,367,380,388,394, + 398,422,435,437,456,458,469,483,497,512,514,524,532,535,538,542, + 551,557,568,577,585,593,596,599,612,615,619 ] class PyNestMLParser ( Parser ): @@ -1481,6 +1483,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.parser = parser self.name = None # Token self.vectorParameter = None # ExpressionContext + self.attribute = None # VariableContext def NAME(self): return self.getToken(PyNestMLParser.NAME, 0) @@ -1497,10 +1500,17 @@ def DIFFERENTIAL_ORDER(self, i:int=None): else: return self.getToken(PyNestMLParser.DIFFERENTIAL_ORDER, i) + def FULLSTOP(self): + return self.getToken(PyNestMLParser.FULLSTOP, 0) + def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def variable(self): + return self.getTypedRuleContext(PyNestMLParser.VariableContext,0) + + def getRuleIndex(self): return PyNestMLParser.RULE_variable @@ -1544,6 +1554,16 @@ def variable(self): self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) + self.state = 235 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,19,self._ctx) + if la_ == 1: + self.state = 233 + self.match(PyNestMLParser.FULLSTOP) + self.state = 234 + localctx.attribute = self.variable() + + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1602,31 
+1622,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 233 + self.state = 237 localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 234 + self.state = 238 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 243 + self.state = 247 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): - self.state = 235 + self.state = 239 self.expression(0) - self.state = 240 + self.state = 244 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 236 + self.state = 240 self.match(PyNestMLParser.COMMA) - self.state = 237 + self.state = 241 self.expression(0) - self.state = 242 + self.state = 246 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 245 + self.state = 249 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ -1699,43 +1719,43 @@ def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 248 + self.state = 252 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 247 + self.state = 251 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 250 + self.state = 254 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 251 + self.state = 255 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 252 + self.state = 256 self.dataType() - self.state = 253 + self.state = 257 self.match(PyNestMLParser.EQUALS) - self.state = 254 + self.state = 258 self.expression(0) - self.state = 256 + self.state = 260 self._errHandler.sync(self) _la = self._input.LA(1) if _la==84: - self.state = 255 + self.state = 259 self.match(PyNestMLParser.SEMICOLON) - self.state = 261 + self.state = 265 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << 
_la) & 246290604621824) != 0): - self.state = 258 + self.state = 262 localctx.decorator = self.anyDecorator() - self.state = 263 + self.state = 267 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 264 + self.state = 268 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1799,31 +1819,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 266 + self.state = 270 localctx.lhs = self.variable() - self.state = 267 + self.state = 271 self.match(PyNestMLParser.EQUALS) - self.state = 268 + self.state = 272 localctx.rhs = self.expression(0) - self.state = 270 + self.state = 274 self._errHandler.sync(self) _la = self._input.LA(1) if _la==84: - self.state = 269 + self.state = 273 self.match(PyNestMLParser.SEMICOLON) - self.state = 275 + self.state = 279 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 272 + self.state = 276 localctx.decorator = self.anyDecorator() - self.state = 277 + self.state = 281 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 278 + self.state = 282 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1895,39 +1915,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 280 + self.state = 284 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 281 + self.state = 285 self.variable() - self.state = 282 + self.state = 286 self.match(PyNestMLParser.EQUALS) - self.state = 283 + self.state = 287 self.expression(0) - self.state = 291 + self.state = 295 self._errHandler.sync(self) _la = self._input.LA(1) while _la==4: - self.state = 284 + self.state = 288 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 285 + self.state = 289 self.variable() - self.state = 286 + self.state = 290 self.match(PyNestMLParser.EQUALS) - self.state = 287 + 
self.state = 291 self.expression(0) - self.state = 293 + self.state = 297 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 295 + self.state = 299 self._errHandler.sync(self) _la = self._input.LA(1) if _la==84: - self.state = 294 + self.state = 298 self.match(PyNestMLParser.SEMICOLON) - self.state = 297 + self.state = 301 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1980,23 +2000,23 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 299 + self.state = 303 self.match(PyNestMLParser.NEWLINE) - self.state = 300 + self.state = 304 self.match(PyNestMLParser.INDENT) - self.state = 302 + self.state = 306 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 301 + self.state = 305 self.stmt() - self.state = 304 + self.state = 308 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==89): break - self.state = 306 + self.state = 310 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2039,17 +2059,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_stmt) try: - self.state = 310 + self.state = 314 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 17, 29, 89]: self.enterOuterAlt(localctx, 1) - self.state = 308 + self.state = 312 self.smallStmt() pass elif token in [18, 21, 22]: self.enterOuterAlt(localctx, 2) - self.state = 309 + self.state = 313 self.compoundStmt() pass else: @@ -2100,22 +2120,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_compoundStmt) try: - self.state = 315 + self.state = 319 self._errHandler.sync(self) token = self._input.LA(1) if token in [18]: self.enterOuterAlt(localctx, 1) - self.state = 312 + self.state = 
316 self.ifStmt() pass elif token in [21]: self.enterOuterAlt(localctx, 2) - self.state = 313 + self.state = 317 self.forStmt() pass elif token in [22]: self.enterOuterAlt(localctx, 3) - self.state = 314 + self.state = 318 self.whileStmt() pass else: @@ -2174,31 +2194,31 @@ def smallStmt(self): self.enterRule(localctx, 34, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 321 + self.state = 325 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,31,self._ctx) + la_ = self._interp.adaptivePredict(self._input,32,self._ctx) if la_ == 1: - self.state = 317 + self.state = 321 self.assignment() pass elif la_ == 2: - self.state = 318 + self.state = 322 self.functionCall() pass elif la_ == 3: - self.state = 319 + self.state = 323 self.declaration() pass elif la_ == 4: - self.state = 320 + self.state = 324 self.returnStmt() pass - self.state = 323 + self.state = 327 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2263,35 +2283,35 @@ def assignment(self): self.enterRule(localctx, 36, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 325 + self.state = 329 localctx.lhs_variable = self.variable() - self.state = 331 + self.state = 335 self._errHandler.sync(self) token = self._input.LA(1) if token in [76]: - self.state = 326 + self.state = 330 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass elif token in [66]: - self.state = 327 + self.state = 331 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass elif token in [67]: - self.state = 328 + self.state = 332 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass elif token in [68]: - self.state = 329 + self.state = 333 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass elif token in [69]: - self.state = 330 + self.state = 334 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass else: raise NoViableAltException(self) 
- self.state = 333 + self.state = 337 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2379,67 +2399,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 336 + self.state = 340 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 335 + self.state = 339 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 339 + self.state = 343 self._errHandler.sync(self) _la = self._input.LA(1) if _la==16: - self.state = 338 + self.state = 342 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 341 + self.state = 345 self.variable() - self.state = 346 + self.state = 350 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 342 + self.state = 346 self.match(PyNestMLParser.COMMA) - self.state = 343 + self.state = 347 self.variable() - self.state = 348 + self.state = 352 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 349 + self.state = 353 self.dataType() - self.state = 352 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) if _la==76: - self.state = 350 + self.state = 354 self.match(PyNestMLParser.EQUALS) - self.state = 351 + self.state = 355 localctx.rhs = self.expression(0) - self.state = 358 + self.state = 362 self._errHandler.sync(self) _la = self._input.LA(1) if _la==59: - self.state = 354 + self.state = 358 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 355 + self.state = 359 localctx.invariant = self.expression(0) - self.state = 356 + self.state = 360 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 363 + self.state = 367 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 360 + self.state = 364 localctx.decorator = self.anyDecorator() - self.state = 365 + self.state = 369 self._errHandler.sync(self) _la = self._input.LA(1) @@ 
-2484,9 +2504,9 @@ def declaration_newline(self): self.enterRule(localctx, 40, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 366 + self.state = 370 self.declaration() - self.state = 367 + self.state = 371 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2541,28 +2561,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_anyDecorator) try: - self.state = 376 + self.state = 380 self._errHandler.sync(self) token = self._input.LA(1) if token in [45]: self.enterOuterAlt(localctx, 1) - self.state = 369 + self.state = 373 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass elif token in [46]: self.enterOuterAlt(localctx, 2) - self.state = 370 + self.state = 374 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass elif token in [47]: self.enterOuterAlt(localctx, 3) - self.state = 371 + self.state = 375 self.match(PyNestMLParser.AT) - self.state = 372 + self.state = 376 self.namespaceDecoratorNamespace() - self.state = 373 + self.state = 377 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 374 + self.state = 378 self.namespaceDecoratorName() pass else: @@ -2606,7 +2626,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 44, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 378 + self.state = 382 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2646,7 +2666,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 46, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 380 + self.state = 384 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2690,13 +2710,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 382 + self.state 
= 386 self.match(PyNestMLParser.RETURN_KEYWORD) - self.state = 384 + self.state = 388 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): - self.state = 383 + self.state = 387 self.expression(0) @@ -2750,23 +2770,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 386 - self.ifClause() self.state = 390 + self.ifClause() + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) while _la==19: - self.state = 387 + self.state = 391 self.elifClause() - self.state = 392 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 394 + self.state = 398 self._errHandler.sync(self) _la = self._input.LA(1) if _la==20: - self.state = 393 + self.state = 397 self.elseClause() @@ -2818,13 +2838,13 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 396 + self.state = 400 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 397 + self.state = 401 self.expression(0) - self.state = 398 + self.state = 402 self.match(PyNestMLParser.COLON) - self.state = 399 + self.state = 403 self.block() except RecognitionException as re: localctx.exception = re @@ -2874,13 +2894,13 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 401 + self.state = 405 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 402 + self.state = 406 self.expression(0) - self.state = 403 + self.state = 407 self.match(PyNestMLParser.COLON) - self.state = 404 + self.state = 408 self.block() except RecognitionException as re: localctx.exception = re @@ -2926,11 +2946,11 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: self.enterOuterAlt(localctx, 1) - self.state = 406 + self.state = 410 
self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 407 + self.state = 411 self.match(PyNestMLParser.COLON) - self.state = 408 + self.state = 412 self.block() except RecognitionException as re: localctx.exception = re @@ -3009,39 +3029,39 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 410 + self.state = 414 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 411 + self.state = 415 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 412 + self.state = 416 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 413 + self.state = 417 localctx.start_from = self.expression(0) - self.state = 414 + self.state = 418 self.match(PyNestMLParser.ELLIPSIS) - self.state = 415 + self.state = 419 localctx.end_at = self.expression(0) - self.state = 416 + self.state = 420 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 418 + self.state = 422 self._errHandler.sync(self) _la = self._input.LA(1) if _la==75: - self.state = 417 + self.state = 421 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 420 + self.state = 424 _la = self._input.LA(1) if not(_la==90 or _la==91): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 421 + self.state = 425 self.match(PyNestMLParser.COLON) - self.state = 422 + self.state = 426 self.block() except RecognitionException as re: localctx.exception = re @@ -3091,13 +3111,13 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 424 + self.state = 428 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 425 + self.state = 429 self.expression(0) - self.state = 426 + self.state = 430 self.match(PyNestMLParser.COLON) - self.state = 427 + self.state = 431 self.block() except RecognitionException as re: localctx.exception = re @@ -3150,31 +3170,31 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - 
self.state = 431 + self.state = 435 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 431 + self.state = 435 self._errHandler.sync(self) token = self._input.LA(1) if token in [31]: - self.state = 429 + self.state = 433 self.model() pass elif token in [9]: - self.state = 430 + self.state = 434 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 433 + self.state = 437 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==9 or _la==31): break - self.state = 435 + self.state = 439 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3220,11 +3240,11 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 437 + self.state = 441 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 438 + self.state = 442 self.match(PyNestMLParser.NAME) - self.state = 439 + self.state = 443 self.modelBody() except RecognitionException as re: localctx.exception = re @@ -3329,61 +3349,61 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 441 + self.state = 445 self.match(PyNestMLParser.COLON) - self.state = 442 + self.state = 446 self.match(PyNestMLParser.NEWLINE) - self.state = 443 + self.state = 447 self.match(PyNestMLParser.INDENT) - self.state = 452 + self.state = 456 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 452 + self.state = 456 self._errHandler.sync(self) token = self._input.LA(1) if token in [32, 33, 34]: - self.state = 444 + self.state = 448 self.blockWithVariables() pass elif token in [36]: - self.state = 445 + self.state = 449 self.equationsBlock() pass elif token in [37]: - self.state = 446 + self.state = 450 self.inputBlock() pass elif token in [38]: - self.state = 447 + self.state = 451 self.outputBlock() pass elif token in [15]: - self.state = 448 + self.state = 452 self.function() pass elif token in 
[40]: - self.state = 449 + self.state = 453 self.onReceiveBlock() pass elif token in [41]: - self.state = 450 + self.state = 454 self.onConditionBlock() pass elif token in [35]: - self.state = 451 + self.state = 455 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 454 + self.state = 458 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): break - self.state = 456 + self.state = 460 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3453,29 +3473,29 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 458 + self.state = 462 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 459 + self.state = 463 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 460 + self.state = 464 localctx.inputPortName = self.match(PyNestMLParser.NAME) - self.state = 465 + self.state = 469 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 461 + self.state = 465 self.match(PyNestMLParser.COMMA) - self.state = 462 + self.state = 466 self.constParameter() - self.state = 467 + self.state = 471 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 468 + self.state = 472 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 469 + self.state = 473 self.match(PyNestMLParser.COLON) - self.state = 470 + self.state = 474 self.block() except RecognitionException as re: localctx.exception = re @@ -3546,29 +3566,29 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 472 + self.state = 476 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 473 + self.state = 477 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 474 + self.state = 478 localctx.condition = self.expression(0) - self.state = 479 + self.state = 483 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - 
self.state = 475 + self.state = 479 self.match(PyNestMLParser.COMMA) - self.state = 476 + self.state = 480 self.constParameter() - self.state = 481 + self.state = 485 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 482 + self.state = 486 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 483 + self.state = 487 self.match(PyNestMLParser.COLON) - self.state = 484 + self.state = 488 self.block() except RecognitionException as re: localctx.exception = re @@ -3634,7 +3654,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 486 + self.state = 490 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): @@ -3642,25 +3662,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 487 + self.state = 491 self.match(PyNestMLParser.COLON) - self.state = 488 + self.state = 492 self.match(PyNestMLParser.NEWLINE) - self.state = 489 + self.state = 493 self.match(PyNestMLParser.INDENT) - self.state = 491 + self.state = 495 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 490 + self.state = 494 self.declaration_newline() - self.state = 493 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==16 or _la==29 or _la==89): break - self.state = 495 + self.state = 499 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3706,11 +3726,11 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 497 + self.state = 501 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 498 + self.state = 502 self.match(PyNestMLParser.COLON) - self.state = 499 + self.state = 503 self.block() except RecognitionException as re: localctx.exception = re @@ -3783,43 +3803,43 @@ def equationsBlock(self): self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 501 + self.state = 505 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 502 + self.state = 506 self.match(PyNestMLParser.COLON) - self.state = 503 + self.state = 507 self.match(PyNestMLParser.NEWLINE) - self.state = 504 + self.state = 508 self.match(PyNestMLParser.INDENT) - self.state = 508 + self.state = 512 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 508 + self.state = 512 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 29]: - self.state = 505 + self.state = 509 self.inlineExpression() pass elif token in [89]: - self.state = 506 + self.state = 510 self.odeEquation() pass elif token in [30]: - self.state = 507 + self.state = 511 self.kernel() pass else: raise NoViableAltException(self) - self.state = 510 + self.state = 514 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==89): break - self.state = 512 + self.state = 516 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3910,69 +3930,69 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 514 + self.state = 518 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 515 + self.state = 519 self.match(PyNestMLParser.COLON) - self.state = 516 + self.state = 520 self.match(PyNestMLParser.NEWLINE) - self.state = 517 + self.state = 521 self.match(PyNestMLParser.INDENT) - self.state = 536 + self.state = 540 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 520 + self.state = 524 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,53,self._ctx) + la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: - self.state = 518 + self.state = 522 self.spikeInputPort() pass elif la_ == 2: - self.state = 519 + self.state = 523 self.continuousInputPort() pass - self.state = 534 + 
self.state = 538 self._errHandler.sync(self) _la = self._input.LA(1) if _la==49: - self.state = 522 + self.state = 526 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 531 + self.state = 535 self._errHandler.sync(self) _la = self._input.LA(1) if _la==89: - self.state = 523 + self.state = 527 self.parameter() - self.state = 528 + self.state = 532 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 524 + self.state = 528 self.match(PyNestMLParser.COMMA) - self.state = 525 + self.state = 529 self.parameter() - self.state = 530 + self.state = 534 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 533 + self.state = 537 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 538 + self.state = 542 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==89): break - self.state = 540 + self.state = 544 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4040,35 +4060,35 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 542 + self.state = 546 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 547 + self.state = 551 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 543 + self.state = 547 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 544 + self.state = 548 localctx.sizeParameter = self.expression(0) - self.state = 545 + self.state = 549 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 549 - self.match(PyNestMLParser.LEFT_ANGLE_MINUS) self.state = 553 + self.match(PyNestMLParser.LEFT_ANGLE_MINUS) + self.state = 557 self._errHandler.sync(self) _la = self._input.LA(1) while _la==43 or _la==44: - self.state = 550 + self.state = 554 self.inputQualifier() - self.state = 555 + self.state = 559 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 556 + self.state = 560 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 557 + self.state = 
561 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4133,27 +4153,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 559 + self.state = 563 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 564 + self.state = 568 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 560 + self.state = 564 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 561 + self.state = 565 localctx.sizeParameter = self.expression(0) - self.state = 562 + self.state = 566 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 566 + self.state = 570 self.dataType() - self.state = 567 + self.state = 571 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 568 + self.state = 572 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 569 + self.state = 573 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4197,15 +4217,15 @@ def inputQualifier(self): self.enterRule(localctx, 84, self.RULE_inputQualifier) try: self.enterOuterAlt(localctx, 1) - self.state = 573 + self.state = 577 self._errHandler.sync(self) token = self._input.LA(1) if token in [43]: - self.state = 571 + self.state = 575 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass elif token in [44]: - self.state = 572 + self.state = 576 localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass else: @@ -4292,61 +4312,61 @@ def outputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 575 + self.state = 579 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 576 + self.state = 580 self.match(PyNestMLParser.COLON) - self.state = 577 + self.state = 581 self.match(PyNestMLParser.NEWLINE) - self.state = 578 + self.state = 582 self.match(PyNestMLParser.INDENT) - self.state = 581 + self.state = 585 self._errHandler.sync(self) token = 
self._input.LA(1) if token in [42]: - self.state = 579 + self.state = 583 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [39]: - self.state = 580 + self.state = 584 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 595 + self.state = 599 self._errHandler.sync(self) _la = self._input.LA(1) if _la==49: - self.state = 583 + self.state = 587 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 592 + self.state = 596 self._errHandler.sync(self) _la = self._input.LA(1) if _la==89: - self.state = 584 + self.state = 588 localctx.attribute = self.parameter() - self.state = 589 + self.state = 593 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 585 + self.state = 589 self.match(PyNestMLParser.COMMA) - self.state = 586 + self.state = 590 localctx.attribute = self.parameter() - self.state = 591 + self.state = 595 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 594 + self.state = 598 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 597 + self.state = 601 self.match(PyNestMLParser.NEWLINE) - self.state = 598 + self.state = 602 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4420,45 +4440,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 600 + self.state = 604 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 601 + self.state = 605 self.match(PyNestMLParser.NAME) - self.state = 602 + self.state = 606 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 611 + self.state = 615 self._errHandler.sync(self) _la = self._input.LA(1) if _la==89: - self.state = 603 + self.state = 607 self.parameter() - self.state = 608 + self.state = 612 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 604 + self.state = 608 self.match(PyNestMLParser.COMMA) - self.state = 605 + self.state = 609 
self.parameter() - self.state = 610 + self.state = 614 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 613 + self.state = 617 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 615 + self.state = 619 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==89 or _la==90: - self.state = 614 + self.state = 618 localctx.returnType = self.dataType() - self.state = 617 + self.state = 621 self.match(PyNestMLParser.COLON) - self.state = 618 + self.state = 622 self.block() except RecognitionException as re: localctx.exception = re @@ -4501,9 +4521,9 @@ def parameter(self): self.enterRule(localctx, 90, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 620 + self.state = 624 self.match(PyNestMLParser.NAME) - self.state = 621 + self.state = 625 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4563,11 +4583,11 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 623 + self.state = 627 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 624 + self.state = 628 self.match(PyNestMLParser.EQUALS) - self.state = 625 + self.state = 629 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==25 or ((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & 27) != 0)): diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 6f88f9577..9a0618561 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -108,7 +108,8 @@ parser grammar PyNestMLParser; */ variable : name=NAME (LEFT_SQUARE_BRACKET vectorParameter=expression RIGHT_SQUARE_BRACKET)? - (DIFFERENTIAL_ORDER)*; + (DIFFERENTIAL_ORDER)* + (FULLSTOP attribute=variable)?; /** ASTFunctionCall Represents a function call, e.g. myFun("a", "b"). 
diff --git a/pynestml/meta_model/ast_variable.py b/pynestml/meta_model/ast_variable.py index c0645be25..7ef70a2df 100644 --- a/pynestml/meta_model/ast_variable.py +++ b/pynestml/meta_model/ast_variable.py @@ -43,7 +43,7 @@ class ASTVariable(ASTNode): """ def __init__(self, name, differential_order=0, type_symbol: Optional[str] = None, - vector_parameter: Optional[str] = None, is_homogeneous: bool = False, delay_parameter: Optional[str] = None, *args, **kwargs): + vector_parameter: Optional[str] = None, is_homogeneous: bool = False, delay_parameter: Optional[str] = None, attribute: Optional[str] = None, *args, **kwargs): r""" Standard constructor. :param name: the name of the variable @@ -53,6 +53,7 @@ def __init__(self, name, differential_order=0, type_symbol: Optional[str] = None :param type_symbol: the type of the variable :param vector_parameter: the vector parameter of the variable :param delay_parameter: the delay value to be used in the differential equation + :param attribute: the attribute (using the dot notation, for example, ``variable.attribute``) """ super(ASTVariable, self).__init__(*args, **kwargs) assert isinstance(differential_order, int), \ @@ -67,6 +68,7 @@ def __init__(self, name, differential_order=0, type_symbol: Optional[str] = None self.vector_parameter = vector_parameter self.is_homogeneous = is_homogeneous self.delay_parameter = delay_parameter + self.attribute = attribute def clone(self): r""" @@ -77,6 +79,7 @@ def clone(self): type_symbol=self.type_symbol, vector_parameter=self.vector_parameter, delay_parameter=self.delay_parameter, + attribute=self.attribute, # ASTNode common attriutes: source_position=self.get_source_position(), scope=self.scope, @@ -220,4 +223,4 @@ def equals(self, other: ASTNode) -> bool: if not isinstance(other, ASTVariable): return False - return self.get_name() == other.get_name() and self.get_differential_order() == other.get_differential_order() + return self.get_name() == other.get_name() and 
self.get_differential_order() == other.get_differential_order() and self.attribute == other.attribute diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index 02a9bd396..f6aea15e0 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -258,9 +258,12 @@ def visitVariable(self, ctx): vector_parameter = self.visit(ctx.vectorParameter) differential_order = (len(ctx.DIFFERENTIAL_ORDER()) if ctx.DIFFERENTIAL_ORDER() is not None else 0) + attribute = ctx.attribute + return ASTNodeFactory.create_ast_variable(name=str(ctx.NAME()), differential_order=differential_order, vector_parameter=vector_parameter, + attribute=attribute, source_position=create_source_pos(ctx)) # Visit a parse tree produced by PyNESTMLParser#functionCall. From c427eb9bcba5ead228b72a398a227b6733bb04a1 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Mon, 21 Oct 2024 10:18:12 +0200 Subject: [PATCH 07/68] add explicit output parameters to spiking output port --- tests/resources/synapse_event_inv_priority_test.nestml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/resources/synapse_event_inv_priority_test.nestml b/tests/resources/synapse_event_inv_priority_test.nestml index e8ae16242..c104aae39 100644 --- a/tests/resources/synapse_event_inv_priority_test.nestml +++ b/tests/resources/synapse_event_inv_priority_test.nestml @@ -42,7 +42,7 @@ model event_inv_priority_test_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes, priority=2): tr += 1. From a24d0cf07c684a8a5e92d003e02dcc94d5f52ff6 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 21 Oct 2024 17:20:34 +0200 Subject: [PATCH 08/68] add explicit parameters to spiking input port --- pynestml/codegeneration/printers/gsl_variable_printer.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pynestml/codegeneration/printers/gsl_variable_printer.py b/pynestml/codegeneration/printers/gsl_variable_printer.py index df87f443d..3705c96d2 100644 --- a/pynestml/codegeneration/printers/gsl_variable_printer.py +++ b/pynestml/codegeneration/printers/gsl_variable_printer.py @@ -89,6 +89,12 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: else: var_name += "_" + str(variable.get_vector_parameter()) - return "spike_inputs_grid_sum_[node." + var_name + " - node.MIN_SPIKE_RECEPTOR]" + # add variable attribute if it exists + variable_attr = "" + if variable.attribute: + variable_attr = "_" + variable.attribute + return "spike_inputs_grid_sum_" + variable_attr + "[node." + var_name + " - node.MIN_SPIKE_RECEPTOR]" + + # case of continuous-type input port return variable_symbol.get_symbol_name() + '_grid_sum_' From 3d297065ab6b21e1454d8f6b9cebba13bf6f1fd1 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 21 Oct 2024 17:23:41 +0200 Subject: [PATCH 09/68] add explicit output parameters to spiking output port --- doc/tutorials/stdp_dopa_synapse/stdp_dopa_synapse.ipynb | 2 +- .../stdp_third_factor_active_dendrite.ipynb | 2 +- doc/tutorials/stdp_windows/stdp_windows.ipynb | 6 +++--- .../triplet_stdp_synapse/triplet_stdp_synapse.ipynb | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/tutorials/stdp_dopa_synapse/stdp_dopa_synapse.ipynb b/doc/tutorials/stdp_dopa_synapse/stdp_dopa_synapse.ipynb index 8d0b4e4c3..0d2e71726 100644 --- a/doc/tutorials/stdp_dopa_synapse/stdp_dopa_synapse.ipynb +++ b/doc/tutorials/stdp_dopa_synapse/stdp_dopa_synapse.ipynb @@ -179,7 +179,7 @@ " mod_spikes <- spike\n", "\n", " output:\n", - " spike\n", + " spike(weight real, delay ms)\n", "\n", " onReceive(mod_spikes):\n", " n += A_vt / tau_n\n", diff --git a/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb b/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb index 0b73974c8..a6307815d 100644 --- a/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb +++ b/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb @@ -444,7 +444,7 @@ " I_post_dend pA <- continuous\n", "\n", " output:\n", - " spike\n", + " spike(weight real, delay ms)\n", "\n", " onReceive(post_spikes):\n", " # potentiate synapse\n", diff --git a/doc/tutorials/stdp_windows/stdp_windows.ipynb b/doc/tutorials/stdp_windows/stdp_windows.ipynb index 8a751a67d..282e372fc 100644 --- a/doc/tutorials/stdp_windows/stdp_windows.ipynb +++ b/doc/tutorials/stdp_windows/stdp_windows.ipynb @@ -249,7 +249,7 @@ " post_spikes <- spike\n", "\n", " output:\n", - " spike\n", + " spike(weight real, delay ms)\n", "\n", " onReceive(post_spikes):\n", " # potentiate synapse\n", @@ -3214,7 +3214,7 @@ " post_spikes <- spike\n", "\n", " output:\n", - " spike\n", + " spike(weight 
real, delay ms)\n", "\n", " onReceive(post_spikes):\n", " post_nn_trace = 1\n", @@ -4805,7 +4805,7 @@ " post_spikes <- spike\n", "\n", " output:\n", - " spike\n", + " spike(weight real, delay ms)\n", "\n", " onReceive(post_spikes, priority=2):\n", " w += lambda * (pre_trace + post_trace)\n", diff --git a/doc/tutorials/triplet_stdp_synapse/triplet_stdp_synapse.ipynb b/doc/tutorials/triplet_stdp_synapse/triplet_stdp_synapse.ipynb index 6168bb9b2..35ebccf60 100644 --- a/doc/tutorials/triplet_stdp_synapse/triplet_stdp_synapse.ipynb +++ b/doc/tutorials/triplet_stdp_synapse/triplet_stdp_synapse.ipynb @@ -216,7 +216,7 @@ " post_spikes <- spike\n", "\n", " output:\n", - " spike\n", + " spike(weight real, delay ms)\n", "\n", " onReceive(post_spikes):\n", " # increment post trace values\n", @@ -1120,7 +1120,7 @@ " post_spikes <- spike\n", "\n", " output:\n", - " spike\n", + " spike(weight real, delay ms)\n", "\n", " onReceive(post_spikes):\n", " # increment post trace values\n", From 9f5eeb8feb1dc61dc7959fb909e4f38980b69c63 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 21 Oct 2024 10:47:12 +0200 Subject: [PATCH 10/68] update version numbers after 8.0.0-rc3 release --- pynestml/__init__.py | 2 +- setup.py | 2 +- ...oCoConvolveNotCorrectlyParametrized.nestml | 2 +- .../CoCoConvolveNotCorrectlyProvided.nestml | 2 +- .../CoCoInputPortWithRedundantTypes.nestml | 36 ---- .../CoCoValueAssignedToInputPort.nestml | 4 +- tests/nest_tests/input_ports_test.py | 165 ------------------ .../iaf_cond_exp_Istep_neuron.nestml | 4 +- .../iaf_psc_exp_resolution_test.nestml | 19 +- tests/nest_tests/resources/input_ports.nestml | 8 +- tests/resources/NestMLPrinterTest.nestml | 2 +- .../resources/SynapseEventSequenceTest.nestml | 48 ----- .../random_number_generators_test.nestml | 5 - tests/test_cocos.py | 16 +- ...oCoConvolveNotCorrectlyParametrized.nestml | 2 +- .../CoCoConvolveNotCorrectlyProvided.nestml | 2 +- .../CoCoInputPortWithRedundantTypes.nestml | 36 ---- .../valid/CoCoValueAssignedToInputPort.nestml | 2 +- .../CoCoVectorInputPortSizeAndType.nestml | 4 +- 19 files changed, 39 insertions(+), 322 deletions(-) delete mode 100644 tests/invalid/CoCoInputPortWithRedundantTypes.nestml delete mode 100644 tests/nest_tests/input_ports_test.py delete mode 100644 tests/resources/SynapseEventSequenceTest.nestml delete mode 100644 tests/valid/CoCoInputPortWithRedundantTypes.nestml diff --git a/pynestml/__init__.py b/pynestml/__init__.py index 1bf5f861f..6b317a98c 100644 --- a/pynestml/__init__.py +++ b/pynestml/__init__.py @@ -19,7 +19,7 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
-__version__ = "8.0.0-rc3" +__version__ = "8.0.0-rc3-post-dev" __all__ = ['cocos', 'codegeneration', diff --git a/setup.py b/setup.py index 9fc9caf32..158fd45ee 100755 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ setup( name="NESTML", - version="8.0.0-rc3", + version="8.0.0-rc3-post-dev", description="NESTML is a domain specific language that supports the specification of neuron models in a" " precise and concise syntax, based on the syntax of Python. Model equations can either be given" " as a simple string of mathematical notation or as an algorithm written in the built-in procedural" diff --git a/tests/invalid/CoCoConvolveNotCorrectlyParametrized.nestml b/tests/invalid/CoCoConvolveNotCorrectlyParametrized.nestml index 2a0a760eb..80af28002 100644 --- a/tests/invalid/CoCoConvolveNotCorrectlyParametrized.nestml +++ b/tests/invalid/CoCoConvolveNotCorrectlyParametrized.nestml @@ -39,4 +39,4 @@ model CoCoConvolveNotCorrectlyParametrized: inline testB pA = convolve(V_m+V_m, spikeExc) input: - spikeExc <- excitatory spike + spikeExc <- spike diff --git a/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml b/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml index 85fc57159..9a9b6571e 100644 --- a/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml +++ b/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml @@ -42,7 +42,7 @@ model CoCoConvolveNotCorrectlyProvided: V_m' = 20 mV/ms input: - spikeExc <- excitatory spike + spikeExc <- spike update: integrate_odes() diff --git a/tests/invalid/CoCoInputPortWithRedundantTypes.nestml b/tests/invalid/CoCoInputPortWithRedundantTypes.nestml deleted file mode 100644 index 8d3809c1b..000000000 --- a/tests/invalid/CoCoInputPortWithRedundantTypes.nestml +++ /dev/null @@ -1,36 +0,0 @@ -""" -CoCoInputPortWithRedundantTypes.nestml -###################################### - - -Description -+++++++++++ - -This model is used to test if broken CoCos are identified correctly. 
Here, if each input port is defined uniquely, i.e., no redundant keywords are used. - -Negative case. - - -Copyright statement -+++++++++++++++++++ - -This file is part of NEST. - -Copyright (C) 2004 The NEST Initiative - -NEST is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 2 of the License, or -(at your option) any later version. - -NEST is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with NEST. If not, see . -""" -model CoCoInputPortWithRedundantTypes: - input: - spikeInhX2 <- inhibitory inhibitory spike # spike redundant keywords used diff --git a/tests/invalid/CoCoValueAssignedToInputPort.nestml b/tests/invalid/CoCoValueAssignedToInputPort.nestml index e915c2ecc..289b405df 100644 --- a/tests/invalid/CoCoValueAssignedToInputPort.nestml +++ b/tests/invalid/CoCoValueAssignedToInputPort.nestml @@ -33,7 +33,7 @@ along with NEST. If not, see . """ model CoCoValueAssignedToInputPort: input: - spikeInh <- inhibitory spike + spike_in_port <- spike update: - spikeInh = 10 / s + spike_in_port = 10 / s diff --git a/tests/nest_tests/input_ports_test.py b/tests/nest_tests/input_ports_test.py deleted file mode 100644 index 0d26c79ad..000000000 --- a/tests/nest_tests/input_ports_test.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# -# input_ports_test.py -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. 
-# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . -import os -import pytest - -import nest - -from pynestml.frontend.pynestml_frontend import generate_nest_target -from pynestml.codegeneration.nest_tools import NESTTools - - -class TestInputPorts: - """ - Tests the different kind of input ports supported in NESTML. - """ - - @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), - reason="This test does not support NEST 2") - def test_input_ports(self): - input_path = os.path.join(os.path.realpath(os.path.join( - os.path.dirname(__file__), "resources", "input_ports.nestml"))) - target_path = "target" - logging_level = "INFO" - module_name = "nestmlmodule" - suffix = "_nestml" - - generate_nest_target(input_path, - target_path=target_path, - logging_level=logging_level, - module_name=module_name, - suffix=suffix) - nest.ResetKernel() - nest.Install(module_name) - - neuron = nest.Create("input_ports_nestml") - - # List of receptor types for the spiking input ports - receptor_types = nest.GetStatus(neuron, "receptor_types")[0] - - spike_times = [ - [10., 44.], # NMDA_SPIKES - [12., 42.], # AMPA_SPIKES - [14., 40.], # GABA_SPIKES - [16., 38.], # FOO_0 - [18., 36.], # FOO_1 - [20., 34.], # MY_SPIKES_0 - [22., 32.], # MY_SPIKES_1 - [24., 30.], # MY_SPIKES2_1 - ] - sgs = nest.Create('spike_generator', len(spike_times)) - for i, sg in enumerate(sgs): - sg.spike_times = spike_times[i] - - nest.Connect(sgs[0], neuron, syn_spec={'receptor_type': receptor_types["NMDA_SPIKES"], 'weight': -1.0, 'delay': 1.0}) - nest.Connect(sgs[1], neuron, syn_spec={'receptor_type': receptor_types["AMPA_SPIKES"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[2], neuron, 
syn_spec={'receptor_type': receptor_types["GABA_SPIKES"], 'weight': -1.0, 'delay': 1.0}) - nest.Connect(sgs[3], neuron, syn_spec={'receptor_type': receptor_types["FOO_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[4], neuron, syn_spec={'receptor_type': receptor_types["FOO_1"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[5], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[6], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES_1"], 'weight': 2.0, 'delay': 1.0}) - nest.Connect(sgs[7], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES2_1"], 'weight': -3.0, 'delay': 1.0}) - - mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "my_spikes_ip"]}) - nest.Connect(mm, neuron) - - nest.Simulate(50.) - - events = mm.get("events") - connections = nest.GetConnections(target=neuron) - - # corresponds to ``bar += NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes`` in the update block - assert events["bar"][-1] == len(spike_times[0]) * abs(connections.get("weight")[0]) \ - + 2 * len(spike_times[1]) * abs(connections.get("weight")[1]) \ - - 3 * len(spike_times[2]) * abs(connections.get("weight")[2]) - - # corresponds to ``foo_spikes += foo[0] + 5.5 * foo[1]`` in the update block - assert events["foo_spikes"][-1] == len(spike_times[3]) * abs(connections.get("weight")[3]) \ - + 5.5 * len(spike_times[4]) * abs(connections.get("weight")[4]) - - # corresponds to ``my_spikes_ip += my_spikes[0] + my_spikes[1] - my_spikes2[1]`` in the update block - assert events["my_spikes_ip"][-1] == len(spike_times[5]) * abs(connections.get("weight")[5]) \ - + len(spike_times[6]) * abs(connections.get("weight")[6]) \ - - len(spike_times[7]) * abs(connections.get("weight")[7]) - - @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), - reason="This test does not support NEST 2") - def test_input_ports_in_loop(self): - input_path = os.path.join(os.path.realpath(os.path.join( - 
os.path.dirname(__file__), "resources", "input_ports_in_loop.nestml"))) - target_path = "target" - logging_level = "INFO" - module_name = "nestmlmodule" - suffix = "_nestml" - - generate_nest_target(input_path, - target_path=target_path, - logging_level=logging_level, - module_name=module_name, - suffix=suffix) - nest.ResetKernel() - nest.Install(module_name) - - neuron = nest.Create("input_ports_loop_nestml") - - # List of receptor types for the spiking input ports - receptor_types = nest.GetStatus(neuron, "receptor_types")[0] - - spike_times = [ - [10., 39.], # NMDA_SPIKES - [12., 37.], # FOO_0 - [14., 35.], # FOO_1 - [16., 33.], # SPIKE_BUF_0 - [18., 31.], # SPIKE_BUF_1 - [20., 29.], # SPIKE_BUF_2 - [22., 27.], # SPIKE_BUF_3 - [24., 25.], # SPIKE_BUF_4 - ] - sgs = nest.Create('spike_generator', len(spike_times)) - for i, sg in enumerate(sgs): - sg.spike_times = spike_times[i] - - nest.Connect(sgs[0], neuron, - syn_spec={'receptor_type': receptor_types["NMDA_SPIKES"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[1], neuron, - syn_spec={'receptor_type': receptor_types["FOO_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[2], neuron, - syn_spec={'receptor_type': receptor_types["FOO_1"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[3], neuron, syn_spec={'receptor_type': receptor_types["SPIKE_BUF_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[4], neuron, syn_spec={'receptor_type': receptor_types["SPIKE_BUF_1"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[5], neuron, - syn_spec={'receptor_type': receptor_types["SPIKE_BUF_2"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[6], neuron, - syn_spec={'receptor_type': receptor_types["SPIKE_BUF_3"], 'weight': 2.0, 'delay': 1.0}) - nest.Connect(sgs[7], neuron, - syn_spec={'receptor_type': receptor_types["SPIKE_BUF_4"], 'weight': 3.0, 'delay': 1.0}) - - mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "MY_SPIKES_IP_2", "MY_SPIKES_IP_3", "MY_SPIKES_IP_4", "MY_SPIKES_IP_5", 
"MY_SPIKES_IP_6"]}) - nest.Connect(mm, neuron) - - nest.Simulate(41.) - - events = mm.get("events") - assert events["bar"][-1] == 2.0 - assert events["foo_spikes"][-1] == 25.0 - assert events["MY_SPIKES_IP_2"][-1] == 2.0 - assert events["MY_SPIKES_IP_5"][-1] == 4.0 - assert events["MY_SPIKES_IP_6"][-1] == 6.0 diff --git a/tests/nest_tests/resources/iaf_cond_exp_Istep_neuron.nestml b/tests/nest_tests/resources/iaf_cond_exp_Istep_neuron.nestml index a348b9540..75b0e5304 100644 --- a/tests/nest_tests/resources/iaf_cond_exp_Istep_neuron.nestml +++ b/tests/nest_tests/resources/iaf_cond_exp_Istep_neuron.nestml @@ -58,8 +58,8 @@ model iaf_cond_exp_Istep_neuron: t_step[n_step] ms = 0. ms # times of step current changes input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml b/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml index 2de1cd1be..2f90e2776 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml @@ -9,16 +9,17 @@ Used to test resolution() function. 
""" model iaf_psc_exp_resolution_test_neuron: state: + I_syn_exc pA = 0 pA + I_syn_inh pA = 0 pA V_m mV = E_L refr_t ms = 0 ms # Refractory period timer is_refractory boolean = false a ms = resolution() equations: - kernel I_kernel_inh = exp(-t/tau_syn_inh) - kernel I_kernel_exc = exp(-t/tau_syn_exc) - inline I_syn pA = convolve(I_kernel_inh, inh_spikes) * pA + convolve(I_kernel_exc, exc_spikes) * pA + I_e + I_stim - V_m' = -(V_m - E_L) / tau_m + I_syn / C_m + I_syn_exc' = -I_syn_exc / tau_syn_exc + I_syn_inh' = -I_syn_inh / tau_syn_inh + V_m' = -(V_m - E_L) / tau_m + (I_syn_exc - I_syn_inh + I_e + I_stim) / C_m parameters: C_m pF = 250 pF # Capacitance of the membrane @@ -38,13 +39,19 @@ model iaf_psc_exp_resolution_test_neuron: c ms = resolution() input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + spike_in_port <- spike I_stim pA <- continuous output: spike + onReceive(spike_in_port): + # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike + if spike_in_port < 0: + I_syn_inh += spike_in_port * pA * s + else: + I_syn_exc += spike_in_port * pA * s + update: d ms = resolution() if is_refractory: diff --git a/tests/nest_tests/resources/input_ports.nestml b/tests/nest_tests/resources/input_ports.nestml index e60aa7b8b..cec915dff 100644 --- a/tests/nest_tests/resources/input_ports.nestml +++ b/tests/nest_tests/resources/input_ports.nestml @@ -36,12 +36,12 @@ model input_ports: my_spikes_ip pA = 0 pA input: - AMPA_spikes <- excitatory spike - GABA_spikes <- inhibitory spike + AMPA_spikes <- spike + GABA_spikes <- spike NMDA_spikes <- spike foo[2] <- spike - my_spikes[3] <- excitatory spike - my_spikes2[3] <- inhibitory spike + my_spikes[3] <- spike + my_spikes2[3] <- spike I_stim pA <- continuous update: diff --git a/tests/resources/NestMLPrinterTest.nestml b/tests/resources/NestMLPrinterTest.nestml index 6041ee0af..30556d35b 100644 --- 
a/tests/resources/NestMLPrinterTest.nestml +++ b/tests/resources/NestMLPrinterTest.nestml @@ -62,7 +62,7 @@ model aeif_cond_alpha_implicit: # input pre input: # input decl pre - inh_spikes <- inhibitory spike # input decl in + inh_spikes <- spike # input decl in # input decl post # output pre diff --git a/tests/resources/SynapseEventSequenceTest.nestml b/tests/resources/SynapseEventSequenceTest.nestml deleted file mode 100644 index 1e5826b3d..000000000 --- a/tests/resources/SynapseEventSequenceTest.nestml +++ /dev/null @@ -1,48 +0,0 @@ -""" -SynapseEventSequenceTest.nestml -################ - - -Description -+++++++++++ - -This model is used to test the sequencing of event handlers in synapse models. - - -Copyright statement -+++++++++++++++++++ - -This file is part of NEST. - -Copyright (C) 2004 The NEST Initiative - -NEST is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 2 of the License, or -(at your option) any later version. - -NEST is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with NEST. If not, see . -""" -model event_sequence_test_synapse: - parameters: - w real = 1 - d ms = 1 ms - - state: - tr real = 1. - - input: - pre_spikes <- spike - post_spikes <- spike - - onReceive(pre_spikes, priority=1): - tr += 1. 
- - onReceive(post_spikes, priority=2): - tr *= 3.14159 diff --git a/tests/resources/random_number_generators_test.nestml b/tests/resources/random_number_generators_test.nestml index 59bf28c9c..616812e36 100644 --- a/tests/resources/random_number_generators_test.nestml +++ b/tests/resources/random_number_generators_test.nestml @@ -34,10 +34,5 @@ model test_random: q' = random_normal(500, 25) / s r' = random_uniform(42, 123) / s - input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike - currents pA <- continuous - update: integrate_odes() diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 731fb8d8a..e4e5ef20e 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -169,14 +169,6 @@ def test_valid_no_nest_collision(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNestNamespaceCollision.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_redundant_input_port_keywords_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortWithRedundantTypes.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_redundant_input_port_keywords_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortWithRedundantTypes.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_parameters_assigned_only_in_parameters_block(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 @@ -383,6 +375,14 
@@ def test_invalid_co_co_vector_input_port(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInputPortSizeAndType.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + + def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers2(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal2.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + def _parse_and_validate_model(self, fname: str) -> Optional[str]: from pynestml.frontend.pynestml_frontend import generate_target diff --git a/tests/valid/CoCoConvolveNotCorrectlyParametrized.nestml b/tests/valid/CoCoConvolveNotCorrectlyParametrized.nestml index c4a6a213a..179d384db 100644 --- a/tests/valid/CoCoConvolveNotCorrectlyParametrized.nestml +++ b/tests/valid/CoCoConvolveNotCorrectlyParametrized.nestml @@ -43,4 +43,4 @@ model CoCoConvolveNotCorrectlyParametrized: inline testB pA = convolve(G, spikeExc) * pA # convolve is now correctly parametrized input: - spikeExc <- excitatory spike + spikeExc <- spike diff --git a/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml b/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml index 4d465d60e..0d7f464f7 100644 --- a/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml +++ b/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml @@ -37,7 +37,7 @@ model CoCoConvolveNotCorrectlyProvided: inline testB pA = convolve(test, spikeExc) * pA # 
convolve provided with a kernel and a spike input port, thus correct input: - spikeExc <- excitatory spike + spikeExc <- spike update: integrate_odes() diff --git a/tests/valid/CoCoInputPortWithRedundantTypes.nestml b/tests/valid/CoCoInputPortWithRedundantTypes.nestml deleted file mode 100644 index 967f51a4c..000000000 --- a/tests/valid/CoCoInputPortWithRedundantTypes.nestml +++ /dev/null @@ -1,36 +0,0 @@ -""" -CoCoInputPortWithRedundantTypes.nestml -###################################### - - -Description -+++++++++++ - -This model is used to test if broken CoCos are identified correctly. Here, if each input port is defined uniquely, i.e., no redundant keywords are used. - -Positive case. - - -Copyright statement -+++++++++++++++++++ - -This file is part of NEST. - -Copyright (C) 2004 The NEST Initiative - -NEST is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 2 of the License, or -(at your option) any later version. - -NEST is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with NEST. If not, see . -""" -model CoCoInputPortWithRedundantTypes: - input: - spikeInh <- inhibitory spike # no redundant keywords used, thus correct diff --git a/tests/valid/CoCoValueAssignedToInputPort.nestml b/tests/valid/CoCoValueAssignedToInputPort.nestml index 9a76bebd2..4a30a9b43 100644 --- a/tests/valid/CoCoValueAssignedToInputPort.nestml +++ b/tests/valid/CoCoValueAssignedToInputPort.nestml @@ -33,7 +33,7 @@ along with NEST. If not, see . 
""" model CoCoValueAssignedToInputPort: input: - spikeInh <- inhibitory spike + spikeInh <- spike update: # input port not assigned to, thus everything is correct test integer = spikeInh * s + 10 diff --git a/tests/valid/CoCoVectorInputPortSizeAndType.nestml b/tests/valid/CoCoVectorInputPortSizeAndType.nestml index aa230ad49..b11c96f34 100644 --- a/tests/valid/CoCoVectorInputPortSizeAndType.nestml +++ b/tests/valid/CoCoVectorInputPortSizeAndType.nestml @@ -35,5 +35,5 @@ along with NEST. If not, see . model CoCoVectorInputPortSizeAndType: input: foo[2] <- spike - bar1[3] <- excitatory spike - bar2[3] <- inhibitory spike + bar1[3] <- spike + bar2[3] <- spike From dd8fed6392799879c506074bf7c6442ffcc6d95c Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 29 Oct 2024 14:55:37 +0100 Subject: [PATCH 11/68] remove qualifiers from spiking input ports --- doc/nestml_language/neurons_in_nestml.rst | 49 +- doc/pynestml_toolchain/front.rst | 2 - doc/running/running_nest.rst | 2 +- .../models/iaf_psc_alpha.nestml | 4 +- .../models/iaf_psc_alpha_adapt_curr.nestml | 3 +- .../models/iaf_psc_alpha_adapt_thresh.nestml | 4 +- .../iaf_psc_alpha_adapt_thresh_OU.nestml | 4 +- models/neurons/aeif_cond_alpha_neuron.nestml | 4 +- models/neurons/aeif_cond_exp_neuron.nestml | 20 +- .../hh_cond_exp_destexhe_neuron.nestml | 4 +- .../neurons/hh_cond_exp_traub_neuron.nestml | 4 +- models/neurons/hh_moto_5ht_neuron.nestml | 4 +- models/neurons/hh_psc_alpha_neuron.nestml | 4 +- models/neurons/iaf_chxk_2008_neuron.nestml | 4 +- models/neurons/iaf_cond_alpha_neuron.nestml | 4 +- models/neurons/iaf_cond_beta_neuron.nestml | 4 +- models/neurons/iaf_cond_exp_neuron.nestml | 4 +- .../neurons/iaf_cond_exp_sfa_rr_neuron.nestml | 4 +- models/neurons/iaf_psc_alpha_neuron.nestml | 4 +- models/neurons/iaf_psc_exp_dend_neuron.nestml | 4 +- models/neurons/iaf_psc_exp_htum_neuron.nestml | 4 +- models/neurons/iaf_psc_exp_neuron.nestml | 16 +- .../izhikevich_psc_alpha_neuron.nestml | 4 +- 
models/neurons/mat2_psc_exp_neuron.nestml | 4 +- models/neurons/terub_gpe_neuron.nestml | 4 +- models/neurons/terub_stn_neuron.nestml | 4 +- models/neurons/traub_psc_alpha_neuron.nestml | 4 +- models/neurons/wb_cond_exp_neuron.nestml | 4 +- pynestml/cocos/__init__.py | 1 - .../co_co_input_port_qualifier_unique.py | 72 - ...only_in_equation_rhs_and_event_handlers.py | 73 + pynestml/cocos/co_cos_manager.py | 20 +- .../codegeneration/printers/model_printer.py | 7 - .../codegeneration/printers/nestml_printer.py | 11 - .../point_neuron/common/NeuronClass.jinja2 | 48 +- .../point_neuron/common/NeuronHeader.jinja2 | 23 +- .../RportToBufferIndexEntry.jinja2 | 14 +- pynestml/generated/PyNestMLLexer.py | 599 ++++--- pynestml/generated/PyNestMLParser.py | 1499 ++++++++--------- pynestml/generated/PyNestMLParserVisitor.py | 5 - pynestml/grammars/PyNestMLLexer.g4 | 2 - pynestml/grammars/PyNestMLParser.g4 | 14 +- pynestml/meta_model/__init__.py | 1 - pynestml/meta_model/ast_input_block.py | 2 +- pynestml/meta_model/ast_input_port.py | 73 +- pynestml/meta_model/ast_input_qualifier.py | 90 - pynestml/meta_model/ast_model.py | 19 +- pynestml/meta_model/ast_node_factory.py | 12 +- pynestml/symbols/variable_symbol.py | 14 - pynestml/utils/ast_utils.py | 51 +- pynestml/utils/messages.py | 24 +- pynestml/utils/model_parser.py | 9 - pynestml/visitors/ast_builder_visitor.py | 15 +- pynestml/visitors/ast_symbol_table_visitor.py | 3 - pynestml/visitors/ast_visitor.py | 37 +- tests/invalid/CoCoInputPortsIllegal.nestml | 50 + tests/invalid/CoCoInputPortsIllegal2.nestml | 50 + tests/nest_tests/test_input_ports.py | 166 ++ 58 files changed, 1489 insertions(+), 1695 deletions(-) delete mode 100644 pynestml/cocos/co_co_input_port_qualifier_unique.py create mode 100644 pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py delete mode 100644 pynestml/meta_model/ast_input_qualifier.py create mode 100644 tests/invalid/CoCoInputPortsIllegal.nestml create mode 100644 
tests/invalid/CoCoInputPortsIllegal2.nestml create mode 100644 tests/nest_tests/test_input_ports.py diff --git a/doc/nestml_language/neurons_in_nestml.rst b/doc/nestml_language/neurons_in_nestml.rst index a6329bf80..8f72031fd 100644 --- a/doc/nestml_language/neurons_in_nestml.rst +++ b/doc/nestml_language/neurons_in_nestml.rst @@ -31,48 +31,7 @@ A neuron model written in NESTML can be configured to receive two distinct types AMPA_spikes <- spike I_stim pA <- continuous -The general syntax is: - -:: - - port_name <- inputQualifier spike - port_name dataType <- continuous - The spiking input ports are declared without a data type, whereas the continuous input ports must have a data type. -For spiking input ports, the qualifier keywords decide whether inhibitory and excitatory inputs are lumped together into a single named input port, or if they are separated into differently named input ports based on their sign. When processing a spike event, some simulators (including NEST) use the sign of the amplitude (or weight) property in the spike event to indicate whether it should be considered an excitatory or inhibitory spike. By using the qualifier keywords, a single spike handler can route each incoming spike event to the correct input buffer (excitatory or inhibitory). Compare: - -.. code-block:: nestml - - input: - # [...] - all_spikes <- spike - -In this case, all spike events will be processed through the ``all_spikes`` input port. A spike weight could be positive or negative, and the occurrences of ``all_spikes`` in the model should be considered a signed quantity. - -.. code-block:: nestml - - input: - # [...] - AMPA_spikes <- excitatory spike - GABA_spikes <- inhibitory spike - -In this case, spike events that have a negative weight are routed to the ``GABA_spikes`` input port, and those that have a positive weight to the ``AMPA_spikes`` port. 
- -It is equivalent if either both `inhibitory` and `excitatory` are given, or neither: an unmarked port will by default handle all incoming presynaptic spikes. - -.. list-table:: - :header-rows: 1 - :widths: 10 60 - - * - Keyword - - The incoming weight :math:`w`... - * - none, or ``excitatory`` and ``inhibitory`` - - ... may be positive or negative. It is added to the buffer with signed value :math:`w` (positive or negative). - * - ``excitatory`` - - ... should not be negative. It is added to the buffer with non-negative magnitude :math:`w`. - * - ``inhibitory`` - - ... should be negative. It is added to the buffer with non-negative magnitude :math:`-w`. - Integrating current input @@ -214,12 +173,12 @@ The input ports can also be defined as vectors. For example, neuron multi_synapse_vectors: input: - AMPA_spikes <- excitatory spike - GABA_spikes <- inhibitory spike + AMPA_spikes <- spike + GABA_spikes <- spike NMDA_spikes <- spike foo[2] <- spike - exc_spikes[3] <- excitatory spike - inh_spikes[3] <- inhibitory spike + exc_spikes[3] <- spike + inh_spikes[3] <- spike equations: kernel I_kernel_exc = exp(-1 / tau_syn_exc * t) diff --git a/doc/pynestml_toolchain/front.rst b/doc/pynestml_toolchain/front.rst index b6fd2ec04..407498ef8 100644 --- a/doc/pynestml_toolchain/front.rst +++ b/doc/pynestml_toolchain/front.rst @@ -267,8 +267,6 @@ Given the fact that context conditions have the commonality of checking the cont - *CoCoConvolveHasCorrectParameter*: Checks that *convolve* calls are not provided with complex expressions, but only variables. -- *CoCoTypeOfBufferUnique*: Checks that no keyword is stated twice in an input buffer declaration, e.g., *inhibitory inhibitory spike*. - - *CoCoUserDeclaredFunctionCorrectlyDefined*: Checks that user-defined functions are correctly defined, i.e., only parameters of the function are used, and the return type is correctly stated. 
- *CoCoVariableOncePerScope*: Checks that each variable is defined at most once per scope, i.e., no variable is redefined. diff --git a/doc/running/running_nest.rst b/doc/running/running_nest.rst index bb40a63aa..303f42d32 100644 --- a/doc/running/running_nest.rst +++ b/doc/running/running_nest.rst @@ -133,7 +133,7 @@ Multiple input ports with vectors in NEST See :ref:`Multiple input ports with vectors` for an example with input ports defined as vectors. -Each connection in NEST is denoted by a receiver port or ``rport`` number which is an integer that starts with 0. All default connections in NEST have the ``rport`` 0. NESTML routes the spikes with ``excitatory`` and ``inhibitory`` qualifiers into separate input buffers, whereas NEST identifies them with the same ``rport`` number. +Each connection in NEST is denoted by a receiver port or ``rport`` number which is an integer that starts with 0. All default connections in NEST have the ``rport`` 0. During the code generation for NEST, NESTML maintains an internal mapping between NEST ``rports`` and NESTML input ports. A list of port names defined in a model and their corresponding ``rport`` numbers can be queried from the status dictionary using the NEST API. For neurons with multiple input ports, the ``receptor_type`` values in the ``nest.Connect()`` call start from 1 as the default ``receptor_type`` 0 is excluded to avoid any accidental connections. 
diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml index 5a6bcd84e..faf90cc7c 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml @@ -83,8 +83,8 @@ model iaf_psc_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml index 7d9887075..fa454127b 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml @@ -90,8 +90,7 @@ model iaf_psc_alpha_adapt_curr_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + in_spikes <- spike I_stim pA <- continuous output: diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml index ec0f42c50..791099c13 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml @@ -90,8 +90,8 @@ model iaf_psc_alpha_adapt_thresh_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml index 96bd2f5a3..8f9926813 100644 --- 
a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml @@ -99,8 +99,8 @@ model iaf_psc_alpha_adapt_thresh_OU_neuron: A_noise pA = ((D_noise * tau_syn_exc / 2) * (1 - exp(-2 * resolution() / tau_syn_exc )))**.5 input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/aeif_cond_alpha_neuron.nestml b/models/neurons/aeif_cond_alpha_neuron.nestml index ee067d26f..6d9e6093d 100644 --- a/models/neurons/aeif_cond_alpha_neuron.nestml +++ b/models/neurons/aeif_cond_alpha_neuron.nestml @@ -98,8 +98,8 @@ model aeif_cond_alpha_neuron: PSConInit_I nS/ms = nS * e / tau_syn_inh input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index c1008d3bf..0c2a134f3 100644 --- a/models/neurons/aeif_cond_exp_neuron.nestml +++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -46,17 +46,19 @@ model aeif_cond_exp_neuron: V_m mV = E_L # Membrane potential w pA = 0 pA # Spike-adaptation current refr_t ms = 0 ms # Refractory period timer + g_syn_exc nS = 0 nS + g_syn_inh nS = 0 nS equations: inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence - kernel g_inh = exp(-t / tau_syn_inh) - kernel g_exc = exp(-t / tau_syn_exc) + g_syn_exc' = -g_syn_exc / tau_syn_exc + g_syn_inh' = -g_syn_inh / tau_syn_inh # Add inlines to simplify the equation definition of V_m inline exp_arg real = (V_bounded - V_th) / Delta_T inline I_spike pA = g_L * Delta_T * exp(exp_arg) - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * (V_bounded - E_exc) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * (V_bounded - E_inh) + inline I_syn_exc pA = g_syn_exc * 
(V_bounded - E_exc) + inline I_syn_inh pA = g_syn_inh * nS * (V_bounded - E_inh) V_m' = (-g_L * (V_bounded - E_L) + I_spike - I_syn_exc - I_syn_inh - w + I_e + I_stim) / C_m w' = (a * (V_bounded - E_L) - w) / tau_w @@ -89,13 +91,19 @@ model aeif_cond_exp_neuron: I_e pA = 0 pA input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + spike_in_port <- spike I_stim pA <- continuous output: spike + onReceive(spike_in_port): + # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike + if spike_in_port < 0: + g_syn_inh += spike_in_port * nS * s + else: + g_syn_exc += spike_in_port * nS * s + update: if refr_t > 0 ms: # neuron is absolute refractory, do not evolve V_m diff --git a/models/neurons/hh_cond_exp_destexhe_neuron.nestml b/models/neurons/hh_cond_exp_destexhe_neuron.nestml index c2bfced5a..51e46f88a 100644 --- a/models/neurons/hh_cond_exp_destexhe_neuron.nestml +++ b/models/neurons/hh_cond_exp_destexhe_neuron.nestml @@ -126,8 +126,8 @@ model hh_cond_exp_destexhe_neuron: D_inh uS**2/ms = 2 * sigma_noise_inh**2 / tau_syn_inh input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/hh_cond_exp_traub_neuron.nestml b/models/neurons/hh_cond_exp_traub_neuron.nestml index 7d9c7ab33..97b067501 100644 --- a/models/neurons/hh_cond_exp_traub_neuron.nestml +++ b/models/neurons/hh_cond_exp_traub_neuron.nestml @@ -113,8 +113,8 @@ model hh_cond_exp_traub_neuron: I_e pA = 0 pA input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/hh_moto_5ht_neuron.nestml b/models/neurons/hh_moto_5ht_neuron.nestml index 5fa861a1f..8da40a08c 100644 --- a/models/neurons/hh_moto_5ht_neuron.nestml +++ b/models/neurons/hh_moto_5ht_neuron.nestml @@ -108,8 +108,8 @@ 
model hh_moto_5ht_neuron: alpha mmol/pA = 1E-5 mmol/pA input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/hh_psc_alpha_neuron.nestml b/models/neurons/hh_psc_alpha_neuron.nestml index 7b53cbf71..53c80c320 100644 --- a/models/neurons/hh_psc_alpha_neuron.nestml +++ b/models/neurons/hh_psc_alpha_neuron.nestml @@ -108,8 +108,8 @@ model hh_psc_alpha_neuron: beta_h_init real = 1. / ( 1. + exp( -( V_m_init / mV + 35. ) / 10. ) ) input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_chxk_2008_neuron.nestml b/models/neurons/iaf_chxk_2008_neuron.nestml index ea8bc6eb4..dafe98864 100644 --- a/models/neurons/iaf_chxk_2008_neuron.nestml +++ b/models/neurons/iaf_chxk_2008_neuron.nestml @@ -80,8 +80,8 @@ model iaf_chxk_2008_neuron: PSConInit_AHP real = G_ahp * e / tau_ahp * (ms/nS) input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_alpha_neuron.nestml b/models/neurons/iaf_cond_alpha_neuron.nestml index a7b4ac265..ced68d31d 100644 --- a/models/neurons/iaf_cond_alpha_neuron.nestml +++ b/models/neurons/iaf_cond_alpha_neuron.nestml @@ -68,8 +68,8 @@ model iaf_cond_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_beta_neuron.nestml b/models/neurons/iaf_cond_beta_neuron.nestml index 6b6d8abf0..ddea838ca 100644 --- a/models/neurons/iaf_cond_beta_neuron.nestml +++ b/models/neurons/iaf_cond_beta_neuron.nestml @@ -100,8 +100,8 @@ model iaf_cond_beta_neuron: g_I_const real = 1 / (exp(-t_peak_I / tau_syn_decay_I) - exp(-t_peak_I / tau_syn_rise_I)) 
input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_exp_neuron.nestml b/models/neurons/iaf_cond_exp_neuron.nestml index 86616b1ea..018297ae5 100644 --- a/models/neurons/iaf_cond_exp_neuron.nestml +++ b/models/neurons/iaf_cond_exp_neuron.nestml @@ -59,8 +59,8 @@ model iaf_cond_exp_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml b/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml index c360f17e6..0d5474065 100644 --- a/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml +++ b/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml @@ -79,8 +79,8 @@ model iaf_cond_exp_sfa_rr_neuron: I_e pA = 0 pA input: - inh_spikes <- inhibitory spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_alpha_neuron.nestml b/models/neurons/iaf_psc_alpha_neuron.nestml index ef68e438c..a90b689f8 100644 --- a/models/neurons/iaf_psc_alpha_neuron.nestml +++ b/models/neurons/iaf_psc_alpha_neuron.nestml @@ -83,8 +83,8 @@ model iaf_psc_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_exp_dend_neuron.nestml b/models/neurons/iaf_psc_exp_dend_neuron.nestml index 38a3e8e73..49f60ba02 100644 --- a/models/neurons/iaf_psc_exp_dend_neuron.nestml +++ b/models/neurons/iaf_psc_exp_dend_neuron.nestml @@ -63,8 +63,8 @@ model iaf_psc_exp_dend_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git 
a/models/neurons/iaf_psc_exp_htum_neuron.nestml b/models/neurons/iaf_psc_exp_htum_neuron.nestml index c79da9558..6b0cb7462 100644 --- a/models/neurons/iaf_psc_exp_htum_neuron.nestml +++ b/models/neurons/iaf_psc_exp_htum_neuron.nestml @@ -98,8 +98,8 @@ model iaf_psc_exp_htum_neuron: RefractoryCountsTot integer = steps(t_ref_tot) [[RefractoryCountsTot > 0]] input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index c153f4fc5..2b2ba8d6e 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -80,13 +80,19 @@ model iaf_psc_exp_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + spike_in_port <- spike I_stim pA <- continuous output: spike + onReceive(spike_in_port): + # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike + if spike_in_port < 0: + I_syn_inh += spike_in_port * pA * s + else: + I_syn_exc += spike_in_port * pA * s + update: if refr_t > 0 ms: # neuron is absolute refractory, do not evolve V_m @@ -95,12 +101,6 @@ model iaf_psc_exp_neuron: # neuron not refractory integrate_odes(I_syn_exc, I_syn_inh, V_m) - onReceive(exc_spikes): - I_syn_exc += exc_spikes * pA * s - - onReceive(inh_spikes): - I_syn_inh += inh_spikes * pA * s - onCondition(refr_t <= 0 ms and V_m >= V_th): # threshold crossing refr_t = refr_T # start of the refractory period diff --git a/models/neurons/izhikevich_psc_alpha_neuron.nestml b/models/neurons/izhikevich_psc_alpha_neuron.nestml index d372f03c7..1c3ff7b37 100644 --- a/models/neurons/izhikevich_psc_alpha_neuron.nestml +++ b/models/neurons/izhikevich_psc_alpha_neuron.nestml @@ -72,8 +72,8 @@ model izhikevich_psc_alpha_neuron: I_e pA = 0 pA input: - inh_spikes <- inhibitory 
spike - exc_spikes <- excitatory spike + inh_spikes <- spike + exc_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/mat2_psc_exp_neuron.nestml b/models/neurons/mat2_psc_exp_neuron.nestml index 11c039d83..43d85dce7 100644 --- a/models/neurons/mat2_psc_exp_neuron.nestml +++ b/models/neurons/mat2_psc_exp_neuron.nestml @@ -80,8 +80,8 @@ model mat2_psc_exp_neuron: P22th real = exp(-h / tau_2) input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/terub_gpe_neuron.nestml b/models/neurons/terub_gpe_neuron.nestml index 6d138245a..f847dd546 100644 --- a/models/neurons/terub_gpe_neuron.nestml +++ b/models/neurons/terub_gpe_neuron.nestml @@ -138,8 +138,8 @@ model terub_gpe_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/terub_stn_neuron.nestml b/models/neurons/terub_stn_neuron.nestml index 495273012..a480dbb69 100644 --- a/models/neurons/terub_stn_neuron.nestml +++ b/models/neurons/terub_stn_neuron.nestml @@ -144,8 +144,8 @@ model terub_stn_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/traub_psc_alpha_neuron.nestml b/models/neurons/traub_psc_alpha_neuron.nestml index 7803fa6db..5b97d1bd9 100644 --- a/models/neurons/traub_psc_alpha_neuron.nestml +++ b/models/neurons/traub_psc_alpha_neuron.nestml @@ -86,8 +86,8 @@ model traub_psc_alpha_neuron: beta_h_init real = 4.0 / (1.0 + exp(-(V_m / mV + 27.) 
/ 5.)) input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/models/neurons/wb_cond_exp_neuron.nestml b/models/neurons/wb_cond_exp_neuron.nestml index fe9232947..ae32c5238 100644 --- a/models/neurons/wb_cond_exp_neuron.nestml +++ b/models/neurons/wb_cond_exp_neuron.nestml @@ -78,8 +78,8 @@ model wb_cond_exp_neuron: beta_h_init 1/ms = 5.0 / (exp(-0.1 / mV * (E_L + 28.0 mV)) + 1.0) /ms input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/pynestml/cocos/__init__.py b/pynestml/cocos/__init__.py index ddfc395d4..635e455a4 100644 --- a/pynestml/cocos/__init__.py +++ b/pynestml/cocos/__init__.py @@ -41,7 +41,6 @@ 'co_co_parameters_assigned_only_in_parameter_block.py', 'co_cos_manager.py', 'co_co_convolve_has_correct_parameter.py', - 'co_co_input_port_qualifier_unique.py', 'co_co_user_defined_function_correctly_defined.py', 'co_co_variable_once_per_scope.py', 'co_co_vector_variable_in_non_vector_declaration.py' diff --git a/pynestml/cocos/co_co_input_port_qualifier_unique.py b/pynestml/cocos/co_co_input_port_qualifier_unique.py deleted file mode 100644 index 92cc5a4ca..000000000 --- a/pynestml/cocos/co_co_input_port_qualifier_unique.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# -# co_co_input_port_qualifier_unique.py -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -from pynestml.cocos.co_co import CoCo -from pynestml.meta_model.ast_model import ASTModel -from pynestml.utils.logger import LoggingLevel, Logger -from pynestml.utils.messages import Messages -from pynestml.visitors.ast_visitor import ASTVisitor - - -class CoCoInputPortQualifierUnique(CoCo): - """ - This coco ensures that each spike input port has at most one type of modifier inhibitory and excitatory. - - Allowed: - - .. code-block:: nestml - - spike pA <- inhibitory spike - - Not allowed: - - .. code-block:: nestml - - spike pA <- inhibitory inhibitory spike - - """ - - @classmethod - def check_co_co(cls, model: ASTModel): - """ - Ensures the coco for the handed over model. - :param node: a single model instance. - """ - cls.neuronName = model.get_name() - model.accept(InputPortQualifierUniqueVisitor()) - - -class InputPortQualifierUniqueVisitor(ASTVisitor): - """ - This visitor ensures that all input ports are qualified uniquely by keywords. - """ - - def visit_input_port(self, node): - """ - Checks the coco on the current node. - :param node: a single input port. 
- :type node: ASTInputPort - """ - if node.is_spike(): - if node.has_input_qualifiers() and len(node.get_input_qualifiers()) > 1: - code, message = Messages.get_multiple_keywords(", ".join([str(q) for q in node.get_input_qualifiers()])) - Logger.log_message(error_position=node.get_source_position(), code=code, message=message, - log_level=LoggingLevel.ERROR) diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py new file mode 100644 index 000000000..130c3f7ff --- /dev/null +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# +# co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ +from typing import Optional +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_input_port import ASTInputPort +from pynestml.meta_model.ast_model import ASTModel +from pynestml.meta_model.ast_ode_equation import ASTOdeEquation +from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock +from pynestml.symbols.symbol import SymbolKind +from pynestml.utils.ast_utils import ASTUtils +from pynestml.utils.logger import Logger, LoggingLevel +from pynestml.utils.messages import Messages +from pynestml.visitors.ast_visitor import ASTVisitor + + +class CoCoSpikeInputPortsAppearOnlyInEquationRHSAndEventHandlers(CoCo): + """ + This coco ensures that spiking input port names appear only in the right-hand side of equations and in the onReceive block declaration. + """ + + @classmethod + def check_co_co(cls, node): + """ + Ensures the coco for the handed over node. + """ + assert node is not None and (isinstance(node, ASTModel)), "No or wrong type provided (%s): expecting neuron or synapse!" 
% type(node) + + visitor = SpikeInputPortsAppearOnlyInEquationRHSAndEventHandlersVisitor() + visitor.model_ = node + node.accept(visitor) + + +class SpikeInputPortsAppearOnlyInEquationRHSAndEventHandlersVisitor(ASTVisitor): + + def visit_variable(self, node): + in_port: Optional[ASTInputPort] = ASTUtils.get_input_port_by_name(self.model_.get_input_blocks(), node.get_name()) + if in_port is not None and in_port.is_spike(): + _node = node + while _node: + _node = _node.get_parent() + + if isinstance(_node, ASTOnReceiveBlock) and _node.port_name == node.get_name(): + # spike input port was used inside an ``onReceive(spike_in_port)`` block; everything is OK + return + + if isinstance(_node, ASTOdeEquation): + # spike input port was used inside the rhs of an equation; everything is OK + return + + if isinstance(_node, ASTModel): + # we reached the top-level block without running into an ``update`` block on the way --> incorrect usage of the function + code, message = Messages.get_spike_input_port_appears_outside_equation_rhs_and_event_handler(node.get_name()) + Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + log_level=LoggingLevel.ERROR) diff --git a/pynestml/cocos/co_cos_manager.py b/pynestml/cocos/co_cos_manager.py index 6858151f0..0c4855b9b 100644 --- a/pynestml/cocos/co_cos_manager.py +++ b/pynestml/cocos/co_cos_manager.py @@ -42,7 +42,6 @@ from pynestml.cocos.co_co_inline_expression_not_assigned_to import CoCoInlineExpressionNotAssignedTo from pynestml.cocos.co_co_inline_max_one_lhs import CoCoInlineMaxOneLhs from pynestml.cocos.co_co_input_port_not_assigned_to import CoCoInputPortNotAssignedTo -from pynestml.cocos.co_co_input_port_qualifier_unique import CoCoInputPortQualifierUnique from pynestml.cocos.co_co_internals_assigned_only_in_internals_block import CoCoInternalsAssignedOnlyInInternalsBlock from pynestml.cocos.co_co_integrate_odes_called_if_equations_defined import CoCoIntegrateOdesCalledIfEquationsDefined from 
pynestml.cocos.co_co_invariant_is_boolean import CoCoInvariantIsBoolean @@ -60,6 +59,7 @@ from pynestml.cocos.co_co_resolution_func_legally_used import CoCoResolutionFuncLegallyUsed from pynestml.cocos.co_co_resolution_func_used import CoCoResolutionOrStepsFuncUsed from pynestml.cocos.co_co_simple_delta_function import CoCoSimpleDeltaFunction +from pynestml.cocos.co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers import CoCoSpikeInputPortsAppearOnlyInEquationRHSAndEventHandlers from pynestml.cocos.co_co_state_variables_initialized import CoCoStateVariablesInitialized from pynestml.cocos.co_co_timestep_function_legally_used import CoCoTimestepFuncLegallyUsed from pynestml.cocos.co_co_user_defined_function_correctly_defined import CoCoUserDefinedFunctionCorrectlyDefined @@ -100,6 +100,14 @@ def check_each_block_defined_at_most_once(cls, node: ASTModel): """ CoCoEachBlockDefinedAtMostOnce.check_co_co(node) + @classmethod + def check_input_ports_appear_only_in_equation_rhs_and_event_handlers(cls, node: ASTModel): + """ + Checks if in the handed over model, each block is defined at most once and mandatory blocks are defined. + :param node: a single model instance + """ + CoCoSpikeInputPortsAppearOnlyInEquationRHSAndEventHandlers.check_co_co(node) + @classmethod def check_function_declared_and_correctly_typed(cls, model: ASTModel): """ @@ -211,14 +219,6 @@ def check_no_nest_namespace_collisions(cls, model: ASTModel): """ CoCoNoNestNameSpaceCollision.check_co_co(model) - @classmethod - def check_input_port_qualifier_unique(cls, model: ASTModel): - """ - Checks that no spiking input ports are defined with redundant qualifiers. - :param model: a single model object. 
- """ - CoCoInputPortQualifierUnique.check_co_co(model) - @classmethod def check_kernel_type(cls, model: ASTModel) -> None: """ @@ -451,7 +451,6 @@ def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): cls.check_order_of_equations_correct(model) cls.check_numerator_of_unit_is_one_if_numeric(model) cls.check_no_nest_namespace_collisions(model) - cls.check_input_port_qualifier_unique(model) cls.check_parameters_not_assigned_outside_parameters_block(model) cls.check_internals_not_assigned_outside_internals_block(model) cls.check_user_defined_function_correctly_built(model) @@ -482,5 +481,6 @@ def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): cls.check_resolution_func_legally_used(model) cls.check_input_port_size_type(model) cls.check_timestep_func_legally_used(model) + cls.check_input_ports_appear_only_in_equation_rhs_and_event_handlers(model) Logger.set_current_node(None) diff --git a/pynestml/codegeneration/printers/model_printer.py b/pynestml/codegeneration/printers/model_printer.py index ab52c5569..ba4e3ab28 100644 --- a/pynestml/codegeneration/printers/model_printer.py +++ b/pynestml/codegeneration/printers/model_printer.py @@ -36,7 +36,6 @@ from pynestml.meta_model.ast_if_clause import ASTIfClause from pynestml.meta_model.ast_if_stmt import ASTIfStmt from pynestml.meta_model.ast_input_block import ASTInputBlock -from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_kernel import ASTKernel from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator from pynestml.meta_model.ast_model import ASTModel @@ -123,9 +122,6 @@ def print_input_block(self, node: ASTInputBlock) -> str: def print_input_port(self, node: ASTInputPort) -> str: raise Exception("Printer does not support printing this node type") - def print_input_qualifier(self, node: ASTInputQualifier) -> str: - raise Exception("Printer does not support printing this node type") - def print_logical_operator(self, 
node: ASTLogicalOperator) -> str: raise Exception("Printer does not support printing this node type") @@ -250,9 +246,6 @@ def print(self, node: ASTNode) -> str: if isinstance(node, ASTInputPort): return self.print_input_port(node) - if isinstance(node, ASTInputQualifier): - return self.print_input_qualifier(node) - if isinstance(node, ASTKernel): return self.print_kernel(node) diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index f03d9931d..fad94ccd4 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -40,7 +40,6 @@ from pynestml.meta_model.ast_if_stmt import ASTIfStmt from pynestml.meta_model.ast_input_block import ASTInputBlock from pynestml.meta_model.ast_input_port import ASTInputPort -from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_kernel import ASTKernel from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator from pynestml.meta_model.ast_namespace_decorator import ASTNamespaceDecorator @@ -362,9 +361,6 @@ def print_input_port(self, node: ASTInputPort) -> str: if node.has_size_parameter(): ret += "[" + self.print(node.get_size_parameter()) + "]" ret += " <- " - if node.has_input_qualifiers(): - for qual in node.get_input_qualifiers(): - ret += self.print(qual) + " " if node.is_spike(): ret += "spike" else: @@ -372,13 +368,6 @@ def print_input_port(self, node: ASTInputPort) -> str: ret += print_sl_comment(node.in_comment) + "\n" return ret - def print_input_qualifier(self, node: ASTInputQualifier) -> str: - if node.is_inhibitory: - return "inhibitory" - if node.is_excitatory: - return "excitatory" - return "" - def print_logical_operator(self, node: ASTLogicalOperator) -> str: if node.is_logical_and: return " and " diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 
b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index a4253ce90..24b2f0062 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -148,20 +148,20 @@ template <> void RecordablesMap<{{ neuronName }}>::create() } {%- if neuron.get_spike_input_ports()|length > 1 or neuron.is_multisynapse_spikes() %} -std::vector< std::tuple< int, int > > {{ neuronName }}::rport_to_nestml_buffer_idx = +std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = { -{%- for key, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} -{%- set ns = namespace(rport=key) %} -{%- if ports[0].has_vector_parameter() %} -{%- set size = utils.get_numeric_vector_size(ports[0]) %} -{%- for i in range(size) %} - {{ rport_to_port_map_entry.RportToBufferIndexEntry(ports, ns.rport, index=i) }} -{%- set ns.rport = ns.rport + 1 %} -{%- endfor %} -{%- else %} - {{ rport_to_port_map_entry.RportToBufferIndexEntry(ports, ns.rport) }} -{%- endif %} -{%- endfor %} +{%- for rport, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} +{%- set ns = namespace(rport=rport) %} +{%- if ports[0].has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(ports[0]) %} +{%- for i in range(size) %} + {{ rport_to_port_map_entry.RportToBufferIndexEntry(ports, ns.rport, index=i) }}, +{%- set ns.rport = ns.rport + 1 %} +{%- endfor %} +{%- else %} + {{ rport_to_port_map_entry.RportToBufferIndexEntry(ports, ns.rport) }}, +{%- endif %} +{%- endfor %} }; {%- endif %} @@ -1090,26 +1090,14 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) assert(e.get_delay_steps() > 0); assert( e.get_rport() < B_.spike_inputs_.size() ); - double weight = e.get_weight(); - size_t nestml_buffer_idx = 0; -{%- if neuron.get_spike_input_ports()|length > 1 or neuron.is_multisynapse_spikes() %} - if ( weight >= 0.0 ) - { - nestml_buffer_idx = 
std::get<0>(rport_to_nestml_buffer_idx[e.get_rport()]); - } - else - { - nestml_buffer_idx = std::get<1>(rport_to_nestml_buffer_idx[e.get_rport()]); - if ( nestml_buffer_idx == {{ neuronName }}::PORT_NOT_AVAILABLE ) - { - nestml_buffer_idx = std::get<0>(rport_to_nestml_buffer_idx[e.get_rport()]); - } - weight = -weight; - } +{% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() -%} + const size_t nestml_buffer_idx = rport_to_nestml_buffer_idx[e.get_rport()]; +{%- else %} + const size_t nestml_buffer_idx = 0; {%- endif %} B_.spike_inputs_[ nestml_buffer_idx - MIN_SPIKE_RECEPTOR ].add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), - weight * e.get_multiplicity() ); + e.get_weight() * e.get_multiplicity() ); B_.spike_input_received_[ nestml_buffer_idx - MIN_SPIKE_RECEPTOR ].add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), 1. ); diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 457b84381..31b791775 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -547,7 +547,6 @@ private: static const nest_port_t MIN_SPIKE_RECEPTOR = 0; {%- set ns = namespace(count=0) %} {%- endif %} - static const nest_port_t PORT_NOT_AVAILABLE = -1; enum SynapseTypes { @@ -568,8 +567,8 @@ private: static const size_t NUM_SPIKE_RECEPTORS = MAX_SPIKE_RECEPTOR - MIN_SPIKE_RECEPTOR; -{% if neuron.get_spike_input_ports()|length > 1 or neuron.is_multisynapse_spikes() -%} - static std::vector< std::tuple< int, int > > rport_to_nestml_buffer_idx; +{% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() -%} + static std::vector< size_t > rport_to_nestml_buffer_idx; {%- endif %} /** @@ -1061,19 +1060,19 @@ 
inline void {{neuronName}}::get_status(DictionaryDatum &__d) const {%- if (neuron.get_multiple_receptors())|length > 1 or neuron.is_multisynapse_spikes() %} DictionaryDatum __receptor_type = new Dictionary(); -{%- for key, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} -{%- set ns = namespace(rport=key) %} -{%- for port in ports %} -{%- if not port.has_vector_parameter() %} +{%- for rport, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} +{%- set ns = namespace(rport=rport) %} +{%- for port in ports %} +{%- if not port.has_vector_parameter() %} ( *__receptor_type )[ "{{port.get_symbol_name().upper()}}" ] = {{ns.rport + 1}}; -{%- else %} -{%- set size = utils.get_numeric_vector_size(port) %} -{%- for i in range(size) %} +{%- else %} +{%- set size = utils.get_numeric_vector_size(port) %} +{%- for i in range(size) %} ( *__receptor_type )[ "{{port.get_symbol_name().upper()}}_{{i}}" ] = {{ns.rport + i + 1}}, +{%- endfor %} +{%- endif %} {%- endfor %} -{%- endif %} {%- endfor %} -{%- endfor %} ( *__d )[ "receptor_types" ] = __receptor_type; {%- endif %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/RportToBufferIndexEntry.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/RportToBufferIndexEntry.jinja2 index 0e6a13c28..e3251a576 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/RportToBufferIndexEntry.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/RportToBufferIndexEntry.jinja2 @@ -4,17 +4,5 @@ {%- else -%} {%- set name = "{}" %} {%- endif -%} - -{%- if ports|length > 1 -%} -{%- if ports[0].is_excitatory() %} -{%- set exc_port = ports[0] %} -{%- set inh_port = ports[1] %} -{%- else %} -{%- set exc_port = ports[1] %} -{%- set inh_port = ports[0] %} -{%- endif %} - { {{neuronName}}::{{ name.format(exc_port.get_symbol_name().upper()) }}, {{neuronName}}::{{ name.format(inh_port.get_symbol_name().upper()) }} }, -{%- else 
-%} - { {{neuronName}}::{{ name.format(ports[0].get_symbol_name().upper()) }}, {{neuronName}}::PORT_NOT_AVAILABLE }, -{%- endif -%} + {{ neuronName }}::{{ name.format(ports[0].get_symbol_name().upper()) }} {%- endmacro -%} diff --git a/pynestml/generated/PyNestMLLexer.py b/pynestml/generated/PyNestMLLexer.py index 90123821f..b914e8d70 100644 --- a/pynestml/generated/PyNestMLLexer.py +++ b/pynestml/generated/PyNestMLLexer.py @@ -15,7 +15,7 @@ def serializedATN(): return [ - 4,0,90,702,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, + 4,0,88,676,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, 2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2, 13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7, 19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2, @@ -28,49 +28,47 @@ def serializedATN(): 65,7,65,2,66,7,66,2,67,7,67,2,68,7,68,2,69,7,69,2,70,7,70,2,71,7, 71,2,72,7,72,2,73,7,73,2,74,7,74,2,75,7,75,2,76,7,76,2,77,7,77,2, 78,7,78,2,79,7,79,2,80,7,80,2,81,7,81,2,82,7,82,2,83,7,83,2,84,7, - 84,2,85,7,85,2,86,7,86,2,87,7,87,2,88,7,88,2,89,7,89,2,90,7,90,2, - 91,7,91,1,0,1,0,1,0,1,0,1,1,3,1,191,8,1,1,1,1,1,1,2,1,2,1,2,3,2, - 198,8,2,1,3,4,3,201,8,3,11,3,12,3,202,1,3,1,3,1,4,1,4,1,4,1,4,1, - 4,1,5,1,5,5,5,214,8,5,10,5,12,5,217,9,5,1,5,1,5,4,5,221,8,5,11,5, - 12,5,222,1,5,1,5,1,6,1,6,5,6,229,8,6,10,6,12,6,232,9,6,1,6,1,6,1, - 7,1,7,1,7,3,7,239,8,7,1,7,1,7,1,7,3,7,244,8,7,3,7,246,8,7,1,7,1, - 7,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9,1,10,1,10, - 1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,11,1,11,1,11, - 1,12,1,12,1,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13, - 1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15, - 1,15,1,15,1,16,1,16,1,16,1,17,1,17,1,17,1,17,1,17,1,18,1,18,1,18, - 1,18,1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20,1,20,1,21, - 1,21,1,21,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24, - 1,24,1,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27, - 
1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,28,1,28, - 1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30, - 1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,32,1,32, - 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33, - 1,33,1,33,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,35, - 1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,37, - 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39, - 1,39,1,39,1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,1,41, - 1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42, - 1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1,43, - 1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44,1,44,1,44, - 1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,45,1,45,1,46, - 1,46,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,49,1,50,1,50,1,51,1,51, - 1,52,1,52,1,53,1,53,1,54,1,54,1,55,1,55,1,55,1,56,1,56,1,57,1,57, - 1,57,1,58,1,58,1,58,1,59,1,59,1,59,1,60,1,60,1,60,1,61,1,61,1,62, - 1,62,1,63,1,63,1,63,1,64,1,64,1,64,1,65,1,65,1,65,1,66,1,66,1,66, - 1,67,1,67,1,67,1,68,1,68,1,68,1,69,1,69,1,69,1,70,1,70,1,70,1,71, - 1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,75,1,75,1,76,1,76,1,76, - 1,77,1,77,1,78,1,78,1,79,1,79,1,80,1,80,1,81,1,81,1,81,1,82,1,82, - 1,83,1,83,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84, - 1,84,1,84,1,84,1,84,1,84,1,84,1,84,3,84,636,8,84,1,85,1,85,1,85, - 4,85,641,8,85,11,85,12,85,642,1,85,3,85,646,8,85,1,85,3,85,649,8, - 85,1,85,3,85,652,8,85,1,85,5,85,655,8,85,10,85,12,85,658,9,85,1, - 85,1,85,1,86,3,86,663,8,86,1,86,5,86,666,8,86,10,86,12,86,669,9, - 86,1,87,4,87,672,8,87,11,87,12,87,673,1,88,1,88,3,88,678,8,88,1, - 89,3,89,681,8,89,1,89,1,89,1,89,1,89,1,89,3,89,688,8,89,1,90,1,90, - 3,90,692,8,90,1,90,1,90,1,90,1,91,1,91,3,91,699,8,91,1,91,1,91,2, - 215,222,0,92,1,3,3,0,5,4,7,5,9,6,11,7,13,8,15,9,17,10,19,11,21,12, + 
84,2,85,7,85,2,86,7,86,2,87,7,87,2,88,7,88,2,89,7,89,1,0,1,0,1,0, + 1,0,1,1,3,1,187,8,1,1,1,1,1,1,2,1,2,1,2,3,2,194,8,2,1,3,4,3,197, + 8,3,11,3,12,3,198,1,3,1,3,1,4,1,4,1,4,1,4,1,4,1,5,1,5,5,5,210,8, + 5,10,5,12,5,213,9,5,1,5,1,5,4,5,217,8,5,11,5,12,5,218,1,5,1,5,1, + 6,1,6,5,6,225,8,6,10,6,12,6,228,9,6,1,6,1,6,1,7,1,7,1,7,3,7,235, + 8,7,1,7,1,7,1,7,3,7,240,8,7,3,7,242,8,7,1,7,1,7,1,8,1,8,1,8,1,8, + 1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9,1,10,1,10,1,10,1,10,1,10,1,10, + 1,10,1,11,1,11,1,11,1,11,1,11,1,11,1,11,1,11,1,12,1,12,1,12,1,12, + 1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,14,1,14,1,14, + 1,14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15,1,15,1,15,1,16,1,16, + 1,16,1,17,1,17,1,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,19,1,19, + 1,19,1,19,1,20,1,20,1,20,1,20,1,20,1,20,1,21,1,21,1,21,1,22,1,22, + 1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24,1,24,1,24,1,25,1,25, + 1,25,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27, + 1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,28,1,28,1,28,1,29,1,29,1,29, + 1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30,1,31,1,31,1,31,1,31, + 1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,32,1,32,1,32,1,32,1,32,1,32, + 1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,34,1,34, + 1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35, + 1,35,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,37,1,37,1,37,1,37,1,37, + 1,37,1,37,1,37,1,37,1,37,1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38, + 1,38,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,39, + 1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41, + 1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42,1,42,1,42, + 1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,44, + 1,44,1,44,1,44,1,45,1,45,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,49, + 1,50,1,50,1,51,1,51,1,52,1,52,1,53,1,53,1,53,1,54,1,54,1,55,1,55, + 1,55,1,56,1,56,1,56,1,57,1,57,1,57,1,58,1,58,1,58,1,59,1,59,1,60, + 1,60,1,61,1,61,1,61,1,62,1,62,1,62,1,63,1,63,1,63,1,64,1,64,1,64, + 
1,65,1,65,1,65,1,66,1,66,1,66,1,67,1,67,1,67,1,68,1,68,1,68,1,69, + 1,69,1,69,1,70,1,70,1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,74, + 1,75,1,75,1,76,1,76,1,77,1,77,1,78,1,78,1,79,1,79,1,79,1,80,1,80, + 1,81,1,81,1,82,1,82,1,82,1,82,1,82,1,82,1,82,1,82,1,82,1,82,1,82, + 1,82,1,82,1,82,1,82,1,82,1,82,1,82,3,82,610,8,82,1,83,1,83,1,83, + 4,83,615,8,83,11,83,12,83,616,1,83,3,83,620,8,83,1,83,3,83,623,8, + 83,1,83,3,83,626,8,83,1,83,5,83,629,8,83,10,83,12,83,632,9,83,1, + 83,1,83,1,84,3,84,637,8,84,1,84,5,84,640,8,84,10,84,12,84,643,9, + 84,1,85,4,85,646,8,85,11,85,12,85,647,1,86,1,86,3,86,652,8,86,1, + 87,3,87,655,8,87,1,87,1,87,1,87,1,87,1,87,3,87,662,8,87,1,88,1,88, + 3,88,666,8,88,1,88,1,88,1,88,1,89,1,89,3,89,673,8,89,1,89,1,89,2, + 211,218,0,90,1,3,3,0,5,4,7,5,9,6,11,7,13,8,15,9,17,10,19,11,21,12, 23,13,25,14,27,15,29,16,31,17,33,18,35,19,37,20,39,21,41,22,43,23, 45,24,47,25,49,26,51,27,53,28,55,29,57,30,59,31,61,32,63,33,65,34, 67,35,69,36,71,37,73,38,75,39,77,40,79,41,81,42,83,43,85,44,87,45, @@ -78,194 +76,186 @@ def serializedATN(): 109,56,111,57,113,58,115,59,117,60,119,61,121,62,123,63,125,64,127, 65,129,66,131,67,133,68,135,69,137,70,139,71,141,72,143,73,145,74, 147,75,149,76,151,77,153,78,155,79,157,80,159,81,161,82,163,83,165, - 84,167,85,169,86,171,87,173,88,175,89,177,90,179,0,181,0,183,0,1, - 0,7,2,0,9,9,32,32,2,0,10,10,13,13,4,0,10,10,13,13,34,34,92,92,4, - 0,36,36,65,90,95,95,97,122,5,0,36,36,48,57,65,90,95,95,97,122,1, - 0,48,57,2,0,69,69,101,101,723,0,1,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0, - 0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0, - 0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0, - 0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0, - 0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0, - 0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0,57,1,0,0, - 0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0, - 
0,0,69,1,0,0,0,0,71,1,0,0,0,0,73,1,0,0,0,0,75,1,0,0,0,0,77,1,0,0, - 0,0,79,1,0,0,0,0,81,1,0,0,0,0,83,1,0,0,0,0,85,1,0,0,0,0,87,1,0,0, - 0,0,89,1,0,0,0,0,91,1,0,0,0,0,93,1,0,0,0,0,95,1,0,0,0,0,97,1,0,0, - 0,0,99,1,0,0,0,0,101,1,0,0,0,0,103,1,0,0,0,0,105,1,0,0,0,0,107,1, - 0,0,0,0,109,1,0,0,0,0,111,1,0,0,0,0,113,1,0,0,0,0,115,1,0,0,0,0, - 117,1,0,0,0,0,119,1,0,0,0,0,121,1,0,0,0,0,123,1,0,0,0,0,125,1,0, - 0,0,0,127,1,0,0,0,0,129,1,0,0,0,0,131,1,0,0,0,0,133,1,0,0,0,0,135, - 1,0,0,0,0,137,1,0,0,0,0,139,1,0,0,0,0,141,1,0,0,0,0,143,1,0,0,0, - 0,145,1,0,0,0,0,147,1,0,0,0,0,149,1,0,0,0,0,151,1,0,0,0,0,153,1, - 0,0,0,0,155,1,0,0,0,0,157,1,0,0,0,0,159,1,0,0,0,0,161,1,0,0,0,0, - 163,1,0,0,0,0,165,1,0,0,0,0,167,1,0,0,0,0,169,1,0,0,0,0,171,1,0, - 0,0,0,173,1,0,0,0,0,175,1,0,0,0,0,177,1,0,0,0,1,185,1,0,0,0,3,190, - 1,0,0,0,5,194,1,0,0,0,7,200,1,0,0,0,9,206,1,0,0,0,11,211,1,0,0,0, - 13,226,1,0,0,0,15,245,1,0,0,0,17,249,1,0,0,0,19,257,1,0,0,0,21,262, - 1,0,0,0,23,269,1,0,0,0,25,277,1,0,0,0,27,282,1,0,0,0,29,291,1,0, - 0,0,31,298,1,0,0,0,33,305,1,0,0,0,35,308,1,0,0,0,37,313,1,0,0,0, - 39,318,1,0,0,0,41,322,1,0,0,0,43,328,1,0,0,0,45,331,1,0,0,0,47,336, - 1,0,0,0,49,340,1,0,0,0,51,344,1,0,0,0,53,347,1,0,0,0,55,351,1,0, - 0,0,57,362,1,0,0,0,59,369,1,0,0,0,61,375,1,0,0,0,63,381,1,0,0,0, - 65,392,1,0,0,0,67,402,1,0,0,0,69,409,1,0,0,0,71,419,1,0,0,0,73,425, - 1,0,0,0,75,432,1,0,0,0,77,443,1,0,0,0,79,453,1,0,0,0,81,465,1,0, - 0,0,83,471,1,0,0,0,85,482,1,0,0,0,87,493,1,0,0,0,89,506,1,0,0,0, - 91,521,1,0,0,0,93,523,1,0,0,0,95,527,1,0,0,0,97,529,1,0,0,0,99,531, - 1,0,0,0,101,533,1,0,0,0,103,535,1,0,0,0,105,537,1,0,0,0,107,539, - 1,0,0,0,109,541,1,0,0,0,111,543,1,0,0,0,113,546,1,0,0,0,115,548, - 1,0,0,0,117,551,1,0,0,0,119,554,1,0,0,0,121,557,1,0,0,0,123,560, - 1,0,0,0,125,562,1,0,0,0,127,564,1,0,0,0,129,567,1,0,0,0,131,570, - 1,0,0,0,133,573,1,0,0,0,135,576,1,0,0,0,137,579,1,0,0,0,139,582, - 1,0,0,0,141,585,1,0,0,0,143,588,1,0,0,0,145,591,1,0,0,0,147,593, - 
1,0,0,0,149,595,1,0,0,0,151,597,1,0,0,0,153,599,1,0,0,0,155,602, - 1,0,0,0,157,604,1,0,0,0,159,606,1,0,0,0,161,608,1,0,0,0,163,610, - 1,0,0,0,165,613,1,0,0,0,167,615,1,0,0,0,169,635,1,0,0,0,171,637, - 1,0,0,0,173,662,1,0,0,0,175,671,1,0,0,0,177,677,1,0,0,0,179,687, - 1,0,0,0,181,691,1,0,0,0,183,698,1,0,0,0,185,186,5,34,0,0,186,187, - 5,34,0,0,187,188,5,34,0,0,188,2,1,0,0,0,189,191,5,13,0,0,190,189, - 1,0,0,0,190,191,1,0,0,0,191,192,1,0,0,0,192,193,5,10,0,0,193,4,1, - 0,0,0,194,195,3,145,72,0,195,197,3,3,1,0,196,198,3,7,3,0,197,196, - 1,0,0,0,197,198,1,0,0,0,198,6,1,0,0,0,199,201,7,0,0,0,200,199,1, - 0,0,0,201,202,1,0,0,0,202,200,1,0,0,0,202,203,1,0,0,0,203,204,1, - 0,0,0,204,205,6,3,0,0,205,8,1,0,0,0,206,207,5,92,0,0,207,208,3,3, - 1,0,208,209,1,0,0,0,209,210,6,4,0,0,210,10,1,0,0,0,211,215,3,1,0, - 0,212,214,9,0,0,0,213,212,1,0,0,0,214,217,1,0,0,0,215,216,1,0,0, - 0,215,213,1,0,0,0,216,218,1,0,0,0,217,215,1,0,0,0,218,220,3,1,0, - 0,219,221,3,3,1,0,220,219,1,0,0,0,221,222,1,0,0,0,222,223,1,0,0, - 0,222,220,1,0,0,0,223,224,1,0,0,0,224,225,6,5,1,0,225,12,1,0,0,0, - 226,230,5,35,0,0,227,229,8,1,0,0,228,227,1,0,0,0,229,232,1,0,0,0, - 230,228,1,0,0,0,230,231,1,0,0,0,231,233,1,0,0,0,232,230,1,0,0,0, - 233,234,6,6,1,0,234,14,1,0,0,0,235,236,4,7,0,0,236,246,3,7,3,0,237, - 239,5,13,0,0,238,237,1,0,0,0,238,239,1,0,0,0,239,240,1,0,0,0,240, - 241,5,10,0,0,241,243,1,0,0,0,242,244,3,7,3,0,243,242,1,0,0,0,243, - 244,1,0,0,0,244,246,1,0,0,0,245,235,1,0,0,0,245,238,1,0,0,0,246, - 247,1,0,0,0,247,248,6,7,2,0,248,16,1,0,0,0,249,250,5,105,0,0,250, - 251,5,110,0,0,251,252,5,116,0,0,252,253,5,101,0,0,253,254,5,103, - 0,0,254,255,5,101,0,0,255,256,5,114,0,0,256,18,1,0,0,0,257,258,5, - 114,0,0,258,259,5,101,0,0,259,260,5,97,0,0,260,261,5,108,0,0,261, - 20,1,0,0,0,262,263,5,115,0,0,263,264,5,116,0,0,264,265,5,114,0,0, - 265,266,5,105,0,0,266,267,5,110,0,0,267,268,5,103,0,0,268,22,1,0, - 0,0,269,270,5,98,0,0,270,271,5,111,0,0,271,272,5,111,0,0,272,273, - 
5,108,0,0,273,274,5,101,0,0,274,275,5,97,0,0,275,276,5,110,0,0,276, - 24,1,0,0,0,277,278,5,118,0,0,278,279,5,111,0,0,279,280,5,105,0,0, - 280,281,5,100,0,0,281,26,1,0,0,0,282,283,5,102,0,0,283,284,5,117, - 0,0,284,285,5,110,0,0,285,286,5,99,0,0,286,287,5,116,0,0,287,288, - 5,105,0,0,288,289,5,111,0,0,289,290,5,110,0,0,290,28,1,0,0,0,291, - 292,5,105,0,0,292,293,5,110,0,0,293,294,5,108,0,0,294,295,5,105, - 0,0,295,296,5,110,0,0,296,297,5,101,0,0,297,30,1,0,0,0,298,299,5, - 114,0,0,299,300,5,101,0,0,300,301,5,116,0,0,301,302,5,117,0,0,302, - 303,5,114,0,0,303,304,5,110,0,0,304,32,1,0,0,0,305,306,5,105,0,0, - 306,307,5,102,0,0,307,34,1,0,0,0,308,309,5,101,0,0,309,310,5,108, - 0,0,310,311,5,105,0,0,311,312,5,102,0,0,312,36,1,0,0,0,313,314,5, - 101,0,0,314,315,5,108,0,0,315,316,5,115,0,0,316,317,5,101,0,0,317, - 38,1,0,0,0,318,319,5,102,0,0,319,320,5,111,0,0,320,321,5,114,0,0, - 321,40,1,0,0,0,322,323,5,119,0,0,323,324,5,104,0,0,324,325,5,105, - 0,0,325,326,5,108,0,0,326,327,5,101,0,0,327,42,1,0,0,0,328,329,5, - 105,0,0,329,330,5,110,0,0,330,44,1,0,0,0,331,332,5,115,0,0,332,333, - 5,116,0,0,333,334,5,101,0,0,334,335,5,112,0,0,335,46,1,0,0,0,336, - 337,5,105,0,0,337,338,5,110,0,0,338,339,5,102,0,0,339,48,1,0,0,0, - 340,341,5,97,0,0,341,342,5,110,0,0,342,343,5,100,0,0,343,50,1,0, - 0,0,344,345,5,111,0,0,345,346,5,114,0,0,346,52,1,0,0,0,347,348,5, - 110,0,0,348,349,5,111,0,0,349,350,5,116,0,0,350,54,1,0,0,0,351,352, - 5,114,0,0,352,353,5,101,0,0,353,354,5,99,0,0,354,355,5,111,0,0,355, - 356,5,114,0,0,356,357,5,100,0,0,357,358,5,97,0,0,358,359,5,98,0, - 0,359,360,5,108,0,0,360,361,5,101,0,0,361,56,1,0,0,0,362,363,5,107, - 0,0,363,364,5,101,0,0,364,365,5,114,0,0,365,366,5,110,0,0,366,367, - 5,101,0,0,367,368,5,108,0,0,368,58,1,0,0,0,369,370,5,109,0,0,370, - 371,5,111,0,0,371,372,5,100,0,0,372,373,5,101,0,0,373,374,5,108, - 0,0,374,60,1,0,0,0,375,376,5,115,0,0,376,377,5,116,0,0,377,378,5, - 97,0,0,378,379,5,116,0,0,379,380,5,101,0,0,380,62,1,0,0,0,381,382, - 
5,112,0,0,382,383,5,97,0,0,383,384,5,114,0,0,384,385,5,97,0,0,385, - 386,5,109,0,0,386,387,5,101,0,0,387,388,5,116,0,0,388,389,5,101, - 0,0,389,390,5,114,0,0,390,391,5,115,0,0,391,64,1,0,0,0,392,393,5, - 105,0,0,393,394,5,110,0,0,394,395,5,116,0,0,395,396,5,101,0,0,396, - 397,5,114,0,0,397,398,5,110,0,0,398,399,5,97,0,0,399,400,5,108,0, - 0,400,401,5,115,0,0,401,66,1,0,0,0,402,403,5,117,0,0,403,404,5,112, - 0,0,404,405,5,100,0,0,405,406,5,97,0,0,406,407,5,116,0,0,407,408, - 5,101,0,0,408,68,1,0,0,0,409,410,5,101,0,0,410,411,5,113,0,0,411, - 412,5,117,0,0,412,413,5,97,0,0,413,414,5,116,0,0,414,415,5,105,0, - 0,415,416,5,111,0,0,416,417,5,110,0,0,417,418,5,115,0,0,418,70,1, - 0,0,0,419,420,5,105,0,0,420,421,5,110,0,0,421,422,5,112,0,0,422, - 423,5,117,0,0,423,424,5,116,0,0,424,72,1,0,0,0,425,426,5,111,0,0, - 426,427,5,117,0,0,427,428,5,116,0,0,428,429,5,112,0,0,429,430,5, - 117,0,0,430,431,5,116,0,0,431,74,1,0,0,0,432,433,5,99,0,0,433,434, - 5,111,0,0,434,435,5,110,0,0,435,436,5,116,0,0,436,437,5,105,0,0, - 437,438,5,110,0,0,438,439,5,117,0,0,439,440,5,111,0,0,440,441,5, - 117,0,0,441,442,5,115,0,0,442,76,1,0,0,0,443,444,5,111,0,0,444,445, - 5,110,0,0,445,446,5,82,0,0,446,447,5,101,0,0,447,448,5,99,0,0,448, - 449,5,101,0,0,449,450,5,105,0,0,450,451,5,118,0,0,451,452,5,101, - 0,0,452,78,1,0,0,0,453,454,5,111,0,0,454,455,5,110,0,0,455,456,5, - 67,0,0,456,457,5,111,0,0,457,458,5,110,0,0,458,459,5,100,0,0,459, - 460,5,105,0,0,460,461,5,116,0,0,461,462,5,105,0,0,462,463,5,111, - 0,0,463,464,5,110,0,0,464,80,1,0,0,0,465,466,5,115,0,0,466,467,5, - 112,0,0,467,468,5,105,0,0,468,469,5,107,0,0,469,470,5,101,0,0,470, - 82,1,0,0,0,471,472,5,105,0,0,472,473,5,110,0,0,473,474,5,104,0,0, - 474,475,5,105,0,0,475,476,5,98,0,0,476,477,5,105,0,0,477,478,5,116, - 0,0,478,479,5,111,0,0,479,480,5,114,0,0,480,481,5,121,0,0,481,84, - 1,0,0,0,482,483,5,101,0,0,483,484,5,120,0,0,484,485,5,99,0,0,485, - 486,5,105,0,0,486,487,5,116,0,0,487,488,5,97,0,0,488,489,5,116,0, - 
0,489,490,5,111,0,0,490,491,5,114,0,0,491,492,5,121,0,0,492,86,1, - 0,0,0,493,494,5,64,0,0,494,495,5,104,0,0,495,496,5,111,0,0,496,497, - 5,109,0,0,497,498,5,111,0,0,498,499,5,103,0,0,499,500,5,101,0,0, - 500,501,5,110,0,0,501,502,5,101,0,0,502,503,5,111,0,0,503,504,5, - 117,0,0,504,505,5,115,0,0,505,88,1,0,0,0,506,507,5,64,0,0,507,508, - 5,104,0,0,508,509,5,101,0,0,509,510,5,116,0,0,510,511,5,101,0,0, - 511,512,5,114,0,0,512,513,5,111,0,0,513,514,5,103,0,0,514,515,5, - 101,0,0,515,516,5,110,0,0,516,517,5,101,0,0,517,518,5,111,0,0,518, - 519,5,117,0,0,519,520,5,115,0,0,520,90,1,0,0,0,521,522,5,64,0,0, - 522,92,1,0,0,0,523,524,5,46,0,0,524,525,5,46,0,0,525,526,5,46,0, - 0,526,94,1,0,0,0,527,528,5,40,0,0,528,96,1,0,0,0,529,530,5,41,0, - 0,530,98,1,0,0,0,531,532,5,43,0,0,532,100,1,0,0,0,533,534,5,126, - 0,0,534,102,1,0,0,0,535,536,5,124,0,0,536,104,1,0,0,0,537,538,5, - 94,0,0,538,106,1,0,0,0,539,540,5,38,0,0,540,108,1,0,0,0,541,542, - 5,91,0,0,542,110,1,0,0,0,543,544,5,60,0,0,544,545,5,45,0,0,545,112, - 1,0,0,0,546,547,5,93,0,0,547,114,1,0,0,0,548,549,5,91,0,0,549,550, - 5,91,0,0,550,116,1,0,0,0,551,552,5,93,0,0,552,553,5,93,0,0,553,118, - 1,0,0,0,554,555,5,60,0,0,555,556,5,60,0,0,556,120,1,0,0,0,557,558, - 5,62,0,0,558,559,5,62,0,0,559,122,1,0,0,0,560,561,5,60,0,0,561,124, - 1,0,0,0,562,563,5,62,0,0,563,126,1,0,0,0,564,565,5,60,0,0,565,566, - 5,61,0,0,566,128,1,0,0,0,567,568,5,43,0,0,568,569,5,61,0,0,569,130, - 1,0,0,0,570,571,5,45,0,0,571,572,5,61,0,0,572,132,1,0,0,0,573,574, - 5,42,0,0,574,575,5,61,0,0,575,134,1,0,0,0,576,577,5,47,0,0,577,578, - 5,61,0,0,578,136,1,0,0,0,579,580,5,61,0,0,580,581,5,61,0,0,581,138, - 1,0,0,0,582,583,5,33,0,0,583,584,5,61,0,0,584,140,1,0,0,0,585,586, - 5,60,0,0,586,587,5,62,0,0,587,142,1,0,0,0,588,589,5,62,0,0,589,590, - 5,61,0,0,590,144,1,0,0,0,591,592,5,44,0,0,592,146,1,0,0,0,593,594, - 5,45,0,0,594,148,1,0,0,0,595,596,5,61,0,0,596,150,1,0,0,0,597,598, - 5,42,0,0,598,152,1,0,0,0,599,600,5,42,0,0,600,601,5,42,0,0,601,154, - 
1,0,0,0,602,603,5,47,0,0,603,156,1,0,0,0,604,605,5,37,0,0,605,158, - 1,0,0,0,606,607,5,63,0,0,607,160,1,0,0,0,608,609,5,58,0,0,609,162, - 1,0,0,0,610,611,5,58,0,0,611,612,5,58,0,0,612,164,1,0,0,0,613,614, - 5,59,0,0,614,166,1,0,0,0,615,616,5,39,0,0,616,168,1,0,0,0,617,618, - 5,116,0,0,618,619,5,114,0,0,619,620,5,117,0,0,620,636,5,101,0,0, - 621,622,5,84,0,0,622,623,5,114,0,0,623,624,5,117,0,0,624,636,5,101, - 0,0,625,626,5,102,0,0,626,627,5,97,0,0,627,628,5,108,0,0,628,629, - 5,115,0,0,629,636,5,101,0,0,630,631,5,70,0,0,631,632,5,97,0,0,632, - 633,5,108,0,0,633,634,5,115,0,0,634,636,5,101,0,0,635,617,1,0,0, - 0,635,621,1,0,0,0,635,625,1,0,0,0,635,630,1,0,0,0,636,170,1,0,0, - 0,637,656,5,34,0,0,638,651,5,92,0,0,639,641,7,0,0,0,640,639,1,0, - 0,0,641,642,1,0,0,0,642,640,1,0,0,0,642,643,1,0,0,0,643,648,1,0, - 0,0,644,646,5,13,0,0,645,644,1,0,0,0,645,646,1,0,0,0,646,647,1,0, - 0,0,647,649,5,10,0,0,648,645,1,0,0,0,648,649,1,0,0,0,649,652,1,0, - 0,0,650,652,9,0,0,0,651,640,1,0,0,0,651,650,1,0,0,0,652,655,1,0, - 0,0,653,655,8,2,0,0,654,638,1,0,0,0,654,653,1,0,0,0,655,658,1,0, - 0,0,656,654,1,0,0,0,656,657,1,0,0,0,657,659,1,0,0,0,658,656,1,0, - 0,0,659,660,5,34,0,0,660,172,1,0,0,0,661,663,7,3,0,0,662,661,1,0, - 0,0,663,667,1,0,0,0,664,666,7,4,0,0,665,664,1,0,0,0,666,669,1,0, - 0,0,667,665,1,0,0,0,667,668,1,0,0,0,668,174,1,0,0,0,669,667,1,0, - 0,0,670,672,7,5,0,0,671,670,1,0,0,0,672,673,1,0,0,0,673,671,1,0, - 0,0,673,674,1,0,0,0,674,176,1,0,0,0,675,678,3,179,89,0,676,678,3, - 181,90,0,677,675,1,0,0,0,677,676,1,0,0,0,678,178,1,0,0,0,679,681, - 3,175,87,0,680,679,1,0,0,0,680,681,1,0,0,0,681,682,1,0,0,0,682,683, - 5,46,0,0,683,688,3,175,87,0,684,685,3,175,87,0,685,686,5,46,0,0, - 686,688,1,0,0,0,687,680,1,0,0,0,687,684,1,0,0,0,688,180,1,0,0,0, - 689,692,3,175,87,0,690,692,3,179,89,0,691,689,1,0,0,0,691,690,1, - 0,0,0,692,693,1,0,0,0,693,694,7,6,0,0,694,695,3,183,91,0,695,182, - 1,0,0,0,696,699,3,99,49,0,697,699,3,147,73,0,698,696,1,0,0,0,698, - 
697,1,0,0,0,698,699,1,0,0,0,699,700,1,0,0,0,700,701,3,175,87,0,701, - 184,1,0,0,0,26,0,190,197,202,215,222,230,238,243,245,635,642,645, - 648,651,654,656,662,665,667,673,677,680,687,691,698,3,0,1,0,0,2, - 0,1,7,0 + 84,167,85,169,86,171,87,173,88,175,0,177,0,179,0,1,0,7,2,0,9,9,32, + 32,2,0,10,10,13,13,4,0,10,10,13,13,34,34,92,92,4,0,36,36,65,90,95, + 95,97,122,5,0,36,36,48,57,65,90,95,95,97,122,1,0,48,57,2,0,69,69, + 101,101,697,0,1,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11, + 1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21, + 1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0,31, + 1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41, + 1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51, + 1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0,57,1,0,0,0,0,59,1,0,0,0,0,61, + 1,0,0,0,0,63,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,71, + 1,0,0,0,0,73,1,0,0,0,0,75,1,0,0,0,0,77,1,0,0,0,0,79,1,0,0,0,0,81, + 1,0,0,0,0,83,1,0,0,0,0,85,1,0,0,0,0,87,1,0,0,0,0,89,1,0,0,0,0,91, + 1,0,0,0,0,93,1,0,0,0,0,95,1,0,0,0,0,97,1,0,0,0,0,99,1,0,0,0,0,101, + 1,0,0,0,0,103,1,0,0,0,0,105,1,0,0,0,0,107,1,0,0,0,0,109,1,0,0,0, + 0,111,1,0,0,0,0,113,1,0,0,0,0,115,1,0,0,0,0,117,1,0,0,0,0,119,1, + 0,0,0,0,121,1,0,0,0,0,123,1,0,0,0,0,125,1,0,0,0,0,127,1,0,0,0,0, + 129,1,0,0,0,0,131,1,0,0,0,0,133,1,0,0,0,0,135,1,0,0,0,0,137,1,0, + 0,0,0,139,1,0,0,0,0,141,1,0,0,0,0,143,1,0,0,0,0,145,1,0,0,0,0,147, + 1,0,0,0,0,149,1,0,0,0,0,151,1,0,0,0,0,153,1,0,0,0,0,155,1,0,0,0, + 0,157,1,0,0,0,0,159,1,0,0,0,0,161,1,0,0,0,0,163,1,0,0,0,0,165,1, + 0,0,0,0,167,1,0,0,0,0,169,1,0,0,0,0,171,1,0,0,0,0,173,1,0,0,0,1, + 181,1,0,0,0,3,186,1,0,0,0,5,190,1,0,0,0,7,196,1,0,0,0,9,202,1,0, + 0,0,11,207,1,0,0,0,13,222,1,0,0,0,15,241,1,0,0,0,17,245,1,0,0,0, + 19,253,1,0,0,0,21,258,1,0,0,0,23,265,1,0,0,0,25,273,1,0,0,0,27,278, + 1,0,0,0,29,287,1,0,0,0,31,294,1,0,0,0,33,301,1,0,0,0,35,304,1,0, + 0,0,37,309,1,0,0,0,39,314,1,0,0,0,41,318,1,0,0,0,43,324,1,0,0,0, + 
45,327,1,0,0,0,47,332,1,0,0,0,49,336,1,0,0,0,51,340,1,0,0,0,53,343, + 1,0,0,0,55,347,1,0,0,0,57,358,1,0,0,0,59,365,1,0,0,0,61,371,1,0, + 0,0,63,377,1,0,0,0,65,388,1,0,0,0,67,398,1,0,0,0,69,405,1,0,0,0, + 71,415,1,0,0,0,73,421,1,0,0,0,75,428,1,0,0,0,77,439,1,0,0,0,79,449, + 1,0,0,0,81,461,1,0,0,0,83,467,1,0,0,0,85,480,1,0,0,0,87,495,1,0, + 0,0,89,497,1,0,0,0,91,501,1,0,0,0,93,503,1,0,0,0,95,505,1,0,0,0, + 97,507,1,0,0,0,99,509,1,0,0,0,101,511,1,0,0,0,103,513,1,0,0,0,105, + 515,1,0,0,0,107,517,1,0,0,0,109,520,1,0,0,0,111,522,1,0,0,0,113, + 525,1,0,0,0,115,528,1,0,0,0,117,531,1,0,0,0,119,534,1,0,0,0,121, + 536,1,0,0,0,123,538,1,0,0,0,125,541,1,0,0,0,127,544,1,0,0,0,129, + 547,1,0,0,0,131,550,1,0,0,0,133,553,1,0,0,0,135,556,1,0,0,0,137, + 559,1,0,0,0,139,562,1,0,0,0,141,565,1,0,0,0,143,567,1,0,0,0,145, + 569,1,0,0,0,147,571,1,0,0,0,149,573,1,0,0,0,151,576,1,0,0,0,153, + 578,1,0,0,0,155,580,1,0,0,0,157,582,1,0,0,0,159,584,1,0,0,0,161, + 587,1,0,0,0,163,589,1,0,0,0,165,609,1,0,0,0,167,611,1,0,0,0,169, + 636,1,0,0,0,171,645,1,0,0,0,173,651,1,0,0,0,175,661,1,0,0,0,177, + 665,1,0,0,0,179,672,1,0,0,0,181,182,5,34,0,0,182,183,5,34,0,0,183, + 184,5,34,0,0,184,2,1,0,0,0,185,187,5,13,0,0,186,185,1,0,0,0,186, + 187,1,0,0,0,187,188,1,0,0,0,188,189,5,10,0,0,189,4,1,0,0,0,190,191, + 3,141,70,0,191,193,3,3,1,0,192,194,3,7,3,0,193,192,1,0,0,0,193,194, + 1,0,0,0,194,6,1,0,0,0,195,197,7,0,0,0,196,195,1,0,0,0,197,198,1, + 0,0,0,198,196,1,0,0,0,198,199,1,0,0,0,199,200,1,0,0,0,200,201,6, + 3,0,0,201,8,1,0,0,0,202,203,5,92,0,0,203,204,3,3,1,0,204,205,1,0, + 0,0,205,206,6,4,0,0,206,10,1,0,0,0,207,211,3,1,0,0,208,210,9,0,0, + 0,209,208,1,0,0,0,210,213,1,0,0,0,211,212,1,0,0,0,211,209,1,0,0, + 0,212,214,1,0,0,0,213,211,1,0,0,0,214,216,3,1,0,0,215,217,3,3,1, + 0,216,215,1,0,0,0,217,218,1,0,0,0,218,219,1,0,0,0,218,216,1,0,0, + 0,219,220,1,0,0,0,220,221,6,5,1,0,221,12,1,0,0,0,222,226,5,35,0, + 0,223,225,8,1,0,0,224,223,1,0,0,0,225,228,1,0,0,0,226,224,1,0,0, + 
0,226,227,1,0,0,0,227,229,1,0,0,0,228,226,1,0,0,0,229,230,6,6,1, + 0,230,14,1,0,0,0,231,232,4,7,0,0,232,242,3,7,3,0,233,235,5,13,0, + 0,234,233,1,0,0,0,234,235,1,0,0,0,235,236,1,0,0,0,236,237,5,10,0, + 0,237,239,1,0,0,0,238,240,3,7,3,0,239,238,1,0,0,0,239,240,1,0,0, + 0,240,242,1,0,0,0,241,231,1,0,0,0,241,234,1,0,0,0,242,243,1,0,0, + 0,243,244,6,7,2,0,244,16,1,0,0,0,245,246,5,105,0,0,246,247,5,110, + 0,0,247,248,5,116,0,0,248,249,5,101,0,0,249,250,5,103,0,0,250,251, + 5,101,0,0,251,252,5,114,0,0,252,18,1,0,0,0,253,254,5,114,0,0,254, + 255,5,101,0,0,255,256,5,97,0,0,256,257,5,108,0,0,257,20,1,0,0,0, + 258,259,5,115,0,0,259,260,5,116,0,0,260,261,5,114,0,0,261,262,5, + 105,0,0,262,263,5,110,0,0,263,264,5,103,0,0,264,22,1,0,0,0,265,266, + 5,98,0,0,266,267,5,111,0,0,267,268,5,111,0,0,268,269,5,108,0,0,269, + 270,5,101,0,0,270,271,5,97,0,0,271,272,5,110,0,0,272,24,1,0,0,0, + 273,274,5,118,0,0,274,275,5,111,0,0,275,276,5,105,0,0,276,277,5, + 100,0,0,277,26,1,0,0,0,278,279,5,102,0,0,279,280,5,117,0,0,280,281, + 5,110,0,0,281,282,5,99,0,0,282,283,5,116,0,0,283,284,5,105,0,0,284, + 285,5,111,0,0,285,286,5,110,0,0,286,28,1,0,0,0,287,288,5,105,0,0, + 288,289,5,110,0,0,289,290,5,108,0,0,290,291,5,105,0,0,291,292,5, + 110,0,0,292,293,5,101,0,0,293,30,1,0,0,0,294,295,5,114,0,0,295,296, + 5,101,0,0,296,297,5,116,0,0,297,298,5,117,0,0,298,299,5,114,0,0, + 299,300,5,110,0,0,300,32,1,0,0,0,301,302,5,105,0,0,302,303,5,102, + 0,0,303,34,1,0,0,0,304,305,5,101,0,0,305,306,5,108,0,0,306,307,5, + 105,0,0,307,308,5,102,0,0,308,36,1,0,0,0,309,310,5,101,0,0,310,311, + 5,108,0,0,311,312,5,115,0,0,312,313,5,101,0,0,313,38,1,0,0,0,314, + 315,5,102,0,0,315,316,5,111,0,0,316,317,5,114,0,0,317,40,1,0,0,0, + 318,319,5,119,0,0,319,320,5,104,0,0,320,321,5,105,0,0,321,322,5, + 108,0,0,322,323,5,101,0,0,323,42,1,0,0,0,324,325,5,105,0,0,325,326, + 5,110,0,0,326,44,1,0,0,0,327,328,5,115,0,0,328,329,5,116,0,0,329, + 330,5,101,0,0,330,331,5,112,0,0,331,46,1,0,0,0,332,333,5,105,0,0, + 
333,334,5,110,0,0,334,335,5,102,0,0,335,48,1,0,0,0,336,337,5,97, + 0,0,337,338,5,110,0,0,338,339,5,100,0,0,339,50,1,0,0,0,340,341,5, + 111,0,0,341,342,5,114,0,0,342,52,1,0,0,0,343,344,5,110,0,0,344,345, + 5,111,0,0,345,346,5,116,0,0,346,54,1,0,0,0,347,348,5,114,0,0,348, + 349,5,101,0,0,349,350,5,99,0,0,350,351,5,111,0,0,351,352,5,114,0, + 0,352,353,5,100,0,0,353,354,5,97,0,0,354,355,5,98,0,0,355,356,5, + 108,0,0,356,357,5,101,0,0,357,56,1,0,0,0,358,359,5,107,0,0,359,360, + 5,101,0,0,360,361,5,114,0,0,361,362,5,110,0,0,362,363,5,101,0,0, + 363,364,5,108,0,0,364,58,1,0,0,0,365,366,5,109,0,0,366,367,5,111, + 0,0,367,368,5,100,0,0,368,369,5,101,0,0,369,370,5,108,0,0,370,60, + 1,0,0,0,371,372,5,115,0,0,372,373,5,116,0,0,373,374,5,97,0,0,374, + 375,5,116,0,0,375,376,5,101,0,0,376,62,1,0,0,0,377,378,5,112,0,0, + 378,379,5,97,0,0,379,380,5,114,0,0,380,381,5,97,0,0,381,382,5,109, + 0,0,382,383,5,101,0,0,383,384,5,116,0,0,384,385,5,101,0,0,385,386, + 5,114,0,0,386,387,5,115,0,0,387,64,1,0,0,0,388,389,5,105,0,0,389, + 390,5,110,0,0,390,391,5,116,0,0,391,392,5,101,0,0,392,393,5,114, + 0,0,393,394,5,110,0,0,394,395,5,97,0,0,395,396,5,108,0,0,396,397, + 5,115,0,0,397,66,1,0,0,0,398,399,5,117,0,0,399,400,5,112,0,0,400, + 401,5,100,0,0,401,402,5,97,0,0,402,403,5,116,0,0,403,404,5,101,0, + 0,404,68,1,0,0,0,405,406,5,101,0,0,406,407,5,113,0,0,407,408,5,117, + 0,0,408,409,5,97,0,0,409,410,5,116,0,0,410,411,5,105,0,0,411,412, + 5,111,0,0,412,413,5,110,0,0,413,414,5,115,0,0,414,70,1,0,0,0,415, + 416,5,105,0,0,416,417,5,110,0,0,417,418,5,112,0,0,418,419,5,117, + 0,0,419,420,5,116,0,0,420,72,1,0,0,0,421,422,5,111,0,0,422,423,5, + 117,0,0,423,424,5,116,0,0,424,425,5,112,0,0,425,426,5,117,0,0,426, + 427,5,116,0,0,427,74,1,0,0,0,428,429,5,99,0,0,429,430,5,111,0,0, + 430,431,5,110,0,0,431,432,5,116,0,0,432,433,5,105,0,0,433,434,5, + 110,0,0,434,435,5,117,0,0,435,436,5,111,0,0,436,437,5,117,0,0,437, + 438,5,115,0,0,438,76,1,0,0,0,439,440,5,111,0,0,440,441,5,110,0,0, + 
441,442,5,82,0,0,442,443,5,101,0,0,443,444,5,99,0,0,444,445,5,101, + 0,0,445,446,5,105,0,0,446,447,5,118,0,0,447,448,5,101,0,0,448,78, + 1,0,0,0,449,450,5,111,0,0,450,451,5,110,0,0,451,452,5,67,0,0,452, + 453,5,111,0,0,453,454,5,110,0,0,454,455,5,100,0,0,455,456,5,105, + 0,0,456,457,5,116,0,0,457,458,5,105,0,0,458,459,5,111,0,0,459,460, + 5,110,0,0,460,80,1,0,0,0,461,462,5,115,0,0,462,463,5,112,0,0,463, + 464,5,105,0,0,464,465,5,107,0,0,465,466,5,101,0,0,466,82,1,0,0,0, + 467,468,5,64,0,0,468,469,5,104,0,0,469,470,5,111,0,0,470,471,5,109, + 0,0,471,472,5,111,0,0,472,473,5,103,0,0,473,474,5,101,0,0,474,475, + 5,110,0,0,475,476,5,101,0,0,476,477,5,111,0,0,477,478,5,117,0,0, + 478,479,5,115,0,0,479,84,1,0,0,0,480,481,5,64,0,0,481,482,5,104, + 0,0,482,483,5,101,0,0,483,484,5,116,0,0,484,485,5,101,0,0,485,486, + 5,114,0,0,486,487,5,111,0,0,487,488,5,103,0,0,488,489,5,101,0,0, + 489,490,5,110,0,0,490,491,5,101,0,0,491,492,5,111,0,0,492,493,5, + 117,0,0,493,494,5,115,0,0,494,86,1,0,0,0,495,496,5,64,0,0,496,88, + 1,0,0,0,497,498,5,46,0,0,498,499,5,46,0,0,499,500,5,46,0,0,500,90, + 1,0,0,0,501,502,5,40,0,0,502,92,1,0,0,0,503,504,5,41,0,0,504,94, + 1,0,0,0,505,506,5,43,0,0,506,96,1,0,0,0,507,508,5,126,0,0,508,98, + 1,0,0,0,509,510,5,124,0,0,510,100,1,0,0,0,511,512,5,94,0,0,512,102, + 1,0,0,0,513,514,5,38,0,0,514,104,1,0,0,0,515,516,5,91,0,0,516,106, + 1,0,0,0,517,518,5,60,0,0,518,519,5,45,0,0,519,108,1,0,0,0,520,521, + 5,93,0,0,521,110,1,0,0,0,522,523,5,91,0,0,523,524,5,91,0,0,524,112, + 1,0,0,0,525,526,5,93,0,0,526,527,5,93,0,0,527,114,1,0,0,0,528,529, + 5,60,0,0,529,530,5,60,0,0,530,116,1,0,0,0,531,532,5,62,0,0,532,533, + 5,62,0,0,533,118,1,0,0,0,534,535,5,60,0,0,535,120,1,0,0,0,536,537, + 5,62,0,0,537,122,1,0,0,0,538,539,5,60,0,0,539,540,5,61,0,0,540,124, + 1,0,0,0,541,542,5,43,0,0,542,543,5,61,0,0,543,126,1,0,0,0,544,545, + 5,45,0,0,545,546,5,61,0,0,546,128,1,0,0,0,547,548,5,42,0,0,548,549, + 5,61,0,0,549,130,1,0,0,0,550,551,5,47,0,0,551,552,5,61,0,0,552,132, + 
1,0,0,0,553,554,5,61,0,0,554,555,5,61,0,0,555,134,1,0,0,0,556,557, + 5,33,0,0,557,558,5,61,0,0,558,136,1,0,0,0,559,560,5,60,0,0,560,561, + 5,62,0,0,561,138,1,0,0,0,562,563,5,62,0,0,563,564,5,61,0,0,564,140, + 1,0,0,0,565,566,5,44,0,0,566,142,1,0,0,0,567,568,5,45,0,0,568,144, + 1,0,0,0,569,570,5,61,0,0,570,146,1,0,0,0,571,572,5,42,0,0,572,148, + 1,0,0,0,573,574,5,42,0,0,574,575,5,42,0,0,575,150,1,0,0,0,576,577, + 5,47,0,0,577,152,1,0,0,0,578,579,5,37,0,0,579,154,1,0,0,0,580,581, + 5,63,0,0,581,156,1,0,0,0,582,583,5,58,0,0,583,158,1,0,0,0,584,585, + 5,58,0,0,585,586,5,58,0,0,586,160,1,0,0,0,587,588,5,59,0,0,588,162, + 1,0,0,0,589,590,5,39,0,0,590,164,1,0,0,0,591,592,5,116,0,0,592,593, + 5,114,0,0,593,594,5,117,0,0,594,610,5,101,0,0,595,596,5,84,0,0,596, + 597,5,114,0,0,597,598,5,117,0,0,598,610,5,101,0,0,599,600,5,102, + 0,0,600,601,5,97,0,0,601,602,5,108,0,0,602,603,5,115,0,0,603,610, + 5,101,0,0,604,605,5,70,0,0,605,606,5,97,0,0,606,607,5,108,0,0,607, + 608,5,115,0,0,608,610,5,101,0,0,609,591,1,0,0,0,609,595,1,0,0,0, + 609,599,1,0,0,0,609,604,1,0,0,0,610,166,1,0,0,0,611,630,5,34,0,0, + 612,625,5,92,0,0,613,615,7,0,0,0,614,613,1,0,0,0,615,616,1,0,0,0, + 616,614,1,0,0,0,616,617,1,0,0,0,617,622,1,0,0,0,618,620,5,13,0,0, + 619,618,1,0,0,0,619,620,1,0,0,0,620,621,1,0,0,0,621,623,5,10,0,0, + 622,619,1,0,0,0,622,623,1,0,0,0,623,626,1,0,0,0,624,626,9,0,0,0, + 625,614,1,0,0,0,625,624,1,0,0,0,626,629,1,0,0,0,627,629,8,2,0,0, + 628,612,1,0,0,0,628,627,1,0,0,0,629,632,1,0,0,0,630,628,1,0,0,0, + 630,631,1,0,0,0,631,633,1,0,0,0,632,630,1,0,0,0,633,634,5,34,0,0, + 634,168,1,0,0,0,635,637,7,3,0,0,636,635,1,0,0,0,637,641,1,0,0,0, + 638,640,7,4,0,0,639,638,1,0,0,0,640,643,1,0,0,0,641,639,1,0,0,0, + 641,642,1,0,0,0,642,170,1,0,0,0,643,641,1,0,0,0,644,646,7,5,0,0, + 645,644,1,0,0,0,646,647,1,0,0,0,647,645,1,0,0,0,647,648,1,0,0,0, + 648,172,1,0,0,0,649,652,3,175,87,0,650,652,3,177,88,0,651,649,1, + 0,0,0,651,650,1,0,0,0,652,174,1,0,0,0,653,655,3,171,85,0,654,653, + 
1,0,0,0,654,655,1,0,0,0,655,656,1,0,0,0,656,657,5,46,0,0,657,662, + 3,171,85,0,658,659,3,171,85,0,659,660,5,46,0,0,660,662,1,0,0,0,661, + 654,1,0,0,0,661,658,1,0,0,0,662,176,1,0,0,0,663,666,3,171,85,0,664, + 666,3,175,87,0,665,663,1,0,0,0,665,664,1,0,0,0,666,667,1,0,0,0,667, + 668,7,6,0,0,668,669,3,179,89,0,669,178,1,0,0,0,670,673,3,95,47,0, + 671,673,3,143,71,0,672,670,1,0,0,0,672,671,1,0,0,0,672,673,1,0,0, + 0,673,674,1,0,0,0,674,675,3,171,85,0,675,180,1,0,0,0,26,0,186,193, + 198,211,218,226,234,239,241,609,616,619,622,625,628,630,636,639, + 641,647,651,654,661,665,672,3,0,1,0,0,2,0,1,7,0 ] class PyNestMLLexer(PyNestMLLexerBase): @@ -318,54 +308,52 @@ class PyNestMLLexer(PyNestMLLexerBase): ON_RECEIVE_KEYWORD = 40 ON_CONDITION_KEYWORD = 41 SPIKE_KEYWORD = 42 - INHIBITORY_KEYWORD = 43 - EXCITATORY_KEYWORD = 44 - DECORATOR_HOMOGENEOUS = 45 - DECORATOR_HETEROGENEOUS = 46 - AT = 47 - ELLIPSIS = 48 - LEFT_PAREN = 49 - RIGHT_PAREN = 50 - PLUS = 51 - TILDE = 52 - PIPE = 53 - CARET = 54 - AMPERSAND = 55 - LEFT_SQUARE_BRACKET = 56 - LEFT_ANGLE_MINUS = 57 - RIGHT_SQUARE_BRACKET = 58 - LEFT_LEFT_SQUARE = 59 - RIGHT_RIGHT_SQUARE = 60 - LEFT_LEFT_ANGLE = 61 - RIGHT_RIGHT_ANGLE = 62 - LEFT_ANGLE = 63 - RIGHT_ANGLE = 64 - LEFT_ANGLE_EQUALS = 65 - PLUS_EQUALS = 66 - MINUS_EQUALS = 67 - STAR_EQUALS = 68 - FORWARD_SLASH_EQUALS = 69 - EQUALS_EQUALS = 70 - EXCLAMATION_EQUALS = 71 - LEFT_ANGLE_RIGHT_ANGLE = 72 - RIGHT_ANGLE_EQUALS = 73 - COMMA = 74 - MINUS = 75 - EQUALS = 76 - STAR = 77 - STAR_STAR = 78 - FORWARD_SLASH = 79 - PERCENT = 80 - QUESTION = 81 - COLON = 82 - DOUBLE_COLON = 83 - SEMICOLON = 84 - DIFFERENTIAL_ORDER = 85 - BOOLEAN_LITERAL = 86 - STRING_LITERAL = 87 - NAME = 88 - UNSIGNED_INTEGER = 89 - FLOAT = 90 + DECORATOR_HOMOGENEOUS = 43 + DECORATOR_HETEROGENEOUS = 44 + AT = 45 + ELLIPSIS = 46 + LEFT_PAREN = 47 + RIGHT_PAREN = 48 + PLUS = 49 + TILDE = 50 + PIPE = 51 + CARET = 52 + AMPERSAND = 53 + LEFT_SQUARE_BRACKET = 54 + LEFT_ANGLE_MINUS = 55 + RIGHT_SQUARE_BRACKET = 
56 + LEFT_LEFT_SQUARE = 57 + RIGHT_RIGHT_SQUARE = 58 + LEFT_LEFT_ANGLE = 59 + RIGHT_RIGHT_ANGLE = 60 + LEFT_ANGLE = 61 + RIGHT_ANGLE = 62 + LEFT_ANGLE_EQUALS = 63 + PLUS_EQUALS = 64 + MINUS_EQUALS = 65 + STAR_EQUALS = 66 + FORWARD_SLASH_EQUALS = 67 + EQUALS_EQUALS = 68 + EXCLAMATION_EQUALS = 69 + LEFT_ANGLE_RIGHT_ANGLE = 70 + RIGHT_ANGLE_EQUALS = 71 + COMMA = 72 + MINUS = 73 + EQUALS = 74 + STAR = 75 + STAR_STAR = 76 + FORWARD_SLASH = 77 + PERCENT = 78 + QUESTION = 79 + COLON = 80 + DOUBLE_COLON = 81 + SEMICOLON = 82 + DIFFERENTIAL_ORDER = 83 + BOOLEAN_LITERAL = 84 + STRING_LITERAL = 85 + NAME = 86 + UNSIGNED_INTEGER = 87 + FLOAT = 88 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN", u"COMMENT" ] @@ -378,12 +366,12 @@ class PyNestMLLexer(PyNestMLLexerBase): "'or'", "'not'", "'recordable'", "'kernel'", "'model'", "'state'", "'parameters'", "'internals'", "'update'", "'equations'", "'input'", "'output'", "'continuous'", "'onReceive'", "'onCondition'", - "'spike'", "'inhibitory'", "'excitatory'", "'@homogeneous'", - "'@heterogeneous'", "'@'", "'...'", "'('", "')'", "'+'", "'~'", - "'|'", "'^'", "'&'", "'['", "'<-'", "']'", "'[['", "']]'", "'<<'", - "'>>'", "'<'", "'>'", "'<='", "'+='", "'-='", "'*='", "'/='", - "'=='", "'!='", "'<>'", "'>='", "','", "'-'", "'='", "'*'", - "'**'", "'/'", "'%'", "'?'", "':'", "'::'", "';'", "'''" ] + "'spike'", "'@homogeneous'", "'@heterogeneous'", "'@'", "'...'", + "'('", "')'", "'+'", "'~'", "'|'", "'^'", "'&'", "'['", "'<-'", + "']'", "'[['", "']]'", "'<<'", "'>>'", "'<'", "'>'", "'<='", + "'+='", "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", + "','", "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", "':'", + "'::'", "';'", "'''" ] symbolicNames = [ "", "INDENT", "DEDENT", "DOCSTRING_TRIPLEQUOTE", "KERNEL_JOINING", @@ -396,18 +384,18 @@ class PyNestMLLexer(PyNestMLLexerBase): "MODEL_KEYWORD", "STATE_KEYWORD", "PARAMETERS_KEYWORD", "INTERNALS_KEYWORD", "UPDATE_KEYWORD", "EQUATIONS_KEYWORD", "INPUT_KEYWORD", 
"OUTPUT_KEYWORD", "CONTINUOUS_KEYWORD", "ON_RECEIVE_KEYWORD", "ON_CONDITION_KEYWORD", - "SPIKE_KEYWORD", "INHIBITORY_KEYWORD", "EXCITATORY_KEYWORD", - "DECORATOR_HOMOGENEOUS", "DECORATOR_HETEROGENEOUS", "AT", "ELLIPSIS", - "LEFT_PAREN", "RIGHT_PAREN", "PLUS", "TILDE", "PIPE", "CARET", - "AMPERSAND", "LEFT_SQUARE_BRACKET", "LEFT_ANGLE_MINUS", "RIGHT_SQUARE_BRACKET", - "LEFT_LEFT_SQUARE", "RIGHT_RIGHT_SQUARE", "LEFT_LEFT_ANGLE", - "RIGHT_RIGHT_ANGLE", "LEFT_ANGLE", "RIGHT_ANGLE", "LEFT_ANGLE_EQUALS", - "PLUS_EQUALS", "MINUS_EQUALS", "STAR_EQUALS", "FORWARD_SLASH_EQUALS", - "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", - "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", - "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", "DOUBLE_COLON", - "SEMICOLON", "DIFFERENTIAL_ORDER", "BOOLEAN_LITERAL", "STRING_LITERAL", - "NAME", "UNSIGNED_INTEGER", "FLOAT" ] + "SPIKE_KEYWORD", "DECORATOR_HOMOGENEOUS", "DECORATOR_HETEROGENEOUS", + "AT", "ELLIPSIS", "LEFT_PAREN", "RIGHT_PAREN", "PLUS", "TILDE", + "PIPE", "CARET", "AMPERSAND", "LEFT_SQUARE_BRACKET", "LEFT_ANGLE_MINUS", + "RIGHT_SQUARE_BRACKET", "LEFT_LEFT_SQUARE", "RIGHT_RIGHT_SQUARE", + "LEFT_LEFT_ANGLE", "RIGHT_RIGHT_ANGLE", "LEFT_ANGLE", "RIGHT_ANGLE", + "LEFT_ANGLE_EQUALS", "PLUS_EQUALS", "MINUS_EQUALS", "STAR_EQUALS", + "FORWARD_SLASH_EQUALS", "EQUALS_EQUALS", "EXCLAMATION_EQUALS", + "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", + "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", + "COLON", "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", + "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", + "FLOAT" ] ruleNames = [ "DOCSTRING_TRIPLEQUOTE", "NEWLINE_FRAG", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "DOCSTRING", "SL_COMMENT", "NEWLINE", @@ -420,14 +408,13 @@ class PyNestMLLexer(PyNestMLLexerBase): "STATE_KEYWORD", "PARAMETERS_KEYWORD", "INTERNALS_KEYWORD", "UPDATE_KEYWORD", "EQUATIONS_KEYWORD", "INPUT_KEYWORD", "OUTPUT_KEYWORD", 
"CONTINUOUS_KEYWORD", "ON_RECEIVE_KEYWORD", - "ON_CONDITION_KEYWORD", "SPIKE_KEYWORD", "INHIBITORY_KEYWORD", - "EXCITATORY_KEYWORD", "DECORATOR_HOMOGENEOUS", "DECORATOR_HETEROGENEOUS", - "AT", "ELLIPSIS", "LEFT_PAREN", "RIGHT_PAREN", "PLUS", - "TILDE", "PIPE", "CARET", "AMPERSAND", "LEFT_SQUARE_BRACKET", - "LEFT_ANGLE_MINUS", "RIGHT_SQUARE_BRACKET", "LEFT_LEFT_SQUARE", - "RIGHT_RIGHT_SQUARE", "LEFT_LEFT_ANGLE", "RIGHT_RIGHT_ANGLE", - "LEFT_ANGLE", "RIGHT_ANGLE", "LEFT_ANGLE_EQUALS", "PLUS_EQUALS", - "MINUS_EQUALS", "STAR_EQUALS", "FORWARD_SLASH_EQUALS", + "ON_CONDITION_KEYWORD", "SPIKE_KEYWORD", "DECORATOR_HOMOGENEOUS", + "DECORATOR_HETEROGENEOUS", "AT", "ELLIPSIS", "LEFT_PAREN", + "RIGHT_PAREN", "PLUS", "TILDE", "PIPE", "CARET", "AMPERSAND", + "LEFT_SQUARE_BRACKET", "LEFT_ANGLE_MINUS", "RIGHT_SQUARE_BRACKET", + "LEFT_LEFT_SQUARE", "RIGHT_RIGHT_SQUARE", "LEFT_LEFT_ANGLE", + "RIGHT_RIGHT_ANGLE", "LEFT_ANGLE", "RIGHT_ANGLE", "LEFT_ANGLE_EQUALS", + "PLUS_EQUALS", "MINUS_EQUALS", "STAR_EQUALS", "FORWARD_SLASH_EQUALS", "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 0cfec8e7d..5fe010bd4 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,226 +10,221 @@ def serializedATN(): return [ - 4,1,90,598,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,88,586,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, 2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,31,7,31,2,32,7,32,2,33, 7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,7,38,2,39,7,39, - 
2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7,45,2,46, - 7,46,1,0,1,0,1,0,1,0,1,0,1,0,3,0,101,8,0,1,1,1,1,1,1,1,1,1,1,1,1, - 1,1,1,1,1,1,3,1,112,8,1,1,1,1,1,1,1,3,1,117,8,1,1,1,1,1,1,1,1,1, - 5,1,123,8,1,10,1,12,1,126,9,1,1,2,3,2,129,8,2,1,2,1,2,1,3,1,3,1, - 3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,3,3,144,8,3,1,3,1,3,1,3,1,3,1, - 3,1,3,1,3,3,3,153,8,3,1,3,1,3,1,3,1,3,3,3,159,8,3,1,3,1,3,1,3,1, - 3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,5, - 3,180,8,3,10,3,12,3,183,9,3,1,4,1,4,1,4,1,4,3,4,189,8,4,1,4,1,4, - 1,4,3,4,194,8,4,1,5,1,5,1,5,3,5,199,8,5,1,6,1,6,1,6,1,6,1,6,3,6, - 206,8,6,1,7,1,7,1,7,1,7,1,7,1,7,1,7,3,7,215,8,7,1,8,1,8,3,8,219, - 8,8,1,9,1,9,1,9,1,9,1,9,3,9,226,8,9,1,9,5,9,229,8,9,10,9,12,9,232, - 9,9,1,10,1,10,1,10,1,10,1,10,5,10,239,8,10,10,10,12,10,242,9,10, - 3,10,244,8,10,1,10,1,10,1,11,3,11,249,8,11,1,11,1,11,1,11,1,11,1, - 11,1,11,3,11,257,8,11,1,11,5,11,260,8,11,10,11,12,11,263,9,11,1, - 11,1,11,1,12,1,12,1,12,1,12,3,12,271,8,12,1,12,5,12,274,8,12,10, - 12,12,12,277,9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1, - 13,1,13,5,13,290,8,13,10,13,12,13,293,9,13,1,13,3,13,296,8,13,1, - 13,1,13,1,14,1,14,1,14,4,14,303,8,14,11,14,12,14,304,1,14,1,14,1, - 15,1,15,3,15,311,8,15,1,16,1,16,1,16,3,16,316,8,16,1,17,1,17,1,17, - 1,17,3,17,322,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,18,3,18, - 332,8,18,1,18,1,18,1,19,3,19,337,8,19,1,19,3,19,340,8,19,1,19,1, - 19,1,19,5,19,345,8,19,10,19,12,19,348,9,19,1,19,1,19,1,19,3,19,353, - 8,19,1,19,1,19,1,19,1,19,3,19,359,8,19,1,19,5,19,362,8,19,10,19, - 12,19,365,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21, - 3,21,377,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,385,8,24,1,25,1, - 25,5,25,389,8,25,10,25,12,25,392,9,25,1,25,3,25,395,8,25,1,26,1, - 26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1, - 29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,419,8,29,1,29,1,29,1, - 29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,432,8,31,11,31,12, - 
31,433,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1, - 33,1,33,1,33,1,33,1,33,1,33,4,33,453,8,33,11,33,12,33,454,1,33,1, - 33,1,34,1,34,1,34,1,34,1,34,5,34,464,8,34,10,34,12,34,467,9,34,1, - 34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,478,8,35,10,35,12, - 35,481,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,492, - 8,36,11,36,12,36,493,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,4,38,509,8,38,11,38,12,38,510,1,38,1,38,1,39, - 1,39,1,39,1,39,1,39,1,39,4,39,521,8,39,11,39,12,39,522,1,39,1,39, - 1,40,1,40,1,40,1,40,1,40,3,40,532,8,40,1,40,1,40,5,40,536,8,40,10, - 40,12,40,539,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,549, - 8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,558,8,42,1,43,1,43, - 1,43,1,43,1,43,1,43,3,43,566,8,43,1,43,1,43,1,43,1,44,1,44,1,44, - 1,44,1,44,1,44,5,44,577,8,44,10,44,12,44,580,9,44,3,44,582,8,44, - 1,44,1,44,3,44,586,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, - 1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, - 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, - 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,89,90,1,0, - 32,34,3,0,25,25,86,87,89,90,653,0,100,1,0,0,0,2,111,1,0,0,0,4,128, - 1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0, - 0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22, - 248,1,0,0,0,24,266,1,0,0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310, - 1,0,0,0,32,315,1,0,0,0,34,321,1,0,0,0,36,325,1,0,0,0,38,336,1,0, - 0,0,40,366,1,0,0,0,42,376,1,0,0,0,44,378,1,0,0,0,46,380,1,0,0,0, - 48,382,1,0,0,0,50,386,1,0,0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406, - 1,0,0,0,58,410,1,0,0,0,60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0, - 0,0,66,441,1,0,0,0,68,458,1,0,0,0,70,472,1,0,0,0,72,486,1,0,0,0, - 74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,526,1,0,0,0,82,543, - 1,0,0,0,84,557,1,0,0,0,86,559,1,0,0,0,88,570,1,0,0,0,90,590,1,0, - 0,0,92,593,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, - 
0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0, - 100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100, - 99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49,0,0,104,105, - 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,89,0,0,108,109, - 5,79,0,0,109,112,3,2,1,2,110,112,5,88,0,0,111,102,1,0,0,0,111,107, - 1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10,3,0,0,114,117, - 5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115,1,0,0,0,117,118, - 1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121,5,78,0,0,121,123, - 3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126,1,0,0,0,124,122, - 1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1,0,0,0,127,129,7, - 0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1,0,0,0,130,131,5, - 89,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, - 6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138,3,10,5,0,138,139, - 3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144,3,6,3,4,142,144, - 3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140,1,0,0,0,143,142, - 1,0,0,0,144,181,1,0,0,0,145,146,10,10,0,0,146,147,5,78,0,0,147,180, - 3,6,3,10,148,152,10,8,0,0,149,153,5,77,0,0,150,153,5,79,0,0,151, - 153,5,80,0,0,152,149,1,0,0,0,152,150,1,0,0,0,152,151,1,0,0,0,153, - 154,1,0,0,0,154,180,3,6,3,9,155,158,10,7,0,0,156,159,5,51,0,0,157, - 159,5,75,0,0,158,156,1,0,0,0,158,157,1,0,0,0,159,160,1,0,0,0,160, - 180,3,6,3,8,161,162,10,6,0,0,162,163,3,12,6,0,163,164,3,6,3,7,164, - 180,1,0,0,0,165,166,10,5,0,0,166,167,3,14,7,0,167,168,3,6,3,6,168, - 180,1,0,0,0,169,170,10,3,0,0,170,171,3,16,8,0,171,172,3,6,3,4,172, - 180,1,0,0,0,173,174,10,2,0,0,174,175,5,81,0,0,175,176,3,6,3,0,176, - 177,5,82,0,0,177,178,3,6,3,3,178,180,1,0,0,0,179,145,1,0,0,0,179, - 148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179,165,1,0,0,0,179, - 169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181,179,1,0,0,0,181, - 182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194,3,20,10,0,185, - 194,5,86,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, - 
189,1,0,0,0,189,194,1,0,0,0,190,194,5,87,0,0,191,194,5,25,0,0,192, - 194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193,186,1,0,0,0,193, - 190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194,9,1,0,0,0,195,199, - 5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198,195,1,0,0,0,198,196, - 1,0,0,0,198,197,1,0,0,0,199,11,1,0,0,0,200,206,5,55,0,0,201,206, - 5,54,0,0,202,206,5,53,0,0,203,206,5,61,0,0,204,206,5,62,0,0,205, - 200,1,0,0,0,205,201,1,0,0,0,205,202,1,0,0,0,205,203,1,0,0,0,205, - 204,1,0,0,0,206,13,1,0,0,0,207,215,5,63,0,0,208,215,5,65,0,0,209, - 215,5,70,0,0,210,215,5,71,0,0,211,215,5,72,0,0,212,215,5,73,0,0, - 213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0,0,214,209,1,0,0,0, - 214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0,0,214,213,1,0,0,0, - 215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0,0,218,216,1,0,0,0, - 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,88,0,0,221,222,5,56,0,0, - 222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0,0,0,225,221,1,0,0,0, - 225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85,0,0,228,227,1,0,0,0, - 229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0,0,0,231,19,1,0,0,0,232, - 230,1,0,0,0,233,234,5,88,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, - 237,5,74,0,0,237,239,3,6,3,0,238,236,1,0,0,0,239,242,1,0,0,0,240, - 238,1,0,0,0,240,241,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,243, - 235,1,0,0,0,243,244,1,0,0,0,244,245,1,0,0,0,245,246,5,50,0,0,246, - 21,1,0,0,0,247,249,5,29,0,0,248,247,1,0,0,0,248,249,1,0,0,0,249, - 250,1,0,0,0,250,251,5,16,0,0,251,252,5,88,0,0,252,253,3,0,0,0,253, - 254,5,76,0,0,254,256,3,6,3,0,255,257,5,84,0,0,256,255,1,0,0,0,256, - 257,1,0,0,0,257,261,1,0,0,0,258,260,3,42,21,0,259,258,1,0,0,0,260, - 263,1,0,0,0,261,259,1,0,0,0,261,262,1,0,0,0,262,264,1,0,0,0,263, - 261,1,0,0,0,264,265,5,9,0,0,265,23,1,0,0,0,266,267,3,18,9,0,267, - 268,5,76,0,0,268,270,3,6,3,0,269,271,5,84,0,0,270,269,1,0,0,0,270, - 271,1,0,0,0,271,275,1,0,0,0,272,274,3,42,21,0,273,272,1,0,0,0,274, - 277,1,0,0,0,275,273,1,0,0,0,275,276,1,0,0,0,276,278,1,0,0,0,277, - 
275,1,0,0,0,278,279,5,9,0,0,279,25,1,0,0,0,280,281,5,30,0,0,281, - 282,3,18,9,0,282,283,5,76,0,0,283,291,3,6,3,0,284,285,5,4,0,0,285, - 286,3,18,9,0,286,287,5,76,0,0,287,288,3,6,3,0,288,290,1,0,0,0,289, - 284,1,0,0,0,290,293,1,0,0,0,291,289,1,0,0,0,291,292,1,0,0,0,292, - 295,1,0,0,0,293,291,1,0,0,0,294,296,5,84,0,0,295,294,1,0,0,0,295, - 296,1,0,0,0,296,297,1,0,0,0,297,298,5,9,0,0,298,27,1,0,0,0,299,300, - 5,9,0,0,300,302,5,1,0,0,301,303,3,30,15,0,302,301,1,0,0,0,303,304, - 1,0,0,0,304,302,1,0,0,0,304,305,1,0,0,0,305,306,1,0,0,0,306,307, - 5,2,0,0,307,29,1,0,0,0,308,311,3,34,17,0,309,311,3,32,16,0,310,308, - 1,0,0,0,310,309,1,0,0,0,311,31,1,0,0,0,312,316,3,50,25,0,313,316, - 3,58,29,0,314,316,3,60,30,0,315,312,1,0,0,0,315,313,1,0,0,0,315, - 314,1,0,0,0,316,33,1,0,0,0,317,322,3,36,18,0,318,322,3,20,10,0,319, - 322,3,38,19,0,320,322,3,48,24,0,321,317,1,0,0,0,321,318,1,0,0,0, - 321,319,1,0,0,0,321,320,1,0,0,0,322,323,1,0,0,0,323,324,5,9,0,0, - 324,35,1,0,0,0,325,331,3,18,9,0,326,332,5,76,0,0,327,332,5,66,0, - 0,328,332,5,67,0,0,329,332,5,68,0,0,330,332,5,69,0,0,331,326,1,0, - 0,0,331,327,1,0,0,0,331,328,1,0,0,0,331,329,1,0,0,0,331,330,1,0, - 0,0,332,333,1,0,0,0,333,334,3,6,3,0,334,37,1,0,0,0,335,337,5,29, - 0,0,336,335,1,0,0,0,336,337,1,0,0,0,337,339,1,0,0,0,338,340,5,16, - 0,0,339,338,1,0,0,0,339,340,1,0,0,0,340,341,1,0,0,0,341,346,3,18, - 9,0,342,343,5,74,0,0,343,345,3,18,9,0,344,342,1,0,0,0,345,348,1, - 0,0,0,346,344,1,0,0,0,346,347,1,0,0,0,347,349,1,0,0,0,348,346,1, - 0,0,0,349,352,3,0,0,0,350,351,5,76,0,0,351,353,3,6,3,0,352,350,1, - 0,0,0,352,353,1,0,0,0,353,358,1,0,0,0,354,355,5,59,0,0,355,356,3, - 6,3,0,356,357,5,60,0,0,357,359,1,0,0,0,358,354,1,0,0,0,358,359,1, - 0,0,0,359,363,1,0,0,0,360,362,3,42,21,0,361,360,1,0,0,0,362,365, - 1,0,0,0,363,361,1,0,0,0,363,364,1,0,0,0,364,39,1,0,0,0,365,363,1, - 0,0,0,366,367,3,38,19,0,367,368,5,9,0,0,368,41,1,0,0,0,369,377,5, - 45,0,0,370,377,5,46,0,0,371,372,5,47,0,0,372,373,3,44,22,0,373,374, - 
5,83,0,0,374,375,3,46,23,0,375,377,1,0,0,0,376,369,1,0,0,0,376,370, - 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,88,0,0,379,45,1, - 0,0,0,380,381,5,88,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, - 6,3,0,384,383,1,0,0,0,384,385,1,0,0,0,385,49,1,0,0,0,386,390,3,52, - 26,0,387,389,3,54,27,0,388,387,1,0,0,0,389,392,1,0,0,0,390,388,1, - 0,0,0,390,391,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,393,395,3, - 56,28,0,394,393,1,0,0,0,394,395,1,0,0,0,395,51,1,0,0,0,396,397,5, - 18,0,0,397,398,3,6,3,0,398,399,5,82,0,0,399,400,3,28,14,0,400,53, - 1,0,0,0,401,402,5,19,0,0,402,403,3,6,3,0,403,404,5,82,0,0,404,405, - 3,28,14,0,405,55,1,0,0,0,406,407,5,20,0,0,407,408,5,82,0,0,408,409, - 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,88,0,0,412,413, - 5,23,0,0,413,414,3,6,3,0,414,415,5,48,0,0,415,416,3,6,3,0,416,418, - 5,24,0,0,417,419,5,75,0,0,418,417,1,0,0,0,418,419,1,0,0,0,419,420, - 1,0,0,0,420,421,7,1,0,0,421,422,5,82,0,0,422,423,3,28,14,0,423,59, - 1,0,0,0,424,425,5,22,0,0,425,426,3,6,3,0,426,427,5,82,0,0,427,428, - 3,28,14,0,428,61,1,0,0,0,429,432,3,64,32,0,430,432,5,9,0,0,431,429, - 1,0,0,0,431,430,1,0,0,0,432,433,1,0,0,0,433,431,1,0,0,0,433,434, - 1,0,0,0,434,435,1,0,0,0,435,436,5,0,0,1,436,63,1,0,0,0,437,438,5, - 31,0,0,438,439,5,88,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, - 5,82,0,0,442,443,5,9,0,0,443,452,5,1,0,0,444,453,3,72,36,0,445,453, - 3,76,38,0,446,453,3,78,39,0,447,453,3,86,43,0,448,453,3,88,44,0, - 449,453,3,68,34,0,450,453,3,70,35,0,451,453,3,74,37,0,452,444,1, - 0,0,0,452,445,1,0,0,0,452,446,1,0,0,0,452,447,1,0,0,0,452,448,1, - 0,0,0,452,449,1,0,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1, - 0,0,0,454,452,1,0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5, - 2,0,0,457,67,1,0,0,0,458,459,5,40,0,0,459,460,5,49,0,0,460,465,5, - 88,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, - 1,0,0,0,465,463,1,0,0,0,465,466,1,0,0,0,466,468,1,0,0,0,467,465, - 1,0,0,0,468,469,5,50,0,0,469,470,5,82,0,0,470,471,3,28,14,0,471, - 
69,1,0,0,0,472,473,5,41,0,0,473,474,5,49,0,0,474,479,3,6,3,0,475, - 476,5,74,0,0,476,478,3,92,46,0,477,475,1,0,0,0,478,481,1,0,0,0,479, - 477,1,0,0,0,479,480,1,0,0,0,480,482,1,0,0,0,481,479,1,0,0,0,482, - 483,5,50,0,0,483,484,5,82,0,0,484,485,3,28,14,0,485,71,1,0,0,0,486, - 487,7,2,0,0,487,488,5,82,0,0,488,489,5,9,0,0,489,491,5,1,0,0,490, - 492,3,40,20,0,491,490,1,0,0,0,492,493,1,0,0,0,493,491,1,0,0,0,493, - 494,1,0,0,0,494,495,1,0,0,0,495,496,5,2,0,0,496,73,1,0,0,0,497,498, - 5,35,0,0,498,499,5,82,0,0,499,500,3,28,14,0,500,75,1,0,0,0,501,502, - 5,36,0,0,502,503,5,82,0,0,503,504,5,9,0,0,504,508,5,1,0,0,505,509, - 3,22,11,0,506,509,3,24,12,0,507,509,3,26,13,0,508,505,1,0,0,0,508, - 506,1,0,0,0,508,507,1,0,0,0,509,510,1,0,0,0,510,508,1,0,0,0,510, - 511,1,0,0,0,511,512,1,0,0,0,512,513,5,2,0,0,513,77,1,0,0,0,514,515, - 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,520,5,1,0,0,518,521, - 3,80,40,0,519,521,3,82,41,0,520,518,1,0,0,0,520,519,1,0,0,0,521, - 522,1,0,0,0,522,520,1,0,0,0,522,523,1,0,0,0,523,524,1,0,0,0,524, - 525,5,2,0,0,525,79,1,0,0,0,526,531,5,88,0,0,527,528,5,56,0,0,528, - 529,3,6,3,0,529,530,5,58,0,0,530,532,1,0,0,0,531,527,1,0,0,0,531, - 532,1,0,0,0,532,533,1,0,0,0,533,537,5,57,0,0,534,536,3,84,42,0,535, - 534,1,0,0,0,536,539,1,0,0,0,537,535,1,0,0,0,537,538,1,0,0,0,538, - 540,1,0,0,0,539,537,1,0,0,0,540,541,5,42,0,0,541,542,5,9,0,0,542, - 81,1,0,0,0,543,548,5,88,0,0,544,545,5,56,0,0,545,546,3,6,3,0,546, - 547,5,58,0,0,547,549,1,0,0,0,548,544,1,0,0,0,548,549,1,0,0,0,549, - 550,1,0,0,0,550,551,3,0,0,0,551,552,5,57,0,0,552,553,5,39,0,0,553, - 554,5,9,0,0,554,83,1,0,0,0,555,558,5,43,0,0,556,558,5,44,0,0,557, - 555,1,0,0,0,557,556,1,0,0,0,558,85,1,0,0,0,559,560,5,38,0,0,560, - 561,5,82,0,0,561,562,5,9,0,0,562,565,5,1,0,0,563,566,5,42,0,0,564, - 566,5,39,0,0,565,563,1,0,0,0,565,564,1,0,0,0,566,567,1,0,0,0,567, - 568,5,9,0,0,568,569,5,2,0,0,569,87,1,0,0,0,570,571,5,15,0,0,571, - 572,5,88,0,0,572,581,5,49,0,0,573,578,3,90,45,0,574,575,5,74,0,0, - 
575,577,3,90,45,0,576,574,1,0,0,0,577,580,1,0,0,0,578,576,1,0,0, - 0,578,579,1,0,0,0,579,582,1,0,0,0,580,578,1,0,0,0,581,573,1,0,0, - 0,581,582,1,0,0,0,582,583,1,0,0,0,583,585,5,50,0,0,584,586,3,0,0, - 0,585,584,1,0,0,0,585,586,1,0,0,0,586,587,1,0,0,0,587,588,5,82,0, - 0,588,589,3,28,14,0,589,89,1,0,0,0,590,591,5,88,0,0,591,592,3,0, - 0,0,592,91,1,0,0,0,593,594,5,88,0,0,594,595,5,76,0,0,595,596,7,3, - 0,0,596,93,1,0,0,0,63,100,111,116,122,124,128,143,152,158,179,181, - 188,193,198,205,214,218,225,230,240,243,248,256,261,270,275,291, - 295,304,310,315,321,331,336,339,346,352,358,363,376,384,390,394, - 418,431,433,452,454,465,479,493,508,510,520,522,531,537,548,557, - 565,578,581,585 + 2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7,45,1,0, + 1,0,1,0,1,0,1,0,1,0,3,0,99,8,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, + 1,3,1,110,8,1,1,1,1,1,1,1,3,1,115,8,1,1,1,1,1,1,1,1,1,5,1,121,8, + 1,10,1,12,1,124,9,1,1,2,3,2,127,8,2,1,2,1,2,1,3,1,3,1,3,1,3,1,3, + 1,3,1,3,1,3,1,3,1,3,1,3,3,3,142,8,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3, + 3,3,151,8,3,1,3,1,3,1,3,1,3,3,3,157,8,3,1,3,1,3,1,3,1,3,1,3,1,3, + 1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,5,3,178,8,3, + 10,3,12,3,181,9,3,1,4,1,4,1,4,1,4,3,4,187,8,4,1,4,1,4,1,4,3,4,192, + 8,4,1,5,1,5,1,5,3,5,197,8,5,1,6,1,6,1,6,1,6,1,6,3,6,204,8,6,1,7, + 1,7,1,7,1,7,1,7,1,7,1,7,3,7,213,8,7,1,8,1,8,3,8,217,8,8,1,9,1,9, + 1,9,1,9,1,9,3,9,224,8,9,1,9,5,9,227,8,9,10,9,12,9,230,9,9,1,10,1, + 10,1,10,1,10,1,10,5,10,237,8,10,10,10,12,10,240,9,10,3,10,242,8, + 10,1,10,1,10,1,11,3,11,247,8,11,1,11,1,11,1,11,1,11,1,11,1,11,3, + 11,255,8,11,1,11,5,11,258,8,11,10,11,12,11,261,9,11,1,11,1,11,1, + 12,1,12,1,12,1,12,3,12,269,8,12,1,12,5,12,272,8,12,10,12,12,12,275, + 9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13,5,13, + 288,8,13,10,13,12,13,291,9,13,1,13,3,13,294,8,13,1,13,1,13,1,14, + 1,14,1,14,4,14,301,8,14,11,14,12,14,302,1,14,1,14,1,15,1,15,3,15, + 309,8,15,1,16,1,16,1,16,3,16,314,8,16,1,17,1,17,1,17,1,17,3,17,320, + 
8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1,18,3,18,330,8,18,1,18, + 1,18,1,19,3,19,335,8,19,1,19,3,19,338,8,19,1,19,1,19,1,19,5,19,343, + 8,19,10,19,12,19,346,9,19,1,19,1,19,1,19,3,19,351,8,19,1,19,1,19, + 1,19,1,19,3,19,357,8,19,1,19,5,19,360,8,19,10,19,12,19,363,9,19, + 1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21,1,21,1,21,3,21,375,8,21, + 1,22,1,22,1,23,1,23,1,24,1,24,3,24,383,8,24,1,25,1,25,5,25,387,8, + 25,10,25,12,25,390,9,25,1,25,3,25,393,8,25,1,26,1,26,1,26,1,26,1, + 26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,29,1,29,1,29,1, + 29,1,29,1,29,1,29,1,29,3,29,417,8,29,1,29,1,29,1,29,1,29,1,30,1, + 30,1,30,1,30,1,30,1,31,1,31,4,31,430,8,31,11,31,12,31,431,1,31,1, + 31,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1, + 33,1,33,1,33,4,33,451,8,33,11,33,12,33,452,1,33,1,33,1,34,1,34,1, + 34,1,34,1,34,5,34,462,8,34,10,34,12,34,465,9,34,1,34,1,34,1,34,1, + 34,1,35,1,35,1,35,1,35,1,35,5,35,476,8,35,10,35,12,35,479,9,35,1, + 35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,490,8,36,11,36,12, + 36,491,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38,1,38,1,38,1, + 38,1,38,4,38,507,8,38,11,38,12,38,508,1,38,1,38,1,39,1,39,1,39,1, + 39,1,39,1,39,4,39,519,8,39,11,39,12,39,520,1,39,1,39,1,40,1,40,1, + 40,1,40,1,40,3,40,530,8,40,1,40,1,40,1,40,1,40,1,41,1,41,1,41,1, + 41,1,41,3,41,541,8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42,1, + 42,1,42,1,42,3,42,554,8,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1, + 43,1,43,5,43,565,8,43,10,43,12,43,568,9,43,3,43,570,8,43,1,43,1, + 43,3,43,574,8,43,1,43,1,43,1,43,1,44,1,44,1,44,1,45,1,45,1,45,1, + 45,1,45,0,2,2,6,46,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32, + 34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76, + 78,80,82,84,86,88,90,0,4,2,0,49,49,73,73,1,0,87,88,1,0,32,34,3,0, + 25,25,84,85,87,88,640,0,98,1,0,0,0,2,109,1,0,0,0,4,126,1,0,0,0,6, + 141,1,0,0,0,8,191,1,0,0,0,10,196,1,0,0,0,12,203,1,0,0,0,14,212,1, + 0,0,0,16,216,1,0,0,0,18,218,1,0,0,0,20,231,1,0,0,0,22,246,1,0,0, + 
0,24,264,1,0,0,0,26,278,1,0,0,0,28,297,1,0,0,0,30,308,1,0,0,0,32, + 313,1,0,0,0,34,319,1,0,0,0,36,323,1,0,0,0,38,334,1,0,0,0,40,364, + 1,0,0,0,42,374,1,0,0,0,44,376,1,0,0,0,46,378,1,0,0,0,48,380,1,0, + 0,0,50,384,1,0,0,0,52,394,1,0,0,0,54,399,1,0,0,0,56,404,1,0,0,0, + 58,408,1,0,0,0,60,422,1,0,0,0,62,429,1,0,0,0,64,435,1,0,0,0,66,439, + 1,0,0,0,68,456,1,0,0,0,70,470,1,0,0,0,72,484,1,0,0,0,74,495,1,0, + 0,0,76,499,1,0,0,0,78,512,1,0,0,0,80,524,1,0,0,0,82,535,1,0,0,0, + 84,547,1,0,0,0,86,558,1,0,0,0,88,578,1,0,0,0,90,581,1,0,0,0,92,99, + 5,10,0,0,93,99,5,11,0,0,94,99,5,12,0,0,95,99,5,13,0,0,96,99,5,14, + 0,0,97,99,3,2,1,0,98,92,1,0,0,0,98,93,1,0,0,0,98,94,1,0,0,0,98,95, + 1,0,0,0,98,96,1,0,0,0,98,97,1,0,0,0,99,1,1,0,0,0,100,101,6,1,-1, + 0,101,102,5,47,0,0,102,103,3,2,1,0,103,104,5,48,0,0,104,110,1,0, + 0,0,105,106,5,87,0,0,106,107,5,77,0,0,107,110,3,2,1,2,108,110,5, + 86,0,0,109,100,1,0,0,0,109,105,1,0,0,0,109,108,1,0,0,0,110,122,1, + 0,0,0,111,114,10,3,0,0,112,115,5,75,0,0,113,115,5,77,0,0,114,112, + 1,0,0,0,114,113,1,0,0,0,115,116,1,0,0,0,116,121,3,2,1,4,117,118, + 10,4,0,0,118,119,5,76,0,0,119,121,3,4,2,0,120,111,1,0,0,0,120,117, + 1,0,0,0,121,124,1,0,0,0,122,120,1,0,0,0,122,123,1,0,0,0,123,3,1, + 0,0,0,124,122,1,0,0,0,125,127,7,0,0,0,126,125,1,0,0,0,126,127,1, + 0,0,0,127,128,1,0,0,0,128,129,5,87,0,0,129,5,1,0,0,0,130,131,6,3, + -1,0,131,132,5,47,0,0,132,133,3,6,3,0,133,134,5,48,0,0,134,142,1, + 0,0,0,135,136,3,10,5,0,136,137,3,6,3,9,137,142,1,0,0,0,138,139,5, + 28,0,0,139,142,3,6,3,4,140,142,3,8,4,0,141,130,1,0,0,0,141,135,1, + 0,0,0,141,138,1,0,0,0,141,140,1,0,0,0,142,179,1,0,0,0,143,144,10, + 10,0,0,144,145,5,76,0,0,145,178,3,6,3,10,146,150,10,8,0,0,147,151, + 5,75,0,0,148,151,5,77,0,0,149,151,5,78,0,0,150,147,1,0,0,0,150,148, + 1,0,0,0,150,149,1,0,0,0,151,152,1,0,0,0,152,178,3,6,3,9,153,156, + 10,7,0,0,154,157,5,49,0,0,155,157,5,73,0,0,156,154,1,0,0,0,156,155, + 1,0,0,0,157,158,1,0,0,0,158,178,3,6,3,8,159,160,10,6,0,0,160,161, + 
3,12,6,0,161,162,3,6,3,7,162,178,1,0,0,0,163,164,10,5,0,0,164,165, + 3,14,7,0,165,166,3,6,3,6,166,178,1,0,0,0,167,168,10,3,0,0,168,169, + 3,16,8,0,169,170,3,6,3,4,170,178,1,0,0,0,171,172,10,2,0,0,172,173, + 5,79,0,0,173,174,3,6,3,0,174,175,5,80,0,0,175,176,3,6,3,3,176,178, + 1,0,0,0,177,143,1,0,0,0,177,146,1,0,0,0,177,153,1,0,0,0,177,159, + 1,0,0,0,177,163,1,0,0,0,177,167,1,0,0,0,177,171,1,0,0,0,178,181, + 1,0,0,0,179,177,1,0,0,0,179,180,1,0,0,0,180,7,1,0,0,0,181,179,1, + 0,0,0,182,192,3,20,10,0,183,192,5,84,0,0,184,186,7,1,0,0,185,187, + 3,18,9,0,186,185,1,0,0,0,186,187,1,0,0,0,187,192,1,0,0,0,188,192, + 5,85,0,0,189,192,5,25,0,0,190,192,3,18,9,0,191,182,1,0,0,0,191,183, + 1,0,0,0,191,184,1,0,0,0,191,188,1,0,0,0,191,189,1,0,0,0,191,190, + 1,0,0,0,192,9,1,0,0,0,193,197,5,49,0,0,194,197,5,73,0,0,195,197, + 5,50,0,0,196,193,1,0,0,0,196,194,1,0,0,0,196,195,1,0,0,0,197,11, + 1,0,0,0,198,204,5,53,0,0,199,204,5,52,0,0,200,204,5,51,0,0,201,204, + 5,59,0,0,202,204,5,60,0,0,203,198,1,0,0,0,203,199,1,0,0,0,203,200, + 1,0,0,0,203,201,1,0,0,0,203,202,1,0,0,0,204,13,1,0,0,0,205,213,5, + 61,0,0,206,213,5,63,0,0,207,213,5,68,0,0,208,213,5,69,0,0,209,213, + 5,70,0,0,210,213,5,71,0,0,211,213,5,62,0,0,212,205,1,0,0,0,212,206, + 1,0,0,0,212,207,1,0,0,0,212,208,1,0,0,0,212,209,1,0,0,0,212,210, + 1,0,0,0,212,211,1,0,0,0,213,15,1,0,0,0,214,217,5,26,0,0,215,217, + 5,27,0,0,216,214,1,0,0,0,216,215,1,0,0,0,217,17,1,0,0,0,218,223, + 5,86,0,0,219,220,5,54,0,0,220,221,3,6,3,0,221,222,5,56,0,0,222,224, + 1,0,0,0,223,219,1,0,0,0,223,224,1,0,0,0,224,228,1,0,0,0,225,227, + 5,83,0,0,226,225,1,0,0,0,227,230,1,0,0,0,228,226,1,0,0,0,228,229, + 1,0,0,0,229,19,1,0,0,0,230,228,1,0,0,0,231,232,5,86,0,0,232,241, + 5,47,0,0,233,238,3,6,3,0,234,235,5,72,0,0,235,237,3,6,3,0,236,234, + 1,0,0,0,237,240,1,0,0,0,238,236,1,0,0,0,238,239,1,0,0,0,239,242, + 1,0,0,0,240,238,1,0,0,0,241,233,1,0,0,0,241,242,1,0,0,0,242,243, + 1,0,0,0,243,244,5,48,0,0,244,21,1,0,0,0,245,247,5,29,0,0,246,245, + 
1,0,0,0,246,247,1,0,0,0,247,248,1,0,0,0,248,249,5,16,0,0,249,250, + 5,86,0,0,250,251,3,0,0,0,251,252,5,74,0,0,252,254,3,6,3,0,253,255, + 5,82,0,0,254,253,1,0,0,0,254,255,1,0,0,0,255,259,1,0,0,0,256,258, + 3,42,21,0,257,256,1,0,0,0,258,261,1,0,0,0,259,257,1,0,0,0,259,260, + 1,0,0,0,260,262,1,0,0,0,261,259,1,0,0,0,262,263,5,9,0,0,263,23,1, + 0,0,0,264,265,3,18,9,0,265,266,5,74,0,0,266,268,3,6,3,0,267,269, + 5,82,0,0,268,267,1,0,0,0,268,269,1,0,0,0,269,273,1,0,0,0,270,272, + 3,42,21,0,271,270,1,0,0,0,272,275,1,0,0,0,273,271,1,0,0,0,273,274, + 1,0,0,0,274,276,1,0,0,0,275,273,1,0,0,0,276,277,5,9,0,0,277,25,1, + 0,0,0,278,279,5,30,0,0,279,280,3,18,9,0,280,281,5,74,0,0,281,289, + 3,6,3,0,282,283,5,4,0,0,283,284,3,18,9,0,284,285,5,74,0,0,285,286, + 3,6,3,0,286,288,1,0,0,0,287,282,1,0,0,0,288,291,1,0,0,0,289,287, + 1,0,0,0,289,290,1,0,0,0,290,293,1,0,0,0,291,289,1,0,0,0,292,294, + 5,82,0,0,293,292,1,0,0,0,293,294,1,0,0,0,294,295,1,0,0,0,295,296, + 5,9,0,0,296,27,1,0,0,0,297,298,5,9,0,0,298,300,5,1,0,0,299,301,3, + 30,15,0,300,299,1,0,0,0,301,302,1,0,0,0,302,300,1,0,0,0,302,303, + 1,0,0,0,303,304,1,0,0,0,304,305,5,2,0,0,305,29,1,0,0,0,306,309,3, + 34,17,0,307,309,3,32,16,0,308,306,1,0,0,0,308,307,1,0,0,0,309,31, + 1,0,0,0,310,314,3,50,25,0,311,314,3,58,29,0,312,314,3,60,30,0,313, + 310,1,0,0,0,313,311,1,0,0,0,313,312,1,0,0,0,314,33,1,0,0,0,315,320, + 3,36,18,0,316,320,3,20,10,0,317,320,3,38,19,0,318,320,3,48,24,0, + 319,315,1,0,0,0,319,316,1,0,0,0,319,317,1,0,0,0,319,318,1,0,0,0, + 320,321,1,0,0,0,321,322,5,9,0,0,322,35,1,0,0,0,323,329,3,18,9,0, + 324,330,5,74,0,0,325,330,5,64,0,0,326,330,5,65,0,0,327,330,5,66, + 0,0,328,330,5,67,0,0,329,324,1,0,0,0,329,325,1,0,0,0,329,326,1,0, + 0,0,329,327,1,0,0,0,329,328,1,0,0,0,330,331,1,0,0,0,331,332,3,6, + 3,0,332,37,1,0,0,0,333,335,5,29,0,0,334,333,1,0,0,0,334,335,1,0, + 0,0,335,337,1,0,0,0,336,338,5,16,0,0,337,336,1,0,0,0,337,338,1,0, + 0,0,338,339,1,0,0,0,339,344,3,18,9,0,340,341,5,72,0,0,341,343,3, + 
18,9,0,342,340,1,0,0,0,343,346,1,0,0,0,344,342,1,0,0,0,344,345,1, + 0,0,0,345,347,1,0,0,0,346,344,1,0,0,0,347,350,3,0,0,0,348,349,5, + 74,0,0,349,351,3,6,3,0,350,348,1,0,0,0,350,351,1,0,0,0,351,356,1, + 0,0,0,352,353,5,57,0,0,353,354,3,6,3,0,354,355,5,58,0,0,355,357, + 1,0,0,0,356,352,1,0,0,0,356,357,1,0,0,0,357,361,1,0,0,0,358,360, + 3,42,21,0,359,358,1,0,0,0,360,363,1,0,0,0,361,359,1,0,0,0,361,362, + 1,0,0,0,362,39,1,0,0,0,363,361,1,0,0,0,364,365,3,38,19,0,365,366, + 5,9,0,0,366,41,1,0,0,0,367,375,5,43,0,0,368,375,5,44,0,0,369,370, + 5,45,0,0,370,371,3,44,22,0,371,372,5,81,0,0,372,373,3,46,23,0,373, + 375,1,0,0,0,374,367,1,0,0,0,374,368,1,0,0,0,374,369,1,0,0,0,375, + 43,1,0,0,0,376,377,5,86,0,0,377,45,1,0,0,0,378,379,5,86,0,0,379, + 47,1,0,0,0,380,382,5,17,0,0,381,383,3,6,3,0,382,381,1,0,0,0,382, + 383,1,0,0,0,383,49,1,0,0,0,384,388,3,52,26,0,385,387,3,54,27,0,386, + 385,1,0,0,0,387,390,1,0,0,0,388,386,1,0,0,0,388,389,1,0,0,0,389, + 392,1,0,0,0,390,388,1,0,0,0,391,393,3,56,28,0,392,391,1,0,0,0,392, + 393,1,0,0,0,393,51,1,0,0,0,394,395,5,18,0,0,395,396,3,6,3,0,396, + 397,5,80,0,0,397,398,3,28,14,0,398,53,1,0,0,0,399,400,5,19,0,0,400, + 401,3,6,3,0,401,402,5,80,0,0,402,403,3,28,14,0,403,55,1,0,0,0,404, + 405,5,20,0,0,405,406,5,80,0,0,406,407,3,28,14,0,407,57,1,0,0,0,408, + 409,5,21,0,0,409,410,5,86,0,0,410,411,5,23,0,0,411,412,3,6,3,0,412, + 413,5,46,0,0,413,414,3,6,3,0,414,416,5,24,0,0,415,417,5,73,0,0,416, + 415,1,0,0,0,416,417,1,0,0,0,417,418,1,0,0,0,418,419,7,1,0,0,419, + 420,5,80,0,0,420,421,3,28,14,0,421,59,1,0,0,0,422,423,5,22,0,0,423, + 424,3,6,3,0,424,425,5,80,0,0,425,426,3,28,14,0,426,61,1,0,0,0,427, + 430,3,64,32,0,428,430,5,9,0,0,429,427,1,0,0,0,429,428,1,0,0,0,430, + 431,1,0,0,0,431,429,1,0,0,0,431,432,1,0,0,0,432,433,1,0,0,0,433, + 434,5,0,0,1,434,63,1,0,0,0,435,436,5,31,0,0,436,437,5,86,0,0,437, + 438,3,66,33,0,438,65,1,0,0,0,439,440,5,80,0,0,440,441,5,9,0,0,441, + 450,5,1,0,0,442,451,3,72,36,0,443,451,3,76,38,0,444,451,3,78,39, + 
0,445,451,3,84,42,0,446,451,3,86,43,0,447,451,3,68,34,0,448,451, + 3,70,35,0,449,451,3,74,37,0,450,442,1,0,0,0,450,443,1,0,0,0,450, + 444,1,0,0,0,450,445,1,0,0,0,450,446,1,0,0,0,450,447,1,0,0,0,450, + 448,1,0,0,0,450,449,1,0,0,0,451,452,1,0,0,0,452,450,1,0,0,0,452, + 453,1,0,0,0,453,454,1,0,0,0,454,455,5,2,0,0,455,67,1,0,0,0,456,457, + 5,40,0,0,457,458,5,47,0,0,458,463,5,86,0,0,459,460,5,72,0,0,460, + 462,3,90,45,0,461,459,1,0,0,0,462,465,1,0,0,0,463,461,1,0,0,0,463, + 464,1,0,0,0,464,466,1,0,0,0,465,463,1,0,0,0,466,467,5,48,0,0,467, + 468,5,80,0,0,468,469,3,28,14,0,469,69,1,0,0,0,470,471,5,41,0,0,471, + 472,5,47,0,0,472,477,3,6,3,0,473,474,5,72,0,0,474,476,3,90,45,0, + 475,473,1,0,0,0,476,479,1,0,0,0,477,475,1,0,0,0,477,478,1,0,0,0, + 478,480,1,0,0,0,479,477,1,0,0,0,480,481,5,48,0,0,481,482,5,80,0, + 0,482,483,3,28,14,0,483,71,1,0,0,0,484,485,7,2,0,0,485,486,5,80, + 0,0,486,487,5,9,0,0,487,489,5,1,0,0,488,490,3,40,20,0,489,488,1, + 0,0,0,490,491,1,0,0,0,491,489,1,0,0,0,491,492,1,0,0,0,492,493,1, + 0,0,0,493,494,5,2,0,0,494,73,1,0,0,0,495,496,5,35,0,0,496,497,5, + 80,0,0,497,498,3,28,14,0,498,75,1,0,0,0,499,500,5,36,0,0,500,501, + 5,80,0,0,501,502,5,9,0,0,502,506,5,1,0,0,503,507,3,22,11,0,504,507, + 3,24,12,0,505,507,3,26,13,0,506,503,1,0,0,0,506,504,1,0,0,0,506, + 505,1,0,0,0,507,508,1,0,0,0,508,506,1,0,0,0,508,509,1,0,0,0,509, + 510,1,0,0,0,510,511,5,2,0,0,511,77,1,0,0,0,512,513,5,37,0,0,513, + 514,5,80,0,0,514,515,5,9,0,0,515,518,5,1,0,0,516,519,3,80,40,0,517, + 519,3,82,41,0,518,516,1,0,0,0,518,517,1,0,0,0,519,520,1,0,0,0,520, + 518,1,0,0,0,520,521,1,0,0,0,521,522,1,0,0,0,522,523,5,2,0,0,523, + 79,1,0,0,0,524,529,5,86,0,0,525,526,5,54,0,0,526,527,3,6,3,0,527, + 528,5,56,0,0,528,530,1,0,0,0,529,525,1,0,0,0,529,530,1,0,0,0,530, + 531,1,0,0,0,531,532,5,55,0,0,532,533,5,42,0,0,533,534,5,9,0,0,534, + 81,1,0,0,0,535,540,5,86,0,0,536,537,5,54,0,0,537,538,3,6,3,0,538, + 539,5,56,0,0,539,541,1,0,0,0,540,536,1,0,0,0,540,541,1,0,0,0,541, + 
542,1,0,0,0,542,543,3,0,0,0,543,544,5,55,0,0,544,545,5,39,0,0,545, + 546,5,9,0,0,546,83,1,0,0,0,547,548,5,38,0,0,548,549,5,80,0,0,549, + 550,5,9,0,0,550,553,5,1,0,0,551,554,5,42,0,0,552,554,5,39,0,0,553, + 551,1,0,0,0,553,552,1,0,0,0,554,555,1,0,0,0,555,556,5,9,0,0,556, + 557,5,2,0,0,557,85,1,0,0,0,558,559,5,15,0,0,559,560,5,86,0,0,560, + 569,5,47,0,0,561,566,3,88,44,0,562,563,5,72,0,0,563,565,3,88,44, + 0,564,562,1,0,0,0,565,568,1,0,0,0,566,564,1,0,0,0,566,567,1,0,0, + 0,567,570,1,0,0,0,568,566,1,0,0,0,569,561,1,0,0,0,569,570,1,0,0, + 0,570,571,1,0,0,0,571,573,5,48,0,0,572,574,3,0,0,0,573,572,1,0,0, + 0,573,574,1,0,0,0,574,575,1,0,0,0,575,576,5,80,0,0,576,577,3,28, + 14,0,577,87,1,0,0,0,578,579,5,86,0,0,579,580,3,0,0,0,580,89,1,0, + 0,0,581,582,5,86,0,0,582,583,5,74,0,0,583,584,7,3,0,0,584,91,1,0, + 0,0,61,98,109,114,120,122,126,141,150,156,177,179,186,191,196,203, + 212,216,223,228,238,241,246,254,259,268,273,289,293,302,308,313, + 319,329,334,337,344,350,356,361,374,382,388,392,416,429,431,450, + 452,463,477,491,506,508,518,520,529,540,553,566,569,573 ] class PyNestMLParser ( Parser ): @@ -250,14 +245,13 @@ class PyNestMLParser ( Parser ): "'step'", "'inf'", "'and'", "'or'", "'not'", "'recordable'", "'kernel'", "'model'", "'state'", "'parameters'", "'internals'", "'update'", "'equations'", "'input'", "'output'", "'continuous'", - "'onReceive'", "'onCondition'", "'spike'", "'inhibitory'", - "'excitatory'", "'@homogeneous'", "'@heterogeneous'", - "'@'", "'...'", "'('", "')'", "'+'", "'~'", "'|'", - "'^'", "'&'", "'['", "'<-'", "']'", "'[['", "']]'", - "'<<'", "'>>'", "'<'", "'>'", "'<='", "'+='", "'-='", - "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", "','", - "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", "':'", - "'::'", "';'", "'''" ] + "'onReceive'", "'onCondition'", "'spike'", "'@homogeneous'", + "'@heterogeneous'", "'@'", "'...'", "'('", "')'", "'+'", + "'~'", "'|'", "'^'", "'&'", "'['", "'<-'", "']'", "'[['", + "']]'", "'<<'", "'>>'", "'<'", "'>'", 
"'<='", "'+='", + "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", + "','", "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", + "':'", "'::'", "';'", "'''" ] symbolicNames = [ "", "INDENT", "DEDENT", "DOCSTRING_TRIPLEQUOTE", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "DOCSTRING", @@ -271,8 +265,7 @@ class PyNestMLParser ( Parser ): "PARAMETERS_KEYWORD", "INTERNALS_KEYWORD", "UPDATE_KEYWORD", "EQUATIONS_KEYWORD", "INPUT_KEYWORD", "OUTPUT_KEYWORD", "CONTINUOUS_KEYWORD", "ON_RECEIVE_KEYWORD", "ON_CONDITION_KEYWORD", - "SPIKE_KEYWORD", "INHIBITORY_KEYWORD", "EXCITATORY_KEYWORD", - "DECORATOR_HOMOGENEOUS", "DECORATOR_HETEROGENEOUS", + "SPIKE_KEYWORD", "DECORATOR_HOMOGENEOUS", "DECORATOR_HETEROGENEOUS", "AT", "ELLIPSIS", "LEFT_PAREN", "RIGHT_PAREN", "PLUS", "TILDE", "PIPE", "CARET", "AMPERSAND", "LEFT_SQUARE_BRACKET", "LEFT_ANGLE_MINUS", "RIGHT_SQUARE_BRACKET", "LEFT_LEFT_SQUARE", @@ -328,11 +321,10 @@ class PyNestMLParser ( Parser ): RULE_inputBlock = 39 RULE_spikeInputPort = 40 RULE_continuousInputPort = 41 - RULE_inputQualifier = 42 - RULE_outputBlock = 43 - RULE_function = 44 - RULE_parameter = 45 - RULE_constParameter = 46 + RULE_outputBlock = 42 + RULE_function = 43 + RULE_parameter = 44 + RULE_constParameter = 45 ruleNames = [ "dataType", "unitType", "unitTypeExponent", "expression", "simpleExpression", "unaryOperator", "bitOperator", "comparisonOperator", @@ -344,8 +336,8 @@ class PyNestMLParser ( Parser ): "forStmt", "whileStmt", "nestMLCompilationUnit", "model", "modelBody", "onReceiveBlock", "onConditionBlock", "blockWithVariables", "updateBlock", "equationsBlock", "inputBlock", "spikeInputPort", - "continuousInputPort", "inputQualifier", "outputBlock", - "function", "parameter", "constParameter" ] + "continuousInputPort", "outputBlock", "function", "parameter", + "constParameter" ] EOF = Token.EOF INDENT=1 @@ -390,54 +382,52 @@ class PyNestMLParser ( Parser ): ON_RECEIVE_KEYWORD=40 ON_CONDITION_KEYWORD=41 SPIKE_KEYWORD=42 - INHIBITORY_KEYWORD=43 - 
EXCITATORY_KEYWORD=44 - DECORATOR_HOMOGENEOUS=45 - DECORATOR_HETEROGENEOUS=46 - AT=47 - ELLIPSIS=48 - LEFT_PAREN=49 - RIGHT_PAREN=50 - PLUS=51 - TILDE=52 - PIPE=53 - CARET=54 - AMPERSAND=55 - LEFT_SQUARE_BRACKET=56 - LEFT_ANGLE_MINUS=57 - RIGHT_SQUARE_BRACKET=58 - LEFT_LEFT_SQUARE=59 - RIGHT_RIGHT_SQUARE=60 - LEFT_LEFT_ANGLE=61 - RIGHT_RIGHT_ANGLE=62 - LEFT_ANGLE=63 - RIGHT_ANGLE=64 - LEFT_ANGLE_EQUALS=65 - PLUS_EQUALS=66 - MINUS_EQUALS=67 - STAR_EQUALS=68 - FORWARD_SLASH_EQUALS=69 - EQUALS_EQUALS=70 - EXCLAMATION_EQUALS=71 - LEFT_ANGLE_RIGHT_ANGLE=72 - RIGHT_ANGLE_EQUALS=73 - COMMA=74 - MINUS=75 - EQUALS=76 - STAR=77 - STAR_STAR=78 - FORWARD_SLASH=79 - PERCENT=80 - QUESTION=81 - COLON=82 - DOUBLE_COLON=83 - SEMICOLON=84 - DIFFERENTIAL_ORDER=85 - BOOLEAN_LITERAL=86 - STRING_LITERAL=87 - NAME=88 - UNSIGNED_INTEGER=89 - FLOAT=90 + DECORATOR_HOMOGENEOUS=43 + DECORATOR_HETEROGENEOUS=44 + AT=45 + ELLIPSIS=46 + LEFT_PAREN=47 + RIGHT_PAREN=48 + PLUS=49 + TILDE=50 + PIPE=51 + CARET=52 + AMPERSAND=53 + LEFT_SQUARE_BRACKET=54 + LEFT_ANGLE_MINUS=55 + RIGHT_SQUARE_BRACKET=56 + LEFT_LEFT_SQUARE=57 + RIGHT_RIGHT_SQUARE=58 + LEFT_LEFT_ANGLE=59 + RIGHT_RIGHT_ANGLE=60 + LEFT_ANGLE=61 + RIGHT_ANGLE=62 + LEFT_ANGLE_EQUALS=63 + PLUS_EQUALS=64 + MINUS_EQUALS=65 + STAR_EQUALS=66 + FORWARD_SLASH_EQUALS=67 + EQUALS_EQUALS=68 + EXCLAMATION_EQUALS=69 + LEFT_ANGLE_RIGHT_ANGLE=70 + RIGHT_ANGLE_EQUALS=71 + COMMA=72 + MINUS=73 + EQUALS=74 + STAR=75 + STAR_STAR=76 + FORWARD_SLASH=77 + PERCENT=78 + QUESTION=79 + COLON=80 + DOUBLE_COLON=81 + SEMICOLON=82 + DIFFERENTIAL_ORDER=83 + BOOLEAN_LITERAL=84 + STRING_LITERAL=85 + NAME=86 + UNSIGNED_INTEGER=87 + FLOAT=88 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -497,37 +487,37 @@ def dataType(self): localctx = PyNestMLParser.DataTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_dataType) try: - self.state = 100 + self.state = 98 self._errHandler.sync(self) token = 
self._input.LA(1) if token in [10]: self.enterOuterAlt(localctx, 1) - self.state = 94 + self.state = 92 localctx.isInt = self.match(PyNestMLParser.INTEGER_KEYWORD) pass elif token in [11]: self.enterOuterAlt(localctx, 2) - self.state = 95 + self.state = 93 localctx.isReal = self.match(PyNestMLParser.REAL_KEYWORD) pass elif token in [12]: self.enterOuterAlt(localctx, 3) - self.state = 96 + self.state = 94 localctx.isString = self.match(PyNestMLParser.STRING_KEYWORD) pass elif token in [13]: self.enterOuterAlt(localctx, 4) - self.state = 97 + self.state = 95 localctx.isBool = self.match(PyNestMLParser.BOOLEAN_KEYWORD) pass elif token in [14]: self.enterOuterAlt(localctx, 5) - self.state = 98 + self.state = 96 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass - elif token in [49, 88, 89]: + elif token in [47, 86, 87]: self.enterOuterAlt(localctx, 6) - self.state = 99 + self.state = 97 localctx.unit = self.unitType(0) pass else: @@ -613,34 +603,34 @@ def unitType(self, _p:int=0): self.enterRecursionRule(localctx, 2, self.RULE_unitType, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 111 + self.state = 109 self._errHandler.sync(self) token = self._input.LA(1) - if token in [49]: - self.state = 103 + if token in [47]: + self.state = 101 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) - self.state = 104 + self.state = 102 localctx.compoundUnit = self.unitType(0) - self.state = 105 + self.state = 103 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [89]: - self.state = 107 + elif token in [87]: + self.state = 105 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) - self.state = 108 + self.state = 106 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) - self.state = 109 + self.state = 107 localctx.right = self.unitType(2) pass - elif token in [88]: - self.state = 110 + elif token in [86]: + self.state = 108 localctx.unit = self.match(PyNestMLParser.NAME) pass else: raise 
NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 124 + self.state = 122 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -648,32 +638,32 @@ def unitType(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 122 + self.state = 120 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,3,self._ctx) if la_ == 1: localctx = PyNestMLParser.UnitTypeContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_unitType) - self.state = 113 + self.state = 111 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 116 + self.state = 114 self._errHandler.sync(self) token = self._input.LA(1) - if token in [77]: - self.state = 114 + if token in [75]: + self.state = 112 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [79]: - self.state = 115 + elif token in [77]: + self.state = 113 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass else: raise NoViableAltException(self) - self.state = 118 + self.state = 116 localctx.right = self.unitType(4) pass @@ -681,18 +671,18 @@ def unitType(self, _p:int=0): localctx = PyNestMLParser.UnitTypeContext(self, _parentctx, _parentState) localctx.base = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_unitType) - self.state = 119 + self.state = 117 if not self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") - self.state = 120 + self.state = 118 localctx.powOp = self.match(PyNestMLParser.STAR_STAR) - self.state = 121 + self.state = 119 localctx.exponent = self.unitTypeExponent() pass - self.state = 126 + 
self.state = 124 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -740,20 +730,20 @@ def unitTypeExponent(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 128 + self.state = 126 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==51 or _la==75: - self.state = 127 + if _la==49 or _la==73: + self.state = 125 _la = self._input.LA(1) - if not(_la==51 or _la==75): + if not(_la==49 or _la==73): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 130 + self.state = 128 self.match(PyNestMLParser.UNSIGNED_INTEGER) except RecognitionException as re: localctx.exception = re @@ -866,38 +856,38 @@ def expression(self, _p:int=0): self.enterRecursionRule(localctx, 6, self.RULE_expression, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 143 + self.state = 141 self._errHandler.sync(self) token = self._input.LA(1) - if token in [49]: - self.state = 133 + if token in [47]: + self.state = 131 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) - self.state = 134 + self.state = 132 localctx.term = self.expression(0) - self.state = 135 + self.state = 133 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [51, 52, 75]: - self.state = 137 + elif token in [49, 50, 73]: + self.state = 135 self.unaryOperator() - self.state = 138 + self.state = 136 localctx.term = self.expression(9) pass elif token in [28]: - self.state = 140 + self.state = 138 localctx.logicalNot = self.match(PyNestMLParser.NOT_KEYWORD) - self.state = 141 + self.state = 139 localctx.term = self.expression(4) pass - elif token in [25, 86, 87, 88, 89, 90]: - self.state = 142 + elif token in [25, 84, 85, 86, 87, 88]: + self.state = 140 self.simpleExpression() pass else: raise NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 181 + self.state = 179 self._errHandler.sync(self) _alt = 
self._interp.adaptivePredict(self._input,10,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -905,20 +895,20 @@ def expression(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 179 + self.state = 177 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,9,self._ctx) if la_ == 1: localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 145 + self.state = 143 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 146 + self.state = 144 localctx.powOp = self.match(PyNestMLParser.STAR_STAR) - self.state = 147 + self.state = 145 localctx.right = self.expression(10) pass @@ -926,29 +916,29 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 148 + self.state = 146 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 152 + self.state = 150 self._errHandler.sync(self) token = self._input.LA(1) - if token in [77]: - self.state = 149 + if token in [75]: + self.state = 147 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [79]: - self.state = 150 + elif token in [77]: + self.state = 148 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass - elif token in [80]: - self.state = 151 + elif token in [78]: + self.state = 149 localctx.moduloOp = self.match(PyNestMLParser.PERCENT) pass else: raise NoViableAltException(self) - self.state = 154 + self.state = 152 localctx.right = self.expression(9) pass @@ -956,25 
+946,25 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 155 + self.state = 153 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 158 + self.state = 156 self._errHandler.sync(self) token = self._input.LA(1) - if token in [51]: - self.state = 156 + if token in [49]: + self.state = 154 localctx.plusOp = self.match(PyNestMLParser.PLUS) pass - elif token in [75]: - self.state = 157 + elif token in [73]: + self.state = 155 localctx.minusOp = self.match(PyNestMLParser.MINUS) pass else: raise NoViableAltException(self) - self.state = 160 + self.state = 158 localctx.right = self.expression(8) pass @@ -982,13 +972,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 161 + self.state = 159 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 162 + self.state = 160 self.bitOperator() - self.state = 163 + self.state = 161 localctx.right = self.expression(7) pass @@ -996,13 +986,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 165 + self.state = 163 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 166 + self.state = 164 self.comparisonOperator() - self.state = 167 + self.state = 165 
localctx.right = self.expression(6) pass @@ -1010,13 +1000,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 169 + self.state = 167 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 170 + self.state = 168 self.logicalOperator() - self.state = 171 + self.state = 169 localctx.right = self.expression(4) pass @@ -1024,22 +1014,22 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.condition = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 173 + self.state = 171 if not self.precpred(self._ctx, 2): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") - self.state = 174 + self.state = 172 self.match(PyNestMLParser.QUESTION) - self.state = 175 + self.state = 173 localctx.ifTrue = self.expression(0) - self.state = 176 + self.state = 174 self.match(PyNestMLParser.COLON) - self.state = 177 + self.state = 175 localctx.ifNot = self.expression(3) pass - self.state = 183 + self.state = 181 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,10,self._ctx) @@ -1102,35 +1092,35 @@ def simpleExpression(self): self.enterRule(localctx, 8, self.RULE_simpleExpression) self._la = 0 # Token type try: - self.state = 193 + self.state = 191 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,12,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 184 + self.state = 182 self.functionCall() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 185 + self.state = 183 self.match(PyNestMLParser.BOOLEAN_LITERAL) pass elif la_ == 3: 
self.enterOuterAlt(localctx, 3) - self.state = 186 + self.state = 184 _la = self._input.LA(1) - if not(_la==89 or _la==90): + if not(_la==87 or _la==88): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 188 + self.state = 186 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,11,self._ctx) if la_ == 1: - self.state = 187 + self.state = 185 self.variable() @@ -1138,19 +1128,19 @@ def simpleExpression(self): elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 190 + self.state = 188 localctx.string = self.match(PyNestMLParser.STRING_LITERAL) pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 191 + self.state = 189 localctx.isInf = self.match(PyNestMLParser.INF_KEYWORD) pass elif la_ == 6: self.enterOuterAlt(localctx, 6) - self.state = 192 + self.state = 190 self.variable() pass @@ -1201,19 +1191,19 @@ def unaryOperator(self): self.enterRule(localctx, 10, self.RULE_unaryOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 198 + self.state = 196 self._errHandler.sync(self) token = self._input.LA(1) - if token in [51]: - self.state = 195 + if token in [49]: + self.state = 193 localctx.unaryPlus = self.match(PyNestMLParser.PLUS) pass - elif token in [75]: - self.state = 196 + elif token in [73]: + self.state = 194 localctx.unaryMinus = self.match(PyNestMLParser.MINUS) pass - elif token in [52]: - self.state = 197 + elif token in [50]: + self.state = 195 localctx.unaryTilde = self.match(PyNestMLParser.TILDE) pass else: @@ -1273,27 +1263,27 @@ def bitOperator(self): self.enterRule(localctx, 12, self.RULE_bitOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 205 + self.state = 203 self._errHandler.sync(self) token = self._input.LA(1) - if token in [55]: - self.state = 200 + if token in [53]: + self.state = 198 localctx.bitAnd = self.match(PyNestMLParser.AMPERSAND) pass - elif token in [54]: - self.state = 201 + elif token in [52]: + self.state = 
199 localctx.bitXor = self.match(PyNestMLParser.CARET) pass - elif token in [53]: - self.state = 202 + elif token in [51]: + self.state = 200 localctx.bitOr = self.match(PyNestMLParser.PIPE) pass - elif token in [61]: - self.state = 203 + elif token in [59]: + self.state = 201 localctx.bitShiftLeft = self.match(PyNestMLParser.LEFT_LEFT_ANGLE) pass - elif token in [62]: - self.state = 204 + elif token in [60]: + self.state = 202 localctx.bitShiftRight = self.match(PyNestMLParser.RIGHT_RIGHT_ANGLE) pass else: @@ -1361,35 +1351,35 @@ def comparisonOperator(self): self.enterRule(localctx, 14, self.RULE_comparisonOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 214 + self.state = 212 self._errHandler.sync(self) token = self._input.LA(1) - if token in [63]: - self.state = 207 + if token in [61]: + self.state = 205 localctx.lt = self.match(PyNestMLParser.LEFT_ANGLE) pass - elif token in [65]: - self.state = 208 + elif token in [63]: + self.state = 206 localctx.le = self.match(PyNestMLParser.LEFT_ANGLE_EQUALS) pass - elif token in [70]: - self.state = 209 + elif token in [68]: + self.state = 207 localctx.eq = self.match(PyNestMLParser.EQUALS_EQUALS) pass - elif token in [71]: - self.state = 210 + elif token in [69]: + self.state = 208 localctx.ne = self.match(PyNestMLParser.EXCLAMATION_EQUALS) pass - elif token in [72]: - self.state = 211 + elif token in [70]: + self.state = 209 localctx.ne2 = self.match(PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE) pass - elif token in [73]: - self.state = 212 + elif token in [71]: + self.state = 210 localctx.ge = self.match(PyNestMLParser.RIGHT_ANGLE_EQUALS) pass - elif token in [64]: - self.state = 213 + elif token in [62]: + self.state = 211 localctx.gt = self.match(PyNestMLParser.RIGHT_ANGLE) pass else: @@ -1437,15 +1427,15 @@ def logicalOperator(self): self.enterRule(localctx, 16, self.RULE_logicalOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 218 + self.state = 216 self._errHandler.sync(self) token = 
self._input.LA(1) if token in [26]: - self.state = 216 + self.state = 214 localctx.logicalAnd = self.match(PyNestMLParser.AND_KEYWORD) pass elif token in [27]: - self.state = 217 + self.state = 215 localctx.logicalOr = self.match(PyNestMLParser.OR_KEYWORD) pass else: @@ -1506,28 +1496,28 @@ def variable(self): self.enterRule(localctx, 18, self.RULE_variable) try: self.enterOuterAlt(localctx, 1) - self.state = 220 + self.state = 218 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 225 + self.state = 223 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,17,self._ctx) if la_ == 1: - self.state = 221 + self.state = 219 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 222 + self.state = 220 localctx.vectorParameter = self.expression(0) - self.state = 223 + self.state = 221 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 230 + self.state = 228 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 227 + self.state = 225 self.match(PyNestMLParser.DIFFERENTIAL_ORDER) - self.state = 232 + self.state = 230 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) @@ -1589,31 +1579,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 233 + self.state = 231 localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 234 + self.state = 232 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 243 + self.state = 241 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 63489) != 0): - self.state = 235 + if ((((_la - 25)) & ~0x3f) == 0 and ((1 << (_la - 25)) & -576179277272186871) != 0): + self.state = 233 self.expression(0) - self.state = 240 + self.state = 238 self._errHandler.sync(self) 
_la = self._input.LA(1) - while _la==74: - self.state = 236 + while _la==72: + self.state = 234 self.match(PyNestMLParser.COMMA) - self.state = 237 + self.state = 235 self.expression(0) - self.state = 242 + self.state = 240 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 245 + self.state = 243 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ -1686,43 +1676,43 @@ def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 248 + self.state = 246 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 247 + self.state = 245 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 250 + self.state = 248 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 251 + self.state = 249 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 252 + self.state = 250 self.dataType() - self.state = 253 + self.state = 251 self.match(PyNestMLParser.EQUALS) - self.state = 254 + self.state = 252 self.expression(0) - self.state = 256 + self.state = 254 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==84: - self.state = 255 + if _la==82: + self.state = 253 self.match(PyNestMLParser.SEMICOLON) - self.state = 261 + self.state = 259 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 258 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): + self.state = 256 localctx.decorator = self.anyDecorator() - self.state = 263 + self.state = 261 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 264 + self.state = 262 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1786,31 +1776,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 266 + self.state = 264 localctx.lhs = self.variable() - 
self.state = 267 + self.state = 265 self.match(PyNestMLParser.EQUALS) - self.state = 268 + self.state = 266 localctx.rhs = self.expression(0) - self.state = 270 + self.state = 268 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==84: - self.state = 269 + if _la==82: + self.state = 267 self.match(PyNestMLParser.SEMICOLON) - self.state = 275 + self.state = 273 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 272 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): + self.state = 270 localctx.decorator = self.anyDecorator() - self.state = 277 + self.state = 275 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 278 + self.state = 276 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1882,39 +1872,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 280 + self.state = 278 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 281 + self.state = 279 self.variable() - self.state = 282 + self.state = 280 self.match(PyNestMLParser.EQUALS) - self.state = 283 + self.state = 281 self.expression(0) - self.state = 291 + self.state = 289 self._errHandler.sync(self) _la = self._input.LA(1) while _la==4: - self.state = 284 + self.state = 282 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 285 + self.state = 283 self.variable() - self.state = 286 + self.state = 284 self.match(PyNestMLParser.EQUALS) - self.state = 287 + self.state = 285 self.expression(0) - self.state = 293 + self.state = 291 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 295 + self.state = 293 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==84: - self.state = 294 + if _la==82: + self.state = 292 self.match(PyNestMLParser.SEMICOLON) - self.state = 297 + self.state = 295 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: 
localctx.exception = re @@ -1967,23 +1957,23 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 299 + self.state = 297 self.match(PyNestMLParser.NEWLINE) - self.state = 300 + self.state = 298 self.match(PyNestMLParser.INDENT) - self.state = 302 + self.state = 300 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 301 + self.state = 299 self.stmt() - self.state = 304 + self.state = 302 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==88): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==86): break - self.state = 306 + self.state = 304 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2026,17 +2016,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_stmt) try: - self.state = 310 + self.state = 308 self._errHandler.sync(self) token = self._input.LA(1) - if token in [16, 17, 29, 88]: + if token in [16, 17, 29, 86]: self.enterOuterAlt(localctx, 1) - self.state = 308 + self.state = 306 self.smallStmt() pass elif token in [18, 21, 22]: self.enterOuterAlt(localctx, 2) - self.state = 309 + self.state = 307 self.compoundStmt() pass else: @@ -2087,22 +2077,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_compoundStmt) try: - self.state = 315 + self.state = 313 self._errHandler.sync(self) token = self._input.LA(1) if token in [18]: self.enterOuterAlt(localctx, 1) - self.state = 312 + self.state = 310 self.ifStmt() pass elif token in [21]: self.enterOuterAlt(localctx, 2) - self.state = 313 + self.state = 311 self.forStmt() pass elif token in [22]: self.enterOuterAlt(localctx, 3) - self.state = 314 + self.state = 312 self.whileStmt() pass else: @@ -2161,31 +2151,31 @@ def smallStmt(self): 
self.enterRule(localctx, 34, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 321 + self.state = 319 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,31,self._ctx) if la_ == 1: - self.state = 317 + self.state = 315 self.assignment() pass elif la_ == 2: - self.state = 318 + self.state = 316 self.functionCall() pass elif la_ == 3: - self.state = 319 + self.state = 317 self.declaration() pass elif la_ == 4: - self.state = 320 + self.state = 318 self.returnStmt() pass - self.state = 323 + self.state = 321 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2250,35 +2240,35 @@ def assignment(self): self.enterRule(localctx, 36, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 325 + self.state = 323 localctx.lhs_variable = self.variable() - self.state = 331 + self.state = 329 self._errHandler.sync(self) token = self._input.LA(1) - if token in [76]: - self.state = 326 + if token in [74]: + self.state = 324 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass - elif token in [66]: - self.state = 327 + elif token in [64]: + self.state = 325 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass - elif token in [67]: - self.state = 328 + elif token in [65]: + self.state = 326 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass - elif token in [68]: - self.state = 329 + elif token in [66]: + self.state = 327 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass - elif token in [69]: - self.state = 330 + elif token in [67]: + self.state = 328 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass else: raise NoViableAltException(self) - self.state = 333 + self.state = 331 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2366,67 +2356,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 
336 + self.state = 334 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 335 + self.state = 333 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 339 + self.state = 337 self._errHandler.sync(self) _la = self._input.LA(1) if _la==16: - self.state = 338 + self.state = 336 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 341 + self.state = 339 self.variable() - self.state = 346 + self.state = 344 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 342 + while _la==72: + self.state = 340 self.match(PyNestMLParser.COMMA) - self.state = 343 + self.state = 341 self.variable() - self.state = 348 + self.state = 346 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 349 + self.state = 347 self.dataType() - self.state = 352 + self.state = 350 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==76: - self.state = 350 + if _la==74: + self.state = 348 self.match(PyNestMLParser.EQUALS) - self.state = 351 + self.state = 349 localctx.rhs = self.expression(0) - self.state = 358 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==59: - self.state = 354 + if _la==57: + self.state = 352 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 355 + self.state = 353 localctx.invariant = self.expression(0) - self.state = 356 + self.state = 354 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 363 + self.state = 361 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 246290604621824) != 0): - self.state = 360 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): + self.state = 358 localctx.decorator = self.anyDecorator() - self.state = 365 + self.state = 363 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2471,9 +2461,9 @@ def declaration_newline(self): self.enterRule(localctx, 40, self.RULE_declaration_newline) try: 
self.enterOuterAlt(localctx, 1) - self.state = 366 + self.state = 364 self.declaration() - self.state = 367 + self.state = 365 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2528,28 +2518,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_anyDecorator) try: - self.state = 376 + self.state = 374 self._errHandler.sync(self) token = self._input.LA(1) - if token in [45]: + if token in [43]: self.enterOuterAlt(localctx, 1) - self.state = 369 + self.state = 367 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass - elif token in [46]: + elif token in [44]: self.enterOuterAlt(localctx, 2) - self.state = 370 + self.state = 368 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass - elif token in [47]: + elif token in [45]: self.enterOuterAlt(localctx, 3) - self.state = 371 + self.state = 369 self.match(PyNestMLParser.AT) - self.state = 372 + self.state = 370 self.namespaceDecoratorNamespace() - self.state = 373 + self.state = 371 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 374 + self.state = 372 self.namespaceDecoratorName() pass else: @@ -2593,7 +2583,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 44, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 378 + self.state = 376 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2633,7 +2623,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 46, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 380 + self.state = 378 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2677,13 +2667,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 382 + self.state = 380 
self.match(PyNestMLParser.RETURN_KEYWORD) - self.state = 384 + self.state = 382 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 63489) != 0): - self.state = 383 + if ((((_la - 25)) & ~0x3f) == 0 and ((1 << (_la - 25)) & -576179277272186871) != 0): + self.state = 381 self.expression(0) @@ -2737,23 +2727,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 386 + self.state = 384 self.ifClause() - self.state = 390 + self.state = 388 self._errHandler.sync(self) _la = self._input.LA(1) while _la==19: - self.state = 387 + self.state = 385 self.elifClause() - self.state = 392 + self.state = 390 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 394 + self.state = 392 self._errHandler.sync(self) _la = self._input.LA(1) if _la==20: - self.state = 393 + self.state = 391 self.elseClause() @@ -2805,13 +2795,13 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 396 + self.state = 394 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 397 + self.state = 395 self.expression(0) - self.state = 398 + self.state = 396 self.match(PyNestMLParser.COLON) - self.state = 399 + self.state = 397 self.block() except RecognitionException as re: localctx.exception = re @@ -2861,13 +2851,13 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 401 + self.state = 399 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 402 + self.state = 400 self.expression(0) - self.state = 403 + self.state = 401 self.match(PyNestMLParser.COLON) - self.state = 404 + self.state = 402 self.block() except RecognitionException as re: localctx.exception = re @@ -2913,11 +2903,11 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: 
self.enterOuterAlt(localctx, 1) - self.state = 406 + self.state = 404 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 407 + self.state = 405 self.match(PyNestMLParser.COLON) - self.state = 408 + self.state = 406 self.block() except RecognitionException as re: localctx.exception = re @@ -2996,39 +2986,39 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 410 + self.state = 408 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 411 + self.state = 409 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 412 + self.state = 410 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 413 + self.state = 411 localctx.start_from = self.expression(0) - self.state = 414 + self.state = 412 self.match(PyNestMLParser.ELLIPSIS) - self.state = 415 + self.state = 413 localctx.end_at = self.expression(0) - self.state = 416 + self.state = 414 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 418 + self.state = 416 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==75: - self.state = 417 + if _la==73: + self.state = 415 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 420 + self.state = 418 _la = self._input.LA(1) - if not(_la==89 or _la==90): + if not(_la==87 or _la==88): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 421 + self.state = 419 self.match(PyNestMLParser.COLON) - self.state = 422 + self.state = 420 self.block() except RecognitionException as re: localctx.exception = re @@ -3078,13 +3068,13 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 424 + self.state = 422 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 425 + self.state = 423 self.expression(0) - self.state = 426 + self.state = 424 self.match(PyNestMLParser.COLON) - self.state = 427 + self.state = 425 self.block() except RecognitionException as re: localctx.exception = re @@ 
-3137,31 +3127,31 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 431 + self.state = 429 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 431 + self.state = 429 self._errHandler.sync(self) token = self._input.LA(1) if token in [31]: - self.state = 429 + self.state = 427 self.model() pass elif token in [9]: - self.state = 430 + self.state = 428 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 433 + self.state = 431 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==9 or _la==31): break - self.state = 435 + self.state = 433 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3207,11 +3197,11 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 437 + self.state = 435 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 438 + self.state = 436 self.match(PyNestMLParser.NAME) - self.state = 439 + self.state = 437 self.modelBody() except RecognitionException as re: localctx.exception = re @@ -3316,61 +3306,61 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 441 + self.state = 439 self.match(PyNestMLParser.COLON) - self.state = 442 + self.state = 440 self.match(PyNestMLParser.NEWLINE) - self.state = 443 + self.state = 441 self.match(PyNestMLParser.INDENT) - self.state = 452 + self.state = 450 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 452 + self.state = 450 self._errHandler.sync(self) token = self._input.LA(1) if token in [32, 33, 34]: - self.state = 444 + self.state = 442 self.blockWithVariables() pass elif token in [36]: - self.state = 445 + self.state = 443 self.equationsBlock() pass elif token in [37]: - self.state = 446 + self.state = 444 self.inputBlock() pass elif token in [38]: - self.state = 447 + self.state = 445 
self.outputBlock() pass elif token in [15]: - self.state = 448 + self.state = 446 self.function() pass elif token in [40]: - self.state = 449 + self.state = 447 self.onReceiveBlock() pass elif token in [41]: - self.state = 450 + self.state = 448 self.onConditionBlock() pass elif token in [35]: - self.state = 451 + self.state = 449 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 454 + self.state = 452 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): break - self.state = 456 + self.state = 454 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3440,29 +3430,29 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 458 + self.state = 456 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 459 + self.state = 457 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 460 + self.state = 458 localctx.inputPortName = self.match(PyNestMLParser.NAME) - self.state = 465 + self.state = 463 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 461 + while _la==72: + self.state = 459 self.match(PyNestMLParser.COMMA) - self.state = 462 + self.state = 460 self.constParameter() - self.state = 467 + self.state = 465 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 468 + self.state = 466 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 469 + self.state = 467 self.match(PyNestMLParser.COLON) - self.state = 470 + self.state = 468 self.block() except RecognitionException as re: localctx.exception = re @@ -3533,29 +3523,29 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 472 + self.state = 470 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 473 + self.state = 471 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 474 + self.state = 472 localctx.condition 
= self.expression(0) - self.state = 479 + self.state = 477 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 475 + while _la==72: + self.state = 473 self.match(PyNestMLParser.COMMA) - self.state = 476 + self.state = 474 self.constParameter() - self.state = 481 + self.state = 479 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 482 + self.state = 480 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 483 + self.state = 481 self.match(PyNestMLParser.COLON) - self.state = 484 + self.state = 482 self.block() except RecognitionException as re: localctx.exception = re @@ -3621,7 +3611,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 486 + self.state = 484 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): @@ -3629,25 +3619,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 487 + self.state = 485 self.match(PyNestMLParser.COLON) - self.state = 488 + self.state = 486 self.match(PyNestMLParser.NEWLINE) - self.state = 489 + self.state = 487 self.match(PyNestMLParser.INDENT) - self.state = 491 + self.state = 489 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 490 + self.state = 488 self.declaration_newline() - self.state = 493 + self.state = 491 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==16 or _la==29 or _la==88): + if not (_la==16 or _la==29 or _la==86): break - self.state = 495 + self.state = 493 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3693,11 +3683,11 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 497 + self.state = 495 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 498 + self.state = 496 self.match(PyNestMLParser.COLON) 
- self.state = 499 + self.state = 497 self.block() except RecognitionException as re: localctx.exception = re @@ -3770,43 +3760,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 501 + self.state = 499 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 502 + self.state = 500 self.match(PyNestMLParser.COLON) - self.state = 503 + self.state = 501 self.match(PyNestMLParser.NEWLINE) - self.state = 504 + self.state = 502 self.match(PyNestMLParser.INDENT) - self.state = 508 + self.state = 506 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 508 + self.state = 506 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 29]: - self.state = 505 + self.state = 503 self.inlineExpression() pass - elif token in [88]: - self.state = 506 + elif token in [86]: + self.state = 504 self.odeEquation() pass elif token in [30]: - self.state = 507 + self.state = 505 self.kernel() pass else: raise NoViableAltException(self) - self.state = 510 + self.state = 508 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==88): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==86): break - self.state = 512 + self.state = 510 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3872,39 +3862,39 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 514 + self.state = 512 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 515 + self.state = 513 self.match(PyNestMLParser.COLON) - self.state = 516 + self.state = 514 self.match(PyNestMLParser.NEWLINE) - self.state = 517 + self.state = 515 self.match(PyNestMLParser.INDENT) - self.state = 520 + self.state = 518 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 520 + self.state = 518 self._errHandler.sync(self) la_ = 
self._interp.adaptivePredict(self._input,53,self._ctx) if la_ == 1: - self.state = 518 + self.state = 516 self.spikeInputPort() pass elif la_ == 2: - self.state = 519 + self.state = 517 self.continuousInputPort() pass - self.state = 522 + self.state = 520 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==88): + if not (_la==86): break - self.state = 524 + self.state = 522 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3942,13 +3932,6 @@ def LEFT_SQUARE_BRACKET(self): def RIGHT_SQUARE_BRACKET(self): return self.getToken(PyNestMLParser.RIGHT_SQUARE_BRACKET, 0) - def inputQualifier(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PyNestMLParser.InputQualifierContext) - else: - return self.getTypedRuleContext(PyNestMLParser.InputQualifierContext,i) - - def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) @@ -3972,35 +3955,25 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 526 + self.state = 524 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 531 + self.state = 529 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==56: - self.state = 527 + if _la==54: + self.state = 525 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 528 + self.state = 526 localctx.sizeParameter = self.expression(0) - self.state = 529 + self.state = 527 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 533 + self.state = 531 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 537 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==43 or _la==44: - self.state = 534 - self.inputQualifier() - self.state = 539 - self._errHandler.sync(self) - _la = self._input.LA(1) - - self.state = 540 + self.state = 532 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 541 + self.state = 533 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: 
localctx.exception = re @@ -4065,27 +4038,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 543 + self.state = 535 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 548 + self.state = 540 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==56: - self.state = 544 + if _la==54: + self.state = 536 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 545 + self.state = 537 localctx.sizeParameter = self.expression(0) - self.state = 546 + self.state = 538 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 550 + self.state = 542 self.dataType() - self.state = 551 + self.state = 543 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 552 + self.state = 544 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 553 + self.state = 545 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4096,62 +4069,6 @@ def continuousInputPort(self): return localctx - class InputQualifierContext(ParserRuleContext): - __slots__ = 'parser' - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - self.isInhibitory = None # Token - self.isExcitatory = None # Token - - def INHIBITORY_KEYWORD(self): - return self.getToken(PyNestMLParser.INHIBITORY_KEYWORD, 0) - - def EXCITATORY_KEYWORD(self): - return self.getToken(PyNestMLParser.EXCITATORY_KEYWORD, 0) - - def getRuleIndex(self): - return PyNestMLParser.RULE_inputQualifier - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitInputQualifier" ): - return visitor.visitInputQualifier(self) - else: - return visitor.visitChildren(self) - - - - - def inputQualifier(self): - - localctx = PyNestMLParser.InputQualifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 84, self.RULE_inputQualifier) - try: - self.enterOuterAlt(localctx, 1) - self.state = 557 - 
self._errHandler.sync(self) - token = self._input.LA(1) - if token in [43]: - self.state = 555 - localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) - pass - elif token in [44]: - self.state = 556 - localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) - pass - else: - raise NoViableAltException(self) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - class OutputBlockContext(ParserRuleContext): __slots__ = 'parser' @@ -4200,34 +4117,34 @@ def accept(self, visitor:ParseTreeVisitor): def outputBlock(self): localctx = PyNestMLParser.OutputBlockContext(self, self._ctx, self.state) - self.enterRule(localctx, 86, self.RULE_outputBlock) + self.enterRule(localctx, 84, self.RULE_outputBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 559 + self.state = 547 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 560 + self.state = 548 self.match(PyNestMLParser.COLON) - self.state = 561 + self.state = 549 self.match(PyNestMLParser.NEWLINE) - self.state = 562 + self.state = 550 self.match(PyNestMLParser.INDENT) - self.state = 565 + self.state = 553 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 563 + self.state = 551 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [39]: - self.state = 564 + self.state = 552 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 567 + self.state = 555 self.match(PyNestMLParser.NEWLINE) - self.state = 568 + self.state = 556 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4297,49 +4214,49 @@ def accept(self, visitor:ParseTreeVisitor): def function(self): localctx = PyNestMLParser.FunctionContext(self, self._ctx, self.state) - self.enterRule(localctx, 88, 
self.RULE_function) + self.enterRule(localctx, 86, self.RULE_function) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 570 + self.state = 558 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 571 + self.state = 559 self.match(PyNestMLParser.NAME) - self.state = 572 + self.state = 560 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 581 + self.state = 569 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==88: - self.state = 573 + if _la==86: + self.state = 561 self.parameter() - self.state = 578 + self.state = 566 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==74: - self.state = 574 + while _la==72: + self.state = 562 self.match(PyNestMLParser.COMMA) - self.state = 575 + self.state = 563 self.parameter() - self.state = 580 + self.state = 568 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 583 + self.state = 571 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 585 + self.state = 573 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==88 or _la==89: - self.state = 584 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 140737488387072) != 0) or _la==86 or _la==87: + self.state = 572 localctx.returnType = self.dataType() - self.state = 587 + self.state = 575 self.match(PyNestMLParser.COLON) - self.state = 588 + self.state = 576 self.block() except RecognitionException as re: localctx.exception = re @@ -4379,12 +4296,12 @@ def accept(self, visitor:ParseTreeVisitor): def parameter(self): localctx = PyNestMLParser.ParameterContext(self, self._ctx, self.state) - self.enterRule(localctx, 90, self.RULE_parameter) + self.enterRule(localctx, 88, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 590 + self.state = 578 self.match(PyNestMLParser.NAME) - self.state = 591 + self.state = 579 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4440,18 +4357,18 @@ def 
accept(self, visitor:ParseTreeVisitor): def constParameter(self): localctx = PyNestMLParser.ConstParameterContext(self, self._ctx, self.state) - self.enterRule(localctx, 92, self.RULE_constParameter) + self.enterRule(localctx, 90, self.RULE_constParameter) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 593 + self.state = 581 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 594 + self.state = 582 self.match(PyNestMLParser.EQUALS) - self.state = 595 + self.state = 583 localctx.value = self._input.LT(1) _la = self._input.LA(1) - if not(_la==25 or ((((_la - 86)) & ~0x3f) == 0 and ((1 << (_la - 86)) & 27) != 0)): + if not(((((_la - 25)) & ~0x3f) == 0 and ((1 << (_la - 25)) & -2882303761517117439) != 0)): localctx.value = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index dc4499256..fbce7d03a 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -219,11 +219,6 @@ def visitContinuousInputPort(self, ctx:PyNestMLParser.ContinuousInputPortContext return self.visitChildren(ctx) - # Visit a parse tree produced by PyNestMLParser#inputQualifier. - def visitInputQualifier(self, ctx:PyNestMLParser.InputQualifierContext): - return self.visitChildren(ctx) - - # Visit a parse tree produced by PyNestMLParser#outputBlock. 
def visitOutputBlock(self, ctx:PyNestMLParser.OutputBlockContext): return self.visitChildren(ctx) diff --git a/pynestml/grammars/PyNestMLLexer.g4 b/pynestml/grammars/PyNestMLLexer.g4 index c7255e807..376469003 100644 --- a/pynestml/grammars/PyNestMLLexer.g4 +++ b/pynestml/grammars/PyNestMLLexer.g4 @@ -93,8 +93,6 @@ lexer grammar PyNestMLLexer; ON_RECEIVE_KEYWORD : 'onReceive'; ON_CONDITION_KEYWORD : 'onCondition'; SPIKE_KEYWORD : 'spike'; - INHIBITORY_KEYWORD : 'inhibitory'; - EXCITATORY_KEYWORD : 'excitatory'; DECORATOR_HOMOGENEOUS : '@homogeneous'; DECORATOR_HETEROGENEOUS : '@heterogeneous'; diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 5d2af2d50..3d4c4f459 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -279,7 +279,7 @@ parser grammar PyNestMLParser; /** ASTInputBlock represents a single input block, e.g.: input: - spike_in <- excitatory spike + spike_in <- spike current_in pA <- continuous @attribute inputPort: A list of input ports. */ @@ -287,20 +287,18 @@ parser grammar PyNestMLParser; NEWLINE INDENT (spikeInputPort | continuousInputPort)+ DEDENT; /** ASTInputPort represents a single input port, e.g.: - spike_in[3] <- excitatory spike + spike_in[3] <- spike I_stim[3] pA <- continuous @attribute name: The name of the input port. @attribute sizeParameter: Optional size parameter for model with multiple input ports. @attribute datatype: Optional data type of the port. - @attribute inputQualifier: The qualifier keyword of the input port, to indicate e.g. inhibitory-only or excitatory-only spiking inputs on this port. @attribute isSpike: Indicates that this input port accepts spikes. @attribute isContinuous: Indicates that this input port accepts continuous-time input. */ spikeInputPort: name=NAME (LEFT_SQUARE_BRACKET sizeParameter=expression RIGHT_SQUARE_BRACKET)? 
- LEFT_ANGLE_MINUS inputQualifier* - SPIKE_KEYWORD NEWLINE; + LEFT_ANGLE_MINUS SPIKE_KEYWORD NEWLINE; continuousInputPort: name = NAME @@ -309,12 +307,6 @@ parser grammar PyNestMLParser; LEFT_ANGLE_MINUS CONTINUOUS_KEYWORD NEWLINE; - /** ASTInputQualifier represents the qualifier of an inputPort. Only valid for spiking inputs. - @attribute isInhibitory: Indicates that this spiking input port is inhibitory. - @attribute isExcitatory: Indicates that this spiking input port is excitatory. - */ - inputQualifier : (isInhibitory=INHIBITORY_KEYWORD | isExcitatory=EXCITATORY_KEYWORD); - /** ASTOutputBlock Represents the output block of the neuron, i.e., declarations of output ports: output: spike diff --git a/pynestml/meta_model/__init__.py b/pynestml/meta_model/__init__.py index 4af860d75..08a312ae3 100644 --- a/pynestml/meta_model/__init__.py +++ b/pynestml/meta_model/__init__.py @@ -41,7 +41,6 @@ 'ast_inline_expression', 'ast_input_block', 'ast_input_port', - 'ast_input_qualifier', 'ast_kernel', 'ast_logical_operator', 'ast_model', diff --git a/pynestml/meta_model/ast_input_block.py b/pynestml/meta_model/ast_input_block.py index d74dd8c36..c50d98741 100644 --- a/pynestml/meta_model/ast_input_block.py +++ b/pynestml/meta_model/ast_input_block.py @@ -32,7 +32,7 @@ class ASTInputBlock(ASTNode): .. 
code-block:: nestml input: - spike_in <- excitatory spike + spike_in <- spike current_in pA <- continuous Attributes: diff --git a/pynestml/meta_model/ast_input_port.py b/pynestml/meta_model/ast_input_port.py index 45bc87dbb..c5109df24 100644 --- a/pynestml/meta_model/ast_input_port.py +++ b/pynestml/meta_model/ast_input_port.py @@ -25,7 +25,6 @@ from pynestml.meta_model.ast_data_type import ASTDataType from pynestml.meta_model.ast_expression import ASTExpression -from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_node import ASTNode from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.utils.port_signal_type import PortSignalType @@ -38,23 +37,13 @@ class ASTInputPort(ASTNode): .. code-block:: nestml - spike_in pA <- excitatory spike + spike_in pA <- spike @attribute name: The name of the input port. @attribute sizeParameter: Optional size parameter for multisynapse neuron. @attribute datatype: Optional data type of the port. - @attribute inputQualifier: The qualifier keyword of the input port, to indicate e.g. inhibitory-only or excitatory-only spiking inputs on this port. @attribute isSpike: Indicates that this input port accepts spikes. @attribute isContinuous: Indicates that this input port accepts continuous time input. - - Grammar: - inputPort: - name=NAME - (LEFT_SQUARE_BRACKET sizeParameter=NAME RIGHT_SQUARE_BRACKET)? - (dataType)? - LEFT_ANGLE_MINUS inputQualifier* - (isContinuous = CONTINUOUS_KEYWORD | isSpike = SPIKE_KEYWORD); - """ def __init__(self, @@ -62,7 +51,6 @@ def __init__(self, signal_type: PortSignalType, size_parameter: Optional[Union[ASTSimpleExpression, ASTExpression]] = None, data_type: Optional[ASTDataType] = None, - input_qualifiers: Optional[List[ASTInputQualifier]] = None, *args, **kwargs): r""" Standard constructor. 
@@ -72,17 +60,13 @@ def __init__(self, :param name: the name of the port :param size_parameter: a parameter indicating the index in an array. :param data_type: the data type of this input port - :param input_qualifiers: a list of input qualifiers for this port. :param signal_type: type of signal received, i.e., spikes or continuous """ super(ASTInputPort, self).__init__(*args, **kwargs) - if input_qualifiers is None: - input_qualifiers = [] self.name = name self.signal_type = signal_type self.size_parameter = size_parameter self.data_type = data_type - self.input_qualifiers = input_qualifiers def clone(self) -> ASTInputPort: r""" @@ -97,7 +81,6 @@ def clone(self) -> ASTInputPort: signal_type=self.signal_type, size_parameter=self.size_parameter, data_type=data_type_dup, - input_qualifiers=[input_qualifier.clone() for input_qualifier in self.input_qualifiers], # ASTNode common attributes: source_position=self.source_position, scope=self.scope, @@ -129,20 +112,6 @@ def get_size_parameter(self) -> Optional[Union[ASTSimpleExpression, ASTExpressio """ return self.size_parameter - def has_input_qualifiers(self) -> bool: - r""" - Returns whether input qualifiers have been defined. - :return: True, if at least one input qualifier has been defined. - """ - return len(self.input_qualifiers) > 0 - - def get_input_qualifiers(self) -> List[ASTInputQualifier]: - r""" - Returns the list of input qualifiers. - :return: a list of input qualifiers. - """ - return self.input_qualifiers - def is_spike(self) -> bool: r""" Returns whether this is a spiking input port or not. @@ -157,32 +126,6 @@ def is_continuous(self) -> bool: """ return self.signal_type is PortSignalType.CONTINUOUS - def is_excitatory(self) -> bool: - r""" - Returns whether this port is excitatory or not. For this, it has to be marked explicitly by the - excitatory keyword or no keywords at all shall occur (implicitly all types). - :return: True if excitatory, False otherwise. 
- """ - if self.get_input_qualifiers() is not None and len(self.get_input_qualifiers()) == 0: - return True - for in_type in self.get_input_qualifiers(): - if in_type.is_excitatory: - return True - return False - - def is_inhibitory(self) -> bool: - r""" - Returns whether this port is inhibitory or not. For this, it has to be marked explicitly by the - inhibitory keyword or no keywords at all shall occur (implicitly all types). - :return: True if inhibitory, False otherwise. - """ - if self.get_input_qualifiers() is not None and len(self.get_input_qualifiers()) == 0: - return True - for in_type in self.get_input_qualifiers(): - if in_type.is_inhibitory: - return True - return False - def has_datatype(self): r""" Returns whether this port has a defined data type or not. @@ -206,9 +149,6 @@ def get_children(self) -> List[ASTNode]: if self.has_datatype(): children.append(self.get_datatype()) - for qual in self.get_input_qualifiers(): - children.append(qual) - if self.get_size_parameter(): children.append(self.get_size_parameter()) @@ -228,7 +168,7 @@ def equals(self, other: ASTNode) -> bool: return False if (self.has_size_parameter() and other.has_size_parameter() - and self.get_input_qualifiers() != other.get_size_parameter()): + and self.get_size_parameter() != other.get_size_parameter()): return False if self.has_datatype() + other.has_datatype() == 1: @@ -237,13 +177,4 @@ def equals(self, other: ASTNode) -> bool: if self.has_datatype() and other.has_datatype() and not self.get_datatype().equals(other.get_datatype()): return False - if len(self.get_input_qualifiers()) != len(other.get_input_qualifiers()): - return False - - my_input_qualifiers = self.get_input_qualifiers() - your_input_qualifiers = other.get_input_qualifiers() - for i in range(0, len(my_input_qualifiers)): - if not my_input_qualifiers[i].equals(your_input_qualifiers[i]): - return False - return self.is_spike() == other.is_spike() and self.is_continuous() == other.is_continuous() diff --git 
a/pynestml/meta_model/ast_input_qualifier.py b/pynestml/meta_model/ast_input_qualifier.py deleted file mode 100644 index 6c34c33ec..000000000 --- a/pynestml/meta_model/ast_input_qualifier.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# -# ast_input_qualifier.py -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -from typing import List - -from pynestml.meta_model.ast_node import ASTNode - - -class ASTInputQualifier(ASTNode): - """ - This class is used to store the qualifier of a buffer. - ASTInputQualifier represents the qualifier of the input port. Only valid for spiking inputs. - @attribute inhibitory true Indicates that this spiking input port is inhibitory. - @attribute excitatory true Indicates that this spiking input port is excitatory. - - Grammar: - inputQualifier : ('inhibitory' | 'excitatory'); - - Attributes: - is_inhibitory = False - is_excitatory = False - """ - - def __init__(self, is_inhibitory=False, is_excitatory=False, *args, **kwargs): - """ - Standard constructor. - - Parameters for superclass (ASTNode) can be passed through :python:`*args` and :python:`**kwargs`. - - :param is_inhibitory: is inhibitory buffer. - :type is_inhibitory: bool - :param is_excitatory: is excitatory buffer. 
- :type is_excitatory: bool - """ - super(ASTInputQualifier, self).__init__(*args, **kwargs) - self.is_excitatory = is_excitatory - self.is_inhibitory = is_inhibitory - - def clone(self): - """ - Return a clone ("deep copy") of this node. - - :return: new AST node instance - :rtype: ASTInputQualifier - """ - dup = ASTInputQualifier(is_excitatory=self.is_excitatory, - is_inhibitory=self.is_inhibitory, - # ASTNode common attributes: - source_position=self.source_position, - scope=self.scope, - comment=self.comment, - pre_comments=[s for s in self.pre_comments], - in_comment=self.in_comment, - implicit_conversion_factor=self.implicit_conversion_factor) - - return dup - - def get_children(self) -> List[ASTNode]: - r""" - Returns the children of this node, if any. - :return: List of children of this node. - """ - return [] - - def equals(self, other: ASTNode) -> bool: - r""" - The equality method. - """ - if not isinstance(other, ASTInputQualifier): - return False - - return self.is_excitatory == other.is_excitatory and self.is_inhibitory == other.is_inhibitory diff --git a/pynestml/meta_model/ast_model.py b/pynestml/meta_model/ast_model.py index c4b7374bf..85e742b73 100644 --- a/pynestml/meta_model/ast_model.py +++ b/pynestml/meta_model/ast_model.py @@ -308,16 +308,15 @@ def get_multiple_receptors(self) -> List[VariableSymbol]: """ ret = list() for port in self.get_spike_input_ports(): - if port.is_excitatory() and port.is_inhibitory(): - if port is not None: - ret.append(port) - else: - code, message = Messages.get_could_not_resolve(port.get_symbol_name()) - Logger.log_message( - message=message, - code=code, - error_position=port.get_source_position(), - log_level=LoggingLevel.ERROR) + if port is not None: + ret.append(port) + else: + code, message = Messages.get_could_not_resolve(port.get_symbol_name()) + Logger.log_message( + message=message, + code=code, + error_position=port.get_source_position(), + log_level=LoggingLevel.ERROR) return ret def 
get_kernel_by_name(self, kernel_name: str) -> Optional[ASTKernel]: diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index da3986be9..ad3cffed9 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -40,7 +40,6 @@ from pynestml.meta_model.ast_inline_expression import ASTInlineExpression from pynestml.meta_model.ast_input_block import ASTInputBlock from pynestml.meta_model.ast_input_port import ASTInputPort -from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_if_clause import ASTIfClause from pynestml.meta_model.ast_if_stmt import ASTIfStmt from pynestml.meta_model.ast_kernel import ASTKernel @@ -253,16 +252,11 @@ def create_ast_input_block(cls, input_definitions, source_position): return ASTInputBlock(input_definitions, source_position=source_position) @classmethod - def create_ast_input_port(cls, name, size_parameter, data_type, input_qualifiers, signal_type, source_position): - # type:(str,str,(None|ASTDataType),list(ASTInputQualifier),PortSignalType,ASTSourceLocation) -> ASTInputPort - return ASTInputPort(name=name, size_parameter=size_parameter, data_type=data_type, input_qualifiers=input_qualifiers, + def create_ast_input_port(cls, name, size_parameter, data_type, signal_type, source_position): + # type:(str,str,(None|ASTDataType),PortSignalType,ASTSourceLocation) -> ASTInputPort + return ASTInputPort(name=name, size_parameter=size_parameter, data_type=data_type, signal_type=signal_type, source_position=source_position) - @classmethod - def create_ast_input_qualifier(cls, is_inhibitory=False, is_excitatory=False, source_position=None): - # type: (bool,bool,ASTSourceLocation) -> ASTInputQualifier - return ASTInputQualifier(is_inhibitory, is_excitatory, source_position=source_position) - @classmethod def create_ast_logical_operator(cls, is_logical_and=False, is_logical_or=False, source_position=None): # type: 
(bool,bool,ASTSourceLocation) -> ASTLogicalOperator diff --git a/pynestml/symbols/variable_symbol.py b/pynestml/symbols/variable_symbol.py index 66b4d0a6c..3400a42da 100644 --- a/pynestml/symbols/variable_symbol.py +++ b/pynestml/symbols/variable_symbol.py @@ -221,20 +221,6 @@ def is_continuous_input_port(self) -> bool: """ return isinstance(self.get_referenced_object(), ASTInputPort) and self.get_referenced_object().is_continuous() - def is_excitatory(self) -> bool: - """ - Returns whether this symbol represents an input port with qualifier excitatory. - :return: True if is excitatory, otherwise False. - """ - return isinstance(self.get_referenced_object(), ASTInputPort) and self.get_referenced_object().is_excitatory() - - def is_inhibitory(self) -> bool: - """ - Returns whether this symbol represents an input port with qualifier inhibitory. - :return: True if is inhibitory, otherwise False. - """ - return isinstance(self.get_referenced_object(), ASTInputPort) and self.get_referenced_object().is_inhibitory() - def is_state(self) -> bool: """ Returns whether this variable symbol has been declared in a state block. 
diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index d66318130..2f801febc 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -1474,7 +1474,7 @@ def is_delta_kernel(cls, kernel: ASTKernel) -> bool: return rhs_is_delta_kernel or rhs_is_multiplied_delta_kernel @classmethod - def get_input_port_by_name(cls, input_blocks: List[ASTInputBlock], port_name: str) -> ASTInputPort: + def get_input_port_by_name(cls, input_blocks: List[ASTInputBlock], port_name: str) -> Optional[ASTInputPort]: """ Get the input port given the port name :param input_block: block to be searched @@ -1483,15 +1483,9 @@ def get_input_port_by_name(cls, input_blocks: List[ASTInputBlock], port_name: st """ for input_block in input_blocks: for input_port in input_block.get_input_ports(): - if input_port.has_size_parameter(): - size_parameter = input_port.get_size_parameter() - if isinstance(size_parameter, ASTSimpleExpression): - size_parameter = size_parameter.get_numeric_literal() - port_name, port_index = port_name.split("_") - assert int(port_index) >= 0 - assert int(port_index) <= size_parameter if input_port.name == port_name: return input_port + return None @classmethod @@ -2462,52 +2456,23 @@ def get_unit_name(cls, variable: ASTVariable) -> str: return '' - @classmethod - def _find_port_in_dict(cls, rport_to_port_map: Dict[int, List[VariableSymbol]], port: VariableSymbol) -> int: - """ - Finds the corresponding "inhibitory" port for a given "excitatory" port and vice versa in the handed over map. 
- :param rport_to_port_map: map containing NESTML port names for the rport - :param port: port to be searched - :return: key value in the map if the port is found, else None - """ - for key, value in rport_to_port_map.items(): - if len(value) == 1: - if (port.is_excitatory() and value[0].is_inhibitory() and not value[0].is_excitatory()) \ - or (port.is_inhibitory() and value[0].is_excitatory() and not value[0].is_inhibitory()): - if port.has_vector_parameter(): - if cls.get_numeric_vector_size(port) == cls.get_numeric_vector_size(value[0]): - return key - else: - return key - return None - @classmethod def get_spike_input_ports_in_pairs(cls, neuron: ASTModel) -> Dict[int, List[VariableSymbol]]: """ - Returns a list of spike input ports in pairs in case of input port qualifiers. + Returns a list of spike input ports in pairs. The result of this function is used to construct a vector that provides a mapping to the NESTML spike buffer index. The vector looks like below: + .. code-block:: - [ {AMPA_SPIKES, GABA_SPIKES}, {NMDA_SPIKES, -1} ] + [ AMPA_SPIKES, GABA_SPIKES, NMDA_SPIKES ] where the vector index is the NEST rport number. The value is a tuple containing the NESTML index(es) to the spike buffer. - In case if the rport is shared between two NESTML buffers, the vector element contains the tuple of the form (excitatory_port_index, inhibitory_port_index). Otherwise, the tuple is of the form (spike_port_index, -1). """ rport_to_port_map = {} rport = 0 + for port in neuron.get_spike_input_ports(): - if port.is_excitatory() and port.is_inhibitory(): - rport_to_port_map[rport] = [port] - rport += cls.get_numeric_vector_size(port) if port.has_vector_parameter() else 1 - else: - key = cls._find_port_in_dict(rport_to_port_map, port) - if key is not None: - # The corresponding spiking input pair is found. 
- # Add the port to the list and update rport - rport_to_port_map[key].append(port) - rport += cls.get_numeric_vector_size(port) if port.has_vector_parameter() else 1 - else: - # New input port. Retain the same rport number until the corresponding input port pair is found. - rport_to_port_map[rport] = [port] + rport_to_port_map[rport] = [port] + rport += cls.get_numeric_vector_size(port) if port.has_vector_parameter() else 1 return rport_to_port_map diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index bb8914dd6..8abe09c98 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -56,7 +56,6 @@ class MessageCode(Enum): ARG_NOT_SPIKE_INPUT = 20 NUMERATOR_NOT_ONE = 21 ORDER_NOT_DECLARED = 22 - CONTINUOUS_INPUT_PORT_WITH_QUALIFIERS = 23 BLOCK_NOT_CORRECT = 24 VARIABLE_NOT_IN_STATE_BLOCK = 25 WRONG_NUMBER_OF_ARGS = 26 @@ -139,6 +138,7 @@ class MessageCode(Enum): TIMESTEP_FUNCTION_LEGALLY_USED = 113 RANDOM_FUNCTIONS_LEGALLY_USED = 113 EXPONENT_MUST_BE_INTEGER = 114 + SPIKING_INPUT_PORT_NAME_ILLEGALLY_USED = 115 class Messages: @@ -537,23 +537,6 @@ def get_order_not_declared(cls, lhs): message = 'Order of differential equation for %s is not declared!' % lhs return MessageCode.ORDER_NOT_DECLARED, message - @classmethod - def get_continuous_input_port_specified(cls, name, keyword): - """ - Indicates that the continuous time input port has been specified with an `inputQualifier` keyword. - :param name: the name of the buffer - :type name: str - :param keyword: the keyword - :type keyword: list(str) - :return: a message - :rtype: (MessageCode,str) - """ - assert (name is not None and isinstance(name, str)), \ - '(PyNestML.Utils.Message) Not a string provided (%s)!' % name - message = 'Continuous time input port \'%s\' specified with type keywords (%s)!' 
% ( - name, keyword) - return MessageCode.CONTINUOUS_INPUT_PORT_WITH_QUALIFIERS, message - @classmethod def get_block_not_defined_correctly(cls, block, missing): """ @@ -1386,3 +1369,8 @@ def get_non_constant_exponent(cls) -> Tuple[MessageCode, str]: def get_random_functions_legally_used(cls, name): message = "The function '" + name + "' can only be used in the update, onReceive, or onCondition blocks." return MessageCode.RANDOM_FUNCTIONS_LEGALLY_USED, message + + @classmethod + def get_spike_input_port_appears_outside_equation_rhs_and_event_handler(cls, name): + message = "Spiking input port names (in this case '" + name + "') can only be used in the right-hand side of equations or in the definition of an onReceive block!" + return MessageCode.SPIKING_INPUT_PORT_NAME_ILLEGALLY_USED, message diff --git a/pynestml/utils/model_parser.py b/pynestml/utils/model_parser.py index d11618119..001934102 100644 --- a/pynestml/utils/model_parser.py +++ b/pynestml/utils/model_parser.py @@ -48,7 +48,6 @@ from pynestml.meta_model.ast_inline_expression import ASTInlineExpression from pynestml.meta_model.ast_input_block import ASTInputBlock from pynestml.meta_model.ast_input_port import ASTInputPort -from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator from pynestml.meta_model.ast_nestml_compilation_unit import ASTNestMLCompilationUnit from pynestml.meta_model.ast_model import ASTModel @@ -323,14 +322,6 @@ def parse_input_port(cls, string): ret.accept(ASTHigherOrderVisitor(log_set_added_source_position)) return ret - @classmethod - def parse_input_qualifier(cls, string): - # type: (str) -> ASTInputQualifier - (builder, parser) = tokenize(string) - ret = builder.visit(parser.inputQualifier()) - ret.accept(ASTHigherOrderVisitor(log_set_added_source_position)) - return ret - @classmethod def parse_logic_operator(cls, string): # type: (str) -> ASTLogicalOperator diff --git 
a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index bfc4dd902..1e23c75be 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -610,13 +610,9 @@ def visitSpikeInputPort(self, ctx): size_parameter = None if ctx.sizeParameter is not None: size_parameter = self.visit(ctx.sizeParameter) - input_qualifiers = [] - if ctx.inputQualifier() is not None: - for qual in ctx.inputQualifier(): - input_qualifiers.append(self.visit(qual)) signal_type = PortSignalType.SPIKE ret = ASTNodeFactory.create_ast_input_port(name=name, size_parameter=size_parameter, data_type=None, - input_qualifiers=input_qualifiers, signal_type=signal_type, + signal_type=signal_type, source_position=create_source_pos(ctx)) update_node_comments(ret, self.__comments.visit(ctx)) return ret @@ -629,18 +625,11 @@ def visitContinuousInputPort(self, ctx): data_type = self.visit(ctx.dataType()) if ctx.dataType() is not None else None signal_type = PortSignalType.CONTINUOUS ret = ASTNodeFactory.create_ast_input_port(name=name, size_parameter=size_parameter, data_type=data_type, - input_qualifiers=None, signal_type=signal_type, + signal_type=signal_type, source_position=create_source_pos(ctx)) update_node_comments(ret, self.__comments.visit(ctx)) return ret - # Visit a parse tree produced by PyNESTMLParser#inputQualifier. - def visitInputQualifier(self, ctx): - is_inhibitory = True if ctx.isInhibitory is not None else False - is_excitatory = True if ctx.isExcitatory is not None else False - return ASTNodeFactory.create_ast_input_qualifier(is_inhibitory=is_inhibitory, is_excitatory=is_excitatory, - source_position=create_source_pos(ctx)) - # Visit a parse tree produced by PyNESTMLParser#outputBuffer. 
def visitOutputBlock(self, ctx): source_pos = create_source_pos(ctx) diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index bc85d4cdd..2d7b4e1a0 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -585,9 +585,6 @@ def visit_input_port(self, node): else: node.get_datatype().update_scope(node.get_scope()) - for qual in node.get_input_qualifiers(): - qual.update_scope(node.get_scope()) - def endvisit_input_port(self, node): type_symbol = PredefinedTypes.get_type("s")**-1 if node.is_continuous() and node.has_datatype(): diff --git a/pynestml/visitors/ast_visitor.py b/pynestml/visitors/ast_visitor.py index c2b4dab01..f4711da81 100644 --- a/pynestml/visitors/ast_visitor.py +++ b/pynestml/visitors/ast_visitor.py @@ -39,7 +39,6 @@ from pynestml.meta_model.ast_if_stmt import ASTIfStmt from pynestml.meta_model.ast_input_block import ASTInputBlock from pynestml.meta_model.ast_input_port import ASTInputPort -from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_kernel import ASTKernel from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator from pynestml.meta_model.ast_nestml_compilation_unit import ASTNestMLCompilationUnit @@ -371,14 +370,6 @@ def visit_input_port(self, node): """ return - def visit_input_qualifier(self, node): - """ - Used to visit a single input port qualifier. - :param node: a single input port qualifier node. - :type node: ASTInputQualifier - """ - return - def visit_arithmetic_operator(self, node): """ Used to visit a single arithmetic operator. @@ -687,14 +678,6 @@ def endvisit_input_port(self, node): """ return - def endvisit_input_qualifier(self, node): - """ - Used to endvisit a single input port qualifier. - :param node: a single input port qualifier node. 
- :type node: ASTInputQualifer - """ - return - def endvisit_arithmetic_operator(self, node): """ Used to endvisit a single arithmetic operator. @@ -799,9 +782,6 @@ def visit(self, node: ASTNode): if isinstance(node, ASTInputPort): self.visit_input_port(node) return - if isinstance(node, ASTInputQualifier): - self.visit_input_qualifier(node) - return if isinstance(node, ASTLogicalOperator): self.visit_logical_operator(node) return @@ -930,9 +910,6 @@ def traverse(self, node): if isinstance(node, ASTInputPort): self.traverse_input_port(node) return - if isinstance(node, ASTInputQualifier): - self.traverse_input_qualifier(node) - return if isinstance(node, ASTLogicalOperator): self.traverse_logical_operator(node) return @@ -1061,9 +1038,6 @@ def endvisit(self, node): if isinstance(node, ASTInputPort): self.endvisit_input_port(node) return - if isinstance(node, ASTInputQualifier): - self.endvisit_input_qualifier(node) - return if isinstance(node, ASTLogicalOperator): self.endvisit_logical_operator(node) return @@ -1248,14 +1222,6 @@ def traverse_input_block(self, node): for sub_node in node.get_input_ports(): sub_node.accept(self.get_real_self()) - def traverse_input_port(self, node): - if node.get_input_qualifiers() is not None: - for sub_node in node.get_input_qualifiers(): - sub_node.accept(self.get_real_self()) - - def traverse_input_qualifier(self, node): - return - def traverse_logical_operator(self, node): return @@ -1280,6 +1246,9 @@ def traverse_inline_expression(self, node): if node.get_expression() is not None: node.get_expression().accept(self.get_real_self()) + def traverse_input_port(self, node): + return + def traverse_kernel(self, node): for var, expr in zip(node.get_variables(), node.get_expressions()): var.accept(self.get_real_self()) diff --git a/tests/invalid/CoCoInputPortsIllegal.nestml b/tests/invalid/CoCoInputPortsIllegal.nestml new file mode 100644 index 000000000..7e4021524 --- /dev/null +++ b/tests/invalid/CoCoInputPortsIllegal.nestml @@ -0,0 
+1,50 @@ +""" +CoCoInputPortsIllegal.nestml +############################ + + +Description ++++++++++++ + +This test is used to test the declaration of both vectorized and non-vectorized input ports. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model input_ports_illegal_neuron: + state: + bar pA = 0 pA + foo_spikes pA = 0 pA + my_spikes_ip pA = 0 pA + + input: + AMPA_spikes <- spike + GABA_spikes <- spike + NMDA_spikes <- spike + foo[2] <- spike + my_spikes[3] <- spike + my_spikes2[3] <- spike + I_stim pA <- continuous + + update: + bar += (NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes) * (pA * s) + foo_spikes += (foo[0] + 5.5 * foo[1]) * (pA * s) + my_spikes_ip += (my_spikes[0] + my_spikes[1] - my_spikes2[1]) * (pA * s) diff --git a/tests/invalid/CoCoInputPortsIllegal2.nestml b/tests/invalid/CoCoInputPortsIllegal2.nestml new file mode 100644 index 000000000..a1ee1e3db --- /dev/null +++ b/tests/invalid/CoCoInputPortsIllegal2.nestml @@ -0,0 +1,50 @@ +""" +CoCoInputPortsIllegal.nestml +############################ + + +Description ++++++++++++ + +This test is used to test the declaration of both vectorized and non-vectorized input ports. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. 
+ +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model input_ports_illegal_neuron: + state: + bar pA = 0 pA + foo_spikes pA = 0 pA + my_spikes_ip pA = 0 pA + + input: + AMPA_spikes <- spike + GABA_spikes <- spike + NMDA_spikes <- spike + foo[2] <- spike + my_spikes[3] <- spike + my_spikes2[3] <- spike + I_stim pA <- continuous + + onReceive(AMPA_spikes): + bar += (NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes) * (pA * s) + foo_spikes += (foo[0] + 5.5 * foo[1]) * (pA * s) + my_spikes_ip += (my_spikes[0] + my_spikes[1] - my_spikes2[1]) * (pA * s) diff --git a/tests/nest_tests/test_input_ports.py b/tests/nest_tests/test_input_ports.py new file mode 100644 index 000000000..2604b4dc2 --- /dev/null +++ b/tests/nest_tests/test_input_ports.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# +# test_input_ports.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +import os +import pytest + +import nest + +from pynestml.frontend.pynestml_frontend import generate_nest_target +from pynestml.codegeneration.nest_tools import NESTTools + + +class TestInputPorts: + """ + Tests the different kind of input ports supported in NESTML. + """ + + @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") + def test_input_ports(self): + input_path = os.path.join(os.path.realpath(os.path.join( + os.path.dirname(__file__), "resources", "input_ports.nestml"))) + target_path = "target" + logging_level = "INFO" + module_name = "nestmlmodule" + suffix = "_nestml" + + generate_nest_target(input_path, + target_path=target_path, + logging_level=logging_level, + module_name=module_name, + suffix=suffix) + nest.ResetKernel() + nest.Install(module_name) + + neuron = nest.Create("input_ports_nestml") + + # List of receptor types for the spiking input ports + receptor_types = nest.GetStatus(neuron, "receptor_types")[0] + + spike_times = [ + [10., 44.], # NMDA_SPIKES + [12., 42.], # AMPA_SPIKES + [14., 40.], # GABA_SPIKES + [16., 38.], # FOO_0 + [18., 36.], # FOO_1 + [20., 34.], # MY_SPIKES_0 + [22., 32.], # MY_SPIKES_1 + [24., 30.], # MY_SPIKES2_1 + ] + sgs = nest.Create('spike_generator', len(spike_times)) + for i, sg in enumerate(sgs): + sg.spike_times = spike_times[i] + + nest.Connect(sgs[0], neuron, syn_spec={'receptor_type': receptor_types["NMDA_SPIKES"], 'weight': -1.0, 'delay': 1.0}) + nest.Connect(sgs[1], neuron, syn_spec={'receptor_type': receptor_types["AMPA_SPIKES"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[2], neuron, syn_spec={'receptor_type': receptor_types["GABA_SPIKES"], 'weight': -1.0, 'delay': 1.0}) + nest.Connect(sgs[3], neuron, syn_spec={'receptor_type': receptor_types["FOO_0"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[4], neuron, 
syn_spec={'receptor_type': receptor_types["FOO_1"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[5], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES_0"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[6], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES_1"], 'weight': 2.0, 'delay': 1.0}) + nest.Connect(sgs[7], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES2_1"], 'weight': -3.0, 'delay': 1.0}) + + mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "my_spikes_ip"]}) + nest.Connect(mm, neuron) + + nest.Simulate(50.) + + events = mm.get("events") + connections = nest.GetConnections(target=neuron) + + # corresponds to ``bar += NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes`` in the update block + assert events["bar"][-1] == len(spike_times[0]) * abs(connections.get("weight")[0]) \ + + 2 * len(spike_times[1]) * abs(connections.get("weight")[1]) \ + - 3 * len(spike_times[2]) * abs(connections.get("weight")[2]) + + # corresponds to ``foo_spikes += foo[0] + 5.5 * foo[1]`` in the update block + assert events["foo_spikes"][-1] == len(spike_times[3]) * abs(connections.get("weight")[3]) \ + + 5.5 * len(spike_times[4]) * abs(connections.get("weight")[4]) + + # corresponds to ``my_spikes_ip += my_spikes[0] + my_spikes[1] - my_spikes2[1]`` in the update block + assert events["my_spikes_ip"][-1] == len(spike_times[5]) * abs(connections.get("weight")[5]) \ + + len(spike_times[6]) * abs(connections.get("weight")[6]) \ + - len(spike_times[7]) * abs(connections.get("weight")[7]) + + @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") + def test_input_ports_in_loop(self): + input_path = os.path.join(os.path.realpath(os.path.join( + os.path.dirname(__file__), "resources", "input_ports_in_loop.nestml"))) + target_path = "target" + logging_level = "INFO" + module_name = "nestmlmodule" + suffix = "_nestml" + + generate_nest_target(input_path, + 
target_path=target_path, + logging_level=logging_level, + module_name=module_name, + suffix=suffix) + nest.ResetKernel() + nest.Install(module_name) + + neuron = nest.Create("input_ports_loop_nestml") + + # List of receptor types for the spiking input ports + receptor_types = nest.GetStatus(neuron, "receptor_types")[0] + + spike_times = [ + [10., 39.], # NMDA_SPIKES + [12., 37.], # FOO_0 + [14., 35.], # FOO_1 + [16., 33.], # SPIKE_BUF_0 + [18., 31.], # SPIKE_BUF_1 + [20., 29.], # SPIKE_BUF_2 + [22., 27.], # SPIKE_BUF_3 + [24., 25.], # SPIKE_BUF_4 + ] + sgs = nest.Create('spike_generator', len(spike_times)) + for i, sg in enumerate(sgs): + sg.spike_times = spike_times[i] + + nest.Connect(sgs[0], neuron, + syn_spec={'receptor_type': receptor_types["NMDA_SPIKES"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[1], neuron, + syn_spec={'receptor_type': receptor_types["FOO_0"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[2], neuron, + syn_spec={'receptor_type': receptor_types["FOO_1"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[3], neuron, syn_spec={'receptor_type': receptor_types["SPIKE_BUF_0"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[4], neuron, syn_spec={'receptor_type': receptor_types["SPIKE_BUF_1"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[5], neuron, + syn_spec={'receptor_type': receptor_types["SPIKE_BUF_2"], 'weight': 1.0, 'delay': 1.0}) + nest.Connect(sgs[6], neuron, + syn_spec={'receptor_type': receptor_types["SPIKE_BUF_3"], 'weight': 2.0, 'delay': 1.0}) + nest.Connect(sgs[7], neuron, + syn_spec={'receptor_type': receptor_types["SPIKE_BUF_4"], 'weight': 3.0, 'delay': 1.0}) + + mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "MY_SPIKES_IP_2", "MY_SPIKES_IP_3", "MY_SPIKES_IP_4", "MY_SPIKES_IP_5", "MY_SPIKES_IP_6"]}) + nest.Connect(mm, neuron) + + nest.Simulate(41.) 
+ + events = mm.get("events") + assert events["bar"][-1] == 2.0 + assert events["foo_spikes"][-1] == 25.0 + assert events["MY_SPIKES_IP_2"][-1] == 2.0 + assert events["MY_SPIKES_IP_5"][-1] == 4.0 + assert events["MY_SPIKES_IP_6"][-1] == 6.0 From be93fa3799474e697be1a4505c0d9f477d7bbff9 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Wed, 30 Oct 2024 12:52:02 +0100 Subject: [PATCH 12/68] add attributes to spiking input ports --- models/neurons/iaf_psc_exp_neuron.nestml | 9 +- ...only_in_equation_rhs_and_event_handlers.py | 21 +- .../codegeneration/printers/nestml_printer.py | 8 + .../printers/nestml_variable_printer.py | 6 +- pynestml/generated/PyNestMLParser.py | 595 +++++++++--------- pynestml/grammars/PyNestMLParser.g4 | 4 +- pynestml/meta_model/ast_input_port.py | 20 +- pynestml/meta_model/ast_node_factory.py | 9 +- pynestml/meta_model/ast_variable.py | 11 + pynestml/symbols/variable_symbol.py | 5 +- pynestml/utils/ast_utils.py | 11 + pynestml/utils/messages.py | 8 +- pynestml/utils/type_caster.py | 1 + pynestml/visitors/ast_builder_visitor.py | 10 +- pynestml/visitors/ast_symbol_table_visitor.py | 67 +- pynestml/visitors/ast_variable_visitor.py | 13 +- ...oInputPortsIllegalMissingAttribute.nestml} | 19 +- tests/nest_tests/nest_integration_test.py | 121 ++-- tests/test_cocos.py | 21 +- ...CoInputPortsIllegalMissingAttribute.nestml | 40 ++ .../CoCoInputPortsLegal.nestml} | 2 +- 21 files changed, 588 insertions(+), 413 deletions(-) rename tests/{valid/CoCoValueAssignedToInputPort.nestml => invalid/CoCoInputPortsIllegalMissingAttribute.nestml} (65%) create mode 100644 tests/valid/CoCoInputPortsIllegalMissingAttribute.nestml rename tests/{invalid/CoCoInputPortsIllegal2.nestml => valid/CoCoInputPortsLegal.nestml} (97%) diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index 2b2ba8d6e..ce3b6ade7 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -80,7 +80,7 @@ 
model iaf_psc_exp_neuron: I_e pA = 0 pA input: - spike_in_port <- spike + spike_in_port <- spike(weight real) I_stim pA <- continuous output: @@ -88,10 +88,11 @@ model iaf_psc_exp_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - if spike_in_port < 0: - I_syn_inh += spike_in_port * pA * s + # weight is a real number, and here interpreted as 1 corresponding to 1 pA + if spike_in_port.weight > 0: + I_syn_exc += 0 pA # spike_in_port.weight * pA else: - I_syn_exc += spike_in_port * pA * s + I_syn_inh -= 0 pA # spike_in_port.weight * pA update: if refr_t > 0 ms: diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index 130c3f7ff..0e7ff5a7b 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -21,10 +21,12 @@ from typing import Optional from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_inline_expression import ASTInlineExpression from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_model import ASTModel from pynestml.meta_model.ast_ode_equation import ASTOdeEquation from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock +from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbols.symbol import SymbolKind from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import Logger, LoggingLevel @@ -51,23 +53,36 @@ def check_co_co(cls, node): class SpikeInputPortsAppearOnlyInEquationRHSAndEventHandlersVisitor(ASTVisitor): - def visit_variable(self, node): + def visit_variable(self, node: ASTVariable): in_port: Optional[ASTInputPort] = 
ASTUtils.get_input_port_by_name(self.model_.get_input_blocks(), node.get_name()) + + # only check spiking input ports if in_port is not None and in_port.is_spike(): + if in_port.parameters and not node.attribute: + # input port has parameters (for instance, ``x`` in ``foo <- spike(x real)`` but the variable reference is missing an attribute (``foo`` instead of ``foo.x``) + code, message = Messages.get_spike_input_port_attribute_missing(node.get_name()) + Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + log_level=LoggingLevel.ERROR) + _node = node while _node: _node = _node.get_parent() - if isinstance(_node, ASTOnReceiveBlock) and _node.port_name == node.get_name(): - # spike input port was used inside an ``onReceive(spike_in_port)`` block; everything is OK + if isinstance(_node, ASTOnReceiveBlock): + # spike input port was used inside an ``onReceive`` block; everything is OK return if isinstance(_node, ASTOdeEquation): # spike input port was used inside the rhs of an equation; everything is OK return + if isinstance(_node, ASTInlineExpression): + # spike input port was used inside the rhs of an inline expression; everything is OK + return + if isinstance(_node, ASTModel): # we reached the top-level block without running into an ``update`` block on the way --> incorrect usage of the function code, message = Messages.get_spike_input_port_appears_outside_equation_rhs_and_event_handler(node.get_name()) Logger.log_message(code=code, message=message, error_position=node.get_source_position(), log_level=LoggingLevel.ERROR) + diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index 8853d2660..c077ae677 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -363,6 +363,11 @@ def print_input_port(self, node: ASTInputPort) -> str: ret += " <- " if node.is_spike(): ret += "spike" + if 
node.get_parameters(): + ret += "(" + for parameter in node.get_parameters(): + ret += self.print_parameter(parameter) + ret += ")" else: ret += "continuous" ret += print_sl_comment(node.in_comment) + "\n" @@ -544,6 +549,9 @@ def print_variable(self, node: ASTVariable): for i in range(1, node.differential_order + 1): ret += "'" + if node.get_attribute(): + ret += "." + node.get_attribute() + return ret def print_while_stmt(self, node: ASTWhileStmt) -> str: diff --git a/pynestml/codegeneration/printers/nestml_variable_printer.py b/pynestml/codegeneration/printers/nestml_variable_printer.py index 0e7ea9741..07b23203c 100644 --- a/pynestml/codegeneration/printers/nestml_variable_printer.py +++ b/pynestml/codegeneration/printers/nestml_variable_printer.py @@ -34,4 +34,8 @@ def print_variable(self, node: ASTVariable) -> str: :param node: the node to print :return: string representation """ - return node.get_complete_name() + s = node.get_complete_name() + if node.get_attribute(): + s += "." + node.get_attribute() + + return s diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 8aa006559..f01e259f6 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,89,620,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,89,618,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -52,194 +52,193 @@ def serializedATN(): 35,10,35,12,35,483,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36, 1,36,4,36,494,8,36,11,36,12,36,495,1,36,1,36,1,37,1,37,1,37,1,37, 1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,511,8,38,11,38,12,38,512, - 1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,3,39,523,8,39,1,39,1,39, - 
1,39,1,39,5,39,529,8,39,10,39,12,39,532,9,39,3,39,534,8,39,1,39, - 3,39,537,8,39,4,39,539,8,39,11,39,12,39,540,1,39,1,39,1,40,1,40, - 1,40,1,40,1,40,3,40,550,8,40,1,40,1,40,1,40,1,40,1,41,1,41,1,41, - 1,41,1,41,3,41,561,8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42, - 1,42,1,42,1,42,3,42,574,8,42,1,42,1,42,1,42,1,42,5,42,580,8,42,10, - 42,12,42,583,9,42,3,42,585,8,42,1,42,3,42,588,8,42,1,42,1,42,1,42, - 1,43,1,43,1,43,1,43,1,43,1,43,5,43,599,8,43,10,43,12,43,602,9,43, - 3,43,604,8,43,1,43,1,43,3,43,608,8,43,1,43,1,43,1,43,1,44,1,44,1, - 44,1,45,1,45,1,45,1,45,1,45,0,2,2,6,46,0,2,4,6,8,10,12,14,16,18, - 20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62, - 64,66,68,70,72,74,76,78,80,82,84,86,88,90,0,4,2,0,49,49,73,73,1, - 0,88,89,1,0,32,34,3,0,25,25,85,86,88,89,681,0,98,1,0,0,0,2,109,1, - 0,0,0,4,126,1,0,0,0,6,141,1,0,0,0,8,191,1,0,0,0,10,196,1,0,0,0,12, - 203,1,0,0,0,14,212,1,0,0,0,16,216,1,0,0,0,18,218,1,0,0,0,20,235, - 1,0,0,0,22,250,1,0,0,0,24,268,1,0,0,0,26,282,1,0,0,0,28,301,1,0, - 0,0,30,312,1,0,0,0,32,317,1,0,0,0,34,323,1,0,0,0,36,327,1,0,0,0, - 38,338,1,0,0,0,40,368,1,0,0,0,42,378,1,0,0,0,44,380,1,0,0,0,46,382, - 1,0,0,0,48,384,1,0,0,0,50,388,1,0,0,0,52,398,1,0,0,0,54,403,1,0, - 0,0,56,408,1,0,0,0,58,412,1,0,0,0,60,426,1,0,0,0,62,433,1,0,0,0, - 64,439,1,0,0,0,66,443,1,0,0,0,68,460,1,0,0,0,70,474,1,0,0,0,72,488, - 1,0,0,0,74,499,1,0,0,0,76,503,1,0,0,0,78,516,1,0,0,0,80,544,1,0, - 0,0,82,555,1,0,0,0,84,567,1,0,0,0,86,592,1,0,0,0,88,612,1,0,0,0, - 90,615,1,0,0,0,92,99,5,10,0,0,93,99,5,11,0,0,94,99,5,12,0,0,95,99, - 5,13,0,0,96,99,5,14,0,0,97,99,3,2,1,0,98,92,1,0,0,0,98,93,1,0,0, - 0,98,94,1,0,0,0,98,95,1,0,0,0,98,96,1,0,0,0,98,97,1,0,0,0,99,1,1, - 0,0,0,100,101,6,1,-1,0,101,102,5,47,0,0,102,103,3,2,1,0,103,104, - 5,48,0,0,104,110,1,0,0,0,105,106,5,88,0,0,106,107,5,77,0,0,107,110, - 3,2,1,2,108,110,5,87,0,0,109,100,1,0,0,0,109,105,1,0,0,0,109,108, - 1,0,0,0,110,122,1,0,0,0,111,114,10,3,0,0,112,115,5,75,0,0,113,115, - 
5,77,0,0,114,112,1,0,0,0,114,113,1,0,0,0,115,116,1,0,0,0,116,121, - 3,2,1,4,117,118,10,4,0,0,118,119,5,76,0,0,119,121,3,4,2,0,120,111, - 1,0,0,0,120,117,1,0,0,0,121,124,1,0,0,0,122,120,1,0,0,0,122,123, - 1,0,0,0,123,3,1,0,0,0,124,122,1,0,0,0,125,127,7,0,0,0,126,125,1, - 0,0,0,126,127,1,0,0,0,127,128,1,0,0,0,128,129,5,88,0,0,129,5,1,0, - 0,0,130,131,6,3,-1,0,131,132,5,47,0,0,132,133,3,6,3,0,133,134,5, - 48,0,0,134,142,1,0,0,0,135,136,3,10,5,0,136,137,3,6,3,9,137,142, - 1,0,0,0,138,139,5,28,0,0,139,142,3,6,3,4,140,142,3,8,4,0,141,130, - 1,0,0,0,141,135,1,0,0,0,141,138,1,0,0,0,141,140,1,0,0,0,142,179, - 1,0,0,0,143,144,10,10,0,0,144,145,5,76,0,0,145,178,3,6,3,10,146, - 150,10,8,0,0,147,151,5,75,0,0,148,151,5,77,0,0,149,151,5,78,0,0, - 150,147,1,0,0,0,150,148,1,0,0,0,150,149,1,0,0,0,151,152,1,0,0,0, - 152,178,3,6,3,9,153,156,10,7,0,0,154,157,5,49,0,0,155,157,5,73,0, - 0,156,154,1,0,0,0,156,155,1,0,0,0,157,158,1,0,0,0,158,178,3,6,3, - 8,159,160,10,6,0,0,160,161,3,12,6,0,161,162,3,6,3,7,162,178,1,0, - 0,0,163,164,10,5,0,0,164,165,3,14,7,0,165,166,3,6,3,6,166,178,1, - 0,0,0,167,168,10,3,0,0,168,169,3,16,8,0,169,170,3,6,3,4,170,178, - 1,0,0,0,171,172,10,2,0,0,172,173,5,79,0,0,173,174,3,6,3,0,174,175, - 5,80,0,0,175,176,3,6,3,3,176,178,1,0,0,0,177,143,1,0,0,0,177,146, - 1,0,0,0,177,153,1,0,0,0,177,159,1,0,0,0,177,163,1,0,0,0,177,167, - 1,0,0,0,177,171,1,0,0,0,178,181,1,0,0,0,179,177,1,0,0,0,179,180, - 1,0,0,0,180,7,1,0,0,0,181,179,1,0,0,0,182,192,3,20,10,0,183,192, - 5,85,0,0,184,186,7,1,0,0,185,187,3,18,9,0,186,185,1,0,0,0,186,187, - 1,0,0,0,187,192,1,0,0,0,188,192,5,86,0,0,189,192,5,25,0,0,190,192, - 3,18,9,0,191,182,1,0,0,0,191,183,1,0,0,0,191,184,1,0,0,0,191,188, - 1,0,0,0,191,189,1,0,0,0,191,190,1,0,0,0,192,9,1,0,0,0,193,197,5, - 49,0,0,194,197,5,73,0,0,195,197,5,50,0,0,196,193,1,0,0,0,196,194, - 1,0,0,0,196,195,1,0,0,0,197,11,1,0,0,0,198,204,5,53,0,0,199,204, - 5,52,0,0,200,204,5,51,0,0,201,204,5,59,0,0,202,204,5,60,0,0,203, - 
198,1,0,0,0,203,199,1,0,0,0,203,200,1,0,0,0,203,201,1,0,0,0,203, - 202,1,0,0,0,204,13,1,0,0,0,205,213,5,61,0,0,206,213,5,63,0,0,207, - 213,5,68,0,0,208,213,5,69,0,0,209,213,5,70,0,0,210,213,5,71,0,0, - 211,213,5,62,0,0,212,205,1,0,0,0,212,206,1,0,0,0,212,207,1,0,0,0, - 212,208,1,0,0,0,212,209,1,0,0,0,212,210,1,0,0,0,212,211,1,0,0,0, - 213,15,1,0,0,0,214,217,5,26,0,0,215,217,5,27,0,0,216,214,1,0,0,0, - 216,215,1,0,0,0,217,17,1,0,0,0,218,223,5,87,0,0,219,220,5,54,0,0, - 220,221,3,6,3,0,221,222,5,56,0,0,222,224,1,0,0,0,223,219,1,0,0,0, - 223,224,1,0,0,0,224,228,1,0,0,0,225,227,5,83,0,0,226,225,1,0,0,0, - 227,230,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,233,1,0,0,0, - 230,228,1,0,0,0,231,232,5,84,0,0,232,234,3,18,9,0,233,231,1,0,0, - 0,233,234,1,0,0,0,234,19,1,0,0,0,235,236,5,87,0,0,236,245,5,47,0, - 0,237,242,3,6,3,0,238,239,5,72,0,0,239,241,3,6,3,0,240,238,1,0,0, - 0,241,244,1,0,0,0,242,240,1,0,0,0,242,243,1,0,0,0,243,246,1,0,0, - 0,244,242,1,0,0,0,245,237,1,0,0,0,245,246,1,0,0,0,246,247,1,0,0, - 0,247,248,5,48,0,0,248,21,1,0,0,0,249,251,5,29,0,0,250,249,1,0,0, - 0,250,251,1,0,0,0,251,252,1,0,0,0,252,253,5,16,0,0,253,254,5,87, - 0,0,254,255,3,0,0,0,255,256,5,74,0,0,256,258,3,6,3,0,257,259,5,82, - 0,0,258,257,1,0,0,0,258,259,1,0,0,0,259,263,1,0,0,0,260,262,3,42, - 21,0,261,260,1,0,0,0,262,265,1,0,0,0,263,261,1,0,0,0,263,264,1,0, - 0,0,264,266,1,0,0,0,265,263,1,0,0,0,266,267,5,9,0,0,267,23,1,0,0, - 0,268,269,3,18,9,0,269,270,5,74,0,0,270,272,3,6,3,0,271,273,5,82, - 0,0,272,271,1,0,0,0,272,273,1,0,0,0,273,277,1,0,0,0,274,276,3,42, - 21,0,275,274,1,0,0,0,276,279,1,0,0,0,277,275,1,0,0,0,277,278,1,0, - 0,0,278,280,1,0,0,0,279,277,1,0,0,0,280,281,5,9,0,0,281,25,1,0,0, - 0,282,283,5,30,0,0,283,284,3,18,9,0,284,285,5,74,0,0,285,293,3,6, - 3,0,286,287,5,4,0,0,287,288,3,18,9,0,288,289,5,74,0,0,289,290,3, - 6,3,0,290,292,1,0,0,0,291,286,1,0,0,0,292,295,1,0,0,0,293,291,1, - 0,0,0,293,294,1,0,0,0,294,297,1,0,0,0,295,293,1,0,0,0,296,298,5, - 
82,0,0,297,296,1,0,0,0,297,298,1,0,0,0,298,299,1,0,0,0,299,300,5, - 9,0,0,300,27,1,0,0,0,301,302,5,9,0,0,302,304,5,1,0,0,303,305,3,30, - 15,0,304,303,1,0,0,0,305,306,1,0,0,0,306,304,1,0,0,0,306,307,1,0, - 0,0,307,308,1,0,0,0,308,309,5,2,0,0,309,29,1,0,0,0,310,313,3,34, - 17,0,311,313,3,32,16,0,312,310,1,0,0,0,312,311,1,0,0,0,313,31,1, - 0,0,0,314,318,3,50,25,0,315,318,3,58,29,0,316,318,3,60,30,0,317, - 314,1,0,0,0,317,315,1,0,0,0,317,316,1,0,0,0,318,33,1,0,0,0,319,324, - 3,36,18,0,320,324,3,20,10,0,321,324,3,38,19,0,322,324,3,48,24,0, - 323,319,1,0,0,0,323,320,1,0,0,0,323,321,1,0,0,0,323,322,1,0,0,0, - 324,325,1,0,0,0,325,326,5,9,0,0,326,35,1,0,0,0,327,333,3,18,9,0, - 328,334,5,74,0,0,329,334,5,64,0,0,330,334,5,65,0,0,331,334,5,66, - 0,0,332,334,5,67,0,0,333,328,1,0,0,0,333,329,1,0,0,0,333,330,1,0, - 0,0,333,331,1,0,0,0,333,332,1,0,0,0,334,335,1,0,0,0,335,336,3,6, - 3,0,336,37,1,0,0,0,337,339,5,29,0,0,338,337,1,0,0,0,338,339,1,0, - 0,0,339,341,1,0,0,0,340,342,5,16,0,0,341,340,1,0,0,0,341,342,1,0, - 0,0,342,343,1,0,0,0,343,348,3,18,9,0,344,345,5,72,0,0,345,347,3, - 18,9,0,346,344,1,0,0,0,347,350,1,0,0,0,348,346,1,0,0,0,348,349,1, - 0,0,0,349,351,1,0,0,0,350,348,1,0,0,0,351,354,3,0,0,0,352,353,5, - 74,0,0,353,355,3,6,3,0,354,352,1,0,0,0,354,355,1,0,0,0,355,360,1, - 0,0,0,356,357,5,57,0,0,357,358,3,6,3,0,358,359,5,58,0,0,359,361, - 1,0,0,0,360,356,1,0,0,0,360,361,1,0,0,0,361,365,1,0,0,0,362,364, - 3,42,21,0,363,362,1,0,0,0,364,367,1,0,0,0,365,363,1,0,0,0,365,366, - 1,0,0,0,366,39,1,0,0,0,367,365,1,0,0,0,368,369,3,38,19,0,369,370, - 5,9,0,0,370,41,1,0,0,0,371,379,5,43,0,0,372,379,5,44,0,0,373,374, - 5,45,0,0,374,375,3,44,22,0,375,376,5,81,0,0,376,377,3,46,23,0,377, - 379,1,0,0,0,378,371,1,0,0,0,378,372,1,0,0,0,378,373,1,0,0,0,379, - 43,1,0,0,0,380,381,5,87,0,0,381,45,1,0,0,0,382,383,5,87,0,0,383, - 47,1,0,0,0,384,386,5,17,0,0,385,387,3,6,3,0,386,385,1,0,0,0,386, - 387,1,0,0,0,387,49,1,0,0,0,388,392,3,52,26,0,389,391,3,54,27,0,390, - 
389,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,392,393,1,0,0,0,393, - 396,1,0,0,0,394,392,1,0,0,0,395,397,3,56,28,0,396,395,1,0,0,0,396, - 397,1,0,0,0,397,51,1,0,0,0,398,399,5,18,0,0,399,400,3,6,3,0,400, - 401,5,80,0,0,401,402,3,28,14,0,402,53,1,0,0,0,403,404,5,19,0,0,404, - 405,3,6,3,0,405,406,5,80,0,0,406,407,3,28,14,0,407,55,1,0,0,0,408, - 409,5,20,0,0,409,410,5,80,0,0,410,411,3,28,14,0,411,57,1,0,0,0,412, - 413,5,21,0,0,413,414,5,87,0,0,414,415,5,23,0,0,415,416,3,6,3,0,416, - 417,5,46,0,0,417,418,3,6,3,0,418,420,5,24,0,0,419,421,5,73,0,0,420, - 419,1,0,0,0,420,421,1,0,0,0,421,422,1,0,0,0,422,423,7,1,0,0,423, - 424,5,80,0,0,424,425,3,28,14,0,425,59,1,0,0,0,426,427,5,22,0,0,427, - 428,3,6,3,0,428,429,5,80,0,0,429,430,3,28,14,0,430,61,1,0,0,0,431, - 434,3,64,32,0,432,434,5,9,0,0,433,431,1,0,0,0,433,432,1,0,0,0,434, - 435,1,0,0,0,435,433,1,0,0,0,435,436,1,0,0,0,436,437,1,0,0,0,437, - 438,5,0,0,1,438,63,1,0,0,0,439,440,5,31,0,0,440,441,5,87,0,0,441, - 442,3,66,33,0,442,65,1,0,0,0,443,444,5,80,0,0,444,445,5,9,0,0,445, - 454,5,1,0,0,446,455,3,72,36,0,447,455,3,76,38,0,448,455,3,78,39, - 0,449,455,3,84,42,0,450,455,3,86,43,0,451,455,3,68,34,0,452,455, - 3,70,35,0,453,455,3,74,37,0,454,446,1,0,0,0,454,447,1,0,0,0,454, - 448,1,0,0,0,454,449,1,0,0,0,454,450,1,0,0,0,454,451,1,0,0,0,454, - 452,1,0,0,0,454,453,1,0,0,0,455,456,1,0,0,0,456,454,1,0,0,0,456, - 457,1,0,0,0,457,458,1,0,0,0,458,459,5,2,0,0,459,67,1,0,0,0,460,461, - 5,40,0,0,461,462,5,47,0,0,462,467,5,87,0,0,463,464,5,72,0,0,464, - 466,3,90,45,0,465,463,1,0,0,0,466,469,1,0,0,0,467,465,1,0,0,0,467, - 468,1,0,0,0,468,470,1,0,0,0,469,467,1,0,0,0,470,471,5,48,0,0,471, - 472,5,80,0,0,472,473,3,28,14,0,473,69,1,0,0,0,474,475,5,41,0,0,475, - 476,5,47,0,0,476,481,3,6,3,0,477,478,5,72,0,0,478,480,3,90,45,0, - 479,477,1,0,0,0,480,483,1,0,0,0,481,479,1,0,0,0,481,482,1,0,0,0, - 482,484,1,0,0,0,483,481,1,0,0,0,484,485,5,48,0,0,485,486,5,80,0, - 0,486,487,3,28,14,0,487,71,1,0,0,0,488,489,7,2,0,0,489,490,5,80, - 
0,0,490,491,5,9,0,0,491,493,5,1,0,0,492,494,3,40,20,0,493,492,1, - 0,0,0,494,495,1,0,0,0,495,493,1,0,0,0,495,496,1,0,0,0,496,497,1, - 0,0,0,497,498,5,2,0,0,498,73,1,0,0,0,499,500,5,35,0,0,500,501,5, - 80,0,0,501,502,3,28,14,0,502,75,1,0,0,0,503,504,5,36,0,0,504,505, - 5,80,0,0,505,506,5,9,0,0,506,510,5,1,0,0,507,511,3,22,11,0,508,511, - 3,24,12,0,509,511,3,26,13,0,510,507,1,0,0,0,510,508,1,0,0,0,510, - 509,1,0,0,0,511,512,1,0,0,0,512,510,1,0,0,0,512,513,1,0,0,0,513, - 514,1,0,0,0,514,515,5,2,0,0,515,77,1,0,0,0,516,517,5,37,0,0,517, - 518,5,80,0,0,518,519,5,9,0,0,519,538,5,1,0,0,520,523,3,80,40,0,521, - 523,3,82,41,0,522,520,1,0,0,0,522,521,1,0,0,0,523,536,1,0,0,0,524, - 533,5,47,0,0,525,530,3,88,44,0,526,527,5,72,0,0,527,529,3,88,44, - 0,528,526,1,0,0,0,529,532,1,0,0,0,530,528,1,0,0,0,530,531,1,0,0, - 0,531,534,1,0,0,0,532,530,1,0,0,0,533,525,1,0,0,0,533,534,1,0,0, - 0,534,535,1,0,0,0,535,537,5,48,0,0,536,524,1,0,0,0,536,537,1,0,0, - 0,537,539,1,0,0,0,538,522,1,0,0,0,539,540,1,0,0,0,540,538,1,0,0, - 0,540,541,1,0,0,0,541,542,1,0,0,0,542,543,5,2,0,0,543,79,1,0,0,0, - 544,549,5,87,0,0,545,546,5,54,0,0,546,547,3,6,3,0,547,548,5,56,0, - 0,548,550,1,0,0,0,549,545,1,0,0,0,549,550,1,0,0,0,550,551,1,0,0, - 0,551,552,5,55,0,0,552,553,5,42,0,0,553,554,5,9,0,0,554,81,1,0,0, - 0,555,560,5,87,0,0,556,557,5,54,0,0,557,558,3,6,3,0,558,559,5,56, - 0,0,559,561,1,0,0,0,560,556,1,0,0,0,560,561,1,0,0,0,561,562,1,0, - 0,0,562,563,3,0,0,0,563,564,5,55,0,0,564,565,5,39,0,0,565,566,5, - 9,0,0,566,83,1,0,0,0,567,568,5,38,0,0,568,569,5,80,0,0,569,570,5, - 9,0,0,570,573,5,1,0,0,571,574,5,42,0,0,572,574,5,39,0,0,573,571, - 1,0,0,0,573,572,1,0,0,0,574,587,1,0,0,0,575,584,5,47,0,0,576,581, - 3,88,44,0,577,578,5,72,0,0,578,580,3,88,44,0,579,577,1,0,0,0,580, - 583,1,0,0,0,581,579,1,0,0,0,581,582,1,0,0,0,582,585,1,0,0,0,583, - 581,1,0,0,0,584,576,1,0,0,0,584,585,1,0,0,0,585,586,1,0,0,0,586, - 588,5,48,0,0,587,575,1,0,0,0,587,588,1,0,0,0,588,589,1,0,0,0,589, - 
590,5,9,0,0,590,591,5,2,0,0,591,85,1,0,0,0,592,593,5,15,0,0,593, - 594,5,87,0,0,594,603,5,47,0,0,595,600,3,88,44,0,596,597,5,72,0,0, - 597,599,3,88,44,0,598,596,1,0,0,0,599,602,1,0,0,0,600,598,1,0,0, - 0,600,601,1,0,0,0,601,604,1,0,0,0,602,600,1,0,0,0,603,595,1,0,0, - 0,603,604,1,0,0,0,604,605,1,0,0,0,605,607,5,48,0,0,606,608,3,0,0, - 0,607,606,1,0,0,0,607,608,1,0,0,0,608,609,1,0,0,0,609,610,5,80,0, - 0,610,611,3,28,14,0,611,87,1,0,0,0,612,613,5,87,0,0,613,614,3,0, - 0,0,614,89,1,0,0,0,615,616,5,87,0,0,616,617,5,74,0,0,617,618,7,3, - 0,0,618,91,1,0,0,0,68,98,109,114,120,122,126,141,150,156,177,179, - 186,191,196,203,212,216,223,228,233,242,245,250,258,263,272,277, - 293,297,306,312,317,323,333,338,341,348,354,360,365,378,386,392, - 396,420,433,435,454,456,467,481,495,510,512,522,530,533,536,540, - 549,560,573,581,584,587,600,603,607 + 1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,4,39,523,8,39,11,39,12,39, + 524,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3,40,534,8,40,1,40,1,40,1, + 40,1,40,1,40,1,40,5,40,542,8,40,10,40,12,40,545,9,40,3,40,547,8, + 40,1,40,3,40,550,8,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,559, + 8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42,1,42,1,42,1,42,3,42, + 572,8,42,1,42,1,42,1,42,1,42,5,42,578,8,42,10,42,12,42,581,9,42, + 3,42,583,8,42,1,42,3,42,586,8,42,1,42,1,42,1,42,1,43,1,43,1,43,1, + 43,1,43,1,43,5,43,597,8,43,10,43,12,43,600,9,43,3,43,602,8,43,1, + 43,1,43,3,43,606,8,43,1,43,1,43,1,43,1,44,1,44,1,44,1,45,1,45,1, + 45,1,45,1,45,0,2,2,6,46,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, + 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, + 74,76,78,80,82,84,86,88,90,0,4,2,0,49,49,73,73,1,0,88,89,1,0,32, + 34,3,0,25,25,85,86,88,89,679,0,98,1,0,0,0,2,109,1,0,0,0,4,126,1, + 0,0,0,6,141,1,0,0,0,8,191,1,0,0,0,10,196,1,0,0,0,12,203,1,0,0,0, + 14,212,1,0,0,0,16,216,1,0,0,0,18,218,1,0,0,0,20,235,1,0,0,0,22,250, + 1,0,0,0,24,268,1,0,0,0,26,282,1,0,0,0,28,301,1,0,0,0,30,312,1,0, + 0,0,32,317,1,0,0,0,34,323,1,0,0,0,36,327,1,0,0,0,38,338,1,0,0,0, + 
40,368,1,0,0,0,42,378,1,0,0,0,44,380,1,0,0,0,46,382,1,0,0,0,48,384, + 1,0,0,0,50,388,1,0,0,0,52,398,1,0,0,0,54,403,1,0,0,0,56,408,1,0, + 0,0,58,412,1,0,0,0,60,426,1,0,0,0,62,433,1,0,0,0,64,439,1,0,0,0, + 66,443,1,0,0,0,68,460,1,0,0,0,70,474,1,0,0,0,72,488,1,0,0,0,74,499, + 1,0,0,0,76,503,1,0,0,0,78,516,1,0,0,0,80,528,1,0,0,0,82,553,1,0, + 0,0,84,565,1,0,0,0,86,590,1,0,0,0,88,610,1,0,0,0,90,613,1,0,0,0, + 92,99,5,10,0,0,93,99,5,11,0,0,94,99,5,12,0,0,95,99,5,13,0,0,96,99, + 5,14,0,0,97,99,3,2,1,0,98,92,1,0,0,0,98,93,1,0,0,0,98,94,1,0,0,0, + 98,95,1,0,0,0,98,96,1,0,0,0,98,97,1,0,0,0,99,1,1,0,0,0,100,101,6, + 1,-1,0,101,102,5,47,0,0,102,103,3,2,1,0,103,104,5,48,0,0,104,110, + 1,0,0,0,105,106,5,88,0,0,106,107,5,77,0,0,107,110,3,2,1,2,108,110, + 5,87,0,0,109,100,1,0,0,0,109,105,1,0,0,0,109,108,1,0,0,0,110,122, + 1,0,0,0,111,114,10,3,0,0,112,115,5,75,0,0,113,115,5,77,0,0,114,112, + 1,0,0,0,114,113,1,0,0,0,115,116,1,0,0,0,116,121,3,2,1,4,117,118, + 10,4,0,0,118,119,5,76,0,0,119,121,3,4,2,0,120,111,1,0,0,0,120,117, + 1,0,0,0,121,124,1,0,0,0,122,120,1,0,0,0,122,123,1,0,0,0,123,3,1, + 0,0,0,124,122,1,0,0,0,125,127,7,0,0,0,126,125,1,0,0,0,126,127,1, + 0,0,0,127,128,1,0,0,0,128,129,5,88,0,0,129,5,1,0,0,0,130,131,6,3, + -1,0,131,132,5,47,0,0,132,133,3,6,3,0,133,134,5,48,0,0,134,142,1, + 0,0,0,135,136,3,10,5,0,136,137,3,6,3,9,137,142,1,0,0,0,138,139,5, + 28,0,0,139,142,3,6,3,4,140,142,3,8,4,0,141,130,1,0,0,0,141,135,1, + 0,0,0,141,138,1,0,0,0,141,140,1,0,0,0,142,179,1,0,0,0,143,144,10, + 10,0,0,144,145,5,76,0,0,145,178,3,6,3,10,146,150,10,8,0,0,147,151, + 5,75,0,0,148,151,5,77,0,0,149,151,5,78,0,0,150,147,1,0,0,0,150,148, + 1,0,0,0,150,149,1,0,0,0,151,152,1,0,0,0,152,178,3,6,3,9,153,156, + 10,7,0,0,154,157,5,49,0,0,155,157,5,73,0,0,156,154,1,0,0,0,156,155, + 1,0,0,0,157,158,1,0,0,0,158,178,3,6,3,8,159,160,10,6,0,0,160,161, + 3,12,6,0,161,162,3,6,3,7,162,178,1,0,0,0,163,164,10,5,0,0,164,165, + 3,14,7,0,165,166,3,6,3,6,166,178,1,0,0,0,167,168,10,3,0,0,168,169, + 
3,16,8,0,169,170,3,6,3,4,170,178,1,0,0,0,171,172,10,2,0,0,172,173, + 5,79,0,0,173,174,3,6,3,0,174,175,5,80,0,0,175,176,3,6,3,3,176,178, + 1,0,0,0,177,143,1,0,0,0,177,146,1,0,0,0,177,153,1,0,0,0,177,159, + 1,0,0,0,177,163,1,0,0,0,177,167,1,0,0,0,177,171,1,0,0,0,178,181, + 1,0,0,0,179,177,1,0,0,0,179,180,1,0,0,0,180,7,1,0,0,0,181,179,1, + 0,0,0,182,192,3,20,10,0,183,192,5,85,0,0,184,186,7,1,0,0,185,187, + 3,18,9,0,186,185,1,0,0,0,186,187,1,0,0,0,187,192,1,0,0,0,188,192, + 5,86,0,0,189,192,5,25,0,0,190,192,3,18,9,0,191,182,1,0,0,0,191,183, + 1,0,0,0,191,184,1,0,0,0,191,188,1,0,0,0,191,189,1,0,0,0,191,190, + 1,0,0,0,192,9,1,0,0,0,193,197,5,49,0,0,194,197,5,73,0,0,195,197, + 5,50,0,0,196,193,1,0,0,0,196,194,1,0,0,0,196,195,1,0,0,0,197,11, + 1,0,0,0,198,204,5,53,0,0,199,204,5,52,0,0,200,204,5,51,0,0,201,204, + 5,59,0,0,202,204,5,60,0,0,203,198,1,0,0,0,203,199,1,0,0,0,203,200, + 1,0,0,0,203,201,1,0,0,0,203,202,1,0,0,0,204,13,1,0,0,0,205,213,5, + 61,0,0,206,213,5,63,0,0,207,213,5,68,0,0,208,213,5,69,0,0,209,213, + 5,70,0,0,210,213,5,71,0,0,211,213,5,62,0,0,212,205,1,0,0,0,212,206, + 1,0,0,0,212,207,1,0,0,0,212,208,1,0,0,0,212,209,1,0,0,0,212,210, + 1,0,0,0,212,211,1,0,0,0,213,15,1,0,0,0,214,217,5,26,0,0,215,217, + 5,27,0,0,216,214,1,0,0,0,216,215,1,0,0,0,217,17,1,0,0,0,218,223, + 5,87,0,0,219,220,5,54,0,0,220,221,3,6,3,0,221,222,5,56,0,0,222,224, + 1,0,0,0,223,219,1,0,0,0,223,224,1,0,0,0,224,228,1,0,0,0,225,227, + 5,83,0,0,226,225,1,0,0,0,227,230,1,0,0,0,228,226,1,0,0,0,228,229, + 1,0,0,0,229,233,1,0,0,0,230,228,1,0,0,0,231,232,5,84,0,0,232,234, + 3,18,9,0,233,231,1,0,0,0,233,234,1,0,0,0,234,19,1,0,0,0,235,236, + 5,87,0,0,236,245,5,47,0,0,237,242,3,6,3,0,238,239,5,72,0,0,239,241, + 3,6,3,0,240,238,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,242,243, + 1,0,0,0,243,246,1,0,0,0,244,242,1,0,0,0,245,237,1,0,0,0,245,246, + 1,0,0,0,246,247,1,0,0,0,247,248,5,48,0,0,248,21,1,0,0,0,249,251, + 5,29,0,0,250,249,1,0,0,0,250,251,1,0,0,0,251,252,1,0,0,0,252,253, + 
5,16,0,0,253,254,5,87,0,0,254,255,3,0,0,0,255,256,5,74,0,0,256,258, + 3,6,3,0,257,259,5,82,0,0,258,257,1,0,0,0,258,259,1,0,0,0,259,263, + 1,0,0,0,260,262,3,42,21,0,261,260,1,0,0,0,262,265,1,0,0,0,263,261, + 1,0,0,0,263,264,1,0,0,0,264,266,1,0,0,0,265,263,1,0,0,0,266,267, + 5,9,0,0,267,23,1,0,0,0,268,269,3,18,9,0,269,270,5,74,0,0,270,272, + 3,6,3,0,271,273,5,82,0,0,272,271,1,0,0,0,272,273,1,0,0,0,273,277, + 1,0,0,0,274,276,3,42,21,0,275,274,1,0,0,0,276,279,1,0,0,0,277,275, + 1,0,0,0,277,278,1,0,0,0,278,280,1,0,0,0,279,277,1,0,0,0,280,281, + 5,9,0,0,281,25,1,0,0,0,282,283,5,30,0,0,283,284,3,18,9,0,284,285, + 5,74,0,0,285,293,3,6,3,0,286,287,5,4,0,0,287,288,3,18,9,0,288,289, + 5,74,0,0,289,290,3,6,3,0,290,292,1,0,0,0,291,286,1,0,0,0,292,295, + 1,0,0,0,293,291,1,0,0,0,293,294,1,0,0,0,294,297,1,0,0,0,295,293, + 1,0,0,0,296,298,5,82,0,0,297,296,1,0,0,0,297,298,1,0,0,0,298,299, + 1,0,0,0,299,300,5,9,0,0,300,27,1,0,0,0,301,302,5,9,0,0,302,304,5, + 1,0,0,303,305,3,30,15,0,304,303,1,0,0,0,305,306,1,0,0,0,306,304, + 1,0,0,0,306,307,1,0,0,0,307,308,1,0,0,0,308,309,5,2,0,0,309,29,1, + 0,0,0,310,313,3,34,17,0,311,313,3,32,16,0,312,310,1,0,0,0,312,311, + 1,0,0,0,313,31,1,0,0,0,314,318,3,50,25,0,315,318,3,58,29,0,316,318, + 3,60,30,0,317,314,1,0,0,0,317,315,1,0,0,0,317,316,1,0,0,0,318,33, + 1,0,0,0,319,324,3,36,18,0,320,324,3,20,10,0,321,324,3,38,19,0,322, + 324,3,48,24,0,323,319,1,0,0,0,323,320,1,0,0,0,323,321,1,0,0,0,323, + 322,1,0,0,0,324,325,1,0,0,0,325,326,5,9,0,0,326,35,1,0,0,0,327,333, + 3,18,9,0,328,334,5,74,0,0,329,334,5,64,0,0,330,334,5,65,0,0,331, + 334,5,66,0,0,332,334,5,67,0,0,333,328,1,0,0,0,333,329,1,0,0,0,333, + 330,1,0,0,0,333,331,1,0,0,0,333,332,1,0,0,0,334,335,1,0,0,0,335, + 336,3,6,3,0,336,37,1,0,0,0,337,339,5,29,0,0,338,337,1,0,0,0,338, + 339,1,0,0,0,339,341,1,0,0,0,340,342,5,16,0,0,341,340,1,0,0,0,341, + 342,1,0,0,0,342,343,1,0,0,0,343,348,3,18,9,0,344,345,5,72,0,0,345, + 347,3,18,9,0,346,344,1,0,0,0,347,350,1,0,0,0,348,346,1,0,0,0,348, + 
349,1,0,0,0,349,351,1,0,0,0,350,348,1,0,0,0,351,354,3,0,0,0,352, + 353,5,74,0,0,353,355,3,6,3,0,354,352,1,0,0,0,354,355,1,0,0,0,355, + 360,1,0,0,0,356,357,5,57,0,0,357,358,3,6,3,0,358,359,5,58,0,0,359, + 361,1,0,0,0,360,356,1,0,0,0,360,361,1,0,0,0,361,365,1,0,0,0,362, + 364,3,42,21,0,363,362,1,0,0,0,364,367,1,0,0,0,365,363,1,0,0,0,365, + 366,1,0,0,0,366,39,1,0,0,0,367,365,1,0,0,0,368,369,3,38,19,0,369, + 370,5,9,0,0,370,41,1,0,0,0,371,379,5,43,0,0,372,379,5,44,0,0,373, + 374,5,45,0,0,374,375,3,44,22,0,375,376,5,81,0,0,376,377,3,46,23, + 0,377,379,1,0,0,0,378,371,1,0,0,0,378,372,1,0,0,0,378,373,1,0,0, + 0,379,43,1,0,0,0,380,381,5,87,0,0,381,45,1,0,0,0,382,383,5,87,0, + 0,383,47,1,0,0,0,384,386,5,17,0,0,385,387,3,6,3,0,386,385,1,0,0, + 0,386,387,1,0,0,0,387,49,1,0,0,0,388,392,3,52,26,0,389,391,3,54, + 27,0,390,389,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,392,393,1,0, + 0,0,393,396,1,0,0,0,394,392,1,0,0,0,395,397,3,56,28,0,396,395,1, + 0,0,0,396,397,1,0,0,0,397,51,1,0,0,0,398,399,5,18,0,0,399,400,3, + 6,3,0,400,401,5,80,0,0,401,402,3,28,14,0,402,53,1,0,0,0,403,404, + 5,19,0,0,404,405,3,6,3,0,405,406,5,80,0,0,406,407,3,28,14,0,407, + 55,1,0,0,0,408,409,5,20,0,0,409,410,5,80,0,0,410,411,3,28,14,0,411, + 57,1,0,0,0,412,413,5,21,0,0,413,414,5,87,0,0,414,415,5,23,0,0,415, + 416,3,6,3,0,416,417,5,46,0,0,417,418,3,6,3,0,418,420,5,24,0,0,419, + 421,5,73,0,0,420,419,1,0,0,0,420,421,1,0,0,0,421,422,1,0,0,0,422, + 423,7,1,0,0,423,424,5,80,0,0,424,425,3,28,14,0,425,59,1,0,0,0,426, + 427,5,22,0,0,427,428,3,6,3,0,428,429,5,80,0,0,429,430,3,28,14,0, + 430,61,1,0,0,0,431,434,3,64,32,0,432,434,5,9,0,0,433,431,1,0,0,0, + 433,432,1,0,0,0,434,435,1,0,0,0,435,433,1,0,0,0,435,436,1,0,0,0, + 436,437,1,0,0,0,437,438,5,0,0,1,438,63,1,0,0,0,439,440,5,31,0,0, + 440,441,5,87,0,0,441,442,3,66,33,0,442,65,1,0,0,0,443,444,5,80,0, + 0,444,445,5,9,0,0,445,454,5,1,0,0,446,455,3,72,36,0,447,455,3,76, + 38,0,448,455,3,78,39,0,449,455,3,84,42,0,450,455,3,86,43,0,451,455, + 
3,68,34,0,452,455,3,70,35,0,453,455,3,74,37,0,454,446,1,0,0,0,454, + 447,1,0,0,0,454,448,1,0,0,0,454,449,1,0,0,0,454,450,1,0,0,0,454, + 451,1,0,0,0,454,452,1,0,0,0,454,453,1,0,0,0,455,456,1,0,0,0,456, + 454,1,0,0,0,456,457,1,0,0,0,457,458,1,0,0,0,458,459,5,2,0,0,459, + 67,1,0,0,0,460,461,5,40,0,0,461,462,5,47,0,0,462,467,5,87,0,0,463, + 464,5,72,0,0,464,466,3,90,45,0,465,463,1,0,0,0,466,469,1,0,0,0,467, + 465,1,0,0,0,467,468,1,0,0,0,468,470,1,0,0,0,469,467,1,0,0,0,470, + 471,5,48,0,0,471,472,5,80,0,0,472,473,3,28,14,0,473,69,1,0,0,0,474, + 475,5,41,0,0,475,476,5,47,0,0,476,481,3,6,3,0,477,478,5,72,0,0,478, + 480,3,90,45,0,479,477,1,0,0,0,480,483,1,0,0,0,481,479,1,0,0,0,481, + 482,1,0,0,0,482,484,1,0,0,0,483,481,1,0,0,0,484,485,5,48,0,0,485, + 486,5,80,0,0,486,487,3,28,14,0,487,71,1,0,0,0,488,489,7,2,0,0,489, + 490,5,80,0,0,490,491,5,9,0,0,491,493,5,1,0,0,492,494,3,40,20,0,493, + 492,1,0,0,0,494,495,1,0,0,0,495,493,1,0,0,0,495,496,1,0,0,0,496, + 497,1,0,0,0,497,498,5,2,0,0,498,73,1,0,0,0,499,500,5,35,0,0,500, + 501,5,80,0,0,501,502,3,28,14,0,502,75,1,0,0,0,503,504,5,36,0,0,504, + 505,5,80,0,0,505,506,5,9,0,0,506,510,5,1,0,0,507,511,3,22,11,0,508, + 511,3,24,12,0,509,511,3,26,13,0,510,507,1,0,0,0,510,508,1,0,0,0, + 510,509,1,0,0,0,511,512,1,0,0,0,512,510,1,0,0,0,512,513,1,0,0,0, + 513,514,1,0,0,0,514,515,5,2,0,0,515,77,1,0,0,0,516,517,5,37,0,0, + 517,518,5,80,0,0,518,519,5,9,0,0,519,522,5,1,0,0,520,523,3,80,40, + 0,521,523,3,82,41,0,522,520,1,0,0,0,522,521,1,0,0,0,523,524,1,0, + 0,0,524,522,1,0,0,0,524,525,1,0,0,0,525,526,1,0,0,0,526,527,5,2, + 0,0,527,79,1,0,0,0,528,533,5,87,0,0,529,530,5,54,0,0,530,531,3,6, + 3,0,531,532,5,56,0,0,532,534,1,0,0,0,533,529,1,0,0,0,533,534,1,0, + 0,0,534,535,1,0,0,0,535,536,5,55,0,0,536,549,5,42,0,0,537,546,5, + 47,0,0,538,543,3,88,44,0,539,540,5,72,0,0,540,542,3,88,44,0,541, + 539,1,0,0,0,542,545,1,0,0,0,543,541,1,0,0,0,543,544,1,0,0,0,544, + 547,1,0,0,0,545,543,1,0,0,0,546,538,1,0,0,0,546,547,1,0,0,0,547, + 
548,1,0,0,0,548,550,5,48,0,0,549,537,1,0,0,0,549,550,1,0,0,0,550, + 551,1,0,0,0,551,552,5,9,0,0,552,81,1,0,0,0,553,558,5,87,0,0,554, + 555,5,54,0,0,555,556,3,6,3,0,556,557,5,56,0,0,557,559,1,0,0,0,558, + 554,1,0,0,0,558,559,1,0,0,0,559,560,1,0,0,0,560,561,3,0,0,0,561, + 562,5,55,0,0,562,563,5,39,0,0,563,564,5,9,0,0,564,83,1,0,0,0,565, + 566,5,38,0,0,566,567,5,80,0,0,567,568,5,9,0,0,568,571,5,1,0,0,569, + 572,5,42,0,0,570,572,5,39,0,0,571,569,1,0,0,0,571,570,1,0,0,0,572, + 585,1,0,0,0,573,582,5,47,0,0,574,579,3,88,44,0,575,576,5,72,0,0, + 576,578,3,88,44,0,577,575,1,0,0,0,578,581,1,0,0,0,579,577,1,0,0, + 0,579,580,1,0,0,0,580,583,1,0,0,0,581,579,1,0,0,0,582,574,1,0,0, + 0,582,583,1,0,0,0,583,584,1,0,0,0,584,586,5,48,0,0,585,573,1,0,0, + 0,585,586,1,0,0,0,586,587,1,0,0,0,587,588,5,9,0,0,588,589,5,2,0, + 0,589,85,1,0,0,0,590,591,5,15,0,0,591,592,5,87,0,0,592,601,5,47, + 0,0,593,598,3,88,44,0,594,595,5,72,0,0,595,597,3,88,44,0,596,594, + 1,0,0,0,597,600,1,0,0,0,598,596,1,0,0,0,598,599,1,0,0,0,599,602, + 1,0,0,0,600,598,1,0,0,0,601,593,1,0,0,0,601,602,1,0,0,0,602,603, + 1,0,0,0,603,605,5,48,0,0,604,606,3,0,0,0,605,604,1,0,0,0,605,606, + 1,0,0,0,606,607,1,0,0,0,607,608,5,80,0,0,608,609,3,28,14,0,609,87, + 1,0,0,0,610,611,5,87,0,0,611,612,3,0,0,0,612,89,1,0,0,0,613,614, + 5,87,0,0,614,615,5,74,0,0,615,616,7,3,0,0,616,91,1,0,0,0,68,98,109, + 114,120,122,126,141,150,156,177,179,186,191,196,203,212,216,223, + 228,233,242,245,250,258,263,272,277,293,297,306,312,317,323,333, + 338,341,348,354,360,365,378,386,392,396,420,433,435,454,456,467, + 481,495,510,512,522,524,533,543,546,549,558,571,579,582,585,598, + 601,605 ] class PyNestMLParser ( Parser ): @@ -3877,31 +3876,6 @@ def continuousInputPort(self, i:int=None): return self.getTypedRuleContext(PyNestMLParser.ContinuousInputPortContext,i) - def LEFT_PAREN(self, i:int=None): - if i is None: - return self.getTokens(PyNestMLParser.LEFT_PAREN) - else: - return self.getToken(PyNestMLParser.LEFT_PAREN, i) - - def RIGHT_PAREN(self, 
i:int=None): - if i is None: - return self.getTokens(PyNestMLParser.RIGHT_PAREN) - else: - return self.getToken(PyNestMLParser.RIGHT_PAREN, i) - - def parameter(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) - else: - return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) - - - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(PyNestMLParser.COMMA) - else: - return self.getToken(PyNestMLParser.COMMA, i) - def getRuleIndex(self): return PyNestMLParser.RULE_inputBlock @@ -3929,7 +3903,7 @@ def inputBlock(self): self.match(PyNestMLParser.NEWLINE) self.state = 519 self.match(PyNestMLParser.INDENT) - self.state = 538 + self.state = 522 self._errHandler.sync(self) _la = self._input.LA(1) while True: @@ -3947,43 +3921,13 @@ def inputBlock(self): pass - self.state = 536 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==47: - self.state = 524 - self.match(PyNestMLParser.LEFT_PAREN) - self.state = 533 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==87: - self.state = 525 - self.parameter() - self.state = 530 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==72: - self.state = 526 - self.match(PyNestMLParser.COMMA) - self.state = 527 - self.parameter() - self.state = 532 - self._errHandler.sync(self) - _la = self._input.LA(1) - - - - self.state = 535 - self.match(PyNestMLParser.RIGHT_PAREN) - - - self.state = 540 + self.state = 524 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==87): break - self.state = 542 + self.state = 526 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4021,10 +3965,29 @@ def LEFT_SQUARE_BRACKET(self): def RIGHT_SQUARE_BRACKET(self): return self.getToken(PyNestMLParser.RIGHT_SQUARE_BRACKET, 0) + def LEFT_PAREN(self): + return self.getToken(PyNestMLParser.LEFT_PAREN, 0) + + def RIGHT_PAREN(self): + return self.getToken(PyNestMLParser.RIGHT_PAREN, 0) + 
def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def parameter(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return self.getToken(PyNestMLParser.COMMA, i) + def getRuleIndex(self): return PyNestMLParser.RULE_spikeInputPort @@ -4044,25 +4007,55 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 544 + self.state = 528 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 549 + self.state = 533 self._errHandler.sync(self) _la = self._input.LA(1) if _la==54: - self.state = 545 + self.state = 529 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 546 + self.state = 530 localctx.sizeParameter = self.expression(0) - self.state = 547 + self.state = 531 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 551 + self.state = 535 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 552 + self.state = 536 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 553 + self.state = 549 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==47: + self.state = 537 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 546 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==87: + self.state = 538 + self.parameter() + self.state = 543 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==72: + self.state = 539 + self.match(PyNestMLParser.COMMA) + self.state = 540 + self.parameter() + self.state = 545 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 548 + self.match(PyNestMLParser.RIGHT_PAREN) + + + self.state = 551 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4127,27 +4120,27 @@ def 
continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 555 + self.state = 553 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 560 + self.state = 558 self._errHandler.sync(self) _la = self._input.LA(1) if _la==54: - self.state = 556 + self.state = 554 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 557 + self.state = 555 localctx.sizeParameter = self.expression(0) - self.state = 558 + self.state = 556 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 562 + self.state = 560 self.dataType() - self.state = 563 + self.state = 561 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 564 + self.state = 562 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 565 + self.state = 563 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4230,61 +4223,61 @@ def outputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 567 + self.state = 565 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 568 + self.state = 566 self.match(PyNestMLParser.COLON) - self.state = 569 + self.state = 567 self.match(PyNestMLParser.NEWLINE) - self.state = 570 + self.state = 568 self.match(PyNestMLParser.INDENT) - self.state = 573 + self.state = 571 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 571 + self.state = 569 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [39]: - self.state = 572 + self.state = 570 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 587 + self.state = 585 self._errHandler.sync(self) _la = self._input.LA(1) if _la==47: - self.state = 575 + self.state = 573 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 584 + self.state = 582 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 576 + self.state = 574 
localctx.attribute = self.parameter() - self.state = 581 + self.state = 579 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 577 + self.state = 575 self.match(PyNestMLParser.COMMA) - self.state = 578 + self.state = 576 localctx.attribute = self.parameter() - self.state = 583 + self.state = 581 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 586 + self.state = 584 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 589 + self.state = 587 self.match(PyNestMLParser.NEWLINE) - self.state = 590 + self.state = 588 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4358,45 +4351,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 592 + self.state = 590 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 593 + self.state = 591 self.match(PyNestMLParser.NAME) - self.state = 594 + self.state = 592 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 603 + self.state = 601 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 595 + self.state = 593 self.parameter() - self.state = 600 + self.state = 598 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 596 + self.state = 594 self.match(PyNestMLParser.COMMA) - self.state = 597 + self.state = 595 self.parameter() - self.state = 602 + self.state = 600 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 605 + self.state = 603 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 607 + self.state = 605 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 140737488387072) != 0) or _la==87 or _la==88: - self.state = 606 + self.state = 604 localctx.returnType = self.dataType() - self.state = 609 + self.state = 607 self.match(PyNestMLParser.COLON) - self.state = 610 + self.state = 608 self.block() except RecognitionException as re: localctx.exception = re @@ -4439,9 
+4432,9 @@ def parameter(self): self.enterRule(localctx, 88, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 612 + self.state = 610 self.match(PyNestMLParser.NAME) - self.state = 613 + self.state = 611 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4501,11 +4494,11 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 615 + self.state = 613 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 616 + self.state = 614 self.match(PyNestMLParser.EQUALS) - self.state = 617 + self.state = 615 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==25 or ((((_la - 85)) & ~0x3f) == 0 and ((1 << (_la - 85)) & 27) != 0)): diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 104862729..37e0fb95f 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -285,7 +285,7 @@ parser grammar PyNestMLParser; @attribute inputPort: A list of input ports. */ inputBlock: INPUT_KEYWORD COLON - NEWLINE INDENT ((spikeInputPort | continuousInputPort) (LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN)?)+ DEDENT; + NEWLINE INDENT (spikeInputPort | continuousInputPort)+ DEDENT; /** ASTInputPort represents a single input port, e.g.: spike_in[3] <- spike @@ -299,7 +299,7 @@ parser grammar PyNestMLParser; spikeInputPort: name=NAME (LEFT_SQUARE_BRACKET sizeParameter=expression RIGHT_SQUARE_BRACKET)? - LEFT_ANGLE_MINUS SPIKE_KEYWORD NEWLINE; + LEFT_ANGLE_MINUS SPIKE_KEYWORD (LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN)? 
NEWLINE; continuousInputPort: name = NAME diff --git a/pynestml/meta_model/ast_input_port.py b/pynestml/meta_model/ast_input_port.py index c5109df24..934475f06 100644 --- a/pynestml/meta_model/ast_input_port.py +++ b/pynestml/meta_model/ast_input_port.py @@ -26,6 +26,7 @@ from pynestml.meta_model.ast_data_type import ASTDataType from pynestml.meta_model.ast_expression import ASTExpression from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.utils.port_signal_type import PortSignalType @@ -49,6 +50,7 @@ class ASTInputPort(ASTNode): def __init__(self, name: str, signal_type: PortSignalType, + parameters: Optional[List[ASTParameter]] = None, size_parameter: Optional[Union[ASTSimpleExpression, ASTExpression]] = None, data_type: Optional[ASTDataType] = None, *args, **kwargs): @@ -58,15 +60,17 @@ def __init__(self, Parameters for superclass (ASTNode) can be passed through :python:`*args` and :python:`**kwargs`. :param name: the name of the port + :param signal_type: type of signal received, i.e., spikes or continuous + :param parameters: spike event parameters (for instance, ``foo ms`` in ``spike_in_port <- spike(foo ms)``) :param size_parameter: a parameter indicating the index in an array. 
:param data_type: the data type of this input port - :param signal_type: type of signal received, i.e., spikes or continuous """ super(ASTInputPort, self).__init__(*args, **kwargs) self.name = name self.signal_type = signal_type self.size_parameter = size_parameter self.data_type = data_type + self.parameters = parameters def clone(self) -> ASTInputPort: r""" @@ -77,8 +81,12 @@ def clone(self) -> ASTInputPort: data_type_dup = None if self.data_type: data_type_dup = self.data_type.clone() + parameters_dup = None + if self.parameters: + parameters_dup = [parameter.clone() for parameter in self.parameters] dup = ASTInputPort(name=self.name, signal_type=self.signal_type, + parameters=parameters_dup, size_parameter=self.size_parameter, data_type=data_type_dup, # ASTNode common attributes: @@ -98,6 +106,16 @@ def get_name(self) -> str: """ return self.name + def get_parameters(self) -> List[ASTParameter]: + r""" + Returns the parameters of the declared input port. + :return: the parameters. + """ + if self.parameters is not None: + return self.parameters + + return [] + def has_size_parameter(self) -> bool: r""" Returns whether a size parameter has been defined. 
diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index bf921409e..4675bf572 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -252,10 +252,9 @@ def create_ast_input_block(cls, input_definitions, source_position): return ASTInputBlock(input_definitions, source_position=source_position) @classmethod - def create_ast_input_port(cls, name, size_parameter, data_type, signal_type, source_position): - # type:(str,str,(None|ASTDataType),PortSignalType,ASTSourceLocation) -> ASTInputPort + def create_ast_input_port(cls, name: str, size_parameter: str, data_type: Optional[ASTDataType], signal_type: Optional[PortSignalType], parameters: Optional[List[ASTParameter]], source_position: ASTSourceLocation) -> ASTInputPort: return ASTInputPort(name=name, size_parameter=size_parameter, data_type=data_type, - signal_type=signal_type, source_position=source_position) + signal_type=signal_type, parameters=parameters, source_position=source_position) @classmethod def create_ast_logical_operator(cls, is_logical_and=False, is_logical_or=False, source_position=None): @@ -353,8 +352,8 @@ def create_ast_update_block(cls, block, source_position): return ASTUpdateBlock(block, source_position=source_position) @classmethod - def create_ast_variable(cls, name: str, differential_order: int = 0, vector_parameter=None, is_homogeneous=False, source_position: Optional[ASTSourceLocation] = None, scope: Optional[Scope] = None) -> ASTVariable: - var = ASTVariable(name, differential_order, vector_parameter=vector_parameter, is_homogeneous=is_homogeneous, source_position=source_position) + def create_ast_variable(cls, name: str, differential_order: int = 0, vector_parameter=None, is_homogeneous=False, attribute: Optional[str] = None, source_position: Optional[ASTSourceLocation] = None, scope: Optional[Scope] = None) -> ASTVariable: + var = ASTVariable(name, differential_order, vector_parameter=vector_parameter, 
is_homogeneous=is_homogeneous, attribute=attribute, source_position=source_position) if scope: var.scope = scope diff --git a/pynestml/meta_model/ast_variable.py b/pynestml/meta_model/ast_variable.py index 7ef70a2df..88c6f8131 100644 --- a/pynestml/meta_model/ast_variable.py +++ b/pynestml/meta_model/ast_variable.py @@ -100,6 +100,13 @@ def get_name(self) -> str: """ return self.name + def get_attribute(self) -> str: + r""" + Returns the attribute of the variable. + :return: the attribute of the variable. + """ + return self.attribute + def set_name(self, name: str) -> None: """ Sets the name of the variable. @@ -128,6 +135,9 @@ def get_complete_name(self) -> str: Returns the complete name, consisting of the name and the differential order. :return: the complete name. """ + if self.attribute: + return self.get_name() + "." + self.attribute + '\'' * self.get_differential_order() + return self.get_name() + '\'' * self.get_differential_order() def get_name_of_lhs(self) -> str: @@ -137,6 +147,7 @@ def get_name_of_lhs(self) -> str: """ if self.get_differential_order() > 0: return self.get_name() + '\'' * (self.get_differential_order() - 1) + return self.get_name() def get_type_symbol(self) -> TypeSymbol: diff --git a/pynestml/symbols/variable_symbol.py b/pynestml/symbols/variable_symbol.py index 3400a42da..92359f05b 100644 --- a/pynestml/symbols/variable_symbol.py +++ b/pynestml/symbols/variable_symbol.py @@ -19,6 +19,8 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
+from typing import Optional + from copy import copy from enum import Enum @@ -86,7 +88,7 @@ def __init__(self, element_reference=None, scope: Scope=None, name: str=None, bl vector_parameter: str=None, delay_parameter: str=None, declaring_expression: ASTExpression=None, is_predefined: bool=False, is_inline_expression: bool=False, is_recordable: bool=False, type_symbol: TypeSymbol=None, initial_value: ASTExpression=None, variable_type: VariableType=None, - decorators=None, namespace_decorators=None): + decorators=None, namespace_decorators=None, attribute: Optional[str] = None): """ Standard constructor. :param element_reference: a reference to the first element where this type has been used/defined @@ -119,6 +121,7 @@ def __init__(self, element_reference=None, scope: Scope=None, name: str=None, bl self.initial_value = initial_value self.variable_type = variable_type self.ode_or_kernel = None + self.attribute = attribute if decorators is None: decorators = [] if namespace_decorators is None: diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 2f801febc..885e75a56 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -1473,6 +1473,17 @@ def is_delta_kernel(cls, kernel: ASTKernel) -> bool: and expr.get_rhs().get_function_call().get_scope().resolve_to_symbol(expr.get_rhs().get_function_call().get_name(), SymbolKind.FUNCTION).equals(PredefinedFunctions.name2function["delta"]) return rhs_is_delta_kernel or rhs_is_multiplied_delta_kernel + @classmethod + def find_parent_node_by_type(cls, node: ASTNode, type_to_find): + _node = node.get_parent() + while _node: + if isinstance(_node, type_to_find): + return _node + + _node = _node.get_parent() + + return None + @classmethod def get_input_port_by_name(cls, input_blocks: List[ASTInputBlock], port_name: str) -> Optional[ASTInputPort]: """ diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index 39a709888..32a456490 100644 --- a/pynestml/utils/messages.py +++ 
b/pynestml/utils/messages.py @@ -141,6 +141,7 @@ class MessageCode(Enum): EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS = 115 SPIKING_INPUT_PORT_NAME_ILLEGALLY_USED = 116 CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES = 117 + SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE = 118 class Messages: @@ -1387,5 +1388,10 @@ def get_spike_input_port_appears_outside_equation_rhs_and_event_handler(cls, nam @classmethod def get_continuous_output_port_cannot_have_attributes(cls): - message = "continuous time output port may not have attributes." + message = "Continuous time output port may not have attributes." return MessageCode.CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES, message + + @classmethod + def get_spike_input_port_attribute_missing(cls, name: str): + message = "Spiking input port '" + name + "' reference is missing an attribute." + return MessageCode.SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE, message diff --git a/pynestml/utils/type_caster.py b/pynestml/utils/type_caster.py index 2f7827bad..64f23373f 100644 --- a/pynestml/utils/type_caster.py +++ 
+from pynestml.symbols.error_type_symbol import ErrorTypeSymbol from pynestml.symbols.unit_type_symbol import UnitTypeSymbol from pynestml.utils.logger import Logger, LoggingLevel from pynestml.utils.messages import Messages diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index e4e10a40f..2ccdddedc 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -258,7 +258,10 @@ def visitVariable(self, ctx): vector_parameter = self.visit(ctx.vectorParameter) differential_order = (len(ctx.DIFFERENTIAL_ORDER()) if ctx.DIFFERENTIAL_ORDER() is not None else 0) - attribute = ctx.attribute + if ctx.attribute: + attribute = ctx.attribute.getText() + else: + attribute = None return ASTNodeFactory.create_ast_variable(name=str(ctx.NAME()), differential_order=differential_order, @@ -613,9 +616,13 @@ def visitSpikeInputPort(self, ctx): size_parameter = None if ctx.sizeParameter is not None: size_parameter = self.visit(ctx.sizeParameter) + parameters_ast = None + if ctx.parameter: + parameters_ast = [self.visit(parameter) for parameter in ctx.parameter()] signal_type = PortSignalType.SPIKE ret = ASTNodeFactory.create_ast_input_port(name=name, size_parameter=size_parameter, data_type=None, signal_type=signal_type, + parameters=parameters_ast, source_position=create_source_pos(ctx)) update_node_comments(ret, self.__comments.visit(ctx)) return ret @@ -629,6 +636,7 @@ def visitContinuousInputPort(self, ctx): signal_type = PortSignalType.CONTINUOUS ret = ASTNodeFactory.create_ast_input_port(name=name, size_parameter=size_parameter, data_type=data_type, signal_type=signal_type, + parameters=None, source_position=create_source_pos(ctx)) update_node_comments(ret, self.__comments.visit(ctx)) return ret diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 2d7b4e1a0..b13dda929 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ 
b/pynestml/visitors/ast_symbol_table_visitor.py @@ -19,6 +19,7 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . +from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_model import ASTModel from pynestml.meta_model.ast_model_body import ASTModelBody from pynestml.meta_model.ast_namespace_decorator import ASTNamespaceDecorator @@ -28,6 +29,7 @@ from pynestml.meta_model.ast_stmt import ASTStmt from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbol_table.scope import Scope, ScopeType +from pynestml.symbols.error_type_symbol import ErrorTypeSymbol from pynestml.symbols.function_symbol import FunctionSymbol from pynestml.symbols.predefined_functions import PredefinedFunctions from pynestml.symbols.predefined_types import PredefinedTypes @@ -423,6 +425,7 @@ def visit_unit_type(self, node): node.get_rhs().update_scope(node.get_scope()) def visit_expression(self, node): + print("Visiting expression: " + str(node)) """ Private method: Used to visit a single rhs and update its scope. :param node: an rhs. @@ -457,6 +460,7 @@ def visit_simple_expression(self, node): :param node: a simple rhs. 
:type node: ast_simple_expression """ + print("Visiting simple expression: " + str(node)) if node.is_function_call(): node.get_function_call().update_scope(node.get_scope()) elif node.is_variable() or node.has_unit(): @@ -466,6 +470,24 @@ def visit_simple_expression(self, node): node.get_variable().get_vector_parameter().update_scope(node.get_scope()) def visit_variable(self, node: ASTVariable): + print("Visiting variable: " + str(node)) + if node.attribute: + ast_model = ASTUtils.find_parent_node_by_type(node, ASTModel) + assert ast_model + input_port = ASTUtils.get_input_port_by_name(ast_model.get_input_blocks(), node.get_name()) + assert input_port + + for parameter in input_port.get_parameters(): + if parameter.get_name() == node.attribute: + actual_type = parameter.get_data_type() + node.data_type = actual_type + node.set_type_symbol(actual_type) + + assert isinstance(node.get_parent(), ASTSimpleExpression) + node.get_parent().type = actual_type + print("reassigned data type of " + str(node) + " to " + str(node.data_type)) + + if node.has_vector_parameter(): node.get_vector_parameter().update_scope(node.get_scope()) node.get_vector_parameter().accept(self) @@ -585,20 +607,39 @@ def visit_input_port(self, node): else: node.get_datatype().update_scope(node.get_scope()) - def endvisit_input_port(self, node): - type_symbol = PredefinedTypes.get_type("s")**-1 - if node.is_continuous() and node.has_datatype(): + def endvisit_input_port(self, node: ASTInputPort): + if node.is_continuous(): + assert node.has_datatype() type_symbol = node.get_datatype().get_type_symbol() - type_symbol.is_buffer = True # set it as a buffer - if node.has_size_parameter(): - if isinstance(node.get_size_parameter(), ASTSimpleExpression) and node.get_size_parameter().is_variable(): - node.get_size_parameter().update_scope(node.get_scope()) - symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_name(), - block_type=BlockType.INPUT, 
vector_parameter=node.get_size_parameter(), - is_predefined=False, is_inline_expression=False, is_recordable=False, - type_symbol=type_symbol, variable_type=VariableType.BUFFER) - symbol.set_comment(node.get_comment()) - node.get_scope().add_symbol(symbol) + type_symbol.is_buffer = True # set it as a buffer + if node.has_size_parameter(): + if isinstance(node.get_size_parameter(), ASTSimpleExpression) and node.get_size_parameter().is_variable(): + node.get_size_parameter().update_scope(node.get_scope()) + symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_name(), + block_type=BlockType.INPUT, vector_parameter=node.get_size_parameter(), + is_predefined=False, is_inline_expression=False, is_recordable=False, + type_symbol=type_symbol, variable_type=VariableType.BUFFER) + symbol.set_comment(node.get_comment()) + node.get_scope().add_symbol(symbol) + return + + assert node.is_spike() + + if len(node.parameters) == 0: + type_symbol = ErrorTypeSymbol() # not allowed to use a bare spike input port name in expressions etc. + else: + for parameter in node.parameters: + type_symbol = parameter.get_data_type() + type_symbol.is_buffer = True # set it as a buffer + if node.has_size_parameter(): + if isinstance(node.get_size_parameter(), ASTSimpleExpression) and node.get_size_parameter().is_variable(): + node.get_size_parameter().update_scope(node.get_scope()) + symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_name() + "." 
+ parameter.get_name(), + block_type=BlockType.INPUT, vector_parameter=node.get_size_parameter(), + is_predefined=False, is_inline_expression=False, is_recordable=False, + type_symbol=type_symbol, variable_type=VariableType.BUFFER, + attribute=parameter.get_name()) + node.get_scope().add_symbol(symbol) def visit_stmt(self, node: ASTStmt): """ diff --git a/pynestml/visitors/ast_variable_visitor.py b/pynestml/visitors/ast_variable_visitor.py index 07860acd7..a8d4cb075 100644 --- a/pynestml/visitors/ast_variable_visitor.py +++ b/pynestml/visitors/ast_variable_visitor.py @@ -22,6 +22,7 @@ from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.symbols.error_type_symbol import ErrorTypeSymbol from pynestml.symbols.symbol import SymbolKind +from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import LoggingLevel, Logger from pynestml.utils.messages import MessageCode from pynestml.visitors.ast_visitor import ASTVisitor @@ -32,16 +33,16 @@ class ASTVariableVisitor(ASTVisitor): This visitor visits a single variable and updates its type. """ - def visit_simple_expression(self, node): + def visit_simple_expression(self, node: ASTSimpleExpression): """ Visits a single variable as contained in a simple expression and derives its type. :param node: a single simple expression - :type node: ASTSimpleExpression """ assert isinstance(node, ASTSimpleExpression), \ '(PyNestML.Visitor.VariableVisitor) No or wrong type of simple expression provided (%s)!' % type(node) assert (node.get_scope() is not None), \ '(PyNestML.Visitor.VariableVisitor) No scope found, run symboltable creator!' + print("in visit_simple_expression (" + str(node) + ") ") scope = node.get_scope() var_name = node.get_variable().get_complete_name() @@ -49,6 +50,14 @@ def visit_simple_expression(self, node): # update the type of the variable according to its symbol type. 
if var_resolve is not None: + print("var_resolve is " + str(var_resolve)) + # print("var_resolve.attribute is " + str(var_resolve.attribute)) + + # if var_resolve.attribute: + # import pdb;pdb.set_trace() + # node.type = var_resolve.attribute.get_type_symbol() + # print("Setting type according to attribute: " + str(var_resolve.attribute) + " = " + str(node.type)) + # else: node.type = var_resolve.get_type_symbol() node.type.referenced_object = node return diff --git a/tests/valid/CoCoValueAssignedToInputPort.nestml b/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml similarity index 65% rename from tests/valid/CoCoValueAssignedToInputPort.nestml rename to tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml index 4a30a9b43..d110d0a86 100644 --- a/tests/valid/CoCoValueAssignedToInputPort.nestml +++ b/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml @@ -1,14 +1,12 @@ """ -CoCoValueAssignedToInputPort.nestml -################################### +CoCoInputPortsIllegal.nestml +############################ Description +++++++++++ -This model is used to test if broken CoCos are identified correctly. Here, if assignment of values to input ports is detected. - -Positive case. +This test is used to test the declaration of both vectorized and non-vectorized input ports. Copyright statement @@ -31,9 +29,12 @@ GNU General Public License for more details. You should have received a copy of the GNU General Public License along with NEST. If not, see . """ -model CoCoValueAssignedToInputPort: +model input_ports_illegal_neuron: + state: + foo real = 0. 
+ input: - spikeInh <- spike + spike_in_port <- spike(foo real) - update: # input port not assigned to, thus everything is correct - test integer = spikeInh * s + 10 + onReceive(spike_in_port): + foo += spike_in_port diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index abfa47ea1..193cca517 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -54,95 +54,96 @@ def generate_all_models(self): codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" - generate_nest_target(input_path=["models/neurons/hh_cond_exp_traub_neuron.nestml", - "models/neurons/hh_psc_alpha_neuron.nestml", - "models/neurons/iaf_cond_beta_neuron.nestml", - "models/neurons/iaf_cond_alpha_neuron.nestml", - "models/neurons/iaf_cond_exp_neuron.nestml", - "models/neurons/iaf_psc_alpha_neuron.nestml", + generate_nest_target(input_path=[# "models/neurons/hh_cond_exp_traub_neuron.nestml", + # "models/neurons/hh_psc_alpha_neuron.nestml", + # "models/neurons/iaf_cond_beta_neuron.nestml", + # "models/neurons/iaf_cond_alpha_neuron.nestml", + # "models/neurons/iaf_cond_exp_neuron.nestml", + # "models/neurons/iaf_psc_alpha_neuron.nestml", "models/neurons/iaf_psc_exp_neuron.nestml", - "models/neurons/iaf_psc_delta_neuron.nestml"], + # "models/neurons/iaf_psc_delta_neuron.nestml" + ], target_path="/tmp/nestml-allmodels", logging_level="DEBUG", module_name="nestml_allmodels_module", suffix="_nestml", codegen_opts=codegen_opts) - # generate code with analytic solver disabled - alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} + # # generate code with analytic solver disabled + # alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} - generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", - "models/neurons/aeif_cond_alpha_neuron.nestml"], - target_path="/tmp/nestml-alt-allmodels", - 
logging_level="DEBUG", - module_name="nestml_alt_allmodels_module", - suffix="_alt_nestml", - codegen_opts=alt_codegen_opts) + # generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", + # "models/neurons/aeif_cond_alpha_neuron.nestml"], + # target_path="/tmp/nestml-alt-allmodels", + # logging_level="DEBUG", + # module_name="nestml_alt_allmodels_module", + # suffix="_alt_nestml", + # codegen_opts=alt_codegen_opts) - # generate code using forward Euler integrator - alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} + # # generate code using forward Euler integrator + # alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} - generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", - target_path="/tmp/nestml-alt-int-allmodels", - logging_level="DEBUG", - module_name="nestml_alt_int_allmodels_module", - suffix="_alt_int_nestml", - codegen_opts=alt_codegen_opts) + # generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", + # target_path="/tmp/nestml-alt-int-allmodels", + # logging_level="DEBUG", + # module_name="nestml_alt_int_allmodels_module", + # suffix="_alt_int_nestml", + # codegen_opts=alt_codegen_opts) def test_nest_integration(self): self.generate_all_models() nest.Install("nestml_allmodels_module") - nest.Install("nestml_alt_allmodels_module") - nest.Install("nestml_alt_int_allmodels_module") + # nest.Install("nestml_alt_allmodels_module") + # nest.Install("nestml_alt_int_allmodels_module") - self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # 
self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + # self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML 
integrates PSCs precisely whereas NEST uses GSL + # self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + # self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + # self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} - iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) - self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") + # iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} + # iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity 
occurs in the propagators) + # self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + # self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + # self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") - self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") - self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") - self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") + # self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") + # self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") + # self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") - nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + # nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + # self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + # 
self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + # self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + # nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + # self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + # self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") + # self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. 
Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + # self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") - self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") + # self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + # self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") # -------------- # XXX: TODO! 
diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 3744c3fd3..cb06529f4 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -131,11 +131,7 @@ def test_valid_inline_expression_has_several_lhs(self): def test_invalid_no_values_assigned_to_input_ports(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoValueAssignedToInputPort.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_no_values_assigned_to_input_ports(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoValueAssignedToInputPort.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 def test_invalid_order_of_equations_correct(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNoOrderOfEquations.nestml')) @@ -402,9 +398,18 @@ def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_h model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 - def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers2(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal2.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + def test_valid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): + model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsLegal.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def _parse_and_validate_model(self, fname: str) -> Optional[str]: from pynestml.frontend.pynestml_frontend import generate_target diff --git a/tests/valid/CoCoInputPortsIllegalMissingAttribute.nestml b/tests/valid/CoCoInputPortsIllegalMissingAttribute.nestml new file mode 100644 index 000000000..f08eb972f --- /dev/null +++ b/tests/valid/CoCoInputPortsIllegalMissingAttribute.nestml @@ -0,0 +1,40 @@ +""" +CoCoInputPortsIllegal.nestml +############################ + + +Description ++++++++++++ + +This test is used to test the declaration of both vectorized and non-vectorized input ports. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. 
+ +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model input_ports_illegal_neuron: + state: + foo real = 0. + + input: + spike_in_port <- spike(foo real) + + onReceive(spike_in_port): + foo += spike_in_port.foo diff --git a/tests/invalid/CoCoInputPortsIllegal2.nestml b/tests/valid/CoCoInputPortsLegal.nestml similarity index 97% rename from tests/invalid/CoCoInputPortsIllegal2.nestml rename to tests/valid/CoCoInputPortsLegal.nestml index a1ee1e3db..1e735fe5f 100644 --- a/tests/invalid/CoCoInputPortsIllegal2.nestml +++ b/tests/valid/CoCoInputPortsLegal.nestml @@ -29,7 +29,7 @@ GNU General Public License for more details. You should have received a copy of the GNU General Public License along with NEST. If not, see . """ -model input_ports_illegal_neuron: +model input_ports_legal_neuron: state: bar pA = 0 pA foo_spikes pA = 0 pA From 7be429f17239885c3a2a1a7db00f93347f1719de Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Wed, 30 Oct 2024 13:09:49 +0100 Subject: [PATCH 13/68] add attributes to spiking input ports --- .../models/iaf_psc_alpha_adapt_curr.nestml | 3 ++- .../codegeneration/printers/cpp_variable_printer.py | 6 ++++-- pynestml/utils/messages.py | 1 + pynestml/visitors/ast_symbol_table_visitor.py | 2 +- tests/nest_tests/resources/FIR_filter.nestml | 11 +++++++++-- 5 files changed, 17 insertions(+), 6 deletions(-) diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml index fa454127b..02e0ad23f 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml @@ -90,7 +90,8 @@ model iaf_psc_alpha_adapt_curr_neuron: I_e pA = 0 pA input: - in_spikes <- spike + exc_spikes <- spike + inh_spikes <- spike I_stim pA <- continuous output: diff --git a/pynestml/codegeneration/printers/cpp_variable_printer.py b/pynestml/codegeneration/printers/cpp_variable_printer.py index 1e1039165..3cd69d4ef 100644 --- a/pynestml/codegeneration/printers/cpp_variable_printer.py +++ b/pynestml/codegeneration/printers/cpp_variable_printer.py @@ -34,11 +34,13 @@ def _print_cpp_name(cls, variable_name: str) -> str: :param variable_name: a single name. 
:return: a string representation """ + if "'" in variable_name: + import pdb;pdb.set_trace() differential_order = variable_name.count("\"") if differential_order > 0: - return variable_name.replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order + return variable_name.replace(".", "__ATTR__").replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order - return variable_name.replace("$", "__DOLLAR") + return variable_name.replace(".", "__ATTR__").replace("$", "__DOLLAR") def print_variable(self, node: ASTVariable) -> str: """ diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index 32a456490..f1592663d 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -142,6 +142,7 @@ class MessageCode(Enum): SPIKING_INPUT_PORT_NAME_ILLEGALLY_USED = 116 CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES = 117 SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE = 118 + CONVOLVE_NEEDS_BUFFER_PARAMETER = 119 class Messages: diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index b13dda929..d35e9286f 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -629,7 +629,7 @@ def endvisit_input_port(self, node: ASTInputPort): type_symbol = ErrorTypeSymbol() # not allowed to use a bare spike input port name in expressions etc. 
else: for parameter in node.parameters: - type_symbol = parameter.get_data_type() + type_symbol = parameter.get_data_type().type_symbol type_symbol.is_buffer = True # set it as a buffer if node.has_size_parameter(): if isinstance(node.get_size_parameter(), ASTSimpleExpression) and node.get_size_parameter().is_variable(): diff --git a/tests/nest_tests/resources/FIR_filter.nestml b/tests/nest_tests/resources/FIR_filter.nestml index 21387d50e..47f4e27ea 100644 --- a/tests/nest_tests/resources/FIR_filter.nestml +++ b/tests/nest_tests/resources/FIR_filter.nestml @@ -31,6 +31,8 @@ along with NEST. If not, see . """ model fir_filter: state: + spike_in_buffer real = 0. + # FIR filter output (to be recorded by NEST multimeter) y real = 0. @@ -43,11 +45,16 @@ model fir_filter: h[N] real = 1. # filter coefficients input: - spike_in <- spike + spike_in <- spike(weight real) + + + onReceive(spike_in): + spike_in_buffer += spike_in.weight update: # circular buffer for input spike count per timestep - x[i] = spike_in * s + x[i] = spike_in_buffer * s + spike_in_buffer = 0. # compute the new value of y j integer = 0 From 2036109cd0cfe59da2470059044edd42246c1d38 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Wed, 30 Oct 2024 13:17:20 +0100 Subject: [PATCH 14/68] add attributes to spiking input ports --- .../point_neuron/common/NeuronHeader.jinja2 | 6 ++-- pynestml/visitors/ast_symbol_table_visitor.py | 32 +++++++++---------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 31b791775..85a592c97 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -554,15 +554,15 @@ private: {%- if port.has_vector_parameter() -%} {% set size = utils.get_numeric_vector_size(port) | int %} {%- for i in range(size) %} - {{port.get_symbol_name().upper()}}_{{i}} = {{ns.count}}, + {{ port.get_symbol_name().upper().split(".")[0] }}_{{ i }} = {{ ns.count }}, {%- set ns.count = ns.count + 1 -%} {%- endfor %} {%- else %} - {{port.get_symbol_name().upper()}} = {{ns.count}}, + {{ port.get_symbol_name().upper().split(".")[0] }} = {{ ns.count }}, {%- set ns.count = ns.count + 1 -%} {%- endif -%} {%- endfor %} - MAX_SPIKE_RECEPTOR = {{ns.count}} + MAX_SPIKE_RECEPTOR = {{ ns.count }} }; static const size_t NUM_SPIKE_RECEPTORS = MAX_SPIKE_RECEPTOR - MIN_SPIKE_RECEPTOR; diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index d35e9286f..f628221bc 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -470,22 +470,22 @@ def visit_simple_expression(self, node): node.get_variable().get_vector_parameter().update_scope(node.get_scope()) def visit_variable(self, node: ASTVariable): - print("Visiting variable: " + str(node)) - if node.attribute: - ast_model = ASTUtils.find_parent_node_by_type(node, ASTModel) - assert ast_model - input_port = 
ASTUtils.get_input_port_by_name(ast_model.get_input_blocks(), node.get_name()) - assert input_port - - for parameter in input_port.get_parameters(): - if parameter.get_name() == node.attribute: - actual_type = parameter.get_data_type() - node.data_type = actual_type - node.set_type_symbol(actual_type) - - assert isinstance(node.get_parent(), ASTSimpleExpression) - node.get_parent().type = actual_type - print("reassigned data type of " + str(node) + " to " + str(node.data_type)) + # print("Visiting variable: " + str(node)) + # if node.attribute: + # ast_model = ASTUtils.find_parent_node_by_type(node, ASTModel) + # assert ast_model + # input_port = ASTUtils.get_input_port_by_name(ast_model.get_input_blocks(), node.get_name()) + # assert input_port + + # for parameter in input_port.get_parameters(): + # if parameter.get_name() == node.attribute: + # actual_type = parameter.get_data_type() + # node.data_type = actual_type + # node.set_type_symbol(actual_type) + + # assert isinstance(node.get_parent(), ASTSimpleExpression) + # node.get_parent().type = actual_type + # print("reassigned data type of " + str(node) + " to " + str(node.data_type)) if node.has_vector_parameter(): From 39030207668a89c2c7d80a87bd563fc4070dcd87 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 31 Oct 2024 12:33:35 +0100 Subject: [PATCH 15/68] add attributes to spiking input ports --- models/neurons/iaf_psc_exp_neuron.nestml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index ce3b6ade7..82695e7ea 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -90,9 +90,9 @@ model iaf_psc_exp_neuron: # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike # weight is a real number, and here interpreted as 1 corresponding to 1 pA if spike_in_port.weight > 0: - I_syn_exc += 0 pA # spike_in_port.weight * pA + I_syn_exc += spike_in_port.weight * pA else: - I_syn_inh -= 0 pA # spike_in_port.weight * pA + I_syn_inh -= spike_in_port.weight * pA update: if refr_t > 0 ms: From 90bd81d9fb50d4af1924bf67e3b166a3c084409a Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Fri, 1 Nov 2024 00:02:43 +0100 Subject: [PATCH 16/68] add attributes to spiking input ports --- .../printers/cpp_variable_printer.py | 4 +- .../printers/gsl_variable_printer.py | 5 +- .../printers/nest_variable_printer.py | 7 +- .../printers/spinnaker_c_variable_printer.py | 4 +- .../point_neuron/common/NeuronClass.jinja2 | 167 +++++++++++++----- .../point_neuron/common/NeuronHeader.jinja2 | 110 ++++++++---- .../directives_cpp/SpikeBufferGetter.jinja2 | 8 +- .../@NEURON_NAME@_impl.h.jinja2 | 17 +- pynestml/meta_model/ast_input_port.py | 2 +- pynestml/utils/ast_utils.py | 21 +++ pynestml/visitors/ast_symbol_table_visitor.py | 3 - pynestml/visitors/ast_variable_visitor.py | 2 - 12 files changed, 240 insertions(+), 110 deletions(-) diff --git a/pynestml/codegeneration/printers/cpp_variable_printer.py b/pynestml/codegeneration/printers/cpp_variable_printer.py index 3cd69d4ef..11bbbb254 100644 --- a/pynestml/codegeneration/printers/cpp_variable_printer.py +++ b/pynestml/codegeneration/printers/cpp_variable_printer.py @@ -38,9 +38,9 @@ def _print_cpp_name(cls, variable_name: str) -> str: import pdb;pdb.set_trace() differential_order = variable_name.count("\"") if differential_order > 0: - return variable_name.replace(".", "__ATTR__").replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order + return variable_name.replace(".", "__DOT__").replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order - return variable_name.replace(".", "__ATTR__").replace("$", "__DOLLAR") + return variable_name.replace(".", "__DOT__").replace("$", "__DOLLAR") def print_variable(self, node: ASTVariable) -> str: """ diff --git a/pynestml/codegeneration/printers/gsl_variable_printer.py b/pynestml/codegeneration/printers/gsl_variable_printer.py index 3705c96d2..15b91f13a 100644 --- a/pynestml/codegeneration/printers/gsl_variable_printer.py +++ b/pynestml/codegeneration/printers/gsl_variable_printer.py @@ -90,11 +90,10 @@ def 
_print_buffer_value(self, variable: ASTVariable) -> str: var_name += "_" + str(variable.get_vector_parameter()) # add variable attribute if it exists - variable_attr = "" if variable.attribute: - variable_attr = "_" + variable.attribute + return "spike_input_" + str(variable.name) + "__DOT__" + variable.attribute + "_grid_sum_" - return "spike_inputs_grid_sum_" + variable_attr + "[node." + var_name + " - node.MIN_SPIKE_RECEPTOR]" + return "spike_input_" + str(variable) + "_grid_sum_" # case of continuous-type input port return variable_symbol.get_symbol_name() + '_grid_sum_' diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index d481d552e..bd526ae89 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -165,7 +165,12 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: var_name += "_0 + " + variable.get_vector_parameter().get_variable().get_name() else: var_name += "_" + str(variable.get_vector_parameter()) - return "spike_inputs_grid_sum_[" + var_name + " - MIN_SPIKE_RECEPTOR]" + + # add variable attribute if it exists + if variable.attribute: + return "spike_input_" + str(variable.name) + "__DOT__" + variable.attribute + "_grid_sum_" + + return "spike_input_" + str(variable) + "_grid_sum_" if self.cpp_variable_suffix: return variable_symbol.get_symbol_name() + self.cpp_variable_suffix diff --git a/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py b/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py index 867a73dbf..29816a2a7 100644 --- a/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py +++ b/pynestml/codegeneration/printers/spinnaker_c_variable_printer.py @@ -130,7 +130,7 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: vector_parameter = ASTUtils.get_numeric_vector_size(variable) var_name = var_name + "_" + str(vector_parameter) 
- return "input->inputs[" + var_name + " - MIN_SPIKE_RECEPTOR]" + return "input->inputs_" + str(var_name) if variable_symbol.is_continuous_input_port(): var_name = variable_symbol.get_symbol_name().upper() @@ -138,7 +138,7 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: vector_parameter = ASTUtils.get_numeric_vector_size(variable) var_name = var_name + "_" + str(vector_parameter) - return "input->inputs[" + var_name + " - MIN_SPIKE_RECEPTOR]" + return "input->inputs_" + str(var_name) return variable_symbol.get_symbol_name() + '_grid_sum_' diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 24b2f0062..4d21d7374 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -147,7 +147,7 @@ template <> void RecordablesMap<{{ neuronName }}>::create() } } -{%- if neuron.get_spike_input_ports()|length > 1 or neuron.is_multisynapse_spikes() %} +{% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() %} std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = { {%- for rport, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} @@ -213,10 +213,27 @@ std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = {{ neuronName }}::Buffers_::Buffers_({{ neuronName }} &n): logger_(n) {%- if neuron.get_spike_input_ports()|length > 0 %} - , spike_inputs_( std::vector< nest::RingBuffer >( NUM_SPIKE_RECEPTORS ) ) - , spike_inputs_grid_sum_( std::vector< double >( NUM_SPIKE_RECEPTORS ) ) - , spike_input_received_( std::vector< nest::RingBuffer >( NUM_SPIKE_RECEPTORS ) ) - , spike_input_received_grid_sum_( std::vector< double >( NUM_SPIKE_RECEPTORS ) ) +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = 
utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_grid_sum_( 0. ); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endfor %} + , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) +{%- endif %} +{%- endfor %} {%- endif %} {%- if uses_numeric_solver %} {%- if numeric_solver == "rk45" %} @@ -230,10 +247,27 @@ std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = {{ neuronName }}::Buffers_::Buffers_(const Buffers_ &, {{ neuronName }} &n): logger_(n) {%- if neuron.get_spike_input_ports()|length > 0 %} - , spike_inputs_( std::vector< nest::RingBuffer >( NUM_SPIKE_RECEPTORS ) ) - , spike_inputs_grid_sum_( std::vector< double >( NUM_SPIKE_RECEPTORS ) ) - , spike_input_received_( std::vector< nest::RingBuffer >( NUM_SPIKE_RECEPTORS ) ) - , spike_input_received_grid_sum_( std::vector< double >( NUM_SPIKE_RECEPTORS ) ) +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if 
inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_grid_sum_( 0. ); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endfor %} + , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) +{%- endif %} +{%- endfor %} {%- endif %} {%- if uses_numeric_solver %} {%- if numeric_solver == "rk45" %} @@ -438,10 +472,45 @@ void {{ neuronName }}::init_buffers_() {%- if neuron.get_spike_input_ports() | length > 0 %} // spike input buffers - get_spike_inputs_().clear(); - get_spike_inputs_grid_sum_().clear(); - get_spike_input_received_().clear(); - get_spike_input_received_grid_sum_().clear(); + + + + + +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_.clear(); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); +{%- endfor %} + B_.spike_input_{{ inputPort.name }}_spike_input_received_.clear(); +{%- endif %} +{%- endfor %} + + + + + + + + + + 
+ + + + + + {% endif %} {%- if neuron.get_continuous_input_ports() | length > 0 %} @@ -569,14 +638,6 @@ void {{ neuronName }}::pre_run_hook() // parameters might have changed -- recompute internals V_.__h = nest::Time::get_resolution().get_ms(); recompute_internal_variables(); - - // buffers B_ -{%- if ((neuron.get_spike_input_ports())|length > 0) %} - B_.spike_inputs_.resize(NUM_SPIKE_RECEPTORS); - B_.spike_inputs_grid_sum_.resize(NUM_SPIKE_RECEPTORS); - B_.spike_input_received_.resize(NUM_SPIKE_RECEPTORS); - B_.spike_input_received_grid_sum_.resize(NUM_SPIKE_RECEPTORS); -{%- endif %} } {%- if neuron.get_functions()|length > 0 %} @@ -741,11 +802,24 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const * buffer spikes from spiking input ports **/ - for (long i = 0; i < NUM_SPIKE_RECEPTORS; ++i) - { - get_spike_inputs_grid_sum_()[i] = get_spike_inputs_()[i].get_value(lag); - get_spike_input_received_grid_sum_()[i] = get_spike_input_received_()[i].get_value(lag); - } +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_.get_value(lag); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); +{%- endfor %} + B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_ = 
B_.spike_input_{{ inputPort.name }}_spike_input_received_.get_value(lag); +{%- endif %} +{%- endfor %} {%- if has_delay_variables %} /** @@ -798,9 +872,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const **/ {% for blk in neuron.get_on_receive_blocks() %} {%- set inport = blk.get_port_name() %} - if (B_.spike_input_received_grid_sum_[{{ inport.upper() }} - MIN_SPIKE_RECEPTOR]) + if (B_.spike_input_{{ inport }}_spike_input_received_grid_sum_) { - // B_.spike_input_received_[{{ inport.upper() }} - MIN_SPIKE_RECEPTOR] = false; // no need to reset the flag -- reading from the RingBuffer into the "grid_sum" variables resets the RingBuffer entries + // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer into the "grid_sum" variables resets the RingBuffer entries on_receive_block_{{ blk.get_port_name() }}(); } {%- endfor %} @@ -1088,19 +1162,32 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) #endif assert(e.get_delay_steps() > 0); - assert( e.get_rport() < B_.spike_inputs_.size() ); + assert(e.get_rport() < {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}); -{% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() -%} - const size_t nestml_buffer_idx = rport_to_nestml_buffer_idx[e.get_rport()]; -{%- else %} - const size_t nestml_buffer_idx = 0; -{%- endif %} - B_.spike_inputs_[ nestml_buffer_idx - MIN_SPIKE_RECEPTOR ].add_value( - e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), - e.get_weight() * e.get_multiplicity() ); - B_.spike_input_received_[ nestml_buffer_idx - MIN_SPIKE_RECEPTOR ].add_value( +{%- for spike_in_port in astnode.get_body().get_spike_input_ports() %} + if (e.get_rport() == {{ utils.nestml_input_port_to_nest_rport(astnode, spike_in_port.name) }}) + { +{%- if spike_in_port.get_parameters() %} +{%- for attribute in spike_in_port.get_parameters() %} + B_.spike_input_{{ spike_in_port.name 
}}__DOT__{{ attribute.name }}_.add_value( + e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), + e.get_weight() * e.get_multiplicity() ); +{%- endfor %} +{%- else %} +{# no attributes defined for the spike event; in this case, there is only one single buffer #} + B_.spike_input_{{ spike_in_port.name }}_.add_value( + e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), + e.get_weight() * e.get_multiplicity() ); +{%- endif %} + + + B_.spike_input_{{ spike_in_port.name }}_spike_input_received_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), 1. ); + + + } +{%- endfor %} } {%- endif %} @@ -1116,11 +1203,11 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) const double current = e.get_current(); // we assume that in NEST, this returns a current in pA const double weight = e.get_weight(); -{%- for port in neuron.get_continuous_input_ports() %} +{%- for port in neuron.get_continuous_input_ports() %} get_{{port.get_symbol_name()}}().add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()), weight * current ); -{%- endfor %} +{%- endfor %} } {%- endif %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 85a592c97..211ff429f 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -541,32 +541,9 @@ private: * @note Excluded lower and upper bounds are defined as MIN_, MAX_. * Excluding port 0 avoids accidental connections. 
**/ - static const nest_port_t MIN_SPIKE_RECEPTOR = 1; -{%- set ns = namespace(count=1) %} -{%- else %} - static const nest_port_t MIN_SPIKE_RECEPTOR = 0; -{%- set ns = namespace(count=0) %} + const nest_port_t MAX_SPIKE_RECEPTOR = {{ len(utils.nestml_input_port_to_nest_rport_dict(astnode)) }}; {%- endif %} - enum SynapseTypes - { -{%- for port in neuron.get_spike_input_ports() %} -{%- if port.has_vector_parameter() -%} -{% set size = utils.get_numeric_vector_size(port) | int %} -{%- for i in range(size) %} - {{ port.get_symbol_name().upper().split(".")[0] }}_{{ i }} = {{ ns.count }}, -{%- set ns.count = ns.count + 1 -%} -{%- endfor %} -{%- else %} - {{ port.get_symbol_name().upper().split(".")[0] }} = {{ ns.count }}, -{%- set ns.count = ns.count + 1 -%} -{%- endif -%} -{%- endfor %} - MAX_SPIKE_RECEPTOR = {{ ns.count }} - }; - - static const size_t NUM_SPIKE_RECEPTORS = MAX_SPIKE_RECEPTOR - MIN_SPIKE_RECEPTOR; - {% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() -%} static std::vector< size_t > rport_to_nestml_buffer_idx; {%- endif %} @@ -810,9 +787,27 @@ private: // Spike buffers and sums of incoming spikes/currents per timestep // ----------------------------------------------------------------------- -{%- filter indent(4, True) -%} -{{ buffer_getter.SpikeBufferGetter(true) }} -{%- endfilter %} +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + nest::RingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_; + double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- endfor %} +{%- endif %} +{%- endfor %} 
+{%- else %} +{%- for parameter in inputPort.get_parameters() %} + nest::RingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; + double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- endfor %} + nest::RingBuffer spike_input_{{ inputPort.name }}_spike_input_received_; + double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; +{%- endif %} +{%- endfor %} // ----------------------------------------------------------------------- // Continuous-input buffers @@ -879,17 +874,61 @@ private: {%- endfilter %} // ------------------------------------------------------------------------- - // Getters/setters for input buffers + // Getters/setters for spike input buffers // ------------------------------------------------------------------------- +{% for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_ + { + return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_; + } -{%- filter indent(2, True) %} -{{ buffer_getter.SpikeBufferGetter(false) }} -{%- endfilter %} + inline double& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ + { + return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; + } +{% endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}() + { + return B_.spike_input_{{ inputPort.name }}__DOT__{{ 
parameter.get_name() }}_; + } -{%- for inputPort in neuron.get_continuous_input_ports() %} -{{ continuous_buffer_getter.ContinuousInputBufferGetter(inputPort, false) }} + inline double& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_() + { + return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; + } +{% endfor %} + inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}_spike_input_received_() + { + return B_.spike_input_{{ inputPort.name }}_spike_input_received_; + } + + inline double& get_spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_() + { + return B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; + } +{% endif %} {%- endfor %} + // ------------------------------------------------------------------------- + // Getters/setters for continuous-time input buffers + // ------------------------------------------------------------------------- + +{% filter indent(2, True) %} +{%- for inputPort in neuron.get_continuous_input_ports() %} +{{ continuous_buffer_getter.ContinuousInputBufferGetter(inputPort, false) }} +{%- endfor %} +{%- endfilter %} + {%- if neuron.get_functions()|length > 0 %} // ------------------------------------------------------------------------- // Function declarations @@ -986,12 +1025,11 @@ inline nest_port_t {{neuronName}}::send_test_event(nest::Node& target, nest_rpor inline nest_port_t {{neuronName}}::handles_test_event(nest::SpikeEvent&, nest_port_t receptor_type) { {%- if (neuron.get_multiple_receptors())|length > 1 or neuron.is_multisynapse_spikes() %} - assert( B_.spike_inputs_.size() == NUM_SPIKE_RECEPTORS ); - if ( receptor_type < MIN_SPIKE_RECEPTOR or receptor_type >= MAX_SPIKE_RECEPTOR ) + if ( receptor_type < 1 or receptor_type >= MAX_SPIKE_RECEPTOR ) { throw nest::UnknownReceptorType( receptor_type, get_name() ); } - return receptor_type - MIN_SPIKE_RECEPTOR; + return receptor_type - 1; {%- else %} // You should usually not change 
the code in this function. // It confirms to the connection management system that we are able diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 index e5f519248..530deb219 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 @@ -13,16 +13,16 @@ /** * {{ comment_string }} **/ -inline std::vector< {{data_type}} >& get_{{variable_name}}() +inline {{ data_type }}& get_{{ variable_name }}_{{ suffix }}() { {%- if is_in_struct %} - return {{variable_name}}; + return {{ variable_name }}_{{ suffix }}; {%- else %} - return B_.get_{{variable_name}}(); + return B_.get_{{ variable_name }}()_{{ suffix }}; {%- endif %} } {%- if is_in_struct %} -std::vector< {{data_type}} > {{variable_name}}; +{{ data_type }} {{ variable_name }}_{{ suffix }}; {%- endif %} {%- endfor %} diff --git a/pynestml/codegeneration/resources_spinnaker/@NEURON_NAME@_impl.h.jinja2 b/pynestml/codegeneration/resources_spinnaker/@NEURON_NAME@_impl.h.jinja2 index 891c888dd..60657c357 100644 --- a/pynestml/codegeneration/resources_spinnaker/@NEURON_NAME@_impl.h.jinja2 +++ b/pynestml/codegeneration/resources_spinnaker/@NEURON_NAME@_impl.h.jinja2 @@ -65,21 +65,6 @@ extern "C" inline int {{neuronName}}_dynamics( double, const double ode_state[], double f[], void* pnode ); {% endif %} -// Missing paired_synapse stuff here - -{% if has_multiple_synapses -%} - /** - * Synapse types to connect to - * @note Excluded lower and upper bounds are defined as MIN_, MAX_. - * Excluding port 0 avoids accidental connections. 
- **/ -const long MIN_SPIKE_RECEPTOR = 1; -{%- set ns = namespace(count=1) %} -{%- else %} -const long MIN_SPIKE_RECEPTOR = 0; -{%- set ns = namespace(count=0) %} -{%- endif %} - enum input_indices { {%- set ns = namespace(count=0) %} @@ -95,7 +80,7 @@ enum input_indices }; typedef struct { -{# Make shure count is |spike_input_ports| + |continous_input_ports| #} +{# Make sure count is |spike_input_ports| + |continous_input_ports| #} // 0: exc, 1: inh, 2: Istim {%- if ns.count > 0 %} REAL inputs[{{ ns.count }}]; diff --git a/pynestml/meta_model/ast_input_port.py b/pynestml/meta_model/ast_input_port.py index 934475f06..8861d0ed7 100644 --- a/pynestml/meta_model/ast_input_port.py +++ b/pynestml/meta_model/ast_input_port.py @@ -38,7 +38,7 @@ class ASTInputPort(ASTNode): .. code-block:: nestml - spike_in pA <- spike + spike_in <- spike(weight real) @attribute name: The name of the input port. @attribute sizeParameter: Optional size parameter for multisynapse neuron. diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 885e75a56..d37e97068 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2569,3 +2569,24 @@ def initial_value_or_zero(cls, astnode: ASTModel, var): return astnode.get_initial_value(var) return "0" + + @classmethod + def nestml_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, int]: + input_port_to_rport = {} + rport = 1 # if there is more than one spiking input port, count begins at 1 + for input_block in astnode.get_input_blocks(): + for input_port in input_block.get_input_ports(): + + if input_port.get_size_parameter(): + for i in range(ASTUtils.get_numeric_vector_size(input_port)): + input_port_to_rport[input_port.name + "_VEC_IDX_" + str(i)] = rport + rport += 1 + else: + input_port_to_rport[input_port.name] = rport + rport += 1 + + return input_port_to_rport + + @classmethod + def nestml_input_port_to_nest_rport(cls, astnode: ASTModel, spike_in_port: ASTInputPort): + return 
ASTUtils.nestml_input_port_to_nest_rport_dict(astnode)[spike_in_port] diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index f628221bc..db0b73392 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -425,7 +425,6 @@ def visit_unit_type(self, node): node.get_rhs().update_scope(node.get_scope()) def visit_expression(self, node): - print("Visiting expression: " + str(node)) """ Private method: Used to visit a single rhs and update its scope. :param node: an rhs. @@ -460,7 +459,6 @@ def visit_simple_expression(self, node): :param node: a simple rhs. :type node: ast_simple_expression """ - print("Visiting simple expression: " + str(node)) if node.is_function_call(): node.get_function_call().update_scope(node.get_scope()) elif node.is_variable() or node.has_unit(): @@ -470,7 +468,6 @@ def visit_simple_expression(self, node): node.get_variable().get_vector_parameter().update_scope(node.get_scope()) def visit_variable(self, node: ASTVariable): - # print("Visiting variable: " + str(node)) # if node.attribute: # ast_model = ASTUtils.find_parent_node_by_type(node, ASTModel) # assert ast_model diff --git a/pynestml/visitors/ast_variable_visitor.py b/pynestml/visitors/ast_variable_visitor.py index a8d4cb075..013d88c2c 100644 --- a/pynestml/visitors/ast_variable_visitor.py +++ b/pynestml/visitors/ast_variable_visitor.py @@ -42,7 +42,6 @@ def visit_simple_expression(self, node: ASTSimpleExpression): '(PyNestML.Visitor.VariableVisitor) No or wrong type of simple expression provided (%s)!' % type(node) assert (node.get_scope() is not None), \ '(PyNestML.Visitor.VariableVisitor) No scope found, run symboltable creator!' 
- print("in visit_simple_expression (" + str(node) + ") ") scope = node.get_scope() var_name = node.get_variable().get_complete_name() @@ -50,7 +49,6 @@ def visit_simple_expression(self, node: ASTSimpleExpression): # update the type of the variable according to its symbol type. if var_resolve is not None: - print("var_resolve is " + str(var_resolve)) # print("var_resolve.attribute is " + str(var_resolve.attribute)) # if var_resolve.attribute: From 296d155bc317a9bc09fcf6791bbe0ac9c69d110f Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Sat, 2 Nov 2024 00:06:38 +0100 Subject: [PATCH 17/68] add attributes to spiking input ports --- doc/running/running_nest.rst | 2 +- .../co_co_priorities_correctly_specified.py | 2 +- ...only_in_equation_rhs_and_event_handlers.py | 4 +- .../printers/nest_variable_printer.py | 14 ++- .../codegeneration/printers/nestml_printer.py | 2 +- .../point_neuron/common/NeuronClass.jinja2 | 114 ++++++++---------- .../point_neuron/common/NeuronHeader.jinja2 | 55 +++++---- .../point_neuron/@NEURON_NAME@.py.jinja2 | 7 +- pynestml/generated/PyNestMLParser.py | 11 +- pynestml/grammars/PyNestMLParser.g4 | 2 +- pynestml/meta_model/ast_input_block.py | 3 +- pynestml/meta_model/ast_model_body.py | 8 +- pynestml/meta_model/ast_node_factory.py | 4 +- pynestml/meta_model/ast_on_receive_block.py | 29 ++--- pynestml/utils/ast_utils.py | 41 +++---- pynestml/visitors/ast_builder_visitor.py | 4 +- tests/nest_tests/nest_integration_test.py | 9 +- tests/nest_tests/resources/input_ports.nestml | 39 ++++-- tests/nest_tests/test_input_ports.py | 46 +++---- 19 files changed, 205 insertions(+), 191 deletions(-) diff --git a/doc/running/running_nest.rst b/doc/running/running_nest.rst index aca3ddb0c..07573c4eb 100644 --- a/doc/running/running_nest.rst +++ b/doc/running/running_nest.rst @@ -159,7 +159,7 @@ The above code querying for ``receptor_types`` gives a list of port names and NE - 1 * - NMDA_spikes - 2 - * - FOO_0 + * - FOO_0 XXXXX _VEC_IDX_ - 3 * - FOO_1 - 4 
diff --git a/pynestml/cocos/co_co_priorities_correctly_specified.py b/pynestml/cocos/co_co_priorities_correctly_specified.py index a36cdc631..5b97272c1 100644 --- a/pynestml/cocos/co_co_priorities_correctly_specified.py +++ b/pynestml/cocos/co_co_priorities_correctly_specified.py @@ -42,7 +42,7 @@ def check_co_co(cls, node: ASTModel): priorities = {} # type: Dict[str, int] for on_receive_block in node.get_on_receive_blocks(): if "priority" in on_receive_block.get_const_parameters(): - priorities[on_receive_block.get_port_name()] = int(on_receive_block.get_const_parameters()["priority"]) + priorities[on_receive_block.get_input_port_variable().get_name()] = int(on_receive_block.get_const_parameters()["priority"]) if len(priorities) == 1: on_receive_block_name = list(priorities.keys())[0] diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index 0e7ff5a7b..2ba9ac436 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -68,8 +68,8 @@ def visit_variable(self, node: ASTVariable): while _node: _node = _node.get_parent() - if isinstance(_node, ASTOnReceiveBlock): - # spike input port was used inside an ``onReceive`` block; everything is OK + if isinstance(_node, ASTOnReceiveBlock) and _node.input_port_variable.name == in_port.name: + # spike input port was used inside an ``onReceive`` block for this spike port; everything is OK return if isinstance(_node, ASTOdeEquation): diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index bd526ae89..dea1fb916 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -166,10 +166,18 @@ def 
_print_buffer_value(self, variable: ASTVariable) -> str: else: var_name += "_" + str(variable.get_vector_parameter()) - # add variable attribute if it exists - if variable.attribute: - return "spike_input_" + str(variable.name) + "__DOT__" + variable.attribute + "_grid_sum_" + if variable.has_vector_parameter(): + + # add variable attribute if it exists + if variable.attribute: + return "spike_input_" + str(variable.name) + "_VEC_IDX_" + str(variable.get_vector_parameter()) + "__DOT__" + variable.attribute + "_grid_sum_" + + else: + # add variable attribute if it exists + if variable.attribute: + return "spike_input_" + str(variable.name) + "__DOT__" + variable.attribute + "_grid_sum_" + # no vector indices, no attributes return "spike_input_" + str(variable) + "_grid_sum_" if self.cpp_variable_suffix: diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index c077ae677..a3babcc97 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -524,7 +524,7 @@ def print_unit_type(self, node: ASTUnitType) -> str: def print_on_receive_block(self, node: ASTOnReceiveBlock) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) - ret += print_n_spaces(self.indent) + "onReceive(" + node.port_name + "):" + print_sl_comment(node.in_comment) + "\n" + ret += print_n_spaces(self.indent) + "onReceive(" + self.print(node.get_input_port_variable()) + "):" + print_sl_comment(node.in_comment) + "\n" ret += self.print(node.get_block()) return ret diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 4d21d7374..2b58273dd 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -72,7 +72,7 @@ along with 
NEST. If not, see . // uncomment the next line to enable printing of detailed debug information // #define DEBUG -{%- if state_vars_that_need_continuous_buffering | length > 0 %} +{% if state_vars_that_need_continuous_buffering | length > 0 %} {%- if continuous_state_buffering_method == "continuous_time_buffer" %} continuous_variable_histentry_{{ neuronName }}::continuous_variable_histentry_{{ neuronName }}( double t, {%- for state_var in state_vars_that_need_continuous_buffering %} @@ -147,24 +147,6 @@ template <> void RecordablesMap<{{ neuronName }}>::create() } } -{% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() %} -std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = -{ -{%- for rport, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} -{%- set ns = namespace(rport=rport) %} -{%- if ports[0].has_vector_parameter() %} -{%- set size = utils.get_numeric_vector_size(ports[0]) %} -{%- for i in range(size) %} - {{ rport_to_port_map_entry.RportToBufferIndexEntry(ports, ns.rport, index=i) }}, -{%- set ns.rport = ns.rport + 1 %} -{%- endfor %} -{%- else %} - {{ rport_to_port_map_entry.RportToBufferIndexEntry(ports, ns.rport) }}, -{%- endif %} -{%- endfor %} -}; -{%- endif %} - {%- if has_state_vectors %} std::string {{ neuronName }}::get_var_name(size_t elem, std::string var_name) { @@ -220,8 +202,8 @@ std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_grid_sum_( 0. ); + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. 
) {%- endfor %} {%- endif %} {%- endfor %} @@ -254,9 +236,11 @@ std::vector< size_t > {{ neuronName }}::rport_to_nestml_buffer_idx = {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_grid_sum_( 0. ); + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} +{%- else %} + ????????????? {%- endif %} {%- endfor %} {%- else %} @@ -484,7 +468,7 @@ void {{ neuronName }}::init_buffers_() {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_.clear(); + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} {%- endif %} {%- endfor %} @@ -725,9 +709,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const for ( long lag = from ; lag < to ; ++lag ) { {% if propagators_are_state_dependent %} - // the propagators are state dependent; update them! - V_.__h = nest::Time::get_resolution().get_ms(); - recompute_internal_variables(); + // the propagators are state dependent; update them! 
+ V_.__h = nest::Time::get_resolution().get_ms(); + recompute_internal_variables(); {% endif %} @@ -743,32 +727,32 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- if not gap_junction_membrane_potential_variable_is_numeric %} {# in case V_m is solved analytically, need to compute __I_gap here so dV_m/dt can be computed below #} - // set I_gap depending on interpolation order - double __I_gap = 0.0; + // set I_gap depending on interpolation order + double __I_gap = 0.0; - const double __t_gap = gap_junction_step / nest::Time::get_resolution().get_ms(); + const double __t_gap = gap_junction_step / nest::Time::get_resolution().get_ms(); - switch ( nest::kernel().simulation_manager.get_wfr_interpolation_order() ) - { - case 0: - __I_gap = -B_.sumj_g_ij_ * {{ gap_junction_membrane_potential_variable_cpp }} + B_.interpolation_coefficients[ B_.lag_ ]; - break; - - case 1: - __I_gap = -B_.sumj_g_ij_ * {{ gap_junction_membrane_potential_variable_cpp }} + B_.interpolation_coefficients[ B_.lag_ * 2 + 0 ] - + B_.interpolation_coefficients[ B_.lag_ * 2 + 1 ] * __t_gap; - break; - - case 3: - __I_gap = -B_.sumj_g_ij_ * {{ gap_junction_membrane_potential_variable_cpp }} + B_.interpolation_coefficients[ B_.lag_ * 4 + 0 ] - + B_.interpolation_coefficients[ B_.lag_ * 4 + 1 ] * __t_gap - + B_.interpolation_coefficients[ B_.lag_ * 4 + 2 ] * __t_gap * __t_gap - + B_.interpolation_coefficients[ B_.lag_ * 4 + 3 ] * __t_gap * __t_gap * __t_gap; - break; - - default: - throw nest::BadProperty( "Interpolation order must be 0, 1, or 3." 
); - } + switch ( nest::kernel().simulation_manager.get_wfr_interpolation_order() ) + { + case 0: + __I_gap = -B_.sumj_g_ij_ * {{ gap_junction_membrane_potential_variable_cpp }} + B_.interpolation_coefficients[ B_.lag_ ]; + break; + + case 1: + __I_gap = -B_.sumj_g_ij_ * {{ gap_junction_membrane_potential_variable_cpp }} + B_.interpolation_coefficients[ B_.lag_ * 2 + 0 ] + + B_.interpolation_coefficients[ B_.lag_ * 2 + 1 ] * __t_gap; + break; + + case 3: + __I_gap = -B_.sumj_g_ij_ * {{ gap_junction_membrane_potential_variable_cpp }} + B_.interpolation_coefficients[ B_.lag_ * 4 + 0 ] + + B_.interpolation_coefficients[ B_.lag_ * 4 + 1 ] * __t_gap + + B_.interpolation_coefficients[ B_.lag_ * 4 + 2 ] * __t_gap * __t_gap + + B_.interpolation_coefficients[ B_.lag_ * 4 + 3 ] * __t_gap * __t_gap * __t_gap; + break; + + default: + throw nest::BadProperty( "Interpolation order must be 0, 1, or 3." ); + } {%- endif %} if ( called_from_wfr_update ) @@ -809,7 +793,7 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_.get_value(lag); + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_value(lag); {%- endfor %} {%- endif %} {%- endfor %} @@ -871,11 +855,11 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const * Begin NESTML generated code for the onReceive block(s) **/ {% for blk in neuron.get_on_receive_blocks() %} -{%- set inport = blk.get_port_name() %} +{%- set inport = utils.port_name_printer(blk.get_input_port_variable()) %} if (B_.spike_input_{{ inport 
}}_spike_input_received_grid_sum_) { // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer into the "grid_sum" variables resets the RingBuffer entries - on_receive_block_{{ blk.get_port_name() }}(); + on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(); } {%- endfor %} @@ -1164,28 +1148,28 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) assert(e.get_delay_steps() > 0); assert(e.get_rport() < {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}); -{%- for spike_in_port in astnode.get_body().get_spike_input_ports() %} - if (e.get_rport() == {{ utils.nestml_input_port_to_nest_rport(astnode, spike_in_port.name) }}) +{%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} +{%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} +{%- if astnode.get_body().get_spike_input_ports() | length > 1 %} + if (e.get_rport() == {{ rport }}) +{%- endif %} { {%- if spike_in_port.get_parameters() %} {%- for attribute in spike_in_port.get_parameters() %} - B_.spike_input_{{ spike_in_port.name }}__DOT__{{ attribute.name }}_.add_value( + B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); {%- endfor %} {%- else %} {# no attributes defined for the spike event; in this case, there is only one single buffer #} - B_.spike_input_{{ spike_in_port.name }}_.add_value( + B_.spike_input_{{ spike_in_port_name }}_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); {%- endif %} - - B_.spike_input_{{ spike_in_port.name }}_spike_input_received_.add_value( - e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), - 1. 
); - - + B_.spike_input_{{ spike_in_port_name }}_spike_input_received_.add_value( + e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), + 1. ); } {%- endfor %} } @@ -1218,7 +1202,7 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) {%- for blk in neuron.get_on_receive_blocks() %} {%- set ast = blk.get_block() %} void -{{ neuronName }}::on_receive_block_{{ blk.get_port_name() }}() +{{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}() { const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 211ff429f..14ea459c6 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -475,7 +475,7 @@ public: {% filter indent(2, True) -%} {%- for blk in neuron.get_on_receive_blocks() %} - void on_receive_block_{{ blk.get_port_name() }}(); + void on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(); {%- endfor %} {%- endfilter %} @@ -541,12 +541,14 @@ private: * @note Excluded lower and upper bounds are defined as MIN_, MAX_. * Excluding port 0 avoids accidental connections. **/ - const nest_port_t MAX_SPIKE_RECEPTOR = {{ len(utils.nestml_input_port_to_nest_rport_dict(astnode)) }}; + const nest_port_t MAX_SPIKE_RECEPTOR = {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}; {%- endif %} +{# {% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() -%} static std::vector< size_t > rport_to_nestml_buffer_idx; {%- endif %} +#} /** * Reset state of neuron. 
@@ -789,15 +791,21 @@ private: {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} + + // input port: {{ inputPort.name }} {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - nest::RingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_; - double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} +{%- else %} + XXX: vector port, but no parameters {%- endif %} + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} @@ -877,23 +885,33 @@ private: // Getters/setters for spike input buffers // ------------------------------------------------------------------------- {% for inputPortSymbol in neuron.get_spike_input_ports() %} + // input port: {{ inputPortSymbol.name }} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_ + inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i 
}}__DOT__{{ parameter.get_name() }}_() { - return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_VEC_IDX_{{ i }}_; + return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; } - inline double& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ + inline double get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_() { - return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; + return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; } {% endfor %} {%- endif %} + inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_() + { + return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; + } + + inline double get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_() + { + return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; + } {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} @@ -902,7 +920,7 @@ private: return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; } - inline double& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_() + inline double get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_() { return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; } @@ -912,7 +930,7 @@ private: return B_.spike_input_{{ inputPort.name }}_spike_input_received_; } - inline double& get_spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_() + inline double get_spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_() { return B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; } @@ -1098,19 +1116,12 @@ inline void {{neuronName}}::get_status(DictionaryDatum &__d) const {%- if (neuron.get_multiple_receptors())|length > 1 or 
neuron.is_multisynapse_spikes() %} DictionaryDatum __receptor_type = new Dictionary(); -{%- for rport, ports in utils.get_spike_input_ports_in_pairs(neuron).items() %} -{%- set ns = namespace(rport=rport) %} -{%- for port in ports %} -{%- if not port.has_vector_parameter() %} - ( *__receptor_type )[ "{{port.get_symbol_name().upper()}}" ] = {{ns.rport + 1}}; -{%- else %} -{%- set size = utils.get_numeric_vector_size(port) %} -{%- for i in range(size) %} - ( *__receptor_type )[ "{{port.get_symbol_name().upper()}}_{{i}}" ] = {{ns.rport + i + 1}}, -{%- endfor %} -{%- endif %} -{%- endfor %} + +{%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} +{%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} +( *__receptor_type )[ "{{spike_in_port_name.upper()}}" ] = {{ rport }}; {%- endfor %} + ( *__d )[ "receptor_types" ] = __receptor_type; {%- endif %} diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 index 9e3d1e404..1657e2010 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 @@ -326,9 +326,8 @@ class Neuron_{{neuronName}}(Neuron): # ------------------------------------------------------------------------- {% for blk in neuron.get_on_receive_blocks() %} -{%- set inport = blk.get_port_name() %} - if self.B_.spike_received_{{ inport }}: - self.on_receive_block_{{ blk.get_port_name() }}() + if self.B_.spike_received_{{ utils.port_name_printer(blk.get_input_port_variable()) }}: + self.on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}() {%- endfor %} # ------------------------------------------------------------------------- @@ -384,7 +383,7 @@ class 
Neuron_{{neuronName}}(Neuron): {%- for blk in neuron.get_on_receive_blocks() %} {%- set ast = blk.get_block() %} - def on_receive_block_{{ blk.get_port_name() }}(self): + def on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(self): {%- filter indent(4, True) -%} {%- include "directives_py/Block.jinja2" %} {%- endfilter %} diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index f01e259f6..c8fa09514 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -189,7 +189,7 @@ def serializedATN(): 447,1,0,0,0,454,448,1,0,0,0,454,449,1,0,0,0,454,450,1,0,0,0,454, 451,1,0,0,0,454,452,1,0,0,0,454,453,1,0,0,0,455,456,1,0,0,0,456, 454,1,0,0,0,456,457,1,0,0,0,457,458,1,0,0,0,458,459,5,2,0,0,459, - 67,1,0,0,0,460,461,5,40,0,0,461,462,5,47,0,0,462,467,5,87,0,0,463, + 67,1,0,0,0,460,461,5,40,0,0,461,462,5,47,0,0,462,467,3,18,9,0,463, 464,5,72,0,0,464,466,3,90,45,0,465,463,1,0,0,0,466,469,1,0,0,0,467, 465,1,0,0,0,467,468,1,0,0,0,468,470,1,0,0,0,469,467,1,0,0,0,470, 471,5,48,0,0,471,472,5,80,0,0,472,473,3,28,14,0,473,69,1,0,0,0,474, @@ -3410,7 +3410,7 @@ class OnReceiveBlockContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - self.inputPortName = None # Token + self.inputPortVariable = None # VariableContext def ON_RECEIVE_KEYWORD(self): return self.getToken(PyNestMLParser.ON_RECEIVE_KEYWORD, 0) @@ -3428,8 +3428,9 @@ def block(self): return self.getTypedRuleContext(PyNestMLParser.BlockContext,0) - def NAME(self): - return self.getToken(PyNestMLParser.NAME, 0) + def variable(self): + return self.getTypedRuleContext(PyNestMLParser.VariableContext,0) + def COMMA(self, i:int=None): if i is None: @@ -3468,7 +3469,7 @@ def onReceiveBlock(self): self.state = 461 self.match(PyNestMLParser.LEFT_PAREN) self.state = 462 - localctx.inputPortName = 
self.match(PyNestMLParser.NAME) + localctx.inputPortVariable = self.variable() self.state = 467 self._errHandler.sync(self) _la = self._input.LA(1) diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 37e0fb95f..6a773b247 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -239,7 +239,7 @@ parser grammar PyNestMLParser; /** ASTOnReceiveBlock @attribute block implementation of the dynamics */ - onReceiveBlock: ON_RECEIVE_KEYWORD LEFT_PAREN inputPortName=NAME (COMMA constParameter)* RIGHT_PAREN COLON + onReceiveBlock: ON_RECEIVE_KEYWORD LEFT_PAREN inputPortVariable=variable (COMMA constParameter)* RIGHT_PAREN COLON block; /** ASTOnConditionBlock diff --git a/pynestml/meta_model/ast_input_block.py b/pynestml/meta_model/ast_input_block.py index c50d98741..9aa875ac5 100644 --- a/pynestml/meta_model/ast_input_block.py +++ b/pynestml/meta_model/ast_input_block.py @@ -77,11 +77,10 @@ def clone(self): return dup - def get_input_ports(self): + def get_input_ports(self) -> List[ASTInputPort]: """ Returns the list of input ports. :return: a list of input ports - :rtype: list(ASTInputPort) """ return self.input_definitions diff --git a/pynestml/meta_model/ast_model_body.py b/pynestml/meta_model/ast_model_body.py index 6e32561ce..bf14d7adb 100644 --- a/pynestml/meta_model/ast_model_body.py +++ b/pynestml/meta_model/ast_model_body.py @@ -151,8 +151,10 @@ def get_internals_blocks(self) -> List[ASTBlockWithVariables]: def get_on_receive_block(self, port_name) -> Optional[ASTOnReceiveBlock]: for elem in self.get_body_elements(): - if isinstance(elem, ASTOnReceiveBlock) and elem.port_name == port_name: + assert not "." 
in elem.input_port_variable.name # XXX REMOVE + if isinstance(elem, ASTOnReceiveBlock) and elem.input_port_variable.name == port_name: return elem + return None def get_on_receive_blocks(self) -> List[ASTOnReceiveBlock]: @@ -177,8 +179,10 @@ def get_on_receive_blocks(self) -> List[ASTOnReceiveBlock]: def get_on_condition_block(self, port_name) -> Optional[ASTOnConditionBlock]: for elem in self.get_body_elements(): - if isinstance(elem, ASTOnConditionBlock) and elem.port_name == port_name: + assert not "." in elem.input_port_variable.name + if isinstance(elem, ASTOnConditionBlock) and elem.input_port_variable.name == port_name: return elem + return None def get_on_condition_blocks(self) -> List[ASTOnConditionBlock]: diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index 4675bf572..f5ccc3583 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -115,8 +115,8 @@ def create_ast_namespace_decorator(cls, namespace=None, name=None, source_positi return ASTNamespaceDecorator(namespace, name, source_position=source_position) @classmethod - def create_ast_on_receive_block(cls, block=None, port_name=None, const_parameters=None, source_position=None): - return ASTOnReceiveBlock(block, port_name, const_parameters, source_position=source_position) + def create_ast_on_receive_block(cls, input_port_variable: ASTInputPort, block=None, const_parameters=None, source_position=None): + return ASTOnReceiveBlock(input_port_variable, block, const_parameters, source_position=source_position) @classmethod def create_ast_on_condition_block(cls, block=None, cond_expr=None, const_parameters=None, source_position=None): diff --git a/pynestml/meta_model/ast_on_receive_block.py b/pynestml/meta_model/ast_on_receive_block.py index d7ca37812..23cad130c 100644 --- a/pynestml/meta_model/ast_on_receive_block.py +++ b/pynestml/meta_model/ast_on_receive_block.py @@ -21,32 +21,29 @@ from __future__ import 
annotations -from typing import Any, List, Optional, Mapping +from typing import List, Optional, Mapping from pynestml.meta_model.ast_block import ASTBlock from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_variable import ASTVariable class ASTOnReceiveBlock(ASTNode): r""" - This class is used to store a declaration of an onReceive block, for example: - - .. code-block:: nestml - - onReceive(pre_spikes): - pre_tr += 1 - + This class is used to store a declaration of an onReceive block. """ - def __init__(self, block: ASTBlock, port_name: str, const_parameters: Optional[Mapping] = None, *args, **kwargs): + def __init__(self, input_port_variable: ASTVariable, block: ASTBlock, const_parameters: Optional[Mapping] = None, *args, **kwargs): r""" Standard constructor. :param block: a block of definitions. + :param input_port_variable: the variable referencing the corresponding input port. + :param const_parameters: constant parameters like priority. :param source_position: the position of this element in the source file. """ super(ASTOnReceiveBlock, self).__init__(*args, **kwargs) self.block = block - self.port_name = port_name + self.input_port_variable = input_port_variable self.const_parameters = const_parameters if self.const_parameters is None: self.const_parameters = {} @@ -58,7 +55,7 @@ def clone(self) -> ASTOnReceiveBlock: :return: new AST node instance """ dup = ASTOnReceiveBlock(block=self.block.clone(), - port_name=self.port_name, + input_port_variable=self.input_port_variable, const_parameters=self.const_parameters, # ASTNode common attributes: source_position=self.source_position, @@ -80,12 +77,12 @@ def get_block(self) -> ASTBlock: """ return self.block - def get_port_name(self) -> str: + def get_input_port_variable(self) -> ASTVariable: r""" - Returns the port name. - :return: the port name + Returns the port. 
+ :return: the port """ - return self.port_name + return self.input_port_variable def get_children(self) -> List[ASTNode]: r""" @@ -101,4 +98,4 @@ def equals(self, other: ASTNode) -> bool: if not isinstance(other, ASTOnReceiveBlock): return False - return self.get_block().equals(other.get_block()) and self.port_name == other.port_name + return self.get_block().equals(other.get_block()) and self.input_port.equals(other.input_port) diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index d37e97068..bf5d50d0e 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -343,12 +343,16 @@ def get_vectorized_variable(cls, ast, scope): return None @classmethod - def get_numeric_vector_size(cls, variable: ASTVariable) -> int: + def get_numeric_vector_size(cls, variable: VariableSymbol) -> int: """ Returns the numerical size of the vector by resolving any variable used as a size parameter in declaration :param variable: vector variable :return: the size of the vector as a numerical value """ + + if isinstance(variable, ASTVariable): + variable = variable.get_scope().resolve_to_symbol(variable.get_complete_name(), SymbolKind.VARIABLE) + vector_parameter = variable.get_vector_parameter() if vector_parameter.is_variable(): symbol = vector_parameter.get_scope().resolve_to_symbol(vector_parameter.get_variable().get_complete_name(), SymbolKind.VARIABLE) @@ -2467,26 +2471,6 @@ def get_unit_name(cls, variable: ASTVariable) -> str: return '' - @classmethod - def get_spike_input_ports_in_pairs(cls, neuron: ASTModel) -> Dict[int, List[VariableSymbol]]: - """ - Returns a list of spike input ports in pairs. - The result of this function is used to construct a vector that provides a mapping to the NESTML spike buffer index. The vector looks like below: - - .. code-block:: - [ AMPA_SPIKES, GABA_SPIKES, NMDA_SPIKES ] - - where the vector index is the NEST rport number. The value is a tuple containing the NESTML index(es) to the spike buffer. 
- """ - rport_to_port_map = {} - rport = 0 - - for port in neuron.get_spike_input_ports(): - rport_to_port_map[rport] = [port] - rport += cls.get_numeric_vector_size(port) if port.has_vector_parameter() else 1 - - return rport_to_port_map - @classmethod def assign_numeric_non_numeric_state_variables(cls, neuron, numeric_state_variable_names, numeric_update_expressions, update_expressions): r"""For each ASTVariable, set the ``node._is_numeric`` member to True or False based on whether this variable will be solved with the analytic or numeric solver. @@ -2558,7 +2542,7 @@ def get_on_receive_blocks_by_input_port_name(cls, model: ASTModel, port_name: st r"""Get the onReceive blocks in the model associated with a given input port.""" blks = [] for blk in model.get_on_receive_blocks(): - if blk.get_port_name() == port_name: + if blk.get_input_port_variable().get_name() == port_name: blks.append(blk) return blks @@ -2576,9 +2560,11 @@ def nestml_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, in rport = 1 # if there is more than one spiking input port, count begins at 1 for input_block in astnode.get_input_blocks(): for input_port in input_block.get_input_ports(): + if not input_port.is_spike(): + continue if input_port.get_size_parameter(): - for i in range(ASTUtils.get_numeric_vector_size(input_port)): + for i in range(int(str(input_port.size_parameter))): # XXX: should be able to convert size_parameter expression to an integer more generically (allowing for e.g. 
parameters) input_port_to_rport[input_port.name + "_VEC_IDX_" + str(i)] = rport rport += 1 else: @@ -2590,3 +2576,12 @@ def nestml_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, in @classmethod def nestml_input_port_to_nest_rport(cls, astnode: ASTModel, spike_in_port: ASTInputPort): return ASTUtils.nestml_input_port_to_nest_rport_dict(astnode)[spike_in_port] + + @classmethod + def port_name_printer(cls, variable: ASTVariable) -> str: + s = variable.get_name() + if variable.has_vector_parameter(): + s += "_VEC_IDX_" + s += str(variable.get_vector_parameter()) + + return s diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index 2ccdddedc..4b8ed8825 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -694,12 +694,12 @@ def visitStmt(self, ctx): return ASTNodeFactory.create_ast_stmt(small, compound, create_source_pos(ctx)) def visitOnReceiveBlock(self, ctx): + input_port_variable = self.visit(ctx.inputPortVariable) block = self.visit(ctx.block()) if ctx.block() is not None else None - port_name = ctx.inputPortName.text const_parameters = {} for el in ctx.constParameter(): const_parameters[el.name.text] = el.value.text - ret = ASTNodeFactory.create_ast_on_receive_block(block=block, port_name=port_name, const_parameters=const_parameters, source_position=create_source_pos(ctx)) + ret = ASTNodeFactory.create_ast_on_receive_block(block=block, input_port_variable=input_port_variable, const_parameters=const_parameters, source_position=create_source_pos(ctx)) update_node_comments(ret, self.__comments.visit(ctx)) return ret diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index 193cca517..f6bc07bcb 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -28,13 +28,10 @@ from pynestml.codegeneration.nest_tools import NESTTools from 
pynestml.frontend.pynestml_frontend import generate_nest_target -try: - import matplotlib - import matplotlib.pyplot as plt +import matplotlib +import matplotlib.pyplot as plt - TEST_PLOTS = True -except BaseException: - TEST_PLOTS = False +TEST_PLOTS = True def get_model_doc_title(model_fname: str): diff --git a/tests/nest_tests/resources/input_ports.nestml b/tests/nest_tests/resources/input_ports.nestml index cec915dff..5931e6257 100644 --- a/tests/nest_tests/resources/input_ports.nestml +++ b/tests/nest_tests/resources/input_ports.nestml @@ -36,15 +36,34 @@ model input_ports: my_spikes_ip pA = 0 pA input: - AMPA_spikes <- spike - GABA_spikes <- spike - NMDA_spikes <- spike - foo[2] <- spike - my_spikes[3] <- spike - my_spikes2[3] <- spike + AMPA_spikes <- spike(weight pA) + GABA_spikes <- spike(weight pA) + NMDA_spikes <- spike(weight pA) + foo[2] <- spike(weight pA) + my_spikes[3] <- spike(weight pA) + my_spikes2[3] <- spike(weight pA) I_stim pA <- continuous - update: - bar += (NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes) * (pA * s) - foo_spikes += (foo[0] + 5.5 * foo[1]) * (pA * s) - my_spikes_ip += (my_spikes[0] + my_spikes[1] - my_spikes2[1]) * (pA * s) + onReceive(NMDA_spikes): + bar += NMDA_spikes.weight + + onReceive(AMPA_spikes): + bar += 2 * AMPA_spikes.weight + + onReceive(GABA_spikes): + bar += GABA_spikes.weight + + onReceive(foo[0]): + foo_spikes += foo[0].weight + + onReceive(foo[1]): + foo_spikes += 5.5 * foo[1].weight + + onReceive(my_spikes[0]): + my_spikes_ip += my_spikes[0].weight + + onReceive(my_spikes[1]): + my_spikes_ip += my_spikes[0].weight + + onReceive(my_spikes2[1]): + my_spikes_ip -= my_spikes2[1].weight diff --git a/tests/nest_tests/test_input_ports.py b/tests/nest_tests/test_input_ports.py index 2604b4dc2..886c41c3a 100644 --- a/tests/nest_tests/test_input_ports.py +++ b/tests/nest_tests/test_input_ports.py @@ -60,24 +60,24 @@ def test_input_ports(self): [10., 44.], # NMDA_SPIKES [12., 42.], # AMPA_SPIKES [14., 40.], # 
GABA_SPIKES - [16., 38.], # FOO_0 - [18., 36.], # FOO_1 - [20., 34.], # MY_SPIKES_0 - [22., 32.], # MY_SPIKES_1 - [24., 30.], # MY_SPIKES2_1 + [16., 38.], # FOO_VEC_IDX_0 + [18., 36.], # FOO_VEC_IDX_1 + [20., 34.], # MY_SPIKES_VEC_IDX_0 + [22., 32.], # MY_SPIKES_VEC_IDX_1 + [24., 30.], # MY_SPIKES2_VEC_IDX_1 ] - sgs = nest.Create('spike_generator', len(spike_times)) + sgs = nest.Create("spike_generator", len(spike_times)) for i, sg in enumerate(sgs): sg.spike_times = spike_times[i] - nest.Connect(sgs[0], neuron, syn_spec={'receptor_type': receptor_types["NMDA_SPIKES"], 'weight': -1.0, 'delay': 1.0}) - nest.Connect(sgs[1], neuron, syn_spec={'receptor_type': receptor_types["AMPA_SPIKES"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[2], neuron, syn_spec={'receptor_type': receptor_types["GABA_SPIKES"], 'weight': -1.0, 'delay': 1.0}) - nest.Connect(sgs[3], neuron, syn_spec={'receptor_type': receptor_types["FOO_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[4], neuron, syn_spec={'receptor_type': receptor_types["FOO_1"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[5], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[6], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES_1"], 'weight': 2.0, 'delay': 1.0}) - nest.Connect(sgs[7], neuron, syn_spec={'receptor_type': receptor_types["MY_SPIKES2_1"], 'weight': -3.0, 'delay': 1.0}) + nest.Connect(sgs[0], neuron, syn_spec={"receptor_type": receptor_types["NMDA_SPIKES"], "weight": -1.0, "delay": 1.0}) + nest.Connect(sgs[1], neuron, syn_spec={"receptor_type": receptor_types["AMPA_SPIKES"], "weight": 1.0, "delay": 1.0}) + nest.Connect(sgs[2], neuron, syn_spec={"receptor_type": receptor_types["GABA_SPIKES"], "weight": -1.0, "delay": 1.0}) + nest.Connect(sgs[3], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) + nest.Connect(sgs[4], neuron, syn_spec={"receptor_type": 
receptor_types["FOO_VEC_IDX_1"], "weight": 1.0, "delay": 1.0}) + nest.Connect(sgs[5], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) + nest.Connect(sgs[6], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_1"], "weight": 2.0, "delay": 1.0}) + nest.Connect(sgs[7], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES2_VEC_IDX_1"], "weight": -3.0, "delay": 1.0}) mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "my_spikes_ip"]}) nest.Connect(mm, neuron) @@ -134,24 +134,24 @@ def test_input_ports_in_loop(self): [22., 27.], # SPIKE_BUF_3 [24., 25.], # SPIKE_BUF_4 ] - sgs = nest.Create('spike_generator', len(spike_times)) + sgs = nest.Create("spike_generator", len(spike_times)) for i, sg in enumerate(sgs): sg.spike_times = spike_times[i] nest.Connect(sgs[0], neuron, - syn_spec={'receptor_type': receptor_types["NMDA_SPIKES"], 'weight': 1.0, 'delay': 1.0}) + syn_spec={"receptor_type": receptor_types["NMDA_SPIKES"], "weight": 1.0, "delay": 1.0}) nest.Connect(sgs[1], neuron, - syn_spec={'receptor_type': receptor_types["FOO_0"], 'weight': 1.0, 'delay': 1.0}) + syn_spec={"receptor_type": receptor_types["FOO_0"], "weight": 1.0, "delay": 1.0}) nest.Connect(sgs[2], neuron, - syn_spec={'receptor_type': receptor_types["FOO_1"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[3], neuron, syn_spec={'receptor_type': receptor_types["SPIKE_BUF_0"], 'weight': 1.0, 'delay': 1.0}) - nest.Connect(sgs[4], neuron, syn_spec={'receptor_type': receptor_types["SPIKE_BUF_1"], 'weight': 1.0, 'delay': 1.0}) + syn_spec={"receptor_type": receptor_types["FOO_1"], "weight": 1.0, "delay": 1.0}) + nest.Connect(sgs[3], neuron, syn_spec={"receptor_type": receptor_types["SPIKE_BUF_0"], "weight": 1.0, "delay": 1.0}) + nest.Connect(sgs[4], neuron, syn_spec={"receptor_type": receptor_types["SPIKE_BUF_1"], "weight": 1.0, "delay": 1.0}) nest.Connect(sgs[5], neuron, - syn_spec={'receptor_type': 
receptor_types["SPIKE_BUF_2"], 'weight': 1.0, 'delay': 1.0}) + syn_spec={"receptor_type": receptor_types["SPIKE_BUF_2"], "weight": 1.0, "delay": 1.0}) nest.Connect(sgs[6], neuron, - syn_spec={'receptor_type': receptor_types["SPIKE_BUF_3"], 'weight': 2.0, 'delay': 1.0}) + syn_spec={"receptor_type": receptor_types["SPIKE_BUF_3"], "weight": 2.0, "delay": 1.0}) nest.Connect(sgs[7], neuron, - syn_spec={'receptor_type': receptor_types["SPIKE_BUF_4"], 'weight': 3.0, 'delay': 1.0}) + syn_spec={"receptor_type": receptor_types["SPIKE_BUF_4"], "weight": 3.0, "delay": 1.0}) mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "MY_SPIKES_IP_2", "MY_SPIKES_IP_3", "MY_SPIKES_IP_4", "MY_SPIKES_IP_5", "MY_SPIKES_IP_6"]}) nest.Connect(mm, neuron) From 33043794fd178659d2ec84ecb481b9c154b6aecb Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Mon, 4 Nov 2024 00:17:16 +0100 Subject: [PATCH 18/68] add attributes to spiking input ports --- .../point_neuron/common/NeuronClass.jinja2 | 37 +++++-------------- .../point_neuron/common/NeuronHeader.jinja2 | 4 +- 2 files changed, 10 insertions(+), 31 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 2b58273dd..79d4ecf11 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -106,12 +106,11 @@ nest::RecordablesMap<{{ neuronName }}> {{ neuronName }}::recordablesMap_; namespace nest { - // Override the create() method with one call to RecordablesMap::insert_() - // for each quantity to be recorded. + // Override the create() method with one call to RecordablesMap::insert_() for each quantity to be recorded. 
{%- if has_state_vectors %} -template <> void DynamicRecordablesMap<{{ neuronName }}>::create({{ neuronName }}& host) + template <> void DynamicRecordablesMap<{{ neuronName }}>::create({{ neuronName }}& host) {%- else %} -template <> void RecordablesMap<{{ neuronName }}>::create() + template <> void RecordablesMap<{{ neuronName }}>::create() {%- endif %} { @@ -125,7 +124,7 @@ template <> void RecordablesMap<{{ neuronName }}>::create() {%- else %} // add state variables to recordables map {%- for variable in recordable_state_variables %} - insert_({{names_namespace}}::_{{ variable.get_complete_name() }}, &{{ neuronName }}::get_{{ printer_no_origin.print(variable) }}); + insert_({{names_namespace}}::_{{ variable.get_complete_name() }}, &{{ neuronName }}::get_{{ printer_no_origin.print(variable) }}); {%- endfor %} {%- endif %} {%- endif %} @@ -134,7 +133,7 @@ template <> void RecordablesMap<{{ neuronName }}>::create() // add recordable inline expressions to recordables map {%- for variable_symbol in recordable_inline_expressions %} {%- set variable = utils.get_variable_by_name(astnode, variable_symbol.get_symbol_name()) %} - insert_({{ names_namespace }}::_{{ variable_symbol.get_symbol_name() }}, &{{ neuronName }}::{{ printer_no_origin.print(variable)[:-2] }}); + insert_({{ names_namespace }}::_{{ variable_symbol.get_symbol_name() }}, &{{ neuronName }}::{{ printer_no_origin.print(variable)[:-2] }}); {%- endfor %} {%- endif %} @@ -457,10 +456,6 @@ void {{ neuronName }}::init_buffers_() {%- if neuron.get_spike_input_ports() | length > 0 %} // spike input buffers - - - - {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} {%- if inputPortSymbol.has_vector_parameter() %} @@ -471,6 +466,7 @@ void {{ neuronName }}::init_buffers_() B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} {%- endif %} 
+ B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_.clear(); {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} @@ -479,23 +475,8 @@ void {{ neuronName }}::init_buffers_() B_.spike_input_{{ inputPort.name }}_spike_input_received_.clear(); {%- endif %} {%- endfor %} - - - - - - - - - - - - - - - - {% endif %} + {%- if neuron.get_continuous_input_ports() | length > 0 %} // continuous time input buffers @@ -799,9 +780,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); {%- endfor %} - B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_spike_input_received_.get_value(lag); + B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_spike_input_received_.get_value(lag); {%- endif %} {%- endfor %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 14ea459c6..883644481 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -801,8 +801,6 @@ private: nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} -{%- else %} - XXX: vector port, but no parameters {%- endif %} 
nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; @@ -885,7 +883,7 @@ private: // Getters/setters for spike input buffers // ------------------------------------------------------------------------- {% for inputPortSymbol in neuron.get_spike_input_ports() %} - // input port: {{ inputPortSymbol.name }} + // input port: {{ inputPortSymbol.name.split(".")[0] }} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} From 0e0718f6a37f6f6acc44612b1e0d1756bd8dd864 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 5 Nov 2024 15:49:17 +0100 Subject: [PATCH 19/68] add attributes to spiking input ports --- models/neurons/iaf_cond_alpha_neuron.nestml | 16 +- models/neurons/iaf_psc_exp_neuron.nestml | 9 +- models/neurons/wb_cond_multisyn_neuron.nestml | 4 +- pynestml/cocos/co_co_all_variables_defined.py | 2 +- ...receive_vectors_should_be_constant_size.py | 54 + ...only_in_equation_rhs_and_event_handlers.py | 25 +- .../co_co_vector_declaration_right_size.py | 1 + pynestml/cocos/co_cos_manager.py | 10 +- pynestml/codegeneration/autodoc_builder.py | 25 + .../codegeneration/nest_code_generator.py | 17 +- .../printers/cpp_variable_printer.py | 2 - .../point_neuron/common/NeuronClass.jinja2 | 15 +- .../point_neuron/common/NeuronHeader.jinja2 | 12 +- pynestml/generated/PyNestMLParser.py | 1296 +++++++++-------- pynestml/generated/PyNestMLParserVisitor.py | 5 + pynestml/grammars/PyNestMLParser.g4 | 51 +- pynestml/meta_model/ast_on_receive_block.py | 2 +- pynestml/meta_model/ast_variable.py | 5 +- pynestml/symbols/predefined_functions.py | 6 +- pynestml/symbols/variable_symbol.py | 1 + pynestml/utils/ast_utils.py | 21 +- pynestml/utils/messages.py | 7 +- 
pynestml/visitors/ast_builder_visitor.py | 2 + .../visitors/ast_function_call_visitor.py | 38 +- pynestml/visitors/ast_symbol_table_visitor.py | 37 +- pynestml/visitors/ast_variable_visitor.py | 7 - pynestml/visitors/ast_visitor.py | 2 + .../CoCoAssignmentToInlineExpression.nestml | 4 +- ...nReceiveVectorsShouldBeConstantSize.nestml | 43 + tests/nest_tests/nest_integration_test.py | 95 +- tests/nest_tests/resources/FIR_filter.nestml | 3 +- tests/nest_tests/resources/input_ports.nestml | 7 +- .../resources/input_ports_in_loop.nestml | 39 +- tests/nest_tests/test_input_ports.py | 134 +- tests/test_cocos.py | 7 + ...nReceiveVectorsShouldBeConstantSize.nestml | 40 + 36 files changed, 1198 insertions(+), 846 deletions(-) create mode 100644 pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py create mode 100644 tests/invalid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml create mode 100644 tests/valid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml diff --git a/models/neurons/iaf_cond_alpha_neuron.nestml b/models/neurons/iaf_cond_alpha_neuron.nestml index ced68d31d..97d53ed1d 100644 --- a/models/neurons/iaf_cond_alpha_neuron.nestml +++ b/models/neurons/iaf_cond_alpha_neuron.nestml @@ -41,14 +41,14 @@ model iaf_cond_alpha_neuron: refr_t ms = 0 ms # Refractory period timer equations: - kernel g_inh = (e/tau_syn_inh) * t * exp(-t/tau_syn_inh) - kernel g_exc = (e/tau_syn_exc) * t * exp(-t/tau_syn_exc) + kernel g_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) + kernel g_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) - inline I_leak pA = g_L * ( V_m - E_L ) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * (V_m - E_exc) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * (V_m - E_inh) + inline I_leak pA = g_L * (V_m - E_L) - V_m' = ( -I_leak - I_syn_exc - I_syn_inh + I_e + I_stim ) / 
C_m + V_m' = (-I_leak - I_syn_exc - I_syn_inh + I_e + I_stim) / C_m refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) parameters: @@ -68,8 +68,8 @@ model iaf_cond_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index 82695e7ea..e403607a2 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -80,7 +80,7 @@ model iaf_psc_exp_neuron: I_e pA = 0 pA input: - spike_in_port <- spike(weight real) + spike_in_port <- spike(weight pA) I_stim pA <- continuous output: @@ -88,11 +88,10 @@ model iaf_psc_exp_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - # weight is a real number, and here interpreted as 1 corresponding to 1 pA - if spike_in_port.weight > 0: - I_syn_exc += spike_in_port.weight * pA + if spike_in_port.weight > 0 pA: + I_syn_exc += spike_in_port.weight else: - I_syn_inh -= spike_in_port.weight * pA + I_syn_inh -= spike_in_port.weight update: if refr_t > 0 ms: diff --git a/models/neurons/wb_cond_multisyn_neuron.nestml b/models/neurons/wb_cond_multisyn_neuron.nestml index ff88dcb06..a9cf2ae67 100644 --- a/models/neurons/wb_cond_multisyn_neuron.nestml +++ b/models/neurons/wb_cond_multisyn_neuron.nestml @@ -45,7 +45,7 @@ model wb_cond_multisyn_neuron: g_GABAB$ real = GABA_BInitialValue equations: - recordable inline I_syn_ampa pA = -convolve(g_AMPA, AMPA) * nS * ( V_m - AMPA_E_rev ) + recordable inline I_syn_ampa pA = -convolve(g_AMPA, AMPA.weight) * ( V_m - AMPA_E_rev ) recordable inline I_syn_nmda pA = 
-convolve(g_NMDA, NMDA) * nS * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) recordable inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A) * nS * ( V_m - GABA_A_E_rev ) recordable inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B) * nS * ( V_m - GABA_B_E_rev ) @@ -128,7 +128,7 @@ model wb_cond_multisyn_neuron: beta_h_init real = 5.0 / (exp(-0.1 * (V_m / mV + 28.0)) + 1.0) input: - AMPA <- spike + AMPA <- spike(weight nS) NMDA <- spike GABA_A <- spike GABA_B <- spike diff --git a/pynestml/cocos/co_co_all_variables_defined.py b/pynestml/cocos/co_co_all_variables_defined.py index 3ec7f16c4..f7ac15413 100644 --- a/pynestml/cocos/co_co_all_variables_defined.py +++ b/pynestml/cocos/co_co_all_variables_defined.py @@ -85,7 +85,7 @@ def check_co_co(cls, node: ASTModel): continue # symbol is a type symbol code, message = Messages.get_variable_not_defined(var.get_complete_name()) - Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + Logger.log_message(code=code, message=message, error_position=var.get_source_position(), log_level=LoggingLevel.ERROR, node=node) continue diff --git a/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py b/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py new file mode 100644 index 000000000..11902fd59 --- /dev/null +++ b/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# +# co_co_on_receive_vectors_should_be_constant_size.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. 
+# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_declaration import ASTDeclaration +from pynestml.meta_model.ast_expression import ASTExpression +from pynestml.meta_model.ast_input_port import ASTInputPort +from pynestml.meta_model.ast_model import ASTModel +from pynestml.meta_model.ast_variable import ASTVariable +from pynestml.symbols.integer_type_symbol import IntegerTypeSymbol +from pynestml.symbols.symbol import SymbolKind +from pynestml.utils.logger import LoggingLevel, Logger +from pynestml.utils.messages import Messages +from pynestml.visitors.ast_visitor import ASTVisitor + + +class CoCoOnReceiveVectorsShouldBeConstantSize(CoCo): + r""" + This CoCo is used to test the usage of onReceive blocks for vector ports of variable length. 
+ """ + + @classmethod + def check_co_co(cls, node: ASTModel): + visitor = CoCoOnReceiveVectorsShouldBeConstantSizeVisitor() + node.accept(visitor) + + +class CoCoOnReceiveVectorsShouldBeConstantSizeVisitor(ASTVisitor): + def visit_input_port(self, node: ASTInputPort): + if node.has_size_parameter(): + try: + int(node.get_size_parameter()) + except ValueError: + # exception converting size parameter to int; hence, not allowed + code, message = Messages.get_vector_input_ports_should_be_of_constant_size() + Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, code=code, message=message) diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index 2ba9ac436..d0d412b7d 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -58,6 +58,10 @@ def visit_variable(self, node: ASTVariable): # only check spiking input ports if in_port is not None and in_port.is_spike(): + if isinstance(node.get_parent(), ASTOnReceiveBlock) and node.get_parent().get_input_port_variable() == node: + # input port appears inside the declaration of an onReceive block; everything is OK + return + if in_port.parameters and not node.attribute: # input port has parameters (for instance, ``x`` in ``foo <- spike(x real)`` but the variable reference is missing an attribute (``foo`` instead of ``foo.x``) code, message = Messages.get_spike_input_port_attribute_missing(node.get_name()) @@ -69,8 +73,18 @@ def visit_variable(self, node: ASTVariable): _node = _node.get_parent() if isinstance(_node, ASTOnReceiveBlock) and _node.input_port_variable.name == in_port.name: - # spike input port was used inside an ``onReceive`` block for this spike port; everything is OK - return + if not 
node.get_vector_parameter(): + # non-vector spike input port was used inside an ``onReceive`` block for this spike port; everything is OK + return + + try: + if int(str(node.get_vector_parameter())) == int(str(_node.get_input_port_variable().get_vector_parameter())): + # vector spike input port was used inside an ``onReceive`` block for this spike port and numerical index is correct; everything is OK + return + except ValueError: + # in case vector parameter was not an integer numeral + return # XXX: DO MORE CHECKS! + pass if isinstance(_node, ASTOdeEquation): # spike input port was used inside the rhs of an equation; everything is OK @@ -82,7 +96,12 @@ def visit_variable(self, node: ASTVariable): if isinstance(_node, ASTModel): # we reached the top-level block without running into an ``update`` block on the way --> incorrect usage of the function - code, message = Messages.get_spike_input_port_appears_outside_equation_rhs_and_event_handler(node.get_name()) + + node_name = node.get_name() + if node.get_vector_parameter(): + node_name += "[" + str(node.get_vector_parameter()) + "]" + + code, message = Messages.get_spike_input_port_appears_outside_equation_rhs_and_event_handler(node_name) Logger.log_message(code=code, message=message, error_position=node.get_source_position(), log_level=LoggingLevel.ERROR) diff --git a/pynestml/cocos/co_co_vector_declaration_right_size.py b/pynestml/cocos/co_co_vector_declaration_right_size.py index 6597f481f..fcb8536eb 100644 --- a/pynestml/cocos/co_co_vector_declaration_right_size.py +++ b/pynestml/cocos/co_co_vector_declaration_right_size.py @@ -18,6 +18,7 @@ # # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
+ from pynestml.cocos.co_co import CoCo from pynestml.meta_model.ast_declaration import ASTDeclaration from pynestml.meta_model.ast_expression import ASTExpression diff --git a/pynestml/cocos/co_cos_manager.py b/pynestml/cocos/co_cos_manager.py index 80d9652b4..908030af6 100644 --- a/pynestml/cocos/co_cos_manager.py +++ b/pynestml/cocos/co_cos_manager.py @@ -54,6 +54,7 @@ from pynestml.cocos.co_co_no_duplicate_compilation_unit_names import CoCoNoDuplicateCompilationUnitNames from pynestml.cocos.co_co_odes_have_consistent_units import CoCoOdesHaveConsistentUnits from pynestml.cocos.co_co_ode_functions_have_consistent_units import CoCoOdeFunctionsHaveConsistentUnits +from pynestml.cocos.co_co_on_receive_vectors_should_be_constant_size import CoCoOnReceiveVectorsShouldBeConstantSize from pynestml.cocos.co_co_output_port_defined_if_emit_call import CoCoOutputPortDefinedIfEmitCall from pynestml.cocos.co_co_parameters_assigned_only_in_parameter_block import CoCoParametersAssignedOnlyInParameterBlock from pynestml.cocos.co_co_priorities_correctly_specified import CoCoPrioritiesCorrectlySpecified @@ -420,6 +421,13 @@ def check_input_port_size_type(cls, model: ASTModel): """ CoCoVectorInputPortsCorrectSizeType.check_co_co(model) + @classmethod + def check_on_receive_vectors_should_be_constant_size(cls, model: ASTModel): + """ + :param model: a single model object + """ + CoCoOnReceiveVectorsShouldBeConstantSize.check_co_co(model) + @classmethod def check_co_co_nest_random_functions_legally_used(cls, model: ASTModel): """ @@ -476,12 +484,12 @@ def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): cls.check_ode_functions_have_consistent_units(model) cls.check_correct_usage_of_kernels(model) cls.check_resolution_func_used(model) # ``__h = resolution()`` is added after transformations; put this check inside the ``if`` to make sure it's not always triggered + cls.check_expression_correct(model) if FrontendConfiguration.get_target_platform().upper() != 
'NEST_COMPARTMENTAL': cls.check_integrate_odes_called_if_equations_defined(model) cls.check_invariant_type_correct(model) cls.check_vector_in_non_vector_declaration_detected(model) cls.check_convolve_has_correct_parameter(model) - cls.check_expression_correct(model) cls.check_simple_delta_function(model) cls.check_function_argument_template_types_consistent(model) cls.check_vector_parameter_declaration(model) diff --git a/pynestml/codegeneration/autodoc_builder.py b/pynestml/codegeneration/autodoc_builder.py index e6b2840d3..423db3bdc 100644 --- a/pynestml/codegeneration/autodoc_builder.py +++ b/pynestml/codegeneration/autodoc_builder.py @@ -234,6 +234,31 @@ def _test_model_psp(self, model_name, max_weight: float = 10., model_opts=None, spikegenerator = nest.Create("spike_generator", params={"spike_times": spike_times, "spike_weights": spike_weights}) + + nest.Connect(spikegenerator, neuron1, syn_spec=syn_spec) + if len(neuron2.get("receptor_types")) > 1: + # this NESTML neuron is written as having separate input ports for excitatory and inhibitory spikes + spikegenerator_exc = nest.Create("spike_generator", + params={"spike_times": spike_times, + "spike_weights": spike_weights}) + spikegenerator_inh = nest.Create("spike_generator", + params={"spike_times": spike_times, + "spike_weights": spike_weights}) + nest.Connect(spikegenerator_exc, neuron2, syn_spec=syn_spec | {"receptor_type": neuron2.get("receptor_type")["EXC_SPIKES"]}) + spikegenerator_inh = nest.Create("spike_generator", + params={"spike_times": spike_times, + "spike_weights": spike_weights}) + nest.Connect(spikegenerator_inh, neuron2, syn_spec=syn_spec | {"receptor_type": neuron2.get("receptor_type")["INH_SPIKES"]}) + else: + # this NESTML neuron is written as having one input port for excitatory and inhibitory spikes (with sign of the weight telling the difference) + nest.Connect(spikegenerator, neuron2, syn_spec=syn_spec) + + + + + + + nest.Connect(spikegenerator, neuron) spike_recorder = 
nest.Create("spike_recorder") diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index c50d75b21..0a8c5b17e 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -178,6 +178,7 @@ def run_nest_target_specific_cocos(self, neurons: Sequence[ASTModel], synapses: for model in neurons + synapses: # Check if the random number functions are used in the right blocks CoCosManager.check_co_co_nest_random_functions_legally_used(model) + CoCosManager.check_on_receive_vectors_should_be_constant_size(model) if Logger.has_errors(model.name): raise Exception("Error(s) occurred during code generation") @@ -956,7 +957,7 @@ def get_spike_update_expressions(self, neuron: ASTModel, kernel_buffers, solver_ spike_input_port_name = spike_input_port.get_variable().get_name() if not spike_input_port_name in spike_updates.keys(): - spike_updates[str(spike_input_port)] = [] + spike_updates[spike_input_port_name] = [] if "_is_post_port" in dir(spike_input_port.get_variable()) \ and spike_input_port.get_variable()._is_post_port: @@ -964,13 +965,13 @@ def get_spike_update_expressions(self, neuron: ASTModel, kernel_buffers, solver_ orig_port_name = spike_input_port_name[:spike_input_port_name.index("__for_")] buffer_type = neuron.paired_synapse.get_scope().resolve_to_symbol(orig_port_name, SymbolKind.VARIABLE).get_type_symbol() else: - buffer_type = neuron.get_scope().resolve_to_symbol(spike_input_port_name, SymbolKind.VARIABLE).get_type_symbol() + buffer_type = neuron.get_scope().resolve_to_symbol(spike_input_port_name + "." 
+ str(spike_input_port.get_variable().get_attribute()), SymbolKind.VARIABLE).get_type_symbol() assert not buffer_type is None for kernel_var in kernel.get_variables(): for var_order in range(ASTUtils.get_kernel_var_order_from_ode_toolbox_result(kernel_var.get_name(), solver_dicts)): - kernel_spike_buf_name = ASTUtils.construct_kernel_X_spike_buf_name(kernel_var.get_name(), spike_input_port, var_order) + kernel_spike_buf_name = ASTUtils.construct_kernel_X_spike_buf_name(kernel_var.get_name(), spike_input_port, var_order, attribute=spike_input_port.get_variable().get_attribute()) expr = ASTUtils.get_initial_value_from_ode_toolbox_result(kernel_spike_buf_name, solver_dicts) assert expr is not None, "Initial value not found for kernel " + kernel_var expr = str(expr) @@ -993,14 +994,18 @@ def get_spike_update_expressions(self, neuron: ASTModel, kernel_buffers, solver_ ast_assignment.update_scope(neuron.get_scope()) ast_assignment.accept(ASTSymbolTableVisitor()) - if neuron.get_scope().resolve_to_symbol(spike_input_port_name, SymbolKind.VARIABLE) is None: + spike_input_port_name_with_attr = spike_input_port_name + if spike_input_port.get_variable().get_attribute(): + spike_input_port_name_with_attr += "." 
+ str(spike_input_port.get_variable().get_attribute()) + + if neuron.get_scope().resolve_to_symbol(spike_input_port_name_with_attr, SymbolKind.VARIABLE) is None: # this case covers variables that were moved from synapse to the neuron post_spike_updates[kernel_var.get_name()] = ast_assignment elif "_is_post_port" in dir(spike_input_port.get_variable()) and spike_input_port.get_variable()._is_post_port: Logger.log_message(None, None, "Adding post assignment string: " + str(ast_assignment), None, LoggingLevel.INFO) - spike_updates[str(spike_input_port)].append(ast_assignment) + spike_updates[spike_input_port_name].append(ast_assignment) else: - spike_updates[str(spike_input_port)].append(ast_assignment) + spike_updates[spike_input_port_name].append(ast_assignment) for k, factor in delta_factors.items(): var = k[0] diff --git a/pynestml/codegeneration/printers/cpp_variable_printer.py b/pynestml/codegeneration/printers/cpp_variable_printer.py index 11bbbb254..35186ec88 100644 --- a/pynestml/codegeneration/printers/cpp_variable_printer.py +++ b/pynestml/codegeneration/printers/cpp_variable_printer.py @@ -34,8 +34,6 @@ def _print_cpp_name(cls, variable_name: str) -> str: :param variable_name: a single name. 
:return: a string representation """ - if "'" in variable_name: - import pdb;pdb.set_trace() differential_order = variable_name.count("\"") if differential_order > 0: return variable_name.replace(".", "__DOT__").replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 79d4ecf11..ae2ef176b 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -454,7 +454,7 @@ void {{ neuronName }}::init_buffers_() {%- endif %} {%- if neuron.get_spike_input_ports() | length > 0 %} - // spike input buffers + // spike input buffers -- note that .clear() includes a resize {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} @@ -777,6 +777,7 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_value(lag); {%- endfor %} {%- endif %} + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_.get_value(lag); {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} @@ -1123,7 +1124,7 @@ void {{ neuronName }}::handle(nest::DataLoggingRequest& e) void {{ neuronName }}::handle(nest::SpikeEvent &e) { #ifdef DEBUG - std::cout << "[neuron " << this << "] {{ neuronName }}::handle(SpikeEvent)" << std::endl; + std::cout << "[neuron " << this << "] {{ neuronName }}::handle(SpikeEvent) on rport " << e.get_rport() << std::endl; #endif 
assert(e.get_delay_steps() > 0); @@ -1177,7 +1178,7 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) {%- endif %} // ------------------------------------------------------------------------- -// Methods corresponding to event handlers +// Methods corresponding to onReceive blocks // ------------------------------------------------------------------------- {%- for blk in neuron.get_on_receive_blocks() %} @@ -1187,6 +1188,14 @@ void { const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function +{%- if blk.get_input_port_variable().has_vector_parameter() %} +boop +{{ blk.get_input_port_variable().get_vector_parameter() }} +{% if utils.is_parameter(blk.get_input_port_variable().get_vector_parameter()) %} +is_parameter +{%- endif %} +{%- endif %} + {%- filter indent(2, True) -%} {%- include "directives_cpp/Block.jinja2" %} {%- endfilter %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 883644481..e7c35a877 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -1031,7 +1031,7 @@ inline nest_port_t {{neuronName}}::send_test_event(nest::Node& target, nest_rpor { // You should usually not change the code in this function. // It confirms that the target of connection @c c accepts @c {{ output_event.OutputEvent() }} on - // the given @c receptor_type. + // the given receptor_type. 
{{ output_event.OutputEvent() }} e; e.set_sender(*this); return target.handles_test_event(e, receptor_type); @@ -1040,16 +1040,16 @@ inline nest_port_t {{neuronName}}::send_test_event(nest::Node& target, nest_rpor inline nest_port_t {{neuronName}}::handles_test_event(nest::SpikeEvent&, nest_port_t receptor_type) { -{%- if (neuron.get_multiple_receptors())|length > 1 or neuron.is_multisynapse_spikes() %} +{%- if (neuron.get_multiple_receptors()) | length > 1 or neuron.is_multisynapse_spikes() %} if ( receptor_type < 1 or receptor_type >= MAX_SPIKE_RECEPTOR ) { throw nest::UnknownReceptorType( receptor_type, get_name() ); } - return receptor_type - 1; + return receptor_type; {%- else %} // You should usually not change the code in this function. // It confirms to the connection management system that we are able - // to handle @c SpikeEvent on port 0. You need to extend the function + // to handle a SpikeEvent on port 0. You need to extend the function // if you want to differentiate between input ports. if (receptor_type != 0) { @@ -1065,7 +1065,7 @@ inline nest_port_t {{neuronName}}::handles_test_event(nest::CurrentEvent&, nest_ { // You should usually not change the code in this function. // It confirms to the connection management system that we are able - // to handle @c CurrentEvent on port 0. You need to extend the function + // to handle a CurrentEvent on port 0. You need to extend the function // if you want to differentiate between input ports. if (receptor_type != 0) { @@ -1079,7 +1079,7 @@ inline nest_port_t {{neuronName}}::handles_test_event(nest::DataLoggingRequest& { // You should usually not change the code in this function. // It confirms to the connection management system that we are able - // to handle @c DataLoggingRequest on port 0. + // to handle a DataLoggingRequest on port 0. // The function also tells the built-in UniversalDataLogger that this node // is recorded from and that it thus needs to collect data during simulation. 
if (receptor_type != 0) diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index c8fa09514..958cf16c1 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,235 +10,237 @@ def serializedATN(): return [ - 4,1,89,618,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,89,624,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, 2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,31,7,31,2,32,7,32,2,33, 7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,7,38,2,39,7,39, - 2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7,45,1,0, - 1,0,1,0,1,0,1,0,1,0,3,0,99,8,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, - 1,3,1,110,8,1,1,1,1,1,1,1,3,1,115,8,1,1,1,1,1,1,1,1,1,5,1,121,8, - 1,10,1,12,1,124,9,1,1,2,3,2,127,8,2,1,2,1,2,1,3,1,3,1,3,1,3,1,3, - 1,3,1,3,1,3,1,3,1,3,1,3,3,3,142,8,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3, - 3,3,151,8,3,1,3,1,3,1,3,1,3,3,3,157,8,3,1,3,1,3,1,3,1,3,1,3,1,3, - 1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,5,3,178,8,3, - 10,3,12,3,181,9,3,1,4,1,4,1,4,1,4,3,4,187,8,4,1,4,1,4,1,4,3,4,192, - 8,4,1,5,1,5,1,5,3,5,197,8,5,1,6,1,6,1,6,1,6,1,6,3,6,204,8,6,1,7, - 1,7,1,7,1,7,1,7,1,7,1,7,3,7,213,8,7,1,8,1,8,3,8,217,8,8,1,9,1,9, - 1,9,1,9,1,9,3,9,224,8,9,1,9,5,9,227,8,9,10,9,12,9,230,9,9,1,9,1, - 9,3,9,234,8,9,1,10,1,10,1,10,1,10,1,10,5,10,241,8,10,10,10,12,10, - 244,9,10,3,10,246,8,10,1,10,1,10,1,11,3,11,251,8,11,1,11,1,11,1, - 11,1,11,1,11,1,11,3,11,259,8,11,1,11,5,11,262,8,11,10,11,12,11,265, - 9,11,1,11,1,11,1,12,1,12,1,12,1,12,3,12,273,8,12,1,12,5,12,276,8, - 12,10,12,12,12,279,9,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13, - 1,13,1,13,1,13,5,13,292,8,13,10,13,12,13,295,9,13,1,13,3,13,298, - 8,13,1,13,1,13,1,14,1,14,1,14,4,14,305,8,14,11,14,12,14,306,1,14, - 
1,14,1,15,1,15,3,15,313,8,15,1,16,1,16,1,16,3,16,318,8,16,1,17,1, - 17,1,17,1,17,3,17,324,8,17,1,17,1,17,1,18,1,18,1,18,1,18,1,18,1, - 18,3,18,334,8,18,1,18,1,18,1,19,3,19,339,8,19,1,19,3,19,342,8,19, - 1,19,1,19,1,19,5,19,347,8,19,10,19,12,19,350,9,19,1,19,1,19,1,19, - 3,19,355,8,19,1,19,1,19,1,19,1,19,3,19,361,8,19,1,19,5,19,364,8, - 19,10,19,12,19,367,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1,21,1,21, - 1,21,1,21,3,21,379,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3,24,387,8, - 24,1,25,1,25,5,25,391,8,25,10,25,12,25,394,9,25,1,25,3,25,397,8, - 25,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1, - 28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,421,8,29,1, - 29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,434,8, - 31,11,31,12,31,435,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33,1,33, - 1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,455,8,33,11,33,12,33, - 456,1,33,1,33,1,34,1,34,1,34,1,34,1,34,5,34,466,8,34,10,34,12,34, - 469,9,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,480,8, - 35,10,35,12,35,483,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36, - 1,36,4,36,494,8,36,11,36,12,36,495,1,36,1,36,1,37,1,37,1,37,1,37, - 1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,511,8,38,11,38,12,38,512, - 1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,4,39,523,8,39,11,39,12,39, - 524,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3,40,534,8,40,1,40,1,40,1, - 40,1,40,1,40,1,40,5,40,542,8,40,10,40,12,40,545,9,40,3,40,547,8, - 40,1,40,3,40,550,8,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,559, - 8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42,1,42,1,42,1,42,3,42, - 572,8,42,1,42,1,42,1,42,1,42,5,42,578,8,42,10,42,12,42,581,9,42, - 3,42,583,8,42,1,42,3,42,586,8,42,1,42,1,42,1,42,1,43,1,43,1,43,1, - 43,1,43,1,43,5,43,597,8,43,10,43,12,43,600,9,43,3,43,602,8,43,1, - 43,1,43,3,43,606,8,43,1,43,1,43,1,43,1,44,1,44,1,44,1,45,1,45,1, - 45,1,45,1,45,0,2,2,6,46,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, - 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, - 
74,76,78,80,82,84,86,88,90,0,4,2,0,49,49,73,73,1,0,88,89,1,0,32, - 34,3,0,25,25,85,86,88,89,679,0,98,1,0,0,0,2,109,1,0,0,0,4,126,1, - 0,0,0,6,141,1,0,0,0,8,191,1,0,0,0,10,196,1,0,0,0,12,203,1,0,0,0, - 14,212,1,0,0,0,16,216,1,0,0,0,18,218,1,0,0,0,20,235,1,0,0,0,22,250, - 1,0,0,0,24,268,1,0,0,0,26,282,1,0,0,0,28,301,1,0,0,0,30,312,1,0, - 0,0,32,317,1,0,0,0,34,323,1,0,0,0,36,327,1,0,0,0,38,338,1,0,0,0, - 40,368,1,0,0,0,42,378,1,0,0,0,44,380,1,0,0,0,46,382,1,0,0,0,48,384, - 1,0,0,0,50,388,1,0,0,0,52,398,1,0,0,0,54,403,1,0,0,0,56,408,1,0, - 0,0,58,412,1,0,0,0,60,426,1,0,0,0,62,433,1,0,0,0,64,439,1,0,0,0, - 66,443,1,0,0,0,68,460,1,0,0,0,70,474,1,0,0,0,72,488,1,0,0,0,74,499, - 1,0,0,0,76,503,1,0,0,0,78,516,1,0,0,0,80,528,1,0,0,0,82,553,1,0, - 0,0,84,565,1,0,0,0,86,590,1,0,0,0,88,610,1,0,0,0,90,613,1,0,0,0, - 92,99,5,10,0,0,93,99,5,11,0,0,94,99,5,12,0,0,95,99,5,13,0,0,96,99, - 5,14,0,0,97,99,3,2,1,0,98,92,1,0,0,0,98,93,1,0,0,0,98,94,1,0,0,0, - 98,95,1,0,0,0,98,96,1,0,0,0,98,97,1,0,0,0,99,1,1,0,0,0,100,101,6, - 1,-1,0,101,102,5,47,0,0,102,103,3,2,1,0,103,104,5,48,0,0,104,110, - 1,0,0,0,105,106,5,88,0,0,106,107,5,77,0,0,107,110,3,2,1,2,108,110, - 5,87,0,0,109,100,1,0,0,0,109,105,1,0,0,0,109,108,1,0,0,0,110,122, - 1,0,0,0,111,114,10,3,0,0,112,115,5,75,0,0,113,115,5,77,0,0,114,112, - 1,0,0,0,114,113,1,0,0,0,115,116,1,0,0,0,116,121,3,2,1,4,117,118, - 10,4,0,0,118,119,5,76,0,0,119,121,3,4,2,0,120,111,1,0,0,0,120,117, - 1,0,0,0,121,124,1,0,0,0,122,120,1,0,0,0,122,123,1,0,0,0,123,3,1, - 0,0,0,124,122,1,0,0,0,125,127,7,0,0,0,126,125,1,0,0,0,126,127,1, - 0,0,0,127,128,1,0,0,0,128,129,5,88,0,0,129,5,1,0,0,0,130,131,6,3, - -1,0,131,132,5,47,0,0,132,133,3,6,3,0,133,134,5,48,0,0,134,142,1, - 0,0,0,135,136,3,10,5,0,136,137,3,6,3,9,137,142,1,0,0,0,138,139,5, - 28,0,0,139,142,3,6,3,4,140,142,3,8,4,0,141,130,1,0,0,0,141,135,1, - 0,0,0,141,138,1,0,0,0,141,140,1,0,0,0,142,179,1,0,0,0,143,144,10, - 10,0,0,144,145,5,76,0,0,145,178,3,6,3,10,146,150,10,8,0,0,147,151, - 
5,75,0,0,148,151,5,77,0,0,149,151,5,78,0,0,150,147,1,0,0,0,150,148, - 1,0,0,0,150,149,1,0,0,0,151,152,1,0,0,0,152,178,3,6,3,9,153,156, - 10,7,0,0,154,157,5,49,0,0,155,157,5,73,0,0,156,154,1,0,0,0,156,155, - 1,0,0,0,157,158,1,0,0,0,158,178,3,6,3,8,159,160,10,6,0,0,160,161, - 3,12,6,0,161,162,3,6,3,7,162,178,1,0,0,0,163,164,10,5,0,0,164,165, - 3,14,7,0,165,166,3,6,3,6,166,178,1,0,0,0,167,168,10,3,0,0,168,169, - 3,16,8,0,169,170,3,6,3,4,170,178,1,0,0,0,171,172,10,2,0,0,172,173, - 5,79,0,0,173,174,3,6,3,0,174,175,5,80,0,0,175,176,3,6,3,3,176,178, - 1,0,0,0,177,143,1,0,0,0,177,146,1,0,0,0,177,153,1,0,0,0,177,159, - 1,0,0,0,177,163,1,0,0,0,177,167,1,0,0,0,177,171,1,0,0,0,178,181, - 1,0,0,0,179,177,1,0,0,0,179,180,1,0,0,0,180,7,1,0,0,0,181,179,1, - 0,0,0,182,192,3,20,10,0,183,192,5,85,0,0,184,186,7,1,0,0,185,187, - 3,18,9,0,186,185,1,0,0,0,186,187,1,0,0,0,187,192,1,0,0,0,188,192, - 5,86,0,0,189,192,5,25,0,0,190,192,3,18,9,0,191,182,1,0,0,0,191,183, - 1,0,0,0,191,184,1,0,0,0,191,188,1,0,0,0,191,189,1,0,0,0,191,190, - 1,0,0,0,192,9,1,0,0,0,193,197,5,49,0,0,194,197,5,73,0,0,195,197, - 5,50,0,0,196,193,1,0,0,0,196,194,1,0,0,0,196,195,1,0,0,0,197,11, - 1,0,0,0,198,204,5,53,0,0,199,204,5,52,0,0,200,204,5,51,0,0,201,204, - 5,59,0,0,202,204,5,60,0,0,203,198,1,0,0,0,203,199,1,0,0,0,203,200, - 1,0,0,0,203,201,1,0,0,0,203,202,1,0,0,0,204,13,1,0,0,0,205,213,5, - 61,0,0,206,213,5,63,0,0,207,213,5,68,0,0,208,213,5,69,0,0,209,213, - 5,70,0,0,210,213,5,71,0,0,211,213,5,62,0,0,212,205,1,0,0,0,212,206, - 1,0,0,0,212,207,1,0,0,0,212,208,1,0,0,0,212,209,1,0,0,0,212,210, - 1,0,0,0,212,211,1,0,0,0,213,15,1,0,0,0,214,217,5,26,0,0,215,217, - 5,27,0,0,216,214,1,0,0,0,216,215,1,0,0,0,217,17,1,0,0,0,218,223, - 5,87,0,0,219,220,5,54,0,0,220,221,3,6,3,0,221,222,5,56,0,0,222,224, - 1,0,0,0,223,219,1,0,0,0,223,224,1,0,0,0,224,228,1,0,0,0,225,227, - 5,83,0,0,226,225,1,0,0,0,227,230,1,0,0,0,228,226,1,0,0,0,228,229, - 1,0,0,0,229,233,1,0,0,0,230,228,1,0,0,0,231,232,5,84,0,0,232,234, - 
3,18,9,0,233,231,1,0,0,0,233,234,1,0,0,0,234,19,1,0,0,0,235,236, - 5,87,0,0,236,245,5,47,0,0,237,242,3,6,3,0,238,239,5,72,0,0,239,241, - 3,6,3,0,240,238,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,242,243, - 1,0,0,0,243,246,1,0,0,0,244,242,1,0,0,0,245,237,1,0,0,0,245,246, - 1,0,0,0,246,247,1,0,0,0,247,248,5,48,0,0,248,21,1,0,0,0,249,251, - 5,29,0,0,250,249,1,0,0,0,250,251,1,0,0,0,251,252,1,0,0,0,252,253, - 5,16,0,0,253,254,5,87,0,0,254,255,3,0,0,0,255,256,5,74,0,0,256,258, - 3,6,3,0,257,259,5,82,0,0,258,257,1,0,0,0,258,259,1,0,0,0,259,263, - 1,0,0,0,260,262,3,42,21,0,261,260,1,0,0,0,262,265,1,0,0,0,263,261, - 1,0,0,0,263,264,1,0,0,0,264,266,1,0,0,0,265,263,1,0,0,0,266,267, - 5,9,0,0,267,23,1,0,0,0,268,269,3,18,9,0,269,270,5,74,0,0,270,272, - 3,6,3,0,271,273,5,82,0,0,272,271,1,0,0,0,272,273,1,0,0,0,273,277, - 1,0,0,0,274,276,3,42,21,0,275,274,1,0,0,0,276,279,1,0,0,0,277,275, - 1,0,0,0,277,278,1,0,0,0,278,280,1,0,0,0,279,277,1,0,0,0,280,281, - 5,9,0,0,281,25,1,0,0,0,282,283,5,30,0,0,283,284,3,18,9,0,284,285, - 5,74,0,0,285,293,3,6,3,0,286,287,5,4,0,0,287,288,3,18,9,0,288,289, - 5,74,0,0,289,290,3,6,3,0,290,292,1,0,0,0,291,286,1,0,0,0,292,295, - 1,0,0,0,293,291,1,0,0,0,293,294,1,0,0,0,294,297,1,0,0,0,295,293, - 1,0,0,0,296,298,5,82,0,0,297,296,1,0,0,0,297,298,1,0,0,0,298,299, - 1,0,0,0,299,300,5,9,0,0,300,27,1,0,0,0,301,302,5,9,0,0,302,304,5, - 1,0,0,303,305,3,30,15,0,304,303,1,0,0,0,305,306,1,0,0,0,306,304, - 1,0,0,0,306,307,1,0,0,0,307,308,1,0,0,0,308,309,5,2,0,0,309,29,1, - 0,0,0,310,313,3,34,17,0,311,313,3,32,16,0,312,310,1,0,0,0,312,311, - 1,0,0,0,313,31,1,0,0,0,314,318,3,50,25,0,315,318,3,58,29,0,316,318, - 3,60,30,0,317,314,1,0,0,0,317,315,1,0,0,0,317,316,1,0,0,0,318,33, - 1,0,0,0,319,324,3,36,18,0,320,324,3,20,10,0,321,324,3,38,19,0,322, - 324,3,48,24,0,323,319,1,0,0,0,323,320,1,0,0,0,323,321,1,0,0,0,323, - 322,1,0,0,0,324,325,1,0,0,0,325,326,5,9,0,0,326,35,1,0,0,0,327,333, - 3,18,9,0,328,334,5,74,0,0,329,334,5,64,0,0,330,334,5,65,0,0,331, - 
334,5,66,0,0,332,334,5,67,0,0,333,328,1,0,0,0,333,329,1,0,0,0,333, - 330,1,0,0,0,333,331,1,0,0,0,333,332,1,0,0,0,334,335,1,0,0,0,335, - 336,3,6,3,0,336,37,1,0,0,0,337,339,5,29,0,0,338,337,1,0,0,0,338, - 339,1,0,0,0,339,341,1,0,0,0,340,342,5,16,0,0,341,340,1,0,0,0,341, - 342,1,0,0,0,342,343,1,0,0,0,343,348,3,18,9,0,344,345,5,72,0,0,345, - 347,3,18,9,0,346,344,1,0,0,0,347,350,1,0,0,0,348,346,1,0,0,0,348, - 349,1,0,0,0,349,351,1,0,0,0,350,348,1,0,0,0,351,354,3,0,0,0,352, - 353,5,74,0,0,353,355,3,6,3,0,354,352,1,0,0,0,354,355,1,0,0,0,355, - 360,1,0,0,0,356,357,5,57,0,0,357,358,3,6,3,0,358,359,5,58,0,0,359, - 361,1,0,0,0,360,356,1,0,0,0,360,361,1,0,0,0,361,365,1,0,0,0,362, - 364,3,42,21,0,363,362,1,0,0,0,364,367,1,0,0,0,365,363,1,0,0,0,365, - 366,1,0,0,0,366,39,1,0,0,0,367,365,1,0,0,0,368,369,3,38,19,0,369, - 370,5,9,0,0,370,41,1,0,0,0,371,379,5,43,0,0,372,379,5,44,0,0,373, - 374,5,45,0,0,374,375,3,44,22,0,375,376,5,81,0,0,376,377,3,46,23, - 0,377,379,1,0,0,0,378,371,1,0,0,0,378,372,1,0,0,0,378,373,1,0,0, - 0,379,43,1,0,0,0,380,381,5,87,0,0,381,45,1,0,0,0,382,383,5,87,0, - 0,383,47,1,0,0,0,384,386,5,17,0,0,385,387,3,6,3,0,386,385,1,0,0, - 0,386,387,1,0,0,0,387,49,1,0,0,0,388,392,3,52,26,0,389,391,3,54, - 27,0,390,389,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,392,393,1,0, - 0,0,393,396,1,0,0,0,394,392,1,0,0,0,395,397,3,56,28,0,396,395,1, - 0,0,0,396,397,1,0,0,0,397,51,1,0,0,0,398,399,5,18,0,0,399,400,3, - 6,3,0,400,401,5,80,0,0,401,402,3,28,14,0,402,53,1,0,0,0,403,404, - 5,19,0,0,404,405,3,6,3,0,405,406,5,80,0,0,406,407,3,28,14,0,407, - 55,1,0,0,0,408,409,5,20,0,0,409,410,5,80,0,0,410,411,3,28,14,0,411, - 57,1,0,0,0,412,413,5,21,0,0,413,414,5,87,0,0,414,415,5,23,0,0,415, - 416,3,6,3,0,416,417,5,46,0,0,417,418,3,6,3,0,418,420,5,24,0,0,419, - 421,5,73,0,0,420,419,1,0,0,0,420,421,1,0,0,0,421,422,1,0,0,0,422, - 423,7,1,0,0,423,424,5,80,0,0,424,425,3,28,14,0,425,59,1,0,0,0,426, - 427,5,22,0,0,427,428,3,6,3,0,428,429,5,80,0,0,429,430,3,28,14,0, - 
430,61,1,0,0,0,431,434,3,64,32,0,432,434,5,9,0,0,433,431,1,0,0,0, - 433,432,1,0,0,0,434,435,1,0,0,0,435,433,1,0,0,0,435,436,1,0,0,0, - 436,437,1,0,0,0,437,438,5,0,0,1,438,63,1,0,0,0,439,440,5,31,0,0, - 440,441,5,87,0,0,441,442,3,66,33,0,442,65,1,0,0,0,443,444,5,80,0, - 0,444,445,5,9,0,0,445,454,5,1,0,0,446,455,3,72,36,0,447,455,3,76, - 38,0,448,455,3,78,39,0,449,455,3,84,42,0,450,455,3,86,43,0,451,455, - 3,68,34,0,452,455,3,70,35,0,453,455,3,74,37,0,454,446,1,0,0,0,454, - 447,1,0,0,0,454,448,1,0,0,0,454,449,1,0,0,0,454,450,1,0,0,0,454, - 451,1,0,0,0,454,452,1,0,0,0,454,453,1,0,0,0,455,456,1,0,0,0,456, - 454,1,0,0,0,456,457,1,0,0,0,457,458,1,0,0,0,458,459,5,2,0,0,459, - 67,1,0,0,0,460,461,5,40,0,0,461,462,5,47,0,0,462,467,3,18,9,0,463, - 464,5,72,0,0,464,466,3,90,45,0,465,463,1,0,0,0,466,469,1,0,0,0,467, - 465,1,0,0,0,467,468,1,0,0,0,468,470,1,0,0,0,469,467,1,0,0,0,470, - 471,5,48,0,0,471,472,5,80,0,0,472,473,3,28,14,0,473,69,1,0,0,0,474, - 475,5,41,0,0,475,476,5,47,0,0,476,481,3,6,3,0,477,478,5,72,0,0,478, - 480,3,90,45,0,479,477,1,0,0,0,480,483,1,0,0,0,481,479,1,0,0,0,481, - 482,1,0,0,0,482,484,1,0,0,0,483,481,1,0,0,0,484,485,5,48,0,0,485, - 486,5,80,0,0,486,487,3,28,14,0,487,71,1,0,0,0,488,489,7,2,0,0,489, - 490,5,80,0,0,490,491,5,9,0,0,491,493,5,1,0,0,492,494,3,40,20,0,493, - 492,1,0,0,0,494,495,1,0,0,0,495,493,1,0,0,0,495,496,1,0,0,0,496, - 497,1,0,0,0,497,498,5,2,0,0,498,73,1,0,0,0,499,500,5,35,0,0,500, - 501,5,80,0,0,501,502,3,28,14,0,502,75,1,0,0,0,503,504,5,36,0,0,504, - 505,5,80,0,0,505,506,5,9,0,0,506,510,5,1,0,0,507,511,3,22,11,0,508, - 511,3,24,12,0,509,511,3,26,13,0,510,507,1,0,0,0,510,508,1,0,0,0, - 510,509,1,0,0,0,511,512,1,0,0,0,512,510,1,0,0,0,512,513,1,0,0,0, - 513,514,1,0,0,0,514,515,5,2,0,0,515,77,1,0,0,0,516,517,5,37,0,0, - 517,518,5,80,0,0,518,519,5,9,0,0,519,522,5,1,0,0,520,523,3,80,40, - 0,521,523,3,82,41,0,522,520,1,0,0,0,522,521,1,0,0,0,523,524,1,0, - 0,0,524,522,1,0,0,0,524,525,1,0,0,0,525,526,1,0,0,0,526,527,5,2, - 
0,0,527,79,1,0,0,0,528,533,5,87,0,0,529,530,5,54,0,0,530,531,3,6, - 3,0,531,532,5,56,0,0,532,534,1,0,0,0,533,529,1,0,0,0,533,534,1,0, - 0,0,534,535,1,0,0,0,535,536,5,55,0,0,536,549,5,42,0,0,537,546,5, - 47,0,0,538,543,3,88,44,0,539,540,5,72,0,0,540,542,3,88,44,0,541, - 539,1,0,0,0,542,545,1,0,0,0,543,541,1,0,0,0,543,544,1,0,0,0,544, - 547,1,0,0,0,545,543,1,0,0,0,546,538,1,0,0,0,546,547,1,0,0,0,547, - 548,1,0,0,0,548,550,5,48,0,0,549,537,1,0,0,0,549,550,1,0,0,0,550, - 551,1,0,0,0,551,552,5,9,0,0,552,81,1,0,0,0,553,558,5,87,0,0,554, - 555,5,54,0,0,555,556,3,6,3,0,556,557,5,56,0,0,557,559,1,0,0,0,558, - 554,1,0,0,0,558,559,1,0,0,0,559,560,1,0,0,0,560,561,3,0,0,0,561, - 562,5,55,0,0,562,563,5,39,0,0,563,564,5,9,0,0,564,83,1,0,0,0,565, - 566,5,38,0,0,566,567,5,80,0,0,567,568,5,9,0,0,568,571,5,1,0,0,569, - 572,5,42,0,0,570,572,5,39,0,0,571,569,1,0,0,0,571,570,1,0,0,0,572, - 585,1,0,0,0,573,582,5,47,0,0,574,579,3,88,44,0,575,576,5,72,0,0, - 576,578,3,88,44,0,577,575,1,0,0,0,578,581,1,0,0,0,579,577,1,0,0, - 0,579,580,1,0,0,0,580,583,1,0,0,0,581,579,1,0,0,0,582,574,1,0,0, - 0,582,583,1,0,0,0,583,584,1,0,0,0,584,586,5,48,0,0,585,573,1,0,0, - 0,585,586,1,0,0,0,586,587,1,0,0,0,587,588,5,9,0,0,588,589,5,2,0, - 0,589,85,1,0,0,0,590,591,5,15,0,0,591,592,5,87,0,0,592,601,5,47, - 0,0,593,598,3,88,44,0,594,595,5,72,0,0,595,597,3,88,44,0,596,594, - 1,0,0,0,597,600,1,0,0,0,598,596,1,0,0,0,598,599,1,0,0,0,599,602, - 1,0,0,0,600,598,1,0,0,0,601,593,1,0,0,0,601,602,1,0,0,0,602,603, - 1,0,0,0,603,605,5,48,0,0,604,606,3,0,0,0,605,604,1,0,0,0,605,606, - 1,0,0,0,606,607,1,0,0,0,607,608,5,80,0,0,608,609,3,28,14,0,609,87, - 1,0,0,0,610,611,5,87,0,0,611,612,3,0,0,0,612,89,1,0,0,0,613,614, - 5,87,0,0,614,615,5,74,0,0,615,616,7,3,0,0,616,91,1,0,0,0,68,98,109, - 114,120,122,126,141,150,156,177,179,186,191,196,203,212,216,223, - 228,233,242,245,250,258,263,272,277,293,297,306,312,317,323,333, - 338,341,348,354,360,365,378,386,392,396,420,433,435,454,456,467, - 
481,495,510,512,522,524,533,543,546,549,558,571,579,582,585,598, - 601,605 + 2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7,45,2,46, + 7,46,1,0,1,0,1,0,1,0,1,0,1,0,3,0,101,8,0,1,1,1,1,1,1,1,1,1,1,1,1, + 1,1,1,1,1,1,3,1,112,8,1,1,1,1,1,1,1,3,1,117,8,1,1,1,1,1,1,1,1,1, + 5,1,123,8,1,10,1,12,1,126,9,1,1,2,3,2,129,8,2,1,2,1,2,1,3,1,3,1, + 3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,3,3,144,8,3,1,3,1,3,1,3,1,3,1, + 3,1,3,1,3,3,3,153,8,3,1,3,1,3,1,3,1,3,3,3,159,8,3,1,3,1,3,1,3,1, + 3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,1,3,5, + 3,180,8,3,10,3,12,3,183,9,3,1,4,1,4,1,4,1,4,3,4,189,8,4,1,4,1,4, + 1,4,3,4,194,8,4,1,5,1,5,1,5,3,5,199,8,5,1,6,1,6,1,6,1,6,1,6,3,6, + 206,8,6,1,7,1,7,1,7,1,7,1,7,1,7,1,7,3,7,215,8,7,1,8,1,8,3,8,219, + 8,8,1,9,1,9,1,9,1,9,1,9,3,9,226,8,9,1,9,5,9,229,8,9,10,9,12,9,232, + 9,9,1,9,1,9,3,9,236,8,9,1,10,1,10,1,10,1,10,1,10,5,10,243,8,10,10, + 10,12,10,246,9,10,3,10,248,8,10,1,10,1,10,1,11,3,11,253,8,11,1,11, + 1,11,1,11,1,11,1,11,1,11,3,11,261,8,11,1,11,5,11,264,8,11,10,11, + 12,11,267,9,11,1,11,1,11,1,12,1,12,1,12,1,12,3,12,275,8,12,1,12, + 5,12,278,8,12,10,12,12,12,281,9,12,1,12,1,12,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,13,1,13,5,13,294,8,13,10,13,12,13,297,9,13,1,13, + 3,13,300,8,13,1,13,1,13,1,14,1,14,1,14,4,14,307,8,14,11,14,12,14, + 308,1,14,1,14,1,15,1,15,3,15,315,8,15,1,16,1,16,1,16,3,16,320,8, + 16,1,17,1,17,1,17,1,17,3,17,326,8,17,1,17,1,17,1,18,1,18,1,18,1, + 18,1,18,1,18,3,18,336,8,18,1,18,1,18,1,19,3,19,341,8,19,1,19,3,19, + 344,8,19,1,19,1,19,1,19,5,19,349,8,19,10,19,12,19,352,9,19,1,19, + 1,19,1,19,3,19,357,8,19,1,19,1,19,1,19,1,19,3,19,363,8,19,1,19,5, + 19,366,8,19,10,19,12,19,369,9,19,1,20,1,20,1,20,1,21,1,21,1,21,1, + 21,1,21,1,21,1,21,3,21,381,8,21,1,22,1,22,1,23,1,23,1,24,1,24,3, + 24,389,8,24,1,25,1,25,5,25,393,8,25,10,25,12,25,396,9,25,1,25,3, + 25,399,8,25,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1, + 28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,423, + 
8,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31, + 436,8,31,11,31,12,31,437,1,31,1,31,1,32,1,32,1,32,1,32,1,33,1,33, + 1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,457,8,33,11,33, + 12,33,458,1,33,1,33,1,34,1,34,1,34,1,34,1,34,5,34,468,8,34,10,34, + 12,34,471,9,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35, + 482,8,35,10,35,12,35,485,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36, + 1,36,1,36,4,36,496,8,36,11,36,12,36,497,1,36,1,36,1,37,1,37,1,37, + 1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,513,8,38,11,38,12,38, + 514,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,4,39,525,8,39,11,39, + 12,39,526,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3,40,536,8,40,1,40, + 1,40,1,40,1,40,1,40,1,40,5,40,544,8,40,10,40,12,40,547,9,40,3,40, + 549,8,40,1,40,3,40,552,8,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3, + 41,561,8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42,1,42,1,42,1, + 42,3,42,574,8,42,1,42,1,42,1,42,1,42,5,42,580,8,42,10,42,12,42,583, + 9,42,3,42,585,8,42,1,42,3,42,588,8,42,1,42,1,42,1,42,1,43,1,43,1, + 43,1,43,1,43,1,43,5,43,599,8,43,10,43,12,43,602,9,43,3,43,604,8, + 43,1,43,1,43,3,43,608,8,43,1,43,1,43,1,43,1,44,1,44,1,44,1,45,1, + 45,3,45,618,8,45,1,46,1,46,1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10, + 12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54, + 56,58,60,62,64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,0,4,2,0, + 49,49,73,73,1,0,88,89,1,0,32,34,3,0,25,25,85,86,88,89,685,0,100, + 1,0,0,0,2,111,1,0,0,0,4,128,1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0, + 10,198,1,0,0,0,12,205,1,0,0,0,14,214,1,0,0,0,16,218,1,0,0,0,18,220, + 1,0,0,0,20,237,1,0,0,0,22,252,1,0,0,0,24,270,1,0,0,0,26,284,1,0, + 0,0,28,303,1,0,0,0,30,314,1,0,0,0,32,319,1,0,0,0,34,325,1,0,0,0, + 36,329,1,0,0,0,38,340,1,0,0,0,40,370,1,0,0,0,42,380,1,0,0,0,44,382, + 1,0,0,0,46,384,1,0,0,0,48,386,1,0,0,0,50,390,1,0,0,0,52,400,1,0, + 0,0,54,405,1,0,0,0,56,410,1,0,0,0,58,414,1,0,0,0,60,428,1,0,0,0, + 62,435,1,0,0,0,64,441,1,0,0,0,66,445,1,0,0,0,68,462,1,0,0,0,70,476, + 
1,0,0,0,72,490,1,0,0,0,74,501,1,0,0,0,76,505,1,0,0,0,78,518,1,0, + 0,0,80,530,1,0,0,0,82,555,1,0,0,0,84,567,1,0,0,0,86,592,1,0,0,0, + 88,612,1,0,0,0,90,617,1,0,0,0,92,619,1,0,0,0,94,101,5,10,0,0,95, + 101,5,11,0,0,96,101,5,12,0,0,97,101,5,13,0,0,98,101,5,14,0,0,99, + 101,3,2,1,0,100,94,1,0,0,0,100,95,1,0,0,0,100,96,1,0,0,0,100,97, + 1,0,0,0,100,98,1,0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102,103,6,1, + -1,0,103,104,5,47,0,0,104,105,3,2,1,0,105,106,5,48,0,0,106,112,1, + 0,0,0,107,108,5,88,0,0,108,109,5,77,0,0,109,112,3,2,1,2,110,112, + 5,87,0,0,111,102,1,0,0,0,111,107,1,0,0,0,111,110,1,0,0,0,112,124, + 1,0,0,0,113,116,10,3,0,0,114,117,5,75,0,0,115,117,5,77,0,0,116,114, + 1,0,0,0,116,115,1,0,0,0,117,118,1,0,0,0,118,123,3,2,1,4,119,120, + 10,4,0,0,120,121,5,76,0,0,121,123,3,4,2,0,122,113,1,0,0,0,122,119, + 1,0,0,0,123,126,1,0,0,0,124,122,1,0,0,0,124,125,1,0,0,0,125,3,1, + 0,0,0,126,124,1,0,0,0,127,129,7,0,0,0,128,127,1,0,0,0,128,129,1, + 0,0,0,129,130,1,0,0,0,130,131,5,88,0,0,131,5,1,0,0,0,132,133,6,3, + -1,0,133,134,5,47,0,0,134,135,3,6,3,0,135,136,5,48,0,0,136,144,1, + 0,0,0,137,138,3,10,5,0,138,139,3,6,3,9,139,144,1,0,0,0,140,141,5, + 28,0,0,141,144,3,6,3,4,142,144,3,8,4,0,143,132,1,0,0,0,143,137,1, + 0,0,0,143,140,1,0,0,0,143,142,1,0,0,0,144,181,1,0,0,0,145,146,10, + 10,0,0,146,147,5,76,0,0,147,180,3,6,3,10,148,152,10,8,0,0,149,153, + 5,75,0,0,150,153,5,77,0,0,151,153,5,78,0,0,152,149,1,0,0,0,152,150, + 1,0,0,0,152,151,1,0,0,0,153,154,1,0,0,0,154,180,3,6,3,9,155,158, + 10,7,0,0,156,159,5,49,0,0,157,159,5,73,0,0,158,156,1,0,0,0,158,157, + 1,0,0,0,159,160,1,0,0,0,160,180,3,6,3,8,161,162,10,6,0,0,162,163, + 3,12,6,0,163,164,3,6,3,7,164,180,1,0,0,0,165,166,10,5,0,0,166,167, + 3,14,7,0,167,168,3,6,3,6,168,180,1,0,0,0,169,170,10,3,0,0,170,171, + 3,16,8,0,171,172,3,6,3,4,172,180,1,0,0,0,173,174,10,2,0,0,174,175, + 5,79,0,0,175,176,3,6,3,0,176,177,5,80,0,0,177,178,3,6,3,3,178,180, + 1,0,0,0,179,145,1,0,0,0,179,148,1,0,0,0,179,155,1,0,0,0,179,161, + 
1,0,0,0,179,165,1,0,0,0,179,169,1,0,0,0,179,173,1,0,0,0,180,183, + 1,0,0,0,181,179,1,0,0,0,181,182,1,0,0,0,182,7,1,0,0,0,183,181,1, + 0,0,0,184,194,3,20,10,0,185,194,5,85,0,0,186,188,7,1,0,0,187,189, + 3,18,9,0,188,187,1,0,0,0,188,189,1,0,0,0,189,194,1,0,0,0,190,194, + 5,86,0,0,191,194,5,25,0,0,192,194,3,18,9,0,193,184,1,0,0,0,193,185, + 1,0,0,0,193,186,1,0,0,0,193,190,1,0,0,0,193,191,1,0,0,0,193,192, + 1,0,0,0,194,9,1,0,0,0,195,199,5,49,0,0,196,199,5,73,0,0,197,199, + 5,50,0,0,198,195,1,0,0,0,198,196,1,0,0,0,198,197,1,0,0,0,199,11, + 1,0,0,0,200,206,5,53,0,0,201,206,5,52,0,0,202,206,5,51,0,0,203,206, + 5,59,0,0,204,206,5,60,0,0,205,200,1,0,0,0,205,201,1,0,0,0,205,202, + 1,0,0,0,205,203,1,0,0,0,205,204,1,0,0,0,206,13,1,0,0,0,207,215,5, + 61,0,0,208,215,5,63,0,0,209,215,5,68,0,0,210,215,5,69,0,0,211,215, + 5,70,0,0,212,215,5,71,0,0,213,215,5,62,0,0,214,207,1,0,0,0,214,208, + 1,0,0,0,214,209,1,0,0,0,214,210,1,0,0,0,214,211,1,0,0,0,214,212, + 1,0,0,0,214,213,1,0,0,0,215,15,1,0,0,0,216,219,5,26,0,0,217,219, + 5,27,0,0,218,216,1,0,0,0,218,217,1,0,0,0,219,17,1,0,0,0,220,225, + 5,87,0,0,221,222,5,54,0,0,222,223,3,90,45,0,223,224,5,56,0,0,224, + 226,1,0,0,0,225,221,1,0,0,0,225,226,1,0,0,0,226,230,1,0,0,0,227, + 229,5,83,0,0,228,227,1,0,0,0,229,232,1,0,0,0,230,228,1,0,0,0,230, + 231,1,0,0,0,231,235,1,0,0,0,232,230,1,0,0,0,233,234,5,84,0,0,234, + 236,3,18,9,0,235,233,1,0,0,0,235,236,1,0,0,0,236,19,1,0,0,0,237, + 238,5,87,0,0,238,247,5,47,0,0,239,244,3,6,3,0,240,241,5,72,0,0,241, + 243,3,6,3,0,242,240,1,0,0,0,243,246,1,0,0,0,244,242,1,0,0,0,244, + 245,1,0,0,0,245,248,1,0,0,0,246,244,1,0,0,0,247,239,1,0,0,0,247, + 248,1,0,0,0,248,249,1,0,0,0,249,250,5,48,0,0,250,21,1,0,0,0,251, + 253,5,29,0,0,252,251,1,0,0,0,252,253,1,0,0,0,253,254,1,0,0,0,254, + 255,5,16,0,0,255,256,5,87,0,0,256,257,3,0,0,0,257,258,5,74,0,0,258, + 260,3,6,3,0,259,261,5,82,0,0,260,259,1,0,0,0,260,261,1,0,0,0,261, + 265,1,0,0,0,262,264,3,42,21,0,263,262,1,0,0,0,264,267,1,0,0,0,265, + 
263,1,0,0,0,265,266,1,0,0,0,266,268,1,0,0,0,267,265,1,0,0,0,268, + 269,5,9,0,0,269,23,1,0,0,0,270,271,3,18,9,0,271,272,5,74,0,0,272, + 274,3,6,3,0,273,275,5,82,0,0,274,273,1,0,0,0,274,275,1,0,0,0,275, + 279,1,0,0,0,276,278,3,42,21,0,277,276,1,0,0,0,278,281,1,0,0,0,279, + 277,1,0,0,0,279,280,1,0,0,0,280,282,1,0,0,0,281,279,1,0,0,0,282, + 283,5,9,0,0,283,25,1,0,0,0,284,285,5,30,0,0,285,286,3,18,9,0,286, + 287,5,74,0,0,287,295,3,6,3,0,288,289,5,4,0,0,289,290,3,18,9,0,290, + 291,5,74,0,0,291,292,3,6,3,0,292,294,1,0,0,0,293,288,1,0,0,0,294, + 297,1,0,0,0,295,293,1,0,0,0,295,296,1,0,0,0,296,299,1,0,0,0,297, + 295,1,0,0,0,298,300,5,82,0,0,299,298,1,0,0,0,299,300,1,0,0,0,300, + 301,1,0,0,0,301,302,5,9,0,0,302,27,1,0,0,0,303,304,5,9,0,0,304,306, + 5,1,0,0,305,307,3,30,15,0,306,305,1,0,0,0,307,308,1,0,0,0,308,306, + 1,0,0,0,308,309,1,0,0,0,309,310,1,0,0,0,310,311,5,2,0,0,311,29,1, + 0,0,0,312,315,3,34,17,0,313,315,3,32,16,0,314,312,1,0,0,0,314,313, + 1,0,0,0,315,31,1,0,0,0,316,320,3,50,25,0,317,320,3,58,29,0,318,320, + 3,60,30,0,319,316,1,0,0,0,319,317,1,0,0,0,319,318,1,0,0,0,320,33, + 1,0,0,0,321,326,3,36,18,0,322,326,3,20,10,0,323,326,3,38,19,0,324, + 326,3,48,24,0,325,321,1,0,0,0,325,322,1,0,0,0,325,323,1,0,0,0,325, + 324,1,0,0,0,326,327,1,0,0,0,327,328,5,9,0,0,328,35,1,0,0,0,329,335, + 3,18,9,0,330,336,5,74,0,0,331,336,5,64,0,0,332,336,5,65,0,0,333, + 336,5,66,0,0,334,336,5,67,0,0,335,330,1,0,0,0,335,331,1,0,0,0,335, + 332,1,0,0,0,335,333,1,0,0,0,335,334,1,0,0,0,336,337,1,0,0,0,337, + 338,3,6,3,0,338,37,1,0,0,0,339,341,5,29,0,0,340,339,1,0,0,0,340, + 341,1,0,0,0,341,343,1,0,0,0,342,344,5,16,0,0,343,342,1,0,0,0,343, + 344,1,0,0,0,344,345,1,0,0,0,345,350,3,18,9,0,346,347,5,72,0,0,347, + 349,3,18,9,0,348,346,1,0,0,0,349,352,1,0,0,0,350,348,1,0,0,0,350, + 351,1,0,0,0,351,353,1,0,0,0,352,350,1,0,0,0,353,356,3,0,0,0,354, + 355,5,74,0,0,355,357,3,6,3,0,356,354,1,0,0,0,356,357,1,0,0,0,357, + 362,1,0,0,0,358,359,5,57,0,0,359,360,3,6,3,0,360,361,5,58,0,0,361, + 
363,1,0,0,0,362,358,1,0,0,0,362,363,1,0,0,0,363,367,1,0,0,0,364, + 366,3,42,21,0,365,364,1,0,0,0,366,369,1,0,0,0,367,365,1,0,0,0,367, + 368,1,0,0,0,368,39,1,0,0,0,369,367,1,0,0,0,370,371,3,38,19,0,371, + 372,5,9,0,0,372,41,1,0,0,0,373,381,5,43,0,0,374,381,5,44,0,0,375, + 376,5,45,0,0,376,377,3,44,22,0,377,378,5,81,0,0,378,379,3,46,23, + 0,379,381,1,0,0,0,380,373,1,0,0,0,380,374,1,0,0,0,380,375,1,0,0, + 0,381,43,1,0,0,0,382,383,5,87,0,0,383,45,1,0,0,0,384,385,5,87,0, + 0,385,47,1,0,0,0,386,388,5,17,0,0,387,389,3,6,3,0,388,387,1,0,0, + 0,388,389,1,0,0,0,389,49,1,0,0,0,390,394,3,52,26,0,391,393,3,54, + 27,0,392,391,1,0,0,0,393,396,1,0,0,0,394,392,1,0,0,0,394,395,1,0, + 0,0,395,398,1,0,0,0,396,394,1,0,0,0,397,399,3,56,28,0,398,397,1, + 0,0,0,398,399,1,0,0,0,399,51,1,0,0,0,400,401,5,18,0,0,401,402,3, + 6,3,0,402,403,5,80,0,0,403,404,3,28,14,0,404,53,1,0,0,0,405,406, + 5,19,0,0,406,407,3,6,3,0,407,408,5,80,0,0,408,409,3,28,14,0,409, + 55,1,0,0,0,410,411,5,20,0,0,411,412,5,80,0,0,412,413,3,28,14,0,413, + 57,1,0,0,0,414,415,5,21,0,0,415,416,5,87,0,0,416,417,5,23,0,0,417, + 418,3,6,3,0,418,419,5,46,0,0,419,420,3,6,3,0,420,422,5,24,0,0,421, + 423,5,73,0,0,422,421,1,0,0,0,422,423,1,0,0,0,423,424,1,0,0,0,424, + 425,7,1,0,0,425,426,5,80,0,0,426,427,3,28,14,0,427,59,1,0,0,0,428, + 429,5,22,0,0,429,430,3,6,3,0,430,431,5,80,0,0,431,432,3,28,14,0, + 432,61,1,0,0,0,433,436,3,64,32,0,434,436,5,9,0,0,435,433,1,0,0,0, + 435,434,1,0,0,0,436,437,1,0,0,0,437,435,1,0,0,0,437,438,1,0,0,0, + 438,439,1,0,0,0,439,440,5,0,0,1,440,63,1,0,0,0,441,442,5,31,0,0, + 442,443,5,87,0,0,443,444,3,66,33,0,444,65,1,0,0,0,445,446,5,80,0, + 0,446,447,5,9,0,0,447,456,5,1,0,0,448,457,3,72,36,0,449,457,3,76, + 38,0,450,457,3,78,39,0,451,457,3,84,42,0,452,457,3,86,43,0,453,457, + 3,68,34,0,454,457,3,70,35,0,455,457,3,74,37,0,456,448,1,0,0,0,456, + 449,1,0,0,0,456,450,1,0,0,0,456,451,1,0,0,0,456,452,1,0,0,0,456, + 453,1,0,0,0,456,454,1,0,0,0,456,455,1,0,0,0,457,458,1,0,0,0,458, + 
456,1,0,0,0,458,459,1,0,0,0,459,460,1,0,0,0,460,461,5,2,0,0,461, + 67,1,0,0,0,462,463,5,40,0,0,463,464,5,47,0,0,464,469,3,18,9,0,465, + 466,5,72,0,0,466,468,3,92,46,0,467,465,1,0,0,0,468,471,1,0,0,0,469, + 467,1,0,0,0,469,470,1,0,0,0,470,472,1,0,0,0,471,469,1,0,0,0,472, + 473,5,48,0,0,473,474,5,80,0,0,474,475,3,28,14,0,475,69,1,0,0,0,476, + 477,5,41,0,0,477,478,5,47,0,0,478,483,3,6,3,0,479,480,5,72,0,0,480, + 482,3,92,46,0,481,479,1,0,0,0,482,485,1,0,0,0,483,481,1,0,0,0,483, + 484,1,0,0,0,484,486,1,0,0,0,485,483,1,0,0,0,486,487,5,48,0,0,487, + 488,5,80,0,0,488,489,3,28,14,0,489,71,1,0,0,0,490,491,7,2,0,0,491, + 492,5,80,0,0,492,493,5,9,0,0,493,495,5,1,0,0,494,496,3,40,20,0,495, + 494,1,0,0,0,496,497,1,0,0,0,497,495,1,0,0,0,497,498,1,0,0,0,498, + 499,1,0,0,0,499,500,5,2,0,0,500,73,1,0,0,0,501,502,5,35,0,0,502, + 503,5,80,0,0,503,504,3,28,14,0,504,75,1,0,0,0,505,506,5,36,0,0,506, + 507,5,80,0,0,507,508,5,9,0,0,508,512,5,1,0,0,509,513,3,22,11,0,510, + 513,3,24,12,0,511,513,3,26,13,0,512,509,1,0,0,0,512,510,1,0,0,0, + 512,511,1,0,0,0,513,514,1,0,0,0,514,512,1,0,0,0,514,515,1,0,0,0, + 515,516,1,0,0,0,516,517,5,2,0,0,517,77,1,0,0,0,518,519,5,37,0,0, + 519,520,5,80,0,0,520,521,5,9,0,0,521,524,5,1,0,0,522,525,3,80,40, + 0,523,525,3,82,41,0,524,522,1,0,0,0,524,523,1,0,0,0,525,526,1,0, + 0,0,526,524,1,0,0,0,526,527,1,0,0,0,527,528,1,0,0,0,528,529,5,2, + 0,0,529,79,1,0,0,0,530,535,5,87,0,0,531,532,5,54,0,0,532,533,3,6, + 3,0,533,534,5,56,0,0,534,536,1,0,0,0,535,531,1,0,0,0,535,536,1,0, + 0,0,536,537,1,0,0,0,537,538,5,55,0,0,538,551,5,42,0,0,539,548,5, + 47,0,0,540,545,3,88,44,0,541,542,5,72,0,0,542,544,3,88,44,0,543, + 541,1,0,0,0,544,547,1,0,0,0,545,543,1,0,0,0,545,546,1,0,0,0,546, + 549,1,0,0,0,547,545,1,0,0,0,548,540,1,0,0,0,548,549,1,0,0,0,549, + 550,1,0,0,0,550,552,5,48,0,0,551,539,1,0,0,0,551,552,1,0,0,0,552, + 553,1,0,0,0,553,554,5,9,0,0,554,81,1,0,0,0,555,560,5,87,0,0,556, + 557,5,54,0,0,557,558,3,6,3,0,558,559,5,56,0,0,559,561,1,0,0,0,560, + 
556,1,0,0,0,560,561,1,0,0,0,561,562,1,0,0,0,562,563,3,0,0,0,563, + 564,5,55,0,0,564,565,5,39,0,0,565,566,5,9,0,0,566,83,1,0,0,0,567, + 568,5,38,0,0,568,569,5,80,0,0,569,570,5,9,0,0,570,573,5,1,0,0,571, + 574,5,42,0,0,572,574,5,39,0,0,573,571,1,0,0,0,573,572,1,0,0,0,574, + 587,1,0,0,0,575,584,5,47,0,0,576,581,3,88,44,0,577,578,5,72,0,0, + 578,580,3,88,44,0,579,577,1,0,0,0,580,583,1,0,0,0,581,579,1,0,0, + 0,581,582,1,0,0,0,582,585,1,0,0,0,583,581,1,0,0,0,584,576,1,0,0, + 0,584,585,1,0,0,0,585,586,1,0,0,0,586,588,5,48,0,0,587,575,1,0,0, + 0,587,588,1,0,0,0,588,589,1,0,0,0,589,590,5,9,0,0,590,591,5,2,0, + 0,591,85,1,0,0,0,592,593,5,15,0,0,593,594,5,87,0,0,594,603,5,47, + 0,0,595,600,3,88,44,0,596,597,5,72,0,0,597,599,3,88,44,0,598,596, + 1,0,0,0,599,602,1,0,0,0,600,598,1,0,0,0,600,601,1,0,0,0,601,604, + 1,0,0,0,602,600,1,0,0,0,603,595,1,0,0,0,603,604,1,0,0,0,604,605, + 1,0,0,0,605,607,5,48,0,0,606,608,3,0,0,0,607,606,1,0,0,0,607,608, + 1,0,0,0,608,609,1,0,0,0,609,610,5,80,0,0,610,611,3,28,14,0,611,87, + 1,0,0,0,612,613,5,87,0,0,613,614,3,0,0,0,614,89,1,0,0,0,615,618, + 3,88,44,0,616,618,3,6,3,0,617,615,1,0,0,0,617,616,1,0,0,0,618,91, + 1,0,0,0,619,620,5,87,0,0,620,621,5,74,0,0,621,622,7,3,0,0,622,93, + 1,0,0,0,69,100,111,116,122,124,128,143,152,158,179,181,188,193,198, + 205,214,218,225,230,235,244,247,252,260,265,274,279,295,299,308, + 314,319,325,335,340,343,350,356,362,367,380,388,394,398,422,435, + 437,456,458,469,483,497,512,514,524,526,535,545,548,551,560,573, + 581,584,587,600,603,607,617 ] class PyNestMLParser ( Parser ): @@ -338,7 +340,8 @@ class PyNestMLParser ( Parser ): RULE_outputBlock = 42 RULE_function = 43 RULE_parameter = 44 - RULE_constParameter = 45 + RULE_expressionOrParameter = 45 + RULE_constParameter = 46 ruleNames = [ "dataType", "unitType", "unitTypeExponent", "expression", "simpleExpression", "unaryOperator", "bitOperator", "comparisonOperator", @@ -351,7 +354,7 @@ class PyNestMLParser ( Parser ): "modelBody", "onReceiveBlock", 
"onConditionBlock", "blockWithVariables", "updateBlock", "equationsBlock", "inputBlock", "spikeInputPort", "continuousInputPort", "outputBlock", "function", "parameter", - "constParameter" ] + "expressionOrParameter", "constParameter" ] EOF = Token.EOF INDENT=1 @@ -502,37 +505,37 @@ def dataType(self): localctx = PyNestMLParser.DataTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_dataType) try: - self.state = 98 + self.state = 100 self._errHandler.sync(self) token = self._input.LA(1) if token in [10]: self.enterOuterAlt(localctx, 1) - self.state = 92 + self.state = 94 localctx.isInt = self.match(PyNestMLParser.INTEGER_KEYWORD) pass elif token in [11]: self.enterOuterAlt(localctx, 2) - self.state = 93 + self.state = 95 localctx.isReal = self.match(PyNestMLParser.REAL_KEYWORD) pass elif token in [12]: self.enterOuterAlt(localctx, 3) - self.state = 94 + self.state = 96 localctx.isString = self.match(PyNestMLParser.STRING_KEYWORD) pass elif token in [13]: self.enterOuterAlt(localctx, 4) - self.state = 95 + self.state = 97 localctx.isBool = self.match(PyNestMLParser.BOOLEAN_KEYWORD) pass elif token in [14]: self.enterOuterAlt(localctx, 5) - self.state = 96 + self.state = 98 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass elif token in [47, 87, 88]: self.enterOuterAlt(localctx, 6) - self.state = 97 + self.state = 99 localctx.unit = self.unitType(0) pass else: @@ -618,34 +621,34 @@ def unitType(self, _p:int=0): self.enterRecursionRule(localctx, 2, self.RULE_unitType, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 109 + self.state = 111 self._errHandler.sync(self) token = self._input.LA(1) if token in [47]: - self.state = 101 + self.state = 103 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) - self.state = 102 + self.state = 104 localctx.compoundUnit = self.unitType(0) - self.state = 103 + self.state = 105 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass elif token in [88]: - 
self.state = 105 + self.state = 107 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) - self.state = 106 + self.state = 108 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) - self.state = 107 + self.state = 109 localctx.right = self.unitType(2) pass elif token in [87]: - self.state = 108 + self.state = 110 localctx.unit = self.match(PyNestMLParser.NAME) pass else: raise NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 122 + self.state = 124 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -653,32 +656,32 @@ def unitType(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 120 + self.state = 122 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,3,self._ctx) if la_ == 1: localctx = PyNestMLParser.UnitTypeContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_unitType) - self.state = 111 + self.state = 113 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 114 + self.state = 116 self._errHandler.sync(self) token = self._input.LA(1) if token in [75]: - self.state = 112 + self.state = 114 localctx.timesOp = self.match(PyNestMLParser.STAR) pass elif token in [77]: - self.state = 113 + self.state = 115 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass else: raise NoViableAltException(self) - self.state = 116 + self.state = 118 localctx.right = self.unitType(4) pass @@ -686,18 +689,18 @@ def unitType(self, _p:int=0): localctx = PyNestMLParser.UnitTypeContext(self, _parentctx, _parentState) localctx.base = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_unitType) - self.state = 117 + self.state = 119 if not 
self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") - self.state = 118 + self.state = 120 localctx.powOp = self.match(PyNestMLParser.STAR_STAR) - self.state = 119 + self.state = 121 localctx.exponent = self.unitTypeExponent() pass - self.state = 124 + self.state = 126 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -745,11 +748,11 @@ def unitTypeExponent(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 126 + self.state = 128 self._errHandler.sync(self) _la = self._input.LA(1) if _la==49 or _la==73: - self.state = 125 + self.state = 127 _la = self._input.LA(1) if not(_la==49 or _la==73): self._errHandler.recoverInline(self) @@ -758,7 +761,7 @@ def unitTypeExponent(self): self.consume() - self.state = 128 + self.state = 130 self.match(PyNestMLParser.UNSIGNED_INTEGER) except RecognitionException as re: localctx.exception = re @@ -871,38 +874,38 @@ def expression(self, _p:int=0): self.enterRecursionRule(localctx, 6, self.RULE_expression, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 141 + self.state = 143 self._errHandler.sync(self) token = self._input.LA(1) if token in [47]: - self.state = 131 + self.state = 133 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) - self.state = 132 + self.state = 134 localctx.term = self.expression(0) - self.state = 133 + self.state = 135 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass elif token in [49, 50, 73]: - self.state = 135 + self.state = 137 self.unaryOperator() - self.state = 136 + self.state = 138 localctx.term = self.expression(9) pass elif token in [28]: - self.state = 138 + self.state = 140 localctx.logicalNot = self.match(PyNestMLParser.NOT_KEYWORD) - self.state = 139 + self.state = 141 localctx.term = self.expression(4) pass elif token in [25, 85, 86, 87, 88, 89]: - self.state = 140 + 
self.state = 142 self.simpleExpression() pass else: raise NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 179 + self.state = 181 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,10,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -910,20 +913,20 @@ def expression(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 177 + self.state = 179 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,9,self._ctx) if la_ == 1: localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 143 + self.state = 145 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 144 + self.state = 146 localctx.powOp = self.match(PyNestMLParser.STAR_STAR) - self.state = 145 + self.state = 147 localctx.right = self.expression(10) pass @@ -931,29 +934,29 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 146 + self.state = 148 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 150 + self.state = 152 self._errHandler.sync(self) token = self._input.LA(1) if token in [75]: - self.state = 147 + self.state = 149 localctx.timesOp = self.match(PyNestMLParser.STAR) pass elif token in [77]: - self.state = 148 + self.state = 150 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass elif token in [78]: - self.state = 149 + self.state = 151 localctx.moduloOp = self.match(PyNestMLParser.PERCENT) pass else: 
raise NoViableAltException(self) - self.state = 152 + self.state = 154 localctx.right = self.expression(9) pass @@ -961,25 +964,25 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 153 + self.state = 155 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 156 + self.state = 158 self._errHandler.sync(self) token = self._input.LA(1) if token in [49]: - self.state = 154 + self.state = 156 localctx.plusOp = self.match(PyNestMLParser.PLUS) pass elif token in [73]: - self.state = 155 + self.state = 157 localctx.minusOp = self.match(PyNestMLParser.MINUS) pass else: raise NoViableAltException(self) - self.state = 158 + self.state = 160 localctx.right = self.expression(8) pass @@ -987,13 +990,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 159 + self.state = 161 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 160 + self.state = 162 self.bitOperator() - self.state = 161 + self.state = 163 localctx.right = self.expression(7) pass @@ -1001,13 +1004,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 163 + self.state = 165 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 164 + 
self.state = 166 self.comparisonOperator() - self.state = 165 + self.state = 167 localctx.right = self.expression(6) pass @@ -1015,13 +1018,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 167 + self.state = 169 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 168 + self.state = 170 self.logicalOperator() - self.state = 169 + self.state = 171 localctx.right = self.expression(4) pass @@ -1029,22 +1032,22 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.condition = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 171 + self.state = 173 if not self.precpred(self._ctx, 2): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") - self.state = 172 + self.state = 174 self.match(PyNestMLParser.QUESTION) - self.state = 173 + self.state = 175 localctx.ifTrue = self.expression(0) - self.state = 174 + self.state = 176 self.match(PyNestMLParser.COLON) - self.state = 175 + self.state = 177 localctx.ifNot = self.expression(3) pass - self.state = 181 + self.state = 183 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,10,self._ctx) @@ -1107,35 +1110,35 @@ def simpleExpression(self): self.enterRule(localctx, 8, self.RULE_simpleExpression) self._la = 0 # Token type try: - self.state = 191 + self.state = 193 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,12,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 182 + self.state = 184 self.functionCall() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 183 + 
self.state = 185 self.match(PyNestMLParser.BOOLEAN_LITERAL) pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 184 + self.state = 186 _la = self._input.LA(1) if not(_la==88 or _la==89): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 186 + self.state = 188 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,11,self._ctx) if la_ == 1: - self.state = 185 + self.state = 187 self.variable() @@ -1143,19 +1146,19 @@ def simpleExpression(self): elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 188 + self.state = 190 localctx.string = self.match(PyNestMLParser.STRING_LITERAL) pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 189 + self.state = 191 localctx.isInf = self.match(PyNestMLParser.INF_KEYWORD) pass elif la_ == 6: self.enterOuterAlt(localctx, 6) - self.state = 190 + self.state = 192 self.variable() pass @@ -1206,19 +1209,19 @@ def unaryOperator(self): self.enterRule(localctx, 10, self.RULE_unaryOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 196 + self.state = 198 self._errHandler.sync(self) token = self._input.LA(1) if token in [49]: - self.state = 193 + self.state = 195 localctx.unaryPlus = self.match(PyNestMLParser.PLUS) pass elif token in [73]: - self.state = 194 + self.state = 196 localctx.unaryMinus = self.match(PyNestMLParser.MINUS) pass elif token in [50]: - self.state = 195 + self.state = 197 localctx.unaryTilde = self.match(PyNestMLParser.TILDE) pass else: @@ -1278,27 +1281,27 @@ def bitOperator(self): self.enterRule(localctx, 12, self.RULE_bitOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 203 + self.state = 205 self._errHandler.sync(self) token = self._input.LA(1) if token in [53]: - self.state = 198 + self.state = 200 localctx.bitAnd = self.match(PyNestMLParser.AMPERSAND) pass elif token in [52]: - self.state = 199 + self.state = 201 localctx.bitXor = self.match(PyNestMLParser.CARET) pass elif token 
in [51]: - self.state = 200 + self.state = 202 localctx.bitOr = self.match(PyNestMLParser.PIPE) pass elif token in [59]: - self.state = 201 + self.state = 203 localctx.bitShiftLeft = self.match(PyNestMLParser.LEFT_LEFT_ANGLE) pass elif token in [60]: - self.state = 202 + self.state = 204 localctx.bitShiftRight = self.match(PyNestMLParser.RIGHT_RIGHT_ANGLE) pass else: @@ -1366,35 +1369,35 @@ def comparisonOperator(self): self.enterRule(localctx, 14, self.RULE_comparisonOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 212 + self.state = 214 self._errHandler.sync(self) token = self._input.LA(1) if token in [61]: - self.state = 205 + self.state = 207 localctx.lt = self.match(PyNestMLParser.LEFT_ANGLE) pass elif token in [63]: - self.state = 206 + self.state = 208 localctx.le = self.match(PyNestMLParser.LEFT_ANGLE_EQUALS) pass elif token in [68]: - self.state = 207 + self.state = 209 localctx.eq = self.match(PyNestMLParser.EQUALS_EQUALS) pass elif token in [69]: - self.state = 208 + self.state = 210 localctx.ne = self.match(PyNestMLParser.EXCLAMATION_EQUALS) pass elif token in [70]: - self.state = 209 + self.state = 211 localctx.ne2 = self.match(PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE) pass elif token in [71]: - self.state = 210 + self.state = 212 localctx.ge = self.match(PyNestMLParser.RIGHT_ANGLE_EQUALS) pass elif token in [62]: - self.state = 211 + self.state = 213 localctx.gt = self.match(PyNestMLParser.RIGHT_ANGLE) pass else: @@ -1442,15 +1445,15 @@ def logicalOperator(self): self.enterRule(localctx, 16, self.RULE_logicalOperator) try: self.enterOuterAlt(localctx, 1) - self.state = 216 + self.state = 218 self._errHandler.sync(self) token = self._input.LA(1) if token in [26]: - self.state = 214 + self.state = 216 localctx.logicalAnd = self.match(PyNestMLParser.AND_KEYWORD) pass elif token in [27]: - self.state = 215 + self.state = 217 localctx.logicalOr = self.match(PyNestMLParser.OR_KEYWORD) pass else: @@ -1472,7 +1475,7 @@ def __init__(self, parser, 
parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.name = None # Token - self.vectorParameter = None # ExpressionContext + self.vectorParameter = None # ExpressionOrParameterContext self.attribute = None # VariableContext def NAME(self): @@ -1493,8 +1496,8 @@ def DIFFERENTIAL_ORDER(self, i:int=None): def FULLSTOP(self): return self.getToken(PyNestMLParser.FULLSTOP, 0) - def expression(self): - return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def expressionOrParameter(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionOrParameterContext,0) def variable(self): @@ -1519,38 +1522,38 @@ def variable(self): self.enterRule(localctx, 18, self.RULE_variable) try: self.enterOuterAlt(localctx, 1) - self.state = 218 + self.state = 220 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 223 + self.state = 225 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,17,self._ctx) if la_ == 1: - self.state = 219 - self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 220 - localctx.vectorParameter = self.expression(0) self.state = 221 + self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) + self.state = 222 + localctx.vectorParameter = self.expressionOrParameter() + self.state = 223 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 228 + self.state = 230 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 225 + self.state = 227 self.match(PyNestMLParser.DIFFERENTIAL_ORDER) - self.state = 230 + self.state = 232 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,18,self._ctx) - self.state = 233 + self.state = 235 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,19,self._ctx) if la_ == 1: - self.state = 231 + self.state = 233 self.match(PyNestMLParser.FULLSTOP) - self.state = 
232 + self.state = 234 localctx.attribute = self.variable() @@ -1612,31 +1615,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 235 + self.state = 237 localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 236 + self.state = 238 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 245 + self.state = 247 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 1829587650609152) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & 126977) != 0): - self.state = 237 + self.state = 239 self.expression(0) - self.state = 242 + self.state = 244 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 238 + self.state = 240 self.match(PyNestMLParser.COMMA) - self.state = 239 + self.state = 241 self.expression(0) - self.state = 244 + self.state = 246 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 247 + self.state = 249 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ -1709,43 +1712,43 @@ def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 250 + self.state = 252 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 249 + self.state = 251 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 252 + self.state = 254 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 253 + self.state = 255 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 254 + self.state = 256 self.dataType() - self.state = 255 + self.state = 257 self.match(PyNestMLParser.EQUALS) - self.state = 256 - self.expression(0) self.state = 258 + self.expression(0) + self.state = 260 self._errHandler.sync(self) _la = self._input.LA(1) if _la==82: - self.state = 257 + self.state = 259 self.match(PyNestMLParser.SEMICOLON) - self.state = 263 + self.state = 265 
self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 260 + self.state = 262 localctx.decorator = self.anyDecorator() - self.state = 265 + self.state = 267 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 266 + self.state = 268 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1809,31 +1812,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 268 + self.state = 270 localctx.lhs = self.variable() - self.state = 269 + self.state = 271 self.match(PyNestMLParser.EQUALS) - self.state = 270 - localctx.rhs = self.expression(0) self.state = 272 + localctx.rhs = self.expression(0) + self.state = 274 self._errHandler.sync(self) _la = self._input.LA(1) if _la==82: - self.state = 271 + self.state = 273 self.match(PyNestMLParser.SEMICOLON) - self.state = 277 + self.state = 279 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 274 + self.state = 276 localctx.decorator = self.anyDecorator() - self.state = 279 + self.state = 281 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 280 + self.state = 282 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1905,39 +1908,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 282 + self.state = 284 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 283 + self.state = 285 self.variable() - self.state = 284 + self.state = 286 self.match(PyNestMLParser.EQUALS) - self.state = 285 + self.state = 287 self.expression(0) - self.state = 293 + self.state = 295 self._errHandler.sync(self) _la = self._input.LA(1) while _la==4: - self.state = 286 + self.state = 288 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 287 + self.state = 289 
self.variable() - self.state = 288 + self.state = 290 self.match(PyNestMLParser.EQUALS) - self.state = 289 + self.state = 291 self.expression(0) - self.state = 295 + self.state = 297 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 297 + self.state = 299 self._errHandler.sync(self) _la = self._input.LA(1) if _la==82: - self.state = 296 + self.state = 298 self.match(PyNestMLParser.SEMICOLON) - self.state = 299 + self.state = 301 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1990,23 +1993,23 @@ def block(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 301 + self.state = 303 self.match(PyNestMLParser.NEWLINE) - self.state = 302 + self.state = 304 self.match(PyNestMLParser.INDENT) - self.state = 304 + self.state = 306 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 303 + self.state = 305 self.stmt() - self.state = 306 + self.state = 308 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==87): break - self.state = 308 + self.state = 310 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2049,17 +2052,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_stmt) try: - self.state = 312 + self.state = 314 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 17, 29, 87]: self.enterOuterAlt(localctx, 1) - self.state = 310 + self.state = 312 self.smallStmt() pass elif token in [18, 21, 22]: self.enterOuterAlt(localctx, 2) - self.state = 311 + self.state = 313 self.compoundStmt() pass else: @@ -2110,22 +2113,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_compoundStmt) try: - self.state = 317 + self.state = 319 self._errHandler.sync(self) 
token = self._input.LA(1) if token in [18]: self.enterOuterAlt(localctx, 1) - self.state = 314 + self.state = 316 self.ifStmt() pass elif token in [21]: self.enterOuterAlt(localctx, 2) - self.state = 315 + self.state = 317 self.forStmt() pass elif token in [22]: self.enterOuterAlt(localctx, 3) - self.state = 316 + self.state = 318 self.whileStmt() pass else: @@ -2184,31 +2187,31 @@ def smallStmt(self): self.enterRule(localctx, 34, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 323 + self.state = 325 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,32,self._ctx) if la_ == 1: - self.state = 319 + self.state = 321 self.assignment() pass elif la_ == 2: - self.state = 320 + self.state = 322 self.functionCall() pass elif la_ == 3: - self.state = 321 + self.state = 323 self.declaration() pass elif la_ == 4: - self.state = 322 + self.state = 324 self.returnStmt() pass - self.state = 325 + self.state = 327 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2273,35 +2276,35 @@ def assignment(self): self.enterRule(localctx, 36, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 327 + self.state = 329 localctx.lhs_variable = self.variable() - self.state = 333 + self.state = 335 self._errHandler.sync(self) token = self._input.LA(1) if token in [74]: - self.state = 328 + self.state = 330 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass elif token in [64]: - self.state = 329 + self.state = 331 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass elif token in [65]: - self.state = 330 + self.state = 332 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass elif token in [66]: - self.state = 331 + self.state = 333 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass elif token in [67]: - self.state = 332 + self.state = 334 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) 
pass else: raise NoViableAltException(self) - self.state = 335 + self.state = 337 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2389,67 +2392,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 338 + self.state = 340 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 337 + self.state = 339 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 341 + self.state = 343 self._errHandler.sync(self) _la = self._input.LA(1) if _la==16: - self.state = 340 + self.state = 342 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 343 + self.state = 345 self.variable() - self.state = 348 + self.state = 350 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 344 + self.state = 346 self.match(PyNestMLParser.COMMA) - self.state = 345 + self.state = 347 self.variable() - self.state = 350 + self.state = 352 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 351 + self.state = 353 self.dataType() - self.state = 354 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) if _la==74: - self.state = 352 + self.state = 354 self.match(PyNestMLParser.EQUALS) - self.state = 353 + self.state = 355 localctx.rhs = self.expression(0) - self.state = 360 + self.state = 362 self._errHandler.sync(self) _la = self._input.LA(1) if _la==57: - self.state = 356 + self.state = 358 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 357 + self.state = 359 localctx.invariant = self.expression(0) - self.state = 358 + self.state = 360 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 365 + self.state = 367 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 362 + self.state = 364 localctx.decorator = self.anyDecorator() - self.state = 367 + self.state = 369 
self._errHandler.sync(self) _la = self._input.LA(1) @@ -2494,9 +2497,9 @@ def declaration_newline(self): self.enterRule(localctx, 40, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 368 + self.state = 370 self.declaration() - self.state = 369 + self.state = 371 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2551,28 +2554,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_anyDecorator) try: - self.state = 378 + self.state = 380 self._errHandler.sync(self) token = self._input.LA(1) if token in [43]: self.enterOuterAlt(localctx, 1) - self.state = 371 + self.state = 373 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass elif token in [44]: self.enterOuterAlt(localctx, 2) - self.state = 372 + self.state = 374 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass elif token in [45]: self.enterOuterAlt(localctx, 3) - self.state = 373 + self.state = 375 self.match(PyNestMLParser.AT) - self.state = 374 + self.state = 376 self.namespaceDecoratorNamespace() - self.state = 375 + self.state = 377 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 376 + self.state = 378 self.namespaceDecoratorName() pass else: @@ -2616,7 +2619,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 44, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 380 + self.state = 382 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2656,7 +2659,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 46, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 382 + self.state = 384 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -2700,13 +2703,13 @@ def returnStmt(self): self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 384 - self.match(PyNestMLParser.RETURN_KEYWORD) self.state = 386 + self.match(PyNestMLParser.RETURN_KEYWORD) + self.state = 388 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 1829587650609152) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & 126977) != 0): - self.state = 385 + self.state = 387 self.expression(0) @@ -2760,23 +2763,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 388 + self.state = 390 self.ifClause() - self.state = 392 + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) while _la==19: - self.state = 389 + self.state = 391 self.elifClause() - self.state = 394 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 396 + self.state = 398 self._errHandler.sync(self) _la = self._input.LA(1) if _la==20: - self.state = 395 + self.state = 397 self.elseClause() @@ -2828,13 +2831,13 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 398 + self.state = 400 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 399 + self.state = 401 self.expression(0) - self.state = 400 + self.state = 402 self.match(PyNestMLParser.COLON) - self.state = 401 + self.state = 403 self.block() except RecognitionException as re: localctx.exception = re @@ -2884,13 +2887,13 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 403 + self.state = 405 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 404 + self.state = 406 self.expression(0) - self.state = 405 + self.state = 407 self.match(PyNestMLParser.COLON) - self.state = 406 + self.state = 408 self.block() except RecognitionException as re: localctx.exception = re @@ -2936,11 +2939,11 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: 
self.enterOuterAlt(localctx, 1) - self.state = 408 + self.state = 410 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 409 + self.state = 411 self.match(PyNestMLParser.COLON) - self.state = 410 + self.state = 412 self.block() except RecognitionException as re: localctx.exception = re @@ -3019,39 +3022,39 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 412 + self.state = 414 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 413 + self.state = 415 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 414 + self.state = 416 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 415 + self.state = 417 localctx.start_from = self.expression(0) - self.state = 416 + self.state = 418 self.match(PyNestMLParser.ELLIPSIS) - self.state = 417 + self.state = 419 localctx.end_at = self.expression(0) - self.state = 418 + self.state = 420 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 420 + self.state = 422 self._errHandler.sync(self) _la = self._input.LA(1) if _la==73: - self.state = 419 + self.state = 421 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 422 + self.state = 424 _la = self._input.LA(1) if not(_la==88 or _la==89): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 423 + self.state = 425 self.match(PyNestMLParser.COLON) - self.state = 424 + self.state = 426 self.block() except RecognitionException as re: localctx.exception = re @@ -3101,13 +3104,13 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 426 + self.state = 428 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 427 + self.state = 429 self.expression(0) - self.state = 428 + self.state = 430 self.match(PyNestMLParser.COLON) - self.state = 429 + self.state = 431 self.block() except RecognitionException as re: localctx.exception = re @@ -3160,31 +3163,31 @@ def 
nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 433 + self.state = 435 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 433 + self.state = 435 self._errHandler.sync(self) token = self._input.LA(1) if token in [31]: - self.state = 431 + self.state = 433 self.model() pass elif token in [9]: - self.state = 432 + self.state = 434 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 435 + self.state = 437 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==9 or _la==31): break - self.state = 437 + self.state = 439 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3230,11 +3233,11 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 439 + self.state = 441 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 440 + self.state = 442 self.match(PyNestMLParser.NAME) - self.state = 441 + self.state = 443 self.modelBody() except RecognitionException as re: localctx.exception = re @@ -3339,61 +3342,61 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 443 + self.state = 445 self.match(PyNestMLParser.COLON) - self.state = 444 + self.state = 446 self.match(PyNestMLParser.NEWLINE) - self.state = 445 + self.state = 447 self.match(PyNestMLParser.INDENT) - self.state = 454 + self.state = 456 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 454 + self.state = 456 self._errHandler.sync(self) token = self._input.LA(1) if token in [32, 33, 34]: - self.state = 446 + self.state = 448 self.blockWithVariables() pass elif token in [36]: - self.state = 447 + self.state = 449 self.equationsBlock() pass elif token in [37]: - self.state = 448 + self.state = 450 self.inputBlock() pass elif token in [38]: - self.state = 449 + self.state = 451 self.outputBlock() pass 
elif token in [15]: - self.state = 450 + self.state = 452 self.function() pass elif token in [40]: - self.state = 451 + self.state = 453 self.onReceiveBlock() pass elif token in [41]: - self.state = 452 + self.state = 454 self.onConditionBlock() pass elif token in [35]: - self.state = 453 + self.state = 455 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 456 + self.state = 458 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): break - self.state = 458 + self.state = 460 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3464,29 +3467,29 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 460 + self.state = 462 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 461 + self.state = 463 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 462 + self.state = 464 localctx.inputPortVariable = self.variable() - self.state = 467 + self.state = 469 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 463 + self.state = 465 self.match(PyNestMLParser.COMMA) - self.state = 464 + self.state = 466 self.constParameter() - self.state = 469 + self.state = 471 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 470 + self.state = 472 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 471 + self.state = 473 self.match(PyNestMLParser.COLON) - self.state = 472 + self.state = 474 self.block() except RecognitionException as re: localctx.exception = re @@ -3557,29 +3560,29 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 474 + self.state = 476 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 475 + self.state = 477 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 476 + self.state = 478 localctx.condition = self.expression(0) - self.state = 481 + self.state = 
483 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 477 + self.state = 479 self.match(PyNestMLParser.COMMA) - self.state = 478 + self.state = 480 self.constParameter() - self.state = 483 + self.state = 485 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 484 + self.state = 486 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 485 + self.state = 487 self.match(PyNestMLParser.COLON) - self.state = 486 + self.state = 488 self.block() except RecognitionException as re: localctx.exception = re @@ -3645,7 +3648,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 488 + self.state = 490 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): @@ -3653,25 +3656,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 489 + self.state = 491 self.match(PyNestMLParser.COLON) - self.state = 490 + self.state = 492 self.match(PyNestMLParser.NEWLINE) - self.state = 491 + self.state = 493 self.match(PyNestMLParser.INDENT) - self.state = 493 + self.state = 495 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 492 + self.state = 494 self.declaration_newline() - self.state = 495 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==16 or _la==29 or _la==87): break - self.state = 497 + self.state = 499 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3717,11 +3720,11 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 499 + self.state = 501 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 500 + self.state = 502 self.match(PyNestMLParser.COLON) - self.state = 501 + self.state = 503 self.block() except RecognitionException as re: localctx.exception = re @@ 
-3794,43 +3797,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 503 + self.state = 505 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 504 + self.state = 506 self.match(PyNestMLParser.COLON) - self.state = 505 + self.state = 507 self.match(PyNestMLParser.NEWLINE) - self.state = 506 + self.state = 508 self.match(PyNestMLParser.INDENT) - self.state = 510 + self.state = 512 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 510 + self.state = 512 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 29]: - self.state = 507 + self.state = 509 self.inlineExpression() pass elif token in [87]: - self.state = 508 + self.state = 510 self.odeEquation() pass elif token in [30]: - self.state = 509 + self.state = 511 self.kernel() pass else: raise NoViableAltException(self) - self.state = 512 + self.state = 514 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==87): break - self.state = 514 + self.state = 516 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3896,39 +3899,39 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 516 + self.state = 518 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 517 + self.state = 519 self.match(PyNestMLParser.COLON) - self.state = 518 + self.state = 520 self.match(PyNestMLParser.NEWLINE) - self.state = 519 + self.state = 521 self.match(PyNestMLParser.INDENT) - self.state = 522 + self.state = 524 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 522 + self.state = 524 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,54,self._ctx) if la_ == 1: - self.state = 520 + self.state = 522 self.spikeInputPort() pass elif la_ == 2: - self.state = 521 + self.state = 523 self.continuousInputPort() pass - 
self.state = 524 + self.state = 526 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==87): break - self.state = 526 + self.state = 528 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4008,55 +4011,55 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 528 + self.state = 530 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 533 + self.state = 535 self._errHandler.sync(self) _la = self._input.LA(1) if _la==54: - self.state = 529 + self.state = 531 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 530 + self.state = 532 localctx.sizeParameter = self.expression(0) - self.state = 531 + self.state = 533 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 535 + self.state = 537 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 536 + self.state = 538 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 549 + self.state = 551 self._errHandler.sync(self) _la = self._input.LA(1) if _la==47: - self.state = 537 + self.state = 539 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 546 + self.state = 548 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 538 + self.state = 540 self.parameter() - self.state = 543 + self.state = 545 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 539 + self.state = 541 self.match(PyNestMLParser.COMMA) - self.state = 540 + self.state = 542 self.parameter() - self.state = 545 + self.state = 547 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 548 + self.state = 550 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 551 + self.state = 553 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4121,27 +4124,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 553 + self.state = 555 localctx.name = 
self.match(PyNestMLParser.NAME) - self.state = 558 + self.state = 560 self._errHandler.sync(self) _la = self._input.LA(1) if _la==54: - self.state = 554 + self.state = 556 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 555 + self.state = 557 localctx.sizeParameter = self.expression(0) - self.state = 556 + self.state = 558 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 560 + self.state = 562 self.dataType() - self.state = 561 + self.state = 563 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 562 + self.state = 564 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 563 + self.state = 565 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4224,61 +4227,61 @@ def outputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 565 + self.state = 567 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 566 + self.state = 568 self.match(PyNestMLParser.COLON) - self.state = 567 + self.state = 569 self.match(PyNestMLParser.NEWLINE) - self.state = 568 + self.state = 570 self.match(PyNestMLParser.INDENT) - self.state = 571 + self.state = 573 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 569 + self.state = 571 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) pass elif token in [39]: - self.state = 570 + self.state = 572 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 585 + self.state = 587 self._errHandler.sync(self) _la = self._input.LA(1) if _la==47: - self.state = 573 + self.state = 575 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 582 + self.state = 584 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 574 + self.state = 576 localctx.attribute = self.parameter() - self.state = 579 + self.state = 581 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - 
self.state = 575 + self.state = 577 self.match(PyNestMLParser.COMMA) - self.state = 576 + self.state = 578 localctx.attribute = self.parameter() - self.state = 581 + self.state = 583 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 584 + self.state = 586 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 587 + self.state = 589 self.match(PyNestMLParser.NEWLINE) - self.state = 588 + self.state = 590 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4352,45 +4355,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 590 + self.state = 592 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 591 + self.state = 593 self.match(PyNestMLParser.NAME) - self.state = 592 + self.state = 594 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 601 + self.state = 603 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 593 + self.state = 595 self.parameter() - self.state = 598 + self.state = 600 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 594 + self.state = 596 self.match(PyNestMLParser.COMMA) - self.state = 595 + self.state = 597 self.parameter() - self.state = 600 + self.state = 602 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 603 - self.match(PyNestMLParser.RIGHT_PAREN) self.state = 605 + self.match(PyNestMLParser.RIGHT_PAREN) + self.state = 607 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 140737488387072) != 0) or _la==87 or _la==88: - self.state = 604 + self.state = 606 localctx.returnType = self.dataType() - self.state = 607 + self.state = 609 self.match(PyNestMLParser.COLON) - self.state = 608 + self.state = 610 self.block() except RecognitionException as re: localctx.exception = re @@ -4433,9 +4436,9 @@ def parameter(self): self.enterRule(localctx, 88, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - 
self.state = 610 + self.state = 612 self.match(PyNestMLParser.NAME) - self.state = 611 + self.state = 613 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4446,6 +4449,63 @@ def parameter(self): return localctx + class ExpressionOrParameterContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def parameter(self): + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,0) + + + def expression(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + + + def getRuleIndex(self): + return PyNestMLParser.RULE_expressionOrParameter + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExpressionOrParameter" ): + return visitor.visitExpressionOrParameter(self) + else: + return visitor.visitChildren(self) + + + + + def expressionOrParameter(self): + + localctx = PyNestMLParser.ExpressionOrParameterContext(self, self._ctx, self.state) + self.enterRule(localctx, 90, self.RULE_expressionOrParameter) + try: + self.state = 617 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,68,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 615 + self.parameter() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 616 + self.expression(0) + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class ConstParameterContext(ParserRuleContext): __slots__ = 'parser' @@ -4491,15 +4551,15 @@ def accept(self, visitor:ParseTreeVisitor): def constParameter(self): localctx = PyNestMLParser.ConstParameterContext(self, self._ctx, self.state) - self.enterRule(localctx, 90, self.RULE_constParameter) + self.enterRule(localctx, 92, self.RULE_constParameter) 
self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 613 + self.state = 619 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 614 + self.state = 620 self.match(PyNestMLParser.EQUALS) - self.state = 615 + self.state = 621 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==25 or ((((_la - 85)) & ~0x3f) == 0 and ((1 << (_la - 85)) & 27) != 0)): diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index fbce7d03a..f0193358a 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -234,6 +234,11 @@ def visitParameter(self, ctx:PyNestMLParser.ParameterContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PyNestMLParser#expressionOrParameter. + def visitExpressionOrParameter(self, ctx:PyNestMLParser.ExpressionOrParameterContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PyNestMLParser#constParameter. def visitConstParameter(self, ctx:PyNestMLParser.ConstParameterContext): return self.visitChildren(ctx) diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 6a773b247..078e9af97 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -101,18 +101,21 @@ parser grammar PyNestMLParser; logicalOperator : (logicalAnd=AND_KEYWORD | logicalOr=OR_KEYWORD ); /** - ASTVariable Provides a 'marker' AST node to identify variables used in expressions. + ASTVariable: A variable used in expressions. Can optionally be a vector, have a differential order, and attributes. + + If it is a vector, the square brackets contain a variable declaration (used for example as in ``onReceive(my_vector_input_port[i integer])``). + @attribute name: The name of the variable without the differential order, e.g. V_m @attribute vectorParameter: An optional array parameter, e.g., 'tau_syn ms[n_receptors]'. 
@attribute differentialOrder: The corresponding differential order, e.g. 2 */ variable : name=NAME - (LEFT_SQUARE_BRACKET vectorParameter=expression RIGHT_SQUARE_BRACKET)? - (DIFFERENTIAL_ORDER)* - (FULLSTOP attribute=variable)?; + (LEFT_SQUARE_BRACKET vectorParameter=expressionOrParameter RIGHT_SQUARE_BRACKET)? + (DIFFERENTIAL_ORDER)* + (FULLSTOP attribute=variable)?; /** - ASTFunctionCall Represents a function call, e.g. myFun("a", "b"). + ASTFunctionCall: Represents a function call, e.g. ``myFun("a", "b")``. @attribute calleeName: The (qualified) name of the functions @attribute args: Comma separated list of expressions representing parameters. */ @@ -170,7 +173,7 @@ parser grammar PyNestMLParser; (LEFT_LEFT_SQUARE invariant=expression RIGHT_RIGHT_SQUARE)? decorator=anyDecorator*; - declaration_newline: declaration NEWLINE; + declaration_newline : declaration NEWLINE; /** ... */ @@ -213,7 +216,7 @@ parser grammar PyNestMLParser; /** ASTNestMLCompilationUnit represents a collection of models. @attribute model: A list of processed models. */ - nestMLCompilationUnit: ( model | NEWLINE )+ EOF; + nestMLCompilationUnit : ( model | NEWLINE )+ EOF; /********************************************************************************************************************* * NestML model and model blocks @@ -233,19 +236,19 @@ parser grammar PyNestMLParser; @attribute updateBlock: A single update block containing the dynamic behavior. @attribute function: A block declaring a user-defined function. 
*/ - modelBody: COLON + modelBody : COLON NEWLINE INDENT ( blockWithVariables | equationsBlock | inputBlock | outputBlock | function | onReceiveBlock | onConditionBlock | updateBlock )+ DEDENT; /** ASTOnReceiveBlock @attribute block implementation of the dynamics */ - onReceiveBlock: ON_RECEIVE_KEYWORD LEFT_PAREN inputPortVariable=variable (COMMA constParameter)* RIGHT_PAREN COLON + onReceiveBlock : ON_RECEIVE_KEYWORD LEFT_PAREN inputPortVariable=variable (COMMA constParameter)* RIGHT_PAREN COLON block; /** ASTOnConditionBlock @attribute block implementation of the dynamics */ - onConditionBlock: ON_CONDITION_KEYWORD LEFT_PAREN condition=expression (COMMA constParameter)* RIGHT_PAREN COLON + onConditionBlock : ON_CONDITION_KEYWORD LEFT_PAREN condition=expression (COMMA constParameter)* RIGHT_PAREN COLON block; /** ASTBlockWithVariables Represent a block with variables and constants, e.g.: @@ -257,7 +260,7 @@ parser grammar PyNestMLParser; @attribute internals: True iff the varblock is a state internals block. @attribute declaration: A list of corresponding declarations. */ - blockWithVariables: + blockWithVariables : blockType=(STATE_KEYWORD | PARAMETERS_KEYWORD | INTERNALS_KEYWORD) COLON NEWLINE INDENT declaration_newline+ DEDENT; @@ -268,14 +271,14 @@ parser grammar PyNestMLParser; integrate(V) @attribute block Implementation of the dynamics. */ - updateBlock: UPDATE_KEYWORD COLON + updateBlock : UPDATE_KEYWORD COLON block; /** ASTEquationsBlock A block declaring equations and inline expressions. @attribute inlineExpression: A single inline expression, e.g., inline V_m mV = ... @attribute odeEquation: A single ode equation statement, e.g., V_m' = ... 
*/ - equationsBlock: EQUATIONS_KEYWORD COLON + equationsBlock : EQUATIONS_KEYWORD COLON NEWLINE INDENT ( inlineExpression | odeEquation | kernel )+ DEDENT; /** ASTInputBlock represents a single input block, e.g.: @@ -284,8 +287,8 @@ parser grammar PyNestMLParser; current_in pA <- continuous @attribute inputPort: A list of input ports. */ - inputBlock: INPUT_KEYWORD COLON - NEWLINE INDENT (spikeInputPort | continuousInputPort)+ DEDENT; + inputBlock : INPUT_KEYWORD COLON + NEWLINE INDENT (spikeInputPort | continuousInputPort)+ DEDENT; /** ASTInputPort represents a single input port, e.g.: spike_in[3] <- spike @@ -296,12 +299,12 @@ parser grammar PyNestMLParser; @attribute isSpike: Indicates that this input port accepts spikes. @attribute isContinuous: Indicates that this input port accepts continuous-time input. */ - spikeInputPort: + spikeInputPort : name=NAME (LEFT_SQUARE_BRACKET sizeParameter=expression RIGHT_SQUARE_BRACKET)? LEFT_ANGLE_MINUS SPIKE_KEYWORD (LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN)? NEWLINE; - continuousInputPort: + continuousInputPort : name = NAME (LEFT_SQUARE_BRACKET sizeParameter=expression RIGHT_SQUARE_BRACKET)? dataType @@ -314,7 +317,7 @@ parser grammar PyNestMLParser; @attribute isSpike: true if and only if the neuron has a spike output. @attribute isContinuous: true if and only if the neuron has a continuous-time output. */ - outputBlock: OUTPUT_KEYWORD COLON + outputBlock : OUTPUT_KEYWORD COLON NEWLINE INDENT (isSpike=SPIKE_KEYWORD | isContinuous=CONTINUOUS_KEYWORD) (LEFT_PAREN (attribute=parameter (COMMA attribute=parameter)*)? RIGHT_PAREN)? NEWLINE DEDENT; @@ -327,7 +330,7 @@ parser grammar PyNestMLParser; @attribute returnType: An arbitrary return type, e.g. string or mV. @attribute block: Implementation of the function. */ - function: FUNCTION_KEYWORD NAME LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN (returnType=dataType)? + function : FUNCTION_KEYWORD NAME LEFT_PAREN (parameter (COMMA parameter)*)? 
RIGHT_PAREN (returnType=dataType)? COLON block ; @@ -339,12 +342,14 @@ parser grammar PyNestMLParser; */ parameter : NAME dataType; + expressionOrParameter : parameter | expression; + /** ASTConstParameter represents a single parameter consisting of a name and a literal default value, e.g. "foo=42". @attribute name: The name of the parameter. @attribute value: The corresponding default value. */ constParameter : name=NAME EQUALS value=(BOOLEAN_LITERAL - | UNSIGNED_INTEGER - | FLOAT - | STRING_LITERAL - | INF_KEYWORD); + | UNSIGNED_INTEGER + | FLOAT + | STRING_LITERAL + | INF_KEYWORD); diff --git a/pynestml/meta_model/ast_on_receive_block.py b/pynestml/meta_model/ast_on_receive_block.py index 23cad130c..246089a17 100644 --- a/pynestml/meta_model/ast_on_receive_block.py +++ b/pynestml/meta_model/ast_on_receive_block.py @@ -89,7 +89,7 @@ def get_children(self) -> List[ASTNode]: Returns the children of this node, if any. :return: List of children of this node. """ - return [self.get_block()] + return [self.get_input_port_variable(), self.get_block()] def equals(self, other: ASTNode) -> bool: r""" diff --git a/pynestml/meta_model/ast_variable.py b/pynestml/meta_model/ast_variable.py index 88c6f8131..ca2895960 100644 --- a/pynestml/meta_model/ast_variable.py +++ b/pynestml/meta_model/ast_variable.py @@ -19,11 +19,12 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
-from typing import Any, List, Optional +from typing import Any, List, Optional, Union from copy import copy from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.symbol_table.scope import Scope from pynestml.symbols.type_symbol import TypeSymbol @@ -43,7 +44,7 @@ class ASTVariable(ASTNode): """ def __init__(self, name, differential_order=0, type_symbol: Optional[str] = None, - vector_parameter: Optional[str] = None, is_homogeneous: bool = False, delay_parameter: Optional[str] = None, attribute: Optional[str] = None, *args, **kwargs): + vector_parameter: Optional[Union[str, ASTParameter]] = None, is_homogeneous: bool = False, delay_parameter: Optional[str] = None, attribute: Optional[str] = None, *args, **kwargs): r""" Standard constructor. :param name: the name of the variable diff --git a/pynestml/symbols/predefined_functions.py b/pynestml/symbols/predefined_functions.py index efdf483a3..7fd273032 100644 --- a/pynestml/symbols/predefined_functions.py +++ b/pynestml/symbols/predefined_functions.py @@ -512,10 +512,10 @@ def __register_convolve(cls): Registers the convolve function into the system. 
""" params = list() - params.append(PredefinedTypes.get_real_type()) - params.append(PredefinedTypes.get_real_type()) + params.append(PredefinedTypes.get_real_type()) # kernel + params.append(PredefinedTypes.get_template_type(0)) # spike input buffer symbol = FunctionSymbol(name=cls.CONVOLVE, param_types=params, - return_type=PredefinedTypes.get_real_type(), + return_type=PredefinedTypes.get_template_type(0), element_reference=None, is_predefined=True) cls.name2function[cls.CONVOLVE] = symbol diff --git a/pynestml/symbols/variable_symbol.py b/pynestml/symbols/variable_symbol.py index 92359f05b..7e9736e3e 100644 --- a/pynestml/symbols/variable_symbol.py +++ b/pynestml/symbols/variable_symbol.py @@ -63,6 +63,7 @@ class BlockType(Enum): INPUT = 7 OUTPUT = 8 PREDEFINED = 9 + ON_RECEIVE = 10 class VariableSymbol(Symbol): diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index bf5d50d0e..d4fc5398f 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -50,6 +50,7 @@ from pynestml.meta_model.ast_node_factory import ASTNodeFactory from pynestml.meta_model.ast_ode_equation import ASTOdeEquation from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.meta_model.ast_return_stmt import ASTReturnStmt from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_small_stmt import ASTSmallStmt @@ -1366,8 +1367,8 @@ def all_convolution_variable_names(cls, model: ASTModel) -> List[str]: @classmethod def construct_kernel_X_spike_buf_name(cls, kernel_var_name: str, spike_input_port: ASTInputPort, order: int, - diff_order_symbol="__d", suffix=""): - """ + diff_order_symbol="__d", suffix="", attribute: Optional[str] = None): + r""" Construct a kernel-buffer name as For example, if the kernel is @@ -1396,7 +1397,12 @@ def construct_kernel_X_spike_buf_name(cls, kernel_var_name: str, spike_input_por if 
spike_input_port.has_vector_parameter(): spike_input_port_name += "_" + str(cls.get_numeric_vector_size(spike_input_port)) - return kernel_var_name.replace("$", "__DOLLAR") + suffix + "__X__" + spike_input_port_name + diff_order_symbol * order + suffix + if attribute is not None: + attribute = "__DOT__" + attribute + else: + attribute = "" + + return kernel_var_name.replace("$", "__DOLLAR") + suffix + "__X__" + spike_input_port_name + attribute + diff_order_symbol * order + suffix @classmethod def replace_rhs_variable(cls, expr: ASTExpression, variable_name_to_replace: str, kernel_var: ASTVariable, @@ -2132,8 +2138,9 @@ def transform_ode_and_kernels_to_json(cls, model: ASTModel, parameters_blocks: S for kernel_var in kernel.get_variables(): expr = cls.get_expr_from_kernel_var(kernel, kernel_var.get_complete_name()) kernel_order = kernel_var.get_differential_order() + attribute = spike_input_port.get_variable().get_attribute() kernel_X_spike_buf_name_ticks = cls.construct_kernel_X_spike_buf_name( - kernel_var.get_name(), spike_input_port, kernel_order, diff_order_symbol="'") + kernel_var.get_name(), spike_input_port, kernel_order, diff_order_symbol="'", attribute=attribute) cls.replace_rhs_variables(expr, kernel_buffers) @@ -2346,7 +2353,7 @@ def replace_function_call_through_var(_expr=None): _expr.set_function_call(None) buffer_var = cls.construct_kernel_X_spike_buf_name( - var.get_name(), spike_input_port, var.get_differential_order() - 1) + var.get_name(), spike_input_port, var.get_differential_order() - 1, attribute=spike_input_port.get_attribute()) if cls.is_delta_kernel(kernel): # delta kernels are treated separately, and should be kept out of the dynamics (computing derivates etc.) 
--> set to zero _expr.set_variable(None) @@ -2585,3 +2592,7 @@ def port_name_printer(cls, variable: ASTVariable) -> str: s += str(variable.get_vector_parameter()) return s + + @classmethod + def is_parameter(cls, variable) -> str: + return isinstance(variable, ASTParameter) \ No newline at end of file diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index f1592663d..27b2e81c1 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -1384,7 +1384,7 @@ def get_random_functions_legally_used(cls, name): @classmethod def get_spike_input_port_appears_outside_equation_rhs_and_event_handler(cls, name): - message = "Spiking input port names (in this case '" + name + "') can only be used in the right-hand side of equations or in the definition of an onReceive block!" + message = "Spiking input port names (in this case '" + name + "') can only be used in the right-hand side of equations or in an onReceive block!" return MessageCode.SPIKING_INPUT_PORT_NAME_ILLEGALLY_USED, message @classmethod @@ -1396,3 +1396,8 @@ def get_continuous_output_port_cannot_have_attributes(cls): def get_spike_input_port_attribute_missing(cls, name: str): message = "Spiking input port '" + name + "' reference is missing attribute." 
return MessageCode.SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE, message + + @classmethod + def get_vector_input_ports_should_be_of_constant_size(cls): + message = "Vector input ports should be of constant size (this is a limitation of NEST Simulator)" + return MessageCode.VECTOR_INPUT_PORTS_SHOULD_BE_OF_CONSTANT_SIZE, message diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index 4b8ed8825..0ae3668a8 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -695,6 +695,8 @@ def visitStmt(self, ctx): def visitOnReceiveBlock(self, ctx): input_port_variable = self.visit(ctx.inputPortVariable) + print("ctx.inputPortVariable = " + str(input_port_variable)) + print("ctx.inputPortVariable = " + str(type(input_port_variable))) block = self.visit(ctx.block()) if ctx.block() is not None else None const_parameters = {} for el in ctx.constParameter(): diff --git a/pynestml/visitors/ast_function_call_visitor.py b/pynestml/visitors/ast_function_call_visitor.py index e4ec8650e..8fad92f92 100644 --- a/pynestml/visitors/ast_function_call_visitor.py +++ b/pynestml/visitors/ast_function_call_visitor.py @@ -53,6 +53,26 @@ def visit_simple_expression(self, node: ASTSimpleExpression) -> None: function_name = node.get_function_call().get_name() method_symbol = scope.resolve_to_symbol(function_name, SymbolKind.FUNCTION) + # return type of the convolve function is the type of the second parameter (the spike input buffer) + if function_name == PredefinedFunctions.CONVOLVE: + buffer_parameter = node.get_function_call().get_args()[1] + + if buffer_parameter.get_variable() is not None: + if not buffer_parameter.get_variable().get_attribute(): + # an attribute is missing for the spiking input port + code, message = Messages.get_spike_input_port_attribute_missing(buffer_name) + Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + log_level=LoggingLevel.ERROR) + 
node.type = ErrorTypeSymbol() + return + + buffer_name = buffer_parameter.get_variable().get_name() + "." + str(buffer_parameter.get_variable().get_attribute()) + buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE) + + assert buffer_symbol_resolve is not None + node.type = buffer_symbol_resolve.get_type_symbol() + return + # check if this is a delay variable symbol = ASTUtils.get_delay_variable_symbol(node.get_function_call()) if method_symbol is None and symbol is not None: @@ -92,24 +112,6 @@ def visit_simple_expression(self, node: ASTSimpleExpression) -> None: return_type.referenced_object = node - # return type of the convolve function is the type of the second parameter multiplied by the unit of time (s) - if function_name == PredefinedFunctions.CONVOLVE: - buffer_parameter = node.get_function_call().get_args()[1] - - if buffer_parameter.get_variable() is not None: - buffer_name = buffer_parameter.get_variable().get_name() - buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE) - if buffer_symbol_resolve is not None: - node.type = buffer_symbol_resolve.get_type_symbol() * UnitTypeSymbol(PredefinedUnits.get_unit("s")) - return - - # getting here means there is an error with the parameters to convolve - code, message = Messages.get_convolve_needs_buffer_parameter() - Logger.log_message(code=code, message=message, error_position=node.get_source_position(), - log_level=LoggingLevel.ERROR) - node.type = ErrorTypeSymbol() - return - if isinstance(method_symbol.get_return_type(), VoidTypeSymbol): code, message = Messages.get_void_function_on_rhs(function_name) Logger.log_message(code=code, message=message, error_position=node.get_source_position(), diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index db0b73392..61ade49a3 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -25,6 +25,8 @@ 
from pynestml.meta_model.ast_namespace_decorator import ASTNamespaceDecorator from pynestml.meta_model.ast_declaration import ASTDeclaration from pynestml.meta_model.ast_inline_expression import ASTInlineExpression +from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_stmt import ASTStmt from pynestml.meta_model.ast_variable import ASTVariable @@ -182,6 +184,10 @@ def visit_on_receive_block(self, node): node.get_scope().add_scope(scope) node.get_block().update_scope(scope) + # if node.input_port_variable: + # node.input_port_variable.update_scope(node.get_scope()) + + def endvisit_on_receive_block(self, node=None): self.block_type_stack.pop() @@ -484,11 +490,40 @@ def visit_variable(self, node: ASTVariable): # node.get_parent().type = actual_type # print("reassigned data type of " + str(node) + " to " + str(node.data_type)) - if node.has_vector_parameter(): node.get_vector_parameter().update_scope(node.get_scope()) node.get_vector_parameter().accept(self) + # if ASTUtils.vector_parameter_is_variable(node.get_vector_parameter()): + # symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_name(), + # block_type=BlockType.INPUT, vector_parameter=node.get_size_parameter(), + # is_predefined=False, is_inline_expression=False, is_recordable=False, + # type_symbol=type_symbol, variable_type=VariableType.BUFFER) + # symbol.set_comment(node.get_comment()) + # node.get_scope().add_symbol(symbol) + + print("in symboltablevisitor : variable is " + str(node.get_vector_parameter())) + + if isinstance(node.get_vector_parameter(), ASTParameter): + # vector parameter is a declaration + print("in symboltablevisitor : \tvector parameter is a declaration: adding " + node.get_vector_parameter().get_name()) + symbol = VariableSymbol(element_reference=node, + scope=node.get_scope(), + 
name=node.get_vector_parameter().get_name(), + block_type=BlockType.ON_RECEIVE, + declaring_expression=None, + is_predefined=False, + is_inline_expression=False, + is_recordable=False, + type_symbol=node.get_vector_parameter().get_data_type(), + initial_value=None, + vector_parameter=None, + variable_type=VariableType.VARIABLE, + decorators=None, + namespace_decorators=None) + symbol.set_comment(node.get_vector_parameter().get_comment()) + node.get_parent().get_scope().add_symbol(symbol) + def visit_inline_expression(self, node: ASTInlineExpression): """ Private method: Used to visit a single inline expression, create the corresponding symbol and update the scope. diff --git a/pynestml/visitors/ast_variable_visitor.py b/pynestml/visitors/ast_variable_visitor.py index 013d88c2c..6e1030f89 100644 --- a/pynestml/visitors/ast_variable_visitor.py +++ b/pynestml/visitors/ast_variable_visitor.py @@ -49,13 +49,6 @@ def visit_simple_expression(self, node: ASTSimpleExpression): # update the type of the variable according to its symbol type. 
if var_resolve is not None: - # print("var_resolve.attribute is " + str(var_resolve.attribute)) - - # if var_resolve.attribute: - # import pdb;pdb.set_trace() - # node.type = var_resolve.attribute.get_type_symbol() - # print("Setting type according to attribute: " + str(var_resolve.attribute) + " = " + str(node.type)) - # else: node.type = var_resolve.get_type_symbol() node.type.referenced_object = node return diff --git a/pynestml/visitors/ast_visitor.py b/pynestml/visitors/ast_visitor.py index f4711da81..7449dcb2d 100644 --- a/pynestml/visitors/ast_visitor.py +++ b/pynestml/visitors/ast_visitor.py @@ -1302,6 +1302,8 @@ def traverse_update_block(self, node): def traverse_on_receive_block(self, node): if node.get_block() is not None: node.get_block().accept(self.get_real_self()) + if node.input_port_variable is not None: + node.input_port_variable.accept(self.get_real_self()) def traverse_on_condition_block(self, node): if node.get_cond_expr() is not None: diff --git a/tests/invalid/CoCoAssignmentToInlineExpression.nestml b/tests/invalid/CoCoAssignmentToInlineExpression.nestml index 2f807328d..28bdedc74 100644 --- a/tests/invalid/CoCoAssignmentToInlineExpression.nestml +++ b/tests/invalid/CoCoAssignmentToInlineExpression.nestml @@ -29,10 +29,10 @@ model CoCoAssignmentToInlineExpression: equations: kernel alpha_kernel = (e / tau_syn) * t * exp(-t / tau_syn) - inline foo real = convolve(alpha_kernel, spikes_in) + inline foo real = convolve(alpha_kernel, spikes_in.weight) input: - spikes_in <- spike + spikes_in <- spike(weight real) update: foo = 42. 
diff --git a/tests/invalid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml b/tests/invalid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml new file mode 100644 index 000000000..1499bab83 --- /dev/null +++ b/tests/invalid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml @@ -0,0 +1,43 @@ +""" +CoCoOnReceiveVectorsShouldBeConstantSize.nestml +############################################### + + +Description ++++++++++++ + +This test is used to test the usage of onReceive blocks for vector ports of variable length. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" +model CoCoOnReceiveVectorsShouldBeConstantSize: + state: + GABA_spikes_sum real = 0 + + parameters: + N_buf integer = 5 + + input: + GABA_spikes[N_buf] <- spike(weight real) + + onReceive(GABA_spikes[i integer]): + GABA_spikes_sum += GABA_spikes[i].weight diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index f6bc07bcb..80b36cd9a 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -54,10 +54,10 @@ def generate_all_models(self): generate_nest_target(input_path=[# "models/neurons/hh_cond_exp_traub_neuron.nestml", # "models/neurons/hh_psc_alpha_neuron.nestml", # "models/neurons/iaf_cond_beta_neuron.nestml", - # "models/neurons/iaf_cond_alpha_neuron.nestml", + "models/neurons/iaf_cond_alpha_neuron.nestml", # "models/neurons/iaf_cond_exp_neuron.nestml", # "models/neurons/iaf_psc_alpha_neuron.nestml", - "models/neurons/iaf_psc_exp_neuron.nestml", + # "models/neurons/iaf_psc_exp_neuron.nestml", # "models/neurons/iaf_psc_delta_neuron.nestml" ], target_path="/tmp/nestml-allmodels", @@ -98,10 +98,10 @@ def test_nest_integration(self): # self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") # self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_psc_exp", 
"iaf_psc_exp_neuron_nestml") # self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") # self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") @@ -113,9 +113,9 @@ def test_nest_integration(self): # self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") # self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - # self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") # iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} # iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) @@ -177,19 +177,19 @@ def _test_model_equivalence_curr_inj(self, nest_model_name, nestml_model_name, g # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions pass - neuron1 = nest.Create(nest_model_name, params=nest_model_parameters) - neuron2 = nest.Create(nestml_model_name, params=nestml_model_parameters) + nest_neuron = nest.Create(nest_model_name, params=nest_model_parameters) + nestml_neuron = nest.Create(nestml_model_name, params=nestml_model_parameters) if model_initial_state is not None: - nest.SetStatus(neuron1, model_initial_state) - nest.SetStatus(neuron2, model_initial_state) + nest.SetStatus(nest_neuron, model_initial_state) + 
nest.SetStatus(nestml_neuron, model_initial_state) # if gsl_error_tol is not None: - # nest.SetStatus(neuron2, {"gsl_error_tol": gsl_error_tol}) + # nest.SetStatus(nestml_neuron, {"gsl_error_tol": gsl_error_tol}) dc = nest.Create("dc_generator", params={"amplitude": 0.}) - nest.Connect(dc, neuron1) - nest.Connect(dc, neuron2) + nest.Connect(dc, nest_neuron) + nest.Connect(dc, nestml_neuron) multimeter1 = nest.Create("multimeter") multimeter2 = nest.Create("multimeter") @@ -198,8 +198,8 @@ def _test_model_equivalence_curr_inj(self, nest_model_name, nestml_model_name, g nest.SetStatus(multimeter1, {"record_from": [V_m_specifier]}) nest.SetStatus(multimeter2, {"record_from": [V_m_specifier]}) - nest.Connect(multimeter1, neuron1) - nest.Connect(multimeter2, neuron2) + nest.Connect(multimeter1, nest_neuron) + nest.Connect(multimeter2, nestml_neuron) if NESTTools.detect_nest_version().startswith("v2"): sd_reference = nest.Create("spike_detector") @@ -208,8 +208,8 @@ def _test_model_equivalence_curr_inj(self, nest_model_name, nestml_model_name, g sd_reference = nest.Create("spike_recorder") sd_testant = nest.Create("spike_recorder") - nest.Connect(neuron1, sd_reference) - nest.Connect(neuron2, sd_testant) + nest.Connect(nest_neuron, sd_reference) + nest.Connect(nestml_neuron, sd_testant) nest.Simulate(t_pulse_start) dc.amplitude = I_stim * 1E12 # 1E12: convert A to pA @@ -260,19 +260,19 @@ def _test_model_equivalence_fI_curve(self, nest_model_name, nestml_model_name, g # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions pass - neuron1 = nest.Create(nest_model_name, params=nest_model_parameters) - neuron2 = nest.Create(nestml_model_name, params=nestml_model_parameters) + nest_neuron = nest.Create(nest_model_name, params=nest_model_parameters) + nestml_neuron = nest.Create(nestml_model_name, params=nestml_model_parameters) if model_initial_state is not None: - nest.SetStatus(neuron1, 
model_initial_state) - nest.SetStatus(neuron2, model_initial_state) + nest.SetStatus(nest_neuron, model_initial_state) + nest.SetStatus(nestml_neuron, model_initial_state) # if gsl_error_tol is not None: - # nest.SetStatus(neuron2, {"gsl_error_tol": gsl_error_tol}) + # nest.SetStatus(nestml_neuron, {"gsl_error_tol": gsl_error_tol}) dc = nest.Create("dc_generator", params={"amplitude": 1E12 * I_stim}) # 1E12: convert A to pA - nest.Connect(dc, neuron1) - nest.Connect(dc, neuron2) + nest.Connect(dc, nest_neuron) + nest.Connect(dc, nestml_neuron) multimeter1 = nest.Create("multimeter") multimeter2 = nest.Create("multimeter") @@ -281,8 +281,8 @@ def _test_model_equivalence_fI_curve(self, nest_model_name, nestml_model_name, g nest.SetStatus(multimeter1, {"record_from": [V_m_specifier]}) nest.SetStatus(multimeter2, {"record_from": [V_m_specifier]}) - nest.Connect(multimeter1, neuron1) - nest.Connect(multimeter2, neuron2) + nest.Connect(multimeter1, nest_neuron) + nest.Connect(multimeter2, nestml_neuron) if NESTTools.detect_nest_version().startswith("v2"): sd_reference = nest.Create("spike_detector") @@ -291,8 +291,8 @@ def _test_model_equivalence_fI_curve(self, nest_model_name, nestml_model_name, g sd_reference = nest.Create("spike_recorder") sd_testant = nest.Create("spike_recorder") - nest.Connect(neuron1, sd_reference) - nest.Connect(neuron2, sd_testant) + nest.Connect(nest_neuron, sd_reference) + nest.Connect(nestml_neuron, sd_testant) nest.Simulate(t_stop) @@ -363,26 +363,35 @@ def _test_model_equivalence_psc(self, nest_model_name, nestml_model_name, gsl_er # ResetKernel() does not unload modules for NEST Simulator < v3.7; ignore exception if module is already loaded on earlier versions pass - neuron1 = nest.Create(nest_model_name, params=nest_model_parameters) - neuron2 = nest.Create(nestml_model_name, params=nestml_model_parameters) + nest_neuron = nest.Create(nest_model_name, params=nest_model_parameters) + nestml_neuron = nest.Create(nestml_model_name, 
params=nestml_model_parameters) if model_initial_state is not None: - nest.SetStatus(neuron1, model_initial_state) - nest.SetStatus(neuron2, model_initial_state) + nest.SetStatus(nest_neuron, model_initial_state) + nest.SetStatus(nestml_neuron, model_initial_state) # if gsl_error_tol is not None: - # nest.SetStatus(neuron2, {"gsl_error_tol": gsl_error_tol}) + # nest.SetStatus(nestml_neuron, {"gsl_error_tol": gsl_error_tol}) spikegenerator = nest.Create("spike_generator", params={"spike_times": spike_times, "spike_weights": spike_weights}) - - nest.Connect(spikegenerator, neuron1, syn_spec=syn_spec) - nest.Connect(spikegenerator, neuron2, syn_spec=syn_spec) + nest.Connect(spikegenerator, nest_neuron, syn_spec=syn_spec) + + if len(nestml_neuron.get("receptor_types")) > 1: + # this NESTML neuron is written as having separate input ports for excitatory and inhibitory spikes + syn_spec_nestml = syn_spec + if syn_spec_nestml is None: + syn_spec_nestml = {} + syn_spec_nestml.update({"receptor_type": nestml_neuron.get("receptor_types")["EXC_SPIKES"]}) + nest.Connect(spikegenerator, nestml_neuron, syn_spec=syn_spec_nestml) + else: + # this NESTML neuron is written as having one input port for excitatory and inhibitory spikes (with sign of the weight telling the difference) + nest.Connect(spikegenerator, nestml_neuron, syn_spec=syn_spec) spike_recorder1 = nest.Create("spike_recorder") spike_recorder2 = nest.Create("spike_recorder") - nest.Connect(neuron1, spike_recorder1) - nest.Connect(neuron2, spike_recorder2) + nest.Connect(nest_neuron, spike_recorder1) + nest.Connect(nestml_neuron, spike_recorder2) multimeter1 = nest.Create("multimeter") multimeter2 = nest.Create("multimeter") @@ -391,8 +400,8 @@ def _test_model_equivalence_psc(self, nest_model_name, nestml_model_name, gsl_er nest.SetStatus(multimeter1, {"record_from": [V_m_specifier]}) nest.SetStatus(multimeter2, {"record_from": [V_m_specifier]}) - nest.Connect(multimeter1, neuron1) - nest.Connect(multimeter2, neuron2) 
+ nest.Connect(multimeter1, nest_neuron) + nest.Connect(multimeter2, nestml_neuron) nest.Simulate(400.) diff --git a/tests/nest_tests/resources/FIR_filter.nestml b/tests/nest_tests/resources/FIR_filter.nestml index 47f4e27ea..daa5d8def 100644 --- a/tests/nest_tests/resources/FIR_filter.nestml +++ b/tests/nest_tests/resources/FIR_filter.nestml @@ -47,13 +47,12 @@ model fir_filter: input: spike_in <- spike(weight real) - onReceive(spike_in): spike_in_buffer += spike_in.weight update: # circular buffer for input spike count per timestep - x[i] = spike_in_buffer * s + x[i] = spike_in_buffer spike_in_buffer = 0. # compute the new value of y diff --git a/tests/nest_tests/resources/input_ports.nestml b/tests/nest_tests/resources/input_ports.nestml index 5931e6257..bbd280c4d 100644 --- a/tests/nest_tests/resources/input_ports.nestml +++ b/tests/nest_tests/resources/input_ports.nestml @@ -51,7 +51,7 @@ model input_ports: bar += 2 * AMPA_spikes.weight onReceive(GABA_spikes): - bar += GABA_spikes.weight + bar += 3 * GABA_spikes.weight onReceive(foo[0]): foo_spikes += foo[0].weight @@ -60,10 +60,13 @@ model input_ports: foo_spikes += 5.5 * foo[1].weight onReceive(my_spikes[0]): + print("SPIKE RECEIVED 1") my_spikes_ip += my_spikes[0].weight onReceive(my_spikes[1]): - my_spikes_ip += my_spikes[0].weight + print("SPIKE RECEIVED 2") + my_spikes_ip += my_spikes[1].weight onReceive(my_spikes2[1]): + print("SPIKE RECEIVED 3") my_spikes_ip -= my_spikes2[1].weight diff --git a/tests/nest_tests/resources/input_ports_in_loop.nestml b/tests/nest_tests/resources/input_ports_in_loop.nestml index 8fea931f1..f5013e1bd 100644 --- a/tests/nest_tests/resources/input_ports_in_loop.nestml +++ b/tests/nest_tests/resources/input_ports_in_loop.nestml @@ -31,31 +31,42 @@ along with NEST. If not, see . 
""" model input_ports_loop: state: - bar real = 0 - foo_spikes real = 0 - my_spikes_ip[N_spikes] real = 0 + AMPA_spikes_buf[4] real = 0 + GABA_spikes_buf[5] real = 0 + + NMDA_spikes_sum real = 0 + AMPA_spikes_sum real = 0 + GABA_spikes_sum[N_spikes] real = 0 parameters: - N_buf integer = 5 N_spikes integer = 10 input: - NMDA_spikes <- spike - foo[2] <- spike - spike_buf[N_buf] <- spike + NMDA_spikes <- spike(weight real) + AMPA_spikes[2] <- spike(weight real) + GABA_spikes[5] <- spike(weight real) - update: - bar += NMDA_spikes * s + onReceive(NMDA_spikes): + NMDA_spikes_sum += NMDA_spikes.weight + + onReceive(AMPA_spikes[i integer]): + AMPA_spikes_buf[i + 2] = AMPA_spikes[i].weight - # foo spikes + onReceive(GABA_spikes[j integer]): + GABA_spikes_buf[j] = GABA_spikes[j].weight + + update: + # AMPA_spikes i integer = 0 - for i in 0 ... (N_buf) step 1: - foo_spikes += 2.5 * foo[i] * s + for i in 0 ... 2 step 1: + AMPA_spikes_sum += 2.5 * AMPA_spikes_buf[i + 2] + AMPA_spikes_buf[i + 2] = 0 - # spike_buf spikes + # GABA_spikes j integer = 0 k integer = 0 while j < N_buf and k < N_spikes: - my_spikes_ip[k+2] += spike_buf[j] * s + GABA_spikes_sum[k + 2] += GABA_spikes_buf[j] + GABA_spikes_buf[j] = 0 j += 1 k += 1 diff --git a/tests/nest_tests/test_input_ports.py b/tests/nest_tests/test_input_ports.py index 886c41c3a..0cebd7aa3 100644 --- a/tests/nest_tests/test_input_ports.py +++ b/tests/nest_tests/test_input_ports.py @@ -33,73 +33,73 @@ class TestInputPorts: Tests the different kind of input ports supported in NESTML. 
""" - @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), - reason="This test does not support NEST 2") - def test_input_ports(self): - input_path = os.path.join(os.path.realpath(os.path.join( - os.path.dirname(__file__), "resources", "input_ports.nestml"))) - target_path = "target" - logging_level = "INFO" - module_name = "nestmlmodule" - suffix = "_nestml" - - generate_nest_target(input_path, - target_path=target_path, - logging_level=logging_level, - module_name=module_name, - suffix=suffix) - nest.ResetKernel() - nest.Install(module_name) - - neuron = nest.Create("input_ports_nestml") - - # List of receptor types for the spiking input ports - receptor_types = nest.GetStatus(neuron, "receptor_types")[0] - - spike_times = [ - [10., 44.], # NMDA_SPIKES - [12., 42.], # AMPA_SPIKES - [14., 40.], # GABA_SPIKES - [16., 38.], # FOO_VEC_IDX_0 - [18., 36.], # FOO_VEC_IDX_1 - [20., 34.], # MY_SPIKES_VEC_IDX_0 - [22., 32.], # MY_SPIKES_VEC_IDX_1 - [24., 30.], # MY_SPIKES2_VEC_IDX_1 - ] - sgs = nest.Create("spike_generator", len(spike_times)) - for i, sg in enumerate(sgs): - sg.spike_times = spike_times[i] - - nest.Connect(sgs[0], neuron, syn_spec={"receptor_type": receptor_types["NMDA_SPIKES"], "weight": -1.0, "delay": 1.0}) - nest.Connect(sgs[1], neuron, syn_spec={"receptor_type": receptor_types["AMPA_SPIKES"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[2], neuron, syn_spec={"receptor_type": receptor_types["GABA_SPIKES"], "weight": -1.0, "delay": 1.0}) - nest.Connect(sgs[3], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[4], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_1"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[5], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[6], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_1"], "weight": 2.0, "delay": 1.0}) - 
nest.Connect(sgs[7], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES2_VEC_IDX_1"], "weight": -3.0, "delay": 1.0}) - - mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "my_spikes_ip"]}) - nest.Connect(mm, neuron) - - nest.Simulate(50.) - - events = mm.get("events") - connections = nest.GetConnections(target=neuron) - - # corresponds to ``bar += NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes`` in the update block - assert events["bar"][-1] == len(spike_times[0]) * abs(connections.get("weight")[0]) \ - + 2 * len(spike_times[1]) * abs(connections.get("weight")[1]) \ - - 3 * len(spike_times[2]) * abs(connections.get("weight")[2]) - - # corresponds to ``foo_spikes += foo[0] + 5.5 * foo[1]`` in the update block - assert events["foo_spikes"][-1] == len(spike_times[3]) * abs(connections.get("weight")[3]) \ - + 5.5 * len(spike_times[4]) * abs(connections.get("weight")[4]) - - # corresponds to ``my_spikes_ip += my_spikes[0] + my_spikes[1] - my_spikes2[1]`` in the update block - assert events["my_spikes_ip"][-1] == len(spike_times[5]) * abs(connections.get("weight")[5]) \ - + len(spike_times[6]) * abs(connections.get("weight")[6]) \ - - len(spike_times[7]) * abs(connections.get("weight")[7]) + # @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + # reason="This test does not support NEST 2") + # def test_input_ports(self): + # input_path = os.path.join(os.path.realpath(os.path.join( + # os.path.dirname(__file__), "resources", "input_ports.nestml"))) + # target_path = "target" + # logging_level = "INFO" + # module_name = "nestmlmodule" + # suffix = "_nestml" + + # generate_nest_target(input_path, + # target_path=target_path, + # logging_level=logging_level, + # module_name=module_name, + # suffix=suffix) + # nest.ResetKernel() + # nest.Install(module_name) + + # neuron = nest.Create("input_ports_nestml") + + # # List of receptor types for the spiking input ports + # receptor_types = nest.GetStatus(neuron, 
"receptor_types")[0] + + # spike_times = [ + # [10., 44.], # NMDA_SPIKES + # [12., 42.], # AMPA_SPIKES + # [14., 40.], # GABA_SPIKES + # [16., 38.], # FOO_VEC_IDX_0 + # [18., 36.], # FOO_VEC_IDX_1 + # [20., 34.], # MY_SPIKES_VEC_IDX_0 + # [22., 32.], # MY_SPIKES_VEC_IDX_1 + # [24., 30.], # MY_SPIKES2_VEC_IDX_1 + # ] + # sgs = nest.Create("spike_generator", len(spike_times)) + # for i, sg in enumerate(sgs): + # sg.spike_times = spike_times[i] + + # nest.Connect(sgs[0], neuron, syn_spec={"receptor_type": receptor_types["NMDA_SPIKES"], "weight": -1.0, "delay": 1.0}) + # nest.Connect(sgs[1], neuron, syn_spec={"receptor_type": receptor_types["AMPA_SPIKES"], "weight": 1.0, "delay": 1.0}) + # nest.Connect(sgs[2], neuron, syn_spec={"receptor_type": receptor_types["GABA_SPIKES"], "weight": -1.0, "delay": 1.0}) + # nest.Connect(sgs[3], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) + # nest.Connect(sgs[4], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_1"], "weight": 1.0, "delay": 1.0}) + # nest.Connect(sgs[5], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) + # nest.Connect(sgs[6], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_1"], "weight": 2.0, "delay": 1.0}) + # nest.Connect(sgs[7], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES2_VEC_IDX_1"], "weight": -3.0, "delay": 1.0}) + + # mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "my_spikes_ip"]}) + # nest.Connect(mm, neuron) + + # nest.Simulate(50.) 
+ + # events = mm.get("events") + # connections = nest.GetConnections(target=neuron) + + # # corresponds to ``bar += NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes`` in the update block + # assert events["bar"][-1] == len(spike_times[0]) * connections.get("weight")[0] \ + # + 2 * len(spike_times[1]) * connections.get("weight")[1] \ + # + 3 * len(spike_times[2]) * connections.get("weight")[2] + + # # corresponds to ``foo_spikes += foo[0] + 5.5 * foo[1]`` in the update block + # assert events["foo_spikes"][-1] == len(spike_times[3]) * connections.get("weight")[3] \ + # + 5.5 * len(spike_times[4]) * connections.get("weight")[4] + + # # corresponds to ``my_spikes_ip += my_spikes[0] + my_spikes[1] - my_spikes2[1]`` in the update block + # assert events["my_spikes_ip"][-1] == len(spike_times[5]) * connections.get("weight")[5] \ + # + len(spike_times[6]) * connections.get("weight")[6] \ + # - len(spike_times[7]) * connections.get("weight")[7] # minus because of a minus in the model @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), reason="This test does not support NEST 2") diff --git a/tests/test_cocos.py b/tests/test_cocos.py index cb06529f4..096a10135 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -410,6 +410,13 @@ def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_valid_co_co_on_receive_vectors_should_be_constant_size(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def 
test_invalid_co_co_on_receive_vectors_should_be_constant_size(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 def _parse_and_validate_model(self, fname: str) -> Optional[str]: from pynestml.frontend.pynestml_frontend import generate_target diff --git a/tests/valid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml b/tests/valid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml new file mode 100644 index 000000000..ab3145321 --- /dev/null +++ b/tests/valid/CoCoOnReceiveVectorsShouldBeConstantSize.nestml @@ -0,0 +1,40 @@ +""" +CoCoOnReceiveVectorsShouldBeConstantSize.nestml +############################################### + + +Description ++++++++++++ + +This test is used to test the usage of onReceive blocks for vector ports of variable length. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model CoCoOnReceiveVectorsShouldBeConstantSize: + state: + GABA_spikes_sum real = 0 + + input: + GABA_spikes[5] <- spike(weight real) + + onReceive(GABA_spikes[i integer]): + GABA_spikes_sum += GABA_spikes[i].weight From b5f8200d0554e81dabced13240b7ca28ed250858 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Wed, 6 Nov 2024 23:28:14 +0100 Subject: [PATCH 20/68] add attributes to spiking input ports --- ...ppear_only_in_equation_rhs_and_event_handlers.py | 3 +-- pynestml/codegeneration/autodoc_builder.py | 13 ++++++------- pynestml/meta_model/ast_model_body.py | 2 +- pynestml/visitors/ast_symbol_table_visitor.py | 4 ---- 4 files changed, 8 insertions(+), 14 deletions(-) diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index d0d412b7d..ad387d38f 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -83,8 +83,7 @@ def visit_variable(self, node: ASTVariable): return except ValueError: # in case vector parameter was not an integer numeral - return # XXX: DO MORE CHECKS! - pass + return # XXX: DO MORE CHECKS! 
if isinstance(_node, ASTOdeEquation): # spike input port was used inside the rhs of an equation; everything is OK diff --git a/pynestml/codegeneration/autodoc_builder.py b/pynestml/codegeneration/autodoc_builder.py index 423db3bdc..6dc5b5ab5 100644 --- a/pynestml/codegeneration/autodoc_builder.py +++ b/pynestml/codegeneration/autodoc_builder.py @@ -234,20 +234,19 @@ def _test_model_psp(self, model_name, max_weight: float = 10., model_opts=None, spikegenerator = nest.Create("spike_generator", params={"spike_times": spike_times, "spike_weights": spike_weights}) - nest.Connect(spikegenerator, neuron1, syn_spec=syn_spec) if len(neuron2.get("receptor_types")) > 1: # this NESTML neuron is written as having separate input ports for excitatory and inhibitory spikes spikegenerator_exc = nest.Create("spike_generator", - params={"spike_times": spike_times, - "spike_weights": spike_weights}) + params={"spike_times": spike_times, + "spike_weights": spike_weights}) spikegenerator_inh = nest.Create("spike_generator", - params={"spike_times": spike_times, - "spike_weights": spike_weights}) + params={"spike_times": spike_times, + "spike_weights": spike_weights}) nest.Connect(spikegenerator_exc, neuron2, syn_spec=syn_spec | {"receptor_type": neuron2.get("receptor_type")["EXC_SPIKES"]}) spikegenerator_inh = nest.Create("spike_generator", - params={"spike_times": spike_times, - "spike_weights": spike_weights}) + params={"spike_times": spike_times, + "spike_weights": spike_weights}) nest.Connect(spikegenerator_inh, neuron2, syn_spec=syn_spec | {"receptor_type": neuron2.get("receptor_type")["INH_SPIKES"]}) else: # this NESTML neuron is written as having one input port for excitatory and inhibitory spikes (with sign of the weight telling the difference) diff --git a/pynestml/meta_model/ast_model_body.py b/pynestml/meta_model/ast_model_body.py index bf14d7adb..835255553 100644 --- a/pynestml/meta_model/ast_model_body.py +++ b/pynestml/meta_model/ast_model_body.py @@ -151,7 +151,7 @@ def 
get_internals_blocks(self) -> List[ASTBlockWithVariables]: def get_on_receive_block(self, port_name) -> Optional[ASTOnReceiveBlock]: for elem in self.get_body_elements(): - assert not "." in elem.input_port_variable.name # XXX REMOVE + assert not "." in elem.input_port_variable.name # XXX REMOVE if isinstance(elem, ASTOnReceiveBlock) and elem.input_port_variable.name == port_name: return elem diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 8985b0072..c1a97eb69 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -181,10 +181,6 @@ def visit_on_receive_block(self, node): node.get_scope().add_scope(scope) node.get_block().update_scope(scope) - # if node.input_port_variable: - # node.input_port_variable.update_scope(node.get_scope()) - - def endvisit_on_receive_block(self, node=None): self.block_type_stack.pop() From 9b804253719eaa36aeaee9c73755899562d53908 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Wed, 6 Nov 2024 23:42:20 +0100 Subject: [PATCH 21/68] add attributes to spiking input ports --- pynestml/utils/messages.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index f4975d27e..379f8f8be 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -140,9 +140,8 @@ class MessageCode(Enum): EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS = 115 SPIKING_INPUT_PORT_NAME_ILLEGALLY_USED = 116 CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES = 117 - CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES = 118 - SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE = 119 - CONVOLVE_NEEDS_BUFFER_PARAMETER = 120 + SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE = 118 + CONVOLVE_NEEDS_BUFFER_PARAMETER = 119 class Messages: From 6ae9e2198c89be0886087ef11a24bbff6aa8acb1 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 7 Nov 2024 00:00:30 +0100 Subject: [PATCH 22/68] add attributes to spiking input ports --- pynestml/meta_model/ast_model_body.py | 1 - pynestml/visitors/ast_symbol_table_visitor.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pynestml/meta_model/ast_model_body.py b/pynestml/meta_model/ast_model_body.py index 835255553..7bf2503f1 100644 --- a/pynestml/meta_model/ast_model_body.py +++ b/pynestml/meta_model/ast_model_body.py @@ -151,7 +151,6 @@ def get_internals_blocks(self) -> List[ASTBlockWithVariables]: def get_on_receive_block(self, port_name) -> Optional[ASTOnReceiveBlock]: for elem in self.get_body_elements(): - assert not "." in elem.input_port_variable.name # XXX REMOVE if isinstance(elem, ASTOnReceiveBlock) and elem.input_port_variable.name == port_name: return elem diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index c1a97eb69..d3cc5e713 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -650,7 +650,7 @@ def endvisit_input_port(self, node: ASTInputPort): assert node.is_spike() - if len(node.parameters) == 0: + if node.parameters is None or len(node.parameters) == 0: type_symbol = ErrorTypeSymbol() # not allowed to use a bare spike input port name in expressions etc. else: for parameter in node.parameters: From 35dbeb69da4179cc23ed71ba2bb6d592d6e1c21d Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Wed, 11 Dec 2024 10:23:36 +0100 Subject: [PATCH 23/68] add explicit output parameters to spiking output port --- ...only_in_equation_rhs_and_event_handlers.py | 2 +- pynestml/codegeneration/autodoc_builder.py | 6 - pynestml/generated/PyNestMLParser.py | 1407 +++++++++-------- pynestml/generated/PyNestMLParserVisitor.py | 5 + pynestml/grammars/PyNestMLParser.g4 | 4 +- pynestml/utils/ast_utils.py | 4 +- .../visitors/ast_function_call_visitor.py | 24 +- .../CoCoConvolveNotCorrectlyProvided.nestml | 4 +- tests/lexer_parser_test.py | 2 - tests/nest_tests/nest_integration_test.py | 123 +- tests/test_cocos.py | 2 +- .../CoCoConvolveNotCorrectlyProvided.nestml | 2 +- 12 files changed, 824 insertions(+), 761 deletions(-) diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index ad387d38f..5a1a3c8e5 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -20,6 +20,7 @@ # along with NEST. If not, see . 
from typing import Optional + from pynestml.cocos.co_co import CoCo from pynestml.meta_model.ast_inline_expression import ASTInlineExpression from pynestml.meta_model.ast_input_port import ASTInputPort @@ -103,4 +104,3 @@ def visit_variable(self, node: ASTVariable): code, message = Messages.get_spike_input_port_appears_outside_equation_rhs_and_event_handler(node_name) Logger.log_message(code=code, message=message, error_position=node.get_source_position(), log_level=LoggingLevel.ERROR) - diff --git a/pynestml/codegeneration/autodoc_builder.py b/pynestml/codegeneration/autodoc_builder.py index dc0849b07..2d83816ae 100644 --- a/pynestml/codegeneration/autodoc_builder.py +++ b/pynestml/codegeneration/autodoc_builder.py @@ -252,12 +252,6 @@ def _test_model_psp(self, model_name, max_weight: float = 10., model_opts=None, # this NESTML neuron is written as having one input port for excitatory and inhibitory spikes (with sign of the weight telling the difference) nest.Connect(spikegenerator, neuron2, syn_spec=syn_spec) - - - - - - nest.Connect(spikegenerator, neuron) spike_recorder = nest.Create("spike_recorder") diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 7c72ba754..003718b2e 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,252 +10,254 @@ def serializedATN(): return [ - 4,1,89,661,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,89,667,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, 2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,31,7,31,2,32,7,32,2,33, 7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,2,38,7,38,2,39,7,39, - 2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7,45,1,0, - 1,0,1,0,1,0,1,0,1,0,3,0,99,8,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, - 
1,3,1,110,8,1,1,1,1,1,1,1,3,1,115,8,1,1,1,1,1,1,1,1,1,5,1,121,8, - 1,10,1,12,1,124,9,1,1,2,3,2,127,8,2,1,2,1,2,1,3,1,3,1,3,1,3,1,3, - 1,3,1,3,3,3,138,8,3,1,4,1,4,1,5,1,5,1,6,1,6,1,6,1,6,1,6,1,6,1,6, - 1,6,1,6,1,6,1,6,3,6,155,8,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,3,6,164, - 8,6,1,6,1,6,1,6,1,6,3,6,170,8,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6, - 1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,5,6,191,8,6,10,6,12, - 6,194,9,6,1,7,1,7,1,7,1,7,3,7,200,8,7,1,7,1,7,1,7,3,7,205,8,7,1, - 8,1,8,1,8,3,8,210,8,8,1,9,1,9,1,9,1,9,1,9,3,9,217,8,9,1,10,1,10, - 1,10,1,10,1,10,1,10,1,10,3,10,226,8,10,1,11,1,11,3,11,230,8,11,1, - 12,1,12,1,12,1,12,1,12,3,12,237,8,12,1,12,5,12,240,8,12,10,12,12, - 12,243,9,12,1,12,1,12,3,12,247,8,12,1,13,1,13,1,13,1,13,1,13,5,13, - 254,8,13,10,13,12,13,257,9,13,3,13,259,8,13,1,13,1,13,1,14,3,14, - 264,8,14,1,14,1,14,1,14,1,14,1,14,1,14,3,14,272,8,14,1,14,5,14,275, - 8,14,10,14,12,14,278,9,14,1,14,1,14,1,15,1,15,1,15,1,15,3,15,286, - 8,15,1,15,5,15,289,8,15,10,15,12,15,292,9,15,1,15,1,15,1,16,1,16, - 1,16,1,16,1,16,1,16,1,16,1,16,1,16,5,16,305,8,16,10,16,12,16,308, - 9,16,1,16,3,16,311,8,16,1,16,1,16,1,17,1,17,3,17,317,8,17,1,18,1, - 18,1,18,3,18,322,8,18,1,19,1,19,1,19,1,19,3,19,328,8,19,1,19,1,19, - 1,20,1,20,1,20,1,20,1,20,1,20,3,20,338,8,20,1,20,1,20,1,21,3,21, - 343,8,21,1,21,3,21,346,8,21,1,21,1,21,1,21,5,21,351,8,21,10,21,12, - 21,354,9,21,1,21,1,21,1,21,3,21,359,8,21,1,21,1,21,1,21,1,21,3,21, - 365,8,21,1,21,5,21,368,8,21,10,21,12,21,371,9,21,1,22,1,22,1,22, - 1,23,3,23,377,8,23,1,23,1,23,1,23,5,23,382,8,23,10,23,12,23,385, - 9,23,1,24,1,24,3,24,389,8,24,1,25,1,25,5,25,393,8,25,10,25,12,25, - 396,9,25,1,25,3,25,399,8,25,1,26,1,26,1,26,1,26,1,26,1,26,1,26,1, - 26,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1, - 28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,432,8, - 29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1, - 30,1,30,1,30,1,31,1,31,4,31,451,8,31,11,31,12,31,452,1,31,1,31,1, - 
32,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1, - 33,1,33,4,33,472,8,33,11,33,12,33,473,1,33,1,33,1,34,1,34,1,34,1, - 34,1,34,5,34,483,8,34,10,34,12,34,486,9,34,1,34,1,34,1,34,1,34,1, - 34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,500,8,35,10,35,12,35, - 503,9,35,1,35,1,35,1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36, - 1,36,4,36,517,8,36,11,36,12,36,518,1,36,1,36,1,37,1,37,1,37,1,37, - 1,37,1,37,1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,537,8,38, - 11,38,12,38,538,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,4,39,549, - 8,39,11,39,12,39,550,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3,40,560, - 8,40,1,40,1,40,1,40,1,40,1,40,1,40,5,40,568,8,40,10,40,12,40,571, - 9,40,3,40,573,8,40,1,40,3,40,576,8,40,1,40,1,40,1,41,1,41,1,41,1, - 41,1,41,3,41,585,8,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,5,41,594, - 8,41,10,41,12,41,597,9,41,3,41,599,8,41,1,41,3,41,602,8,41,1,41, - 1,41,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,5,42,615,8,42, - 10,42,12,42,618,9,42,3,42,620,8,42,1,42,3,42,623,8,42,1,42,3,42, - 626,8,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1,43,1,43,5,43,637,8, - 43,10,43,12,43,640,9,43,3,43,642,8,43,1,43,1,43,3,43,646,8,43,1, - 43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44,1,45,1,45,1,45,1,45,1, - 45,0,2,2,12,46,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34, - 36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78, - 80,82,84,86,88,90,0,4,2,0,49,49,73,73,1,0,88,89,1,0,32,34,3,0,25, - 25,85,86,88,89,727,0,98,1,0,0,0,2,109,1,0,0,0,4,126,1,0,0,0,6,137, - 1,0,0,0,8,139,1,0,0,0,10,141,1,0,0,0,12,154,1,0,0,0,14,204,1,0,0, - 0,16,209,1,0,0,0,18,216,1,0,0,0,20,225,1,0,0,0,22,229,1,0,0,0,24, - 231,1,0,0,0,26,248,1,0,0,0,28,263,1,0,0,0,30,281,1,0,0,0,32,295, - 1,0,0,0,34,316,1,0,0,0,36,321,1,0,0,0,38,327,1,0,0,0,40,331,1,0, - 0,0,42,342,1,0,0,0,44,372,1,0,0,0,46,376,1,0,0,0,48,386,1,0,0,0, - 50,390,1,0,0,0,52,400,1,0,0,0,54,408,1,0,0,0,56,416,1,0,0,0,58,423, - 1,0,0,0,60,440,1,0,0,0,62,450,1,0,0,0,64,456,1,0,0,0,66,461,1,0, - 
0,0,68,477,1,0,0,0,70,494,1,0,0,0,72,511,1,0,0,0,74,522,1,0,0,0, - 76,529,1,0,0,0,78,542,1,0,0,0,80,554,1,0,0,0,82,579,1,0,0,0,84,605, - 1,0,0,0,86,630,1,0,0,0,88,653,1,0,0,0,90,656,1,0,0,0,92,99,5,10, - 0,0,93,99,5,11,0,0,94,99,5,12,0,0,95,99,5,13,0,0,96,99,5,14,0,0, - 97,99,3,2,1,0,98,92,1,0,0,0,98,93,1,0,0,0,98,94,1,0,0,0,98,95,1, - 0,0,0,98,96,1,0,0,0,98,97,1,0,0,0,99,1,1,0,0,0,100,101,6,1,-1,0, - 101,102,5,47,0,0,102,103,3,2,1,0,103,104,5,48,0,0,104,110,1,0,0, - 0,105,106,5,88,0,0,106,107,5,77,0,0,107,110,3,2,1,2,108,110,5,87, - 0,0,109,100,1,0,0,0,109,105,1,0,0,0,109,108,1,0,0,0,110,122,1,0, - 0,0,111,114,10,3,0,0,112,115,5,75,0,0,113,115,5,77,0,0,114,112,1, - 0,0,0,114,113,1,0,0,0,115,116,1,0,0,0,116,121,3,2,1,4,117,118,10, - 4,0,0,118,119,5,76,0,0,119,121,3,4,2,0,120,111,1,0,0,0,120,117,1, - 0,0,0,121,124,1,0,0,0,122,120,1,0,0,0,122,123,1,0,0,0,123,3,1,0, - 0,0,124,122,1,0,0,0,125,127,7,0,0,0,126,125,1,0,0,0,126,127,1,0, - 0,0,127,128,1,0,0,0,128,129,5,88,0,0,129,5,1,0,0,0,130,138,5,43, - 0,0,131,138,5,44,0,0,132,133,5,45,0,0,133,134,3,8,4,0,134,135,5, - 81,0,0,135,136,3,10,5,0,136,138,1,0,0,0,137,130,1,0,0,0,137,131, - 1,0,0,0,137,132,1,0,0,0,138,7,1,0,0,0,139,140,5,87,0,0,140,9,1,0, - 0,0,141,142,5,87,0,0,142,11,1,0,0,0,143,144,6,6,-1,0,144,145,5,47, - 0,0,145,146,3,12,6,0,146,147,5,48,0,0,147,155,1,0,0,0,148,149,3, - 16,8,0,149,150,3,12,6,9,150,155,1,0,0,0,151,152,5,28,0,0,152,155, - 3,12,6,4,153,155,3,14,7,0,154,143,1,0,0,0,154,148,1,0,0,0,154,151, - 1,0,0,0,154,153,1,0,0,0,155,192,1,0,0,0,156,157,10,10,0,0,157,158, - 5,76,0,0,158,191,3,12,6,10,159,163,10,8,0,0,160,164,5,75,0,0,161, - 164,5,77,0,0,162,164,5,78,0,0,163,160,1,0,0,0,163,161,1,0,0,0,163, - 162,1,0,0,0,164,165,1,0,0,0,165,191,3,12,6,9,166,169,10,7,0,0,167, - 170,5,49,0,0,168,170,5,73,0,0,169,167,1,0,0,0,169,168,1,0,0,0,170, - 171,1,0,0,0,171,191,3,12,6,8,172,173,10,6,0,0,173,174,3,18,9,0,174, - 175,3,12,6,7,175,191,1,0,0,0,176,177,10,5,0,0,177,178,3,20,10,0, - 
178,179,3,12,6,6,179,191,1,0,0,0,180,181,10,3,0,0,181,182,3,22,11, - 0,182,183,3,12,6,4,183,191,1,0,0,0,184,185,10,2,0,0,185,186,5,79, - 0,0,186,187,3,12,6,0,187,188,5,80,0,0,188,189,3,12,6,3,189,191,1, - 0,0,0,190,156,1,0,0,0,190,159,1,0,0,0,190,166,1,0,0,0,190,172,1, - 0,0,0,190,176,1,0,0,0,190,180,1,0,0,0,190,184,1,0,0,0,191,194,1, - 0,0,0,192,190,1,0,0,0,192,193,1,0,0,0,193,13,1,0,0,0,194,192,1,0, - 0,0,195,205,3,26,13,0,196,205,5,85,0,0,197,199,7,1,0,0,198,200,3, - 24,12,0,199,198,1,0,0,0,199,200,1,0,0,0,200,205,1,0,0,0,201,205, - 5,86,0,0,202,205,5,25,0,0,203,205,3,24,12,0,204,195,1,0,0,0,204, - 196,1,0,0,0,204,197,1,0,0,0,204,201,1,0,0,0,204,202,1,0,0,0,204, - 203,1,0,0,0,205,15,1,0,0,0,206,210,5,49,0,0,207,210,5,73,0,0,208, - 210,5,50,0,0,209,206,1,0,0,0,209,207,1,0,0,0,209,208,1,0,0,0,210, - 17,1,0,0,0,211,217,5,53,0,0,212,217,5,52,0,0,213,217,5,51,0,0,214, - 217,5,59,0,0,215,217,5,60,0,0,216,211,1,0,0,0,216,212,1,0,0,0,216, - 213,1,0,0,0,216,214,1,0,0,0,216,215,1,0,0,0,217,19,1,0,0,0,218,226, - 5,61,0,0,219,226,5,63,0,0,220,226,5,68,0,0,221,226,5,69,0,0,222, - 226,5,70,0,0,223,226,5,71,0,0,224,226,5,62,0,0,225,218,1,0,0,0,225, - 219,1,0,0,0,225,220,1,0,0,0,225,221,1,0,0,0,225,222,1,0,0,0,225, - 223,1,0,0,0,225,224,1,0,0,0,226,21,1,0,0,0,227,230,5,26,0,0,228, - 230,5,27,0,0,229,227,1,0,0,0,229,228,1,0,0,0,230,23,1,0,0,0,231, - 236,5,87,0,0,232,233,5,54,0,0,233,234,3,12,6,0,234,235,5,56,0,0, - 235,237,1,0,0,0,236,232,1,0,0,0,236,237,1,0,0,0,237,241,1,0,0,0, - 238,240,5,83,0,0,239,238,1,0,0,0,240,243,1,0,0,0,241,239,1,0,0,0, - 241,242,1,0,0,0,242,246,1,0,0,0,243,241,1,0,0,0,244,245,5,84,0,0, - 245,247,3,24,12,0,246,244,1,0,0,0,246,247,1,0,0,0,247,25,1,0,0,0, - 248,249,5,87,0,0,249,258,5,47,0,0,250,255,3,12,6,0,251,252,5,72, - 0,0,252,254,3,12,6,0,253,251,1,0,0,0,254,257,1,0,0,0,255,253,1,0, - 0,0,255,256,1,0,0,0,256,259,1,0,0,0,257,255,1,0,0,0,258,250,1,0, - 0,0,258,259,1,0,0,0,259,260,1,0,0,0,260,261,5,48,0,0,261,27,1,0, - 
0,0,262,264,5,29,0,0,263,262,1,0,0,0,263,264,1,0,0,0,264,265,1,0, - 0,0,265,266,5,16,0,0,266,267,5,87,0,0,267,268,3,0,0,0,268,269,5, - 74,0,0,269,271,3,12,6,0,270,272,5,82,0,0,271,270,1,0,0,0,271,272, - 1,0,0,0,272,276,1,0,0,0,273,275,3,6,3,0,274,273,1,0,0,0,275,278, - 1,0,0,0,276,274,1,0,0,0,276,277,1,0,0,0,277,279,1,0,0,0,278,276, - 1,0,0,0,279,280,5,9,0,0,280,29,1,0,0,0,281,282,3,24,12,0,282,283, - 5,74,0,0,283,285,3,12,6,0,284,286,5,82,0,0,285,284,1,0,0,0,285,286, - 1,0,0,0,286,290,1,0,0,0,287,289,3,6,3,0,288,287,1,0,0,0,289,292, - 1,0,0,0,290,288,1,0,0,0,290,291,1,0,0,0,291,293,1,0,0,0,292,290, - 1,0,0,0,293,294,5,9,0,0,294,31,1,0,0,0,295,296,5,30,0,0,296,297, - 3,24,12,0,297,298,5,74,0,0,298,306,3,12,6,0,299,300,5,4,0,0,300, - 301,3,24,12,0,301,302,5,74,0,0,302,303,3,12,6,0,303,305,1,0,0,0, - 304,299,1,0,0,0,305,308,1,0,0,0,306,304,1,0,0,0,306,307,1,0,0,0, - 307,310,1,0,0,0,308,306,1,0,0,0,309,311,5,82,0,0,310,309,1,0,0,0, - 310,311,1,0,0,0,311,312,1,0,0,0,312,313,5,9,0,0,313,33,1,0,0,0,314, - 317,3,38,19,0,315,317,3,36,18,0,316,314,1,0,0,0,316,315,1,0,0,0, - 317,35,1,0,0,0,318,322,3,50,25,0,319,322,3,58,29,0,320,322,3,60, - 30,0,321,318,1,0,0,0,321,319,1,0,0,0,321,320,1,0,0,0,322,37,1,0, - 0,0,323,328,3,40,20,0,324,328,3,26,13,0,325,328,3,42,21,0,326,328, - 3,48,24,0,327,323,1,0,0,0,327,324,1,0,0,0,327,325,1,0,0,0,327,326, - 1,0,0,0,328,329,1,0,0,0,329,330,5,9,0,0,330,39,1,0,0,0,331,337,3, - 24,12,0,332,338,5,74,0,0,333,338,5,64,0,0,334,338,5,65,0,0,335,338, - 5,66,0,0,336,338,5,67,0,0,337,332,1,0,0,0,337,333,1,0,0,0,337,334, - 1,0,0,0,337,335,1,0,0,0,337,336,1,0,0,0,338,339,1,0,0,0,339,340, - 3,12,6,0,340,41,1,0,0,0,341,343,5,29,0,0,342,341,1,0,0,0,342,343, - 1,0,0,0,343,345,1,0,0,0,344,346,5,16,0,0,345,344,1,0,0,0,345,346, - 1,0,0,0,346,347,1,0,0,0,347,352,3,24,12,0,348,349,5,72,0,0,349,351, - 3,24,12,0,350,348,1,0,0,0,351,354,1,0,0,0,352,350,1,0,0,0,352,353, - 1,0,0,0,353,355,1,0,0,0,354,352,1,0,0,0,355,358,3,0,0,0,356,357, - 
5,74,0,0,357,359,3,12,6,0,358,356,1,0,0,0,358,359,1,0,0,0,359,364, - 1,0,0,0,360,361,5,57,0,0,361,362,3,12,6,0,362,363,5,58,0,0,363,365, - 1,0,0,0,364,360,1,0,0,0,364,365,1,0,0,0,365,369,1,0,0,0,366,368, - 3,6,3,0,367,366,1,0,0,0,368,371,1,0,0,0,369,367,1,0,0,0,369,370, - 1,0,0,0,370,43,1,0,0,0,371,369,1,0,0,0,372,373,3,42,21,0,373,374, - 5,9,0,0,374,45,1,0,0,0,375,377,5,9,0,0,376,375,1,0,0,0,376,377,1, - 0,0,0,377,378,1,0,0,0,378,383,3,34,17,0,379,382,5,9,0,0,380,382, - 3,34,17,0,381,379,1,0,0,0,381,380,1,0,0,0,382,385,1,0,0,0,383,381, - 1,0,0,0,383,384,1,0,0,0,384,47,1,0,0,0,385,383,1,0,0,0,386,388,5, - 17,0,0,387,389,3,12,6,0,388,387,1,0,0,0,388,389,1,0,0,0,389,49,1, - 0,0,0,390,394,3,52,26,0,391,393,3,54,27,0,392,391,1,0,0,0,393,396, - 1,0,0,0,394,392,1,0,0,0,394,395,1,0,0,0,395,398,1,0,0,0,396,394, - 1,0,0,0,397,399,3,56,28,0,398,397,1,0,0,0,398,399,1,0,0,0,399,51, - 1,0,0,0,400,401,5,18,0,0,401,402,3,12,6,0,402,403,5,80,0,0,403,404, - 5,9,0,0,404,405,5,1,0,0,405,406,3,46,23,0,406,407,5,2,0,0,407,53, - 1,0,0,0,408,409,5,19,0,0,409,410,3,12,6,0,410,411,5,80,0,0,411,412, - 5,9,0,0,412,413,5,1,0,0,413,414,3,46,23,0,414,415,5,2,0,0,415,55, - 1,0,0,0,416,417,5,20,0,0,417,418,5,80,0,0,418,419,5,9,0,0,419,420, - 5,1,0,0,420,421,3,46,23,0,421,422,5,2,0,0,422,57,1,0,0,0,423,424, - 5,21,0,0,424,425,5,87,0,0,425,426,5,23,0,0,426,427,3,12,6,0,427, - 428,5,46,0,0,428,429,3,12,6,0,429,431,5,24,0,0,430,432,5,73,0,0, - 431,430,1,0,0,0,431,432,1,0,0,0,432,433,1,0,0,0,433,434,7,1,0,0, - 434,435,5,80,0,0,435,436,5,9,0,0,436,437,5,1,0,0,437,438,3,46,23, - 0,438,439,5,2,0,0,439,59,1,0,0,0,440,441,5,22,0,0,441,442,3,12,6, - 0,442,443,5,80,0,0,443,444,5,9,0,0,444,445,5,1,0,0,445,446,3,46, - 23,0,446,447,5,2,0,0,447,61,1,0,0,0,448,451,3,64,32,0,449,451,5, - 9,0,0,450,448,1,0,0,0,450,449,1,0,0,0,451,452,1,0,0,0,452,450,1, - 0,0,0,452,453,1,0,0,0,453,454,1,0,0,0,454,455,5,0,0,1,455,63,1,0, - 0,0,456,457,5,31,0,0,457,458,5,87,0,0,458,459,5,80,0,0,459,460,3, - 
66,33,0,460,65,1,0,0,0,461,462,5,9,0,0,462,471,5,1,0,0,463,472,3, - 72,36,0,464,472,3,76,38,0,465,472,3,78,39,0,466,472,3,84,42,0,467, - 472,3,86,43,0,468,472,3,68,34,0,469,472,3,70,35,0,470,472,3,74,37, - 0,471,463,1,0,0,0,471,464,1,0,0,0,471,465,1,0,0,0,471,466,1,0,0, - 0,471,467,1,0,0,0,471,468,1,0,0,0,471,469,1,0,0,0,471,470,1,0,0, - 0,472,473,1,0,0,0,473,471,1,0,0,0,473,474,1,0,0,0,474,475,1,0,0, - 0,475,476,5,2,0,0,476,67,1,0,0,0,477,478,5,40,0,0,478,479,5,47,0, - 0,479,484,3,24,12,0,480,481,5,72,0,0,481,483,3,90,45,0,482,480,1, - 0,0,0,483,486,1,0,0,0,484,482,1,0,0,0,484,485,1,0,0,0,485,487,1, - 0,0,0,486,484,1,0,0,0,487,488,5,48,0,0,488,489,5,80,0,0,489,490, - 5,9,0,0,490,491,5,1,0,0,491,492,3,46,23,0,492,493,5,2,0,0,493,69, - 1,0,0,0,494,495,5,41,0,0,495,496,5,47,0,0,496,501,3,12,6,0,497,498, - 5,72,0,0,498,500,3,90,45,0,499,497,1,0,0,0,500,503,1,0,0,0,501,499, - 1,0,0,0,501,502,1,0,0,0,502,504,1,0,0,0,503,501,1,0,0,0,504,505, - 5,48,0,0,505,506,5,80,0,0,506,507,5,9,0,0,507,508,5,1,0,0,508,509, - 3,46,23,0,509,510,5,2,0,0,510,71,1,0,0,0,511,512,7,2,0,0,512,513, - 5,80,0,0,513,514,5,9,0,0,514,516,5,1,0,0,515,517,3,44,22,0,516,515, - 1,0,0,0,517,518,1,0,0,0,518,516,1,0,0,0,518,519,1,0,0,0,519,520, - 1,0,0,0,520,521,5,2,0,0,521,73,1,0,0,0,522,523,5,35,0,0,523,524, - 5,80,0,0,524,525,5,9,0,0,525,526,5,1,0,0,526,527,3,46,23,0,527,528, - 5,2,0,0,528,75,1,0,0,0,529,530,5,36,0,0,530,531,5,80,0,0,531,532, - 5,9,0,0,532,536,5,1,0,0,533,537,3,28,14,0,534,537,3,30,15,0,535, - 537,3,32,16,0,536,533,1,0,0,0,536,534,1,0,0,0,536,535,1,0,0,0,537, - 538,1,0,0,0,538,536,1,0,0,0,538,539,1,0,0,0,539,540,1,0,0,0,540, - 541,5,2,0,0,541,77,1,0,0,0,542,543,5,37,0,0,543,544,5,80,0,0,544, - 545,5,9,0,0,545,548,5,1,0,0,546,549,3,80,40,0,547,549,3,82,41,0, - 548,546,1,0,0,0,548,547,1,0,0,0,549,550,1,0,0,0,550,548,1,0,0,0, - 550,551,1,0,0,0,551,552,1,0,0,0,552,553,5,2,0,0,553,79,1,0,0,0,554, - 559,5,87,0,0,555,556,5,54,0,0,556,557,3,12,6,0,557,558,5,56,0,0, - 
558,560,1,0,0,0,559,555,1,0,0,0,559,560,1,0,0,0,560,561,1,0,0,0, - 561,562,5,55,0,0,562,575,5,42,0,0,563,572,5,47,0,0,564,569,3,88, - 44,0,565,566,5,72,0,0,566,568,3,88,44,0,567,565,1,0,0,0,568,571, - 1,0,0,0,569,567,1,0,0,0,569,570,1,0,0,0,570,573,1,0,0,0,571,569, - 1,0,0,0,572,564,1,0,0,0,572,573,1,0,0,0,573,574,1,0,0,0,574,576, - 5,48,0,0,575,563,1,0,0,0,575,576,1,0,0,0,576,577,1,0,0,0,577,578, - 5,9,0,0,578,81,1,0,0,0,579,584,5,87,0,0,580,581,5,54,0,0,581,582, - 3,12,6,0,582,583,5,56,0,0,583,585,1,0,0,0,584,580,1,0,0,0,584,585, - 1,0,0,0,585,586,1,0,0,0,586,587,3,0,0,0,587,588,5,55,0,0,588,601, - 5,39,0,0,589,598,5,47,0,0,590,595,3,88,44,0,591,592,5,72,0,0,592, - 594,3,88,44,0,593,591,1,0,0,0,594,597,1,0,0,0,595,593,1,0,0,0,595, - 596,1,0,0,0,596,599,1,0,0,0,597,595,1,0,0,0,598,590,1,0,0,0,598, - 599,1,0,0,0,599,600,1,0,0,0,600,602,5,48,0,0,601,589,1,0,0,0,601, - 602,1,0,0,0,602,603,1,0,0,0,603,604,5,9,0,0,604,83,1,0,0,0,605,606, - 5,38,0,0,606,607,5,80,0,0,607,608,5,9,0,0,608,625,5,1,0,0,609,622, - 5,42,0,0,610,619,5,47,0,0,611,616,3,88,44,0,612,613,5,72,0,0,613, - 615,3,88,44,0,614,612,1,0,0,0,615,618,1,0,0,0,616,614,1,0,0,0,616, - 617,1,0,0,0,617,620,1,0,0,0,618,616,1,0,0,0,619,611,1,0,0,0,619, - 620,1,0,0,0,620,621,1,0,0,0,621,623,5,48,0,0,622,610,1,0,0,0,622, - 623,1,0,0,0,623,626,1,0,0,0,624,626,5,39,0,0,625,609,1,0,0,0,625, - 624,1,0,0,0,626,627,1,0,0,0,627,628,5,9,0,0,628,629,5,2,0,0,629, - 85,1,0,0,0,630,631,5,15,0,0,631,632,5,87,0,0,632,641,5,47,0,0,633, - 638,3,88,44,0,634,635,5,72,0,0,635,637,3,88,44,0,636,634,1,0,0,0, - 637,640,1,0,0,0,638,636,1,0,0,0,638,639,1,0,0,0,639,642,1,0,0,0, - 640,638,1,0,0,0,641,633,1,0,0,0,641,642,1,0,0,0,642,643,1,0,0,0, - 643,645,5,48,0,0,644,646,3,0,0,0,645,644,1,0,0,0,645,646,1,0,0,0, - 646,647,1,0,0,0,647,648,5,80,0,0,648,649,5,9,0,0,649,650,5,1,0,0, - 650,651,3,46,23,0,651,652,5,2,0,0,652,87,1,0,0,0,653,654,5,87,0, - 0,654,655,3,0,0,0,655,89,1,0,0,0,656,657,5,87,0,0,657,658,5,74,0, - 
0,658,659,7,3,0,0,659,91,1,0,0,0,73,98,109,114,120,122,126,137,154, - 163,169,190,192,199,204,209,216,225,229,236,241,246,255,258,263, - 271,276,285,290,306,310,316,321,327,337,342,345,352,358,364,369, - 376,381,383,388,394,398,431,450,452,471,473,484,501,518,536,538, - 548,550,559,569,572,575,584,595,598,601,616,619,622,625,638,641, - 645 + 2,40,7,40,2,41,7,41,2,42,7,42,2,43,7,43,2,44,7,44,2,45,7,45,2,46, + 7,46,1,0,1,0,1,0,1,0,1,0,1,0,3,0,101,8,0,1,1,1,1,1,1,1,1,1,1,1,1, + 1,1,1,1,1,1,3,1,112,8,1,1,1,1,1,1,1,3,1,117,8,1,1,1,1,1,1,1,1,1, + 5,1,123,8,1,10,1,12,1,126,9,1,1,2,3,2,129,8,2,1,2,1,2,1,3,1,3,1, + 3,1,3,1,3,1,3,1,3,3,3,140,8,3,1,4,1,4,1,5,1,5,1,6,1,6,1,6,1,6,1, + 6,1,6,1,6,1,6,1,6,1,6,1,6,3,6,157,8,6,1,6,1,6,1,6,1,6,1,6,1,6,1, + 6,3,6,166,8,6,1,6,1,6,1,6,1,6,3,6,172,8,6,1,6,1,6,1,6,1,6,1,6,1, + 6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,1,6,5,6,193,8, + 6,10,6,12,6,196,9,6,1,7,1,7,1,7,1,7,3,7,202,8,7,1,7,1,7,1,7,3,7, + 207,8,7,1,8,1,8,1,8,3,8,212,8,8,1,9,1,9,1,9,1,9,1,9,3,9,219,8,9, + 1,10,1,10,1,10,1,10,1,10,1,10,1,10,3,10,228,8,10,1,11,1,11,3,11, + 232,8,11,1,12,1,12,1,12,1,12,1,12,3,12,239,8,12,1,12,5,12,242,8, + 12,10,12,12,12,245,9,12,1,12,1,12,3,12,249,8,12,1,13,1,13,1,13,1, + 13,1,13,5,13,256,8,13,10,13,12,13,259,9,13,3,13,261,8,13,1,13,1, + 13,1,14,3,14,266,8,14,1,14,1,14,1,14,1,14,1,14,1,14,3,14,274,8,14, + 1,14,5,14,277,8,14,10,14,12,14,280,9,14,1,14,1,14,1,15,1,15,1,15, + 1,15,3,15,288,8,15,1,15,5,15,291,8,15,10,15,12,15,294,9,15,1,15, + 1,15,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,16,5,16,307,8,16, + 10,16,12,16,310,9,16,1,16,3,16,313,8,16,1,16,1,16,1,17,1,17,3,17, + 319,8,17,1,18,1,18,1,18,3,18,324,8,18,1,19,1,19,1,19,1,19,3,19,330, + 8,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20,1,20,3,20,340,8,20,1,20, + 1,20,1,21,3,21,345,8,21,1,21,3,21,348,8,21,1,21,1,21,1,21,5,21,353, + 8,21,10,21,12,21,356,9,21,1,21,1,21,1,21,3,21,361,8,21,1,21,1,21, + 1,21,1,21,3,21,367,8,21,1,21,5,21,370,8,21,10,21,12,21,373,9,21, + 
1,22,1,22,1,22,1,23,3,23,379,8,23,1,23,1,23,1,23,5,23,384,8,23,10, + 23,12,23,387,9,23,1,24,1,24,3,24,391,8,24,1,25,1,25,5,25,395,8,25, + 10,25,12,25,398,9,25,1,25,3,25,401,8,25,1,26,1,26,1,26,1,26,1,26, + 1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28, + 1,28,1,28,1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29, + 3,29,434,8,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30, + 1,30,1,30,1,30,1,30,1,30,1,31,1,31,4,31,453,8,31,11,31,12,31,454, + 1,31,1,31,1,32,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33, + 1,33,1,33,1,33,1,33,4,33,474,8,33,11,33,12,33,475,1,33,1,33,1,34, + 1,34,1,34,1,34,1,34,5,34,485,8,34,10,34,12,34,488,9,34,1,34,1,34, + 1,34,1,34,1,34,1,34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,502,8,35, + 10,35,12,35,505,9,35,1,35,1,35,1,35,1,35,1,35,1,35,1,35,1,36,1,36, + 1,36,1,36,1,36,4,36,519,8,36,11,36,12,36,520,1,36,1,36,1,37,1,37, + 1,37,1,37,1,37,1,37,1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38, + 539,8,38,11,38,12,38,540,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39, + 4,39,551,8,39,11,39,12,39,552,1,39,1,39,1,40,1,40,1,40,1,40,1,40, + 3,40,562,8,40,1,40,1,40,1,40,1,40,1,40,1,40,5,40,570,8,40,10,40, + 12,40,573,9,40,3,40,575,8,40,1,40,3,40,578,8,40,1,40,1,40,1,41,1, + 41,1,41,1,41,1,41,3,41,587,8,41,1,41,1,41,1,41,1,41,1,41,1,41,1, + 41,5,41,596,8,41,10,41,12,41,599,9,41,3,41,601,8,41,1,41,3,41,604, + 8,41,1,41,1,41,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,5,42, + 617,8,42,10,42,12,42,620,9,42,3,42,622,8,42,1,42,3,42,625,8,42,1, + 42,3,42,628,8,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1,43,1,43,5, + 43,639,8,43,10,43,12,43,642,9,43,3,43,644,8,43,1,43,1,43,3,43,648, + 8,43,1,43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44,1,45,1,45,3,45, + 661,8,45,1,46,1,46,1,46,1,46,1,46,0,2,2,12,47,0,2,4,6,8,10,12,14, + 16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58, + 60,62,64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,0,4,2,0,49,49, + 73,73,1,0,88,89,1,0,32,34,3,0,25,25,85,86,88,89,733,0,100,1,0,0, + 
0,2,111,1,0,0,0,4,128,1,0,0,0,6,139,1,0,0,0,8,141,1,0,0,0,10,143, + 1,0,0,0,12,156,1,0,0,0,14,206,1,0,0,0,16,211,1,0,0,0,18,218,1,0, + 0,0,20,227,1,0,0,0,22,231,1,0,0,0,24,233,1,0,0,0,26,250,1,0,0,0, + 28,265,1,0,0,0,30,283,1,0,0,0,32,297,1,0,0,0,34,318,1,0,0,0,36,323, + 1,0,0,0,38,329,1,0,0,0,40,333,1,0,0,0,42,344,1,0,0,0,44,374,1,0, + 0,0,46,378,1,0,0,0,48,388,1,0,0,0,50,392,1,0,0,0,52,402,1,0,0,0, + 54,410,1,0,0,0,56,418,1,0,0,0,58,425,1,0,0,0,60,442,1,0,0,0,62,452, + 1,0,0,0,64,458,1,0,0,0,66,463,1,0,0,0,68,479,1,0,0,0,70,496,1,0, + 0,0,72,513,1,0,0,0,74,524,1,0,0,0,76,531,1,0,0,0,78,544,1,0,0,0, + 80,556,1,0,0,0,82,581,1,0,0,0,84,607,1,0,0,0,86,632,1,0,0,0,88,655, + 1,0,0,0,90,660,1,0,0,0,92,662,1,0,0,0,94,101,5,10,0,0,95,101,5,11, + 0,0,96,101,5,12,0,0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1, + 0,100,94,1,0,0,0,100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100, + 98,1,0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104, + 5,47,0,0,104,105,3,2,1,0,105,106,5,48,0,0,106,112,1,0,0,0,107,108, + 5,88,0,0,108,109,5,77,0,0,109,112,3,2,1,2,110,112,5,87,0,0,111,102, + 1,0,0,0,111,107,1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116, + 10,3,0,0,114,117,5,75,0,0,115,117,5,77,0,0,116,114,1,0,0,0,116,115, + 1,0,0,0,117,118,1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121, + 5,76,0,0,121,123,3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126, + 1,0,0,0,124,122,1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1, + 0,0,0,127,129,7,0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1, + 0,0,0,130,131,5,88,0,0,131,5,1,0,0,0,132,140,5,43,0,0,133,140,5, + 44,0,0,134,135,5,45,0,0,135,136,3,8,4,0,136,137,5,81,0,0,137,138, + 3,10,5,0,138,140,1,0,0,0,139,132,1,0,0,0,139,133,1,0,0,0,139,134, + 1,0,0,0,140,7,1,0,0,0,141,142,5,87,0,0,142,9,1,0,0,0,143,144,5,87, + 0,0,144,11,1,0,0,0,145,146,6,6,-1,0,146,147,5,47,0,0,147,148,3,12, + 6,0,148,149,5,48,0,0,149,157,1,0,0,0,150,151,3,16,8,0,151,152,3, + 12,6,9,152,157,1,0,0,0,153,154,5,28,0,0,154,157,3,12,6,4,155,157, + 
3,14,7,0,156,145,1,0,0,0,156,150,1,0,0,0,156,153,1,0,0,0,156,155, + 1,0,0,0,157,194,1,0,0,0,158,159,10,10,0,0,159,160,5,76,0,0,160,193, + 3,12,6,10,161,165,10,8,0,0,162,166,5,75,0,0,163,166,5,77,0,0,164, + 166,5,78,0,0,165,162,1,0,0,0,165,163,1,0,0,0,165,164,1,0,0,0,166, + 167,1,0,0,0,167,193,3,12,6,9,168,171,10,7,0,0,169,172,5,49,0,0,170, + 172,5,73,0,0,171,169,1,0,0,0,171,170,1,0,0,0,172,173,1,0,0,0,173, + 193,3,12,6,8,174,175,10,6,0,0,175,176,3,18,9,0,176,177,3,12,6,7, + 177,193,1,0,0,0,178,179,10,5,0,0,179,180,3,20,10,0,180,181,3,12, + 6,6,181,193,1,0,0,0,182,183,10,3,0,0,183,184,3,22,11,0,184,185,3, + 12,6,4,185,193,1,0,0,0,186,187,10,2,0,0,187,188,5,79,0,0,188,189, + 3,12,6,0,189,190,5,80,0,0,190,191,3,12,6,3,191,193,1,0,0,0,192,158, + 1,0,0,0,192,161,1,0,0,0,192,168,1,0,0,0,192,174,1,0,0,0,192,178, + 1,0,0,0,192,182,1,0,0,0,192,186,1,0,0,0,193,196,1,0,0,0,194,192, + 1,0,0,0,194,195,1,0,0,0,195,13,1,0,0,0,196,194,1,0,0,0,197,207,3, + 26,13,0,198,207,5,85,0,0,199,201,7,1,0,0,200,202,3,24,12,0,201,200, + 1,0,0,0,201,202,1,0,0,0,202,207,1,0,0,0,203,207,5,86,0,0,204,207, + 5,25,0,0,205,207,3,24,12,0,206,197,1,0,0,0,206,198,1,0,0,0,206,199, + 1,0,0,0,206,203,1,0,0,0,206,204,1,0,0,0,206,205,1,0,0,0,207,15,1, + 0,0,0,208,212,5,49,0,0,209,212,5,73,0,0,210,212,5,50,0,0,211,208, + 1,0,0,0,211,209,1,0,0,0,211,210,1,0,0,0,212,17,1,0,0,0,213,219,5, + 53,0,0,214,219,5,52,0,0,215,219,5,51,0,0,216,219,5,59,0,0,217,219, + 5,60,0,0,218,213,1,0,0,0,218,214,1,0,0,0,218,215,1,0,0,0,218,216, + 1,0,0,0,218,217,1,0,0,0,219,19,1,0,0,0,220,228,5,61,0,0,221,228, + 5,63,0,0,222,228,5,68,0,0,223,228,5,69,0,0,224,228,5,70,0,0,225, + 228,5,71,0,0,226,228,5,62,0,0,227,220,1,0,0,0,227,221,1,0,0,0,227, + 222,1,0,0,0,227,223,1,0,0,0,227,224,1,0,0,0,227,225,1,0,0,0,227, + 226,1,0,0,0,228,21,1,0,0,0,229,232,5,26,0,0,230,232,5,27,0,0,231, + 229,1,0,0,0,231,230,1,0,0,0,232,23,1,0,0,0,233,238,5,87,0,0,234, + 235,5,54,0,0,235,236,3,90,45,0,236,237,5,56,0,0,237,239,1,0,0,0, + 
238,234,1,0,0,0,238,239,1,0,0,0,239,243,1,0,0,0,240,242,5,83,0,0, + 241,240,1,0,0,0,242,245,1,0,0,0,243,241,1,0,0,0,243,244,1,0,0,0, + 244,248,1,0,0,0,245,243,1,0,0,0,246,247,5,84,0,0,247,249,3,24,12, + 0,248,246,1,0,0,0,248,249,1,0,0,0,249,25,1,0,0,0,250,251,5,87,0, + 0,251,260,5,47,0,0,252,257,3,12,6,0,253,254,5,72,0,0,254,256,3,12, + 6,0,255,253,1,0,0,0,256,259,1,0,0,0,257,255,1,0,0,0,257,258,1,0, + 0,0,258,261,1,0,0,0,259,257,1,0,0,0,260,252,1,0,0,0,260,261,1,0, + 0,0,261,262,1,0,0,0,262,263,5,48,0,0,263,27,1,0,0,0,264,266,5,29, + 0,0,265,264,1,0,0,0,265,266,1,0,0,0,266,267,1,0,0,0,267,268,5,16, + 0,0,268,269,5,87,0,0,269,270,3,0,0,0,270,271,5,74,0,0,271,273,3, + 12,6,0,272,274,5,82,0,0,273,272,1,0,0,0,273,274,1,0,0,0,274,278, + 1,0,0,0,275,277,3,6,3,0,276,275,1,0,0,0,277,280,1,0,0,0,278,276, + 1,0,0,0,278,279,1,0,0,0,279,281,1,0,0,0,280,278,1,0,0,0,281,282, + 5,9,0,0,282,29,1,0,0,0,283,284,3,24,12,0,284,285,5,74,0,0,285,287, + 3,12,6,0,286,288,5,82,0,0,287,286,1,0,0,0,287,288,1,0,0,0,288,292, + 1,0,0,0,289,291,3,6,3,0,290,289,1,0,0,0,291,294,1,0,0,0,292,290, + 1,0,0,0,292,293,1,0,0,0,293,295,1,0,0,0,294,292,1,0,0,0,295,296, + 5,9,0,0,296,31,1,0,0,0,297,298,5,30,0,0,298,299,3,24,12,0,299,300, + 5,74,0,0,300,308,3,12,6,0,301,302,5,4,0,0,302,303,3,24,12,0,303, + 304,5,74,0,0,304,305,3,12,6,0,305,307,1,0,0,0,306,301,1,0,0,0,307, + 310,1,0,0,0,308,306,1,0,0,0,308,309,1,0,0,0,309,312,1,0,0,0,310, + 308,1,0,0,0,311,313,5,82,0,0,312,311,1,0,0,0,312,313,1,0,0,0,313, + 314,1,0,0,0,314,315,5,9,0,0,315,33,1,0,0,0,316,319,3,38,19,0,317, + 319,3,36,18,0,318,316,1,0,0,0,318,317,1,0,0,0,319,35,1,0,0,0,320, + 324,3,50,25,0,321,324,3,58,29,0,322,324,3,60,30,0,323,320,1,0,0, + 0,323,321,1,0,0,0,323,322,1,0,0,0,324,37,1,0,0,0,325,330,3,40,20, + 0,326,330,3,26,13,0,327,330,3,42,21,0,328,330,3,48,24,0,329,325, + 1,0,0,0,329,326,1,0,0,0,329,327,1,0,0,0,329,328,1,0,0,0,330,331, + 1,0,0,0,331,332,5,9,0,0,332,39,1,0,0,0,333,339,3,24,12,0,334,340, + 
5,74,0,0,335,340,5,64,0,0,336,340,5,65,0,0,337,340,5,66,0,0,338, + 340,5,67,0,0,339,334,1,0,0,0,339,335,1,0,0,0,339,336,1,0,0,0,339, + 337,1,0,0,0,339,338,1,0,0,0,340,341,1,0,0,0,341,342,3,12,6,0,342, + 41,1,0,0,0,343,345,5,29,0,0,344,343,1,0,0,0,344,345,1,0,0,0,345, + 347,1,0,0,0,346,348,5,16,0,0,347,346,1,0,0,0,347,348,1,0,0,0,348, + 349,1,0,0,0,349,354,3,24,12,0,350,351,5,72,0,0,351,353,3,24,12,0, + 352,350,1,0,0,0,353,356,1,0,0,0,354,352,1,0,0,0,354,355,1,0,0,0, + 355,357,1,0,0,0,356,354,1,0,0,0,357,360,3,0,0,0,358,359,5,74,0,0, + 359,361,3,12,6,0,360,358,1,0,0,0,360,361,1,0,0,0,361,366,1,0,0,0, + 362,363,5,57,0,0,363,364,3,12,6,0,364,365,5,58,0,0,365,367,1,0,0, + 0,366,362,1,0,0,0,366,367,1,0,0,0,367,371,1,0,0,0,368,370,3,6,3, + 0,369,368,1,0,0,0,370,373,1,0,0,0,371,369,1,0,0,0,371,372,1,0,0, + 0,372,43,1,0,0,0,373,371,1,0,0,0,374,375,3,42,21,0,375,376,5,9,0, + 0,376,45,1,0,0,0,377,379,5,9,0,0,378,377,1,0,0,0,378,379,1,0,0,0, + 379,380,1,0,0,0,380,385,3,34,17,0,381,384,5,9,0,0,382,384,3,34,17, + 0,383,381,1,0,0,0,383,382,1,0,0,0,384,387,1,0,0,0,385,383,1,0,0, + 0,385,386,1,0,0,0,386,47,1,0,0,0,387,385,1,0,0,0,388,390,5,17,0, + 0,389,391,3,12,6,0,390,389,1,0,0,0,390,391,1,0,0,0,391,49,1,0,0, + 0,392,396,3,52,26,0,393,395,3,54,27,0,394,393,1,0,0,0,395,398,1, + 0,0,0,396,394,1,0,0,0,396,397,1,0,0,0,397,400,1,0,0,0,398,396,1, + 0,0,0,399,401,3,56,28,0,400,399,1,0,0,0,400,401,1,0,0,0,401,51,1, + 0,0,0,402,403,5,18,0,0,403,404,3,12,6,0,404,405,5,80,0,0,405,406, + 5,9,0,0,406,407,5,1,0,0,407,408,3,46,23,0,408,409,5,2,0,0,409,53, + 1,0,0,0,410,411,5,19,0,0,411,412,3,12,6,0,412,413,5,80,0,0,413,414, + 5,9,0,0,414,415,5,1,0,0,415,416,3,46,23,0,416,417,5,2,0,0,417,55, + 1,0,0,0,418,419,5,20,0,0,419,420,5,80,0,0,420,421,5,9,0,0,421,422, + 5,1,0,0,422,423,3,46,23,0,423,424,5,2,0,0,424,57,1,0,0,0,425,426, + 5,21,0,0,426,427,5,87,0,0,427,428,5,23,0,0,428,429,3,12,6,0,429, + 430,5,46,0,0,430,431,3,12,6,0,431,433,5,24,0,0,432,434,5,73,0,0, + 
433,432,1,0,0,0,433,434,1,0,0,0,434,435,1,0,0,0,435,436,7,1,0,0, + 436,437,5,80,0,0,437,438,5,9,0,0,438,439,5,1,0,0,439,440,3,46,23, + 0,440,441,5,2,0,0,441,59,1,0,0,0,442,443,5,22,0,0,443,444,3,12,6, + 0,444,445,5,80,0,0,445,446,5,9,0,0,446,447,5,1,0,0,447,448,3,46, + 23,0,448,449,5,2,0,0,449,61,1,0,0,0,450,453,3,64,32,0,451,453,5, + 9,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1,0,0,0,454,452,1, + 0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5,0,0,1,457,63,1,0, + 0,0,458,459,5,31,0,0,459,460,5,87,0,0,460,461,5,80,0,0,461,462,3, + 66,33,0,462,65,1,0,0,0,463,464,5,9,0,0,464,473,5,1,0,0,465,474,3, + 72,36,0,466,474,3,76,38,0,467,474,3,78,39,0,468,474,3,84,42,0,469, + 474,3,86,43,0,470,474,3,68,34,0,471,474,3,70,35,0,472,474,3,74,37, + 0,473,465,1,0,0,0,473,466,1,0,0,0,473,467,1,0,0,0,473,468,1,0,0, + 0,473,469,1,0,0,0,473,470,1,0,0,0,473,471,1,0,0,0,473,472,1,0,0, + 0,474,475,1,0,0,0,475,473,1,0,0,0,475,476,1,0,0,0,476,477,1,0,0, + 0,477,478,5,2,0,0,478,67,1,0,0,0,479,480,5,40,0,0,480,481,5,47,0, + 0,481,486,3,24,12,0,482,483,5,72,0,0,483,485,3,92,46,0,484,482,1, + 0,0,0,485,488,1,0,0,0,486,484,1,0,0,0,486,487,1,0,0,0,487,489,1, + 0,0,0,488,486,1,0,0,0,489,490,5,48,0,0,490,491,5,80,0,0,491,492, + 5,9,0,0,492,493,5,1,0,0,493,494,3,46,23,0,494,495,5,2,0,0,495,69, + 1,0,0,0,496,497,5,41,0,0,497,498,5,47,0,0,498,503,3,12,6,0,499,500, + 5,72,0,0,500,502,3,92,46,0,501,499,1,0,0,0,502,505,1,0,0,0,503,501, + 1,0,0,0,503,504,1,0,0,0,504,506,1,0,0,0,505,503,1,0,0,0,506,507, + 5,48,0,0,507,508,5,80,0,0,508,509,5,9,0,0,509,510,5,1,0,0,510,511, + 3,46,23,0,511,512,5,2,0,0,512,71,1,0,0,0,513,514,7,2,0,0,514,515, + 5,80,0,0,515,516,5,9,0,0,516,518,5,1,0,0,517,519,3,44,22,0,518,517, + 1,0,0,0,519,520,1,0,0,0,520,518,1,0,0,0,520,521,1,0,0,0,521,522, + 1,0,0,0,522,523,5,2,0,0,523,73,1,0,0,0,524,525,5,35,0,0,525,526, + 5,80,0,0,526,527,5,9,0,0,527,528,5,1,0,0,528,529,3,46,23,0,529,530, + 5,2,0,0,530,75,1,0,0,0,531,532,5,36,0,0,532,533,5,80,0,0,533,534, + 
5,9,0,0,534,538,5,1,0,0,535,539,3,28,14,0,536,539,3,30,15,0,537, + 539,3,32,16,0,538,535,1,0,0,0,538,536,1,0,0,0,538,537,1,0,0,0,539, + 540,1,0,0,0,540,538,1,0,0,0,540,541,1,0,0,0,541,542,1,0,0,0,542, + 543,5,2,0,0,543,77,1,0,0,0,544,545,5,37,0,0,545,546,5,80,0,0,546, + 547,5,9,0,0,547,550,5,1,0,0,548,551,3,80,40,0,549,551,3,82,41,0, + 550,548,1,0,0,0,550,549,1,0,0,0,551,552,1,0,0,0,552,550,1,0,0,0, + 552,553,1,0,0,0,553,554,1,0,0,0,554,555,5,2,0,0,555,79,1,0,0,0,556, + 561,5,87,0,0,557,558,5,54,0,0,558,559,3,12,6,0,559,560,5,56,0,0, + 560,562,1,0,0,0,561,557,1,0,0,0,561,562,1,0,0,0,562,563,1,0,0,0, + 563,564,5,55,0,0,564,577,5,42,0,0,565,574,5,47,0,0,566,571,3,88, + 44,0,567,568,5,72,0,0,568,570,3,88,44,0,569,567,1,0,0,0,570,573, + 1,0,0,0,571,569,1,0,0,0,571,572,1,0,0,0,572,575,1,0,0,0,573,571, + 1,0,0,0,574,566,1,0,0,0,574,575,1,0,0,0,575,576,1,0,0,0,576,578, + 5,48,0,0,577,565,1,0,0,0,577,578,1,0,0,0,578,579,1,0,0,0,579,580, + 5,9,0,0,580,81,1,0,0,0,581,586,5,87,0,0,582,583,5,54,0,0,583,584, + 3,12,6,0,584,585,5,56,0,0,585,587,1,0,0,0,586,582,1,0,0,0,586,587, + 1,0,0,0,587,588,1,0,0,0,588,589,3,0,0,0,589,590,5,55,0,0,590,603, + 5,39,0,0,591,600,5,47,0,0,592,597,3,88,44,0,593,594,5,72,0,0,594, + 596,3,88,44,0,595,593,1,0,0,0,596,599,1,0,0,0,597,595,1,0,0,0,597, + 598,1,0,0,0,598,601,1,0,0,0,599,597,1,0,0,0,600,592,1,0,0,0,600, + 601,1,0,0,0,601,602,1,0,0,0,602,604,5,48,0,0,603,591,1,0,0,0,603, + 604,1,0,0,0,604,605,1,0,0,0,605,606,5,9,0,0,606,83,1,0,0,0,607,608, + 5,38,0,0,608,609,5,80,0,0,609,610,5,9,0,0,610,627,5,1,0,0,611,624, + 5,42,0,0,612,621,5,47,0,0,613,618,3,88,44,0,614,615,5,72,0,0,615, + 617,3,88,44,0,616,614,1,0,0,0,617,620,1,0,0,0,618,616,1,0,0,0,618, + 619,1,0,0,0,619,622,1,0,0,0,620,618,1,0,0,0,621,613,1,0,0,0,621, + 622,1,0,0,0,622,623,1,0,0,0,623,625,5,48,0,0,624,612,1,0,0,0,624, + 625,1,0,0,0,625,628,1,0,0,0,626,628,5,39,0,0,627,611,1,0,0,0,627, + 626,1,0,0,0,628,629,1,0,0,0,629,630,5,9,0,0,630,631,5,2,0,0,631, + 
85,1,0,0,0,632,633,5,15,0,0,633,634,5,87,0,0,634,643,5,47,0,0,635, + 640,3,88,44,0,636,637,5,72,0,0,637,639,3,88,44,0,638,636,1,0,0,0, + 639,642,1,0,0,0,640,638,1,0,0,0,640,641,1,0,0,0,641,644,1,0,0,0, + 642,640,1,0,0,0,643,635,1,0,0,0,643,644,1,0,0,0,644,645,1,0,0,0, + 645,647,5,48,0,0,646,648,3,0,0,0,647,646,1,0,0,0,647,648,1,0,0,0, + 648,649,1,0,0,0,649,650,5,80,0,0,650,651,5,9,0,0,651,652,5,1,0,0, + 652,653,3,46,23,0,653,654,5,2,0,0,654,87,1,0,0,0,655,656,5,87,0, + 0,656,657,3,0,0,0,657,89,1,0,0,0,658,661,3,88,44,0,659,661,3,12, + 6,0,660,658,1,0,0,0,660,659,1,0,0,0,661,91,1,0,0,0,662,663,5,87, + 0,0,663,664,5,74,0,0,664,665,7,3,0,0,665,93,1,0,0,0,74,100,111,116, + 122,124,128,139,156,165,171,192,194,201,206,211,218,227,231,238, + 243,248,257,260,265,273,278,287,292,308,312,318,323,329,339,344, + 347,354,360,366,371,378,383,385,390,396,400,433,452,454,473,475, + 486,503,520,538,540,550,552,561,571,574,577,586,597,600,603,618, + 621,624,627,640,643,647,660 ] class PyNestMLParser ( Parser ): @@ -355,7 +357,8 @@ class PyNestMLParser ( Parser ): RULE_outputBlock = 42 RULE_function = 43 RULE_parameter = 44 - RULE_constParameter = 45 + RULE_expressionOrParameter = 45 + RULE_constParameter = 46 ruleNames = [ "dataType", "unitType", "unitTypeExponent", "anyDecorator", "namespaceDecoratorNamespace", "namespaceDecoratorName", @@ -368,7 +371,8 @@ class PyNestMLParser ( Parser ): "nestMLCompilationUnit", "model", "modelBody", "onReceiveBlock", "onConditionBlock", "blockWithVariables", "updateBlock", "equationsBlock", "inputBlock", "spikeInputPort", "continuousInputPort", - "outputBlock", "function", "parameter", "constParameter" ] + "outputBlock", "function", "parameter", "expressionOrParameter", + "constParameter" ] EOF = Token.EOF INDENT=1 @@ -519,37 +523,37 @@ def dataType(self): localctx = PyNestMLParser.DataTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_dataType) try: - self.state = 98 + self.state = 100 self._errHandler.sync(self) 
token = self._input.LA(1) if token in [10]: self.enterOuterAlt(localctx, 1) - self.state = 92 + self.state = 94 localctx.isInt = self.match(PyNestMLParser.INTEGER_KEYWORD) pass elif token in [11]: self.enterOuterAlt(localctx, 2) - self.state = 93 + self.state = 95 localctx.isReal = self.match(PyNestMLParser.REAL_KEYWORD) pass elif token in [12]: self.enterOuterAlt(localctx, 3) - self.state = 94 + self.state = 96 localctx.isString = self.match(PyNestMLParser.STRING_KEYWORD) pass elif token in [13]: self.enterOuterAlt(localctx, 4) - self.state = 95 + self.state = 97 localctx.isBool = self.match(PyNestMLParser.BOOLEAN_KEYWORD) pass elif token in [14]: self.enterOuterAlt(localctx, 5) - self.state = 96 + self.state = 98 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass elif token in [47, 87, 88]: self.enterOuterAlt(localctx, 6) - self.state = 97 + self.state = 99 localctx.unit = self.unitType(0) pass else: @@ -635,34 +639,34 @@ def unitType(self, _p:int=0): self.enterRecursionRule(localctx, 2, self.RULE_unitType, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 109 + self.state = 111 self._errHandler.sync(self) token = self._input.LA(1) if token in [47]: - self.state = 101 + self.state = 103 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) - self.state = 102 + self.state = 104 localctx.compoundUnit = self.unitType(0) - self.state = 103 + self.state = 105 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass elif token in [88]: - self.state = 105 + self.state = 107 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) - self.state = 106 + self.state = 108 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) - self.state = 107 + self.state = 109 localctx.right = self.unitType(2) pass elif token in [87]: - self.state = 108 + self.state = 110 localctx.unit = self.match(PyNestMLParser.NAME) pass else: raise NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 122 + self.state 
= 124 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -670,32 +674,32 @@ def unitType(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 120 + self.state = 122 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,3,self._ctx) if la_ == 1: localctx = PyNestMLParser.UnitTypeContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_unitType) - self.state = 111 + self.state = 113 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 114 + self.state = 116 self._errHandler.sync(self) token = self._input.LA(1) if token in [75]: - self.state = 112 + self.state = 114 localctx.timesOp = self.match(PyNestMLParser.STAR) pass elif token in [77]: - self.state = 113 + self.state = 115 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass else: raise NoViableAltException(self) - self.state = 116 + self.state = 118 localctx.right = self.unitType(4) pass @@ -703,18 +707,18 @@ def unitType(self, _p:int=0): localctx = PyNestMLParser.UnitTypeContext(self, _parentctx, _parentState) localctx.base = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_unitType) - self.state = 117 + self.state = 119 if not self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") - self.state = 118 + self.state = 120 localctx.powOp = self.match(PyNestMLParser.STAR_STAR) - self.state = 119 + self.state = 121 localctx.exponent = self.unitTypeExponent() pass - self.state = 124 + self.state = 126 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -762,11 +766,11 @@ def 
unitTypeExponent(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 126 + self.state = 128 self._errHandler.sync(self) _la = self._input.LA(1) if _la==49 or _la==73: - self.state = 125 + self.state = 127 _la = self._input.LA(1) if not(_la==49 or _la==73): self._errHandler.recoverInline(self) @@ -775,7 +779,7 @@ def unitTypeExponent(self): self.consume() - self.state = 128 + self.state = 130 self.match(PyNestMLParser.UNSIGNED_INTEGER) except RecognitionException as re: localctx.exception = re @@ -830,28 +834,28 @@ def anyDecorator(self): localctx = PyNestMLParser.AnyDecoratorContext(self, self._ctx, self.state) self.enterRule(localctx, 6, self.RULE_anyDecorator) try: - self.state = 137 + self.state = 139 self._errHandler.sync(self) token = self._input.LA(1) if token in [43]: self.enterOuterAlt(localctx, 1) - self.state = 130 + self.state = 132 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass elif token in [44]: self.enterOuterAlt(localctx, 2) - self.state = 131 + self.state = 133 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass elif token in [45]: self.enterOuterAlt(localctx, 3) - self.state = 132 + self.state = 134 self.match(PyNestMLParser.AT) - self.state = 133 + self.state = 135 self.namespaceDecoratorNamespace() - self.state = 134 + self.state = 136 self.match(PyNestMLParser.DOUBLE_COLON) - self.state = 135 + self.state = 137 self.namespaceDecoratorName() pass else: @@ -895,7 +899,7 @@ def namespaceDecoratorNamespace(self): self.enterRule(localctx, 8, self.RULE_namespaceDecoratorNamespace) try: self.enterOuterAlt(localctx, 1) - self.state = 139 + self.state = 141 localctx.name = self.match(PyNestMLParser.NAME) except RecognitionException as re: localctx.exception = re @@ -935,7 +939,7 @@ def namespaceDecoratorName(self): self.enterRule(localctx, 10, self.RULE_namespaceDecoratorName) try: self.enterOuterAlt(localctx, 1) - self.state = 141 + self.state = 143 localctx.name = self.match(PyNestMLParser.NAME) except 
RecognitionException as re: localctx.exception = re @@ -1048,38 +1052,38 @@ def expression(self, _p:int=0): self.enterRecursionRule(localctx, 12, self.RULE_expression, _p) try: self.enterOuterAlt(localctx, 1) - self.state = 154 + self.state = 156 self._errHandler.sync(self) token = self._input.LA(1) if token in [47]: - self.state = 144 + self.state = 146 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) - self.state = 145 + self.state = 147 localctx.term = self.expression(0) - self.state = 146 + self.state = 148 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass elif token in [49, 50, 73]: - self.state = 148 + self.state = 150 self.unaryOperator() - self.state = 149 + self.state = 151 localctx.term = self.expression(9) pass elif token in [28]: - self.state = 151 + self.state = 153 localctx.logicalNot = self.match(PyNestMLParser.NOT_KEYWORD) - self.state = 152 + self.state = 154 localctx.term = self.expression(4) pass elif token in [25, 85, 86, 87, 88, 89]: - self.state = 153 + self.state = 155 self.simpleExpression() pass else: raise NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 192 + self.state = 194 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,11,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -1087,20 +1091,20 @@ def expression(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 190 + self.state = 192 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,10,self._ctx) if la_ == 1: localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 156 + self.state = 158 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 
157 + self.state = 159 localctx.powOp = self.match(PyNestMLParser.STAR_STAR) - self.state = 158 + self.state = 160 localctx.right = self.expression(10) pass @@ -1108,29 +1112,29 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 159 + self.state = 161 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 163 + self.state = 165 self._errHandler.sync(self) token = self._input.LA(1) if token in [75]: - self.state = 160 + self.state = 162 localctx.timesOp = self.match(PyNestMLParser.STAR) pass elif token in [77]: - self.state = 161 + self.state = 163 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass elif token in [78]: - self.state = 162 + self.state = 164 localctx.moduloOp = self.match(PyNestMLParser.PERCENT) pass else: raise NoViableAltException(self) - self.state = 165 + self.state = 167 localctx.right = self.expression(9) pass @@ -1138,25 +1142,25 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 166 + self.state = 168 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 169 + self.state = 171 self._errHandler.sync(self) token = self._input.LA(1) if token in [49]: - self.state = 167 + self.state = 169 localctx.plusOp = self.match(PyNestMLParser.PLUS) pass elif token in [73]: - self.state = 168 + self.state = 170 localctx.minusOp = self.match(PyNestMLParser.MINUS) pass else: raise NoViableAltException(self) - self.state = 171 + self.state = 173 localctx.right = 
self.expression(8) pass @@ -1164,13 +1168,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 172 + self.state = 174 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 173 + self.state = 175 self.bitOperator() - self.state = 174 + self.state = 176 localctx.right = self.expression(7) pass @@ -1178,13 +1182,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 176 + self.state = 178 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 177 + self.state = 179 self.comparisonOperator() - self.state = 178 + self.state = 180 localctx.right = self.expression(6) pass @@ -1192,13 +1196,13 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 180 + self.state = 182 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 181 + self.state = 183 self.logicalOperator() - self.state = 182 + self.state = 184 localctx.right = self.expression(4) pass @@ -1206,22 +1210,22 @@ def expression(self, _p:int=0): localctx = PyNestMLParser.ExpressionContext(self, _parentctx, _parentState) localctx.condition = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expression) - self.state = 184 + 
self.state = 186 if not self.precpred(self._ctx, 2): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") - self.state = 185 + self.state = 187 self.match(PyNestMLParser.QUESTION) - self.state = 186 + self.state = 188 localctx.ifTrue = self.expression(0) - self.state = 187 + self.state = 189 self.match(PyNestMLParser.COLON) - self.state = 188 + self.state = 190 localctx.ifNot = self.expression(3) pass - self.state = 194 + self.state = 196 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,11,self._ctx) @@ -1284,35 +1288,35 @@ def simpleExpression(self): self.enterRule(localctx, 14, self.RULE_simpleExpression) self._la = 0 # Token type try: - self.state = 204 + self.state = 206 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,13,self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 195 + self.state = 197 self.functionCall() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 196 + self.state = 198 self.match(PyNestMLParser.BOOLEAN_LITERAL) pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 197 + self.state = 199 _la = self._input.LA(1) if not(_la==88 or _la==89): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 199 + self.state = 201 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,12,self._ctx) if la_ == 1: - self.state = 198 + self.state = 200 self.variable() @@ -1320,19 +1324,19 @@ def simpleExpression(self): elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 201 + self.state = 203 localctx.string = self.match(PyNestMLParser.STRING_LITERAL) pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 202 + self.state = 204 localctx.isInf = self.match(PyNestMLParser.INF_KEYWORD) pass elif la_ == 6: self.enterOuterAlt(localctx, 6) - self.state = 203 + self.state = 205 self.variable() pass @@ 
-1382,22 +1386,22 @@ def unaryOperator(self): localctx = PyNestMLParser.UnaryOperatorContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_unaryOperator) try: - self.state = 209 + self.state = 211 self._errHandler.sync(self) token = self._input.LA(1) if token in [49]: self.enterOuterAlt(localctx, 1) - self.state = 206 + self.state = 208 localctx.unaryPlus = self.match(PyNestMLParser.PLUS) pass elif token in [73]: self.enterOuterAlt(localctx, 2) - self.state = 207 + self.state = 209 localctx.unaryMinus = self.match(PyNestMLParser.MINUS) pass elif token in [50]: self.enterOuterAlt(localctx, 3) - self.state = 208 + self.state = 210 localctx.unaryTilde = self.match(PyNestMLParser.TILDE) pass else: @@ -1456,32 +1460,32 @@ def bitOperator(self): localctx = PyNestMLParser.BitOperatorContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_bitOperator) try: - self.state = 216 + self.state = 218 self._errHandler.sync(self) token = self._input.LA(1) if token in [53]: self.enterOuterAlt(localctx, 1) - self.state = 211 + self.state = 213 localctx.bitAnd = self.match(PyNestMLParser.AMPERSAND) pass elif token in [52]: self.enterOuterAlt(localctx, 2) - self.state = 212 + self.state = 214 localctx.bitXor = self.match(PyNestMLParser.CARET) pass elif token in [51]: self.enterOuterAlt(localctx, 3) - self.state = 213 + self.state = 215 localctx.bitOr = self.match(PyNestMLParser.PIPE) pass elif token in [59]: self.enterOuterAlt(localctx, 4) - self.state = 214 + self.state = 216 localctx.bitShiftLeft = self.match(PyNestMLParser.LEFT_LEFT_ANGLE) pass elif token in [60]: self.enterOuterAlt(localctx, 5) - self.state = 215 + self.state = 217 localctx.bitShiftRight = self.match(PyNestMLParser.RIGHT_RIGHT_ANGLE) pass else: @@ -1548,42 +1552,42 @@ def comparisonOperator(self): localctx = PyNestMLParser.ComparisonOperatorContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_comparisonOperator) try: - self.state = 225 + self.state = 
227 self._errHandler.sync(self) token = self._input.LA(1) if token in [61]: self.enterOuterAlt(localctx, 1) - self.state = 218 + self.state = 220 localctx.lt = self.match(PyNestMLParser.LEFT_ANGLE) pass elif token in [63]: self.enterOuterAlt(localctx, 2) - self.state = 219 + self.state = 221 localctx.le = self.match(PyNestMLParser.LEFT_ANGLE_EQUALS) pass elif token in [68]: self.enterOuterAlt(localctx, 3) - self.state = 220 + self.state = 222 localctx.eq = self.match(PyNestMLParser.EQUALS_EQUALS) pass elif token in [69]: self.enterOuterAlt(localctx, 4) - self.state = 221 + self.state = 223 localctx.ne = self.match(PyNestMLParser.EXCLAMATION_EQUALS) pass elif token in [70]: self.enterOuterAlt(localctx, 5) - self.state = 222 + self.state = 224 localctx.ne2 = self.match(PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE) pass elif token in [71]: self.enterOuterAlt(localctx, 6) - self.state = 223 + self.state = 225 localctx.ge = self.match(PyNestMLParser.RIGHT_ANGLE_EQUALS) pass elif token in [62]: self.enterOuterAlt(localctx, 7) - self.state = 224 + self.state = 226 localctx.gt = self.match(PyNestMLParser.RIGHT_ANGLE) pass else: @@ -1630,17 +1634,17 @@ def logicalOperator(self): localctx = PyNestMLParser.LogicalOperatorContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_logicalOperator) try: - self.state = 229 + self.state = 231 self._errHandler.sync(self) token = self._input.LA(1) if token in [26]: self.enterOuterAlt(localctx, 1) - self.state = 227 + self.state = 229 localctx.logicalAnd = self.match(PyNestMLParser.AND_KEYWORD) pass elif token in [27]: self.enterOuterAlt(localctx, 2) - self.state = 228 + self.state = 230 localctx.logicalOr = self.match(PyNestMLParser.OR_KEYWORD) pass else: @@ -1662,7 +1666,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.name = None # Token - self.vectorParameter = None # ExpressionContext + self.vectorParameter = None # 
ExpressionOrParameterContext self.attribute = None # VariableContext def NAME(self): @@ -1683,8 +1687,8 @@ def DIFFERENTIAL_ORDER(self, i:int=None): def FULLSTOP(self): return self.getToken(PyNestMLParser.FULLSTOP, 0) - def expression(self): - return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def expressionOrParameter(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionOrParameterContext,0) def variable(self): @@ -1709,38 +1713,38 @@ def variable(self): self.enterRule(localctx, 24, self.RULE_variable) try: self.enterOuterAlt(localctx, 1) - self.state = 231 + self.state = 233 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 236 + self.state = 238 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,18,self._ctx) if la_ == 1: - self.state = 232 - self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 233 - localctx.vectorParameter = self.expression(0) self.state = 234 + self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) + self.state = 235 + localctx.vectorParameter = self.expressionOrParameter() + self.state = 236 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 241 + self.state = 243 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,19,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 238 + self.state = 240 self.match(PyNestMLParser.DIFFERENTIAL_ORDER) - self.state = 243 + self.state = 245 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,19,self._ctx) - self.state = 246 + self.state = 248 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,20,self._ctx) if la_ == 1: - self.state = 244 + self.state = 246 self.match(PyNestMLParser.FULLSTOP) - self.state = 245 + self.state = 247 localctx.attribute = self.variable() @@ -1802,31 +1806,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 248 + self.state = 250 
localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 249 + self.state = 251 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 258 + self.state = 260 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 1829587650609152) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & 126977) != 0): - self.state = 250 + self.state = 252 self.expression(0) - self.state = 255 + self.state = 257 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 251 + self.state = 253 self.match(PyNestMLParser.COMMA) - self.state = 252 + self.state = 254 self.expression(0) - self.state = 257 + self.state = 259 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 260 + self.state = 262 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ -1899,43 +1903,43 @@ def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 263 + self.state = 265 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 262 + self.state = 264 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 265 + self.state = 267 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 266 + self.state = 268 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 267 + self.state = 269 self.dataType() - self.state = 268 + self.state = 270 self.match(PyNestMLParser.EQUALS) - self.state = 269 - self.expression(0) self.state = 271 + self.expression(0) + self.state = 273 self._errHandler.sync(self) _la = self._input.LA(1) if _la==82: - self.state = 270 + self.state = 272 self.match(PyNestMLParser.SEMICOLON) - self.state = 276 + self.state = 278 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 273 + self.state = 275 localctx.decorator = self.anyDecorator() - self.state = 278 + 
self.state = 280 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 279 + self.state = 281 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -1999,31 +2003,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 281 + self.state = 283 localctx.lhs = self.variable() - self.state = 282 + self.state = 284 self.match(PyNestMLParser.EQUALS) - self.state = 283 - localctx.rhs = self.expression(0) self.state = 285 + localctx.rhs = self.expression(0) + self.state = 287 self._errHandler.sync(self) _la = self._input.LA(1) if _la==82: - self.state = 284 + self.state = 286 self.match(PyNestMLParser.SEMICOLON) - self.state = 290 + self.state = 292 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 287 + self.state = 289 localctx.decorator = self.anyDecorator() - self.state = 292 + self.state = 294 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 293 + self.state = 295 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2095,39 +2099,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 295 + self.state = 297 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 296 + self.state = 298 self.variable() - self.state = 297 + self.state = 299 self.match(PyNestMLParser.EQUALS) - self.state = 298 + self.state = 300 self.expression(0) - self.state = 306 + self.state = 308 self._errHandler.sync(self) _la = self._input.LA(1) while _la==4: - self.state = 299 + self.state = 301 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 300 + self.state = 302 self.variable() - self.state = 301 + self.state = 303 self.match(PyNestMLParser.EQUALS) - self.state = 302 + self.state = 304 self.expression(0) - self.state = 308 + self.state = 310 self._errHandler.sync(self) _la = 
self._input.LA(1) - self.state = 310 + self.state = 312 self._errHandler.sync(self) _la = self._input.LA(1) if _la==82: - self.state = 309 + self.state = 311 self.match(PyNestMLParser.SEMICOLON) - self.state = 312 + self.state = 314 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2170,17 +2174,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_stmt) try: - self.state = 316 + self.state = 318 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 17, 29, 87]: self.enterOuterAlt(localctx, 1) - self.state = 314 + self.state = 316 self.smallStmt() pass elif token in [18, 21, 22]: self.enterOuterAlt(localctx, 2) - self.state = 315 + self.state = 317 self.compoundStmt() pass else: @@ -2231,22 +2235,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 36, self.RULE_compoundStmt) try: - self.state = 321 + self.state = 323 self._errHandler.sync(self) token = self._input.LA(1) if token in [18]: self.enterOuterAlt(localctx, 1) - self.state = 318 + self.state = 320 self.ifStmt() pass elif token in [21]: self.enterOuterAlt(localctx, 2) - self.state = 319 + self.state = 321 self.forStmt() pass elif token in [22]: self.enterOuterAlt(localctx, 3) - self.state = 320 + self.state = 322 self.whileStmt() pass else: @@ -2305,31 +2309,31 @@ def smallStmt(self): self.enterRule(localctx, 38, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 327 + self.state = 329 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,32,self._ctx) if la_ == 1: - self.state = 323 + self.state = 325 self.assignment() pass elif la_ == 2: - self.state = 324 + self.state = 326 self.functionCall() pass elif la_ == 3: - self.state = 325 + self.state = 327 self.declaration() pass elif la_ == 4: - self.state = 326 + self.state = 328 self.returnStmt() pass 
- self.state = 329 + self.state = 331 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2394,35 +2398,35 @@ def assignment(self): self.enterRule(localctx, 40, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 331 + self.state = 333 localctx.lhs_variable = self.variable() - self.state = 337 + self.state = 339 self._errHandler.sync(self) token = self._input.LA(1) if token in [74]: - self.state = 332 + self.state = 334 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass elif token in [64]: - self.state = 333 + self.state = 335 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass elif token in [65]: - self.state = 334 + self.state = 336 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass elif token in [66]: - self.state = 335 + self.state = 337 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass elif token in [67]: - self.state = 336 + self.state = 338 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass else: raise NoViableAltException(self) - self.state = 339 + self.state = 341 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2510,67 +2514,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 342 + self.state = 344 self._errHandler.sync(self) _la = self._input.LA(1) if _la==29: - self.state = 341 + self.state = 343 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 345 + self.state = 347 self._errHandler.sync(self) _la = self._input.LA(1) if _la==16: - self.state = 344 + self.state = 346 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 347 + self.state = 349 self.variable() - self.state = 352 + self.state = 354 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 348 + self.state = 350 self.match(PyNestMLParser.COMMA) - 
self.state = 349 + self.state = 351 self.variable() - self.state = 354 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 355 + self.state = 357 self.dataType() - self.state = 358 + self.state = 360 self._errHandler.sync(self) _la = self._input.LA(1) if _la==74: - self.state = 356 + self.state = 358 self.match(PyNestMLParser.EQUALS) - self.state = 357 + self.state = 359 localctx.rhs = self.expression(0) - self.state = 364 + self.state = 366 self._errHandler.sync(self) _la = self._input.LA(1) if _la==57: - self.state = 360 + self.state = 362 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 361 + self.state = 363 localctx.invariant = self.expression(0) - self.state = 362 + self.state = 364 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 369 + self.state = 371 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 366 + self.state = 368 localctx.decorator = self.anyDecorator() - self.state = 371 + self.state = 373 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2615,9 +2619,9 @@ def declaration_newline(self): self.enterRule(localctx, 44, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 372 + self.state = 374 self.declaration() - self.state = 373 + self.state = 375 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2667,35 +2671,35 @@ def stmtsBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 376 + self.state = 378 self._errHandler.sync(self) _la = self._input.LA(1) if _la==9: - self.state = 375 + self.state = 377 self.match(PyNestMLParser.NEWLINE) - self.state = 378 + self.state = 380 self.stmt() - self.state = 383 + self.state = 385 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & 543621632) != 0) or _la==87: - self.state = 381 + self.state = 383 
self._errHandler.sync(self) token = self._input.LA(1) if token in [9]: - self.state = 379 + self.state = 381 self.match(PyNestMLParser.NEWLINE) pass elif token in [16, 17, 18, 21, 22, 29, 87]: - self.state = 380 + self.state = 382 self.stmt() pass else: raise NoViableAltException(self) - self.state = 385 + self.state = 387 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2741,13 +2745,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 386 - self.match(PyNestMLParser.RETURN_KEYWORD) self.state = 388 + self.match(PyNestMLParser.RETURN_KEYWORD) + self.state = 390 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 1829587650609152) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & 126977) != 0): - self.state = 387 + self.state = 389 self.expression(0) @@ -2801,23 +2805,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 390 + self.state = 392 self.ifClause() - self.state = 394 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) while _la==19: - self.state = 391 + self.state = 393 self.elifClause() - self.state = 396 + self.state = 398 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 398 + self.state = 400 self._errHandler.sync(self) _la = self._input.LA(1) if _la==20: - self.state = 397 + self.state = 399 self.elseClause() @@ -2878,19 +2882,19 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 400 + self.state = 402 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 401 + self.state = 403 self.expression(0) - self.state = 402 + self.state = 404 self.match(PyNestMLParser.COLON) - self.state = 403 + self.state = 405 self.match(PyNestMLParser.NEWLINE) - self.state = 404 + self.state = 406 self.match(PyNestMLParser.INDENT) - self.state = 405 + self.state = 407 self.stmtsBody() - self.state = 406 + 
self.state = 408 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2949,19 +2953,19 @@ def elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 408 + self.state = 410 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 409 + self.state = 411 self.expression(0) - self.state = 410 + self.state = 412 self.match(PyNestMLParser.COLON) - self.state = 411 + self.state = 413 self.match(PyNestMLParser.NEWLINE) - self.state = 412 + self.state = 414 self.match(PyNestMLParser.INDENT) - self.state = 413 + self.state = 415 self.stmtsBody() - self.state = 414 + self.state = 416 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3016,17 +3020,17 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: self.enterOuterAlt(localctx, 1) - self.state = 416 + self.state = 418 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 417 + self.state = 419 self.match(PyNestMLParser.COLON) - self.state = 418 + self.state = 420 self.match(PyNestMLParser.NEWLINE) - self.state = 419 + self.state = 421 self.match(PyNestMLParser.INDENT) - self.state = 420 + self.state = 422 self.stmtsBody() - self.state = 421 + self.state = 423 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3114,45 +3118,45 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 423 + self.state = 425 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 424 + self.state = 426 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 425 + self.state = 427 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 426 + self.state = 428 localctx.start_from = self.expression(0) - self.state = 427 + self.state = 429 self.match(PyNestMLParser.ELLIPSIS) - self.state = 428 + self.state = 430 localctx.end_at = self.expression(0) - self.state = 429 + self.state 
= 431 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 431 + self.state = 433 self._errHandler.sync(self) _la = self._input.LA(1) if _la==73: - self.state = 430 + self.state = 432 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 433 + self.state = 435 _la = self._input.LA(1) if not(_la==88 or _la==89): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 434 + self.state = 436 self.match(PyNestMLParser.COLON) - self.state = 435 + self.state = 437 self.match(PyNestMLParser.NEWLINE) - self.state = 436 + self.state = 438 self.match(PyNestMLParser.INDENT) - self.state = 437 + self.state = 439 self.stmtsBody() - self.state = 438 + self.state = 440 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3211,19 +3215,19 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 440 + self.state = 442 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 441 + self.state = 443 self.expression(0) - self.state = 442 + self.state = 444 self.match(PyNestMLParser.COLON) - self.state = 443 + self.state = 445 self.match(PyNestMLParser.NEWLINE) - self.state = 444 + self.state = 446 self.match(PyNestMLParser.INDENT) - self.state = 445 + self.state = 447 self.stmtsBody() - self.state = 446 + self.state = 448 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3276,31 +3280,31 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 450 + self.state = 452 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 450 + self.state = 452 self._errHandler.sync(self) token = self._input.LA(1) if token in [31]: - self.state = 448 + self.state = 450 self.model() pass elif token in [9]: - self.state = 449 + self.state = 451 self.match(PyNestMLParser.NEWLINE) pass else: raise 
NoViableAltException(self) - self.state = 452 + self.state = 454 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==9 or _la==31): break - self.state = 454 + self.state = 456 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3349,13 +3353,13 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 456 + self.state = 458 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 457 + self.state = 459 self.match(PyNestMLParser.NAME) - self.state = 458 + self.state = 460 self.match(PyNestMLParser.COLON) - self.state = 459 + self.state = 461 self.modelBody() except RecognitionException as re: localctx.exception = re @@ -3457,59 +3461,59 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 461 + self.state = 463 self.match(PyNestMLParser.NEWLINE) - self.state = 462 + self.state = 464 self.match(PyNestMLParser.INDENT) - self.state = 471 + self.state = 473 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 471 + self.state = 473 self._errHandler.sync(self) token = self._input.LA(1) if token in [32, 33, 34]: - self.state = 463 + self.state = 465 self.blockWithVariables() pass elif token in [36]: - self.state = 464 + self.state = 466 self.equationsBlock() pass elif token in [37]: - self.state = 465 + self.state = 467 self.inputBlock() pass elif token in [38]: - self.state = 466 + self.state = 468 self.outputBlock() pass elif token in [15]: - self.state = 467 + self.state = 469 self.function() pass elif token in [40]: - self.state = 468 + self.state = 470 self.onReceiveBlock() pass elif token in [41]: - self.state = 469 + self.state = 471 self.onConditionBlock() pass elif token in [35]: - self.state = 470 + self.state = 472 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 473 + self.state = 475 self._errHandler.sync(self) _la = self._input.LA(1) if not 
((((_la) & ~0x3f) == 0 and ((1 << _la) & 3843995762688) != 0)): break - self.state = 475 + self.state = 477 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3589,35 +3593,35 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 477 + self.state = 479 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 478 + self.state = 480 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 479 + self.state = 481 localctx.inputPortVariable = self.variable() - self.state = 484 + self.state = 486 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 480 + self.state = 482 self.match(PyNestMLParser.COMMA) - self.state = 481 + self.state = 483 self.constParameter() - self.state = 486 + self.state = 488 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 487 + self.state = 489 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 488 + self.state = 490 self.match(PyNestMLParser.COLON) - self.state = 489 + self.state = 491 self.match(PyNestMLParser.NEWLINE) - self.state = 490 + self.state = 492 self.match(PyNestMLParser.INDENT) - self.state = 491 + self.state = 493 self.stmtsBody() - self.state = 492 + self.state = 494 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3697,35 +3701,35 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 494 + self.state = 496 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 495 + self.state = 497 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 496 + self.state = 498 localctx.condition = self.expression(0) - self.state = 501 + self.state = 503 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 497 + self.state = 499 self.match(PyNestMLParser.COMMA) - self.state = 498 + self.state = 500 self.constParameter() - self.state = 503 + self.state = 505 
self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 504 + self.state = 506 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 505 + self.state = 507 self.match(PyNestMLParser.COLON) - self.state = 506 + self.state = 508 self.match(PyNestMLParser.NEWLINE) - self.state = 507 + self.state = 509 self.match(PyNestMLParser.INDENT) - self.state = 508 + self.state = 510 self.stmtsBody() - self.state = 509 + self.state = 511 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3791,7 +3795,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 511 + self.state = 513 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 30064771072) != 0)): @@ -3799,25 +3803,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 512 + self.state = 514 self.match(PyNestMLParser.COLON) - self.state = 513 + self.state = 515 self.match(PyNestMLParser.NEWLINE) - self.state = 514 + self.state = 516 self.match(PyNestMLParser.INDENT) - self.state = 516 + self.state = 518 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 515 + self.state = 517 self.declaration_newline() - self.state = 518 + self.state = 520 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==16 or _la==29 or _la==87): break - self.state = 520 + self.state = 522 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3872,17 +3876,17 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 522 + self.state = 524 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 523 + self.state = 525 self.match(PyNestMLParser.COLON) - self.state = 524 + self.state = 526 self.match(PyNestMLParser.NEWLINE) - self.state = 525 + self.state = 527 
self.match(PyNestMLParser.INDENT) - self.state = 526 + self.state = 528 self.stmtsBody() - self.state = 527 + self.state = 529 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3955,43 +3959,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 529 + self.state = 531 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 530 + self.state = 532 self.match(PyNestMLParser.COLON) - self.state = 531 + self.state = 533 self.match(PyNestMLParser.NEWLINE) - self.state = 532 + self.state = 534 self.match(PyNestMLParser.INDENT) - self.state = 536 + self.state = 538 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 536 + self.state = 538 self._errHandler.sync(self) token = self._input.LA(1) if token in [16, 29]: - self.state = 533 + self.state = 535 self.inlineExpression() pass elif token in [87]: - self.state = 534 + self.state = 536 self.odeEquation() pass elif token in [30]: - self.state = 535 + self.state = 537 self.kernel() pass else: raise NoViableAltException(self) - self.state = 538 + self.state = 540 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==87): break - self.state = 540 + self.state = 542 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4057,39 +4061,39 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 542 + self.state = 544 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 543 + self.state = 545 self.match(PyNestMLParser.COLON) - self.state = 544 + self.state = 546 self.match(PyNestMLParser.NEWLINE) - self.state = 545 + self.state = 547 self.match(PyNestMLParser.INDENT) - self.state = 548 + self.state = 550 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 548 + self.state = 550 self._errHandler.sync(self) la_ = 
self._interp.adaptivePredict(self._input,56,self._ctx) if la_ == 1: - self.state = 546 + self.state = 548 self.spikeInputPort() pass elif la_ == 2: - self.state = 547 + self.state = 549 self.continuousInputPort() pass - self.state = 550 + self.state = 552 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==87): break - self.state = 552 + self.state = 554 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4169,55 +4173,55 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 554 + self.state = 556 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 559 + self.state = 561 self._errHandler.sync(self) _la = self._input.LA(1) if _la==54: - self.state = 555 + self.state = 557 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 556 + self.state = 558 localctx.sizeParameter = self.expression(0) - self.state = 557 + self.state = 559 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 561 + self.state = 563 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 562 + self.state = 564 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 575 + self.state = 577 self._errHandler.sync(self) _la = self._input.LA(1) if _la==47: - self.state = 563 + self.state = 565 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 572 + self.state = 574 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 564 + self.state = 566 self.parameter() - self.state = 569 + self.state = 571 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 565 + self.state = 567 self.match(PyNestMLParser.COMMA) - self.state = 566 + self.state = 568 self.parameter() - self.state = 571 + self.state = 573 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 574 + self.state = 576 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 577 + self.state = 579 self.match(PyNestMLParser.NEWLINE) except 
RecognitionException as re: localctx.exception = re @@ -4301,57 +4305,57 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 579 + self.state = 581 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 584 + self.state = 586 self._errHandler.sync(self) _la = self._input.LA(1) if _la==54: - self.state = 580 + self.state = 582 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 581 + self.state = 583 localctx.sizeParameter = self.expression(0) - self.state = 582 + self.state = 584 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 586 + self.state = 588 self.dataType() - self.state = 587 + self.state = 589 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 588 + self.state = 590 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 601 + self.state = 603 self._errHandler.sync(self) _la = self._input.LA(1) if _la==47: - self.state = 589 + self.state = 591 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 598 + self.state = 600 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 590 + self.state = 592 self.parameter() - self.state = 595 + self.state = 597 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 591 + self.state = 593 self.match(PyNestMLParser.COMMA) - self.state = 592 + self.state = 594 self.parameter() - self.state = 597 + self.state = 599 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 600 + self.state = 602 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 603 + self.state = 605 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4434,61 +4438,61 @@ def outputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 605 + self.state = 607 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 606 + self.state = 608 self.match(PyNestMLParser.COLON) - self.state = 607 + self.state = 609 
self.match(PyNestMLParser.NEWLINE) - self.state = 608 + self.state = 610 self.match(PyNestMLParser.INDENT) - self.state = 625 + self.state = 627 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 609 + self.state = 611 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 622 + self.state = 624 self._errHandler.sync(self) _la = self._input.LA(1) if _la==47: - self.state = 610 + self.state = 612 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 619 + self.state = 621 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 611 + self.state = 613 localctx.attribute = self.parameter() - self.state = 616 + self.state = 618 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 612 + self.state = 614 self.match(PyNestMLParser.COMMA) - self.state = 613 + self.state = 615 localctx.attribute = self.parameter() - self.state = 618 + self.state = 620 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 621 + self.state = 623 self.match(PyNestMLParser.RIGHT_PAREN) pass elif token in [39]: - self.state = 624 + self.state = 626 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 627 + self.state = 629 self.match(PyNestMLParser.NEWLINE) - self.state = 628 + self.state = 630 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4571,51 +4575,51 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 630 + self.state = 632 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 631 + self.state = 633 self.match(PyNestMLParser.NAME) - self.state = 632 + self.state = 634 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 641 + self.state = 643 self._errHandler.sync(self) _la = self._input.LA(1) if _la==87: - self.state = 633 + self.state = 635 self.parameter() - self.state = 638 + self.state = 
640 self._errHandler.sync(self) _la = self._input.LA(1) while _la==72: - self.state = 634 + self.state = 636 self.match(PyNestMLParser.COMMA) - self.state = 635 + self.state = 637 self.parameter() - self.state = 640 + self.state = 642 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 643 - self.match(PyNestMLParser.RIGHT_PAREN) self.state = 645 + self.match(PyNestMLParser.RIGHT_PAREN) + self.state = 647 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & 140737488387072) != 0) or _la==87 or _la==88: - self.state = 644 + self.state = 646 localctx.returnType = self.dataType() - self.state = 647 + self.state = 649 self.match(PyNestMLParser.COLON) - self.state = 648 + self.state = 650 self.match(PyNestMLParser.NEWLINE) - self.state = 649 + self.state = 651 self.match(PyNestMLParser.INDENT) - self.state = 650 + self.state = 652 self.stmtsBody() - self.state = 651 + self.state = 653 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4658,9 +4662,9 @@ def parameter(self): self.enterRule(localctx, 88, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 653 + self.state = 655 self.match(PyNestMLParser.NAME) - self.state = 654 + self.state = 656 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4671,6 +4675,63 @@ def parameter(self): return localctx + class ExpressionOrParameterContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def parameter(self): + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,0) + + + def expression(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + + + def getRuleIndex(self): + return PyNestMLParser.RULE_expressionOrParameter + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, 
"visitExpressionOrParameter" ): + return visitor.visitExpressionOrParameter(self) + else: + return visitor.visitChildren(self) + + + + + def expressionOrParameter(self): + + localctx = PyNestMLParser.ExpressionOrParameterContext(self, self._ctx, self.state) + self.enterRule(localctx, 90, self.RULE_expressionOrParameter) + try: + self.state = 660 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,73,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 658 + self.parameter() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 659 + self.expression(0) + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class ConstParameterContext(ParserRuleContext): __slots__ = 'parser' @@ -4716,15 +4777,15 @@ def accept(self, visitor:ParseTreeVisitor): def constParameter(self): localctx = PyNestMLParser.ConstParameterContext(self, self._ctx, self.state) - self.enterRule(localctx, 90, self.RULE_constParameter) + self.enterRule(localctx, 92, self.RULE_constParameter) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 656 + self.state = 662 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 657 + self.state = 663 self.match(PyNestMLParser.EQUALS) - self.state = 658 + self.state = 664 localctx.value = self._input.LT(1) _la = self._input.LA(1) if not(_la==25 or ((((_la - 85)) & ~0x3f) == 0 and ((1 << (_la - 85)) & 27) != 0)): diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index d0cd6f710..b1a4902dd 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -234,6 +234,11 @@ def visitParameter(self, ctx:PyNestMLParser.ParameterContext): return self.visitChildren(ctx) + # Visit a parse tree produced by 
PyNestMLParser#expressionOrParameter. + def visitExpressionOrParameter(self, ctx:PyNestMLParser.ExpressionOrParameterContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PyNestMLParser#constParameter. def visitConstParameter(self, ctx:PyNestMLParser.ConstParameterContext): return self.visitChildren(ctx) diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 2036db3b3..057bf4f3e 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -128,7 +128,7 @@ parser grammar PyNestMLParser; * @attribute differentialOrder: The corresponding differential order, e.g. 2 **/ variable : name=NAME - (LEFT_SQUARE_BRACKET vectorParameter=expression RIGHT_SQUARE_BRACKET)? + (LEFT_SQUARE_BRACKET vectorParameter=expressionOrParameter RIGHT_SQUARE_BRACKET)? (DIFFERENTIAL_ORDER)* (FULLSTOP attribute=variable)?; @@ -347,6 +347,8 @@ parser grammar PyNestMLParser; **/ parameter : NAME dataType; + expressionOrParameter : parameter | expression; + /** * ASTConstParameter represents a single parameter consisting of a name and a literal default value, e.g. ``foo=42``. * @attribute name: The name of the parameter. diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 3829a1107..30349f1c7 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2585,7 +2585,7 @@ def nestml_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, in continue if input_port.get_size_parameter(): - for i in range(int(str(input_port.size_parameter))): # XXX: should be able to convert size_parameter expression to an integer more generically (allowing for e.g. parameters) + for i in range(int(str(input_port.size_parameter))): # XXX: should be able to convert size_parameter expression to an integer more generically (allowing for e.g. 
parameters) input_port_to_rport[input_port.name + "_VEC_IDX_" + str(i)] = rport rport += 1 else: @@ -2609,4 +2609,4 @@ def port_name_printer(cls, variable: ASTVariable) -> str: @classmethod def is_parameter(cls, variable) -> str: - return isinstance(variable, ASTParameter) \ No newline at end of file + return isinstance(variable, ASTParameter) diff --git a/pynestml/visitors/ast_function_call_visitor.py b/pynestml/visitors/ast_function_call_visitor.py index 8fad92f92..2bf5b245f 100644 --- a/pynestml/visitors/ast_function_call_visitor.py +++ b/pynestml/visitors/ast_function_call_visitor.py @@ -18,10 +18,9 @@ # # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -from pynestml.symbols.unit_type_symbol import UnitTypeSymbol - -from pynestml.symbols.predefined_units import PredefinedUnits +from typing import Optional +from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.symbols.error_type_symbol import ErrorTypeSymbol from pynestml.symbols.template_type_symbol import TemplateTypeSymbol @@ -58,17 +57,22 @@ def visit_simple_expression(self, node: ASTSimpleExpression) -> None: buffer_parameter = node.get_function_call().get_args()[1] if buffer_parameter.get_variable() is not None: - if not buffer_parameter.get_variable().get_attribute(): - # an attribute is missing for the spiking input port - code, message = Messages.get_spike_input_port_attribute_missing(buffer_name) - Logger.log_message(code=code, message=message, error_position=node.get_source_position(), - log_level=LoggingLevel.ERROR) - node.type = ErrorTypeSymbol() - return + # if not buffer_parameter.get_variable().get_attribute(): + # # an attribute is missing for the spiking input port + # XXX: attributes only required for ports that have them, but don't have access to the ASTModel object, so can't run ASTUtils.get_input_port_by_name!!! 
+ # import pdb;pdb.set_trace() + # code, message = Messages.get_spike_input_port_attribute_missing(buffer_parameter.get_variable().get_name()) + # Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + # log_level=LoggingLevel.ERROR) + # node.type = ErrorTypeSymbol() + # return buffer_name = buffer_parameter.get_variable().get_name() + "." + str(buffer_parameter.get_variable().get_attribute()) buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE) + if not buffer_symbol_resolve: + buffer_symbol_resolve = scope.resolve_to_symbol(buffer_parameter.get_variable().get_name(), SymbolKind.VARIABLE) + assert buffer_symbol_resolve is not None node.type = buffer_symbol_resolve.get_type_symbol() return diff --git a/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml b/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml index 9a9b6571e..2e70b073a 100644 --- a/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml +++ b/tests/invalid/CoCoConvolveNotCorrectlyProvided.nestml @@ -37,8 +37,8 @@ model CoCoConvolveNotCorrectlyProvided: g_ex mV = 10mV equations: - kernel test = 10 - inline testB pA = convolve(g_ex, g_ex) + test + kernel test = delta(t) + inline testB pA = convolve(g_ex, g_ex) V_m' = 20 mV/ms input: diff --git a/tests/lexer_parser_test.py b/tests/lexer_parser_test.py index b5d35696e..92e872b82 100644 --- a/tests/lexer_parser_test.py +++ b/tests/lexer_parser_test.py @@ -55,8 +55,6 @@ def test(self): stream.fill() # parse the file parser = PyNestMLParser(stream) - parser._errHandler = BailErrorStrategy() - parser._errHandler.reset(parser) compilation_unit = parser.nestMLCompilationUnit() assert compilation_unit is not None diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index 80b36cd9a..ca4a7224d 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -51,96 +51,95 @@ def generate_all_models(self): 
codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" - generate_nest_target(input_path=[# "models/neurons/hh_cond_exp_traub_neuron.nestml", - # "models/neurons/hh_psc_alpha_neuron.nestml", - # "models/neurons/iaf_cond_beta_neuron.nestml", + generate_nest_target(input_path=["models/neurons/hh_cond_exp_traub_neuron.nestml", + "models/neurons/hh_psc_alpha_neuron.nestml", + "models/neurons/iaf_cond_beta_neuron.nestml", "models/neurons/iaf_cond_alpha_neuron.nestml", - # "models/neurons/iaf_cond_exp_neuron.nestml", - # "models/neurons/iaf_psc_alpha_neuron.nestml", - # "models/neurons/iaf_psc_exp_neuron.nestml", - # "models/neurons/iaf_psc_delta_neuron.nestml" - ], + "models/neurons/iaf_cond_exp_neuron.nestml", + "models/neurons/iaf_psc_alpha_neuron.nestml", + "models/neurons/iaf_psc_exp_neuron.nestml", + "models/neurons/iaf_psc_delta_neuron.nestml"], target_path="/tmp/nestml-allmodels", logging_level="DEBUG", module_name="nestml_allmodels_module", suffix="_nestml", codegen_opts=codegen_opts) - # # generate code with analytic solver disabled - # alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} + # generate code with analytic solver disabled + alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} - # generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", - # "models/neurons/aeif_cond_alpha_neuron.nestml"], - # target_path="/tmp/nestml-alt-allmodels", - # logging_level="DEBUG", - # module_name="nestml_alt_allmodels_module", - # suffix="_alt_nestml", - # codegen_opts=alt_codegen_opts) + generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", + "models/neurons/aeif_cond_alpha_neuron.nestml"], + target_path="/tmp/nestml-alt-allmodels", + logging_level="DEBUG", + module_name="nestml_alt_allmodels_module", + suffix="_alt_nestml", + codegen_opts=alt_codegen_opts) - # # generate code using forward Euler integrator - # 
alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} + # generate code using forward Euler integrator + alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} - # generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", - # target_path="/tmp/nestml-alt-int-allmodels", - # logging_level="DEBUG", - # module_name="nestml_alt_int_allmodels_module", - # suffix="_alt_int_nestml", - # codegen_opts=alt_codegen_opts) + generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", + target_path="/tmp/nestml-alt-int-allmodels", + logging_level="DEBUG", + module_name="nestml_alt_int_allmodels_module", + suffix="_alt_int_nestml", + codegen_opts=alt_codegen_opts) def test_nest_integration(self): self.generate_all_models() nest.Install("nestml_allmodels_module") - # nest.Install("nestml_alt_allmodels_module") - # nest.Install("nestml_alt_int_allmodels_module") + nest.Install("nestml_alt_allmodels_module") + nest.Install("nestml_alt_int_allmodels_module") - # self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_psc_exp", 
"iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - # self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - # self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", 
tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - # iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} - # iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) - # self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - # self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - # self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") + iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} + iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) + self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, 
nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") - # self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") - # self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") - # self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") + self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") + self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") + self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") - # nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - # self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - # self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - # self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - # nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # 
matching the defaults in NEST - # self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - # self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - # self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - # self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") + self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. 
Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") - # self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - # self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") + self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") # -------------- # XXX: TODO! diff --git a/tests/test_cocos.py b/tests/test_cocos.py index af6eb6c16..572614045 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -219,7 +219,7 @@ def test_valid_ode_vars_outside_init_block_detected(self): def test_invalid_convolve_correctly_defined(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + assert any(["Type of 'convolve(g_ex,g_ex)' could not be derived!" 
in log_entry[2] for log_entry in Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)]) def test_valid_convolve_correctly_defined(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) diff --git a/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml b/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml index 0d7f464f7..3126b5cf9 100644 --- a/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml +++ b/tests/valid/CoCoConvolveNotCorrectlyProvided.nestml @@ -33,7 +33,7 @@ along with NEST. If not, see . """ model CoCoConvolveNotCorrectlyProvided: equations: - kernel test = 10 + kernel test = delta(t) inline testB pA = convolve(test, spikeExc) * pA # convolve provided with a kernel and a spike input port, thus correct input: From 4e1ddf12e8a12ae33c45a961e1173c52039537fb Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Fri, 13 Dec 2024 16:14:21 +0100 Subject: [PATCH 24/68] add attributes to spiking input ports --- models/neurons/aeif_cond_exp_neuron.nestml | 2 +- .../co_co_convolve_has_correct_parameter.py | 2 +- .../codegeneration/nest_code_generator.py | 8 +- .../point_neuron/common/NeuronClass.jinja2 | 17 ++- .../point_neuron/common/NeuronHeader.jinja2 | 8 +- pynestml/meta_model/ast_on_receive_block.py | 2 +- .../visitors/ast_function_call_visitor.py | 75 +++++++--- pynestml/visitors/ast_symbol_table_visitor.py | 27 +++- tests/nest_tests/nest_integration_test.py | 138 +++++++++--------- .../ConvolveSpikingNoAttributes.nestml | 44 ++++++ tests/nest_tests/test_convolve.py | 73 +++++++++ tests/test_cocos.py | 2 +- 12 files changed, 291 insertions(+), 107 deletions(-) create mode 100644 tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml create mode 100644 tests/nest_tests/test_convolve.py diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index fffe5c09d..a5c1f5b93 100644 --- 
a/models/neurons/aeif_cond_exp_neuron.nestml +++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -65,7 +65,7 @@ model aeif_cond_exp_neuron: inline exp_arg real = (V_bounded - V_th) / Delta_T inline I_spike pA = g_L * Delta_T * exp(exp_arg) inline I_syn_exc pA = g_syn_exc * (V_bounded - E_exc) - inline I_syn_inh pA = g_syn_inh * nS * (V_bounded - E_inh) + inline I_syn_inh pA = g_syn_inh * (V_bounded - E_inh) V_m' = (-g_L * (V_bounded - E_L) + I_spike - I_syn_exc - I_syn_inh - w + I_e + I_stim) / C_m w' = (a * (V_bounded - E_L) - w) / tau_w diff --git a/pynestml/cocos/co_co_convolve_has_correct_parameter.py b/pynestml/cocos/co_co_convolve_has_correct_parameter.py index 8e3661a2d..38d0ee3d4 100644 --- a/pynestml/cocos/co_co_convolve_has_correct_parameter.py +++ b/pynestml/cocos/co_co_convolve_has_correct_parameter.py @@ -19,9 +19,9 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . +from pynestml.cocos.co_co import CoCo from pynestml.meta_model.ast_function_call import ASTFunctionCall from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression -from pynestml.cocos.co_co import CoCo from pynestml.symbols.predefined_functions import PredefinedFunctions from pynestml.utils.logger import LoggingLevel, Logger from pynestml.utils.messages import Messages diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index a9d01e2df..be560e486 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -967,7 +967,13 @@ def get_spike_update_expressions(self, neuron: ASTModel, kernel_buffers, solver_ orig_port_name = spike_input_port_name[:spike_input_port_name.index("__for_")] buffer_type = neuron.paired_synapse.get_scope().resolve_to_symbol(orig_port_name, SymbolKind.VARIABLE).get_type_symbol() else: - buffer_type = neuron.get_scope().resolve_to_symbol(spike_input_port_name + "." 
+ str(spike_input_port.get_variable().get_attribute()), SymbolKind.VARIABLE).get_type_symbol() + # not a post port + if spike_input_port.get_variable().get_attribute(): + variable_name = spike_input_port_name + "." + str(spike_input_port.get_variable().get_attribute()) + else: + variable_name = spike_input_port_name + + buffer_type = neuron.get_scope().resolve_to_symbol(variable_name, SymbolKind.VARIABLE).get_type_symbol() assert not buffer_type is None diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 6a48915ba..613031bd9 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -211,6 +211,8 @@ namespace nest , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} + , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) {%- endif %} @@ -247,6 +249,8 @@ namespace nest , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} + , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. 
) {%- endif %} @@ -466,12 +470,14 @@ void {{ neuronName }}::init_buffers_() B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} {%- endif %} + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_.clear(); B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_.clear(); {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} + B_.spike_input_{{ inputPort.name }}_.clear(); B_.spike_input_{{ inputPort.name }}_spike_input_received_.clear(); {%- endif %} {%- endfor %} @@ -777,12 +783,14 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_value(lag); {%- endfor %} {%- endif %} + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_.get_value(lag); B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_.get_value(lag); {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); {%- endfor %} + B_.spike_input_{{ inputPort.name }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_.get_value(lag); B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_spike_input_received_.get_value(lag); {%- endif %} {%- endfor %} @@ -1142,13 +1150,14 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * 
e.get_multiplicity() ); {%- endfor %} -{%- else %} -{# no attributes defined for the spike event; in this case, there is only one single buffer #} +{%- endif %} + + // add an unweighted spike to the general "train of delta pulses" input buffer B_.spike_input_{{ spike_in_port_name }}_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), - e.get_weight() * e.get_multiplicity() ); -{%- endif %} + e.get_multiplicity() ); + // set the "spike received" flag B_.spike_input_{{ spike_in_port_name }}_spike_input_received_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), 1. ); diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index e7c35a877..8319173b2 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -802,6 +802,8 @@ private: double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} {%- endif %} + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_; nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; {%- endfor %} @@ -810,8 +812,10 @@ private: nest::RingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} - nest::RingBuffer spike_input_{{ inputPort.name }}_spike_input_received_; - double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; + nest::RingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes + nest::RingBuffer 
spike_input_{{ inputPort.name }}_spike_input_received_; // buffer for the "spike received" boolean flag + double spike_input_{{ inputPort.name }}_grid_sum_; // buffer for unweighted spikes + double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; // buffer for the "spike received" boolean flag {%- endif %} {%- endfor %} diff --git a/pynestml/meta_model/ast_on_receive_block.py b/pynestml/meta_model/ast_on_receive_block.py index d1c4f86b7..26619d28a 100644 --- a/pynestml/meta_model/ast_on_receive_block.py +++ b/pynestml/meta_model/ast_on_receive_block.py @@ -89,7 +89,7 @@ def get_children(self) -> List[ASTNode]: Returns the children of this node, if any. :return: List of children of this node. """ - return [self.get_input_port_variable(), self.get_block(), self.get_stmts_body()] + return [self.get_input_port_variable(), self.get_stmts_body(), self.get_stmts_body()] def equals(self, other: ASTNode) -> bool: r""" diff --git a/pynestml/visitors/ast_function_call_visitor.py b/pynestml/visitors/ast_function_call_visitor.py index 2bf5b245f..df84bac11 100644 --- a/pynestml/visitors/ast_function_call_visitor.py +++ b/pynestml/visitors/ast_function_call_visitor.py @@ -20,12 +20,16 @@ # along with NEST. If not, see . 
from typing import Optional +from pynestml.meta_model.ast_equations_block import ASTEquationsBlock from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.symbols.error_type_symbol import ErrorTypeSymbol +from pynestml.symbols.predefined_units import PredefinedUnits +from pynestml.symbols.real_type_symbol import RealTypeSymbol from pynestml.symbols.template_type_symbol import TemplateTypeSymbol from pynestml.symbols.predefined_functions import PredefinedFunctions from pynestml.symbols.symbol import SymbolKind +from pynestml.symbols.unit_type_symbol import UnitTypeSymbol from pynestml.symbols.void_type_symbol import VoidTypeSymbol from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import LoggingLevel, Logger @@ -45,7 +49,7 @@ def visit_simple_expression(self, node: ASTSimpleExpression) -> None: :param node: a simple expression """ assert isinstance(node, ASTSimpleExpression), \ - '(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!' % tuple(node) + "(PyNestML.Visitor.FunctionCallVisitor) No or wrong type of simple expression provided (%s)!" % tuple(node) assert (node.get_scope() is not None), \ "(PyNestML.Visitor.FunctionCallVisitor) No scope found, run symboltable creator!" scope = node.get_scope() @@ -55,28 +59,57 @@ def visit_simple_expression(self, node: ASTSimpleExpression) -> None: # return type of the convolve function is the type of the second parameter (the spike input buffer) if function_name == PredefinedFunctions.CONVOLVE: buffer_parameter = node.get_function_call().get_args()[1] - - if buffer_parameter.get_variable() is not None: - # if not buffer_parameter.get_variable().get_attribute(): - # # an attribute is missing for the spiking input port - # XXX: attributes only required for ports that have them, but don't have access to the ASTModel object, so can't run ASTUtils.get_input_port_by_name!!! 
- # import pdb;pdb.set_trace() - # code, message = Messages.get_spike_input_port_attribute_missing(buffer_parameter.get_variable().get_name()) - # Logger.log_message(code=code, message=message, error_position=node.get_source_position(), - # log_level=LoggingLevel.ERROR) - # node.type = ErrorTypeSymbol() - # return - - buffer_name = buffer_parameter.get_variable().get_name() + "." + str(buffer_parameter.get_variable().get_attribute()) - buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE) - - if not buffer_symbol_resolve: - buffer_symbol_resolve = scope.resolve_to_symbol(buffer_parameter.get_variable().get_name(), SymbolKind.VARIABLE) - - assert buffer_symbol_resolve is not None - node.type = buffer_symbol_resolve.get_type_symbol() + print("var === " + str(buffer_parameter)) + + assert buffer_parameter.get_variable() is not None + + if "." in buffer_parameter.get_variable().get_name(): + # the type of the convolve call is [the type of the attribute] * [s] + input_port = ASTUtils.get_input_port_by_name(buffer_parameter.get_name()) + assert input_port is not None + import pdb;pdb.set_trace() + else: + # convolve with a train of delta pulses --> the type of the convolve call is [1] + node.type = RealTypeSymbol() return + # if not buffer_parameter.get_variable().get_attribute(): + # # an attribute is missing for the spiking input port + # XXX: attributes only required for ports that have them, but don't have access to the ASTModel object, so can't run ASTUtils.get_input_port_by_name!!! + # import pdb;pdb.set_trace() + # code, message = Messages.get_spike_input_port_attribute_missing(buffer_parameter.get_variable().get_name()) + # Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + # log_level=LoggingLevel.ERROR) + # node.type = ErrorTypeSymbol() + # return + + # buffer_name = buffer_parameter.get_variable().get_name() + "." 
+ str(buffer_parameter.get_variable().get_attribute()) + # buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE) + + # if not buffer_symbol_resolve: + # buffer_symbol_resolve = scope.resolve_to_symbol(buffer_parameter.get_variable().get_name(), SymbolKind.VARIABLE) + + # import pdb;pdb.set_trace() + + # if buffer_symbol_resolve is None: + # # the name of the input port is used without attributes + # print("ASSIGN TO CONVOLVE CLAL?????????") + + # if ASTUtils.find_parent_node_by_type(node, ASTEquationsBlock): + # # if this port name appears inside an equations block, it is interpreted as a train of delta pulses with units [1/s]; after applying the convolve() function, a unit of [1] remains + # from astropy import units as u + # from pynestml.utils.unit_type import UnitType + # node.type = RealTypeSymbol() #UnitTypeSymbol(UnitType(name=str("1/s"), unit=1/u.si.s)) + # print("ASSIGNED REAL TO CONVOLVE CLAL") + # return + + # # if this port name appears elsewhere, it cannot be interpreted by itself without an attribute + # node.type = ErrorTypeSymbol() + # return + + # node.type = buffer_symbol_resolve.get_type_symbol() + # return + # check if this is a delay variable symbol = ASTUtils.get_delay_variable_symbol(node.get_function_call()) if method_symbol is None and symbol is not None: diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 629dee0af..91f2ee796 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -180,6 +180,7 @@ def visit_on_receive_block(self, node): source_position=node.get_source_position()) node.get_scope().add_scope(scope) node.get_stmts_body().update_scope(scope) + node.get_input_port_variable().update_scope(scope) def endvisit_on_receive_block(self, node=None): self.block_type_stack.pop() @@ -650,15 +651,10 @@ def endvisit_input_port(self, node: ASTInputPort): assert node.is_spike() - if node.parameters 
is None or len(node.parameters) == 0: - type_symbol = ErrorTypeSymbol() # not allowed to use a bare spike input port name in expressions etc. - else: + if node.parameters: for parameter in node.parameters: type_symbol = parameter.get_data_type().type_symbol type_symbol.is_buffer = True # set it as a buffer - if node.has_size_parameter(): - if isinstance(node.get_size_parameter(), ASTSimpleExpression) and node.get_size_parameter().is_variable(): - node.get_size_parameter().update_scope(node.get_scope()) symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_name() + "." + parameter.get_name(), block_type=BlockType.INPUT, vector_parameter=node.get_size_parameter(), is_predefined=False, is_inline_expression=False, is_recordable=False, @@ -666,6 +662,25 @@ def endvisit_input_port(self, node: ASTInputPort): attribute=parameter.get_name()) node.get_scope().add_symbol(symbol) + # add a symbol for the bare input port (without any attributes) + symbol = VariableSymbol(element_reference=node, + scope=node.get_scope(), + name=node.get_name(), + block_type=BlockType.INPUT, + declaring_expression=None, + is_predefined=False, + is_inline_expression=False, + is_recordable=False, + type_symbol=PredefinedTypes.get_type("s")**-1, + vector_parameter=node.get_size_parameter(), + variable_type=VariableType.BUFFER) + symbol.set_comment(node.get_comment()) + node.get_scope().add_symbol(symbol) + + if node.has_size_parameter(): + if isinstance(node.get_size_parameter(), ASTSimpleExpression) and node.get_size_parameter().is_variable(): + node.get_size_parameter().update_scope(node.get_scope()) + def visit_stmt(self, node: ASTStmt): """ Private method: Used to visit a single stmt and update its scope. 
diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index ca4a7224d..747876910 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -51,19 +51,19 @@ def generate_all_models(self): codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" - generate_nest_target(input_path=["models/neurons/hh_cond_exp_traub_neuron.nestml", - "models/neurons/hh_psc_alpha_neuron.nestml", - "models/neurons/iaf_cond_beta_neuron.nestml", - "models/neurons/iaf_cond_alpha_neuron.nestml", - "models/neurons/iaf_cond_exp_neuron.nestml", - "models/neurons/iaf_psc_alpha_neuron.nestml", - "models/neurons/iaf_psc_exp_neuron.nestml", - "models/neurons/iaf_psc_delta_neuron.nestml"], - target_path="/tmp/nestml-allmodels", - logging_level="DEBUG", - module_name="nestml_allmodels_module", - suffix="_nestml", - codegen_opts=codegen_opts) + # generate_nest_target(input_path=["models/neurons/hh_cond_exp_traub_neuron.nestml", + # "models/neurons/hh_psc_alpha_neuron.nestml", + # "models/neurons/iaf_cond_beta_neuron.nestml", + # "models/neurons/iaf_cond_alpha_neuron.nestml", + # "models/neurons/iaf_cond_exp_neuron.nestml", + # "models/neurons/iaf_psc_alpha_neuron.nestml", + # "models/neurons/iaf_psc_exp_neuron.nestml", + # "models/neurons/iaf_psc_delta_neuron.nestml"], + # target_path="/tmp/nestml-allmodels", + # logging_level="DEBUG", + # module_name="nestml_allmodels_module", + # suffix="_nestml", + # codegen_opts=codegen_opts) # generate code with analytic solver disabled alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} @@ -76,67 +76,67 @@ def generate_all_models(self): suffix="_alt_nestml", codegen_opts=alt_codegen_opts) - # generate code using forward Euler integrator - alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} + # # generate code using forward Euler integrator + # 
alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} - generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", - target_path="/tmp/nestml-alt-int-allmodels", - logging_level="DEBUG", - module_name="nestml_alt_int_allmodels_module", - suffix="_alt_int_nestml", - codegen_opts=alt_codegen_opts) + # generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", + # target_path="/tmp/nestml-alt-int-allmodels", + # logging_level="DEBUG", + # module_name="nestml_alt_int_allmodels_module", + # suffix="_alt_int_nestml", + # codegen_opts=alt_codegen_opts) def test_nest_integration(self): self.generate_all_models() - nest.Install("nestml_allmodels_module") + # nest.Install("nestml_allmodels_module") nest.Install("nestml_alt_allmodels_module") - nest.Install("nestml_alt_int_allmodels_module") - - self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - - self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - - self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - - self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", 
tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - - self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - - iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} - iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) - self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") - - self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") - self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") - self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") - - nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - self._test_model_equivalence_subthreshold("hh_psc_alpha", 
"hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - - nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - - self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. 
Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") + # nest.Install("nestml_alt_int_allmodels_module") + + # self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + + # self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + + # self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + + # self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + # self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + # self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + # self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + + # 
self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + # self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + # self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + + # iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} + # iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) + # self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + # self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + # self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") + + # self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") + # self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") + # self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") + + # nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + # self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + # self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + # self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + + # 
nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + # self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + # self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + + # self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + # self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. 
Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") diff --git a/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml b/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml new file mode 100644 index 000000000..d2f8509fe --- /dev/null +++ b/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml @@ -0,0 +1,44 @@ +""" +ConvolveSpikingNoAttributes - Test convolution with spiking input ports without attributes +########################################################################################## + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model convolve_spiking_no_attributes_neuron: + state: + x real = 0. + y real = 0. 
+ + equations: + #kernel K = (e / tau_syn) * t * exp(-t / tau_syn) + kernel K = delta(t) + x' = convolve(K, spikes_in) / s + y' = spikes_in + + parameters: + tau_syn ms = 2 ms + + input: + spikes_in <- spike + + update: + integrate_odes() diff --git a/tests/nest_tests/test_convolve.py b/tests/nest_tests/test_convolve.py new file mode 100644 index 000000000..b6b44ffab --- /dev/null +++ b/tests/nest_tests/test_convolve.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# +# test_convolve.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see .
+ +import os +import pytest + +import nest + +from pynestml.frontend.pynestml_frontend import generate_nest_target +from pynestml.codegeneration.nest_tools import NESTTools + + +class TestConvolve: + """ + """ + + @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") + def test_input_ports_in_loop(self): + input_path = os.path.join(os.path.realpath(os.path.join( + os.path.dirname(__file__), "resources", "ConvolveSpikingNoAttributes.nestml"))) + target_path = "target" + logging_level = "DEBUG" + module_name = "nestmlmodule" + suffix = "_nestml" + + generate_nest_target(input_path, + target_path=target_path, + logging_level=logging_level, + module_name=module_name, + suffix=suffix) + nest.ResetKernel() + nest.Install(module_name) + + neuron = nest.Create("convolve_spiking_no_attributes_neuron_nestml") + sg = nest.Create("spike_generator") + sg.spike_times = [10., 50.] + + nest.Connect(sg, neuron) + + mm = nest.Create("multimeter", {"record_from": ["x", "y"]}) + nest.Connect(mm, neuron) + + nest.Simulate(100.) + + events = mm.get("events") + + import matplotlib.pyplot as plt + plt.subplots() + plt.plot(events["times"], events["x"]) + plt.plot(events["times"], events["y"]) + plt.savefig("/tmp/test_convolve.png") + + assert events["x"][-1] == 2E-3 + assert events["y"][-1] == 2E-4 # XXX: this should be 2; see https://github.com/nest/nestml/pull/1050 diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 572614045..cd831da29 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -219,7 +219,7 @@ def test_valid_ode_vars_outside_init_block_detected(self): def test_invalid_convolve_correctly_defined(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) - assert any(["Type of 'convolve(g_ex,g_ex)' could not be derived!" 
in log_entry[2] for log_entry in Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)]) + assert any(["Actual type different from expected. Expected: 'pA', got: 'mV'!" in log_entry[2] for log_entry in Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)]) def test_valid_convolve_correctly_defined(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) From 91d23e5b79a3ad38f735a65a517414d6e223bb51 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 19 Dec 2024 00:11:44 +0100 Subject: [PATCH 25/68] add attributes to spiking input ports --- .../nestml_language_concepts.rst | 2 + models/neurons/aeif_cond_alpha_neuron.nestml | 8 +- models/neurons/aeif_cond_exp_neuron.nestml | 8 +- models/neurons/iaf_psc_delta_neuron.nestml | 4 +- ...t_port_in_equation_rhs_outside_convolve.py | 87 +++ pynestml/cocos/co_cos_manager.py | 9 + .../codegeneration/nest_code_generator.py | 10 + .../printers/ode_toolbox_variable_printer.py | 11 +- .../point_neuron/common/NeuronClass.jinja2 | 6 + .../point_neuron/common/NeuronHeader.jinja2 | 15 +- pynestml/meta_model/ast_model.py | 2 +- pynestml/utils/ast_utils.py | 6 +- pynestml/utils/messages.py | 6 + pynestml/utils/ode_toolbox_utils.py | 7 +- .../visitors/ast_function_call_visitor.py | 12 +- pynestml/visitors/ast_symbol_table_visitor.py | 8 +- pynestml/visitors/ast_variable_visitor.py | 22 +- pynestml/visitors/ast_visitor.py | 25 +- tests/nest_tests/nest_integration_test.py | 58 +- ...rt_on_equation_rhs_outside_convolve.nestml | 25 + ...t_on_equation_rhs_outside_convolve2.nestml | 25 + tests/test_cocos.py | 644 +++++++++--------- 22 files changed, 603 insertions(+), 397 deletions(-) create mode 100644 pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py create mode 100644 tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml create mode 
100644 tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index f2c658d05..9a2fe05df 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -895,6 +895,8 @@ To specify in which sequence the event handlers should be called in case multipl In this case, if a pre- and postsynaptic spike are received at the exact same time, the higher-priority ``post_spikes`` handler will be invoked first. +XXX: mention no_spike_input_port_in_equation_rhs_outside_convolve + Output ------ diff --git a/models/neurons/aeif_cond_alpha_neuron.nestml b/models/neurons/aeif_cond_alpha_neuron.nestml index 1fa04f66f..7b54b010b 100644 --- a/models/neurons/aeif_cond_alpha_neuron.nestml +++ b/models/neurons/aeif_cond_alpha_neuron.nestml @@ -61,8 +61,8 @@ model aeif_cond_alpha_neuron: # Add inlines to simplify the equation definition of V_m inline exp_arg real = (V_bounded - V_th) / Delta_T inline I_spike pA = g_L * Delta_T * exp(exp_arg) - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * (V_bounded - E_exc) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * (V_bounded - E_inh) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * (V_bounded - E_exc) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * (V_bounded - E_inh) V_m' = (-g_L * (V_bounded - E_L) + I_spike - I_syn_exc - I_syn_inh - w + I_e + I_stim) / C_m w' = (a * (V_bounded - E_L) - w) / tau_w @@ -105,8 +105,8 @@ model aeif_cond_alpha_neuron: PSConInit_I nS/ms = nS * e / tau_syn_inh input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index a5c1f5b93..a8facccee 100644 --- a/models/neurons/aeif_cond_exp_neuron.nestml 
+++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -98,7 +98,7 @@ model aeif_cond_exp_neuron: I_e pA = 0 pA input: - spike_in_port <- spike + spike_in_port <- spike(weight nS) I_stim pA <- continuous output: @@ -106,10 +106,10 @@ model aeif_cond_exp_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - if spike_in_port < 0: - g_syn_inh += spike_in_port * nS * s + if spike_in_port.weight < 0: + g_syn_inh += spike_in_port.weight else: - g_syn_exc += spike_in_port * nS * s + g_syn_exc += spike_in_port.weight update: if refr_t > 0 ms: diff --git a/models/neurons/iaf_psc_delta_neuron.nestml b/models/neurons/iaf_psc_delta_neuron.nestml index 0c5012fea..b17c852b3 100644 --- a/models/neurons/iaf_psc_delta_neuron.nestml +++ b/models/neurons/iaf_psc_delta_neuron.nestml @@ -47,7 +47,7 @@ model iaf_psc_delta_neuron: equations: kernel K_delta = delta(t) - V_m' = -(V_m - E_L) / tau_m + convolve(K_delta, spikes) * (mV / ms) + (I_e + I_stim) / C_m + V_m' = -(V_m - E_L) / tau_m + convolve(K_delta, spikes.weight) / s + (I_e + I_stim) / C_m # XXX: TODO: instead of the convolution, this should just read ``... + spikes.weight + ...``. This is a known issue (see https://github.com/nest/nestml/pull/1050). refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) parameters: @@ -64,7 +64,7 @@ model iaf_psc_delta_neuron: I_e pA = 0 pA input: - spikes <- spike + spikes <- spike(weight mV) I_stim pA <- continuous output: diff --git a/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py b/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py new file mode 100644 index 000000000..7cd86e4db --- /dev/null +++ b/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# +# co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ +from pynestml.cocos.co_co import CoCo +from pynestml.meta_model.ast_equations_block import ASTEquationsBlock +from pynestml.meta_model.ast_function_call import ASTFunctionCall +from pynestml.meta_model.ast_model import ASTModel +from pynestml.meta_model.ast_variable import ASTVariable +from pynestml.symbols.predefined_functions import PredefinedFunctions +from pynestml.utils.ast_utils import ASTUtils +from pynestml.utils.logger import Logger, LoggingLevel +from pynestml.utils.messages import Messages +from pynestml.visitors.ast_visitor import ASTVisitor + + +class CoCoNoSpikeInputPortInEquationRhsOutsideConvolve(CoCo): + """ + This coco checks that no spiking input port appears on the right-hand side of equations, outside a convolve() call. + + For instance, provided: + + .. code:: nestml + + input: + spikes_in_port <- spike + + The following is allowed: + + .. code:: nestml + + equations: + kernel K = delta(t) + x' = convolve(K, spikes_in_port) / s + + But the following is not: + + .. code:: nestml + + equations: + x' = spikes_in_port + + """ + + @classmethod + def check_co_co(cls, model): + """ + Ensures the coco for the handed over model. + :param model: a single model instance. + """ + model.accept(NoSpikeInputPortInEquationRhsOutsideConvolveVisitor()) + + +class NoSpikeInputPortInEquationRhsOutsideConvolveVisitor(ASTVisitor): + def visit_variable(self, node: ASTVariable): + model = ASTUtils.find_parent_node_by_type(node, ASTModel) + assert model is not None + inport = ASTUtils.get_input_port_by_name(model.get_input_blocks(), node.get_name()) + if inport and inport.is_spike(): + if ASTUtils.find_parent_node_by_type(node, ASTEquationsBlock): + func_call = ASTUtils.find_parent_node_by_type(node, ASTFunctionCall) + if func_call and func_call.callee_name == PredefinedFunctions.CONVOLVE: + # it appears inside a convolve() call -- everything is fine!
+ return + + # it's an input port inside the equations block, but not inside a convolve() call -- error + code, message = Messages.get_spike_input_port_in_equation_rhs_outside_convolve() + Logger.log_message(code=code, message=message, error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, node=node) + # NOTE(review): removed leftover debug breakpoint (``import pdb;pdb.set_trace()``) + return diff --git a/pynestml/cocos/co_cos_manager.py b/pynestml/cocos/co_cos_manager.py index 908030af6..f8bb564eb 100644 --- a/pynestml/cocos/co_cos_manager.py +++ b/pynestml/cocos/co_cos_manager.py @@ -52,6 +52,7 @@ from pynestml.cocos.co_co_no_kernels_except_in_convolve import CoCoNoKernelsExceptInConvolve from pynestml.cocos.co_co_no_nest_name_space_collision import CoCoNoNestNameSpaceCollision from pynestml.cocos.co_co_no_duplicate_compilation_unit_names import CoCoNoDuplicateCompilationUnitNames +from pynestml.cocos.co_co_no_spike_input_port_in_equation_rhs_outside_convolve import CoCoNoSpikeInputPortInEquationRhsOutsideConvolve from pynestml.cocos.co_co_odes_have_consistent_units import CoCoOdesHaveConsistentUnits from pynestml.cocos.co_co_ode_functions_have_consistent_units import CoCoOdeFunctionsHaveConsistentUnits from pynestml.cocos.co_co_on_receive_vectors_should_be_constant_size import CoCoOnReceiveVectorsShouldBeConstantSize @@ -102,6 +103,13 @@ def check_each_block_defined_at_most_once(cls, node: ASTModel): """ CoCoEachBlockDefinedAtMostOnce.check_co_co(node) + @classmethod + def check_no_spike_input_port_in_equation_rhs_outside_convolve(cls, node: ASTModel): + """ + :param node: a single model instance + """ + CoCoNoSpikeInputPortInEquationRhsOutsideConvolve.check_co_co(node) + @classmethod def check_input_ports_appear_only_in_equation_rhs_and_event_handlers(cls, node: ASTModel): """ @@ -500,5 +508,6 @@ def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): cls.check_timestep_func_legally_used(model) cls.check_co_co_no_attributes_on_continuous_port(model)
cls.check_input_ports_appear_only_in_equation_rhs_and_event_handlers(model) + cls.check_no_spike_input_port_in_equation_rhs_outside_convolve(model) Logger.set_current_node(None) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index be560e486..a041bedc8 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -899,6 +899,16 @@ def ode_toolbox_analysis(self, neuron: ASTModel, kernel_buffers: Mapping[ASTKern preserve_expressions=self.get_option("preserve_expressions"), simplify_expression=self.get_option("simplify_expression"), log_level=FrontendConfiguration.logging_level) + + for solver in solver_result: + if "propagators" in solver.keys(): + for k, v in solver["propagators"].items(): + solver["propagators"][k] = v.replace("__DOT__", ".") + + if "update_expressions" in solver.keys(): + for k, v in solver["update_expressions"].items(): + solver["update_expressions"][k] = v.replace("__DOT__", ".") + analytic_solver = None analytic_solvers = [x for x in solver_result if x["solver"] == "analytical"] assert len(analytic_solvers) <= 1, "More than one analytic solver not presently supported" diff --git a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py index 734eb530d..1e65c260f 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py @@ -20,7 +20,9 @@ # along with NEST. If not, see . 
from pynestml.codegeneration.printers.variable_printer import VariablePrinter +from pynestml.meta_model.ast_model import ASTModel from pynestml.meta_model.ast_variable import ASTVariable +from pynestml.utils.ast_utils import ASTUtils class ODEToolboxVariablePrinter(VariablePrinter): @@ -34,4 +36,11 @@ def print_variable(self, node: ASTVariable) -> str: :param node: the node to print :return: string representation """ - return node.get_complete_name().replace("$", "__DOLLAR") + s = node.get_complete_name().replace("$", "__DOLLAR") + + # input ports that appear here should be treated as trains of delta pulses + model = ASTUtils.find_parent_node_by_type(node, ASTModel) + if ASTUtils.get_input_port_by_name(model.get_input_blocks(), node.get_name()): + return "0.0" + + return s diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 613031bd9..46bb6c2e7 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1375,6 +1375,12 @@ void {{ printer.print(utils.get_variable_by_name(astnode, spike_update.get_variable().get_complete_name())) }} += 1.; {%- endfor %} + /** + * updates due to spiking input ports that appear on the right-hand side of equations (but not inside convolutions) + **/ + + // sorry, this is not supported yet! 
See https://github.com/nest/nestml/pull/1050 + /** * push back history **/ diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 8319173b2..11832b961 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -1116,16 +1116,13 @@ inline void {{neuronName}}::get_status(DictionaryDatum &__d) const {{neuron_parent_class}}::get_status( __d ); -{%- if (neuron.get_multiple_receptors())|length > 1 or neuron.is_multisynapse_spikes() %} + // input ports to NEST rport number mapping DictionaryDatum __receptor_type = new Dictionary(); - -{%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} -{%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} -( *__receptor_type )[ "{{spike_in_port_name.upper()}}" ] = {{ rport }}; -{%- endfor %} - - ( *__d )[ "receptor_types" ] = __receptor_type; -{%- endif %} +{%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} +{%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} + ( *__receptor_type )[ "{{spike_in_port_name.upper()}}" ] = {{ rport }}; +{%- endfor %} + (*__d)["receptor_types"] = __receptor_type; (*__d)[nest::names::recordables] = recordablesMap_.get_list(); {%- if uses_numeric_solver %} diff --git a/pynestml/meta_model/ast_model.py b/pynestml/meta_model/ast_model.py index 1d7f481ac..5dfadf7db 100644 --- a/pynestml/meta_model/ast_model.py +++ b/pynestml/meta_model/ast_model.py @@ -553,7 +553,7 @@ def get_input_ports(self) -> List[VariableSymbol]: symbols = self.get_scope().get_symbols_in_this_scope() ret = list() for symbol in symbols: - if isinstance(symbol, 
VariableSymbol) and symbol.block_type == BlockType.INPUT: + if isinstance(symbol, VariableSymbol) and symbol.block_type == BlockType.INPUT and not "." in symbol.name: ret.append(symbol) return ret diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 30349f1c7..d71535e4c 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2213,15 +2213,15 @@ def get_delta_factors_(cls, neuron: ASTModel, equations_block: ASTEquationsBlock if cls.is_delta_kernel(neuron.get_kernel_by_name(kernel.get_variable().get_name())): inport = conv_call.args[1].get_variable() expr_str = str(expr) - sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str, global_dict=odetoolbox.Shape._sympy_globals) + sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str.replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) sympy_expr = sympy.expand(sympy_expr) - sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(str(conv_call), global_dict=odetoolbox.Shape._sympy_globals) + sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(str(conv_call).replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) factor_str = [] for term in sympy.Add.make_args(sympy_expr): if term.find(sympy_conv_expr): factor_str.append(str(term.replace(sympy_conv_expr, 1))) factor_str = " + ".join(factor_str) - delta_factors[(var, inport)] = factor_str + delta_factors[(var, inport)] = factor_str.replace("__DOT__", ".") return delta_factors diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index b56dab630..9198e48a7 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -143,6 +143,7 @@ class MessageCode(Enum): INTEGRATE_ODES_ARG_HIGHER_ORDER = 117 SPIKING_INPUT_PORT_REFERENCE_MISSING_ATTRIBUTE = 119 CONVOLVE_NEEDS_BUFFER_PARAMETER = 120 + SPIKE_INPUT_PORT_IN_EQUATION_RHS_OUTSIDE_CONVOLVE = 121 class Messages: @@ -1402,3 +1403,8 @@ def get_spike_input_port_attribute_missing(cls, name: str): def 
get_vector_input_ports_should_be_of_constant_size(cls): message = "Vector input ports should be of constant size (this is a limitation of NEST Simulator)" return MessageCode.VECTOR_INPUT_PORTS_SHOULD_BE_OF_CONSTANT_SIZE, message + + @classmethod + def get_spike_input_port_in_equation_rhs_outside_convolve(cls): + message = "Spike input port appears in right-hand side of equation outside of convolve(). This is a known issue (see https://github.com/nest/nestml/pull/1050)." + return MessageCode.SPIKE_INPUT_PORT_IN_EQUATION_RHS_OUTSIDE_CONVOLVE, message diff --git a/pynestml/utils/ode_toolbox_utils.py b/pynestml/utils/ode_toolbox_utils.py index a4162a4d0..ec3fc68a6 100644 --- a/pynestml/utils/ode_toolbox_utils.py +++ b/pynestml/utils/ode_toolbox_utils.py @@ -38,7 +38,10 @@ def _rewrite_piecewise_into_ternary(cls, s: str) -> str: "Float": sympy.Float, "Function": sympy.Function} - sympy_expr = sympy.parsing.sympy_parser.parse_expr(s, global_dict=_sympy_globals_no_functions) + pattern = r'(? None: assert buffer_parameter.get_variable() is not None - if "." in buffer_parameter.get_variable().get_name(): + if "." 
in str(buffer_parameter): # the type of the convolve call is [the type of the attribute] * [s] - input_port = ASTUtils.get_input_port_by_name(buffer_parameter.get_name()) - assert input_port is not None - import pdb;pdb.set_trace() + # input_port = ASTUtils.get_input_port_by_name(buffer_parameter.get_variable().get_name()) + input_port = ASTUtils.get_input_port_by_name(ASTUtils.find_parent_node_by_type(node, ASTModel).get_input_blocks(), buffer_parameter.get_variable().get_name()) + node.type = input_port.get_parameters()[0].get_data_type().get_type_symbol() + return + # assert input_port is not None + # import pdb;pdb.set_trace() else: # convolve with a train of delta pulses --> the type of the convolve call is [1] node.type = RealTypeSymbol() diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 91f2ee796..21d1ec358 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -26,6 +26,7 @@ from pynestml.meta_model.ast_declaration import ASTDeclaration from pynestml.meta_model.ast_inline_expression import ASTInlineExpression from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_stmt import ASTStmt @@ -163,17 +164,14 @@ def visit_update_block(self, node): source_position=node.get_source_position()) node.get_scope().add_scope(scope) node.get_stmts_body().update_scope(scope) - return def endvisit_update_block(self, node=None): self.block_type_stack.pop() - return - def visit_on_receive_block(self, node): + def visit_on_receive_block(self, node: ASTOnReceiveBlock) -> None: """ Private method: Used to visit a single onReceive block and create the corresponding scope. :param node: an onReceive block object. 
- :type node: ASTOnReceiveBlock """ self.block_type_stack.push(BlockType.LOCAL) scope = Scope(scope_type=ScopeType.ON_RECEIVE, enclosing_scope=node.get_scope(), @@ -182,7 +180,7 @@ def visit_on_receive_block(self, node): node.get_stmts_body().update_scope(scope) node.get_input_port_variable().update_scope(scope) - def endvisit_on_receive_block(self, node=None): + def endvisit_on_receive_block(self, node: ASTOnReceiveBlock): self.block_type_stack.pop() def visit_on_condition_block(self, node): diff --git a/pynestml/visitors/ast_variable_visitor.py b/pynestml/visitors/ast_variable_visitor.py index 6e1030f89..faa80bd5b 100644 --- a/pynestml/visitors/ast_variable_visitor.py +++ b/pynestml/visitors/ast_variable_visitor.py @@ -19,12 +19,16 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . +from pynestml.meta_model.ast_equations_block import ASTEquationsBlock +from pynestml.meta_model.ast_model import ASTModel from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.symbols.error_type_symbol import ErrorTypeSymbol from pynestml.symbols.symbol import SymbolKind +from pynestml.symbols.unit_type_symbol import UnitTypeSymbol from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import LoggingLevel, Logger from pynestml.utils.messages import MessageCode +from pynestml.utils.unit_type import UnitType from pynestml.visitors.ast_visitor import ASTVisitor @@ -49,7 +53,23 @@ def visit_simple_expression(self, node: ASTSimpleExpression): # update the type of the variable according to its symbol type. 
if var_resolve is not None: - node.type = var_resolve.get_type_symbol() + inport = ASTUtils.get_input_port_by_name(ASTUtils.find_parent_node_by_type(node, ASTModel).get_input_blocks(), node.get_variable().get_name()) + if inport and inport.is_spike(): + # this variable represents a spiking input port + if ASTUtils.find_parent_node_by_type(node, ASTEquationsBlock): + # it appears in an equations block; units are [units of attribute / s] + from astropy import units as u + if inport.get_parameters(): + node.type = var_resolve.get_type_symbol() * UnitTypeSymbol(UnitType(name=str("1/s"), unit=1/u.si.s)) + else: + node.type = var_resolve.get_type_symbol() # the type of the base port is [1/s] + else: + # it appears in an equations block; units are [units of attribute] + node.type = var_resolve.get_type_symbol() + else: + # variable does not represent a spiking input port + node.type = var_resolve.get_type_symbol() + node.type.referenced_object = node return diff --git a/pynestml/visitors/ast_visitor.py b/pynestml/visitors/ast_visitor.py index 5a3fed5af..72b442e87 100644 --- a/pynestml/visitors/ast_visitor.py +++ b/pynestml/visitors/ast_visitor.py @@ -670,7 +670,7 @@ def endvisit_output_block(self, node): """ return - def endvisit_input_port(self, node): + def endvisit_input_port(self, node) -> None: """ Used to endvisit a single input port. :param node: a single input port. @@ -678,7 +678,7 @@ def endvisit_input_port(self, node): """ return - def endvisit_arithmetic_operator(self, node): + def endvisit_arithmetic_operator(self, node) -> None: """ Used to endvisit a single arithmetic operator. :param node: a single arithmetic operator. @@ -686,7 +686,7 @@ def endvisit_arithmetic_operator(self, node): """ return - def endvisit_parameter(self, node): + def endvisit_parameter(self, node) -> None: """ Used to endvisit a single parameter. :param node: a single parameter. 
@@ -694,11 +694,10 @@ def endvisit_parameter(self, node): """ return - def endvisit_stmt(self, node): + def endvisit_stmt(self, node) -> None: """ Used to endvisit a single stmt. :param node: a single stmt - :return: ASTStmt """ return @@ -709,15 +708,15 @@ def set_real_self(self, _visitor): def get_real_self(self): return self.real_self - def handle(self, _node): + def handle(self, _node: ASTNode) -> None: self.get_real_self().visit(_node) self.get_real_self().traverse(_node) self.get_real_self().endvisit(_node) - def visit(self, node: ASTNode): + def visit(self, node: ASTNode) -> None: """ Dispatcher for visitor pattern. - :param node: The ASTNode to visit + :param node: the node to visit """ if isinstance(node, ASTArithmeticOperator): self.visit_arithmetic_operator(node) @@ -841,11 +840,10 @@ def visit(self, node: ASTNode): return return - def traverse(self, node): + def traverse(self, node: ASTNode) -> None: """ Dispatcher for traverse method. - :param node: The ASTElement to visit - :type node: Inherited from ASTElement + :param node: the node to traverse """ if isinstance(node, ASTArithmeticOperator): self.traverse_arithmetic_operator(node) @@ -969,11 +967,10 @@ def traverse(self, node): return return - def endvisit(self, node): + def endvisit(self, node: ASTNode) -> None: """ Dispatcher for endvisit. 
- :param node: The ASTElement to endvisit - :type node: ASTElement or inherited + :param node: the node to end-visit """ if isinstance(node, ASTArithmeticOperator): self.endvisit_arithmetic_operator(node) diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index 747876910..f337a6988 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -51,30 +51,30 @@ def generate_all_models(self): codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" - # generate_nest_target(input_path=["models/neurons/hh_cond_exp_traub_neuron.nestml", - # "models/neurons/hh_psc_alpha_neuron.nestml", - # "models/neurons/iaf_cond_beta_neuron.nestml", - # "models/neurons/iaf_cond_alpha_neuron.nestml", - # "models/neurons/iaf_cond_exp_neuron.nestml", - # "models/neurons/iaf_psc_alpha_neuron.nestml", - # "models/neurons/iaf_psc_exp_neuron.nestml", - # "models/neurons/iaf_psc_delta_neuron.nestml"], - # target_path="/tmp/nestml-allmodels", - # logging_level="DEBUG", - # module_name="nestml_allmodels_module", - # suffix="_nestml", - # codegen_opts=codegen_opts) + generate_nest_target(input_path=[#"models/neurons/hh_cond_exp_traub_neuron.nestml", + # "models/neurons/hh_psc_alpha_neuron.nestml", + # "models/neurons/iaf_cond_beta_neuron.nestml", + # "models/neurons/iaf_cond_alpha_neuron.nestml", + # "models/neurons/iaf_cond_exp_neuron.nestml", + # "models/neurons/iaf_psc_alpha_neuron.nestml", + # "models/neurons/iaf_psc_exp_neuron.nestml", + "models/neurons/iaf_psc_delta_neuron.nestml"], + target_path="/tmp/nestml-allmodels", + logging_level="DEBUG", + module_name="nestml_allmodels_module", + suffix="_nestml", + codegen_opts=codegen_opts) # generate code with analytic solver disabled - alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} + # alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} - 
generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", - "models/neurons/aeif_cond_alpha_neuron.nestml"], - target_path="/tmp/nestml-alt-allmodels", - logging_level="DEBUG", - module_name="nestml_alt_allmodels_module", - suffix="_alt_nestml", - codegen_opts=alt_codegen_opts) + # generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", + # "models/neurons/aeif_cond_alpha_neuron.nestml"], + # target_path="/tmp/nestml-alt-allmodels", + # logging_level="DEBUG", + # module_name="nestml_alt_allmodels_module", + # suffix="_alt_nestml", + # codegen_opts=alt_codegen_opts) # # generate code using forward Euler integrator # alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} @@ -88,14 +88,14 @@ def generate_all_models(self): def test_nest_integration(self): self.generate_all_models() - # nest.Install("nestml_allmodels_module") - nest.Install("nestml_alt_allmodels_module") + nest.Install("nestml_allmodels_module") + # nest.Install("nestml_alt_allmodels_module") # nest.Install("nestml_alt_int_allmodels_module") - # self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") # self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") # self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") @@ -138,8 +138,8 @@ def 
test_nest_integration(self): # self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly # self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") - self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") + # self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + # self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") # -------------- # XXX: TODO! 
diff --git a/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml new file mode 100644 index 000000000..867e04d1c --- /dev/null +++ b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml @@ -0,0 +1,25 @@ +""" +spiking_input_port_on_equation_rhs_outside_convolve +################################################### + +Description ++++++++++++ + +Test model that references a spiking input port directly on the right-hand side of an ODE, outside of a convolve() call; model validation is expected to report an error for this. + +copyright + + +""" +model spiking_input_port_on_equation_rhs_outside_convolve_neuron: + state: + x real = 0. + + equations: + x' = spikes_in_port + + input: + spikes_in_port <- spike + + update: + integrate_odes() diff --git a/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml new file mode 100644 index 000000000..807c276f7 --- /dev/null +++ b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml @@ -0,0 +1,25 @@ +""" +spiking_input_port_on_equation_rhs_outside_convolve +################################################### + +Description ++++++++++++ + +Test model that references a spiking input port attribute (``spikes_in_port.weight``) on the right-hand side of an ODE, outside of a convolve() call; model validation is expected to report an error for this. + +copyright + + +""" +model spiking_input_port_on_equation_rhs_outside_convolve2_neuron: + state: + x real = 0.
+ + equations: + x' = spikes_in_port.weight + + input: + spikes_in_port <- spike(weight real) + + update: + integrate_odes() diff --git a/tests/test_cocos.py b/tests/test_cocos.py index cd831da29..33bdfb192 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -56,366 +56,374 @@ def test_invalid_element_defined_after_usage(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableDefinedAfterUsage.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_valid_element_defined_after_usage(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableDefinedAfterUsage.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_element_in_same_line(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoElementInSameLine.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_element_in_same_line(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoElementInSameLine.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_integrate_odes_called_if_equations_defined(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_integrate_odes_called_if_equations_defined(self): - model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_element_not_defined_in_scope(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableNotDefined.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 6 - - def test_valid_element_not_defined_in_scope(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableNotDefined.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_variable_with_same_name_as_unit(self): - Logger.set_logging_level(LoggingLevel.NO) - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableWithSameNameAsUnit.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 - - def test_invalid_variable_redeclaration(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableRedeclared.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_variable_redeclaration(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableRedeclared.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_each_block_unique(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 
'CoCoEachBlockUnique.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_valid_each_block_unique(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoEachBlockUnique.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_function_unique_and_defined(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionNotUnique.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 - - def test_valid_function_unique_and_defined(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionNotUnique.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_inline_expressions_have_rhs(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionHasNoRhs.nestml')) - assert model is None # parse error - - def test_valid_inline_expressions_have_rhs(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionHasNoRhs.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_inline_expression_has_several_lhs(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) - assert model is None # parse error - - def test_valid_inline_expression_has_several_lhs(self): - model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_no_values_assigned_to_input_ports(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoValueAssignedToInputPort.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_invalid_order_of_equations_correct(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNoOrderOfEquations.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_valid_order_of_equations_correct(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNoOrderOfEquations.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_numerator_of_unit_one(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoUnitNumeratorNotOne.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_valid_numerator_of_unit_one(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoUnitNumeratorNotOne.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_names_of_neurons_unique(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) - assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 3 - - def test_valid_names_of_neurons_unique(self): - self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)) == 0 - - def test_invalid_no_nest_collision(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNestNamespaceCollision.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_no_nest_collision(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNestNamespaceCollision.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_spiking_input_port_on_equation_rhs_outside_convolve(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')), 'spiking_input_port_on_equation_rhs_outside_convolve.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - def test_invalid_parameters_assigned_only_in_parameters_block(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_spiking_input_port_on_equation_rhs_outside_convolve2(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')), 'spiking_input_port_on_equation_rhs_outside_convolve2.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, 
LoggingLevel.ERROR)) > 0 - def test_valid_parameters_assigned_only_in_parameters_block(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoParameterAssignedOutsideBlock.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_element_defined_after_usage(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableDefinedAfterUsage.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_inline_expressions_assigned_only_in_declaration(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoAssignmentToInlineExpression.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_invalid_element_in_same_line(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoElementInSameLine.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_invalid_internals_assigned_only_in_internals_block(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInternalAssignedOutsideBlock.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_valid_element_in_same_line(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoElementInSameLine.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_internals_assigned_only_in_internals_block(self): - model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInternalAssignedOutsideBlock.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_integrate_odes_called_if_equations_defined(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_invalid_function_with_wrong_arg_number_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + # def test_valid_integrate_odes_called_if_equations_defined(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_function_with_wrong_arg_number_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_element_not_defined_in_scope(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableNotDefined.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 6 - def test_invalid_init_values_have_rhs_and_ode(self): - model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInitValuesWithoutOde.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 2 + # def test_valid_element_not_defined_in_scope(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableNotDefined.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_init_values_have_rhs_and_ode(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInitValuesWithoutOde.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 + # def test_variable_with_same_name_as_unit(self): + # Logger.set_logging_level(LoggingLevel.NO) + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableWithSameNameAsUnit.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 - def test_invalid_incorrect_return_stmt_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIncorrectReturnStatement.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + # def test_invalid_variable_redeclaration(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableRedeclared.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_valid_incorrect_return_stmt_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 
'valid')), 'CoCoIncorrectReturnStatement.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_variable_redeclaration(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableRedeclared.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_ode_vars_outside_init_block_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeVarNotInInitialValues.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_invalid_each_block_unique(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoEachBlockUnique.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - def test_valid_ode_vars_outside_init_block_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeVarNotInInitialValues.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_each_block_unique(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoEachBlockUnique.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_convolve_correctly_defined(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) - assert any(["Actual type different from expected. Expected: 'pA', got: 'mV'!" 
in log_entry[2] for log_entry in Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)]) + # def test_invalid_function_unique_and_defined(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionNotUnique.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 - def test_valid_convolve_correctly_defined(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_function_unique_and_defined(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionNotUnique.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_vector_in_non_vector_declaration_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInNonVectorDeclaration.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_invalid_inline_expressions_have_rhs(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionHasNoRhs.nestml')) + # assert model is None # parse error - def test_valid_vector_in_non_vector_declaration_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInNonVectorDeclaration.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_inline_expressions_have_rhs(self): + # 
model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionHasNoRhs.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_vector_parameter_declaration(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterDeclaration.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_invalid_inline_expression_has_several_lhs(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) + # assert model is None # parse error - def test_valid_vector_parameter_declaration(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterDeclaration.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_inline_expression_has_several_lhs(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_vector_parameter_type(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterType.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_invalid_no_values_assigned_to_input_ports(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoValueAssignedToInputPort.nestml')) + # 
assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - def test_valid_vector_parameter_type(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterType.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_order_of_equations_correct(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNoOrderOfEquations.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - def test_invalid_vector_parameter_size(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorDeclarationSize.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + # def test_valid_order_of_equations_correct(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNoOrderOfEquations.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_vector_parameter_size(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorDeclarationSize.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_numerator_of_unit_one(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoUnitNumeratorNotOne.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - def test_invalid_convolve_correctly_parameterized(self): - model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + # def test_valid_numerator_of_unit_one(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoUnitNumeratorNotOne.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_convolve_correctly_parameterized(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_names_of_neurons_unique(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 3 - def test_invalid_invariant_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInvariantNotBool.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_valid_names_of_neurons_unique(self): + # self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)) == 0 - def test_valid_invariant_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInvariantNotBool.nestml')) - assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_no_nest_collision(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNestNamespaceCollision.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_invalid_expression_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIllegalExpression.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + # def test_valid_no_nest_collision(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNestNamespaceCollision.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_expression_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIllegalExpression.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_parameters_assigned_only_in_parameters_block(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_invalid_compound_expression_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 10 + # def 
test_valid_parameters_assigned_only_in_parameters_block(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoParameterAssignedOutsideBlock.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_compound_expression_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_inline_expressions_assigned_only_in_declaration(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoAssignmentToInlineExpression.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_invalid_ode_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeIncorrectlyTyped.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + # def test_invalid_internals_assigned_only_in_internals_block(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInternalAssignedOutsideBlock.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_valid_ode_correctly_typed(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeCorrectlyTyped.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_internals_assigned_only_in_internals_block(self): + # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInternalAssignedOutsideBlock.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_output_block_defined_if_emit_call(self): - """test that an error is raised when the emit_spike() function is called by the neuron, but an output block is not defined""" - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + # def test_invalid_function_with_wrong_arg_number_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - def test_invalid_output_port_defined_if_emit_call(self): - """test that an error is raised when the emit_spike() function is called by the neuron, but a spiking output port is not defined""" - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall-2.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + # def test_valid_function_with_wrong_arg_number_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_output_port_defined_if_emit_call(self): - """test that no error is raised when the output block is missing, but not emit_spike() functions are called""" - 
model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_invalid_init_values_have_rhs_and_ode(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInitValuesWithoutOde.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 2 - def test_invalid_output_port_type_if_emit_call(self): - """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + # def test_valid_init_values_have_rhs_and_ode(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInitValuesWithoutOde.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 - def test_invalid_output_port_type_if_emit_call(self): - """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-2.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + # def test_invalid_incorrect_return_stmt_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 
'CoCoIncorrectReturnStatement.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 - def test_valid_output_port_type_if_emit_call(self): - """test that a warning is raised when the emit_spike() function is called with parameter types castable to the types defined in the spiking output port""" - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-3.nestml')) - assert model is not None - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 - - def test_invalid_output_port_type_continuous(self): - """test that an error is raised when a continous-time output port is defined as having attributes.""" - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeContinuous.nestml')) - assert model is None # should result in a parse error - - def test_valid_coco_kernel_type(self): - """ - Test the functionality of CoCoKernelType. - """ - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoKernelType.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_coco_kernel_type(self): - """ - Test the functionality of CoCoKernelType. - """ - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelType.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_invalid_coco_kernel_type_initial_values(self): - """ - Test the functionality of CoCoKernelType. 
- """ - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelTypeInitialValues.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 4 - - def test_valid_coco_state_variables_initialized(self): - """ - Test that the CoCo condition is applicable for all the variables in the state block initialized with a value - """ - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoStateVariablesInitialized.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_coco_state_variables_initialized(self): - """ - Test that the CoCo condition is applicable for all the variables in the state block not initialized - """ - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoStateVariablesInitialized.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_invalid_co_co_priorities_correctly_specified(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_co_co_priorities_correctly_specified(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_co_co_resolution_legally_used(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoResolutionLegallyUsed.nestml')) - assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_valid_co_co_resolution_legally_used(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoResolutionLegallyUsed.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_valid_co_co_vector_input_port(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInputPortSizeAndType.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # def test_valid_incorrect_return_stmt_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIncorrectReturnStatement.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_co_co_vector_input_port(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInputPortSizeAndType.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + # def test_invalid_ode_vars_outside_init_block_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeVarNotInInitialValues.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def 
test_valid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsLegal.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + # def test_valid_ode_vars_outside_init_block_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeVarNotInInitialValues.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_valid_co_co_on_receive_vectors_should_be_constant_size(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - def test_invalid_co_co_on_receive_vectors_should_be_constant_size(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, 
LoggingLevel.ERROR)) > 0 + # def test_invalid_convolve_correctly_defined(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) + # assert any(["Actual type different from expected. Expected: 'pA', got: 'mV'!" in log_entry[2] for log_entry in Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)]) + + # def test_valid_convolve_correctly_defined(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_vector_in_non_vector_declaration_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInNonVectorDeclaration.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_valid_vector_in_non_vector_declaration_detected(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInNonVectorDeclaration.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_vector_parameter_declaration(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterDeclaration.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_valid_vector_parameter_declaration(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterDeclaration.nestml')) + # assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_vector_parameter_type(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterType.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_valid_vector_parameter_type(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterType.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_vector_parameter_size(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorDeclarationSize.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + # def test_valid_vector_parameter_size(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorDeclarationSize.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_convolve_correctly_parameterized(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + # def test_valid_convolve_correctly_parameterized(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def 
test_invalid_invariant_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInvariantNotBool.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_valid_invariant_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInvariantNotBool.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_expression_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIllegalExpression.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + # def test_valid_expression_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIllegalExpression.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_compound_expression_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 10 + + # def test_valid_compound_expression_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_ode_correctly_typed(self): + # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeIncorrectlyTyped.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + # def test_valid_ode_correctly_typed(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeCorrectlyTyped.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_output_block_defined_if_emit_call(self): + # """test that an error is raised when the emit_spike() function is called by the neuron, but an output block is not defined""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + # def test_invalid_output_port_defined_if_emit_call(self): + # """test that an error is raised when the emit_spike() function is called by the neuron, but a spiking output port is not defined""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall-2.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + # def test_valid_output_port_defined_if_emit_call(self): + # """test that no error is raised when the output block is missing, but not emit_spike() functions are called""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_output_port_type_if_emit_call(self): + # """test that an error is raised when the 
emit_spike() function is called with different parameter types than are defined in the spiking output port""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + # def test_invalid_output_port_type_if_emit_call(self): + # """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-2.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + # def test_valid_output_port_type_if_emit_call(self): + # """test that a warning is raised when the emit_spike() function is called with parameter types castable to the types defined in the spiking output port""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-3.nestml')) + # assert model is not None + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 + + # def test_invalid_output_port_type_continuous(self): + # """test that an error is raised when a continous-time output port is defined as having attributes.""" + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeContinuous.nestml')) + # assert model is None # should result in a parse error + + # def test_valid_coco_kernel_type(self): + # """ + # Test the functionality of CoCoKernelType. 
+ # """ + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoKernelType.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_coco_kernel_type(self): + # """ + # Test the functionality of CoCoKernelType. + # """ + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelType.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_invalid_coco_kernel_type_initial_values(self): + # """ + # Test the functionality of CoCoKernelType. + # """ + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelTypeInitialValues.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 4 + + # def test_valid_coco_state_variables_initialized(self): + # """ + # Test that the CoCo condition is applicable for all the variables in the state block initialized with a value + # """ + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoStateVariablesInitialized.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_coco_state_variables_initialized(self): + # """ + # Test that the CoCo condition is applicable for all the variables in the state block not initialized + # """ + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoStateVariablesInitialized.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + # def test_invalid_co_co_priorities_correctly_specified(self): + # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_valid_co_co_priorities_correctly_specified(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_co_co_resolution_legally_used(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoResolutionLegallyUsed.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + # def test_valid_co_co_resolution_legally_used(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoResolutionLegallyUsed.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_valid_co_co_vector_input_port(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInputPortSizeAndType.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_co_co_vector_input_port(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInputPortSizeAndType.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): + # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + + # def test_valid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsLegal.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + # def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_valid_co_co_on_receive_vectors_should_be_constant_size(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + # def test_invalid_co_co_on_receive_vectors_should_be_constant_size(self): + # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) + # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 def 
_parse_and_validate_model(self, fname: str) -> Optional[str]: from pynestml.frontend.pynestml_frontend import generate_target From 31dc251d3d8f7982896a491f9896e8f6f0aaa324 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Fri, 20 Dec 2024 18:50:22 +0100 Subject: [PATCH 26/68] add attributes to spiking input ports --- models/neurons/aeif_cond_exp_neuron.nestml | 4 +- .../hh_cond_exp_destexhe_neuron.nestml | 8 +- .../neurons/hh_cond_exp_traub_neuron.nestml | 8 +- models/neurons/hh_moto_5ht_neuron.nestml | 8 +- models/neurons/hh_psc_alpha_neuron.nestml | 8 +- models/neurons/hill_tononi_neuron.nestml | 16 +- models/neurons/iaf_chxk_2008_neuron.nestml | 8 +- models/neurons/iaf_cond_beta_neuron.nestml | 8 +- models/neurons/iaf_cond_exp_neuron.nestml | 8 +- .../neurons/iaf_cond_exp_sfa_rr_neuron.nestml | 8 +- models/neurons/iaf_psc_alpha_neuron.nestml | 6 +- ...iaf_psc_delta_fixed_timestep_neuron.nestml | 4 +- models/neurons/iaf_psc_exp_dend_neuron.nestml | 6 +- models/neurons/iaf_psc_exp_htum_neuron.nestml | 6 +- models/neurons/izhikevich_neuron.nestml | 13 +- .../izhikevich_psc_alpha_neuron.nestml | 8 +- models/neurons/mat2_psc_exp_neuron.nestml | 6 +- models/neurons/terub_gpe_neuron.nestml | 8 +- models/neurons/terub_stn_neuron.nestml | 8 +- .../neurons/traub_cond_multisyn_neuron.nestml | 16 +- models/neurons/traub_psc_alpha_neuron.nestml | 8 +- models/neurons/wb_cond_exp_neuron.nestml | 8 +- models/neurons/wb_cond_multisyn_neuron.nestml | 12 +- ...t_port_in_equation_rhs_outside_convolve.py | 1 - pynestml/codegeneration/code_generator.py | 7 +- .../codegeneration/nest_code_generator.py | 11 +- .../printers/ode_toolbox_variable_printer.py | 3 +- .../point_neuron/common/NeuronClass.jinja2 | 2 +- .../point_neuron/common/NeuronHeader.jinja2 | 2 +- .../ApplySpikesFromBuffers.jinja2 | 4 +- pynestml/utils/ast_utils.py | 6 +- pynestml/utils/messages.py | 16 -- pynestml/utils/ode_toolbox_utils.py | 4 +- pynestml/visitors/ast_builder_visitor.py | 2 - 
.../visitors/ast_function_call_visitor.py | 41 ----- pynestml/visitors/ast_symbol_table_visitor.py | 18 --- pynestml/visitors/ast_variable_visitor.py | 2 +- tests/nest_tests/fir_filter_test.py | 2 +- tests/nest_tests/nest_integration_test.py | 142 +++++++++--------- tests/nest_tests/test_convolve.py | 4 +- 40 files changed, 193 insertions(+), 267 deletions(-) diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index a8facccee..abdac81ba 100644 --- a/models/neurons/aeif_cond_exp_neuron.nestml +++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -114,10 +114,10 @@ model aeif_cond_exp_neuron: update: if refr_t > 0 ms: # neuron is absolute refractory, do not evolve V_m - integrate_odes(w, refr_t) + integrate_odes(g_syn_exc, g_syn_inh, w, refr_t) else: # neuron not refractory - integrate_odes(w, V_m) + integrate_odes(g_syn_exc, g_syn_inh, w, V_m) onCondition(refr_t <= 0 ms and V_m >= V_peak): # threshold crossing diff --git a/models/neurons/hh_cond_exp_destexhe_neuron.nestml b/models/neurons/hh_cond_exp_destexhe_neuron.nestml index 51e46f88a..9717f7293 100644 --- a/models/neurons/hh_cond_exp_destexhe_neuron.nestml +++ b/models/neurons/hh_cond_exp_destexhe_neuron.nestml @@ -59,8 +59,8 @@ model hh_cond_exp_destexhe_neuron: inline I_M pA = g_M * Noninact_p * (V_m - E_K) inline I_noise pA = (g_noise_exc * (V_m - E_exc) + g_noise_inh * (V_m - E_inh)) - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * ( V_m - E_exc ) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * ( V_m - E_inh ) V_m' =( -I_Na - I_K - I_M - I_L - I_syn_exc - I_syn_inh + I_e + I_stim - I_noise) / C_m refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) @@ -126,8 +126,8 @@ model hh_cond_exp_destexhe_neuron: D_inh uS**2/ms = 2 * sigma_noise_inh**2 / tau_syn_inh input: - inh_spikes <- spike - exc_spikes <- spike + inh_spikes <- spike(weight nS) + exc_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/hh_cond_exp_traub_neuron.nestml b/models/neurons/hh_cond_exp_traub_neuron.nestml index 97b067501..48a3db12e 100644 --- a/models/neurons/hh_cond_exp_traub_neuron.nestml +++ b/models/neurons/hh_cond_exp_traub_neuron.nestml @@ -67,8 +67,8 @@ model hh_cond_exp_traub_neuron: inline I_Na pA = g_Na * Act_m * Act_m * Act_m * Act_h * ( V_m - E_Na ) inline I_K pA = g_K * Inact_n * Inact_n * Inact_n * Inact_n * ( V_m - E_K ) inline I_L pA = g_L * ( V_m - E_L ) - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * ( V_m - E_exc ) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * ( V_m - E_inh ) V_m' = ( -I_Na - I_K - I_L - I_syn_exc - I_syn_inh + I_e + I_stim ) / C_m refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) @@ -113,8 +113,8 @@ model hh_cond_exp_traub_neuron: I_e pA = 0 pA input: - inh_spikes <- spike - exc_spikes <- spike + inh_spikes <- spike(weight nS) + exc_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/hh_moto_5ht_neuron.nestml b/models/neurons/hh_moto_5ht_neuron.nestml index 8da40a08c..608f98563 100644 --- a/models/neurons/hh_moto_5ht_neuron.nestml +++ b/models/neurons/hh_moto_5ht_neuron.nestml @@ -47,8 +47,8 @@ model hh_moto_5ht_neuron: # synapses: alpha functions kernel I_syn_in = (e/tau_syn_in) * t * exp(-t/tau_syn_in) kernel I_syn_ex = (e/tau_syn_ex) * t * exp(-t/tau_syn_ex) - inline I_syn_exc pA = convolve(I_syn_ex, exc_spikes) * pA - inline I_syn_inh pA = convolve(I_syn_in, inh_spikes) * pA + inline I_syn_exc pA = convolve(I_syn_ex, exc_spikes.weight) + inline I_syn_inh pA = convolve(I_syn_in, inh_spikes.weight) inline E_Ca mV = ((1000.0 * R_const * T_current) / (2. 
* F_const)) * log10(Ca_out / Ca_in) * mV @@ -108,8 +108,8 @@ model hh_moto_5ht_neuron: alpha mmol/pA = 1E-5 mmol/pA input: - inh_spikes <- spike - exc_spikes <- spike + inh_spikes <- spike(weight pA) + exc_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/hh_psc_alpha_neuron.nestml b/models/neurons/hh_psc_alpha_neuron.nestml index 53c80c320..9b41f4180 100644 --- a/models/neurons/hh_psc_alpha_neuron.nestml +++ b/models/neurons/hh_psc_alpha_neuron.nestml @@ -58,8 +58,8 @@ model hh_psc_alpha_neuron: kernel K_syn_inh = (e/tau_syn_inh) * t * exp(-t/tau_syn_inh) kernel K_syn_exc = (e/tau_syn_exc) * t * exp(-t/tau_syn_exc) - inline I_syn_exc pA = convolve(K_syn_exc, exc_spikes) * pA - inline I_syn_inh pA = convolve(K_syn_inh, inh_spikes) * pA + inline I_syn_exc pA = convolve(K_syn_exc, exc_spikes.weight) + inline I_syn_inh pA = convolve(K_syn_inh, inh_spikes.weight) inline I_Na pA = g_Na * Act_m * Act_m * Act_m * Inact_h * ( V_m - E_Na ) inline I_K pA = g_K * Act_n * Act_n * Act_n * Act_n * ( V_m - E_K ) inline I_L pA = g_L * ( V_m - E_L ) @@ -108,8 +108,8 @@ model hh_psc_alpha_neuron: beta_h_init real = 1. / ( 1. + exp( -( V_m_init / mV + 35. ) / 10. 
) ) input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/hill_tononi_neuron.nestml b/models/neurons/hill_tononi_neuron.nestml index 6d7b0f22a..f2d11d465 100644 --- a/models/neurons/hill_tononi_neuron.nestml +++ b/models/neurons/hill_tononi_neuron.nestml @@ -57,10 +57,10 @@ model hill_tononi_neuron: # V_m ############# - inline I_syn_ampa pA = -convolve(g_AMPA, AMPA) * nS * ( V_m - AMPA_E_rev ) - inline I_syn_nmda pA = -convolve(g_NMDA, NMDA) * nS * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) - inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A) * nS * ( V_m - GABA_A_E_rev ) - inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B) * nS * ( V_m - GABA_B_E_rev ) + inline I_syn_ampa pA = -convolve(g_AMPA, AMPA.weight) * ( V_m - AMPA_E_rev ) + inline I_syn_nmda pA = -convolve(g_NMDA, NMDA.weight) * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) + inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A.weight) * ( V_m - GABA_A_E_rev ) + inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B.weight) * ( V_m - GABA_B_E_rev ) inline I_syn pA = I_syn_ampa + I_syn_nmda + I_syn_gaba_a + I_syn_gaba_b inline I_Na pA = -g_NaL * ( V_m - E_Na ) @@ -182,10 +182,10 @@ model hill_tononi_neuron: GABA_BInitialValue real = compute_synapse_constant( GABA_B_Tau_1, GABA_B_Tau_2, GABA_B_g_peak ) input: - AMPA <- spike - NMDA <- spike - GABA_A <- spike - GABA_B <- spike + AMPA <- spike(weight nS) + NMDA <- spike(weight nS) + GABA_A <- spike(weight nS) + GABA_B <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_chxk_2008_neuron.nestml b/models/neurons/iaf_chxk_2008_neuron.nestml index dafe98864..e790676b4 100644 --- a/models/neurons/iaf_chxk_2008_neuron.nestml +++ b/models/neurons/iaf_chxk_2008_neuron.nestml @@ -46,8 +46,8 @@ model iaf_chxk_2008_neuron: kernel g_exc = (e/tau_syn_exc) * t * 
exp(-t/tau_syn_exc) g_ahp'' = -2 * g_ahp' / tau_ahp - g_ahp / tau_ahp**2 - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * ( V_m - E_exc ) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * ( V_m - E_inh ) inline I_ahp pA = g_ahp * ( V_m - E_ahp ) inline I_leak pA = g_L * ( V_m - E_L ) @@ -80,8 +80,8 @@ model iaf_chxk_2008_neuron: PSConInit_AHP real = G_ahp * e / tau_ahp * (ms/nS) input: - inh_spikes <- spike - exc_spikes <- spike + inh_spikes <- spike(weight nS) + exc_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_beta_neuron.nestml b/models/neurons/iaf_cond_beta_neuron.nestml index ddea838ca..592551d34 100644 --- a/models/neurons/iaf_cond_beta_neuron.nestml +++ b/models/neurons/iaf_cond_beta_neuron.nestml @@ -63,8 +63,8 @@ model iaf_cond_beta_neuron: kernel g_ex' = g_ex$ - g_ex / tau_syn_rise_E, g_ex$' = -g_ex$ / tau_syn_decay_E - inline I_syn_exc pA = (F_E + convolve(g_ex, exc_spikes) * nS) * (V_m - E_ex) - inline I_syn_inh pA = (F_I + convolve(g_in, inh_spikes)* nS) * (V_m - E_in) + inline I_syn_exc pA = (F_E + convolve(g_ex, exc_spikes.weight)) * (V_m - E_ex) + inline I_syn_inh pA = (F_I + convolve(g_in, inh_spikes.weight)) * (V_m - E_in) inline I_leak pA = g_L * (V_m - E_L) # pA = nS * mV V_m' = (-I_leak - I_syn_exc - I_syn_inh + I_e + I_stim ) / C_m @@ -100,8 +100,8 @@ model iaf_cond_beta_neuron: g_I_const real = 1 / (exp(-t_peak_I / tau_syn_decay_I) - exp(-t_peak_I / tau_syn_rise_I)) input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_exp_neuron.nestml b/models/neurons/iaf_cond_exp_neuron.nestml index 018297ae5..0e0847369 100644 --- a/models/neurons/iaf_cond_exp_neuron.nestml +++ 
b/models/neurons/iaf_cond_exp_neuron.nestml @@ -35,8 +35,8 @@ model iaf_cond_exp_neuron: kernel g_inh = exp(-t/tau_syn_inh) # inputs from the inh conductance kernel g_exc = exp(-t/tau_syn_exc) # inputs from the exc conductance - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * ( V_m - E_exc ) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * ( V_m - E_inh ) inline I_leak pA = g_L * ( V_m - E_L ) V_m' = ( -I_leak - I_syn_exc - I_syn_inh + I_e + I_stim ) / C_m @@ -59,8 +59,8 @@ model iaf_cond_exp_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml b/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml index 0d5474065..24f830af7 100644 --- a/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml +++ b/models/neurons/iaf_cond_exp_sfa_rr_neuron.nestml @@ -48,8 +48,8 @@ model iaf_cond_exp_sfa_rr_neuron: g_sfa' = -g_sfa / tau_sfa g_rr' = -g_rr / tau_rr - inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) + inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * ( V_m - E_exc ) + inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * ( V_m - E_inh ) inline I_L pA = g_L * ( V_m - E_L ) inline I_sfa pA = g_sfa * ( V_m - E_sfa ) inline I_rr pA = g_rr * ( V_m - E_rr ) @@ -79,8 +79,8 @@ model iaf_cond_exp_sfa_rr_neuron: I_e pA = 0 pA input: - inh_spikes <- spike - exc_spikes <- spike + inh_spikes <- spike(weight nS) + exc_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_alpha_neuron.nestml b/models/neurons/iaf_psc_alpha_neuron.nestml index a90b689f8..4d9b4b077 100644 --- 
a/models/neurons/iaf_psc_alpha_neuron.nestml +++ b/models/neurons/iaf_psc_alpha_neuron.nestml @@ -65,7 +65,7 @@ model iaf_psc_alpha_neuron: equations: kernel I_kernel_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) kernel I_kernel_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) - inline I pA = convolve(I_kernel_exc, exc_spikes) * pA - convolve(I_kernel_inh, inh_spikes) * pA + I_e + I_stim + inline I pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) + I_e + I_stim V_m' = -(V_m - E_L) / tau_m + I / C_m refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) @@ -83,8 +83,8 @@ model iaf_psc_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml b/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml index 75a53489a..eef78c5ac 100644 --- a/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml +++ b/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml @@ -55,7 +55,7 @@ model iaf_psc_delta_fixed_timestep_neuron: refr_counts integer = steps(refr_T) input: - spikes <- spike + spikes <- spike(weight mV) I_stim pA <- continuous output: @@ -64,7 +64,7 @@ model iaf_psc_delta_fixed_timestep_neuron: onReceive(spikes): # discard spikes if neuron is refractory if refr_counter == 0: - V_m += spikes * mV * s + V_m += spikes.weight update: if refr_counter > 0: diff --git a/models/neurons/iaf_psc_exp_dend_neuron.nestml b/models/neurons/iaf_psc_exp_dend_neuron.nestml index 49f60ba02..0751bb414 100644 --- a/models/neurons/iaf_psc_exp_dend_neuron.nestml +++ b/models/neurons/iaf_psc_exp_dend_neuron.nestml @@ -45,7 +45,7 @@ model iaf_psc_exp_dend_neuron: equations: kernel 
I_kernel_inh = exp(-t/tau_syn_inh) kernel I_kernel_exc = exp(-t/tau_syn_exc) - inline I_syn pA = convolve(I_kernel_exc, exc_spikes) * pA - convolve(I_kernel_inh, inh_spikes) * pA + inline I_syn pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) V_m' = -(V_m - E_L) / tau_m + (I_syn + I_e + I_stim) / C_m refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) @@ -63,8 +63,8 @@ model iaf_psc_exp_dend_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/iaf_psc_exp_htum_neuron.nestml b/models/neurons/iaf_psc_exp_htum_neuron.nestml index 6b0cb7462..014f3d1ec 100644 --- a/models/neurons/iaf_psc_exp_htum_neuron.nestml +++ b/models/neurons/iaf_psc_exp_htum_neuron.nestml @@ -60,7 +60,7 @@ model iaf_psc_exp_htum_neuron: equations: kernel I_kernel_inh = exp(-t / tau_syn_inh) kernel I_kernel_exc = exp(-t / tau_syn_exc) - inline I_syn pA = convolve(I_kernel_exc, exc_spikes) * pA - convolve(I_kernel_inh, inh_spikes) * pA + inline I_syn pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) V_m' = -V_m / tau_m + (I_syn + I_e + I_stim) / C_m parameters: @@ -98,8 +98,8 @@ model iaf_psc_exp_htum_neuron: RefractoryCountsTot integer = steps(t_ref_tot) [[RefractoryCountsTot > 0]] input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/izhikevich_neuron.nestml b/models/neurons/izhikevich_neuron.nestml index 1b43e8155..923c53edd 100644 --- a/models/neurons/izhikevich_neuron.nestml +++ b/models/neurons/izhikevich_neuron.nestml @@ -58,21 +58,22 @@ model izhikevich_neuron: I_e pA = 0 pA input: - 
spikes <- spike + spikes <- spike(weight mV) I_stim pA <- continuous output: spike - update: - integrate_odes() - - # Add synaptic current - V_m += spikes * mV * s + onReceive(spikes): + # Add synaptic contribution + V_m += spikes.weight # lower bound of membrane potential V_m = max(V_min, V_m) + update: + integrate_odes() + onCondition(V_m >= V_th): # threshold crossing V_m = c diff --git a/models/neurons/izhikevich_psc_alpha_neuron.nestml b/models/neurons/izhikevich_psc_alpha_neuron.nestml index 1c3ff7b37..862ac547f 100644 --- a/models/neurons/izhikevich_psc_alpha_neuron.nestml +++ b/models/neurons/izhikevich_psc_alpha_neuron.nestml @@ -47,8 +47,8 @@ model izhikevich_psc_alpha_neuron: kernel K_syn_inh = (e/tau_syn_inh) * t * exp(-t/tau_syn_inh) kernel K_syn_exc = (e/tau_syn_exc) * t * exp(-t/tau_syn_exc) - inline I_syn_exc pA = convolve(K_syn_exc, exc_spikes) * pA - inline I_syn_inh pA = convolve(K_syn_inh, inh_spikes) * pA + inline I_syn_exc pA = convolve(K_syn_exc, exc_spikes.weight) + inline I_syn_inh pA = convolve(K_syn_inh, inh_spikes.weight) V_m' = (k * (V_m - V_r) * (V_m - V_t) - U_m + I_e + I_stim + I_syn_exc - I_syn_inh) / C_m U_m' = a * (b * (V_m - V_r) - U_m) @@ -72,8 +72,8 @@ model izhikevich_psc_alpha_neuron: I_e pA = 0 pA input: - inh_spikes <- spike - exc_spikes <- spike + inh_spikes <- spike(weight pA) + exc_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/mat2_psc_exp_neuron.nestml b/models/neurons/mat2_psc_exp_neuron.nestml index 43d85dce7..c7a0d40cf 100644 --- a/models/neurons/mat2_psc_exp_neuron.nestml +++ b/models/neurons/mat2_psc_exp_neuron.nestml @@ -54,7 +54,7 @@ model mat2_psc_exp_neuron: kernel I_kernel_inh = exp(-t/tau_syn_inh) kernel I_kernel_exc = exp(-t/tau_syn_exc) - inline I_syn pA = convolve(I_kernel_exc, exc_spikes) * pA - convolve(I_kernel_inh, inh_spikes) * pA + inline I_syn pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) V_m' = -(V_m - E_L) / 
tau_m + (I_syn + I_e + I_stim) / C_m refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) @@ -80,8 +80,8 @@ model mat2_psc_exp_neuron: P22th real = exp(-h / tau_2) input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/terub_gpe_neuron.nestml b/models/neurons/terub_gpe_neuron.nestml index f847dd546..3843b8809 100644 --- a/models/neurons/terub_gpe_neuron.nestml +++ b/models/neurons/terub_gpe_neuron.nestml @@ -78,8 +78,8 @@ model terub_gpe_neuron: inline g_k_Ca real = 15.0 #Report:15, Terman Rubin 2002: 20.0 inline g_k1 real = 30.0 - inline I_exc_mod real = -convolve(g_exc, exc_spikes) * nS * V_m - inline I_inh_mod real = convolve(g_inh, inh_spikes) * nS * (V_m-E_gg) + inline I_exc_mod real = -convolve(g_exc, exc_spikes.weight) * V_m + inline I_inh_mod real = convolve(g_inh, inh_spikes.weight) * (V_m - E_gg) inline tau_n real = g_tau_n_0 + g_tau_n_1 / (1. + exp(-(V_m-g_theta_n_tau)/g_sigma_n_tau)) inline tau_h real = g_tau_h_0 + g_tau_h_1 / (1.
+ exp(-(V_m-g_theta_h_tau)/g_sigma_h_tau)) @@ -138,8 +138,8 @@ model terub_gpe_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/terub_stn_neuron.nestml b/models/neurons/terub_stn_neuron.nestml index a480dbb69..669a27453 100644 --- a/models/neurons/terub_stn_neuron.nestml +++ b/models/neurons/terub_stn_neuron.nestml @@ -83,8 +83,8 @@ model terub_stn_neuron: inline k_Ca real = 22.5 inline k1 real = 15.0 - inline I_exc_mod pA = -convolve(g_exc, exc_spikes) * nS * V_m - inline I_inh_mod pA = convolve(g_inh, inh_spikes) * nS * (V_m - E_gs) + inline I_exc_mod pA = -convolve(g_exc, exc_spikes.weight) * V_m + inline I_inh_mod pA = convolve(g_inh, inh_spikes.weight) * (V_m - E_gs) inline tau_n ms = tau_n_0 + tau_n_1 / (1. + exp(-(V_m-theta_n_tau)/sigma_n_tau)) inline tau_h ms = tau_h_0 + tau_h_1 / (1. + exp(-(V_m-theta_h_tau)/sigma_h_tau)) @@ -144,8 +144,8 @@ model terub_stn_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/traub_cond_multisyn_neuron.nestml b/models/neurons/traub_cond_multisyn_neuron.nestml index 26b122b37..7bdf8436f 100644 --- a/models/neurons/traub_cond_multisyn_neuron.nestml +++ b/models/neurons/traub_cond_multisyn_neuron.nestml @@ -45,10 +45,10 @@ model traub_cond_multisyn_neuron: g_GABAB$ real = GABA_BInitialValue equations: - recordable inline I_syn_ampa pA = -convolve(g_AMPA, AMPA) * nS * ( V_m - AMPA_E_rev ) - recordable inline I_syn_nmda pA = -convolve(g_NMDA, NMDA) * nS * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) - recordable inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A) * nS * ( V_m - GABA_A_E_rev ) - recordable inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B) * nS * ( V_m - GABA_B_E_rev ) + recordable inline 
I_syn_ampa pA = -convolve(g_AMPA, AMPA.weight) * ( V_m - AMPA_E_rev ) + recordable inline I_syn_nmda pA = -convolve(g_NMDA, NMDA.weight) * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) + recordable inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A.weight) * ( V_m - GABA_A_E_rev ) + recordable inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B.weight) * ( V_m - GABA_B_E_rev ) recordable inline I_syn pA = I_syn_ampa + I_syn_nmda + I_syn_gaba_a + I_syn_gaba_b inline I_Na pA = g_Na * Act_m * Act_m * Act_m * Inact_h * ( V_m - E_Na ) @@ -140,10 +140,10 @@ model traub_cond_multisyn_neuron: beta_h_init real = 4.0 / (1.0 + exp(-(V_m / mV + 27.) / 5.)) input: - AMPA <- spike - NMDA <- spike - GABA_A <- spike - GABA_B <- spike + AMPA <- spike(weight nS) + NMDA <- spike(weight nS) + GABA_A <- spike(weight nS) + GABA_B <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/traub_psc_alpha_neuron.nestml b/models/neurons/traub_psc_alpha_neuron.nestml index 5b97d1bd9..bfcb8abc8 100644 --- a/models/neurons/traub_psc_alpha_neuron.nestml +++ b/models/neurons/traub_psc_alpha_neuron.nestml @@ -35,8 +35,8 @@ model traub_psc_alpha_neuron: kernel K_syn_inh = (e/tau_syn_inh) * t * exp(-t/tau_syn_inh) kernel K_syn_exc = (e/tau_syn_exc) * t * exp(-t/tau_syn_exc) - inline I_syn_exc pA = convolve(K_syn_exc, exc_spikes) * pA - inline I_syn_inh pA = convolve(K_syn_inh, inh_spikes) * pA + inline I_syn_exc pA = convolve(K_syn_exc, exc_spikes.weight) + inline I_syn_inh pA = convolve(K_syn_inh, inh_spikes.weight) inline I_Na pA = g_Na * Act_m * Act_m * Act_m * Inact_h * ( V_m - E_Na ) inline I_K pA = g_K * Act_n * Act_n * Act_n * Act_n * ( V_m - E_K ) inline I_L pA = g_L * ( V_m - E_L ) @@ -86,8 +86,8 @@ model traub_psc_alpha_neuron: beta_h_init real = 4.0 / (1.0 + exp(-(V_m / mV + 27.) 
/ 5.)) input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/models/neurons/wb_cond_exp_neuron.nestml b/models/neurons/wb_cond_exp_neuron.nestml index ae32c5238..9a4f36851 100644 --- a/models/neurons/wb_cond_exp_neuron.nestml +++ b/models/neurons/wb_cond_exp_neuron.nestml @@ -40,8 +40,8 @@ model wb_cond_exp_neuron: kernel g_inh = exp(-t / tau_syn_inh) kernel g_exc = exp(-t / tau_syn_exc) - recordable inline I_syn_exc pA = convolve(g_exc, exc_spikes) * nS * ( V_m - E_exc ) - recordable inline I_syn_inh pA = convolve(g_inh, inh_spikes) * nS * ( V_m - E_inh ) + recordable inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * ( V_m - E_exc ) + recordable inline I_syn_inh pA = convolve(g_inh, inh_spikes.weight) * ( V_m - E_inh ) inline I_Na pA = g_Na * _subexpr(V_m) * Inact_h * ( V_m - E_Na ) inline I_K pA = g_K * Act_n**4 * ( V_m - E_K ) @@ -78,8 +78,8 @@ model wb_cond_exp_neuron: beta_h_init 1/ms = 5.0 / (exp(-0.1 / mV * (E_L + 28.0 mV)) + 1.0) /ms input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight nS) + inh_spikes <- spike(weight nS) I_stim pA <- continuous output: diff --git a/models/neurons/wb_cond_multisyn_neuron.nestml b/models/neurons/wb_cond_multisyn_neuron.nestml index a9cf2ae67..da77b6516 100644 --- a/models/neurons/wb_cond_multisyn_neuron.nestml +++ b/models/neurons/wb_cond_multisyn_neuron.nestml @@ -46,9 +46,9 @@ model wb_cond_multisyn_neuron: equations: recordable inline I_syn_ampa pA = -convolve(g_AMPA, AMPA.weight) * ( V_m - AMPA_E_rev ) - recordable inline I_syn_nmda pA = -convolve(g_NMDA, NMDA) * nS * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) - recordable inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A) * nS * ( V_m - GABA_A_E_rev ) - recordable inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B) * nS * ( V_m - GABA_B_E_rev ) + recordable inline I_syn_nmda pA = -convolve(g_NMDA, 
NMDA.weight) * ( V_m - NMDA_E_rev ) / ( 1 + exp( ( NMDA_Vact - V_m ) / NMDA_Sact ) ) + recordable inline I_syn_gaba_a pA = -convolve(g_GABAA, GABA_A.weight) * ( V_m - GABA_A_E_rev ) + recordable inline I_syn_gaba_b pA = -convolve(g_GABAB, GABA_B.weight) * ( V_m - GABA_B_E_rev ) recordable inline I_syn pA = I_syn_ampa + I_syn_nmda + I_syn_gaba_a + I_syn_gaba_b inline I_Na pA = g_Na * Act_m_inf(V_m)**3 * Inact_h * ( V_m - E_Na ) @@ -129,9 +129,9 @@ model wb_cond_multisyn_neuron: input: AMPA <- spike(weight nS) - NMDA <- spike - GABA_A <- spike - GABA_B <- spike + NMDA <- spike(weight nS) + GABA_A <- spike(weight nS) + GABA_B <- spike(weight nS) I_stim pA <- continuous output: diff --git a/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py b/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py index 7cd86e4db..dad03e423 100644 --- a/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py +++ b/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py @@ -83,5 +83,4 @@ def visit_variable(self, node: ASTVariable): # it's an input port inside the equations block, but not inside a convolve() call -- error code, message = Messages.get_spike_input_port_in_equation_rhs_outside_convolve() Logger.log_message(code=code, message=message, error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, node=node) - import pdb;pdb.set_trace() return diff --git a/pynestml/codegeneration/code_generator.py b/pynestml/codegeneration/code_generator.py index 1c463efb4..939130980 100644 --- a/pynestml/codegeneration/code_generator.py +++ b/pynestml/codegeneration/code_generator.py @@ -169,11 +169,10 @@ def generate_synapses(self, synapses: Sequence[ASTModel]) -> None: from pynestml.frontend.frontend_configuration import FrontendConfiguration for synapse in synapses: - if Logger.logging_level == LoggingLevel.INFO: - print("Generating code for the synapse {}.".format(synapse.get_name())) 
self.generate_synapse_code(synapse) - code, message = Messages.get_code_generated(synapse.get_name(), FrontendConfiguration.get_target_path()) - Logger.log_message(synapse, code, message, synapse.get_source_position(), LoggingLevel.INFO) + if not Logger.has_errors(synapse): + code, message = Messages.get_code_generated(synapse.get_name(), FrontendConfiguration.get_target_path()) + Logger.log_message(synapse, code, message, synapse.get_source_position(), LoggingLevel.INFO) def generate_model_code(self, model_name: str, diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index a041bedc8..fd6d4531e 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -75,8 +75,8 @@ from pynestml.visitors.ast_equations_with_delay_vars_visitor import ASTEquationsWithDelayVarsVisitor from pynestml.visitors.ast_equations_with_vector_variables import ASTEquationsWithVectorVariablesVisitor from pynestml.visitors.ast_mark_delay_vars_visitor import ASTMarkDelayVarsVisitor -from pynestml.visitors.ast_set_vector_parameter_in_update_expressions import \ - ASTSetVectorParameterInUpdateExpressionVisitor +from pynestml.visitors.ast_parent_visitor import ASTParentVisitor +from pynestml.visitors.ast_set_vector_parameter_in_update_expressions import ASTSetVectorParameterInUpdateExpressionVisitor from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor from pynestml.visitors.ast_random_number_generator_visitor import ASTRandomNumberGeneratorVisitor @@ -372,6 +372,8 @@ def analyse_neuron(self, neuron: ASTModel) -> Tuple[Dict[str, ASTAssignment], Di neuron.accept(eqns_with_vector_vars_visitor) equations_with_vector_vars = eqns_with_vector_vars_visitor.equations + neuron.accept(ASTParentVisitor()) + analytic_solver, numeric_solver = self.ode_toolbox_analysis(neuron, kernel_buffers) self.analytic_solver[neuron.get_name()] = analytic_solver 
self.numeric_solver[neuron.get_name()] = numeric_solver @@ -995,13 +997,14 @@ def get_spike_update_expressions(self, neuron: ASTModel, kernel_buffers, solver_ expr = str(expr) if expr in ["0", "0.", "0.0"]: continue # skip adding the statement if we are only adding zero - assignment_str = kernel_spike_buf_name + " += " if "_is_post_port" in dir(spike_input_port.get_variable()) \ and spike_input_port.get_variable()._is_post_port: assignment_str += "1." else: - assignment_str += "(" + str(spike_input_port) + ")" + var_name = str(spike_input_port) + assignment_str += "(" + var_name + ")" + if not expr in ["1.", "1.0", "1"]: assignment_str += " * (" + expr + ")" diff --git a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py index 1e65c260f..8707fc7ea 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py @@ -40,7 +40,8 @@ def print_variable(self, node: ASTVariable) -> str: # input ports that appear here should be treated as trains of delta pulses model = ASTUtils.find_parent_node_by_type(node, ASTModel) - if ASTUtils.get_input_port_by_name(model.get_input_blocks(), node.get_name()): + inport = ASTUtils.get_input_port_by_name(model.get_input_blocks(), node.get_name()) + if inport and inport.is_spike(): return "0.0" return s diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 46bb6c2e7..86c60105e 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1136,7 +1136,7 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) #endif assert(e.get_delay_steps() > 0); - assert(e.get_rport() < {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}); + 
assert(e.get_rport() <= {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}); {%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} {%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 11832b961..0ae0522bf 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -1045,7 +1045,7 @@ inline nest_port_t {{neuronName}}::send_test_event(nest::Node& target, nest_rpor inline nest_port_t {{neuronName}}::handles_test_event(nest::SpikeEvent&, nest_port_t receptor_type) { {%- if (neuron.get_multiple_receptors()) | length > 1 or neuron.is_multisynapse_spikes() %} - if ( receptor_type < 1 or receptor_type >= MAX_SPIKE_RECEPTOR ) + if ( receptor_type < 1 or receptor_type > MAX_SPIKE_RECEPTOR ) { throw nest::UnknownReceptorType( receptor_type, get_name() ); } diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/ApplySpikesFromBuffers.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/ApplySpikesFromBuffers.jinja2 index 881257451..2ef3aa91a 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/ApplySpikesFromBuffers.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/ApplySpikesFromBuffers.jinja2 @@ -1,6 +1,6 @@ {% if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} {%- for spike_updates_for_port in spike_updates.values() %} -{%- for ast in spike_updates_for_port -%} +{%- for ast in spike_updates_for_port -%} {%- include "directives_cpp/Assignment.jinja2" %} -{%- endfor %} +{%- endfor %} {%- endfor %} diff --git 
a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index d71535e4c..d0157c484 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -1391,7 +1391,7 @@ def construct_kernel_X_spike_buf_name(cls, kernel_var_name: str, spike_input_por and the input port is .. code-block:: - pre_spikes nS <- spike + pre_spikes <- spike then the constructed variable will be 'I_kernel__X__pre_pikes' """ @@ -1435,7 +1435,7 @@ def replace_kernel_var(node): and node.get_variable().get_name() == variable_name_to_replace: var_order = node.get_variable().get_differential_order() new_variable_name = cls.construct_kernel_X_spike_buf_name( - kernel_var.get_name(), spike_buf, var_order - 1, diff_order_symbol="'") + kernel_var.get_name(), spike_buf, var_order - 1, diff_order_symbol="'", attribute=spike_buf.get_variable().get_attribute()) new_variable = ASTVariable(new_variable_name, var_order) new_variable.set_source_position(node.get_variable().get_source_position()) node.set_variable(new_variable) @@ -2164,7 +2164,7 @@ def transform_ode_and_kernels_to_json(cls, model: ASTModel, parameters_blocks: S # f(t) = ...; 1 for kernel ODE f'(t) = ...; 2 for f''(t) = ... 
and so on) for order in range(kernel_order): iv_sym_name_ode_toolbox = cls.construct_kernel_X_spike_buf_name( - kernel_var.get_name(), spike_input_port, order, diff_order_symbol="'") + kernel_var.get_name(), spike_input_port, order, diff_order_symbol="'", attribute=attribute) symbol_name_ = kernel_var.get_name() + "'" * order symbol = equations_block.get_scope().resolve_to_symbol(symbol_name_, SymbolKind.VARIABLE) assert symbol is not None, "Could not find initial value for variable " + symbol_name_ diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index 9198e48a7..a756f8d8c 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -84,7 +84,6 @@ class MessageCode(Enum): TYPE_MISMATCH = 50 NEURON_SOLVED_BY_GSL = 52 NO_UNIT = 53 - NOT_NEUROSCIENCE_UNIT = 54 INTERNAL_WARNING = 55 OPERATION_NOT_DEFINED = 56 INPUT_PATH_NOT_FOUND = 58 @@ -909,21 +908,6 @@ def get_unit_does_not_exist(cls, name): message = 'Unit does not exist (%s).' % name return MessageCode.NO_UNIT, message - @classmethod - def get_not_neuroscience_unit_used(cls, name): - """ - Indicates that a non-neuroscientific unit, e.g., kg, has been used. Those units can not be converted to - a corresponding representation in the simulation and are therefore represented by the factor 1. - :param name: the name of the variable - :type name: str - :return: a nes code,message tuple - :rtype: (MessageCode,str) - """ - assert (name is not None and isinstance(name, str)), \ - '(PyNestML.Utils.Message) Not a string provided (%s)!' % type(name) - message = 'Not convertible unit \'%s\' used, 1 assumed as factor!' 
% name - return MessageCode.NOT_NEUROSCIENCE_UNIT, message - @classmethod def get_ode_needs_consistent_units(cls, name, differential_order, lhs_type, rhs_type): assert (name is not None and isinstance(name, str)), \ diff --git a/pynestml/utils/ode_toolbox_utils.py b/pynestml/utils/ode_toolbox_utils.py index ec3fc68a6..fabc72fca 100644 --- a/pynestml/utils/ode_toolbox_utils.py +++ b/pynestml/utils/ode_toolbox_utils.py @@ -44,7 +44,7 @@ def _rewrite_piecewise_into_ternary(cls, s: str) -> str: sympy_expr = sympy.parsing.sympy_parser.parse_expr(re.sub(pattern, '__DOT__', s), global_dict=_sympy_globals_no_functions) class MySympyPrinter(StrPrinter): - """Resulting expressions will be parsed by NESTML parser. R + """Resulting expressions will be parsed by NESTML parser. """ def _print_Function(self, expr): if expr.func.__name__ == "Piecewise": @@ -58,6 +58,6 @@ def _print_Function(self, expr): return super()._print_Function(expr) - s_reformatted = MySympyPrinter().doprint(sympy_expr).replace("__DOT__", ".") + s_reformatted = MySympyPrinter().doprint(sympy_expr) return s_reformatted diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index 025ae7db7..c08588df2 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -698,8 +698,6 @@ def visitStmt(self, ctx): def visitOnReceiveBlock(self, ctx): input_port_variable = self.visit(ctx.inputPortVariable) block = self.visit(ctx.stmtsBody()) if ctx.stmtsBody() is not None else None - print("ctx.inputPortVariable = " + str(input_port_variable)) - print("ctx.inputPortVariable = " + str(type(input_port_variable))) const_parameters = {} for el in ctx.constParameter(): const_parameters[el.name.text] = el.value.text diff --git a/pynestml/visitors/ast_function_call_visitor.py b/pynestml/visitors/ast_function_call_visitor.py index a2aac8eb1..0dc2b0be1 100644 --- a/pynestml/visitors/ast_function_call_visitor.py +++ 
b/pynestml/visitors/ast_function_call_visitor.py @@ -60,60 +60,19 @@ def visit_simple_expression(self, node: ASTSimpleExpression) -> None: # return type of the convolve function is the type of the second parameter (the spike input buffer) if function_name == PredefinedFunctions.CONVOLVE: buffer_parameter = node.get_function_call().get_args()[1] - print("var === " + str(buffer_parameter)) assert buffer_parameter.get_variable() is not None if "." in str(buffer_parameter): # the type of the convolve call is [the type of the attribute] * [s] - # input_port = ASTUtils.get_input_port_by_name(buffer_parameter.get_variable().get_name()) input_port = ASTUtils.get_input_port_by_name(ASTUtils.find_parent_node_by_type(node, ASTModel).get_input_blocks(), buffer_parameter.get_variable().get_name()) node.type = input_port.get_parameters()[0].get_data_type().get_type_symbol() return - # assert input_port is not None - # import pdb;pdb.set_trace() else: # convolve with a train of delta pulses --> the type of the convolve call is [1] node.type = RealTypeSymbol() return - # if not buffer_parameter.get_variable().get_attribute(): - # # an attribute is missing for the spiking input port - # XXX: attributes only required for ports that have them, but don't have access to the ASTModel object, so can't run ASTUtils.get_input_port_by_name!!! - # import pdb;pdb.set_trace() - # code, message = Messages.get_spike_input_port_attribute_missing(buffer_parameter.get_variable().get_name()) - # Logger.log_message(code=code, message=message, error_position=node.get_source_position(), - # log_level=LoggingLevel.ERROR) - # node.type = ErrorTypeSymbol() - # return - - # buffer_name = buffer_parameter.get_variable().get_name() + "." 
+ str(buffer_parameter.get_variable().get_attribute()) - # buffer_symbol_resolve = scope.resolve_to_symbol(buffer_name, SymbolKind.VARIABLE) - - # if not buffer_symbol_resolve: - # buffer_symbol_resolve = scope.resolve_to_symbol(buffer_parameter.get_variable().get_name(), SymbolKind.VARIABLE) - - # import pdb;pdb.set_trace() - - # if buffer_symbol_resolve is None: - # # the name of the input port is used without attributes - # print("ASSIGN TO CONVOLVE CLAL?????????") - - # if ASTUtils.find_parent_node_by_type(node, ASTEquationsBlock): - # # if this port name appears inside an equations block, it is interpreted as a train of delta pulses with units [1/s]; after applying the convolve() function, a unit of [1] remains - # from astropy import units as u - # from pynestml.utils.unit_type import UnitType - # node.type = RealTypeSymbol() #UnitTypeSymbol(UnitType(name=str("1/s"), unit=1/u.si.s)) - # print("ASSIGNED REAL TO CONVOLVE CLAL") - # return - - # # if this port name appears elsewhere, it cannot be interpreted by itself without an attribute - # node.type = ErrorTypeSymbol() - # return - - # node.type = buffer_symbol_resolve.get_type_symbol() - # return - # check if this is a delay variable symbol = ASTUtils.get_delay_variable_symbol(node.get_function_call()) if method_symbol is None and symbol is not None: diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 21d1ec358..629bac41b 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -466,21 +466,6 @@ def visit_simple_expression(self, node): node.get_variable().get_vector_parameter().update_scope(node.get_scope()) def visit_variable(self, node: ASTVariable): - # if node.attribute: - # ast_model = ASTUtils.find_parent_node_by_type(node, ASTModel) - # assert ast_model - # input_port = ASTUtils.get_input_port_by_name(ast_model.get_input_blocks(), node.get_name()) - # assert input_port - - # for 
parameter in input_port.get_parameters(): - # if parameter.get_name() == node.attribute: - # actual_type = parameter.get_data_type() - # node.data_type = actual_type - # node.set_type_symbol(actual_type) - - # assert isinstance(node.get_parent(), ASTSimpleExpression) - # node.get_parent().type = actual_type - # print("reassigned data type of " + str(node) + " to " + str(node.data_type)) if node.has_vector_parameter(): node.get_vector_parameter().update_scope(node.get_scope()) @@ -494,11 +479,8 @@ def visit_variable(self, node: ASTVariable): # symbol.set_comment(node.get_comment()) # node.get_scope().add_symbol(symbol) - print("in symboltablevisitor : variable is " + str(node.get_vector_parameter())) - if isinstance(node.get_vector_parameter(), ASTParameter): # vector parameter is a declaration - print("in symboltablevisitor : \tvector parameter is a declaration: adding " + node.get_vector_parameter().get_name()) symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_vector_parameter().get_name(), diff --git a/pynestml/visitors/ast_variable_visitor.py b/pynestml/visitors/ast_variable_visitor.py index faa80bd5b..241d672c2 100644 --- a/pynestml/visitors/ast_variable_visitor.py +++ b/pynestml/visitors/ast_variable_visitor.py @@ -60,7 +60,7 @@ def visit_simple_expression(self, node: ASTSimpleExpression): # it appears in an equations block; units are [units of attribute / s] from astropy import units as u if inport.get_parameters(): - node.type = var_resolve.get_type_symbol() * UnitTypeSymbol(UnitType(name=str("1/s"), unit=1/u.si.s)) + node.type = var_resolve.get_type_symbol() * UnitTypeSymbol(UnitType(name=str("1/s"), unit=1 / u.si.s)) else: node.type = var_resolve.get_type_symbol() # the type of the base port is [1/s] else: diff --git a/tests/nest_tests/fir_filter_test.py b/tests/nest_tests/fir_filter_test.py index 220aceaa9..a7ad1bad8 100644 --- a/tests/nest_tests/fir_filter_test.py +++ b/tests/nest_tests/fir_filter_test.py @@ -111,7 
+111,7 @@ def test_fir_filter(self): spike_times = nest.GetStatus(sr, keys="events")[0]["times"] # Scipy filtering - spikes, bin_edges = np.histogram(spike_times, np.arange(0, t_sim, resolution)) + spikes, bin_edges = np.histogram(spike_times + resolution * 1.5, np.arange(0, t_sim, resolution)) output = scipy.signal.lfilter(h, 1, spikes) # Plots diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index f337a6988..ca4a7224d 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -51,13 +51,13 @@ def generate_all_models(self): codegen_opts["neuron_parent_class"] = "StructuralPlasticityNode" codegen_opts["neuron_parent_class_include"] = "structural_plasticity_node.h" - generate_nest_target(input_path=[#"models/neurons/hh_cond_exp_traub_neuron.nestml", - # "models/neurons/hh_psc_alpha_neuron.nestml", - # "models/neurons/iaf_cond_beta_neuron.nestml", - # "models/neurons/iaf_cond_alpha_neuron.nestml", - # "models/neurons/iaf_cond_exp_neuron.nestml", - # "models/neurons/iaf_psc_alpha_neuron.nestml", - # "models/neurons/iaf_psc_exp_neuron.nestml", + generate_nest_target(input_path=["models/neurons/hh_cond_exp_traub_neuron.nestml", + "models/neurons/hh_psc_alpha_neuron.nestml", + "models/neurons/iaf_cond_beta_neuron.nestml", + "models/neurons/iaf_cond_alpha_neuron.nestml", + "models/neurons/iaf_cond_exp_neuron.nestml", + "models/neurons/iaf_psc_alpha_neuron.nestml", + "models/neurons/iaf_psc_exp_neuron.nestml", "models/neurons/iaf_psc_delta_neuron.nestml"], target_path="/tmp/nestml-allmodels", logging_level="DEBUG", @@ -66,80 +66,80 @@ def generate_all_models(self): codegen_opts=codegen_opts) # generate code with analytic solver disabled - # alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} - - # generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", - # "models/neurons/aeif_cond_alpha_neuron.nestml"], - # 
target_path="/tmp/nestml-alt-allmodels", - # logging_level="DEBUG", - # module_name="nestml_alt_allmodels_module", - # suffix="_alt_nestml", - # codegen_opts=alt_codegen_opts) - - # # generate code using forward Euler integrator - # alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} - - # generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", - # target_path="/tmp/nestml-alt-int-allmodels", - # logging_level="DEBUG", - # module_name="nestml_alt_int_allmodels_module", - # suffix="_alt_int_nestml", - # codegen_opts=alt_codegen_opts) + alt_codegen_opts = {**codegen_opts, **{"solver": "numeric"}} + + generate_nest_target(input_path=["models/neurons/aeif_cond_exp_neuron.nestml", + "models/neurons/aeif_cond_alpha_neuron.nestml"], + target_path="/tmp/nestml-alt-allmodels", + logging_level="DEBUG", + module_name="nestml_alt_allmodels_module", + suffix="_alt_nestml", + codegen_opts=alt_codegen_opts) + + # generate code using forward Euler integrator + alt_codegen_opts = {**codegen_opts, **{"numeric_solver": "forward-Euler"}} + + generate_nest_target(input_path="models/neurons/izhikevich_neuron.nestml", + target_path="/tmp/nestml-alt-int-allmodels", + logging_level="DEBUG", + module_name="nestml_alt_int_allmodels_module", + suffix="_alt_int_nestml", + codegen_opts=alt_codegen_opts) def test_nest_integration(self): self.generate_all_models() nest.Install("nestml_allmodels_module") - # nest.Install("nestml_alt_allmodels_module") - # nest.Install("nestml_alt_int_allmodels_module") + nest.Install("nestml_alt_allmodels_module") + nest.Install("nestml_alt_int_allmodels_module") self._test_model_equivalence_subthreshold("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") self._test_model_equivalence_spiking("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") self._test_model_equivalence_fI_curve("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") self._test_model_equivalence_curr_inj("iaf_psc_delta", "iaf_psc_delta_neuron_nestml") - # 
self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") - - # self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") - - # self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - # self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL - # self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - # self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") - - # self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - # self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - # self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") - - # iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} - # iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) - # self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, 
nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - # self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) - # self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") - - # self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") - # self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") - # self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") - - # nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - # self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - # self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - # self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) - - # nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST - # self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - # self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) - - # self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. 
Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - # self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") - - # self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly - # self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") + self._test_model_equivalence_subthreshold("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_spiking("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_psc_exp", "iaf_psc_exp_neuron_nestml") + + self._test_model_equivalence_subthreshold("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_spiking("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + self._test_model_equivalence_curr_inj("iaf_psc_alpha", "iaf_psc_alpha_neuron_nestml") + + self._test_model_equivalence_subthreshold("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + self._test_model_equivalence_spiking("iaf_cond_exp", "iaf_cond_exp_neuron_nestml", tolerance=1E-6) # large tolerance because NESTML integrates PSCs precisely whereas NEST uses GSL + self._test_model_equivalence_fI_curve("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + 
self._test_model_equivalence_curr_inj("iaf_cond_exp", "iaf_cond_exp_neuron_nestml") + + self._test_model_equivalence_subthreshold("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + self._test_model_equivalence_spiking("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + self._test_model_equivalence_fI_curve("iaf_cond_alpha", "iaf_cond_alpha_neuron_nestml") + + iaf_cond_beta_nest_model_parameters = {"tau_rise_ex": 2., "tau_decay_ex": 10.} + iaf_cond_beta_nestml_model_parameters = {"tau_syn_rise_E": 2., "tau_syn_decay_E": 10.} # XXX: TODO: does not work yet when tau_rise = tau_fall (numerical singularity occurs in the propagators) + self._test_model_equivalence_subthreshold("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + self._test_model_equivalence_spiking("iaf_cond_beta", "iaf_cond_beta_neuron_nestml", nest_model_parameters=iaf_cond_beta_nest_model_parameters, nestml_model_parameters=iaf_cond_beta_nestml_model_parameters) + self._test_model_equivalence_fI_curve("iaf_cond_beta", "iaf_cond_beta_neuron_nestml") + + self._test_model_equivalence_subthreshold("izhikevich", "izhikevich_neuron_alt_int_nestml") + self._test_model_equivalence_spiking("izhikevich", "izhikevich_neuron_alt_int_nestml") + self._test_model_equivalence_fI_curve("izhikevich", "izhikevich_neuron_alt_int_nestml") + + nestml_hh_psc_alpha_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + self._test_model_equivalence_subthreshold("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + self._test_model_equivalence_spiking("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", tolerance=1E-5, nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + self._test_model_equivalence_fI_curve("hh_psc_alpha", "hh_psc_alpha_neuron_nestml", 
nestml_model_parameters=nestml_hh_psc_alpha_model_parameters) + + nestml_hh_cond_exp_traub_model_parameters = {"gsl_abs_error_tol": 1E-3, "gsl_rel_error_tol": 0.} # matching the defaults in NEST + self._test_model_equivalence_subthreshold("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + self._test_model_equivalence_fI_curve("hh_cond_exp_traub", "hh_cond_exp_traub_neuron_nestml", nestml_model_parameters=nestml_hh_cond_exp_traub_model_parameters) + + self._test_model_equivalence_subthreshold("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + self._test_model_equivalence_fI_curve("aeif_cond_exp", "aeif_cond_exp_neuron_alt_nestml") + + self._test_model_equivalence_subthreshold("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml", kernel_opts={"resolution": .01}) # needs resolution 0.01 because the NEST model overrides this internally. Subthreshold only because threshold detection is inside the while...gsl_odeiv_evolve_apply() loop in NEST but outside the loop (strictly after gsl_odeiv_evolve_apply()) in NESTML, causing spike times to differ slightly + self._test_model_equivalence_fI_curve("aeif_cond_alpha", "aeif_cond_alpha_neuron_alt_nestml") # -------------- # XXX: TODO! diff --git a/tests/nest_tests/test_convolve.py b/tests/nest_tests/test_convolve.py index b6b44ffab..1375353d8 100644 --- a/tests/nest_tests/test_convolve.py +++ b/tests/nest_tests/test_convolve.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# test_input_ports.py +# test_convolve.py # # This file is part of NEST. 
# @@ -34,7 +34,7 @@ class TestConvolve: @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), reason="This test does not support NEST 2") - def test_input_ports_in_loop(self): + def test_convolve(self): input_path = os.path.join(os.path.realpath(os.path.join( os.path.dirname(__file__), "resources", "ConvolveSpikingNoAttributes.nestml"))) target_path = "target" From b0e16352165e425d207d0e56793a3533292f2bfb Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Fri, 20 Dec 2024 19:28:30 +0100 Subject: [PATCH 27/68] add attributes to spiking input ports --- .../nestml_language_concepts.rst | 28 +++++++++++-------- ...receive_vectors_should_be_constant_size.py | 2 +- pynestml/utils/ode_toolbox_utils.py | 2 +- tests/nest_tests/non_linear_dendrite_test.py | 2 +- .../BiexponentialPostSynapticResponse.nestml | 16 +++++------ .../ConvolveSpikingNoAttributes.nestml | 2 -- .../resources/add_spikes_to_ode.nestml | 3 -- .../iaf_psc_exp_nonlineardendrite.nestml | 10 +++---- .../resources/input_ports_in_loop.nestml | 1 + tests/nest_tests/test_add_spikes_to_ode.py | 3 +- .../test_biexponential_synapse_kernel.py | 13 +++++---- tests/nest_tests/test_convolve.py | 4 +-- 12 files changed, 43 insertions(+), 43 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 9a2fe05df..68a2d1e38 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -846,31 +846,35 @@ Continuous-time input ports receive a time-varying signal :math:`f(t)` (possibly Spiking input ports ~~~~~~~~~~~~~~~~~~~ -The incoming spikes at the spiking input port are modelled as Dirac delta functions. The Dirac Delta function :math:`\delta(x)` is an impulsive function defined as zero at every value of :math:`x`, except for :math:`x=u`, and whose integral is equal to 1: +The incoming spikes at the spiking input port are modelled as Dirac delta functions. 
The Dirac delta function :math:`\delta(x)` is an impulsive function defined as zero at every value of :math:`x`, except for :math:`x=0`, and whose integral is equal to 1: .. math:: - \int \delta(x - u) dx = 1 + \int \delta(t) dt = 1 The unit of the Dirac delta function follows from its definition: .. math:: - f(0) = \int \delta(x) f(x) dx + f(0) = \int \delta(t) f(t) dt -Here :math:`f(x)` is a continuous function of x. As the unit of the :math:`f()` is the same on both left- and right-hand side, the unit of :math:`dx \delta(x)` must be equal to 1. -Therefore, the unit of :math:`\delta(x)` must be equal to the inverse of the unit of :math:`x`. - -In the context of neuroscience, the spikes are represented as events in time with a unit of :math:`\text{s}`. Consequently, the delta pulses will have a unit of inverse of time, :math:`\text{1/s}`. -Therefore, all the incoming spikes defined in the input block will have an implicit unit of :math:`\text{1/s}`. +Here :math:`f(t)` is a continuous function of :math:`t`. As the unit of the :math:`f()` is the same on both left-and right-hand side, the unit of :math:`dt \delta(t)` must be equal to 1. Therefore, the unit of :math:`\delta(t)` must be equal to the inverse of the unit of :math:`t`, that is :math:`s^{-1}`. Therefore, all the incoming spikes defined in the input block will have an implicit unit of :math:`\text{1/s}`. Physical units such as millivolts (:math:`\text{mV}`) and nanoamperes (:math:`\text{nA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current. In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity, such as :math:`\text{mV}` or :math:`\text{nA}`, to obtain a quantity with units of volts or amperes, respectively. For example, the product of a Dirac delta function and millivolt (:math:`\text{mV}`) unit can be written as :math:`\delta(t) \text{mV}`. 
This can be interpreted as an impulse in voltage with a magnitude of one millivolt. +Handling spiking input by convolutions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + +XXX: mention no_spike_input_port_in_equation_rhs_outside_convolve + -Handling spiking input -~~~~~~~~~~~~~~~~~~~~~~ + + +Handling spiking input by event handlers +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Spiking input can be handled by convolutions with kernels (see :ref:`Integrating spiking input`) or by means of ``onReceive`` event handler blocks. An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: @@ -895,8 +899,6 @@ To specify in which sequence the event handlers should be called in case multipl In this case, if a pre- and postsynaptic spike are received at the exact same time, the higher-priority ``post_spikes`` handler will be invoked first. -XXX: mention no_spike_input_port_in_equation_rhs_outside_convolve - Output ------ @@ -909,6 +911,8 @@ Each model can only send a single type of event. The type of the event has to be Calling the ``emit_spike()`` function in the ``update`` block results in firing a spike to all target neurons and devices time stamped with the simulation time at the end of the time interval ``t + timestep()``. +XXX: mention attributes here?! 
+ Event attributes ~~~~~~~~~~~~~~~~ diff --git a/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py b/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py index 11902fd59..526e1b53e 100644 --- a/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py +++ b/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py @@ -47,7 +47,7 @@ class CoCoOnReceiveVectorsShouldBeConstantSizeVisitor(ASTVisitor): def visit_input_port(self, node: ASTInputPort): if node.has_size_parameter(): try: - int(node.get_size_parameter()) + int(str(node.get_size_parameter())) except ValueError: # exception converting size parameter to int; hence, not allowed code, message = Messages.get_vector_input_ports_should_be_of_constant_size() diff --git a/pynestml/utils/ode_toolbox_utils.py b/pynestml/utils/ode_toolbox_utils.py index fabc72fca..fd2200a5b 100644 --- a/pynestml/utils/ode_toolbox_utils.py +++ b/pynestml/utils/ode_toolbox_utils.py @@ -38,7 +38,7 @@ def _rewrite_piecewise_into_ternary(cls, s: str) -> str: "Float": sympy.Float, "Function": sympy.Function} - pattern = r'(?. model convolve_spiking_no_attributes_neuron: state: x real = 0. - y real = 0. 
equations: #kernel K = (e / tau_syn) * t * exp(-t / tau_syn) kernel K = delta(t) x' = convolve(K, spikes_in) / s - y' = spikes_in parameters: tau_syn ms = 2 ms diff --git a/tests/nest_tests/resources/add_spikes_to_ode.nestml b/tests/nest_tests/resources/add_spikes_to_ode.nestml index 9801faa02..5e7d7a7e5 100644 --- a/tests/nest_tests/resources/add_spikes_to_ode.nestml +++ b/tests/nest_tests/resources/add_spikes_to_ode.nestml @@ -41,6 +41,3 @@ model add_spikes_to_ode: update: integrate_odes() - - x += spikes / 5 - y += spikes / 42 diff --git a/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite.nestml b/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite.nestml index a81642585..0e38af6f5 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_nonlineardendrite.nestml @@ -43,9 +43,9 @@ model iaf_psc_exp_nonlineardendrite: kernel I_kernel2 = (e / tau_syn2) * t * exp(-t / tau_syn2) kernel I_kernel3 = exp(-t / tau_syn3) - recordable inline I_dend pA = convolve(I_kernel2, I_2) * pA + recordable inline I_dend pA = convolve(I_kernel2, I_2.weight) - inline I_syn pA = convolve(I_kernel1, I_1) * pA + dend_curr_enabled * I_dend + I_dend_ap + convolve(I_kernel3, I_3) * pA + I_e + inline I_syn pA = convolve(I_kernel1, I_1.weight) + dend_curr_enabled * I_dend + I_dend_ap + convolve(I_kernel3, I_3.weight) + I_e V_m' = -(V_m - E_L) / tau_m + I_syn / C_m @@ -66,9 +66,9 @@ model iaf_psc_exp_nonlineardendrite: T_dend_ap ms = 10 ms # time window over which the dendritic current clamp is active input: - I_1 <- spike - I_2 <- spike - I_3 <- spike + I_1 <- spike(weight pA) + I_2 <- spike(weight pA) + I_3 <- spike(weight pA) output: spike diff --git a/tests/nest_tests/resources/input_ports_in_loop.nestml b/tests/nest_tests/resources/input_ports_in_loop.nestml index f5013e1bd..561ba3b07 100644 --- a/tests/nest_tests/resources/input_ports_in_loop.nestml +++ b/tests/nest_tests/resources/input_ports_in_loop.nestml @@ 
-39,6 +39,7 @@ model input_ports_loop: GABA_spikes_sum[N_spikes] real = 0 parameters: + N_buf integer = 5 N_spikes integer = 10 input: diff --git a/tests/nest_tests/test_add_spikes_to_ode.py b/tests/nest_tests/test_add_spikes_to_ode.py index 9f91e7d13..8d4743a43 100644 --- a/tests/nest_tests/test_add_spikes_to_ode.py +++ b/tests/nest_tests/test_add_spikes_to_ode.py @@ -19,9 +19,10 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -import nest import os +import nest + from pynestml.frontend.pynestml_frontend import generate_nest_target diff --git a/tests/nest_tests/test_biexponential_synapse_kernel.py b/tests/nest_tests/test_biexponential_synapse_kernel.py index 0ed0954d8..eb9b106bb 100644 --- a/tests/nest_tests/test_biexponential_synapse_kernel.py +++ b/tests/nest_tests/test_biexponential_synapse_kernel.py @@ -19,11 +19,12 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -import nest import numpy as np import os import pytest +import nest + from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_nest_target @@ -75,7 +76,7 @@ def test_biexp_synapse(self): nest.Connect(sg4, neuron, syn_spec={"receptor_type": 4, "weight": 100.}) i_1 = nest.Create("multimeter", params={"record_from": [ - "g_gap__X__spikeGap", "g_ex__X__spikeExc", "g_in__X__spikeInh", "g_GABA__X__spikeGABA"], "interval": .1}) + "g_gap__X__spikeGap__DOT__weight", "g_ex__X__spikeExc__DOT__weight", "g_in__X__spikeInh__DOT__weight", "g_GABA__X__spikeGABA__DOT__weight"], "interval": .1}) nest.Connect(i_1, neuron) vm_1 = nest.Create("voltmeter") @@ -105,16 +106,16 @@ def plot(self, vm_1, i_1, sd): ax[0].scatter(sd.events["times"], np.mean(vm_1["V_m"]) * np.ones_like(sd.events["times"])) - ax[1].plot(i_1["times"], i_1["g_gap__X__spikeGap"], label="g_gap__X__spikeGap") + ax[1].plot(i_1["times"], i_1["g_gap__X__spikeGap__DOT__weight"], 
label="g_gap__X__spikeGap") ax[1].set_ylabel("current") - ax[2].plot(i_1["times"], i_1["g_ex__X__spikeExc"], label="g_ex__X__spikeExc") + ax[2].plot(i_1["times"], i_1["g_ex__X__spikeExc__DOT__weight"], label="g_ex__X__spikeExc") ax[2].set_ylabel("current") - ax[3].plot(i_1["times"], i_1["g_in__X__spikeInh"], label="g_in__X__spikeInh") + ax[3].plot(i_1["times"], i_1["g_in__X__spikeInh__DOT__weight"], label="g_in__X__spikeInh") ax[3].set_ylabel("current") - ax[4].plot(i_1["times"], i_1["g_GABA__X__spikeGABA"], label="g_GABA__X__spikeGABA") + ax[4].plot(i_1["times"], i_1["g_GABA__X__spikeGABA__DOT__weight"], label="g_GABA__X__spikeGABA") ax[4].set_ylabel("current") for _ax in ax: diff --git a/tests/nest_tests/test_convolve.py b/tests/nest_tests/test_convolve.py index 1375353d8..7c9a9a981 100644 --- a/tests/nest_tests/test_convolve.py +++ b/tests/nest_tests/test_convolve.py @@ -56,7 +56,7 @@ def test_convolve(self): nest.Connect(sg, neuron) - mm = nest.Create("multimeter", {"record_from": ["x", "y"]}) + mm = nest.Create("multimeter", {"record_from": ["x"]}) nest.Connect(mm, neuron) nest.Simulate(100.) @@ -66,8 +66,6 @@ def test_convolve(self): import matplotlib.pyplot as plt plt.subplots() plt.plot(events["times"], events["x"]) - plt.plot(events["times"], events["y"]) plt.savefig("/tmp/test_convolve.png") assert events["x"][-1] == 2E-3 - assert events["y"][-1] == 2E-4 # XXX: this should be 2; see https://github.com/nest/nestml/pull/1050 From 03013f9aa57cd42ee556cfe13ba2599931aac579 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sat, 21 Dec 2024 01:40:59 +0100 Subject: [PATCH 28/68] add attributes to spiking input ports --- .../nestml_language_concepts.rst | 66 ++++++- .../printers/nest_variable_printer.py | 2 +- .../point_neuron/common/NeuronClass.jinja2 | 6 +- .../point_neuron/common/NeuronHeader.jinja2 | 8 +- pynestml/utils/ast_utils.py | 2 +- .../resources/add_spikes_to_ode.nestml | 5 +- .../aeif_cond_alpha_alt_neuron.nestml | 13 +- ...alpha_function_2nd_order_ode_neuron.nestml | 10 +- .../resources/iaf_psc_exp_multisynapse.nestml | 8 +- .../iaf_psc_exp_multisynapse_vectors.nestml | 4 +- .../resources/input_ports_in_loop.nestml | 73 -------- .../test_plasticity_dynamics_neuron.nestml | 5 +- tests/nest_tests/test_input_ports.py | 166 ------------------ tests/nest_tests/test_multisynapse.py | 22 +-- 14 files changed, 101 insertions(+), 289 deletions(-) delete mode 100644 tests/nest_tests/resources/input_ports_in_loop.nestml delete mode 100644 tests/nest_tests/test_input_ports.py diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 68a2d1e38..17f205dc7 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -860,23 +860,63 @@ The unit of the Dirac delta function follows from its definition: Here :math:`f(t)` is a continuous function of :math:`t`. As the unit of the :math:`f()` is the same on both left-and right-hand side, the unit of :math:`dt \delta(t)` must be equal to 1. Therefore, the unit of :math:`\delta(t)` must be equal to the inverse of the unit of :math:`t`, that is :math:`s^{-1}`. Therefore, all the incoming spikes defined in the input block will have an implicit unit of :math:`\text{1/s}`. -Physical units such as millivolts (:math:`\text{mV}`) and nanoamperes (:math:`\text{nA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current. 
-In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity, such as :math:`\text{mV}` or :math:`\text{nA}`, to obtain a quantity with units of volts or amperes, respectively. -For example, the product of a Dirac delta function and millivolt (:math:`\text{mV}`) unit can be written as :math:`\delta(t) \text{mV}`. This can be interpreted as an impulse in voltage with a magnitude of one millivolt. +Given an input port ``spikes_in``, we can define the incoming spikes as a train of delta pulses: + +.. math:: + + \mathrm{spikes_in}(t) = \sum_k \delta(t - t_k) + +The units are the same as for a single delta function. + +Spiking input can be handled by convolutions with kernels (see :ref:`Integrating spiking input`) or by means of ``onReceive`` event handler blocks. + Handling spiking input by convolutions ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +The spiking input port name ``spikes_in`` can subsequently be used in the right-hand side of ODEs: -XXX: mention no_spike_input_port_in_equation_rhs_outside_convolve +.. math:: + + \frac{dx}{dt} = -x / tau + \mathrm{spikes_in}(t) + +If ``x`` is a real number, then the units here are consistent. This can be written in NESTML as: + +.. code-block:: nestml + + x' = -x / tau + spikes_in + +``spikes_in`` can also be used inside a convolution; for instance, if ``K`` is a kernel, then: + +.. math:: + + \frac{dx}{dt} = -x / tau + (K \ast \mathrm{spikes_in}) / s + +This can be written in NESTML as: + +.. code-block:: nestml + x' = -x / tau + convolve(K, spikes_in) / s + +Note that applying the convolution means integrating over time, hence dropping the [1/s] unit, leaving a unitless quantity. To make the units consistent in this case, an explicit division by seconds is required. + +Physical units such as millivolts (:math:`\text{mV}`) and picoamperes (:math:`\text{pA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current. 
In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity to obtain a quantity with units of volts or amperes, for instance, if ``x`` is in ``pA``, then we can write: + +.. code-block:: nestml + + x = -x / tau + pA * spikes_in + + +XXX: mention no_spike_input_port_in_equation_rhs_outside_convolve Handling spiking input by event handlers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Spiking input can be handled by convolutions with kernels (see :ref:`Integrating spiking input`) or by means of ``onReceive`` event handler blocks. An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: +An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: + + .. code-block:: nestml @@ -898,6 +938,20 @@ To specify in which sequence the event handlers should be called in case multipl In this case, if a pre- and postsynaptic spike are received at the exact same time, the higher-priority ``post_spikes`` handler will be invoked first. +Vector input ports of constant size can be used: + +.. code-block:: nestml + + input: + foo[2] <- spike + + onReceive(foo[0]): + # ... handle foo[0] spikes... + + onReceive(foo[1]): + # ... handle foo[1] spikes... + + Output ------ @@ -913,6 +967,8 @@ Calling the ``emit_spike()`` function in the ``update`` block results in firing XXX: mention attributes here?! 
+ + Event attributes ~~~~~~~~~~~~~~~~ diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index dea1fb916..acff89080 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -194,7 +194,7 @@ def _print(self, variable: ASTVariable, symbol, with_origin: bool = True) -> str if variable.is_delay_variable(): return self._print_delay_variable(variable) - if with_origin: + if with_origin and NESTCodeGeneratorUtils.print_symbol_origin(symbol, variable): return NESTCodeGeneratorUtils.print_symbol_origin(symbol, variable) % variable_name return variable_name diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 86c60105e..c7a3f12a2 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -849,7 +849,7 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const if (B_.spike_input_{{ inport }}_spike_input_received_grid_sum_) { // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer into the "grid_sum" variables resets the RingBuffer entries - on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(); + on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag); } {%- endfor %} @@ -1140,7 +1140,7 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) {%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} {%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} -{%- if astnode.get_body().get_spike_input_ports() | length > 1 %} +{%- if 
astnode.get_body().get_spike_input_ports() | length > 1 or astnode.is_multisynapse_spikes() %} if (e.get_rport() == {{ rport }}) {%- endif %} { @@ -1193,7 +1193,7 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) {%- for blk in neuron.get_on_receive_blocks() %} {%- set ast = blk.get_stmts_body() %} void -{{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}() +{{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag) { const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 0ae0522bf..69581e419 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -475,7 +475,7 @@ public: {% filter indent(2, True) -%} {%- for blk in neuron.get_on_receive_blocks() %} - void on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(); + void on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag); {%- endfor %} {%- endfilter %} @@ -544,12 +544,6 @@ private: const nest_port_t MAX_SPIKE_RECEPTOR = {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}; {%- endif %} -{# -{% if neuron.get_spike_input_ports() | length > 1 or neuron.is_multisynapse_spikes() -%} - static std::vector< size_t > rport_to_nestml_buffer_idx; -{%- endif %} -#} - /** * Reset state of neuron. 
**/ diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index d0157c484..00a6d0b4a 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -1409,7 +1409,7 @@ def construct_kernel_X_spike_buf_name(cls, kernel_var_name: str, spike_input_por if isinstance(spike_input_port, ASTVariable): if spike_input_port.has_vector_parameter(): - spike_input_port_name += "_" + str(cls.get_numeric_vector_size(spike_input_port)) + spike_input_port_name += "_" + str(spike_input_port.get_vector_parameter()) if attribute is not None: attribute = "__DOT__" + attribute diff --git a/tests/nest_tests/resources/add_spikes_to_ode.nestml b/tests/nest_tests/resources/add_spikes_to_ode.nestml index 5e7d7a7e5..3260fadc5 100644 --- a/tests/nest_tests/resources/add_spikes_to_ode.nestml +++ b/tests/nest_tests/resources/add_spikes_to_ode.nestml @@ -30,8 +30,9 @@ model add_spikes_to_ode: y real = 0 equations: - x' = -x + 42 * spikes # linear eq - y' = -y**2 + 123 * spikes # nonlinear eq + kernel K = delta(t) + x' = -x + 42 * convolve(K, spikes) / s # linear eq # XXX: this should just read ``x' = -x + 42 * spikes``; this is a known issue (see https://github.com/nest/nestml/pull/1050). + y' = -y**2 + 123 * convolve(K, spikes) / s # nonlinear eq # XXX: this should just read ``x' = -x + 42 * spikes``; this is a known issue (see https://github.com/nest/nestml/pull/1050). 
input: spikes <- spike diff --git a/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml b/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml index 9121ef2b9..f10604571 100644 --- a/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml +++ b/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml @@ -91,8 +91,7 @@ model aeif_cond_alpha_alt_neuron: I_e pA = 0 pA input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike + in_spikes <- spike(weight nS) I_stim pA <- continuous output: @@ -106,11 +105,11 @@ model aeif_cond_alpha_alt_neuron: # neuron not refractory integrate_odes(g_exc, g_inh, V_m, w) - onReceive(exc_spikes): - g_exc' += exc_spikes * (e / tau_syn_exc) * nS * s - - onReceive(inh_spikes): - g_inh' += inh_spikes * (e / tau_syn_inh) * nS * s + onReceive(in_spikes): + if in_spikes.weight > 0: + g_exc' += in_spikes.weight * (e / tau_syn_exc) + else: + g_inh' -= in_spikes.weight * (e / tau_syn_inh) onCondition(refr_t <= 0 ms and V_m >= V_th): # threshold crossing diff --git a/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml b/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml index fa0d1ef88..b912673d6 100644 --- a/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml +++ b/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml @@ -26,19 +26,19 @@ along with NEST. If not, see . 
""" model alpha_function_2nd_order_ode_neuron: state: - x real = 0 - x' ms**-1 = 0 * ms**-1 + x real = 0 + x' ms**-1 = 0 * ms**-1 y real = 0 input: - fX <- spike + fX <- spike(weight real) equations: - x'' = - 2 * x' / ms - x / ms**2 + x'' = -2 * x' / ms - x / ms**2 y' = (-y + 42) / s update: integrate_odes(x, y) onReceive(fX): - x' += e*fX * s / ms + x' += e * fX.weight / ms diff --git a/tests/nest_tests/resources/iaf_psc_exp_multisynapse.nestml b/tests/nest_tests/resources/iaf_psc_exp_multisynapse.nestml index 621918172..6e820eec2 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_multisynapse.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_multisynapse.nestml @@ -22,7 +22,7 @@ model iaf_psc_exp_multisynapse_neuron: kernel I_kernel2 = (e / tau_syn2) * t * exp(-t / tau_syn2) kernel I_kernel3 = -exp(-t / tau_syn3) - recordable inline I_syn pA = (convolve(I_kernel1, spikes1) - convolve(I_kernel2, spikes2) + convolve(I_kernel3, spikes3)) * pA + recordable inline I_syn pA = convolve(I_kernel1, spikes1.weight) - convolve(I_kernel2, spikes2.weight) + convolve(I_kernel3, spikes3.weight) V_m' = -(V_m - E_L) / tau_m + (I_syn + I_e + I_stim) / C_m @@ -38,9 +38,9 @@ model iaf_psc_exp_multisynapse_neuron: I_e pA = 0 pA # External current. 
input: - spikes1 <- spike - spikes2 <- spike - spikes3 <- spike + spikes1 <- spike(weight pA) + spikes2 <- spike(weight pA) + spikes3 <- spike(weight pA) I_stim pA <- continuous output: diff --git a/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml b/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml index eec261c0f..6e7c685ef 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml @@ -22,7 +22,7 @@ model iaf_psc_exp_multisynapse_vectors_neuron: kernel I_kernel2 = exp(-1/tau_syn2*t) kernel I_kernel3 = -exp(-1/tau_syn3*t) - inline I_syn pA = (convolve(I_kernel1, spikes[0]) - convolve(I_kernel2, spikes[1]) + convolve(I_kernel3, spikes[2])) * pA + inline I_syn pA = convolve(I_kernel1, spikes[0].weight) - convolve(I_kernel2, spikes[1].weight) + convolve(I_kernel3, spikes[2].weight) V_m' = -(V_m - E_L) / tau_m + (I_syn + I_e + I_stim) / C_m @@ -39,7 +39,7 @@ model iaf_psc_exp_multisynapse_vectors_neuron: I_e pA = 0pA # External current. input: - spikes[3] <- spike + spikes[3] <- spike(weight pA) I_stim pA <- continuous output: diff --git a/tests/nest_tests/resources/input_ports_in_loop.nestml b/tests/nest_tests/resources/input_ports_in_loop.nestml deleted file mode 100644 index 561ba3b07..000000000 --- a/tests/nest_tests/resources/input_ports_in_loop.nestml +++ /dev/null @@ -1,73 +0,0 @@ -""" -input_ports_in_loop.nestml -########################## - - -Description -+++++++++++ - -This test is used to test the usage of both vectorized and non-vectorized input ports in loops - - -Copyright statement -+++++++++++++++++++ - -This file is part of NEST. - -Copyright (C) 2004 The NEST Initiative - -NEST is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 2 of the License, or -(at your option) any later version. 
- -NEST is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with NEST. If not, see . -""" -model input_ports_loop: - state: - AMPA_spikes_buf[4] real = 0 - GABA_spikes_buf[5] real = 0 - - NMDA_spikes_sum real = 0 - AMPA_spikes_sum real = 0 - GABA_spikes_sum[N_spikes] real = 0 - - parameters: - N_buf integer = 5 - N_spikes integer = 10 - - input: - NMDA_spikes <- spike(weight real) - AMPA_spikes[2] <- spike(weight real) - GABA_spikes[5] <- spike(weight real) - - onReceive(NMDA_spikes): - NMDA_spikes_sum += NMDA_spikes.weight - - onReceive(AMPA_spikes[i integer]): - AMPA_spikes_buf[i + 2] = AMPA_spikes[i].weight - - onReceive(GABA_spikes[j integer]): - GABA_spikes_buf[j] = GABA_spikes[j].weight - - update: - # AMPA_spikes - i integer = 0 - for i in 0 ... 
2 step 1: - AMPA_spikes_sum += 2.5 * AMPA_spikes_buf[i + 2] - AMPA_spikes_buf[i + 2] = 0 - - # GABA_spikes - j integer = 0 - k integer = 0 - while j < N_buf and k < N_spikes: - GABA_spikes_sum[k + 2] += GABA_spikes_buf[j] - GABA_spikes_buf[j] = 0 - j += 1 - k += 1 diff --git a/tests/nest_tests/resources/test_plasticity_dynamics_neuron.nestml b/tests/nest_tests/resources/test_plasticity_dynamics_neuron.nestml index cd7b44d13..ed51dd1bf 100644 --- a/tests/nest_tests/resources/test_plasticity_dynamics_neuron.nestml +++ b/tests/nest_tests/resources/test_plasticity_dynamics_neuron.nestml @@ -46,7 +46,8 @@ model test_plasticity_dynamics_neuron: output: spike + onReceive(spikes_parrot): + emit_spike() + update: integrate_odes() - if spikes_parrot * 1 s != 0: - emit_spike() diff --git a/tests/nest_tests/test_input_ports.py b/tests/nest_tests/test_input_ports.py deleted file mode 100644 index 0cebd7aa3..000000000 --- a/tests/nest_tests/test_input_ports.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test_input_ports.py -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . - -import os -import pytest - -import nest - -from pynestml.frontend.pynestml_frontend import generate_nest_target -from pynestml.codegeneration.nest_tools import NESTTools - - -class TestInputPorts: - """ - Tests the different kind of input ports supported in NESTML. 
- """ - - # @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), - # reason="This test does not support NEST 2") - # def test_input_ports(self): - # input_path = os.path.join(os.path.realpath(os.path.join( - # os.path.dirname(__file__), "resources", "input_ports.nestml"))) - # target_path = "target" - # logging_level = "INFO" - # module_name = "nestmlmodule" - # suffix = "_nestml" - - # generate_nest_target(input_path, - # target_path=target_path, - # logging_level=logging_level, - # module_name=module_name, - # suffix=suffix) - # nest.ResetKernel() - # nest.Install(module_name) - - # neuron = nest.Create("input_ports_nestml") - - # # List of receptor types for the spiking input ports - # receptor_types = nest.GetStatus(neuron, "receptor_types")[0] - - # spike_times = [ - # [10., 44.], # NMDA_SPIKES - # [12., 42.], # AMPA_SPIKES - # [14., 40.], # GABA_SPIKES - # [16., 38.], # FOO_VEC_IDX_0 - # [18., 36.], # FOO_VEC_IDX_1 - # [20., 34.], # MY_SPIKES_VEC_IDX_0 - # [22., 32.], # MY_SPIKES_VEC_IDX_1 - # [24., 30.], # MY_SPIKES2_VEC_IDX_1 - # ] - # sgs = nest.Create("spike_generator", len(spike_times)) - # for i, sg in enumerate(sgs): - # sg.spike_times = spike_times[i] - - # nest.Connect(sgs[0], neuron, syn_spec={"receptor_type": receptor_types["NMDA_SPIKES"], "weight": -1.0, "delay": 1.0}) - # nest.Connect(sgs[1], neuron, syn_spec={"receptor_type": receptor_types["AMPA_SPIKES"], "weight": 1.0, "delay": 1.0}) - # nest.Connect(sgs[2], neuron, syn_spec={"receptor_type": receptor_types["GABA_SPIKES"], "weight": -1.0, "delay": 1.0}) - # nest.Connect(sgs[3], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) - # nest.Connect(sgs[4], neuron, syn_spec={"receptor_type": receptor_types["FOO_VEC_IDX_1"], "weight": 1.0, "delay": 1.0}) - # nest.Connect(sgs[5], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_0"], "weight": 1.0, "delay": 1.0}) - # nest.Connect(sgs[6], neuron, 
syn_spec={"receptor_type": receptor_types["MY_SPIKES_VEC_IDX_1"], "weight": 2.0, "delay": 1.0}) - # nest.Connect(sgs[7], neuron, syn_spec={"receptor_type": receptor_types["MY_SPIKES2_VEC_IDX_1"], "weight": -3.0, "delay": 1.0}) - - # mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "my_spikes_ip"]}) - # nest.Connect(mm, neuron) - - # nest.Simulate(50.) - - # events = mm.get("events") - # connections = nest.GetConnections(target=neuron) - - # # corresponds to ``bar += NMDA_spikes + 2 * AMPA_spikes - 3 * GABA_spikes`` in the update block - # assert events["bar"][-1] == len(spike_times[0]) * connections.get("weight")[0] \ - # + 2 * len(spike_times[1]) * connections.get("weight")[1] \ - # + 3 * len(spike_times[2]) * connections.get("weight")[2] - - # # corresponds to ``foo_spikes += foo[0] + 5.5 * foo[1]`` in the update block - # assert events["foo_spikes"][-1] == len(spike_times[3]) * connections.get("weight")[3] \ - # + 5.5 * len(spike_times[4]) * connections.get("weight")[4] - - # # corresponds to ``my_spikes_ip += my_spikes[0] + my_spikes[1] - my_spikes2[1]`` in the update block - # assert events["my_spikes_ip"][-1] == len(spike_times[5]) * connections.get("weight")[5] \ - # + len(spike_times[6]) * connections.get("weight")[6] \ - # - len(spike_times[7]) * connections.get("weight")[7] # minus because of a minus in the model - - @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), - reason="This test does not support NEST 2") - def test_input_ports_in_loop(self): - input_path = os.path.join(os.path.realpath(os.path.join( - os.path.dirname(__file__), "resources", "input_ports_in_loop.nestml"))) - target_path = "target" - logging_level = "INFO" - module_name = "nestmlmodule" - suffix = "_nestml" - - generate_nest_target(input_path, - target_path=target_path, - logging_level=logging_level, - module_name=module_name, - suffix=suffix) - nest.ResetKernel() - nest.Install(module_name) - - neuron = nest.Create("input_ports_loop_nestml") - 
- # List of receptor types for the spiking input ports - receptor_types = nest.GetStatus(neuron, "receptor_types")[0] - - spike_times = [ - [10., 39.], # NMDA_SPIKES - [12., 37.], # FOO_0 - [14., 35.], # FOO_1 - [16., 33.], # SPIKE_BUF_0 - [18., 31.], # SPIKE_BUF_1 - [20., 29.], # SPIKE_BUF_2 - [22., 27.], # SPIKE_BUF_3 - [24., 25.], # SPIKE_BUF_4 - ] - sgs = nest.Create("spike_generator", len(spike_times)) - for i, sg in enumerate(sgs): - sg.spike_times = spike_times[i] - - nest.Connect(sgs[0], neuron, - syn_spec={"receptor_type": receptor_types["NMDA_SPIKES"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[1], neuron, - syn_spec={"receptor_type": receptor_types["FOO_0"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[2], neuron, - syn_spec={"receptor_type": receptor_types["FOO_1"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[3], neuron, syn_spec={"receptor_type": receptor_types["SPIKE_BUF_0"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[4], neuron, syn_spec={"receptor_type": receptor_types["SPIKE_BUF_1"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[5], neuron, - syn_spec={"receptor_type": receptor_types["SPIKE_BUF_2"], "weight": 1.0, "delay": 1.0}) - nest.Connect(sgs[6], neuron, - syn_spec={"receptor_type": receptor_types["SPIKE_BUF_3"], "weight": 2.0, "delay": 1.0}) - nest.Connect(sgs[7], neuron, - syn_spec={"receptor_type": receptor_types["SPIKE_BUF_4"], "weight": 3.0, "delay": 1.0}) - - mm = nest.Create("multimeter", {"record_from": ["bar", "foo_spikes", "MY_SPIKES_IP_2", "MY_SPIKES_IP_3", "MY_SPIKES_IP_4", "MY_SPIKES_IP_5", "MY_SPIKES_IP_6"]}) - nest.Connect(mm, neuron) - - nest.Simulate(41.) 
- - events = mm.get("events") - assert events["bar"][-1] == 2.0 - assert events["foo_spikes"][-1] == 25.0 - assert events["MY_SPIKES_IP_2"][-1] == 2.0 - assert events["MY_SPIKES_IP_5"][-1] == 4.0 - assert events["MY_SPIKES_IP_6"][-1] == 6.0 diff --git a/tests/nest_tests/test_multisynapse.py b/tests/nest_tests/test_multisynapse.py index 60fc50abe..3080b2260 100644 --- a/tests/nest_tests/test_multisynapse.py +++ b/tests/nest_tests/test_multisynapse.py @@ -75,7 +75,7 @@ def test_multisynapse(self): nest.Connect(sg3, neuron, syn_spec={"receptor_type": receptor_types["SPIKES3"], "weight": 500., "delay": 0.1}) mm = nest.Create("multimeter", params={"record_from": [ - "I_syn", "I_kernel2__X__spikes2", "I_kernel3__X__spikes3"], "interval": nest.resolution}) + "I_syn", "I_kernel2__X__spikes2__DOT__weight", "I_kernel3__X__spikes3__DOT__weight"], "interval": nest.resolution}) nest.Connect(mm, neuron) vm_1 = nest.Create("voltmeter", params={"interval": nest.resolution}) @@ -98,10 +98,10 @@ def test_multisynapse(self): ax[1].plot(mm["times"], mm["I_syn"], label="I_syn") ax[1].set_ylabel("current") - ax[2].plot(mm["times"], mm["I_kernel2__X__spikes2"], label="I_kernel2") + ax[2].plot(mm["times"], mm["I_kernel2__X__spikes2__DOT__weight"], label="I_kernel2") ax[2].set_ylabel("current") - ax[3].plot(mm["times"], mm["I_kernel3__X__spikes3"], label="I_kernel3") + ax[3].plot(mm["times"], mm["I_kernel3__X__spikes3__DOT__weight"], label="I_kernel3") ax[3].set_ylabel("current") for _ax in ax: @@ -117,7 +117,7 @@ def test_multisynapse(self): fig.savefig("/tmp/test_multisynapse.png") # testing - np.testing.assert_almost_equal(V_m[-1], -72.77625579314515) + np.testing.assert_almost_equal(V_m[-1], -72.58743039242219) def test_multisynapse_with_vector_input_ports(self): input_path = os.path.join(os.path.realpath(os.path.join( @@ -147,16 +147,16 @@ def test_multisynapse_with_vector_input_ports(self): receptor_types = nest.GetStatus(neuron, "receptor_types")[0] sg = 
nest.Create("spike_generator", params={"spike_times": [20., 80.]}) - nest.Connect(sg, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_0"], "weight": 1000., "delay": 0.1}) + nest.Connect(sg, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_VEC_IDX_0"], "weight": 1000., "delay": 0.1}) sg2 = nest.Create("spike_generator", params={"spike_times": [40., 60.]}) - nest.Connect(sg2, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_1"], "weight": 1000., "delay": 0.1}) + nest.Connect(sg2, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_VEC_IDX_1"], "weight": 1000., "delay": 0.1}) sg3 = nest.Create("spike_generator", params={"spike_times": [30., 70.]}) - nest.Connect(sg3, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_2"], "weight": 500., "delay": 0.1}) + nest.Connect(sg3, neuron, syn_spec={"receptor_type": receptor_types["SPIKES_VEC_IDX_2"], "weight": 500., "delay": 0.1}) mm = nest.Create("multimeter", params={"record_from": [ - "I_kernel1__X__spikes_0", "I_kernel2__X__spikes_1", "I_kernel3__X__spikes_2"], "interval": nest.resolution}) + "I_kernel1__X__spikes_0__DOT__weight", "I_kernel2__X__spikes_1__DOT__weight", "I_kernel3__X__spikes_2__DOT__weight"], "interval": nest.resolution}) nest.Connect(mm, neuron) vm_1 = nest.Create("voltmeter", params={"interval": nest.resolution}) @@ -175,13 +175,13 @@ def test_multisynapse_with_vector_input_ports(self): ax[0].plot(V_m_timevec, V_m, label="V_m") ax[0].set_ylabel("voltage") - ax[1].plot(mm.events["times"], mm.events["I_kernel1__X__spikes_0"], label="I_kernel0") + ax[1].plot(mm.events["times"], mm.events["I_kernel1__X__spikes_0__DOT__weight"], label="I_kernel0") ax[1].set_ylabel("current") - ax[2].plot(mm.events["times"], mm.events["I_kernel2__X__spikes_1"], label="I_kernel1") + ax[2].plot(mm.events["times"], mm.events["I_kernel2__X__spikes_1__DOT__weight"], label="I_kernel1") ax[2].set_ylabel("current") - ax[3].plot(mm.events["times"], mm.events["I_kernel3__X__spikes_2"], 
label="I_kernel2") + ax[3].plot(mm.events["times"], mm.events["I_kernel3__X__spikes_2__DOT__weight"], label="I_kernel2") ax[3].set_ylabel("current") for _ax in ax: From 597d2dbd129f6ae29f6d63f236d67f7a833fabc5 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 7 Jan 2025 15:01:59 +0100 Subject: [PATCH 29/68] add attributes to spiking input ports --- models/neurons/terub_gpe_neuron.nestml | 4 ++-- .../printers/python_variable_printer.py | 4 ++-- .../point_neuron/@NEURON_NAME@.py.jinja2 | 20 +++++++++++++++---- .../ApplySpikesFromBuffers.jinja2 | 6 +++--- .../test_python_standalone_module.py.jinja2 | 4 ++-- 5 files changed, 25 insertions(+), 13 deletions(-) diff --git a/models/neurons/terub_gpe_neuron.nestml b/models/neurons/terub_gpe_neuron.nestml index 3843b8809..97be6d104 100644 --- a/models/neurons/terub_gpe_neuron.nestml +++ b/models/neurons/terub_gpe_neuron.nestml @@ -78,8 +78,8 @@ model terub_gpe_neuron: inline g_k_Ca real = 15.0 #Report:15, Terman Rubin 2002: 20.0 inline g_k1 real = 30.0 - inline I_exc_mod real = -convolve(g_exc, exc_spikes) * V_m - inline I_inh_mod real = convolve(g_inh, inh_spikes) * (V_m - E_gg) + inline I_exc_mod real = -convolve(g_exc, exc_spikes.weight) * V_m + inline I_inh_mod real = convolve(g_inh, inh_spikes.weight) * (V_m - E_gg) inline tau_n real = g_tau_n_0 + g_tau_n_1 / (1. + exp(-(V_m-g_theta_n_tau)/g_sigma_n_tau)) inline tau_h real = g_tau_h_0 + g_tau_h_1 / (1. 
+ exp(-(V_m-g_theta_h_tau)/g_sigma_h_tau)) diff --git a/pynestml/codegeneration/printers/python_variable_printer.py b/pynestml/codegeneration/printers/python_variable_printer.py index d03bdadd0..d7db7a79a 100644 --- a/pynestml/codegeneration/printers/python_variable_printer.py +++ b/pynestml/codegeneration/printers/python_variable_printer.py @@ -56,9 +56,9 @@ def _print_python_name(cls, variable_name: str) -> str: """ differential_order = variable_name.count("\"") if differential_order > 0: - return variable_name.replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order + return variable_name.replace(".", "__DOT__").replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order - return variable_name.replace("$", "__DOLLAR") + return variable_name.replace(".", "__DOT__").replace("$", "__DOLLAR") def print_variable(self, variable: ASTVariable) -> str: """ diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 index e7ec5e773..ec23c940f 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 @@ -103,7 +103,11 @@ class Neuron_{{neuronName}}(Neuron): {{ port.get_symbol_name() }}: List[float] = [] spike_received_{{ port.get_symbol_name() }}: List[bool] = [] {%- else %} - {{ port.get_symbol_name() }}: float = 0. +{% set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} +{%- for attribute in ast_input_port.get_parameters() %} + {{ port.get_symbol_name() }}__DOT__{{ attribute.name }}: float = 0. +{%- endfor %} + {{ port.get_symbol_name() }}: float = 0. 
# buffer for the port name by itself (train of unweighted delta pulses) spike_received_{{ port.get_symbol_name() }}: bool = False {%- endif %} {%- endfor %} @@ -337,9 +341,17 @@ class Neuron_{{neuronName}}(Neuron): def handle(self, t_spike: float, w: float, port_name: str) -> None: {%- for port in neuron.get_spike_input_ports() %} - if port_name == "{{port.name}}": - self.B_.{{port.get_symbol_name()}} += abs(w) - self.B_.spike_received_{{port.get_symbol_name()}} = True + if port_name == "{{ port.name }}": + self.B_.{{ port.get_symbol_name() }} += 1. # unweighted spike port +{% set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} +{%- for attribute in ast_input_port.get_parameters() %} +{% if attribute.name == "weight" %} + self.B_.{{ port.get_symbol_name() }}__DOT__{{ attribute.name }} += abs(w) # unweighted spike port +{% else %} +{{ raise('The Python-standalone code generator only supports \'weight\' spike input port attribute for now') }} +{% endif %} +{% endfor %} + self.B_.spike_received_{{ port.get_symbol_name() }} = True return {%- endfor %} raise Exception("Received a spike on unknown input port \"" + port_name + "\" at t = " + "{0:E}".format(t_spike)) diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/ApplySpikesFromBuffers.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/ApplySpikesFromBuffers.jinja2 index c0952b2f5..bb74e623b 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/ApplySpikesFromBuffers.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/directives_py/ApplySpikesFromBuffers.jinja2 @@ -1,6 +1,6 @@ {%- if tracing %}# generated by {{self._TemplateReference__context.name}}{% endif %} {%- for spike_updates_for_port in spike_updates.values() %} -{%- for ast in spike_updates_for_port -%} -{%- include "directives_py/Assignment.jinja2" %} -{%- endfor %} +{%- for ast in 
spike_updates_for_port -%} +{%- include "directives_py/Assignment.jinja2" %} +{%- endfor %} {%- endfor %} diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 index cf0e4d5a1..4c3d511fe 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 @@ -43,8 +43,8 @@ class TestSimulator: sg_inh = simulator.add_neuron(SpikeGenerator(interval=50.)) {% for neuron in neurons %} neuron = simulator.add_neuron(Neuron_{{neuron.get_name()}}(timestep=simulator.timestep)) - simulator.connect(sg_exc, neuron, "exc_spikes",w=1000.) - simulator.connect(sg_inh, neuron, "inh_spikes",w=4000.) + simulator.connect(sg_exc, neuron, "spike_in_port",w=1000.) + simulator.connect(sg_inh, neuron, "spike_in_port",w=-4000.) {% endfor %} simulator.run(t_stop) From e3ad3c315d7e7937c43264968ce87e6f8dcc5ef3 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 16 Jan 2025 16:19:48 +0100 Subject: [PATCH 30/68] add attributes to spiking input ports --- models/neurons/aeif_cond_exp_neuron.nestml | 6 +- .../printers/nest_variable_printer.py | 20 +- .../printers/python_variable_printer.py | 10 +- .../point_neuron/common/NeuronClass.jinja2 | 256 +++++++++++++++--- .../point_neuron/common/NeuronHeader.jinja2 | 68 +---- .../point_neuron/@NEURON_NAME@.py.jinja2 | 55 ++-- .../point_neuron/@SYNAPSE_NAME@.py.jinja2 | 6 +- .../test_python_standalone_module.py.jinja2 | 4 +- ...est_simultaneous_spikes_different_ports.py | 90 ++++++ 9 files changed, 391 insertions(+), 124 deletions(-) create mode 100644 tests/nest_tests/test_simultaneous_spikes_different_ports.py diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index abdac81ba..39f3c3598 100644 --- a/models/neurons/aeif_cond_exp_neuron.nestml +++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -106,10 +106,10 @@ model aeif_cond_exp_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - if spike_in_port.weight < 0: - g_syn_inh += spike_in_port.weight - else: + if spike_in_port.weight > 0: g_syn_exc += spike_in_port.weight + else: + g_syn_inh -= spike_in_port.weight update: if refr_t > 0 ms: diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index acff89080..295668860 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -53,9 +53,15 @@ def __init__(self, expression_printer: ExpressionPrinter, with_origin: bool = Tr self.postsynaptic_getter_string_ = "start->get_%s()" def set_getter_string(self, s): + r"""Returns the empty string, because this method can be called from inside the Jinja template""" self.postsynaptic_getter_string_ = s 
return "" + def set_cpp_variable_suffix(self, s): + r"""Returns the empty string, because this method can be called from inside the Jinja template""" + self.cpp_variable_suffix = s + return "" + def print_variable(self, variable: ASTVariable) -> str: """ Converts a single variable to nest processable format. @@ -167,22 +173,22 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: var_name += "_" + str(variable.get_vector_parameter()) if variable.has_vector_parameter(): - # add variable attribute if it exists if variable.attribute: - return "spike_input_" + str(variable.name) + "_VEC_IDX_" + str(variable.get_vector_parameter()) + "__DOT__" + variable.attribute + "_grid_sum_" + return "__spike_input_" + str(variable.name) + "_VEC_IDX_" + str(variable.get_vector_parameter()) + "__DOT__" + variable.attribute else: # add variable attribute if it exists if variable.attribute: - return "spike_input_" + str(variable.name) + "__DOT__" + variable.attribute + "_grid_sum_" + return "__spike_input_" + str(variable.name) + "__DOT__" + variable.attribute # no vector indices, no attributes - return "spike_input_" + str(variable) + "_grid_sum_" + return "__spike_input_" + str(variable) if self.cpp_variable_suffix: return variable_symbol.get_symbol_name() + self.cpp_variable_suffix + # case of continuous-time input port return variable_symbol.get_symbol_name() + '_grid_sum_' def _print(self, variable: ASTVariable, symbol, with_origin: bool = True) -> str: @@ -194,7 +200,11 @@ def _print(self, variable: ASTVariable, symbol, with_origin: bool = True) -> str if variable.is_delay_variable(): return self._print_delay_variable(variable) - if with_origin and NESTCodeGeneratorUtils.print_symbol_origin(symbol, variable): + with_origin_ = with_origin + if symbol.is_spike_input_port(): + with_origin_ = False + + if with_origin_ and NESTCodeGeneratorUtils.print_symbol_origin(symbol, variable): return NESTCodeGeneratorUtils.print_symbol_origin(symbol, variable) % variable_name return 
variable_name diff --git a/pynestml/codegeneration/printers/python_variable_printer.py b/pynestml/codegeneration/printers/python_variable_printer.py index d7db7a79a..acdc8d44a 100644 --- a/pynestml/codegeneration/printers/python_variable_printer.py +++ b/pynestml/codegeneration/printers/python_variable_printer.py @@ -112,10 +112,18 @@ def print_variable(self, variable: ASTVariable) -> str: s = "" if not units_conversion_factor == 1: s += "(" + str(units_conversion_factor) + " * " - s += self._print(variable, symbol, with_origin=self.with_origin) + vector_param + + if symbol.is_spike_input_port(): + # spike buffer variables are prefixed with "B__" as the values are grabbed from the buffers one-by-one + s += "B__" + self._print(variable, symbol, with_origin=False) + vector_param + else: + assert symbol.is_continuous_input_port() + s += self._print(variable, symbol, with_origin=self.with_origin) + vector_param + s += vector_param if not units_conversion_factor == 1: s += ")" + return s if symbol.is_inline_expression: diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index c7a3f12a2..19376ace2 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -70,7 +70,8 @@ along with NEST. If not, see . 
#include "{{ neuronName }}.h" // uncomment the next line to enable printing of detailed debug information -// #define DEBUG +#define DEBUG +// XXX RESTORE COMMENT LINE ABOVE {% if state_vars_that_need_continuous_buffering | length > 0 %} {%- if continuous_state_buffering_method == "continuous_time_buffer" %} @@ -201,20 +202,19 @@ namespace nest {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) + // , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) + //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} - , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) + , spike_input_{{ inputPort.name }}_( nest::ListRingBuffer() ) , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) + //, spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. 
) {%- endif %} {%- endfor %} {%- endif %} @@ -237,8 +237,8 @@ namespace nest {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) + //, spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} {%- else %} ????????????? @@ -246,13 +246,12 @@ namespace nest {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) + //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} - , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) + , spike_input_{{ inputPort.name }}_( nest::ListRingBuffer() ) , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) - , spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) + //, spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. 
) {%- endif %} {%- endfor %} {%- endif %} @@ -770,7 +769,7 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endif %} /** - * buffer spikes from spiking input ports + * buffer spikes from spiking input ports -- in case of convolutions with spiking input ports **/ {%- for inputPortSymbol in neuron.get_spike_input_ports() %} @@ -780,21 +779,18 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_value(lag); + const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); {%- endfor %} {%- endif %} - B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_.get_value(lag); - B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_.get_value(lag); {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); + const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); {%- 
endfor %} - B_.spike_input_{{ inputPort.name }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_.get_value(lag); - B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_ = B_.spike_input_{{ inputPort.name }}_spike_input_received_.get_value(lag); {%- endif %} {%- endfor %} + {%- if has_delay_variables %} /** * delay variables @@ -846,9 +842,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const **/ {% for blk in neuron.get_on_receive_blocks() %} {%- set inport = utils.port_name_printer(blk.get_input_port_variable()) %} - if (B_.spike_input_{{ inport }}_spike_input_received_grid_sum_) + if (B_.spike_input_{{ inport }}_spike_input_received_.get_value(lag)) { - // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer into the "grid_sum" variables resets the RingBuffer entries + // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer resets the RingBuffer entries on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag); } {%- endfor %} @@ -890,9 +886,11 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const /** * spike updates due to convolutions **/ +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} {% filter indent(4) %} {%- include "directives_cpp/ApplySpikesFromBuffers.jinja2" %} {%- endfilter %} +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix("") }} /** * Begin NESTML generated code for the onCondition block(s) @@ -932,6 +930,61 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endif %} {%- endfor %} + + + + + + + + + + + /** + * clear spike buffers at end of timestep (all spikes have been processed at this point) + **/ + +{#- if neuron.get_on_receive_blocks() | length == 0 and 
neuron.get_spike_input_ports() |length > 0 #} +{%- if neuron.get_spike_input_ports() |length > 0 %} +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} + +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + std::list< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.clear(); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); +{%- endfor %} +{%- endif %} + + std::list< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); + __spike_input_{{ inputPort.name }}_list.clear(); +{%- endfor %} +{%- endif %} + + + + + + + + + + + + + + + {%- if use_gap_junctions %} if ( called_from_wfr_update ) { @@ -1041,6 +1094,43 @@ std::cout << "[neuron " << this << "] Writing history at time " << nest::Time(ne return wfr_tol_exceeded; {%- endif %} + + + + + +/* +{%- if neuron.get_spike_input_ports()|length > 0 %} +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = 
utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); + // , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); + //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endfor %} + B_.spike_input_{{ inputPort.name }}_.clear(); + //, spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) +{%- endif %} +{%- endfor %} +{%- endif %} +*/ + + + + + + + } {%- if state_vars_that_need_continuous_buffering | length > 0 %} @@ -1146,17 +1236,22 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) { {%- if spike_in_port.get_parameters() %} {%- for attribute in spike_in_port.get_parameters() %} - B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.add_value( + B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.append_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); {%- endfor %} {%- endif %} // add an unweighted spike to the general "train of delta pulses" input buffer - B_.spike_input_{{ spike_in_port_name }}_.add_value( + //std::cout << "\tappending spike at offset = " << e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()) << "; buffer size = " << B_.spike_input_{{ spike_in_port_name }}_.size() << "; nest::kernel().connection_manager.get_min_delay() = " << nest::kernel().connection_manager.get_min_delay() << "\n"; + //std::cout << "\tappending spike at offset = " << e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()) 
<< " to B_.spike_input_{{ spike_in_port_name }}_, before length = " << B_.spike_input_{{ spike_in_port_name }}_.get_list(e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin())).size() << "\n"; + // B_.spike_input_{{ spike_in_port_name }}_.resize(); + B_.spike_input_{{ spike_in_port_name }}_.append_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_multiplicity() ); + //std::cout << "\tappending spike to B_.spike_input_{{ spike_in_port_name }}_, after length = " << B_.spike_input_{{ spike_in_port_name }}_.get_list(e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin())).size() << "\n"; + // set the "spike received" flag B_.spike_input_{{ spike_in_port_name }}_spike_input_received_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), @@ -1191,23 +1286,120 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) // ------------------------------------------------------------------------- {%- for blk in neuron.get_on_receive_blocks() %} +{%- set inport = utils.port_name_printer(blk.get_input_port_variable()) %} {%- set ast = blk.get_stmts_body() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inport.split(".")[0]) %} +{%- set inputPortSymbol = astnode.get_scope().resolve_to_symbol(inputPort.name, SymbolKind.VARIABLE) %} void {{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag) { +#ifdef DEBUG + std::cout << "[neuron " << this << "] {{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}()" << std::endl; +#endif + const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function + auto get_t = [origin, lag](){ return nest::Time( nest::Time::step( origin.get_steps() + lag + 1) ).get_ms(); }; -{%- if 
blk.get_input_port_variable().has_vector_parameter() %} -boop -{{ blk.get_input_port_variable().get_vector_parameter() }} -{% if utils.is_parameter(blk.get_input_port_variable().get_vector_parameter()) %} -is_parameter + // grab the lists of spike events from the buffers for the current timepoint +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + std::list< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::list< double >::iterator __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.begin(); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::list< double >::iterator __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.begin(); +{%- endfor %} {%- endif %} + + std::list< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); + std::list< double >::iterator __spike_input_{{ inputPort.name }}_list_iterator = __spike_input_{{ inputPort.name }}_list.begin(); + + const size_t n_spikes = __spike_input_{{ inputPort.name }}_list.size(); + std::cout << "\tPROCESSING " << n_spikes << " spikes\n"; + for (size_t spike_idx = 0; spike_idx < n_spikes; ++spike_idx) + { + /** + * Grab the actual spike event data from 
the buffers (for the current timepoint ``origin lag``) + **/ +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = *__spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator; +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = *__spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator; +{%- endfor %} {%- endif %} + const double __spike_input_{{ inputPort.name }} = *__spike_input_{{ inputPort.name }}_list_iterator; -{%- filter indent(2, True) -%} + /** + * Begin NESTML generated code for the onReceive() block statements + **/ + +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} +{% filter indent(4, True) -%} {%- include "directives_cpp/StmtsBody.jinja2" %} {%- endfilter %} +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix("") }} + + /** + * Advance the iterators + **/ +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + ++__spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator; +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + ++__spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator; +{%- endfor %} +{%- endif %} + ++__spike_input_{{ 
inputPort.name }}_list_iterator; + } +/* + // clear the processed spike events from the list +std::cout << "\tclearing spike buffers....\n"; + std::cout << "\tbefore clearing " << __spike_input_{{ inputPort.name }}_list.size() << " spikes\n"; + +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.clear(); +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); +{%- endfor %} +{%- endif %} + + __spike_input_{{ inputPort.name }}_list.clear(); + + + + std::cout << "\tafter clearing " << __spike_input_{{ inputPort.name }}_list.size() << " spikes\n"; + std::cout << "\tafter clearing (orig list) " << B_.spike_input_{{ inputPort.name }}_.get_list(lag).size() << " spikes\n"; +*/ } {% endfor %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 69581e419..edf934e87 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -56,6 +56,7 @@ along with NEST. If not, see . 
// C++ includes: #include +#include #include "config.h" {%- if norm_rng %} @@ -792,23 +793,22 @@ private: {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; + nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} {%- endif %} - nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; + nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_; - nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; + nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - nest::RingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; + nest::ListRingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} - nest::RingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes + nest::ListRingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes nest::RingBuffer spike_input_{{ inputPort.name }}_spike_input_received_; // buffer for the "spike received" boolean flag - double spike_input_{{ inputPort.name }}_grid_sum_; // buffer for unweighted spikes double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; // buffer for the "spike received" boolean flag {%- endif %} {%- endfor %} @@ -877,62 +877,6 @@ private: {%- endfor %} {%- endfilter %} - // 
------------------------------------------------------------------------- - // Getters/setters for spike input buffers - // ------------------------------------------------------------------------- -{% for inputPortSymbol in neuron.get_spike_input_ports() %} - // input port: {{ inputPortSymbol.name.split(".")[0] }} -{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} -{%- if inputPortSymbol.has_vector_parameter() %} -{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} -{%- for i in range(size) %} -{%- if inputPort.get_parameters() %} -{%- for parameter in inputPort.get_parameters() %} - inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_() - { - return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; - } - - inline double get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_() - { - return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; - } -{% endfor %} -{%- endif %} - inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_() - { - return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; - } - - inline double get_spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_() - { - return B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; - } -{%- endfor %} -{%- else %} -{%- for parameter in inputPort.get_parameters() %} - inline nest::RingBuffer& get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}() - { - return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; - } - - inline double get_spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_() - { - return B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; - } -{% endfor %} - inline 
nest::RingBuffer& get_spike_input_{{ inputPort.name }}_spike_input_received_() - { - return B_.spike_input_{{ inputPort.name }}_spike_input_received_; - } - - inline double get_spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_() - { - return B_.spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; - } -{% endif %} -{%- endfor %} - // ------------------------------------------------------------------------- // Getters/setters for continuous-time input buffers // ------------------------------------------------------------------------- diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 index ec23c940f..cd72326bb 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 @@ -105,9 +105,9 @@ class Neuron_{{neuronName}}(Neuron): {%- else %} {% set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} {%- for attribute in ast_input_port.get_parameters() %} - {{ port.get_symbol_name() }}__DOT__{{ attribute.name }}: float = 0. + {{ port.get_symbol_name() }}__DOT__{{ attribute.name }}: List[float] = [] {%- endfor %} - {{ port.get_symbol_name() }}: float = 0. 
# buffer for the port name by itself (train of unweighted delta pulses) + {{ port.get_symbol_name() }}: List[float] = [] # buffer for the port name by itself (train of unweighted delta pulses) spike_received_{{ port.get_symbol_name() }}: bool = False {%- endif %} {%- endfor %} @@ -268,9 +268,11 @@ class Neuron_{{neuronName}}(Neuron): # integrate variables related to convolutions # ------------------------------------------------------------------------- -{%- with analytic_state_variables_ = analytic_state_variables_from_convolutions %} -{%- include "directives_py/AnalyticIntegrationStep_begin.jinja2" %} -{%- endwith %} +{%- filter indent(4, True) -%} +{%- with analytic_state_variables_ = analytic_state_variables_from_convolutions %} +{%- include "directives_py/AnalyticIntegrationStep_begin.jinja2" %} +{%- endwith %} +{%- endfilter %} # ------------------------------------------------------------------------- # NESTML generated code for the update block @@ -289,9 +291,11 @@ class Neuron_{{neuronName}}(Neuron): # integrate variables related to convolutions # ------------------------------------------------------------------------- -{%- with analytic_state_variables_ = analytic_state_variables_from_convolutions %} -{%- include "directives_py/AnalyticIntegrationStep_end.jinja2" %} -{%- endwith %} +{%- filter indent(4, True) -%} +{%- with analytic_state_variables_ = analytic_state_variables_from_convolutions %} +{%- include "directives_py/AnalyticIntegrationStep_end.jinja2" %} +{%- endwith %} +{%- endfilter %} # ------------------------------------------------------------------------- # process spikes from buffers @@ -306,7 +310,9 @@ class Neuron_{{neuronName}}(Neuron): {% for blk in neuron.get_on_receive_blocks() %} if self.B_.spike_received_{{ utils.port_name_printer(blk.get_input_port_variable()) }}: - self.on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}() + # loop over all spikes received since last timestep + for i in 
range(len(self.B_.{{ utils.port_name_printer(blk.get_input_port_variable()) }})): + self.on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}() {%- endfor %} # ------------------------------------------------------------------------- @@ -314,7 +320,11 @@ class Neuron_{{neuronName}}(Neuron): # ------------------------------------------------------------------------- {%- for port in neuron.get_spike_input_ports() %} - self.B_.{{port.get_symbol_name()}} = 0. + self.B_.{{port.get_symbol_name()}} = [] +{%- set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} +{%- for attribute in ast_input_port.get_parameters() %} + self.B_.{{ port.get_symbol_name() }}__DOT__{{ attribute.name }} = [] +{%- endfor %} self.B_.spike_received_{{port.get_symbol_name()}} = False {%- endfor %} @@ -342,15 +352,15 @@ class Neuron_{{neuronName}}(Neuron): def handle(self, t_spike: float, w: float, port_name: str) -> None: {%- for port in neuron.get_spike_input_ports() %} if port_name == "{{ port.name }}": - self.B_.{{ port.get_symbol_name() }} += 1. # unweighted spike port -{% set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} -{%- for attribute in ast_input_port.get_parameters() %} -{% if attribute.name == "weight" %} - self.B_.{{ port.get_symbol_name() }}__DOT__{{ attribute.name }} += abs(w) # unweighted spike port -{% else %} + self.B_.{{ port.get_symbol_name() }}.append(1.) 
# unweighted spike port +{%- set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} +{%- for attribute in ast_input_port.get_parameters() %} +{%- if attribute.name == "weight" %} + self.B_.{{ port.get_symbol_name() }}__DOT__{{ attribute.name }}.append(w) # weighted spike port +{%- else %} {{ raise('The Python-standalone code generator only supports \'weight\' spike input port attribute for now') }} -{% endif %} -{% endfor %} +{%- endif %} +{%- endfor %} self.B_.spike_received_{{ port.get_symbol_name() }} = True return {%- endfor %} @@ -370,7 +380,16 @@ class Neuron_{{neuronName}}(Neuron): {%- for blk in neuron.get_on_receive_blocks() %} {%- set ast = blk.get_stmts_body() %} +{%- set port = blk.get_input_port_variable() %} +{% set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} def on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(self): + # first, grab all the buffer values from the buffers + B__{{ utils.port_name_printer(blk.get_input_port_variable()) }} = self.B_.{{ utils.port_name_printer(blk.get_input_port_variable()) }}.pop() +{%- for attribute in ast_input_port.get_parameters() %} + B__{{ ast_input_port.get_name() }}__DOT__{{ attribute.name }} = self.B_.{{ ast_input_port.get_name() }}__DOT__{{ attribute.name }}.pop() +{%- endfor %} + # XXX: TODO: check that NESTML models are only allowed to have one onReceive block per input port + {%- filter indent(4, True) -%} {%- include "directives_py/StmtsBody.jinja2" %} {%- endfilter %} diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 index fee9771d0..a792b5bb4 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 @@ -340,6 +340,10
@@ class Synapse_{{ astnode.name }}(Synapse): {%- for port in astnode.get_spike_input_ports() %} self.B_.{{port.get_symbol_name()}} = 0. +{%- set ast_input_port = utils.get_input_port_by_name(astnode.get_input_blocks(), port.name) %} +{%- for attribute in ast_input_port.get_parameters() %} + self.B_.{{ port.get_symbol_name() }}__DOT__{{ attribute.name }} = 0. +{%- endfor %} self.B_.spike_received_{{port.get_symbol_name()}} = False {%- endfor %} @@ -368,7 +372,7 @@ class Synapse_{{ astnode.name }}(Synapse): {%- for port in astnode.get_spike_input_ports() %} if port_name == "{{port.name}}": - self.B_.{{port.get_symbol_name()}} += abs(w) + self.B_.{{port.get_symbol_name()}} += w self.B_.spike_received_{{port.get_symbol_name()}} = True return {%- endfor %} diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 index 4c3d511fe..4def932b1 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 @@ -43,8 +43,8 @@ class TestSimulator: sg_inh = simulator.add_neuron(SpikeGenerator(interval=50.)) {% for neuron in neurons %} neuron = simulator.add_neuron(Neuron_{{neuron.get_name()}}(timestep=simulator.timestep)) - simulator.connect(sg_exc, neuron, "spike_in_port",w=1000.) - simulator.connect(sg_inh, neuron, "spike_in_port",w=-4000.) + simulator.connect(sg_exc, neuron, "spike_in_port", w=1000.) + simulator.connect(sg_inh, neuron, "spike_in_port", w=-4000.) 
{% endfor %} simulator.run(t_stop) diff --git a/tests/nest_tests/test_simultaneous_spikes_different_ports.py b/tests/nest_tests/test_simultaneous_spikes_different_ports.py new file mode 100644 index 000000000..238d3c682 --- /dev/null +++ b/tests/nest_tests/test_simultaneous_spikes_different_ports.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# +# test_simultaneous_spikes_different_ports.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +import matplotlib.pyplot as plt +import numpy as np +import os +import pytest + +import nest + +from pynestml.frontend.pynestml_frontend import generate_nest_target + + +class TestSimultaneousSpikesDifferentPorts: + """ + Tests the code generation and running a little simulation. Check that the numerical membrane voltage at the end of the simulation is close to a hard-coded numeric value. 
+ """ + + @pytest.mark.parametrize("neuron_name", ["aeif_cond_exp", "iaf_psc_delta"]) + def test_simultaneous_spikes_different_ports(self, neuron_name: str): + input_path = os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join(os.pardir, os.pardir, "models", "neurons", neuron_name + "_neuron.nestml")))) + target_path = "nestmlmodule" + logging_level = "DEBUG" + suffix = "_nestml" + module_name = "nestmlmodule" + codegen_opts = {} + + generate_nest_target(input_path, target_path, + module_name=module_name, + logging_level=logging_level, + suffix=suffix, + codegen_opts=codegen_opts) + + nest.ResetKernel() + nest.Install(module_name) + + sg_exc = nest.Create("spike_generator", {"spike_times": [10., 20., 30., 40., 50.]}) + sg_exc2 = nest.Create("spike_generator", {"spike_times": [40.]}) + sg_inh = nest.Create("spike_generator", {"spike_times": [20., 40.]}) + # sg_inh = nest.Create("spike_generator", {"spike_times": []}) + + neuron_nest = nest.Create(neuron_name) + neuron_nestml = nest.Create(neuron_name + "_neuron_nestml") + mm_nest = nest.Create("voltmeter") + mm_nestml = nest.Create("voltmeter") + + nest.Connect(sg_exc, neuron_nest) + nest.Connect(sg_exc, neuron_nestml) + nest.Connect(sg_exc2, neuron_nest) + nest.Connect(sg_exc2, neuron_nestml) + nest.Connect(sg_inh, neuron_nest, syn_spec={"weight": -1}) + nest.Connect(sg_inh, neuron_nestml, syn_spec={"weight": -1}) + + nest.Connect(mm_nest, neuron_nest) + nest.Connect(mm_nestml, neuron_nestml) + + nest.Simulate(60.) 
+ + # plot the results + + fig, ax = plt.subplots(nrows=1) + + ax.plot(mm_nest.events["times"], mm_nest.events["V_m"], label="NEST") + ax.plot(mm_nestml.events["times"], mm_nestml.events["V_m"], label="NESTML") + ax.legend() + + fig.savefig("/tmp/test_simultaneous_spikes_different_ports.png") + + # test that membrane potential is the same at the end of the simulation + + assert neuron_nestml.V_m != neuron_nestml.E_L + np.testing.assert_allclose(neuron_nest.V_m, neuron_nestml.V_m) From 022ac91d8f3c602a890409c356525f30d36280fb Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 16 Jan 2025 16:19:54 +0100 Subject: [PATCH 31/68] add attributes to spiking input ports --- .../resources_nest/point_neuron/common/NeuronClass.jinja2 | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 19376ace2..f4682e567 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -70,8 +70,7 @@ along with NEST. If not, see . #include "{{ neuronName }}.h" // uncomment the next line to enable printing of detailed debug information -#define DEBUG -// XXX RESTORE COMMENT LINE ABOVE +// #define DEBUG {% if state_vars_that_need_continuous_buffering | length > 0 %} {%- if continuous_state_buffering_method == "continuous_time_buffer" %} @@ -1322,7 +1321,6 @@ void std::list< double >::iterator __spike_input_{{ inputPort.name }}_list_iterator = __spike_input_{{ inputPort.name }}_list.begin(); const size_t n_spikes = __spike_input_{{ inputPort.name }}_list.size(); - std::cout << "\tPROCESSING " << n_spikes << " spikes\n"; for (size_t spike_idx = 0; spike_idx < n_spikes; ++spike_idx) { /** From 4320b0ce425a7f2cfb93a1ee5d076115d565534e Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 16 Jan 2025 20:37:42 +0100 Subject: [PATCH 32/68] add attributes to spiking input ports --- models/neurons/iaf_psc_delta_neuron.nestml | 4 +- .../codegeneration/nest_code_generator.py | 6 +- .../point_neuron/common/NeuronClass.jinja2 | 161 +++++++++++++++++- .../point_neuron/common/NeuronHeader.jinja2 | 33 +++- tests/nest_tests/recordable_variables_test.py | 2 +- .../resources/RecordableVariables.nestml | 6 +- ..._time_invariant_input_port_optimisation.py | 130 ++++++++++++++ tests/nest_tests/test_multisynapse.py | 3 +- 8 files changed, 328 insertions(+), 17 deletions(-) create mode 100644 tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py diff --git a/models/neurons/iaf_psc_delta_neuron.nestml b/models/neurons/iaf_psc_delta_neuron.nestml index b17c852b3..e48cbba85 100644 --- a/models/neurons/iaf_psc_delta_neuron.nestml +++ b/models/neurons/iaf_psc_delta_neuron.nestml @@ -47,7 +47,7 @@ model iaf_psc_delta_neuron: equations: kernel K_delta = delta(t) - V_m' = -(V_m - E_L) / tau_m + convolve(K_delta, spikes.weight) / s + (I_e + I_stim) / C_m # XXX: TODO: instead of the convolution, this should just read ``... + spikes.weight + ...``. This is a known issue (see https://github.com/nest/nestml/pull/1050). + V_m' = -(V_m - E_L) / tau_m + convolve(K_delta, spike_in_port.weight) / s + (I_e + I_stim) / C_m # XXX: TODO: instead of the convolution, this should just read ``... + spike_in_port.weight + ...``. This is a known issue (see https://github.com/nest/nestml/pull/1050). refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) parameters: @@ -64,7 +64,7 @@ model iaf_psc_delta_neuron: I_e pA = 0 pA input: - spikes <- spike(weight mV) + spike_in_port <- spike(weight mV) I_stim pA <- continuous output: diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index fd6d4531e..ee667f38b 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -118,6 +118,7 @@ class NESTCodeGenerator(CodeGenerator): - **continuous_state_buffering_method**: Which method to use for buffering state variables between neuron and synapse pairs. When a synapse has a "continuous" input port, connected to a postsynaptic neuron, either the value is obtained taking the synaptic (dendritic, that is, synapse-soma) delay into account, requiring a buffer to store the value at each timepoint (``continuous_state_buffering_method = "continuous_time_buffer"); or the value is obtained at the times of the somatic spikes of the postsynaptic neuron, ignoring the synaptic delay (``continuous_state_buffering_method == "post_spike_based"``). The former is more physically accurate but requires a large buffer and can require a long time to simulate. The latter ignores the dendritic delay but is much more computationally efficient. - **delay_variable**: A mapping identifying, for each synapse (the name of which is given as a key), the variable or parameter in the model that corresponds with the NEST ``Connection`` class delay property. - **weight_variable**: Like ``delay_variable``, but for synaptic weight. + - **linear_time_invariant_spiking_input_ports**: A list of spiking input ports which can be treated as linear and time-invariant; this implies that, for the given port(s), the weight of all spikes received within a timestep can be added together, improving memory consumption and runtime performance. 
Use with caution, for example, this is not compatible with using an input port as one processing inhibitory vs. excitatory spikes depending on the sign of the weight of the spike event. - **redirect_build_output**: An optional boolean key for redirecting the build output. Setting the key to ``True``, two files will be created for redirecting the ``stdout`` and the ``stderr`. The ``target_path`` will be used as the default location for creating the two files. - **build_output_dir**: An optional string key representing the new path where the files corresponding to the output of the build phase will be created. This key requires that the ``redirect_build_output`` is set to ``True``. @@ -150,7 +151,8 @@ class NESTCodeGenerator(CodeGenerator): "numeric_solver": "rk45", "continuous_state_buffering_method": "continuous_time_buffer", "delay_variable": {}, - "weight_variable": {} + "weight_variable": {}, + "linear_time_invariant_spiking_input_ports": [] } def __init__(self, options: Optional[Mapping[str, Any]] = None): @@ -531,6 +533,8 @@ def _get_model_namespace(self, astnode: ASTModel) -> Dict: if "continuous_post_ports" in dir(astnode): namespace["continuous_post_ports"] = astnode.continuous_post_ports + namespace["linear_time_invariant_spiking_input_ports"] = self.get_option("linear_time_invariant_spiking_input_ports") + return namespace def _get_synapse_model_namespace(self, synapse: ASTModel) -> Dict: diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index f4682e567..3bcaec42a 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -201,19 +201,37 @@ namespace nest {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} +{%- if inputPortSymbol.name in 
linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) - // , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endif %} {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) - //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endif %} {%- endfor %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_grid_sum_( 0. 
) +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}_( nest::ListRingBuffer() ) +{%- endif %} , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) - //, spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) {%- endif %} {%- endfor %} {%- endif %} @@ -236,8 +254,15 @@ namespace nest {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} + +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) - //, spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endif %} {%- endfor %} {%- else %} ????????????? @@ -245,12 +270,27 @@ namespace nest {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} + + + +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. 
) +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) - //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) +{%- endif %} {%- endfor %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}_( nest::ListRingBuffer() ) +{%- endif %} , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) - //, spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) {%- endif %} {%- endfor %} {%- endif %} @@ -773,19 +813,62 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} + + + {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); + + +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can 
be added together into a single number #} + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_value(lag); + const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} + const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); +{%- endif %} + + + + {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} + + +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); + const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); +{%- endif %} + + + + + {%- endfor %} + +{%- if inputPortSymbol.name in 
linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + B_.spike_input_{{ inputPort.name }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_.get_value(lag); + const double __spike_input_{{ inputPort.name }} = B_.spike_input_{{ inputPort.name }}_grid_sum_; +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} + const double __spike_input_{{ inputPort.name }} = std::accumulate(B_.spike_input_{{ inputPort.name }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}_.get_list(lag).end(), 0.0); +{%- endif %} + + + + {%- endif %} {%- endfor %} @@ -948,6 +1031,10 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} + +{%- if inputPortSymbol.name not in linear_time_invariant_spiking_input_ports %} + + {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} @@ -963,10 +1050,13 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} -{%- endif %} std::list< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); __spike_input_{{ inputPort.name }}_list.clear(); + +{%- endif %} +{%- endif %} + {%- endfor %} {%- endif %} @@ -1235,9 +1325,21 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) { {%- if spike_in_port.get_parameters() %} {%- for attribute in spike_in_port.get_parameters() 
%} + + +{%- if spike_in_port_name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.add_value( + e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), + e.get_weight() * e.get_multiplicity() ); +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.append_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); +{%- endif %} + + {%- endfor %} {%- endif %} @@ -1245,9 +1347,18 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) //std::cout << "\tappending spike at offset = " << e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()) << "; buffer size = " << B_.spike_input_{{ spike_in_port_name }}_.size() << "; nest::kernel().connection_manager.get_min_delay() = " << nest::kernel().connection_manager.get_min_delay() << "\n"; //std::cout << "\tappending spike at offset = " << e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()) << " to B_.spike_input_{{ spike_in_port_name }}_, before length = " << B_.spike_input_{{ spike_in_port_name }}_.get_list(e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin())).size() << "\n"; // B_.spike_input_{{ spike_in_port_name }}_.resize(); + +{%- if spike_in_port_name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + B_.spike_input_{{ spike_in_port_name }}_.add_value( + e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), + e.get_multiplicity() ); +{%- else %} +{#- generic input port: use lists of spike events for 
each buffer slot #} B_.spike_input_{{ spike_in_port_name }}_.append_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_multiplicity() ); +{%- endif %} //std::cout << "\tappending spike to B_.spike_input_{{ spike_in_port_name }}_, after length = " << B_.spike_input_{{ spike_in_port_name }}_.get_list(e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin())).size() << "\n"; @@ -1299,6 +1410,39 @@ void const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function auto get_t = [origin, lag](){ return nest::Time( nest::Time::step( origin.get_steps() + lag + 1) ).get_ms(); }; +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + /** + * Grab the actual spike event data from the buffers (for the current timepoint ``origin + lag``) + **/ +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- endfor %} +{%- endif %} + const double __spike_input_{{ inputPort.name }} = B_.spike_input_{{ inputPort.name }}_grid_sum_; + + + /** + * Begin NESTML generated code for the onReceive() block statements + **/ + +{{ 
printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} +{% filter indent(4, True) -%} +{%- include "directives_cpp/StmtsBody.jinja2" %} +{%- endfilter %} +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix("") }} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} // grab the lists of spike events from the buffers for the current timepoint {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} @@ -1398,6 +1542,7 @@ std::cout << "\tclearing spike buffers....\n"; std::cout << "\tafter clearing " << __spike_input_{{ inputPort.name }}_list.size() << " spikes\n"; std::cout << "\tafter clearing (orig list) " << B_.spike_input_{{ inputPort.name }}_.get_list(lag).size() << " spikes\n"; */ +{%- endif %} } {% endfor %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index edf934e87..0f9c5c77c 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -788,6 +788,36 @@ private: {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} // input port: {{ inputPort.name }} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} + +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- for parameter in inputPort.get_parameters() %} + nest::RingBuffer spike_input_{{ inputPort.name 
}}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- endfor %} +{%- endif %} + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_; + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; +{%- endfor %} +{%- else %} +{%- for parameter in inputPort.get_parameters() %} + nest::RingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; + double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; +{%- endfor %} + nest::RingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes + double spike_input_{{ inputPort.name }}_grid_sum_; // buffer for unweighted spikes + nest::RingBuffer spike_input_{{ inputPort.name }}_spike_input_received_; // buffer for the "spike received" boolean flag + double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; // buffer for the "spike received" boolean flag +{%- endif %} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} + {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} @@ -799,7 +829,7 @@ private: {%- endif %} nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_; - nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; {%- endfor %} {%- else %} @@ -811,6 +841,7 @@ private: nest::RingBuffer spike_input_{{ inputPort.name 
}}_spike_input_received_; // buffer for the "spike received" boolean flag double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; // buffer for the "spike received" boolean flag {%- endif %} +{%- endif %} {%- endfor %} // ----------------------------------------------------------------------- diff --git a/tests/nest_tests/recordable_variables_test.py b/tests/nest_tests/recordable_variables_test.py index b8c126d23..9b6f59966 100644 --- a/tests/nest_tests/recordable_variables_test.py +++ b/tests/nest_tests/recordable_variables_test.py @@ -62,7 +62,7 @@ def test_recordable_variables(self): sg = nest.Create("spike_generator", params={"spike_times": [20., 80.]}) nest.Connect(sg, neuron) - mm = nest.Create('multimeter', params={'record_from': ['V_ex', 'V_rel', 'V_m', 'I_kernel__X__spikes'], + mm = nest.Create('multimeter', params={'record_from': ['V_ex', 'V_rel', 'V_m', 'I_kernel__X__spike_in_port__DOT__weight'], 'interval': 0.1}) nest.Connect(mm, neuron) diff --git a/tests/nest_tests/resources/RecordableVariables.nestml b/tests/nest_tests/resources/RecordableVariables.nestml index d31496002..4be47c96f 100644 --- a/tests/nest_tests/resources/RecordableVariables.nestml +++ b/tests/nest_tests/resources/RecordableVariables.nestml @@ -34,8 +34,8 @@ model recordable_variables: V_rel mV = 0 mV # Membrane potential relative to the reset potential equations: - kernel I_kernel = exp(-1/tau_syn*t) - inline I_syn pA = convolve(I_kernel, spikes) * pA + kernel I_kernel = exp(-t / tau_syn) + inline I_syn pA = convolve(I_kernel, spike_in_port.weight) recordable inline V_m mV = V_rel + V_reset V_rel' = -V_rel / tau_m + (I_syn + I_e + I_stim) / C_m @@ -48,7 +48,7 @@ model recordable_variables: V_thr mV = -55 mV input: - spikes <- spike + spike_in_port <- spike(weight pA) I_stim pA <- continuous update: diff --git a/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py b/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py new file mode 
100644 index 000000000..51fbf27e8 --- /dev/null +++ b/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# +# test_linear_time_invariant_input_port_optimisation.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from typing import Optional + +import matplotlib.pyplot as plt +import numpy as np +import os +import pytest + +import nest + +from pynestml.frontend.pynestml_frontend import generate_nest_target + +TestLinearTimeInvariantInputPortOptimisation_neuron_types = ["aeif_cond_exp", "iaf_psc_delta"] + + +class TestLinearTimeInvariantInputPortOptimisation: + """ + Test that the optimisations with the ``linear_time_invariant_spiking_input_ports`` NEST code generator option are working correctly. 
+ """ + + module_name: Optional[str] = None + + @pytest.fixture(scope="module", autouse=True) + def generate_code(self): + TestLinearTimeInvariantInputPortOptimisation.module_name = "nestmlmodule" # unfortunately, pytest only allows us to set static attributes on the class + input_path = [os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.join(os.pardir, os.pardir, "models", "neurons", neuron_name + "_neuron.nestml")))) for neuron_name in TestLinearTimeInvariantInputPortOptimisation_neuron_types] + target_path = "nestmlmodule" + logging_level = "DEBUG" + suffix = "_nestml" + codegen_opts = {"linear_time_invariant_spiking_input_ports": ["spike_in_port"]} + + generate_nest_target(input_path, target_path, + module_name=TestLinearTimeInvariantInputPortOptimisation.module_name, + logging_level=logging_level, + suffix=suffix, + codegen_opts=codegen_opts) + + @pytest.mark.xfail + @pytest.mark.parametrize("neuron_name", TestLinearTimeInvariantInputPortOptimisation_neuron_types) + def test_simultaneous_spikes_different_ports(self, neuron_name: str): + r"""This is known to not work if there are simultaneous spikes!""" + spike_times_sg_exc = [10., 20., 30., 40., 50.] + spike_times_sg_exc2 = [40.] + spike_times_sg_inh = [20., 40.] + self.run_experiment(neuron_name, + spike_times_sg_exc, + spike_times_sg_exc2, + spike_times_sg_inh) + + @pytest.mark.xfail + @pytest.mark.parametrize("neuron_name", TestLinearTimeInvariantInputPortOptimisation_neuron_types) + def test_simultaneous_spikes_different_ports2(self, neuron_name: str): + r"""This is known to not work if there are simultaneous spikes!""" + spike_times_sg_exc = [10., 20., 30., 40., 50.] + spike_times_sg_exc2 = [0.] + spike_times_sg_inh = [20., 40.] 
+ self.run_experiment(neuron_name, + spike_times_sg_exc, + spike_times_sg_exc2, + spike_times_sg_inh) + + @pytest.mark.parametrize("neuron_name", TestLinearTimeInvariantInputPortOptimisation_neuron_types) + def test_non_simultaneous_spikes_different_ports(self, neuron_name: str): + spike_times_sg_exc = [10., 20., 30., 40., 50.] + spike_times_sg_exc2 = [45.] + spike_times_sg_inh = [25., 55.] + self.run_experiment(neuron_name, + spike_times_sg_exc, + spike_times_sg_exc2, + spike_times_sg_inh) + + def run_experiment(self, neuron_name, spike_times_sg_exc, spike_times_sg_exc2, spike_times_sg_inh): + nest.ResetKernel() + nest.Install(TestLinearTimeInvariantInputPortOptimisation.module_name) + + sg_exc = nest.Create("spike_generator", {"spike_times": spike_times_sg_exc}) + sg_exc2 = nest.Create("spike_generator", {"spike_times": spike_times_sg_exc2}) + sg_inh = nest.Create("spike_generator", {"spike_times": spike_times_sg_inh}) + + neuron_nest = nest.Create(neuron_name) + neuron_nestml = nest.Create(neuron_name + "_neuron_nestml") + mm_nest = nest.Create("voltmeter") + mm_nestml = nest.Create("voltmeter") + + nest.Connect(sg_exc, neuron_nest) + nest.Connect(sg_exc, neuron_nestml) + nest.Connect(sg_exc2, neuron_nest) + nest.Connect(sg_exc2, neuron_nestml) + nest.Connect(sg_inh, neuron_nest, syn_spec={"weight": -1}) + nest.Connect(sg_inh, neuron_nestml, syn_spec={"weight": -1}) + + nest.Connect(mm_nest, neuron_nest) + nest.Connect(mm_nestml, neuron_nestml) + + nest.Simulate(60.) 
+ + # plot the results + + fig, ax = plt.subplots(nrows=1) + + ax.plot(mm_nest.events["times"], mm_nest.events["V_m"], label="NEST") + ax.plot(mm_nestml.events["times"], mm_nestml.events["V_m"], label="NESTML") + ax.legend() + + fig.savefig("/tmp/test_simultaneous_spikes_different_ports_[neuron=" + neuron_name + "].png") + + # test that membrane potential is the same at the end of the simulation + + assert neuron_nestml.V_m != neuron_nestml.E_L + np.testing.assert_allclose(neuron_nest.V_m, neuron_nestml.V_m) diff --git a/tests/nest_tests/test_multisynapse.py b/tests/nest_tests/test_multisynapse.py index 3080b2260..add2b5fee 100644 --- a/tests/nest_tests/test_multisynapse.py +++ b/tests/nest_tests/test_multisynapse.py @@ -19,11 +19,12 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -import nest import numpy as np import os import pytest +import nest + from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_nest_target From 70537d4c5782ccf1e530b888e46a604ae7d23437 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Thu, 16 Jan 2025 22:28:27 +0100 Subject: [PATCH 33/68] add attributes to spiking input ports --- .../izhikevich/izhikevich_solution.nestml | 14 +++++++------- doc/tutorials/izhikevich/izhikevich_task.nestml | 4 ++-- .../iaf_psc_exp_nonlineardendrite_neuron.nestml | 12 ++++++------ .../models/iaf_psc_alpha.nestml | 6 +++--- .../models/iaf_psc_alpha_adapt_curr.nestml | 6 +++--- .../models/iaf_psc_alpha_adapt_thresh.nestml | 6 +++--- .../models/iaf_psc_alpha_adapt_thresh_OU.nestml | 6 +++--- .../stdp_third_factor_active_dendrite.ipynb | 10 +++++----- 8 files changed, 32 insertions(+), 32 deletions(-) diff --git a/doc/tutorials/izhikevich/izhikevich_solution.nestml b/doc/tutorials/izhikevich/izhikevich_solution.nestml index c7f99b877..66c0f3681 100644 --- a/doc/tutorials/izhikevich/izhikevich_solution.nestml +++ b/doc/tutorials/izhikevich/izhikevich_solution.nestml @@ -1,8 +1,8 @@ model izhikevich_tutorial_neuron: state: - v mV = -65 mV # Membrane potential in mV - u real = 0 # Membrane potential recovery variable + v mV = -65 mV # Membrane potential in mV + u real = 0 # Membrane potential recovery variable equations: v' = (.04 * v * v / mV + 5 * v + (140 - u) * mV + (I_e * GOhm)) / ms @@ -10,12 +10,12 @@ model izhikevich_tutorial_neuron: parameters: a real = .02 # describes time scale of recovery variable - b real = .2 # sensitivity of recovery variable - c mV = -65 mV # after-spike reset value of v - d real = 8. # after-spike reset value of u + b real = .2 # sensitivity of recovery variable + c mV = -65 mV # after-spike reset value of v + d real = 8. 
# after-spike reset value of u input: - spikes <- spike + spikes <- spike(weight mV) I_e pA <- continuous output: @@ -26,7 +26,7 @@ model izhikevich_tutorial_neuron: onReceive(spikes): # add synaptic current - v += spikes * mV * s + v += spikes.weight onCondition(v >= 30mV): # threshold crossing diff --git a/doc/tutorials/izhikevich/izhikevich_task.nestml b/doc/tutorials/izhikevich/izhikevich_task.nestml index c26c304c7..22339f4c0 100644 --- a/doc/tutorials/izhikevich/izhikevich_task.nestml +++ b/doc/tutorials/izhikevich/izhikevich_task.nestml @@ -15,7 +15,7 @@ model izhikevich_tutorial_neuron: # TODO: add remaining variables input: - spikes <- spike + spikes <- spike(weight mV) I_e pA <- continuous output: @@ -26,7 +26,7 @@ model izhikevich_tutorial_neuron: onReceive(spikes): # add synaptic current - v += spikes * mV * s + v += spikes.weight onCondition(v >= 30mV): # TODO: implement threshold crossing check diff --git a/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml b/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml index 790ef7391..9c7a9163b 100644 --- a/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml +++ b/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml @@ -48,8 +48,8 @@ model iaf_psc_exp_nonlineardendrite_neuron: kernel I_kernel3 = exp(-1/tau_syn3*t) # diff. eq. for membrane potential - inline I_syn pA = convolve(I_kernel1, I_1) * pA - convolve(I_kernel3, I_3) * pA + I_e - V_m' = -(V_m - E_L)/tau_m + (I_syn + I_dend) / C_m + inline I_syn pA = convolve(I_kernel1, I_1.weight) - convolve(I_kernel3, I_3.weight) + I_e + V_m' = -(V_m - E_L) / tau_m + (I_syn + I_dend) / C_m # diff. eq. 
for dAP trace dAP_trace' = -evolve_dAP_trace * dAP_trace / tau_h @@ -81,15 +81,15 @@ model iaf_psc_exp_nonlineardendrite_neuron: input: - I_1 <- spike - I_2 <- spike - I_3 <- spike + I_1 <- spike(weight pA) + I_2 <- spike(weight real) + I_3 <- spike(weight pA) output: spike onReceive(I_2): - I_dend$ += I_2 * s * I_dend_incr + I_dend$ += I_2.weight * I_dend_incr update: # solve ODEs diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml index faf90cc7c..52cd89ecf 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha.nestml @@ -66,7 +66,7 @@ model iaf_psc_alpha_neuron: equations: kernel I_kernel_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) kernel I_kernel_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) - inline I pA = (convolve(I_kernel_exc, exc_spikes) - convolve(I_kernel_inh, inh_spikes)) * pA + I_e + I_stim + inline I pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) + I_e + I_stim V_m' = -(V_m - E_L) / tau_m + I / C_m parameters: @@ -83,8 +83,8 @@ model iaf_psc_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml index 02e0ad23f..4e8e80ab8 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_curr.nestml @@ -69,7 +69,7 @@ model iaf_psc_alpha_adapt_curr_neuron: equations: kernel I_kernel_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) kernel I_kernel_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) - inline I pA 
= (convolve(I_kernel_exc, exc_spikes) - convolve(I_kernel_inh, inh_spikes)) * pA - I_sfa + I_e + I_stim + inline I pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) - I_sfa + I_e + I_stim V_m' = -(V_m - E_L) / tau_m + I / C_m I_sfa' = -I_sfa / tau_sfa @@ -90,8 +90,8 @@ model iaf_psc_alpha_adapt_curr_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml index 791099c13..f6c09447c 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml +++ b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh.nestml @@ -69,7 +69,7 @@ model iaf_psc_alpha_adapt_thresh_neuron: equations: kernel I_kernel_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) kernel I_kernel_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) - inline I pA = (convolve(I_kernel_exc, exc_spikes) - convolve(I_kernel_inh, inh_spikes)) * pA + I_e + I_stim + inline I pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) + I_e + I_stim V_m' = -(V_m - E_L) / tau_m + I / C_m Theta' = -(Theta - Theta_init) / tau_Theta @@ -90,8 +90,8 @@ model iaf_psc_alpha_adapt_thresh_neuron: I_e pA = 0 pA input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml index 8f9926813..e80c843cd 100644 --- a/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml +++ 
b/doc/tutorials/spike_frequency_adaptation/models/iaf_psc_alpha_adapt_thresh_OU.nestml @@ -71,7 +71,7 @@ model iaf_psc_alpha_adapt_thresh_OU_neuron: equations: kernel I_kernel_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) kernel I_kernel_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) - inline I pA = (convolve(I_kernel_exc, exc_spikes) - convolve(I_kernel_inh, inh_spikes)) * pA + I_e + I_stim + I_noise + inline I pA = convolve(I_kernel_exc, exc_spikes.weight) - convolve(I_kernel_inh, inh_spikes.weight) + I_e + I_stim + I_noise V_m' = -(V_m - E_L) / tau_m + I / C_m Theta' = -(Theta - Theta_init) / tau_Theta @@ -99,8 +99,8 @@ model iaf_psc_alpha_adapt_thresh_OU_neuron: A_noise pA = ((D_noise * tau_syn_exc / 2) * (1 - exp(-2 * resolution() / tau_syn_exc )))**.5 input: - exc_spikes <- spike - inh_spikes <- spike + exc_spikes <- spike(weight pA) + inh_spikes <- spike(weight pA) I_stim pA <- continuous output: diff --git a/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb b/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb index a6307815d..1b0c45db6 100644 --- a/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb +++ b/doc/tutorials/stdp_third_factor_active_dendrite/stdp_third_factor_active_dendrite.ipynb @@ -334,8 +334,8 @@ " # alpha shaped postsynaptic current kernel\n", " kernel syn_kernel = (e / tau_syn) * t * exp(-t / tau_syn)\n", " kernel sg_kernel = delta(t)\n", - " recordable inline I_syn pA = convolve(syn_kernel, synaptic_spikes) * pA\n", - " V_m' = -(V_m - E_L) / tau_m + (I_syn + I_dAP + I_e) / C_m + convolve(sg_kernel, spike_generator_spikes) * mV / s\n", + " recordable inline I_syn pA = convolve(syn_kernel, synaptic_spikes.weight)\n", + " V_m' = -(V_m - E_L) / tau_m + (I_syn + I_dAP + I_e) / C_m + convolve(sg_kernel, spike_generator_spikes.weight) / s\n", " I_dAP' = -I_dAP / tau_dAP\n", "\n", " parameters:\n", @@ -355,10 +355,10 @@ " 
reset_I_dAP_after_AP boolean = true\n", "\n", " input:\n", - " synaptic_spikes <- spike\n", - " spike_generator_spikes <- spike\n", + " synaptic_spikes <- spike(weight pA)\n", + " spike_generator_spikes <- spike(weight mV)\n", "\n", - " output: \n", + " output:\n", " spike\n", "\n", " update:\n", From 5cbe0e3872890832e39b487fc9d98c990a3cc507 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 16 Jan 2025 22:34:39 +0100 Subject: [PATCH 34/68] add attributes to spiking input ports --- .../test_linear_time_invariant_input_port_optimisation.py | 3 +++ tests/nest_tests/test_simultaneous_spikes_different_ports.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py b/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py index 51fbf27e8..81f5df609 100644 --- a/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py +++ b/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py @@ -28,11 +28,14 @@ import nest +from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_nest_target TestLinearTimeInvariantInputPortOptimisation_neuron_types = ["aeif_cond_exp", "iaf_psc_delta"] +@pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") class TestLinearTimeInvariantInputPortOptimisation: """ Test that the optimisations with the ``linear_time_invariant_spiking_input_ports`` NEST code generator option are working correctly. 
diff --git a/tests/nest_tests/test_simultaneous_spikes_different_ports.py b/tests/nest_tests/test_simultaneous_spikes_different_ports.py index 238d3c682..c22f14ca1 100644 --- a/tests/nest_tests/test_simultaneous_spikes_different_ports.py +++ b/tests/nest_tests/test_simultaneous_spikes_different_ports.py @@ -26,9 +26,12 @@ import nest +from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_nest_target +@pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), + reason="This test does not support NEST 2") class TestSimultaneousSpikesDifferentPorts: """ Tests the code generation and running a little simulation. Check that the numerical membrane voltage at the end of the simulation is close to a hard-coded numeric value. From 74c92f2d939cc58a989a73b35edfd5930fa2d927 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Fri, 17 Jan 2025 14:45:40 +0100 Subject: [PATCH 35/68] add attributes to spiking input ports --- .../nestml_active_dendrite_tutorial.ipynb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/tutorials/active_dendrite/nestml_active_dendrite_tutorial.ipynb b/doc/tutorials/active_dendrite/nestml_active_dendrite_tutorial.ipynb index 78345262e..8b8f2a30a 100644 --- a/doc/tutorials/active_dendrite/nestml_active_dendrite_tutorial.ipynb +++ b/doc/tutorials/active_dendrite/nestml_active_dendrite_tutorial.ipynb @@ -135,7 +135,7 @@ " equations:\n", " # alpha shaped postsynaptic current kernel\n", " kernel syn_kernel = (e / tau_syn) * t * exp(-t / tau_syn)\n", - " recordable inline I_syn pA = convolve(syn_kernel, spikes_in) * pA\n", + " recordable inline I_syn pA = convolve(syn_kernel, spikes_in.weight)\n", " V_m' = -(V_m - E_L) / tau_m + (I_syn + I_dAP + I_e) / C_m\n", "\n", " parameters:\n", @@ -153,7 +153,7 @@ " T_dAP ms = 10 ms # time window over which the dendritic current clamp is active\n", "\n", " input:\n", - " spikes_in <- spike\n", + " spikes_in <- 
spike(weight pA)\n", "\n", " output: \n", " spike\n", @@ -538,7 +538,7 @@ " equations:\n", " # alpha shaped postsynaptic current kernel\n", " kernel syn_kernel = (e / tau_syn) * t * exp(-t / tau_syn)\n", - " recordable inline I_syn pA = convolve(syn_kernel, spikes_in) * pA\n", + " recordable inline I_syn pA = convolve(syn_kernel, spikes_in.weight)\n", " V_m' = -(V_m - E_L) / tau_m + (enable_I_syn * I_syn + I_dAP + I_e) / C_m\n", "\n", " parameters:\n", @@ -556,7 +556,7 @@ " T_dAP ms = 10 ms # time window over which the dendritic current clamp is active\n", "\n", " input:\n", - " spikes_in <- spike\n", + " spikes_in <- spike(weight pA)\n", "\n", " output:\n", " spike\n", @@ -772,7 +772,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, From 110eb70bb7b1f6d62f37bbdd87b3c9f27808493a Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 13 Feb 2025 16:18:15 +0100 Subject: [PATCH 36/68] merge upstream/master --- .../nestml_language_concepts.rst | 16 +- tests/test_cocos.py | 626 +++++++++--------- 2 files changed, 308 insertions(+), 334 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 17f205dc7..55fc172f5 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -878,7 +878,7 @@ The spiking input port name ``spikes_in`` can subsequently be used in the right- .. math:: - \frac{dx}{dt} = -x / tau + \mathrm{spikes_in}(t) + \frac{dx}{dt} = -x / \tau + \mathrm{spikes\_in}(t) If ``x`` is a real number, then the units here are consistent. This can be written in NESTML as: @@ -890,7 +890,7 @@ If ``x`` is a real number, then the units here are consistent. This can be writt .. 
math:: - \frac{dx}{dt} = -x / tau + (K \ast \mathrm{spikes_in}) / s + \frac{dx}{dt} = -x / \tau + (K \ast \mathrm{spikes\_in}) / s This can be written in NESTML as: @@ -916,8 +916,6 @@ Handling spiking input by event handlers An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: - - .. code-block:: nestml onReceive(pre_spikes): @@ -926,6 +924,8 @@ An ``onReceive`` block can be defined for every spiking input port, for example, The statements in the event handler will be executed when the event occurs. +An event handler integrates the system from "just before" the event to "just after" the event. If the spike event occurs at time :math:`t_k`, then the event handler integrates the system in time from "just before the event" :math:`t_k^-` to "just after" the event :math:`t_k^+`. + To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority. For example: .. code-block:: nestml @@ -1179,9 +1179,9 @@ Inside the ``update`` block, the current time can be retrieved via the predefine Integrating the ODEs ~~~~~~~~~~~~~~~~~~~~ -Integrating the ODEs needs to be triggered explicitly inside the ``update`` block by calling the ``integrate_odes()`` function. Making this call explicit forces the model to be precise about the sequence of steps that needs to be carried out to step the model state forward in time. +Integrating the ODEs needs to be triggered explicitly inside the ``update`` block by calling the ``integrate_odes()`` function. 
Making this call explicit allows subtle differences in integration sequence to be expressed, as well as making it explicit that some variables but not others are integrated; for example, if a neuron is in an absolute refractory state, we might want to skip integrating the differential equation for the membrane potential. -The ``integrate_odes()`` function numerically integrates the differential equations defined in the ``equations`` block. Integrating the ODEs from one timestep to the next has to be explicitly carried out in the model by calling the ``integrate_odes()`` function. If no parameters are given, all ODEs in the model are integrated. Integration can be limited to a given set of ODEs by giving their left-hand side state variables as parameters to the function, for example ``integrate_odes(V_m, I_ahp)`` if ODEs exist for the variables ``V_m`` and ``I_ahp``. In this example, these variables are integrated simultaneously (as one single system of equations). This is different from calling ``integrate_odes(V_m)`` and then ``integrate_odes(I_ahp)`` in that the second call would use the already-updated values from the first call. Variables not included in the call to ``integrate_odes()`` are assumed to remain constant (both inside the numeric solver stepping function as well as from before to after the call). +The ``integrate_odes()`` function numerically integrates differential equations defined in the ``equations`` block. If no parameters are given, all ODEs defined in the model are integrated. Integration can be limited to a given set of ODEs by giving their left-hand side state variables as parameters to the function, for example ``integrate_odes(V_m, I_ahp)`` if ODEs exist for the variables ``V_m`` and ``I_ahp``. In this example, these variables are integrated simultaneously (as one single system of equations). 
This is different from calling ``integrate_odes(V_m)`` and then ``integrate_odes(I_ahp)``, in that the second call would use the already-updated state values from the first call. Variables not included in the call to ``integrate_odes()`` are assumed to remain constant (both inside the numeric solver stepping function as well as from before to after the call). In case of higher-order ODEs of the form ``F(x'', x', x) = 0``, the solution ``x(t)`` is obtained by just providing the variable ``x`` to the ``integrate_odes`` function. For example, @@ -1197,9 +1197,7 @@ In case of higher-order ODEs of the form ``F(x'', x', x) = 0``, the solution ``x update: integrate_odes(x) -Here, ``integrate_odes(x)`` integrates the entire dynamics of ``x(t)``, in this case, ``x`` and ``x'``. - -Note that the dynamical equations that correspond to convolutions are always updated, regardless of whether ``integrate_odes()`` is called. The state variables affected by incoming events are updated at the end of each timestep, that is, within one timestep, the state as observed by statements in the ``update`` block will be those at :math:`t^-`, i.e. "just before" it has been updated due to the events. See also :ref:`Integrating spiking input` and :ref:`Integration order`. +Here, ``integrate_odes(x)`` integrates variables of all order; in this case, ``x`` and ``x'``. The state variables affected by incoming events are updated at the end of each timestep, that is, within one timestep, the state as observed by statements in the ``update`` block will be those at :math:`t^-`, i.e. "just before" it has been updated due to the events. See also :ref:`Integrating spiking input` and :ref:`Integration order`. ODEs that can be solved analytically are integrated to machine precision from one timestep to the next using the propagators obtained from `ODE-toolbox `_. 
In case a numerical solver is used (such as Runge-Kutta or forward Euler), the same ODEs are also evaluated numerically by the numerical solver to allow more precise values for analytically solvable ODEs *within* a timestep. In this way, the long-term dynamics obeys the analytic (more exact) equations, while the short-term (within one timestep) dynamics is evaluated to the precision of the numerical integrator. diff --git a/tests/test_cocos.py b/tests/test_cocos.py index a357bd421..de9044760 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -56,374 +56,350 @@ def test_invalid_element_defined_after_usage(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableDefinedAfterUsage.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - def test_spiking_input_port_on_equation_rhs_outside_convolve(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')), 'spiking_input_port_on_equation_rhs_outside_convolve.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + def test_valid_element_defined_after_usage(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableDefinedAfterUsage.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_spiking_input_port_on_equation_rhs_outside_convolve2(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'resources')), 'spiking_input_port_on_equation_rhs_outside_convolve2.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + def test_invalid_element_in_same_line(self): + model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoElementInSameLine.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + def test_valid_element_in_same_line(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoElementInSameLine.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_integrate_odes_called_if_equations_defined(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + def test_valid_integrate_odes_called_if_equations_defined(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_element_not_defined_in_scope(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableNotDefined.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 6 + + def test_valid_element_not_defined_in_scope(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableNotDefined.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_variable_with_same_name_as_unit(self): + Logger.set_logging_level(LoggingLevel.NO) + model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableWithSameNameAsUnit.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 + + def test_invalid_variable_redeclaration(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableRedeclared.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + def test_valid_variable_redeclaration(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableRedeclared.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_each_block_unique(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoEachBlockUnique.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + def test_valid_each_block_unique(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoEachBlockUnique.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_function_unique_and_defined(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionNotUnique.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + + def test_valid_function_unique_and_defined(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionNotUnique.nestml')) + assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_inline_expressions_have_rhs(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionHasNoRhs.nestml')) + assert model is None # parse error + + def test_valid_inline_expressions_have_rhs(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionHasNoRhs.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_inline_expression_has_several_lhs(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) + assert model is None # parse error + + def test_valid_inline_expression_has_several_lhs(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_no_values_assigned_to_input_ports(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoValueAssignedToInputPort.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + def test_invalid_order_of_equations_correct(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNoOrderOfEquations.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + def test_valid_order_of_equations_correct(self): + model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNoOrderOfEquations.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_numerator_of_unit_one(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoUnitNumeratorNotOne.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + def test_valid_numerator_of_unit_one(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoUnitNumeratorNotOne.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_names_of_neurons_unique(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + + def test_valid_names_of_neurons_unique(self): + self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)) == 0 - # def test_valid_element_defined_after_usage(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableDefinedAfterUsage.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_no_nest_collision(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNestNamespaceCollision.nestml')) + assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_element_in_same_line(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoElementInSameLine.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_no_nest_collision(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNestNamespaceCollision.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_element_in_same_line(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoElementInSameLine.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_integrate_odes_called_if_equations_defined(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def 
test_valid_integrate_odes_called_if_equations_defined(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIntegrateOdesCalledIfEquationsDefined.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_parameters_assigned_only_in_parameters_block(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_element_not_defined_in_scope(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableNotDefined.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 6 + def test_valid_parameters_assigned_only_in_parameters_block(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoParameterAssignedOutsideBlock.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_element_not_defined_in_scope(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableNotDefined.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_inline_expressions_assigned_only_in_declaration(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoAssignmentToInlineExpression.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_variable_with_same_name_as_unit(self): - # 
Logger.set_logging_level(LoggingLevel.NO) - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableWithSameNameAsUnit.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 + def test_invalid_internals_assigned_only_in_internals_block(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInternalAssignedOutsideBlock.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_variable_redeclaration(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVariableRedeclared.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_internals_assigned_only_in_internals_block(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInternalAssignedOutsideBlock.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_variable_redeclaration(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVariableRedeclared.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_function_with_wrong_arg_number_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_invalid_each_block_unique(self): - # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoEachBlockUnique.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + def test_valid_function_with_wrong_arg_number_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_each_block_unique(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoEachBlockUnique.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_init_values_have_rhs_and_ode(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInitValuesWithoutOde.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 2 - # def test_invalid_function_unique_and_defined(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionNotUnique.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + def test_valid_init_values_have_rhs_and_ode(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInitValuesWithoutOde.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 - # def test_valid_function_unique_and_defined(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionNotUnique.nestml')) - # 
assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_incorrect_return_stmt_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIncorrectReturnStatement.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 - # def test_invalid_inline_expressions_have_rhs(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionHasNoRhs.nestml')) - # assert model is None # parse error + def test_valid_incorrect_return_stmt_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIncorrectReturnStatement.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_inline_expressions_have_rhs(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionHasNoRhs.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_ode_vars_outside_init_block_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeVarNotInInitialValues.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_inline_expression_has_several_lhs(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) - # assert model is None # parse error + def test_valid_ode_vars_outside_init_block_detected(self): + model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeVarNotInInitialValues.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_inline_expression_has_several_lhs(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_convolve_correctly_defined(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_invalid_no_values_assigned_to_input_ports(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoValueAssignedToInputPort.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + def test_valid_convolve_correctly_defined(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_invalid_order_of_equations_correct(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNoOrderOfEquations.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + def test_invalid_vector_in_non_vector_declaration_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 
'invalid')), 'CoCoVectorInNonVectorDeclaration.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_valid_order_of_equations_correct(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNoOrderOfEquations.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_valid_vector_in_non_vector_declaration_detected(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInNonVectorDeclaration.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_invalid_numerator_of_unit_one(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoUnitNumeratorNotOne.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + def test_invalid_vector_parameter_declaration(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterDeclaration.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + + def test_valid_vector_parameter_declaration(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterDeclaration.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_vector_parameter_type(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterType.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 
1 - # def test_valid_numerator_of_unit_one(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoUnitNumeratorNotOne.nestml')) + def test_valid_vector_parameter_type(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterType.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_vector_parameter_size(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorDeclarationSize.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_invalid_names_of_neurons_unique(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 3 + def test_valid_vector_parameter_size(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorDeclarationSize.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_names_of_neurons_unique(self): - # self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoMultipleNeuronsWithEqualName.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(None, LoggingLevel.ERROR)) == 0 + def test_invalid_convolve_correctly_parameterized(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) + assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_invalid_no_nest_collision(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNestNamespaceCollision.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_convolve_correctly_parameterized(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_no_nest_collision(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNestNamespaceCollision.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_invariant_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInvariantNotBool.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_parameters_assigned_only_in_parameters_block(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_invariant_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInvariantNotBool.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def 
test_valid_parameters_assigned_only_in_parameters_block(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoParameterAssignedOutsideBlock.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_expression_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIllegalExpression.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_invalid_inline_expressions_assigned_only_in_declaration(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoAssignmentToInlineExpression.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_expression_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIllegalExpression.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_invalid_internals_assigned_only_in_internals_block(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInternalAssignedOutsideBlock.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_invalid_compound_expression_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 10 - # def 
test_valid_internals_assigned_only_in_internals_block(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInternalAssignedOutsideBlock.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_valid_compound_expression_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_invalid_function_with_wrong_arg_number_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 + def test_invalid_ode_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeIncorrectlyTyped.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_valid_ode_correctly_typed(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeCorrectlyTyped.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_output_block_defined_if_emit_call(self): + """test that an error is raised when the emit_spike() function is called by the neuron, but an output block is not defined""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) + assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_invalid_output_port_defined_if_emit_call(self): + """test that an error is raised when the emit_spike() function is called by the neuron, but a spiking output port is not defined""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall-2.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_valid_output_port_defined_if_emit_call(self): + """test that no error is raised when the output block is missing, but no emit_spike() function is called""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_output_port_type_if_emit_call(self): + """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_invalid_output_port_type_if_emit_call_2(self): + """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-2.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - # def test_valid_function_with_wrong_arg_number_detected(self): - # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoFunctionCallNotConsistentWrongArgNumber.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_valid_output_port_type_if_emit_call(self): + """test that a warning is raised when the emit_spike() function is called with parameter types castable to the types defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-3.nestml')) + assert model is not None + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 + + def test_invalid_output_port_type_continuous(self): + """test that an error is raised when a continuous-time output port is defined as having attributes.""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeContinuous.nestml')) + assert model is None # should result in a parse error + + def test_valid_coco_kernel_type(self): + """ + Test the functionality of CoCoKernelType. + """ + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoKernelType.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_coco_kernel_type(self): + """ + Test the functionality of CoCoKernelType. 
+ """ + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelType.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_init_values_have_rhs_and_ode(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInitValuesWithoutOde.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 2 + def test_invalid_coco_kernel_type_initial_values(self): + """ + Test the functionality of CoCoKernelType. + """ + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelTypeInitialValues.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 4 + + def test_valid_coco_state_variables_initialized(self): + """ + Test that the CoCo condition is applicable for all the variables in the state block initialized with a value + """ + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoStateVariablesInitialized.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + + def test_invalid_coco_state_variables_initialized(self): + """ + Test that the CoCo condition is applicable for all the variables in the state block not initialized + """ + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoStateVariablesInitialized.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_valid_init_values_have_rhs_and_ode(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 
'CoCoInitValuesWithoutOde.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) == 3 + def test_invalid_co_co_priorities_correctly_specified(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - # def test_invalid_incorrect_return_stmt_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIncorrectReturnStatement.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 + def test_valid_co_co_priorities_correctly_specified(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_incorrect_return_stmt_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIncorrectReturnStatement.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_co_co_resolution_legally_used(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoResolutionLegallyUsed.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - # def test_invalid_ode_vars_outside_init_block_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeVarNotInInitialValues.nestml')) - # assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_valid_co_co_resolution_legally_used(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoResolutionLegallyUsed.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_valid_ode_vars_outside_init_block_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeVarNotInInitialValues.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_valid_co_co_vector_input_port(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInputPortSizeAndType.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # def test_invalid_convolve_correctly_defined(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) - # assert any(["Actual type different from expected. Expected: 'pA', got: 'mV'!" 
in log_entry[2] for log_entry in Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)]) - - # def test_valid_convolve_correctly_defined(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyProvided.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_vector_in_non_vector_declaration_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInNonVectorDeclaration.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_valid_vector_in_non_vector_declaration_detected(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInNonVectorDeclaration.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_vector_parameter_declaration(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterDeclaration.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_valid_vector_parameter_declaration(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterDeclaration.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_vector_parameter_type(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorParameterType.nestml')) - # assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_valid_vector_parameter_type(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorParameterType.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_vector_parameter_size(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorDeclarationSize.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - # def test_valid_vector_parameter_size(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorDeclarationSize.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_convolve_correctly_parameterized(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - # def test_valid_convolve_correctly_parameterized(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoConvolveNotCorrectlyParametrized.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_invariant_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInvariantNotBool.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def 
test_valid_invariant_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInvariantNotBool.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_expression_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoIllegalExpression.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - # def test_valid_expression_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoIllegalExpression.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_compound_expression_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 10 - - # def test_valid_compound_expression_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CompoundOperatorWithDifferentButCompatibleUnits.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_ode_correctly_typed(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOdeIncorrectlyTyped.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - - # def test_valid_ode_correctly_typed(self): - # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOdeCorrectlyTyped.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_output_block_defined_if_emit_call(self): - # """test that an error is raised when the emit_spike() function is called by the neuron, but an output block is not defined""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - - # def test_invalid_output_port_defined_if_emit_call(self): - # """test that an error is raised when the emit_spike() function is called by the neuron, but a spiking output port is not defined""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortDefinedIfEmitCall-2.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - - # def test_valid_output_port_defined_if_emit_call(self): - # """test that no error is raised when the output block is missing, but not emit_spike() functions are called""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_output_port_type_if_emit_call(self): - # """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall.nestml')) - # assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - - # def test_invalid_output_port_type_if_emit_call(self): - # """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-2.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 - - # def test_valid_output_port_type_if_emit_call(self): - # """test that a warning is raised when the emit_spike() function is called with parameter types castable to the types defined in the spiking output port""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-3.nestml')) - # assert model is not None - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 - - # def test_invalid_output_port_type_continuous(self): - # """test that an error is raised when a continous-time output port is defined as having attributes.""" - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeContinuous.nestml')) - # assert model is None # should result in a parse error - - # def test_valid_coco_kernel_type(self): - # """ - # Test the functionality of CoCoKernelType. - # """ - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoKernelType.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_coco_kernel_type(self): - # """ - # Test the functionality of CoCoKernelType. 
- # """ - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelType.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_invalid_coco_kernel_type_initial_values(self): - # """ - # Test the functionality of CoCoKernelType. - # """ - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoKernelTypeInitialValues.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 4 - - # def test_valid_coco_state_variables_initialized(self): - # """ - # Test that the CoCo condition is applicable for all the variables in the state block initialized with a value - # """ - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoStateVariablesInitialized.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_coco_state_variables_initialized(self): - # """ - # Test that the CoCo condition is applicable for all the variables in the state block not initialized - # """ - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoStateVariablesInitialized.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - # def test_invalid_co_co_priorities_correctly_specified(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_valid_co_co_priorities_correctly_specified(self): - # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoPrioritiesCorrectlySpecified.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_co_co_resolution_legally_used(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoResolutionLegallyUsed.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - # def test_valid_co_co_resolution_legally_used(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoResolutionLegallyUsed.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_valid_co_co_vector_input_port(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoVectorInputPortSizeAndType.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_co_co_vector_input_port(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInputPortSizeAndType.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_invalid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegal.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 8 - - # def test_valid_co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers(self): - # model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsLegal.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - # def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_valid_co_co_on_receive_vectors_should_be_constant_size(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - - # def test_invalid_co_co_on_receive_vectors_should_be_constant_size(self): - # model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOnReceiveVectorsShouldBeConstantSize.nestml')) - # assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + def test_invalid_co_co_vector_input_port(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoVectorInputPortSizeAndType.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 def _parse_and_validate_model(self, fname: str) -> Optional[str]: 
from pynestml.frontend.pynestml_frontend import generate_target From e96fe188b1af81ce818075171baf7075838c6ab5 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 18 Feb 2025 15:27:05 +0100 Subject: [PATCH 37/68] add attributes to spiking input ports --- .../nestml_language_concepts.rst | 380 ++++++++++-------- doc/nestml_language/neurons_in_nestml.rst | 27 +- 2 files changed, 231 insertions(+), 176 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 55fc172f5..2549f4e6a 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -831,132 +831,186 @@ Block types - ``onCondition`` - Contains statements that are executed when a particular condition holds. The condition is expressed as a (boolean typed) expression. The advantage of having conditions separate from the ``update`` block is that a root-finding algorithm can be used to find the precise time at which a condition holds (with a higher resolution than the simulation timestep). This makes the model more generic with respect to the simulator that is used. -Input ------ +Equations +--------- -A model written in NESTML can be configured to receive two distinct types of input: spikes and continuous-time values. +Systems of ODEs +~~~~~~~~~~~~~~~ +In the ``equations`` block one can define a system of differential equations, with an arbitrary amount of equations, that contain derivatives of arbitrary order. When using a derivative of a variable, say ``V``, one must write: ``V'``. It is then assumed that ``V'`` is the first time derivative of ``V``, that is, :math:`dV/dt`. The second time derivative of ``V`` is ``V''``, and so on. If an equation contains a derivative of order :math:`n`, for example, :math:`V^{(n)}`, all initial values of :math:`V` up to order :math:`n-1` must be defined in the ``state`` block. 
For example, if stating -Continuous-time input ports -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. code-block:: nestml -Continuous-time input ports receive a time-varying signal :math:`f(t)` (possibly, a vector :math:`\mathbf{f}(t)`) that is defined for all :math:`t` (but that could, in practice, be implemented as a stepwise-continuous function of time). + V' = a * V +in the ``equations`` block, then -Spiking input ports -~~~~~~~~~~~~~~~~~~~ +.. code-block:: nestml -The incoming spikes at the spiking input port are modelled as Dirac delta functions. The Dirac delta function :math:`\delta(x)` is an impulsive function defined as zero at every value of :math:`x`, except for :math:`x=0`, and whose integral is equal to 1: + V real = 0 -.. math:: +has to be defined in the ``state`` block. Otherwise, an error message is generated. - \int \delta(t) dt = 1 +The content of spike and continuous time input ports can be used by just using their names. NESTML takes care behind the scenes that the buffer location at the current simulation time step is used. -The unit of the Dirac delta function follows from its definition: -.. math:: +Delay Differential Equations +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - f(0) = \int \delta(t) f(t) dt +The differential equations in the ``equations`` block can also be a delay differential equation, where the derivative at the current time depends on the derivative of a function at previous times. A state variable, say ``foo`` that is dependent on another state variable ``bar`` at a constant time offset (here, ``delay``) in the past, can be written as -Here :math:`f(t)` is a continuous function of :math:`t`. As the unit of the :math:`f()` is the same on both left-and right-hand side, the unit of :math:`dt \delta(t)` must be equal to 1. Therefore, the unit of :math:`\delta(t)` must be equal to the inverse of the unit of :math:`t`, that is :math:`s^{-1}`. Therefore, all the incoming spikes defined in the input block will have an implicit unit of :math:`\text{1/s}`. +.. 
code-block:: nestml -Given an input port ``spikes_in``, we can define the incoming spikes as a train of delta pulses: + state: + bar real = -70. + foo real = 0 -.. math:: + equations: + bar' = -bar / tau + foo' = bar(t - delay) / tau - \mathrm{spikes_in}(t) = \sum_k \delta(t - t_k) +Note that the ``delay`` can be a numeric constant or a constant defined in the ``parameters`` block. In the above example, the ``delay`` variable is defined in the ``parameters`` block as: -The units are the same as for a single delta function. +.. code-block:: nestml -Spiking input can be handled by convolutions with kernels (see :ref:`Integrating spiking input`) or by means of ``onReceive`` event handler blocks. + parameters: + tau ms = 3.5 ms + delay ms = 5.0 ms +For a full example, please refer to the tests at `tests/nest_tests/nest_delay_based_variables_test.py `_. -Handling spiking input by convolutions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. note:: -The spiking input port name ``spikes_in`` can subsequently be used in the right-hand side of ODEs: + - The value of the delayed variable (``bar`` in the above example) returned by the node's ``get()`` function in + PyNEST is always the non-delayed version, i.e., the value of the derivative of ``bar`` at time ``t``. Similarly, the + ``set()`` function sets the value of the actual state variable ``bar`` without the ``delay`` into consideration. + - The ``delay`` variable can be set from PyNEST using the ``set()`` function before running the simulation. Setting the value after the simulation can give rise to unpredictable results and is not currently supported. -.. math:: +.. note:: + + - Delay differential equations where the derivative of a variable is dependent on the derivative of the same + variable at previous times, for example, `The Mackey-Glass equation `_, are not supported currently. + - Delay differential equations with multiple delay values for the same variable are also not supported. 
- \frac{dx}{dt} = -x / \tau + \mathrm{spikes\_in}(t) +Inline expressions +~~~~~~~~~~~~~~~~~~ -If ``x`` is a real number, then the units here are consistent. This can be written in NESTML as: +In the ``equations`` block, inline expressions may be used to reduce redundancy, or improve legibility in the model code. An inline expression is a named expression, that will be "inlined" (effectively, copied-and-pasted in) when its variable symbol is mentioned in subsequent ODE or kernel expressions. In the following example, the inline expression ``h_inf_T`` is defined, and then used in an ODE definition: .. code-block:: nestml - x' = -x / tau + spikes_in + inline h_inf_T real = 1 / (1 + exp((V_m / mV + 83) / 4)) + IT_h' = (h_inf_T * nS - IT_h) / tau_h_T / ms -``spikes_in`` can also be used inside a convolution; for instance, if ``K`` is a kernel, then: +Because of nested substitutions, inline statements may cause the expressions to grow to large size. In case this becomes a problem, it is recommended to use functions instead. -.. math:: +The ``recordable`` keyword can be used to make the variable in inline expressions available to recording devices: - \frac{dx}{dt} = -x / \tau + (K \ast \mathrm{spikes\_in}) / s +.. code-block:: nestml -This can be written in NESTML as: + equations: + ... + recordable inline V_m mV = V_rel + E_L -.. code-block:: nestml +During simulation, one or more state variables are used to maintain the dynamical state of each convolution across time. To be able to reference these variables from within the model, a special case occurs when an inline expression is defined as a convolution and marked ``recordable``: - x' = -x / tau + convolve(K, spikes_in) / s +.. code-block:: nestml -Note that applying the convolution means integrating over time, hence dropping the [1/s] unit, leaving a unitless quantity. To make the units consistent in this case, an explicit division by seconds is required. 
+ recordable inline I_syn pA = convolve(alpha_kernel, spiking_input_port) * pA -Physical units such as millivolts (:math:`\text{mV}`) and picoamperes (:math:`\text{pA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current. In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity to obtain a quantity with units of volts or amperes, for instance, if ``x`` is in ``pA``, then we can write: +Then, the state variables corresponding to this convolution can be referenced in the rest of the model, for instance: .. code-block:: nestml - x = -x / tau + pA * spikes_in + update: + # reset the state of synaptic integration + I_syn = 0 pA + I_syn' = 0 * s**-1 -XXX: mention no_spike_input_port_in_equation_rhs_outside_convolve +Kernel functions +~~~~~~~~~~~~~~~~ +A `kernel` is a function of time, or a differential equation, that represents a kernel which can be used in convolutions. For example, an exponentially decaying kernel could be described as a direct function of time, as follows: +.. code-block:: nestml -Handling spiking input by event handlers -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + kernel g = exp(-t / tau) -An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: +with time constant, for example, equal to 20 ms: .. code-block:: nestml - onReceive(pre_spikes): - println("Info: processing a presynaptic spike at time t = {t}") - # ... further statements go here ... + parameters: + tau ms = 20 ms -The statements in the event handler will be executed when the event occurs. +All kernels are assumed to start at time :math:`t \geq 0` (that is, the value of a kernel is 0 for :math:`t < 0`; it is not necessary to explicitly enforce this). -An event handler integrates the system from "just before" the event to "just after" the event. 
If the spike event occurs at time :math:`t_k`, then the event handler integrates the system in time from "just before the event" :math:`t_k^-` to "just after" the event :math:`t_k^+`. +Equivalently, the same exponentially decaying kernel can be formulated as a differential equation: -To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority. For example: +.. code-block:: nestml + + kernel g' = -g / tau + +In this case, initial values have to be specified in the ``state`` block up to the order of the differential equation, e.g.: .. code-block:: nestml - onReceive(pre_spikes, priority=1): - println("Info: processing a presynaptic spike at time t = {t}") + state: + g real = 1 - onReceive(post_spikes, priority=2): - println("Info: processing a postsynaptic spike at time t = {t}") +Here, the ``1`` defines the peak value of the kernel at :math:`t = 0`. -In this case, if a pre- and postsynaptic spike are received at the exact same time, the higher-priority ``post_spikes`` handler will be invoked first. +An example second-order kernel is the dual exponential ("alpha") kernel, which can be defined in three equivalent ways. -Vector input ports of constant size can be used: +(1) As a direct function of time: -.. code-block:: nestml + .. code-block:: nestml - input: - foo[2] <- spike + kernel g = (e/tau) * t * exp(-t/tau) - onReceive(foo[0]): - # ... handle foo[0] spikes... +(2) As a system of coupled first-order differential equations: - onReceive(foo[1]): - # ... handle foo[1] spikes... + .. code-block:: nestml + + kernel g' = g$ - g / tau, + g$' = -g$ / tau + + with initial values: + + .. code-block:: nestml + + state: + g real = 0 + g$ real = 1 + + Note that the types of both differential equations are :math:`\text{ms}^{-1}`. + +(3) As a second-order differential equation: + + .. 
code-block:: nestml + + kernel g'' = (-2/tau) * g' - (1/tau**2) * g + + with initial values: + + .. code-block:: nestml + + state: + g real = 0 + g' ms**-1 = e / tau + +A Dirac delta impulse kernel can be defined by using the predefined function ``delta``: +.. code-block:: nestml + + kernel g = delta(t) Output ------ -Each model can only send a single type of event. The type of the event has to be given in the `output` block. Currently, however, only spike output is supported. +Each model can only produce a single output. The type of the event has to be given in the `output` block. Currently, only spike output is supported. .. code-block:: nestml @@ -965,14 +1019,7 @@ Each model can only send a single type of event. The type of the event has to be Calling the ``emit_spike()`` function in the ``update`` block results in firing a spike to all target neurons and devices time stamped with the simulation time at the end of the time interval ``t + timestep()``. -XXX: mention attributes here?! - - - -Event attributes -~~~~~~~~~~~~~~~~ - -Each spiking output event can be parameterised by one or more attributes. For example, a synapse could assign a weight (as a real number) and delay (in milliseconds) to its spike events by including these values in the call to ``emit_spike()``: +Each spiking output event can optionally be parameterised by one or more attributes. For example, a synapse could assign a weight (as a real number) and delay (in milliseconds) to its spike events by including these values in the call to ``emit_spike()``: .. code-block:: nestml @@ -994,180 +1041,195 @@ The names are only used externally, so that other models can refer to the correc Specific code generators may support a specific set of attributes; please check the documentation of each individual code generator for more details.
-Equations ---------- +Input +----- -Systems of ODEs -~~~~~~~~~~~~~~~ +A model written in NESTML can be configured to receive two distinct types of input: spikes and continuous-time values. -In the ``equations`` block one can define a system of differential equations, with an arbitrary amount of equations, that contain derivatives of arbitrary order. When using a derivative of a variable, say ``V``, one must write: ``V'``. It is then assumed that ``V'`` is the first time derivative of ``V``, that is, :math:`dV/dt`. The second time derivative of ``V`` is ``V''``, and so on. If an equation contains a derivative of order :math:`n`, for example, :math:`V^{(n)}`, all initial values of :math:`V` up to order :math:`n-1` must be defined in the ``state`` block. For example, if stating -.. code-block:: nestml +Continuous-time input ports +~~~~~~~~~~~~~~~~~~~~~~~~~~~ - V' = a * V +Continuous-time input ports receive a time-varying signal :math:`f(t)` (possibly, a vector :math:`\mathbf{f}(t)`) that is defined for all :math:`t` (but that could, in practice, be implemented as a stepwise-continuous function of time). -in the ``equations`` block, then +For example, the following will add an external signal :math:`f(t)` with units of 1/s to a dynamical variable named :math:`x`. .. code-block:: nestml - V real = 0 + state: + x real = 0 -has to be defined in the ``state`` block. Otherwise, an error message is generated. + parameters: + tau ms = 20 ms -The content of spike and continuous time input ports can be used by just using their names. NESTML takes care behind the scenes that the buffer location at the current simulation time step is used. + equations: + x' = -x / tau + f + input: + f 1/s <- continuous -Delay Differential Equations -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The differential equations in the ``equations`` block can also be a delay differential equation, where the derivative at the current time depends on the derivative of a function at previous times. 
A state variable, say ``foo`` that is dependent on another state variable ``bar`` at a constant time offset (here, ``delay``) in the past, can be written as +Spiking input ports +~~~~~~~~~~~~~~~~~~~ -.. code-block:: nestml +The incoming spikes at the spiking input port are modelled as Dirac delta functions. The Dirac delta function :math:`\delta(x)` is an impulsive function defined as zero at every value of :math:`x`, except for :math:`x=0`, and whose integral is equal to 1: - state: - bar real = -70. - foo real = 0 +.. math:: - equations: - bar' = -bar / tau - foo' = bar(t - delay) / tau + \int \delta(t) dt = 1 -Note that the ``delay`` can be a numeric constant or a constant defined in the ``parameters`` block. In the above example, the ``delay`` variable is defined in the ``parameters`` block as: +The unit of the Dirac delta function follows from its definition: -.. code-block:: nestml +.. math:: - parameters: - tau ms = 3.5 ms - delay ms = 5.0 ms + f(0) = \int \delta(t) f(t) dt -For a full example, please refer to the tests at `tests/nest_tests/nest_delay_based_variables_test.py `_. +Here :math:`f(t)` is a continuous function of :math:`t`. As the unit of the :math:`f()` is the same on both left-and right-hand side, the unit of :math:`dt \delta(t)` must be equal to 1. Therefore, the unit of :math:`\delta(t)` must be equal to the inverse of the unit of :math:`t`, that is :math:`s^{-1}`. Therefore, all the incoming spikes defined in the input block will have an implicit unit of :math:`\text{1/s}`. -.. note:: +Given an input port named ``spikes_in``, the semantics of using this name in expressions and ODEs is that it should be understood as a train of delta pulses: - - The value of the delayed variable (``bar`` in the above example) returned by the node's ``get()`` function in - PyNEST is always the non-delayed version, i.e., the value of the derivative of ``bar`` at time ``t``. 
Similarly, the - ``set()`` function sets the value of the actual state variable ``bar`` without the ``delay`` into consideration. - - The ``delay`` variable can be set from PyNEST using the ``set()`` function before running the simulation. Setting the value after the simulation can give rise to unpredictable results and is not currently supported. +.. math:: -.. note:: + \mathrm{spikes\_in}(t) = \sum_k \delta(t - t_k) - - Delay differential equations where the derivative of a variable is dependent on the derivative of the same - variable at previous times, for example, `The Mackey-Glass equation `_, are not supported currently. - - Delay differential equations with multiple delay values for the same variable are also not supported. +The units are the same as for a single delta function. -Inline expressions -~~~~~~~~~~~~~~~~~~ +Each spike event can optionally contain one or more attributes, such as weight or delay. These are given numerical values by the sending side when calling ``emit_spike()``, and are read out by the receiving side, by appending a dot (fullstop) to the name of the spiking input port and then writing the name of the attribute. -In the ``equations`` block, inline expressions may be used to reduce redundancy, or improve legibility in the model code. An inline expression is a named expression, that will be "inlined" (effectively, copied-and-pasted in) when its variable symbol is mentioned in subsequent ODE or kernel expressions. In the following example, the inline expression ``h_inf_T`` is defined, and then used in an ODE definition: +For example, say there is a train of weighted spike events, with each event :math:$k$ having weight :math:`w_k`: + +.. math:: + + \mathrm{spikes\_in}(t) = \sum_k w_k \delta(t - t_k) + +A spiking input port that is suitable for handling these events could be defined as such: .. 
code-block:: nestml - inline h_inf_T real = 1 / (1 + exp((V_m / mV + 83) / 4)) - IT_h' = (h_inf_T * nS - IT_h) / tau_h_T / ms + input: + spikes_in <- spike(w real) -Because of nested substitutions, inline statements may cause the expressions to grow to large size. In case this becomes a problem, it is recommended to use functions instead. +Note that the units of ``spikes_in`` are again in 1/s. -The ``recordable`` keyword can be used to make the variable in inline expressions available to recording devices: +If a physical unit is specified (such as pA or mV), the numeric value of the attribute is interpreted as having the units given in the definition of the input port. For example, if :math:`w_k` is assumed to be in units of mV, then in combination with the 1/s unit of the delta train, the units of ``spikes_in.w`` are in mV/s, and the input port can be defined as follows: .. code-block:: nestml - equations: - ... - recordable inline V_m mV = V_rel + E_L + input: + spikes_in <- spike(w mV) -During simulation, one or more state variables are used to maintain the dynamical state of each convolution across time. To be able to reference these variables from within the model, a special case occurs when an inline expression is defined as a convolution and marked ``recordable``: +In general, spiking input can be processed by referencing the input port in the right-hand side of an equation (see :ref:`Handling spiking input in equations`) or by means of ``onReceive`` event handlers (see :ref:`Handling spiking input by event handlers`). -.. code-block:: nestml - recordable inline I_syn pA = convolve(alpha_kernel, spiking_input_port) * pA +Handling spiking input in equations +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Then, the state variables corresponding to this convolution can be referenced in the rest of the model, for instance: +The spiking input port name ``spikes_in`` can be used directly in the right-hand side of ODEs: + +.. 
math:: + + \frac{dx}{dt} = -\frac{x}{\tau} + \mathrm{spikes\_in}(t) + +If ``x`` is a real number, then the units here are consistent (in 1/s). This can be written in NESTML as: .. code-block:: nestml - update: - # reset the state of synaptic integration - I_syn = 0 pA - I_syn' = 0 * s**-1 + x' = -x / tau + spikes_in +``spikes_in`` can also be used inside a convolution; for instance, if ``K`` is a kernel, then: -Kernel functions -~~~~~~~~~~~~~~~~ +.. math:: -A `kernel` is a function of time, or a differential equation, that represents a kernel which can be used in convolutions. For example, an exponentially decaying kernel could be described as a direct function of time, as follows: + \frac{dx}{dt} = -\frac{x}{\tau} + (K \ast \mathrm{spikes\_in}) / s + +Note that applying the convolution means integrating over time, hence dropping the [1/s] unit, leaving a unitless quantity. To make the units consistent in this case, an explicit division by seconds is required. + +This can be written in NESTML as: .. code-block:: nestml - kernel g = exp(-t / tau) + x' = -x / tau + convolve(K, spikes_in) / s -with time constant, for example, equal to 20 ms: +Physical units such as millivolts (:math:`\text{mV}`) and picoamperes (:math:`\text{pA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current. In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity to obtain a quantity with units of volts or amperes, for instance, if ``x`` is in ``pA``, then we can write: .. code-block:: nestml - parameters: - tau ms = 20 ms + x = -x / tau + spikes_in * pA -All kernels are assumed to start at time :math:`t \geq 0` (that is, the value of a kernel is 0 for :math:`t < 0`; it is not necessary to explicitly enforce this). +However, note that this does not account for different spikes carrying different weight (which typically results in different postsynaptic currents or potentials).
In this example, each spike will result in a change in :math:`x` of 1 pA. -Equivalently, the same exponentially decaying kernel can be formulated as a differential equation: +To read out the attributes from events, for example the weight of the spike, the dot notation can be used, for example: .. code-block:: nestml - kernel g' = -g / tau + equations: + x' = -x / tau + spikes_in.w -In this case, initial values have to be specified in the ``state`` block up to the order of the differential equation, e.g.: +If ``spikes_in.w`` is defined as a real number, the units here are consistent (in 1/s). In case the weight is defined as having a unit in mV, it could be used for instance as follows: .. code-block:: nestml state: - g real = 1 + y mV = 0 mV -Here, the ``1`` defines the peak value of the kernel at :math:`t = 0`. + input: + spikes_in <- spike(w mV) -An example second-order kernel is the dual exponential ("alpha") kernel, which can be defined in three equivalent ways. + equations: + y' = -y / tau + spikes_in.w -(1) As a direct function of time: +Note that again, the units are consistent if :math:`w_k` is assumed to be in units of mV; in combination with the 1/s unit of the delta train, the units of ``spikes_in.w`` are in mV/s. - .. code-block:: nestml - kernel g = (e/tau) * t * exp(-t/tau) +Handling spiking input by event handlers +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -(2) As a system of coupled first-order differential equations: +An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: - .. code-block:: nestml +.. code-block:: nestml - kernel g' = g$ - g / tau, - g$' = -g$ / tau + onReceive(pre_spikes): + println("Info: processing a presynaptic spike at time t = {t}") + # ... further statements go here ... 
- with initial values: +The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t=t^-`) to "just after" the event (at :math:`t=t^+`): - .. code-block:: nestml +.. math:: - state: - g real = 0 - g$ real = 1 + \int_{t^-}^{t^+} \dot\mathbf{x}(t) dt - Note that the types of both differential equations are :math:`\text{ms}^{-1}`. +Because the statements in the ``onReceive`` block are executed "instantaneously" at the time of the spike, the units of 1/s due to the definition of the delta function drop out. For instance, when a port is defined with an attribute "w" in units of mV, then the following has consistent units: -(3) As a second-order differential equation: +.. code-block:: nestml - .. code-block:: nestml + onReceive(in_spikes): + V_m mV = 42 mV + V_m += in_spikes.w # consistent units - kernel g'' = (-2/tau) * g' - 1/tau**2) * g +To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority. For example: - with initial values: +.. code-block:: nestml - .. code-block:: nestml + onReceive(pre_spikes, priority=1): + println("Info: processing a presynaptic spike at time t = {t}") - state: - g real = 0 - g' ms**-1 = e / tau + onReceive(post_spikes, priority=2): + println("Info: processing a postsynaptic spike at time t = {t}") -A Dirac delta impulse kernel can be defined by using the predefined function ``delta``: +In this case, if a pre- and postsynaptic spike are received at the exact same time, the higher-priority ``post_spikes`` handler will be invoked first. + +Vector input ports of constant size can be used: .. code-block:: nestml - kernel g = delta(t) + input: + foo[2] <- spike + + onReceive(foo[0]): + # ... handle foo[0] spikes... + + onReceive(foo[1]): + # ... handle foo[1] spikes... 
Handling of time diff --git a/doc/nestml_language/neurons_in_nestml.rst b/doc/nestml_language/neurons_in_nestml.rst index 8f72031fd..626ad8a04 100644 --- a/doc/nestml_language/neurons_in_nestml.rst +++ b/doc/nestml_language/neurons_in_nestml.rst @@ -51,14 +51,6 @@ The current port symbol (here, `I_stim`) is available as a variable and can be u Integrating spiking input ^^^^^^^^^^^^^^^^^^^^^^^^^ -Spikes arriving at the input port of a neuron can be written as a spike train :math:`s(t)`: - -.. math:: - - \large s(t) = \sum_{i=1}^N w_i \cdot \delta(t - t_i) - -where :math:`w_i` is the weight of spike :math:`i`. - To model the effect that an arriving spike has on the state of the neuron, a convolution with a kernel can be used. The kernel defines the postsynaptic response kernel, for example, an alpha (bi-exponential) function, decaying exponential, or a delta function. (See :ref:`Kernel functions` for how to define a kernel.) The convolution of the kernel with the spike train is defined as follows: .. math:: @@ -69,16 +61,20 @@ To model the effect that an arriving spike has on the state of the neuron, a con &= \sum_{i=1}^N w_i \cdot f(t - t_i) \end{align*} -For example, say there is a spiking input port defined named ``spikes``. A decaying exponential with time constant ``tau_syn`` is defined as postsynaptic kernel ``G``. Their convolution is expressed using the ``convolve()`` function, which takes a kernel and input port, respectively, as its arguments: +For example, say there is a spiking input port defined named ``spikes``, which receives weighted spike events: + +.. code-block:: nestml + + input: + spikes <- spike(weight pA) + +A decaying exponential with time constant ``tau_syn`` is defined as postsynaptic kernel ``G``. Their convolution is expressed using the ``convolve()`` function, which takes a kernel and input port, respectively, as its arguments: .. 
code-block:: nestml equations: kernel G = exp(-t / tau_syn) - inline I_syn pA = convolve(G, spikes) * pA - V_m' = -V_m / tau_m + I_syn / C_m - -Note that in this example, the intended physical unit (pA) was assigned by multiplying the scalar convolution result with the unit literal. By the definition of convolution, ``convolve(G, spikes)`` will have the unit of kernel ``G`` multiplied by the unit of ``spikes`` and unit of time, i.e., ``[G] * [spikes] * s``. Kernel functions in NESTML are always untyped and the unit of spikes is :math:`1/s` as discussed above. As a result, the unit of convolution is :math:`(1/s) * s`, a scalar quantity without a unit. + inline I_syn pA = convolve(G, spikes.weight) The incoming spikes could have been equivalently handled with an ``onReceive`` event handler block: @@ -89,12 +85,9 @@ The incoming spikes could have been equivalently handled with an ``onReceive`` e equations: I_syn' = -I_syn / tau_syn - V_m' = -V_m / tau_m + I_syn / C_m onReceive(spikes): - I_syn += spikes * pA * s - -Note that in this example, the intended physical unit (pA) was assigned by multiplying the type of the input port ``spikes`` (which is 1/s) by pA·s, resulting in a unit of pA for ``I_syn``. + I_syn += spikes.weight (Re)setting synaptic integration state From ea6061277f7d5838db5ca1ea37e24853d8fc3424 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sun, 9 Mar 2025 19:46:28 +0100 Subject: [PATCH 38/68] add attributes to spiking input ports --- .../nestml_language_concepts.rst | 16 ++-- models/neurons/iaf_psc_delta_neuron.nestml | 5 +- ...t_port_in_equation_rhs_outside_convolve.py | 86 ------------------- pynestml/cocos/co_cos_manager.py | 9 -- .../point_neuron/common/NeuronClass.jinja2 | 23 +---- 5 files changed, 13 insertions(+), 126 deletions(-) delete mode 100644 pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 2549f4e6a..71bf3ef4e 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -1096,7 +1096,7 @@ The units are the same as for a single delta function. Each spike event can optionally contain one or more attributes, such as weight or delay. These are given numerical values by the sending side when calling ``emit_spike()``, and are read out by the receiving side, by appending a dot (fullstop) to the name of the spiking input port and then writing the name of the attribute. -For example, say there is a train of weighted spike events, with each event :math:$k$ having weight :math:`w_k`: +For example, say there is a train of weighted spike events, with each event :math:`k` having weight :math:`w_k`: .. math:: @@ -1109,9 +1109,7 @@ A spiking input port that is suitable for handling these events could be defined input: spikes_in <- spike(w real) -Note that the units of ``spikes_in`` are again in 1/s. - -If a physical unit is specified (such as pA or mV), the numeric value of the attribute is interpreted as having the units given in the definition of the input port. 
For example, if :math:`w_k` is assumed to be in units of mV, then in combination with the 1/s unit of the delta train, the units of ``spikes_in.w`` are in mV/s, and the input port can be defined as follows: +Note that the units of ``spikes_in.w`` are again in 1/s, as ``w`` has been defined as a dimensionless real number. If a physical unit is specified (such as pA or mV), the numeric value of the attribute is interpreted as having the units given in the definition of the input port. For example, if :math:`w_k` is assumed to be in units of mV, then in combination with the 1/s unit of the delta train, the units of ``spikes_in.w`` would be in mV/s, and the input port can be defined as follows: .. code-block:: nestml @@ -1196,15 +1194,15 @@ The statements in the event handler will be executed when the event occurs and i .. math:: - \int_{t^-}^{t^+} \dot\mathbf{x}(t) dt + \int_{t^-}^{t^+} \dot{\mathbf{x}}(t) dt -Because the statements in the ``onReceive`` block are executed "instantaneously" at the time of the spike, the units of 1/s due to the definition of the delta function drop out. For instance, when a port is defined with an attribute "w" in units of mV, then the following has consistent units: +Because the statements in the ``onReceive`` block are executed "instantaneously" at the time of the spike, the units of 1/s due to the definition of the delta function drop out. For instance, when a port is defined with an attribute "psp" in units of mV, then the following has consistent units: .. code-block:: nestml onReceive(in_spikes): - V_m mV = 42 mV - V_m += in_spikes.w # consistent units + V_m mV = 0 mV + V_m += in_spikes.psp # consistent units: lhs and rhs both in [mV] To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority. 
For example: @@ -1218,7 +1216,7 @@ To specify in which sequence the event handlers should be called in case multipl In this case, if a pre- and postsynaptic spike are received at the exact same time, the higher-priority ``post_spikes`` handler will be invoked first. -Vector input ports of constant size can be used: +Vector input ports of constant size and with a constant numerical value for the index can be used: .. code-block:: nestml diff --git a/models/neurons/iaf_psc_delta_neuron.nestml b/models/neurons/iaf_psc_delta_neuron.nestml index e48cbba85..1012ce1ca 100644 --- a/models/neurons/iaf_psc_delta_neuron.nestml +++ b/models/neurons/iaf_psc_delta_neuron.nestml @@ -46,8 +46,7 @@ model iaf_psc_delta_neuron: refr_t ms = 0 ms # Refractory period timer equations: - kernel K_delta = delta(t) - V_m' = -(V_m - E_L) / tau_m + convolve(K_delta, spike_in_port.weight) / s + (I_e + I_stim) / C_m # XXX: TODO: instead of the convolution, this should just read ``... + spike_in_port.weight + ...``. This is a known issue (see https://github.com/nest/nestml/pull/1050). + V_m' = -(V_m - E_L) / tau_m + (I_e + I_stim) / C_m + spike_in_port.psp refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) parameters: @@ -64,7 +63,7 @@ model iaf_psc_delta_neuron: I_e pA = 0 pA input: - spike_in_port <- spike(weight mV) + spike_in_port <- spike(psp mV) I_stim pA <- continuous output: diff --git a/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py b/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py deleted file mode 100644 index dad03e423..000000000 --- a/pynestml/cocos/co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# -# co_co_no_spike_input_port_in_equation_rhs_outside_convolve.py -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . 
- -from pynestml.cocos.co_co import CoCo -from pynestml.meta_model.ast_equations_block import ASTEquationsBlock -from pynestml.meta_model.ast_function_call import ASTFunctionCall -from pynestml.meta_model.ast_model import ASTModel -from pynestml.meta_model.ast_variable import ASTVariable -from pynestml.symbols.predefined_functions import PredefinedFunctions -from pynestml.utils.ast_utils import ASTUtils -from pynestml.utils.logger import Logger, LoggingLevel -from pynestml.utils.messages import Messages -from pynestml.visitors.ast_visitor import ASTVisitor - - -class CoCoNoSpikeInputPortInEquationRhsOutsideConvolve(CoCo): - """ - This coco checks that no spiking input port appears on the right-hand side of equations, outside a convolve() call. - - For instance, provided: - - .. code:: nestml - - input: - spikes_in_port <- spikes - - The following is allowed: - - .. code:: nestml - - equations: - kernel K = delta(t) - x' = convolve(K, spikes_in_port) / s - - But the following is not: - - .. code:: nestml - - equations: - x' = spikes_in_port - - """ - - @classmethod - def check_co_co(cls, model): - """ - Ensures the coco for the handed over model. - :param model: a single model instance. - """ - model.accept(NoSpikeInputPortInEquationRhsOutsideConvolveVisitor()) - - -class NoSpikeInputPortInEquationRhsOutsideConvolveVisitor(ASTVisitor): - def visit_variable(self, node: ASTVariable): - model = ASTUtils.find_parent_node_by_type(node, ASTModel) - assert model is not None - inport = ASTUtils.get_input_port_by_name(model.get_input_blocks(), node.get_name()) - if inport and inport.is_spike(): - if ASTUtils.find_parent_node_by_type(node, ASTEquationsBlock): - func_call = ASTUtils.find_parent_node_by_type(node, ASTFunctionCall) - if func_call and func_call.callee_name == PredefinedFunctions.CONVOLVE: - # it appears inside a convolve() call -- everything is fine! 
- return - - # it's an input port inside the equations block, but not inside a convolve() call -- error - code, message = Messages.get_spike_input_port_in_equation_rhs_outside_convolve() - Logger.log_message(code=code, message=message, error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, node=node) - return diff --git a/pynestml/cocos/co_cos_manager.py b/pynestml/cocos/co_cos_manager.py index f8bb564eb..908030af6 100644 --- a/pynestml/cocos/co_cos_manager.py +++ b/pynestml/cocos/co_cos_manager.py @@ -52,7 +52,6 @@ from pynestml.cocos.co_co_no_kernels_except_in_convolve import CoCoNoKernelsExceptInConvolve from pynestml.cocos.co_co_no_nest_name_space_collision import CoCoNoNestNameSpaceCollision from pynestml.cocos.co_co_no_duplicate_compilation_unit_names import CoCoNoDuplicateCompilationUnitNames -from pynestml.cocos.co_co_no_spike_input_port_in_equation_rhs_outside_convolve import CoCoNoSpikeInputPortInEquationRhsOutsideConvolve from pynestml.cocos.co_co_odes_have_consistent_units import CoCoOdesHaveConsistentUnits from pynestml.cocos.co_co_ode_functions_have_consistent_units import CoCoOdeFunctionsHaveConsistentUnits from pynestml.cocos.co_co_on_receive_vectors_should_be_constant_size import CoCoOnReceiveVectorsShouldBeConstantSize @@ -103,13 +102,6 @@ def check_each_block_defined_at_most_once(cls, node: ASTModel): """ CoCoEachBlockDefinedAtMostOnce.check_co_co(node) - @classmethod - def check_no_spike_input_port_in_equation_rhs_outside_convolve(cls, node: ASTModel): - """ - :param node: a single model instance - """ - CoCoNoSpikeInputPortInEquationRhsOutsideConvolve.check_co_co(node) - @classmethod def check_input_ports_appear_only_in_equation_rhs_and_event_handlers(cls, node: ASTModel): """ @@ -508,6 +500,5 @@ def check_cocos(cls, model: ASTModel, after_ast_rewrite: bool = False): cls.check_timestep_func_legally_used(model) cls.check_co_co_no_attributes_on_continuous_port(model) 
cls.check_input_ports_appear_only_in_equation_rhs_and_event_handlers(model) - cls.check_no_spike_input_port_in_equation_rhs_outside_convolve(model) Logger.set_current_node(None) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 4b7531e38..c9609fa7d 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -906,7 +906,7 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const * Begin NESTML generated code for the update block(s) **/ {% if neuron.get_update_blocks() %} -{%- filter indent(2) %} +{%- filter indent(4) %} {%- for block in neuron.get_update_blocks() %} {%- set ast = block.get_stmts_body() %} {%- if ast.print_comment('*')|length > 1 %} @@ -1031,23 +1031,21 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} - {%- if inputPortSymbol.name not in linear_time_invariant_spiking_input_ports %} - {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - std::list< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::list< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i 
}}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} @@ -1061,19 +1059,6 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endif %} - - - - - - - - - - - - - {%- if use_gap_junctions %} if ( called_from_wfr_update ) { @@ -1468,7 +1453,7 @@ void for (size_t spike_idx = 0; spike_idx < n_spikes; ++spike_idx) { /** - * Grab the actual spike event data from the buffers (for the current timepoint ``origin lag``) + * Grab the actual spike event data from the buffers (for the current timepoint ``origin + lag``) **/ {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} From 249cf417f46a95897cab161f2eaf032609eddce5 Mon Sep 17 00:00:00 2001 From: "C.A.P.
Linssen" Date: Thu, 13 Mar 2025 21:06:03 +0100 Subject: [PATCH 39/68] refactor NESTML printer --- .../printers/cpp_variable_printer.py | 4 +- .../printers/nestml_expression_printer.py | 143 ++++++++++ .../printers/nestml_function_call_printer.py | 40 +++ .../codegeneration/printers/nestml_printer.py | 270 +++++------------- .../nestml_simple_expression_printer.py | 75 +++++ .../printers/nestml_variable_printer.py | 21 +- .../ode_toolbox_expression_printer.py | 2 +- .../printers/ode_toolbox_variable_printer.py | 4 +- pynestml/meta_model/ast_equations_block.py | 6 + tests/test_unit_system.py | 8 +- 10 files changed, 352 insertions(+), 221 deletions(-) create mode 100644 pynestml/codegeneration/printers/nestml_expression_printer.py create mode 100644 pynestml/codegeneration/printers/nestml_function_call_printer.py create mode 100644 pynestml/codegeneration/printers/nestml_simple_expression_printer.py diff --git a/pynestml/codegeneration/printers/cpp_variable_printer.py b/pynestml/codegeneration/printers/cpp_variable_printer.py index 1e1039165..3756aff55 100644 --- a/pynestml/codegeneration/printers/cpp_variable_printer.py +++ b/pynestml/codegeneration/printers/cpp_variable_printer.py @@ -34,9 +34,9 @@ def _print_cpp_name(cls, variable_name: str) -> str: :param variable_name: a single name. 
:return: a string representation """ - differential_order = variable_name.count("\"") + differential_order = variable_name.count("'") if differential_order > 0: - return variable_name.replace("\"", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order + return variable_name.replace("'", "").replace("$", "__DOLLAR") + "__" + "d" * differential_order return variable_name.replace("$", "__DOLLAR") diff --git a/pynestml/codegeneration/printers/nestml_expression_printer.py b/pynestml/codegeneration/printers/nestml_expression_printer.py new file mode 100644 index 000000000..ac8b62209 --- /dev/null +++ b/pynestml/codegeneration/printers/nestml_expression_printer.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +# +# nestml_expression_printer.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . 
+ +from pynestml.codegeneration.printers.expression_printer import ExpressionPrinter +from pynestml.meta_model.ast_arithmetic_operator import ASTArithmeticOperator +from pynestml.meta_model.ast_comparison_operator import ASTComparisonOperator +from pynestml.meta_model.ast_expression import ASTExpression +from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator +from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_unary_operator import ASTUnaryOperator + + +class NESTMLExpressionPrinter(ExpressionPrinter): + r""" + Printer for ``ASTExpression`` nodes in NESTML syntax. + """ + + def print(self, node: ASTNode) -> str: + if isinstance(node, ASTExpression): + if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1: + return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self.print_expression(node) + "))" + + return self.print_expression(node) + + if isinstance(node, ASTArithmeticOperator): + return self.print_arithmetic_operator(node) + + if isinstance(node, ASTUnaryOperator): + return self.print_unary_operator(node) + + if isinstance(node, ASTComparisonOperator): + return self.print_comparison_operator(node) + + if isinstance(node, ASTLogicalOperator): + return self.print_logical_operator(node) + + return self._simple_expression_printer.print(node) + + def print_logical_operator(self, node: ASTLogicalOperator) -> str: + if node.is_logical_and: + return " and " + + if node.is_logical_or: + return " or " + + raise Exception("Unknown logical operator") + + def print_comparison_operator(self, node: ASTComparisonOperator) -> str: + if node.is_lt: + return " < " + + if node.is_le: + return " <= " + + if node.is_eq: + return " == " + + if node.is_ne: + return " != " + + if node.is_ne2: + return " <> " + + if node.is_ge: + return " >= " + + if node.is_gt: + return " > " + + raise RuntimeError("(PyNestML.ComparisonOperator.Print) Type of comparison operator not specified!") + + def 
print_unary_operator(self, node: ASTUnaryOperator) -> str: + if node.is_unary_plus: + return "+" + + if node.is_unary_minus: + return "-" + + if node.is_unary_tilde: + return "~" + + raise RuntimeError("Type of unary operator not specified!") + + def print_arithmetic_operator(self, node: ASTArithmeticOperator) -> str: + if node.is_times_op: + return " * " + + if node.is_div_op: + return " / " + + if node.is_modulo_op: + return " % " + + if node.is_plus_op: + return " + " + + if node.is_minus_op: + return " - " + + if node.is_pow_op: + return " ** " + + raise RuntimeError("(PyNestML.ArithmeticOperator.Print) Arithmetic operator not specified.") + + def print_expression(self, node: ASTExpression) -> str: + ret = "" + if node.is_expression(): + if node.is_encapsulated: + ret += "(" + if node.is_logical_not: + ret += "not " + if node.is_unary_operator(): + ret += self.print(node.get_unary_operator()) + ret += self.print(node.get_expression()) + if node.is_encapsulated: + ret += ")" + elif node.is_compound_expression(): + ret += self.print(node.get_lhs()) + ret += self.print(node.get_binary_operator()) + ret += self.print(node.get_rhs()) + elif node.is_ternary_operator(): + ret += self.print(node.get_condition()) + "?" + self.print( + node.get_if_true()) + ":" + self.print(node.get_if_not()) + + return ret diff --git a/pynestml/codegeneration/printers/nestml_function_call_printer.py b/pynestml/codegeneration/printers/nestml_function_call_printer.py new file mode 100644 index 000000000..21efa320d --- /dev/null +++ b/pynestml/codegeneration/printers/nestml_function_call_printer.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# +# nestml_function_call_printer.py +# +# This file is part of NEST. 
+# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.codegeneration.printers.function_call_printer import FunctionCallPrinter +from pynestml.meta_model.ast_function_call import ASTFunctionCall + + +class NESTMLFunctionCallPrinter(FunctionCallPrinter): + r""" + Printer for ASTFunctionCall in NESTML syntax. + """ + + def print_function_call(self, node: ASTFunctionCall) -> str: + ret = str(node.get_name()) + "(" + for i in range(0, len(node.get_args())): + ret += self._expression_printer.print(node.get_args()[i]) + if i < len(node.get_args()) - 1: # in the case that it is not the last arg, print also a comma + ret += "," + + ret += ")" + + return ret diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index 99c7bbfbc..ec1e1e353 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -19,11 +19,20 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see .
+from typing import Optional, Union + +from pynestml.codegeneration.printers import nest_variable_printer +from pynestml.codegeneration.printers.constant_printer import ConstantPrinter from pynestml.codegeneration.printers.model_printer import ModelPrinter +from pynestml.codegeneration.printers.nest_variable_printer import NESTVariablePrinter +from pynestml.codegeneration.printers.nestml_expression_printer import NESTMLExpressionPrinter +from pynestml.codegeneration.printers.nestml_function_call_printer import NESTMLFunctionCallPrinter +from pynestml.codegeneration.printers.nestml_simple_expression_printer import NESTMLSimpleExpressionPrinter +from pynestml.codegeneration.printers.nestml_variable_printer import NESTMLVariablePrinter +from pynestml.codegeneration.printers.variable_printer import VariablePrinter from pynestml.meta_model.ast_arithmetic_operator import ASTArithmeticOperator from pynestml.meta_model.ast_assignment import ASTAssignment from pynestml.meta_model.ast_bit_operator import ASTBitOperator -from pynestml.meta_model.ast_stmts_body import ASTStmtsBody from pynestml.meta_model.ast_block_with_variables import ASTBlockWithVariables from pynestml.meta_model.ast_comparison_operator import ASTComparisonOperator from pynestml.meta_model.ast_compound_stmt import ASTCompoundStmt @@ -38,25 +47,26 @@ from pynestml.meta_model.ast_function_call import ASTFunctionCall from pynestml.meta_model.ast_if_clause import ASTIfClause from pynestml.meta_model.ast_if_stmt import ASTIfStmt +from pynestml.meta_model.ast_inline_expression import ASTInlineExpression from pynestml.meta_model.ast_input_block import ASTInputBlock from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_input_qualifier import ASTInputQualifier from pynestml.meta_model.ast_kernel import ASTKernel from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator +from pynestml.meta_model.ast_model import ASTModel +from pynestml.meta_model.ast_model_body import 
ASTModelBody from pynestml.meta_model.ast_namespace_decorator import ASTNamespaceDecorator from pynestml.meta_model.ast_nestml_compilation_unit import ASTNestMLCompilationUnit -from pynestml.meta_model.ast_model_body import ASTModelBody from pynestml.meta_model.ast_ode_equation import ASTOdeEquation -from pynestml.meta_model.ast_inline_expression import ASTInlineExpression -from pynestml.meta_model.ast_model import ASTModel from pynestml.meta_model.ast_on_condition_block import ASTOnConditionBlock from pynestml.meta_model.ast_output_block import ASTOutputBlock -from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.meta_model.ast_return_stmt import ASTReturnStmt from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_small_stmt import ASTSmallStmt from pynestml.meta_model.ast_stmt import ASTStmt +from pynestml.meta_model.ast_stmts_body import ASTStmtsBody from pynestml.meta_model.ast_unary_operator import ASTUnaryOperator from pynestml.meta_model.ast_unit_type import ASTUnitType from pynestml.meta_model.ast_update_block import ASTUpdateBlock @@ -74,33 +84,47 @@ class NESTMLPrinter(ModelPrinter): def __init__(self): self.indent = 0 + self._expression_printer = NESTMLExpressionPrinter(simple_expression_printer=None) + self._constant_printer = ConstantPrinter() + self._function_call_printer = NESTMLFunctionCallPrinter(expression_printer=self._expression_printer) + self._variable_printer = NESTMLVariablePrinter(expression_printer=self._expression_printer) + self._simple_expression_printer = NESTMLSimpleExpressionPrinter(variable_printer=self._variable_printer, function_call_printer=self._function_call_printer, constant_printer=self._constant_printer) + self._expression_printer._simple_expression_printer = self._simple_expression_printer + def print_model(self, node: ASTModel) 
-> str: ret = print_ml_comments(node.pre_comments, self.indent, False) + self.inc_indent() ret += "model " + node.get_name() + ":" + print_sl_comment(node.in_comment) ret += "\n" + self.print(node.get_body()) - + self.dec_indent() return ret - def print_arithmetic_operator(celf, node: ASTArithmeticOperator) -> str: - if node.is_times_op: - return " * " + def print_constant(self, const: Union[str, float, int]) -> str: + return self._constant_printer.print_constant(const) + + def print_function_call(self, node: ASTFunctionCall) -> str: + return self._function_call_printer.print_function_call(node) + + def print_variable(self, node: ASTVariable) -> str: + return self._variable_printer.print_variable(node) - if node.is_div_op: - return " / " + def print_simple_expression(self, node: ASTSimpleExpression) -> str: + return self._simple_expression_printer.print_simple_expression(node) - if node.is_modulo_op: - return " % " + def print_expression(self, node: ASTExpression) -> str: + return self._expression_printer.print_expression(node) - if node.is_plus_op: - return " + " + def print_arithmetic_operator(self, node: ASTArithmeticOperator) -> str: + return self._expression_printer.print_arithmetic_operator(node) - if node.is_minus_op: - return " - " + def print_unary_operator(self, node: ASTUnaryOperator) -> str: + return self._expression_printer.print_unary_operator(node) - if node.is_pow_op: - return " ** " + def print_comparison_operator(self, node: ASTComparisonOperator) -> str: + return self._expression_printer.print_comparison_operator(node) - raise RuntimeError("(PyNestML.ArithmeticOperator.Print) Arithmetic operator not specified.") + def print_logical_operator(self, node: ASTLogicalOperator) -> str: + return self._expression_printer.print_logical_operator(node) def print_assignment(self, node: ASTAssignment) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) @@ -145,9 +169,10 @@ def print_stmts_body(self, node: ASTStmtsBody) -> str: return ret 
def print_block_with_variables(self, node: ASTBlockWithVariables) -> str: - ret = print_ml_comments(node.pre_comments, self.indent, False) - ret += print_n_spaces(self.indent) - + temp_indent = self.indent + self.inc_indent() + ret = print_ml_comments(node.pre_comments, temp_indent, False) + ret += print_n_spaces(temp_indent) if node.is_state: ret += "state" elif node.is_parameters: @@ -155,52 +180,19 @@ def print_block_with_variables(self, node: ASTBlockWithVariables) -> str: else: assert node.is_internals ret += "internals" - ret += ":" + print_sl_comment(node.in_comment) + "\n" - if node.get_declarations() is not None: - self.inc_indent() for decl in node.get_declarations(): ret += self.print(decl) - - self.dec_indent() - + self.dec_indent() return ret def print_model_body(self, node: ASTModelBody) -> str: - self.inc_indent() ret = "" for elem in node.body_elements: ret += self.print(elem) - - self.dec_indent() - return ret - def print_comparison_operator(self, node: ASTComparisonOperator) -> str: - if node.is_lt: - return " < " - - if node.is_le: - return " <= " - - if node.is_eq: - return " == " - - if node.is_ne: - return " != " - - if node.is_ne2: - return " <> " - - if node.is_ge: - return " >= " - - if node.is_gt: - return " > " - - raise RuntimeError("(PyNestML.ComparisonOperator.Print) Type of comparison operator not specified!") - def print_compound_stmt(self, node: ASTCompoundStmt) -> str: if node.is_if_stmt(): return self.print(node.get_if_stmt()) @@ -261,20 +253,11 @@ def print_declaration(self, node: ASTDeclaration) -> str: return ret def print_elif_clause(self, node: ASTElifClause) -> str: - ret = print_n_spaces(self.indent) + "elif " + self.print(node.get_condition()) + ":\n" - self.inc_indent() - ret += self.print(node.get_stmts_body()) - self.dec_indent() - - return ret + return (print_n_spaces(self.indent) + "elif " + self.print(node.get_condition()) + + ":\n" + self.print(node.get_stmts_body())) def print_else_clause(self, node: ASTElseClause) 
-> str: - ret = print_n_spaces(self.indent) + "else:\n" - self.inc_indent() - ret += self.print(node.get_stmts_body()) - self.dec_indent() - - return ret + return print_n_spaces(self.indent) + "else:\n" + self.print(node.get_stmts_body()) def print_equations_block(self, node: ASTEquationsBlock) -> str: temp_indent = self.indent @@ -287,41 +270,21 @@ def print_equations_block(self, node: ASTEquationsBlock) -> str: self.dec_indent() return ret - def print_expression(self, node: ASTExpression) -> str: - ret = "" - if node.is_expression(): - if node.is_encapsulated: - ret += "(" - if node.is_logical_not: - ret += "not " - if node.is_unary_operator(): - ret += self.print(node.get_unary_operator()) - ret += self.print(node.get_expression()) - if node.is_encapsulated: - ret += ")" - elif node.is_compound_expression(): - ret += self.print(node.get_lhs()) - ret += self.print(node.get_binary_operator()) - ret += self.print(node.get_rhs()) - elif node.is_ternary_operator(): - ret += self.print(node.get_condition()) + "?" + self.print( - node.get_if_true()) + ":" + self.print(node.get_if_not()) - return ret - def print_for_stmt(self, node: ASTForStmt) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) ret += ("for " + node.get_variable() + " in " + self.print(node.get_start_from()) + "..." 
+ self.print(node.get_end_at()) + " step " + str(node.get_step()) + ":" + print_sl_comment(node.in_comment) + "\n") - self.inc_indent() ret += self.print(node.get_stmts_body()) - self.dec_indent() return ret + def print_function_call(self, node: ASTFunctionCall) -> str: + return self._function_call_printer.print_function_call(node) + def print_function(self, node: ASTFunction) -> str: ret = print_ml_comments(node.pre_comments, self.indent) - ret += print_n_spaces(self.indent) + "function " + node.get_name() + "(" + ret += "function " + node.get_name() + "(" if node.has_parameters(): for par in node.get_parameters(): ret += self.print(par) @@ -329,31 +292,14 @@ def print_function(self, node: ASTFunction) -> str: if node.has_return_type(): ret += " " + self.print(node.get_return_type()) ret += ":" + print_sl_comment(node.in_comment) + "\n" - self.inc_indent() ret += self.print(node.get_stmts_body()) + "\n" - self.dec_indent() - - return ret - - def print_function_call(self, node: ASTFunctionCall) -> str: - ret = str(node.get_name()) + "(" - for i in range(0, len(node.get_args())): - ret += self.print(node.get_args()[i]) - if i < len(node.get_args()) - 1: # in the case that it is not the last arg, print also a comma - ret += "," - - ret += ")" - return ret def print_if_clause(self, node: ASTIfClause) -> str: ret = print_ml_comments(node.pre_comments, self.indent) ret += print_n_spaces(self.indent) + "if " + self.print(node.get_condition()) + ":" ret += print_sl_comment(node.in_comment) + "\n" - self.inc_indent() ret += self.print(node.get_stmts_body()) - self.dec_indent() - return ret def print_if_stmt(self, node: ASTIfStmt) -> str: @@ -364,7 +310,6 @@ def print_if_stmt(self, node: ASTIfStmt) -> str: if node.get_else_clause() is not None: ret += self.print(node.get_else_clause()) ret += print_n_spaces(self.indent) + "\n" - return ret def print_input_block(self, node: ASTInputBlock) -> str: @@ -403,15 +348,6 @@ def print_input_qualifier(self, node: ASTInputQualifier) 
-> str: return "excitatory" return "" - def print_logical_operator(self, node: ASTLogicalOperator) -> str: - if node.is_logical_and: - return " and " - - if node.is_logical_or: - return " or " - - raise Exception("Unknown logical operator") - def print_compilation_unit(self, node: ASTNestMLCompilationUnit) -> str: ret = "" if node.get_model_list() is not None: @@ -455,14 +391,6 @@ def print_output_block(self, node: ASTOutputBlock) -> str: ret += print_n_spaces(self.indent) + "output:\n" ret += print_n_spaces(self.indent + 4) ret += "spike" if node.is_spike() else "continuous" - if node.get_attributes(): - ret += "(" - for i, attr in enumerate(node.get_attributes()): - ret += self.print(attr) - if i < len(node.get_attributes()) - 1: - ret += ", " - - ret += ")" ret += print_sl_comment(node.in_comment) ret += "\n" return ret @@ -475,33 +403,6 @@ def print_return_stmt(self, node: ASTReturnStmt): ret += "return " + (self.print(node.get_expression()) if node.has_expression() else "") return ret - def print_simple_expression(self, node: ASTSimpleExpression) -> str: - if node.is_function_call(): - return self.print(node.function_call) - - if node.is_boolean_true: - return "true" - - if node.is_boolean_false: - return "false" - - if node.is_inf_literal: - return "inf" - - if node.is_numeric_literal(): - if node.variable is not None: - return str(node.numeric_literal) + self.print(node.variable) - - return str(node.numeric_literal) - - if node.is_variable(): - return self.print_variable(node.get_variable()) - - if node.is_string(): - return node.get_string() - - raise RuntimeError("Simple rhs at %s not specified!" 
% str(node.get_source_position())) - def print_small_stmt(self, node: ASTSmallStmt) -> str: if node.is_assignment(): ret = self.print(node.get_assignment()) @@ -523,18 +424,6 @@ def print_stmt(self, node: ASTStmt): return self.print(node.compound_stmt) - def print_unary_operator(self, node: ASTUnaryOperator) -> str: - if node.is_unary_plus: - return "+" - - if node.is_unary_minus: - return "-" - - if node.is_unary_tilde: - return "~" - - raise RuntimeError("Type of unary operator not specified!") - def print_unit_type(self, node: ASTUnitType) -> str: if node.is_encapsulated: return "(" + self.print(node.compound_unit) + ")" @@ -543,60 +432,38 @@ def print_unit_type(self, node: ASTUnitType) -> str: return self.print(node.base) + "**" + str(node.exponent) if node.is_arithmetic_expression(): - t_lhs = self.print(node.get_lhs()) if isinstance(node.get_lhs(), ASTUnitType) else str(node.get_lhs()) + t_lhs = ( + self.print(node.get_lhs()) if isinstance(node.get_lhs(), ASTUnitType) else str(node.get_lhs())) if node.is_times: return t_lhs + "*" + self.print(node.get_rhs()) - - return t_lhs + "/" + self.print(node.get_rhs()) + else: + return t_lhs + "/" + self.print(node.get_rhs()) return node.unit def print_on_receive_block(self, node: ASTOnReceiveBlock) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) + "onReceive(" + node.port_name + "):" + print_sl_comment(node.in_comment) + "\n" - self.inc_indent() ret += self.print(node.get_stmts_body()) - self.dec_indent() - return ret def print_on_condition_block(self, node: ASTOnConditionBlock) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) + "onCondition(" + self.print(node.get_cond_expr()) + "):" + print_sl_comment(node.in_comment) + "\n" - self.inc_indent() ret += self.print(node.get_stmts_body()) - self.dec_indent() - return ret def print_update_block(self, node: ASTUpdateBlock): ret = 
print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) + "update:" + print_sl_comment(node.in_comment) + "\n" - self.inc_indent() ret += self.print(node.get_stmts_body()) - self.dec_indent() - - return ret - - def print_variable(self, node: ASTVariable): - ret = node.name - - if node.get_vector_parameter(): - ret += "[" + self.print(node.get_vector_parameter()) + "]" - - for i in range(1, node.differential_order + 1): - ret += "'" - return ret def print_while_stmt(self, node: ASTWhileStmt) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += (print_n_spaces(self.indent) + "while " + self.print(node.get_condition()) + ":" + print_sl_comment(node.in_comment) + "\n") - self.inc_indent() ret += self.print(node.get_stmts_body()) - self.dec_indent() - return ret def inc_indent(self): @@ -613,27 +480,22 @@ def print_n_spaces(n) -> str: def print_ml_comments(comments, indent=0, newline=False) -> str: if comments is None or len(list(comments)) == 0: return "" - ret = "" for comment in comments: - if comment.lstrip() == "": - ret += "# \n" - + if "\"\"\"" in comment: + return comment + "\n" for c_line in comment.splitlines(True): if c_line == "\n": - ret += print_n_spaces(indent) + "# " + "\n" + ret += print_n_spaces(indent) + "#" + "\n" + continue + elif c_line.lstrip() == "": continue - ret += print_n_spaces(indent) - if c_line[len(c_line) - len(c_line.lstrip())] != "#": - ret += "# " - + ret += "#" ret += c_line + "\n" - if len(comment.splitlines(True)) > 1: ret += print_n_spaces(indent) - if len(comments) > 0 and newline: ret += "\n" diff --git a/pynestml/codegeneration/printers/nestml_simple_expression_printer.py b/pynestml/codegeneration/printers/nestml_simple_expression_printer.py new file mode 100644 index 000000000..8198c239a --- /dev/null +++ b/pynestml/codegeneration/printers/nestml_simple_expression_printer.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- +# +# nestml_simple_expression_printer.py +# +# 
This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.codegeneration.printers.simple_expression_printer import SimpleExpressionPrinter +from pynestml.meta_model.ast_function_call import ASTFunctionCall +from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression +from pynestml.meta_model.ast_variable import ASTVariable + + +class NESTMLSimpleExpressionPrinter(SimpleExpressionPrinter): + r""" + Printer for ASTSimpleExpressions in NESTML syntax. 
+ """ + + def _print(self, node: ASTNode) -> str: + if isinstance(node, ASTVariable): + return self._variable_printer.print(node) + + if isinstance(node, ASTFunctionCall): + return self._function_call_printer.print(node) + + return self.print_simple_expression(node) + + def print(self, node: ASTNode) -> str: + if node.get_implicit_conversion_factor() and not node.get_implicit_conversion_factor() == 1: + return "(" + str(node.get_implicit_conversion_factor()) + " * (" + self._print(node) + "))" + + return self._print(node) + + def print_simple_expression(self, node: ASTSimpleExpression) -> str: + if node.is_function_call(): + return self.print(node.function_call) + + if node.is_boolean_true: + return "true" + + if node.is_boolean_false: + return "false" + + if node.is_inf_literal: + return "inf" + + if node.is_numeric_literal(): + if node.variable is not None: + # numeric literal + physical unit + return str(node.numeric_literal) + self.print(node.variable) + + return str(node.numeric_literal) + + if node.is_variable(): + return self._variable_printer.print_variable(node.get_variable()) + + if node.is_string(): + return node.get_string() + + raise RuntimeError("Simple rhs at %s not specified!" % str(node.get_source_position())) diff --git a/pynestml/codegeneration/printers/nestml_variable_printer.py b/pynestml/codegeneration/printers/nestml_variable_printer.py index 0e7ea9741..834df2918 100644 --- a/pynestml/codegeneration/printers/nestml_variable_printer.py +++ b/pynestml/codegeneration/printers/nestml_variable_printer.py @@ -23,15 +23,20 @@ from pynestml.meta_model.ast_variable import ASTVariable -class NestMLVariablePrinter(VariablePrinter): +class NESTMLVariablePrinter(VariablePrinter): r""" Print ``ASTVariable``s in NESTML syntax. 
""" - def print_variable(self, node: ASTVariable) -> str: - """ - Print a variable node - :param node: the node to print - :return: string representation - """ - return node.get_complete_name() + def print_variable(self, node: ASTVariable): + assert isinstance(node, ASTVariable) + + ret = node.name + + if node.get_vector_parameter(): + ret += "[" + self._expression_printer.print(node.get_vector_parameter()) + "]" + + for i in range(1, node.differential_order + 1): + ret += "'" + + return ret diff --git a/pynestml/codegeneration/printers/ode_toolbox_expression_printer.py b/pynestml/codegeneration/printers/ode_toolbox_expression_printer.py index af8acd4b8..09a8a918d 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_expression_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_expression_printer.py @@ -26,7 +26,7 @@ class ODEToolboxExpressionPrinter(CppExpressionPrinter): r""" - Printer for ``ASTExpression`` nodes in ODE-toolbox syntax. + Printer for ``ASTExpression`` nodes in ODE-toolbox (sympy) syntax. """ def _print_ternary_operator_expression(self, node: ASTExpression) -> str: diff --git a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py index 734eb530d..4b6a9b952 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py @@ -29,9 +29,9 @@ class ODEToolboxVariablePrinter(VariablePrinter): """ def print_variable(self, node: ASTVariable) -> str: - """ + r""" Print variable. 
:param node: the node to print :return: string representation """ - return node.get_complete_name().replace("$", "__DOLLAR") + return node.get_name().replace("$", "__DOLLAR") + "__d" * node.get_differential_order() diff --git a/pynestml/meta_model/ast_equations_block.py b/pynestml/meta_model/ast_equations_block.py index 18d576138..bf6abb75f 100644 --- a/pynestml/meta_model/ast_equations_block.py +++ b/pynestml/meta_model/ast_equations_block.py @@ -43,12 +43,18 @@ def __init__(self, declarations, *args, **kwargs): """ assert (declarations is not None and isinstance(declarations, list)), \ '(PyNestML.AST.EquationsBlock) No or wrong type of declarations provided (%s)!' % type(declarations) + + if declarations is None: + declarations = [] + for decl in declarations: assert decl is not None and (isinstance(decl, ASTKernel) or isinstance(decl, ASTOdeEquation) or isinstance(decl, ASTInlineExpression)), \ '(PyNestML.AST.EquationsBlock) No or wrong type of ode-element provided (%s)' % type(decl) + super(ASTEquationsBlock, self).__init__(*args, **kwargs) + self.declarations = declarations def clone(self): diff --git a/tests/test_unit_system.py b/tests/test_unit_system.py index ab4b5a560..5b71ce6e1 100644 --- a/tests/test_unit_system.py +++ b/tests/test_unit_system.py @@ -28,7 +28,7 @@ from pynestml.codegeneration.printers.cpp_type_symbol_printer import CppTypeSymbolPrinter from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter from pynestml.codegeneration.printers.nest_cpp_function_call_printer import NESTCppFunctionCallPrinter -from pynestml.codegeneration.printers.nestml_variable_printer import NestMLVariablePrinter +from pynestml.codegeneration.printers.nestml_variable_printer import NESTMLVariablePrinter from pynestml.frontend.pynestml_frontend import generate_target from pynestml.symbol_table.symbol_table import SymbolTable from pynestml.symbols.predefined_functions import PredefinedFunctions @@ -58,7 +58,7 @@ def setUp(self, request): 
Logger.init_logger(LoggingLevel.INFO) - variable_printer = NestMLVariablePrinter(None) + variable_printer = NESTMLVariablePrinter(None) function_call_printer = NESTCppFunctionCallPrinter(None) cpp_variable_printer = CppVariablePrinter(None) self.printer = CppExpressionPrinter(CppSimpleExpressionPrinter(cpp_variable_printer, @@ -72,7 +72,7 @@ def setUp(self, request): def get_first_statement_in_update_block(self, model): if model.get_model_list()[0].get_update_blocks()[0]: - return model.get_model_list()[0].get_update_blocks()[0].get_stmts_body().get_stmts()[0] + return model.get_model_list()[0].get_update_blocks()[0].get_block().get_stmts()[0] return None @@ -103,7 +103,7 @@ def print_rhs_of_first_declaration_in_state_block(self, model): def print_first_return_statement_in_first_declared_function(self, model): func = self.get_first_declared_function(model) - return_expression = func.get_stmts_body().get_stmts()[0].small_stmt.get_return_stmt().get_expression() + return_expression = func.get_block().get_stmts()[0].small_stmt.get_return_stmt().get_expression() return self.printer.print(return_expression) def test_expression_after_magnitude_conversion_in_direct_assignment(self): From 2eac0264e5fe8c7ba67a58f851241c8774f0ed6f Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Fri, 14 Mar 2025 18:42:38 +0100 Subject: [PATCH 40/68] refactor NESTML printer --- .../codegeneration/printers/nestml_printer.py | 42 ++++++++++++++++--- .../point_neuron/@SYNAPSE_NAME@.py.jinja2 | 16 +++---- tests/test_unit_system.py | 4 +- .../CoCoVariableWithSameNameAsUnit.nestml | 24 +++++------ 4 files changed, 58 insertions(+), 28 deletions(-) diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index ec1e1e353..18a23eed3 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -184,7 +184,9 @@ def print_block_with_variables(self, node: ASTBlockWithVariables) -> str: if node.get_declarations() is not None: for decl in node.get_declarations(): ret += self.print(decl) + self.dec_indent() + return ret def print_model_body(self, node: ASTModelBody) -> str: @@ -253,11 +255,20 @@ def print_declaration(self, node: ASTDeclaration) -> str: return ret def print_elif_clause(self, node: ASTElifClause) -> str: - return (print_n_spaces(self.indent) + "elif " + self.print(node.get_condition()) - + ":\n" + self.print(node.get_stmts_body())) + ret = print_n_spaces(self.indent) + "elif " + self.print(node.get_condition()) + ":\n" + self.inc_indent() + ret += self.print(node.get_stmts_body()) + self.dec_indent() + + return ret def print_else_clause(self, node: ASTElseClause) -> str: - return print_n_spaces(self.indent) + "else:\n" + self.print(node.get_stmts_body()) + ret = print_n_spaces(self.indent) + "else:\n" + self.inc_indent() + ret += self.print(node.get_stmts_body()) + self.dec_indent() + + return ret def print_equations_block(self, node: ASTEquationsBlock) -> str: temp_indent = self.indent @@ -276,7 +287,9 @@ def print_for_stmt(self, node: ASTForStmt) -> str: ret += ("for " + node.get_variable() + " in " + self.print(node.get_start_from()) + "..." 
+ self.print(node.get_end_at()) + " step " + str(node.get_step()) + ":" + print_sl_comment(node.in_comment) + "\n") + self.inc_indent() ret += self.print(node.get_stmts_body()) + self.dec_indent() return ret def print_function_call(self, node: ASTFunctionCall) -> str: @@ -292,14 +305,18 @@ def print_function(self, node: ASTFunction) -> str: if node.has_return_type(): ret += " " + self.print(node.get_return_type()) ret += ":" + print_sl_comment(node.in_comment) + "\n" + self.inc_indent() ret += self.print(node.get_stmts_body()) + "\n" + self.dec_indent() return ret def print_if_clause(self, node: ASTIfClause) -> str: ret = print_ml_comments(node.pre_comments, self.indent) ret += print_n_spaces(self.indent) + "if " + self.print(node.get_condition()) + ":" ret += print_sl_comment(node.in_comment) + "\n" + self.inc_indent() ret += self.print(node.get_stmts_body()) + self.dec_indent() return ret def print_if_stmt(self, node: ASTIfStmt) -> str: @@ -444,26 +461,34 @@ def print_unit_type(self, node: ASTUnitType) -> str: def print_on_receive_block(self, node: ASTOnReceiveBlock) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) + "onReceive(" + node.port_name + "):" + print_sl_comment(node.in_comment) + "\n" + self.inc_indent() ret += self.print(node.get_stmts_body()) + self.dec_indent() return ret def print_on_condition_block(self, node: ASTOnConditionBlock) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) + "onCondition(" + self.print(node.get_cond_expr()) + "):" + print_sl_comment(node.in_comment) + "\n" + self.inc_indent() ret += self.print(node.get_stmts_body()) + self.dec_indent() return ret def print_update_block(self, node: ASTUpdateBlock): ret = print_ml_comments(node.pre_comments, self.indent, False) ret += print_n_spaces(self.indent) + "update:" + print_sl_comment(node.in_comment) + "\n" + self.inc_indent() ret += self.print(node.get_stmts_body()) + 
self.dec_indent() return ret def print_while_stmt(self, node: ASTWhileStmt) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) ret += (print_n_spaces(self.indent) + "while " + self.print(node.get_condition()) + ":" + print_sl_comment(node.in_comment) + "\n") + self.inc_indent() ret += self.print(node.get_stmts_body()) + self.dec_indent() return ret def inc_indent(self): @@ -482,20 +507,25 @@ def print_ml_comments(comments, indent=0, newline=False) -> str: return "" ret = "" for comment in comments: - if "\"\"\"" in comment: - return comment + "\n" + if comment.lstrip() == "": + ret += "# \n" + for c_line in comment.splitlines(True): if c_line == "\n": ret += print_n_spaces(indent) + "#" + "\n" continue elif c_line.lstrip() == "": continue + ret += print_n_spaces(indent) if c_line[len(c_line) - len(c_line.lstrip())] != "#": - ret += "#" + ret += "# " + ret += c_line + "\n" + if len(comment.splitlines(True)) > 1: ret += print_n_spaces(indent) + if len(comments) > 0 and newline: ret += "\n" diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 index 99890c0ab..7c6b43df6 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/@SYNAPSE_NAME@.py.jinja2 @@ -208,8 +208,8 @@ class Synapse_{{ astnode.name }}(Synapse): {% for function in astnode.get_functions() -%} {{ function_declaration.FunctionDeclaration(function, astnode.name) }}: {%- filter indent(4,True) %} -{%- with ast = function.get_block() %} -{%- include "directives_py/Block.jinja2" %} +{%- with ast = function.get_stmts_body() %} +{%- include "directives_py/StmtsBody.jinja2" %} {%- endwith %} {%- endfilter %} {%- endfor %} @@ -302,8 +302,8 @@ class Synapse_{{ astnode.name }}(Synapse): {% if astnode.get_update_blocks() | length > 0 %} {%- 
filter indent(4) %} {%- for dynamics in astnode.get_update_blocks() %} -{%- set ast = dynamics.get_block() %} -{%- include "directives_py/Block.jinja2" %} +{%- set ast = dynamics.get_stmts_body() %} +{%- include "directives_py/StmtsBody.jinja2" %} {%- endfor %} {%- endfilter %} @@ -349,12 +349,12 @@ class Synapse_{{ astnode.name }}(Synapse): # ------------------------------------------------------------------------- {%- for block in astnode.get_on_condition_blocks() %} if {{ printer.print(block.get_cond_expr()) }}: -{%- set ast = block.get_block() %} +{%- set ast = block.get_stmts_body() %} {%- if ast.print_comment('#') | length > 1 %} # {{ast.print_comment('#')}} {%- endif %} {%- filter indent(6) %} -{%- include "directives_py/Block.jinja2" %} +{%- include "directives_py/StmtsBody.jinja2" %} {%- endfilter %} {%- endfor %} @@ -386,9 +386,9 @@ class Synapse_{{ astnode.name }}(Synapse): # ------------------------------------------------------------------------- {%- for blk in astnode.get_on_receive_blocks() %} -{%- set ast = blk.get_block() %} +{%- set ast = blk.get_stmts_body() %} def on_receive_block_{{ blk.get_port_name() }}(self): {%- filter indent(4, True) -%} -{%- include "directives_py/Block.jinja2" %} +{%- include "directives_py/StmtsBody.jinja2" %} {%- endfilter %} {% endfor %} diff --git a/tests/test_unit_system.py b/tests/test_unit_system.py index 5b71ce6e1..77b3b0834 100644 --- a/tests/test_unit_system.py +++ b/tests/test_unit_system.py @@ -72,7 +72,7 @@ def setUp(self, request): def get_first_statement_in_update_block(self, model): if model.get_model_list()[0].get_update_blocks()[0]: - return model.get_model_list()[0].get_update_blocks()[0].get_block().get_stmts()[0] + return model.get_model_list()[0].get_update_blocks()[0].get_stmts_body().get_stmts()[0] return None @@ -103,7 +103,7 @@ def print_rhs_of_first_declaration_in_state_block(self, model): def print_first_return_statement_in_first_declared_function(self, model): func = 
self.get_first_declared_function(model) - return_expression = func.get_block().get_stmts()[0].small_stmt.get_return_stmt().get_expression() + return_expression = func.get_stmts_body().get_stmts()[0].small_stmt.get_return_stmt().get_expression() return self.printer.print(return_expression) def test_expression_after_magnitude_conversion_in_direct_assignment(self): diff --git a/tests/valid/CoCoVariableWithSameNameAsUnit.nestml b/tests/valid/CoCoVariableWithSameNameAsUnit.nestml index 6d580979e..207771724 100644 --- a/tests/valid/CoCoVariableWithSameNameAsUnit.nestml +++ b/tests/valid/CoCoVariableWithSameNameAsUnit.nestml @@ -1,35 +1,35 @@ # CoCoVariableRedeclared.nestml # ############################# -# -# +# +# # Description # +++++++++++ -# +# # This model is used to test if broken CoCos are identified correctly. Here, if redeclaration of symbols is detected. -# +# # Negative case. -# -# +# +# # Copyright statement # +++++++++++++++++++ -# +# # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative -# +# # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. -# +# # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. -# +# # You should have received a copy of the GNU General Public License # along with NEST. If not, see . # model CoCoVariableWithSameNameAsUnit: state: - eV mV = 1 mV # should not conflict with predefined unit eV but throw a warning + m mV = 1 mV # should not conflict with predefined unit m but throw a warning From fd5e931ec231b28a20f63a33f58d26ce97c31031 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Fri, 14 Mar 2025 19:01:25 +0100 Subject: [PATCH 41/68] refactor NESTML printer --- .../codegeneration/printers/nestml_printer.py | 38 +++++++++++-------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index 18a23eed3..6327bc3cd 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -93,10 +93,8 @@ def __init__(self): def print_model(self, node: ASTModel) -> str: ret = print_ml_comments(node.pre_comments, self.indent, False) - self.inc_indent() ret += "model " + node.get_name() + ":" + print_sl_comment(node.in_comment) ret += "\n" + self.print(node.get_body()) - self.dec_indent() return ret def print_constant(self, const: Union[str, float, int]) -> str: @@ -169,10 +167,8 @@ def print_stmts_body(self, node: ASTStmtsBody) -> str: return ret def print_block_with_variables(self, node: ASTBlockWithVariables) -> str: - temp_indent = self.indent - self.inc_indent() - ret = print_ml_comments(node.pre_comments, temp_indent, False) - ret += print_n_spaces(temp_indent) + ret = print_ml_comments(node.pre_comments, self.indent, False) + ret += print_n_spaces(self.indent) if node.is_state: ret += "state" elif node.is_parameters: @@ -182,17 +178,19 @@ def print_block_with_variables(self, node: ASTBlockWithVariables) -> str: ret += "internals" ret += ":" + print_sl_comment(node.in_comment) + "\n" if node.get_declarations() is not None: + self.inc_indent() for decl in node.get_declarations(): ret += self.print(decl) - - self.dec_indent() + self.dec_indent() return ret def print_model_body(self, node: ASTModelBody) -> str: + self.inc_indent() ret = "" for elem in node.body_elements: ret += self.print(elem) + self.dec_indent() return ret def print_compound_stmt(self, node: ASTCompoundStmt) -> str: @@ -297,7 +295,7 @@ def print_function_call(self, node: ASTFunctionCall) -> str: def 
print_function(self, node: ASTFunction) -> str: ret = print_ml_comments(node.pre_comments, self.indent) - ret += "function " + node.get_name() + "(" + ret += print_n_spaces(self.indent) + "function " + node.get_name() + "(" if node.has_parameters(): for par in node.get_parameters(): ret += self.print(par) @@ -317,6 +315,7 @@ def print_if_clause(self, node: ASTIfClause) -> str: self.inc_indent() ret += self.print(node.get_stmts_body()) self.dec_indent() + return ret def print_if_stmt(self, node: ASTIfStmt) -> str: @@ -327,6 +326,7 @@ def print_if_stmt(self, node: ASTIfStmt) -> str: if node.get_else_clause() is not None: ret += self.print(node.get_else_clause()) ret += print_n_spaces(self.indent) + "\n" + return ret def print_input_block(self, node: ASTInputBlock) -> str: @@ -408,6 +408,14 @@ def print_output_block(self, node: ASTOutputBlock) -> str: ret += print_n_spaces(self.indent) + "output:\n" ret += print_n_spaces(self.indent + 4) ret += "spike" if node.is_spike() else "continuous" + if node.get_attributes(): + ret += "(" + for i, attr in enumerate(node.get_attributes()): + ret += self.print(attr) + if i < len(node.get_attributes()) - 1: + ret += ", " + + ret += ")" ret += print_sl_comment(node.in_comment) ret += "\n" return ret @@ -449,12 +457,11 @@ def print_unit_type(self, node: ASTUnitType) -> str: return self.print(node.base) + "**" + str(node.exponent) if node.is_arithmetic_expression(): - t_lhs = ( - self.print(node.get_lhs()) if isinstance(node.get_lhs(), ASTUnitType) else str(node.get_lhs())) + t_lhs = self.print(node.get_lhs()) if isinstance(node.get_lhs(), ASTUnitType) else str(node.get_lhs()) if node.is_times: return t_lhs + "*" + self.print(node.get_rhs()) - else: - return t_lhs + "/" + self.print(node.get_rhs()) + + return t_lhs + "/" + self.print(node.get_rhs()) return node.unit @@ -464,6 +471,7 @@ def print_on_receive_block(self, node: ASTOnReceiveBlock) -> str: self.inc_indent() ret += self.print(node.get_stmts_body()) self.dec_indent() + 
return ret def print_on_condition_block(self, node: ASTOnConditionBlock) -> str: @@ -472,6 +480,7 @@ def print_on_condition_block(self, node: ASTOnConditionBlock) -> str: self.inc_indent() ret += self.print(node.get_stmts_body()) self.dec_indent() + return ret def print_update_block(self, node: ASTUpdateBlock): @@ -489,6 +498,7 @@ def print_while_stmt(self, node: ASTWhileStmt) -> str: self.inc_indent() ret += self.print(node.get_stmts_body()) self.dec_indent() + return ret def inc_indent(self): @@ -514,8 +524,6 @@ def print_ml_comments(comments, indent=0, newline=False) -> str: if c_line == "\n": ret += print_n_spaces(indent) + "#" + "\n" continue - elif c_line.lstrip() == "": - continue ret += print_n_spaces(indent) if c_line[len(c_line) - len(c_line.lstrip())] != "#": From a51e6a16a9def01aba015ebda53ffd86fd1697ef Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Wed, 19 Mar 2025 13:40:59 +0100 Subject: [PATCH 42/68] refactor NESTML printer --- .../printers/nestml_expression_printer.py | 36 +++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/pynestml/codegeneration/printers/nestml_expression_printer.py b/pynestml/codegeneration/printers/nestml_expression_printer.py index ac8b62209..f5795cde1 100644 --- a/pynestml/codegeneration/printers/nestml_expression_printer.py +++ b/pynestml/codegeneration/printers/nestml_expression_printer.py @@ -25,6 +25,7 @@ from pynestml.meta_model.ast_expression import ASTExpression from pynestml.meta_model.ast_logical_operator import ASTLogicalOperator from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_unary_operator import ASTUnaryOperator @@ -85,7 +86,7 @@ def print_comparison_operator(self, node: ASTComparisonOperator) -> str: if node.is_gt: return " > " - raise RuntimeError("(PyNestML.ComparisonOperator.Print) Type of comparison operator not specified!") + raise RuntimeError("Type of comparison 
operator not specified!") def print_unary_operator(self, node: ASTUnaryOperator) -> str: if node.is_unary_plus: @@ -118,24 +119,47 @@ def print_arithmetic_operator(self, node: ASTArithmeticOperator) -> str: if node.is_pow_op: return " ** " - raise RuntimeError("(PyNestML.ArithmeticOperator.Print) Arithmetic operator not specified.") + raise RuntimeError("Arithmetic operator not specified.") def print_expression(self, node: ASTExpression) -> str: ret = "" if node.is_expression(): if node.is_encapsulated: ret += "(" + if node.is_logical_not: ret += "not " + if node.is_unary_operator(): - ret += self.print(node.get_unary_operator()) - ret += self.print(node.get_expression()) + ret += self.print_unary_operator(node.get_unary_operator()) + + if isinstance(node.get_expression(), ASTExpression): + ret += self.print_expression(node.get_expression()) + elif isinstance(node.get_expression(), ASTSimpleExpression): + ret += self._simple_expression_printer.print_simple_expression(node.get_expression()) + else: + raise RuntimeError("Unknown node type") + if node.is_encapsulated: ret += ")" + elif node.is_compound_expression(): - ret += self.print(node.get_lhs()) + if isinstance(node.get_lhs(), ASTExpression): + ret += self.print_expression(node.get_lhs()) + elif isinstance(node.get_lhs(), ASTSimpleExpression): + ret += self._simple_expression_printer.print_simple_expression(node.get_lhs()) + else: + raise RuntimeError("Unknown node type") + ret += self.print(node.get_binary_operator()) - ret += self.print(node.get_rhs()) + + if isinstance(node.get_rhs(), ASTExpression): + ret += self.print_expression(node.get_rhs()) + elif isinstance(node.get_rhs(), ASTSimpleExpression): + ret += self._simple_expression_printer.print_simple_expression(node.get_rhs()) + else: + raise RuntimeError("Unknown node type") + elif node.is_ternary_operator(): ret += self.print(node.get_condition()) + "?" 
+ self.print( node.get_if_true()) + ":" + self.print(node.get_if_not()) From 5d79b21bdd838105cc9d1e62f57562b8f8674ddc Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Wed, 19 Mar 2025 21:32:12 +0100 Subject: [PATCH 43/68] add attributes to spiking input ports --- models/neurons/iaf_psc_delta_neuron.nestml | 26 +-- .../codegeneration/nest_code_generator.py | 5 +- .../printers/cpp_function_call_printer.py | 7 +- .../printers/nest_variable_printer.py | 37 ++- .../codegeneration/printers/nestml_printer.py | 3 - .../printers/nestml_variable_printer.py | 2 +- .../printers/ode_toolbox_variable_printer.py | 11 +- .../point_neuron/common/NeuronClass.jinja2 | 3 + pynestml/utils/ast_utils.py | 218 +++++++++++++++++- 9 files changed, 274 insertions(+), 38 deletions(-) diff --git a/models/neurons/iaf_psc_delta_neuron.nestml b/models/neurons/iaf_psc_delta_neuron.nestml index d121c6a2b..51f87ed85 100644 --- a/models/neurons/iaf_psc_delta_neuron.nestml +++ b/models/neurons/iaf_psc_delta_neuron.nestml @@ -1,29 +1,29 @@ # iaf_psc_delta - Current-based leaky integrate-and-fire neuron model with delta-kernel post-synaptic currents # ############################################################################################################ -# +# # Description # +++++++++++ -# +# # iaf_psc_delta is an implementation of a leaky integrate-and-fire model # where the potential jumps on each spike arrival. -# +# # The threshold crossing is followed by an absolute refractory period # during which the membrane potential is clamped to the resting potential. -# +# # Spikes arriving while the neuron is refractory, are discarded by # default. If the property ``with_refr_input`` is set to true, such # spikes are added to the membrane potential at the end of the # refractory period, dampened according to the interval between # arrival and end of refractoriness. 
-# +# # The general framework for the consistent formulation of systems with # neuron like dynamics interacting by point events is described in # [1]_. A flow chart can be found in [2]_. -# -# +# +# # References # ++++++++++ -# +# # .. [1] Rotter S, Diesmann M (1999). Exact simulation of # time-invariant linear systems with applications to neuronal # modeling. Biologial Cybernetics 81:381-402. @@ -32,11 +32,11 @@ # space analysis of synchronous spiking in cortical neural # networks. Neurocomputing 38-40:565-571. # DOI: https://doi.org/10.1016/S0925-2312(01)00409-X -# -# +# +# # See also # ++++++++ -# +# # iaf_psc_alpha, iaf_psc_exp # # @@ -46,13 +46,13 @@ model iaf_psc_delta_neuron: refr_t ms = 0 ms # Refractory period timer equations: - V_m' = -(V_m - E_L) / tau_m + (I_e + I_stim) / C_m + spike_in_port.psp + V_m' = -(V_m - E_L) / tau_m + (I_e + I_stim) / C_m + spike_in_port.psp * (1E-6 s / ms) # XXX: TODO: This should simply read ``... + spike_in_port.psp`` (see https://github.com/nest/nestml/issues/984) refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) parameters: tau_m ms = 10 ms # Membrane time constant C_m pF = 250 pF # Capacity of the membrane - refr_T ms = 2 ms # Duration of refractory period + refr_T ms = 2 ms # Duration of refractory period tau_syn ms = 2 ms # Time constant of synaptic current E_L mV = -70 mV # Resting membrane potential V_reset mV = -70 mV # Reset potential of the membrane diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index ac3931cc5..cc3cc80a6 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -360,7 +360,10 @@ def analyse_neuron(self, neuron: ASTModel) -> Tuple[Dict[str, ASTAssignment], Di kernel_buffers = ASTUtils.generate_kernel_buffers(neuron, equations_block) InlineExpressionExpansionTransformer().transform(neuron) - delta_factors = ASTUtils.get_delta_factors_(neuron, equations_block) + #delta_factors = ASTUtils.get_delta_factors_(neuron, equations_block) + print(neuron) + delta_factors = ASTUtils.get_delta_factors_from_input_port_references(neuron) + delta_factors |= ASTUtils.get_delta_factors_from_convolutions(neuron) ASTUtils.replace_convolve_calls_with_buffers_(neuron, equations_block) # Collect all equations with delay variables and replace ASTFunctionCall to ASTVariable wherever necessary diff --git a/pynestml/codegeneration/printers/cpp_function_call_printer.py b/pynestml/codegeneration/printers/cpp_function_call_printer.py index 11beba1bd..3c31d5660 100644 --- a/pynestml/codegeneration/printers/cpp_function_call_printer.py +++ b/pynestml/codegeneration/printers/cpp_function_call_printer.py @@ -23,15 +23,14 @@ import re -from pynestml.symbols.symbol import SymbolKind - from pynestml.codegeneration.printers.function_call_printer import FunctionCallPrinter from pynestml.meta_model.ast_function_call import ASTFunctionCall +from 
pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbol_table.scope import Scope from pynestml.symbols.predefined_functions import PredefinedFunctions +from pynestml.symbols.symbol import SymbolKind from pynestml.utils.ast_utils import ASTUtils -from pynestml.meta_model.ast_node import ASTNode -from pynestml.meta_model.ast_variable import ASTVariable class CppFunctionCallPrinter(FunctionCallPrinter): diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index 295668860..ee4ae6ab7 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -51,6 +51,11 @@ def __init__(self, expression_printer: ExpressionPrinter, with_origin: bool = Tr self.variables_special_cases = variables_special_cases self.cpp_variable_suffix = "" self.postsynaptic_getter_string_ = "start->get_%s()" + self.buffers_are_zero = True + + def set_buffers_to_zero(self, buffers_are_zero: bool): + self.buffers_are_zero = buffers_are_zero + return "" def set_getter_string(self, s): r"""Returns the empty string, because this method can be called from inside the Jinja template""" @@ -98,7 +103,7 @@ def print_variable(self, variable: ASTVariable) -> str: if variable.get_name() == PredefinedVariables.TIME_CONSTANT: return "get_t()" - symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name(), SymbolKind.VARIABLE) + symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name().replace("__DOT__", "."), SymbolKind.VARIABLE) if symbol is None: # test if variable name can be resolved to a type @@ -123,10 +128,15 @@ def print_variable(self, variable: ASTVariable) -> str: if not units_conversion_factor == 1: s += "(" + str(units_conversion_factor) + " * " if self.cpp_variable_suffix == "": + if self.buffers_are_zero and symbol.is_spike_input_port(): + # XXX do this 
in a derived class + return "0.0" + s += "B_." s += self._print_buffer_value(variable) if not units_conversion_factor == 1: s += ")" + import pdb;pdb.set_trace() return s if symbol.is_inline_expression: @@ -156,14 +166,13 @@ def _print_delay_variable(self, variable: ASTVariable) -> str: return "" - def _print_buffer_value(self, variable: ASTVariable) -> str: - """ - Converts for a handed over symbol the corresponding name of the buffer to a nest processable format. - :param variable: a single variable symbol. - :return: the corresponding representation as a string - """ - variable_symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name(), SymbolKind.VARIABLE) + def __print_buffer_value(self, variable: ASTVariable) -> str: + + variable_symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name().replace("__DOT__", "."), SymbolKind.VARIABLE) if variable_symbol.is_spike_input_port(): + if self.buffers_are_zero: + return "0.0" # XXX this should be spun off to a NESTVariablePrinterWithFactorsAsZeros + var_name = variable_symbol.get_symbol_name().upper() if variable.has_vector_parameter(): if variable.get_vector_parameter().is_variable(): @@ -172,7 +181,6 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: else: var_name += "_" + str(variable.get_vector_parameter()) - if variable.has_vector_parameter(): # add variable attribute if it exists if variable.attribute: return "__spike_input_" + str(variable.name) + "_VEC_IDX_" + str(variable.get_vector_parameter()) + "__DOT__" + variable.attribute @@ -191,6 +199,17 @@ def _print_buffer_value(self, variable: ASTVariable) -> str: # case of continuous-time input port return variable_symbol.get_symbol_name() + '_grid_sum_' + def _print_buffer_value(self, variable: ASTVariable) -> str: + """ + Converts for a handed over symbol the corresponding name of the buffer to a nest processable format. + :param variable: a single variable symbol. 
+ :return: the corresponding representation as a string + """ + if variable.get_implicit_conversion_factor() and not variable.get_implicit_conversion_factor() == 1: + return "(" + str(variable.get_implicit_conversion_factor()) + " * (" + self.__print_buffer_value(variable) + "))" + + return self.__print_buffer_value(variable) + def _print(self, variable: ASTVariable, symbol, with_origin: bool = True) -> str: variable_name = CppVariablePrinter._print_cpp_name(variable.get_complete_name()) diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index 34262039f..1c0bea6ec 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -483,9 +483,6 @@ def print_update_block(self, node: ASTUpdateBlock): self.inc_indent() ret += self.print(node.get_stmts_body()) self.dec_indent() - - if node.get_attribute(): - ret += "." + node.get_attribute() return ret def print_while_stmt(self, node: ASTWhileStmt) -> str: diff --git a/pynestml/codegeneration/printers/nestml_variable_printer.py b/pynestml/codegeneration/printers/nestml_variable_printer.py index e55f49da2..04dfd611a 100644 --- a/pynestml/codegeneration/printers/nestml_variable_printer.py +++ b/pynestml/codegeneration/printers/nestml_variable_printer.py @@ -34,7 +34,7 @@ def print_variable(self, node: ASTVariable): ret = node.name if node.get_attribute(): - s += "." + node.get_attribute() + ret += "." 
+ node.get_attribute() if node.get_vector_parameter(): ret += "[" + self._expression_printer.print(node.get_vector_parameter()) + "]" diff --git a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py index f51d08d07..e62be0458 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py @@ -36,12 +36,11 @@ def print_variable(self, node: ASTVariable) -> str: :param node: the node to print :return: string representation """ - s = node.get_name().replace("$", "__DOLLAR") + "__d" * node.get_differential_order() + s = node.get_name().replace("$", "__DOLLAR") - # input ports that appear here should be treated as trains of delta pulses - model = ASTUtils.find_parent_node_by_type(node, ASTModel) - inport = ASTUtils.get_input_port_by_name(model.get_input_blocks(), node.get_name()) - if inport and inport.is_spike(): - return "0.0" + if node.get_attribute(): + s += "__DOT__" + node.get_attribute() + + s += "__d" * node.get_differential_order() return s diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index c9609fa7d..054ee8e07 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -968,11 +968,14 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const /** * spike updates due to convolutions **/ + +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(False) }} {{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} {% filter indent(4) %} {%- include "directives_cpp/ApplySpikesFromBuffers.jinja2" %} {%- endfilter %} {{ 
printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix("") }} +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(True) }} /** * Begin NESTML generated code for the onCondition block(s) diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 00a6d0b4a..fb3abf862 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -27,7 +27,7 @@ import odetoolbox from pynestml.codegeneration.printers.ast_printer import ASTPrinter -from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter +from pynestml.codegeneration.printers.nestml_simple_expression_printer_units_as_factors import NESTMLSimpleExpressionPrinterUnitsAsFactors from pynestml.frontend.frontend_configuration import FrontendConfiguration from pynestml.generated.PyNestMLLexer import PyNestMLLexer from pynestml.meta_model.ast_assignment import ASTAssignment @@ -2199,12 +2199,16 @@ def remove_ode_definitions_from_equations_block(cls, model: ASTModel) -> None: def get_delta_factors_(cls, neuron: ASTModel, equations_block: ASTEquationsBlock) -> dict: r""" For every occurrence of a convolution of the form `x^(n) = a * convolve(kernel, inport) + ...` where `kernel` is a delta function, add the element `(x^(n), inport) --> a` to the set. + + For every occurrence of a mention of the input port `x^(n) = a * inport`, add the element `(x^(n), inport) --> a` to the set. 
""" delta_factors = {} for ode_eq in equations_block.get_ode_equations(): var = ode_eq.get_lhs() expr = ode_eq.get_rhs() + + # add the convolutions conv_calls = ASTUtils.get_convolve_function_calls(expr) for conv_call in conv_calls: assert len( @@ -2223,7 +2227,203 @@ def get_delta_factors_(cls, neuron: ASTModel, equations_block: ASTEquationsBlock factor_str = " + ".join(factor_str) delta_factors[(var, inport)] = factor_str.replace("__DOT__", ".") + # add the mentions on an input port + input_port_terms = ASTUtils.get_spiking_input_port_terms(neuron, expr) + for input_port_term in input_port_terms: + expr_str = str(expr) + sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str.replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) + sympy_expr = sympy.expand(sympy_expr) + sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(input_port_term.name.replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) + factor_str = [] + for term in sympy.Add.make_args(sympy_expr): + if str(term).split("__DOT__")[0] == str(sympy_conv_expr): + # factor_str.append(str(term.replace(sympy_conv_expr, 1))) + factor_str.append("1") + + factor_str = " + ".join(factor_str) + factor_str = factor_str.replace("__DOT__", ".") + + if (var, input_port_term) in delta_factors.keys(): + delta_factors[(var, input_port_term)] += " + " + factor_str + else: + delta_factors[(var, input_port_term)] = factor_str + + return delta_factors + + + + + + + + + + + + + + + + + + + + + + @classmethod + def get_delta_factors_from_convolutions(cls, model: ASTModel) -> dict: + r""" + For every occurrence of a convolution of the form `x^(n) = a * convolve(kernel, inport) + ...` where `kernel` is a delta function, add the element `(x^(n), inport) --> a` to the set. 
+ """ + delta_factors = {} + + for equations_block in model.get_equations_blocks(): + for ode_eq in equations_block.get_ode_equations(): + var = ode_eq.get_lhs() + expr = ode_eq.get_rhs() + conv_calls = ASTUtils.get_convolve_function_calls(expr) + for conv_call in conv_calls: + assert len(conv_call.args) == 2, "convolve() function call should have precisely two arguments: kernel and spike input port" + kernel = conv_call.args[0] + if ASTUtils.is_delta_kernel(model.get_kernel_by_name(kernel.get_variable().get_name())): + inport = conv_call.args[1].get_variable() + factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, str(conv_call)) + assert factor_str + delta_factors[(var, inport)] = factor_str + + return delta_factors + + @classmethod + def get_factor_str_from_expr_and_inport(cls, expr, sub_expr): + from sympy.physics.units import Quantity, Unit, siemens, milli, micro, nano, pico, femto, kilo, mega, volt, ampere, ohm, farad, second, meter, hertz + from sympy import sympify + + units = { + 'V': volt, # Volt + 'mV': milli * volt, # Millivolt (10^-3 V) + 'uV': micro * volt, # Microvolt (10^-6 V) + 'nV': nano * volt, # Nanovolt (10^-9 V) + + 'S': siemens, # Ampere + 'nS': nano * siemens, # Ampere + + 'A': ampere, # Ampere + 'mA': milli * ampere, # Milliampere (10^-3 A) + 'uA': micro * ampere, # Microampere (10^-6 A) + 'nA': nano * ampere, # Nanoampere (10^-9 A) + + 'Ohm': ohm, # Ohm + 'kOhm': kilo * ohm, # Kiloohm (10^3 Ohm) + 'MOhm': mega * ohm, # Megaohm (10^6 Ohm) + + 'F': farad, # Farad + 'uF': micro * farad, # Microfarad (10^-6 F) + 'nF': nano * farad, # Nanofarad (10^-9 F) + 'pF': pico * farad, # Picofarad (10^-12 F) + 'fF': femto * farad, # Femtofarad (10^-15 F) + + 's': second, # Second + 'ms': milli * second, # Millisecond (10^-3 s) + 'us': micro * second, # Microsecond (10^-6 s) + 'ns': nano * second, # Nanosecond (10^-9 s) + + 'Hz': hertz, # Hertz (1/s) + 'kHz': kilo * hertz, # Kilohertz (10^3 Hz) + 'MHz': mega * hertz, # Megahertz (10^6 Hz) + + 
'm': meter, # Meter + 'mm': milli * meter, # Millimeter (10^-3 m) + 'um': micro * meter, # Micrometer (10^-6 m) + 'nm': nano * meter, # Nanometer (10^-9 m) + } + + from pynestml.codegeneration.printers.constant_printer import ConstantPrinter + from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter + from pynestml.codegeneration.printers.nest_cpp_function_call_printer import NESTCppFunctionCallPrinter + from pynestml.codegeneration.printers.nestml_printer import NESTMLPrinter + from pynestml.codegeneration.printers.ode_toolbox_expression_printer import ODEToolboxExpressionPrinter + from pynestml.codegeneration.printers.ode_toolbox_variable_printer import ODEToolboxVariablePrinter + + printer = NESTMLPrinter() + printer._expression_printer = ODEToolboxExpressionPrinter(simple_expression_printer=None) + printer._constant_printer = ConstantPrinter() + printer._function_call_printer = NESTCppFunctionCallPrinter(expression_printer=printer._expression_printer) + printer._variable_printer = ODEToolboxVariablePrinter(expression_printer=printer._expression_printer) + printer._simple_expression_printer = NESTMLSimpleExpressionPrinterUnitsAsFactors(variable_printer=printer._variable_printer, function_call_printer=printer._function_call_printer, constant_printer=printer._constant_printer) + printer._expression_printer._simple_expression_printer = printer._simple_expression_printer + + expr_str = printer.print(expr) + + print("In get_delta_factors_from_input_port_references(): parsing " + expr_str) + sympy_expr = sympify(expr_str, locals=units) + sympy_expr = sympy.expand(sympy_expr) + sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(sub_expr) + factor_str = [] + for term in sympy.Add.make_args(sympy_expr): + if term.find(sympy_conv_expr): + _expr = str(term.replace(sympy_conv_expr, 1)) + factor_str.append(_expr) + + factor_str = " + ".join(factor_str) + + return factor_str + + @classmethod + def 
get_delta_factors_from_input_port_references(cls, model: ASTModel) -> dict: + r""" + For every occurrence of a convolution of the form ``x^(n) = a * inport + ...``, add the element `(x^(n), inport) --> a` to the set. + """ + delta_factors = {} + print("-----") + print("get_delta_factors_from_input_port_references") + + spike_inports = model.get_spike_input_ports() + for equations_block in model.get_equations_blocks(): + for ode_eq in equations_block.get_ode_equations(): + var = ode_eq.get_lhs() + expr = ode_eq.get_rhs() + + for inport_sym in spike_inports: + inport_ = ASTUtils.get_input_port_by_name(model.get_input_blocks(), inport_sym.name) + + inport_var = ASTNodeFactory.create_ast_variable(inport_sym.name) + inport_var.update_scope(equations_block.get_scope()) + + factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, inport_var.name) + + if factor_str: + delta_factors[(var, inport_var)] = factor_str + + for param in inport_.get_parameters(): + inport_var = inport_var.clone() + + inport_var.attribute = param.get_name() + + factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, inport_var.name + "__DOT__" + inport_var.attribute) + + if factor_str: + delta_factors[(var, inport_var)] = factor_str + + for k, v in delta_factors.items(): + print("var = " + str(k[0]) + ", inport = " + str(k[1]) + ", expr = " + str(v)) + print("-----") + + return delta_factors + + + + + + + + + + + + + + @classmethod def remove_kernel_definitions_from_equations_block(cls, model: ASTModel) -> ASTDeclaration: @@ -2610,3 +2810,19 @@ def port_name_printer(cls, variable: ASTVariable) -> str: @classmethod def is_parameter(cls, variable) -> str: return isinstance(variable, ASTParameter) + + def get_spiking_input_port_terms(model: ASTModel, expr): + r"""Collect all terms that refer to a spiking input inside ``expr``""" + + spiking_input_port_terms = [] + # print([str(s) for s in expr.get_variables()]) + spike_inports = model.get_spike_input_ports() + spike_inport_names = 
[inport.name for inport in spike_inports] + + for var in expr.get_variables(): + if str(var).split(".")[0] in spike_inport_names: + spiking_input_port_terms.append(var) + + return spiking_input_port_terms + + From 9562927a526b208efcb6bfe24004d33e76db85ce Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Sat, 22 Mar 2025 13:20:10 +0100 Subject: [PATCH 44/68] add attributes to spiking input ports --- models/neurons/iaf_psc_delta_neuron.nestml | 2 +- models/neurons/iaf_psc_exp_neuron.nestml | 32 +++--- .../codegeneration/nest_code_generator.py | 5 +- .../nest_compartmental_code_generator.py | 3 +- .../printers/nest_variable_printer.py | 6 +- .../point_neuron/common/NeuronClass.jinja2 | 10 +- pynestml/utils/ast_utils.py | 99 ++----------------- 7 files changed, 39 insertions(+), 118 deletions(-) diff --git a/models/neurons/iaf_psc_delta_neuron.nestml b/models/neurons/iaf_psc_delta_neuron.nestml index 51f87ed85..d87f5ab6f 100644 --- a/models/neurons/iaf_psc_delta_neuron.nestml +++ b/models/neurons/iaf_psc_delta_neuron.nestml @@ -46,7 +46,7 @@ model iaf_psc_delta_neuron: refr_t ms = 0 ms # Refractory period timer equations: - V_m' = -(V_m - E_L) / tau_m + (I_e + I_stim) / C_m + spike_in_port.psp * (1E-6 s / ms) # XXX: TODO: This should simply read ``... + spike_in_port.psp`` (see https://github.com/nest/nestml/issues/984) + V_m' = -(V_m - E_L) / tau_m + (I_e + I_stim) / C_m + spike_in_port.psp refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) parameters: diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index 4539f6055..3decc1b83 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -1,37 +1,37 @@ # iaf_psc_exp - Leaky integrate-and-fire neuron model # ################################################### -# +# # Description # +++++++++++ -# +# # iaf_psc_exp is an implementation of a leaky integrate-and-fire model # with exponentially decaying synaptic currents according to [1]_. # Thus, postsynaptic currents have an infinitely short rise time. -# +# # The threshold crossing is followed by an absolute refractory period # during which the membrane potential is clamped to the resting potential # and spiking is prohibited. -# +# # The general framework for the consistent formulation of systems with # neuron like dynamics interacting by point events is described in # [1]_. A flow chart can be found in [2]_. -# +# # Critical tests for the formulation of the neuron model are the # comparisons of simulation results for different computation step # sizes. -# +# # .. note:: -# If tau_m is very close to tau_syn_exc or tau_syn_inh, numerical problems -# may arise due to singularities in the propagator matrics. If this is -# the case, replace equal-valued parameters by a single parameter. -# +# If tau_m is very close to tau_syn_exc or tau_syn_inh, numerical problems +# may arise due to singularities in the propagator matrics. If this is +# the case, replace equal-valued parameters by a single parameter. +# # For details, please see ``IAF_neurons_singularity.ipynb`` in # the NEST source code (``docs/model_details``). -# -# +# +# # References # ++++++++++ -# +# # .. [1] Rotter S, Diesmann M (1999). Exact simulation of # time-invariant linear systems with applications to neuronal # modeling. 
Biologial Cybernetics 81:381-402. @@ -44,11 +44,11 @@ # subthreshold integration with continuous spike times in discrete time # neural network simulations. Neural Computation, in press # DOI: https://doi.org/10.1162/neco.2007.19.1.47 -# -# +# +# # See also # ++++++++ -# +# # iaf_psc_delta, iaf_psc_alpha, iaf_cond_exp # # diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index f0f58bfb8..d0f94c5ed 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -358,8 +358,6 @@ def analyse_neuron(self, neuron: ASTModel) -> Tuple[Dict[str, ASTAssignment], Di kernel_buffers = ASTUtils.generate_kernel_buffers(neuron, equations_block) InlineExpressionExpansionTransformer().transform(neuron) - #delta_factors = ASTUtils.get_delta_factors_(neuron, equations_block) - print(neuron) delta_factors = ASTUtils.get_delta_factors_from_input_port_references(neuron) delta_factors |= ASTUtils.get_delta_factors_from_convolutions(neuron) ASTUtils.replace_convolve_calls_with_buffers_(neuron, equations_block) @@ -442,7 +440,8 @@ def analyse_synapse(self, synapse: ASTModel) -> Dict[str, ASTAssignment]: kernel_buffers = ASTUtils.generate_kernel_buffers(synapse, equations_block) InlineExpressionExpansionTransformer().transform(synapse) - delta_factors = ASTUtils.get_delta_factors_(synapse, equations_block) + delta_factors = ASTUtils.get_delta_factors_from_input_port_references(synapse) + delta_factors |= ASTUtils.get_delta_factors_from_convolutions(synapse) ASTUtils.replace_convolve_calls_with_buffers_(synapse, equations_block) analytic_solver, numeric_solver = self.ode_toolbox_analysis(synapse, kernel_buffers) diff --git a/pynestml/codegeneration/nest_compartmental_code_generator.py b/pynestml/codegeneration/nest_compartmental_code_generator.py index 445569744..e1752efbe 100644 --- a/pynestml/codegeneration/nest_compartmental_code_generator.py +++ 
b/pynestml/codegeneration/nest_compartmental_code_generator.py @@ -433,7 +433,8 @@ def analyse_neuron(self, neuron: ASTModel) -> List[ASTAssignment]: # if they have delta kernels, use sympy to expand the expression, then # find the convolve calls and replace them with constant value 1 # then return every subexpression that had that convolve() replaced - delta_factors = ASTUtils.get_delta_factors_(neuron, equations_block) + delta_factors = ASTUtils.get_delta_factors_from_input_port_references(neuron) + delta_factors |= ASTUtils.get_delta_factors_from_convolutions(neuron) # goes through all convolve() inside equations block # extracts what kernel is paired with what spike buffer diff --git a/pynestml/codegeneration/printers/nest_variable_printer.py b/pynestml/codegeneration/printers/nest_variable_printer.py index a88e2b676..645d606ed 100644 --- a/pynestml/codegeneration/printers/nest_variable_printer.py +++ b/pynestml/codegeneration/printers/nest_variable_printer.py @@ -107,6 +107,9 @@ def print_variable(self, variable: ASTVariable) -> str: symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name().replace("__DOT__", "."), SymbolKind.VARIABLE) + if symbol is None: + symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name(), SymbolKind.VARIABLE) + if symbol is None: # test if variable name can be resolved to a type if PredefinedUnits.is_unit(variable.get_complete_name()): @@ -138,7 +141,6 @@ def print_variable(self, variable: ASTVariable) -> str: s += self._print_buffer_value(variable) if not units_conversion_factor == 1: s += ")" - import pdb;pdb.set_trace() return s if symbol.is_inline_expression: @@ -173,7 +175,7 @@ def __print_buffer_value(self, variable: ASTVariable) -> str: variable_symbol = variable.get_scope().resolve_to_symbol(variable.get_complete_name().replace("__DOT__", "."), SymbolKind.VARIABLE) if variable_symbol.is_spike_input_port(): if self.buffers_are_zero: - return "0.0" # XXX this should be spun off to a 
NESTVariablePrinterWithFactorsAsZeros + return "0.0" # XXX this should be spun off to a NESTVariablePrinterWithFactorsAsZeros var_name = variable_symbol.get_symbol_name().upper() if variable.has_vector_parameter(): diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index aac8525e3..8971cf694 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1398,10 +1398,7 @@ void const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function auto get_t = [origin, lag](){ return nest::Time( nest::Time::step( origin.get_steps() + lag + 1) ).get_ms(); }; - /** - * Begin NESTML generated code for the onReceive() block statements - **/ - +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(False) }} {%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} {#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} /** @@ -1422,13 +1419,14 @@ void {%- endfor %} {%- endif %} const double __spike_input_{{ inputPort.name }} = B_.spike_input_{{ inputPort.name }}_grid_sum_; - +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(True) }} /** * Begin NESTML generated code for the onReceive() block statements **/ {{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} + {% filter indent(4, True) -%} {%- include "directives_cpp/StmtsBody.jinja2" %} {%- endfilter %} @@ -1482,11 +1480,13 @@ void * Begin NESTML generated code for the onReceive() block statements **/ +{{ 
printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(False) }} {{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} {% filter indent(4, True) -%} {%- include "directives_cpp/StmtsBody.jinja2" %} {%- endfilter %} {{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix("") }} +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(True) }} /** * Advance the iterators diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 78ddbc3d6..03599f2db 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -27,6 +27,7 @@ import odetoolbox from pynestml.codegeneration.printers.ast_printer import ASTPrinter +from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter from pynestml.codegeneration.printers.nestml_simple_expression_printer_units_as_factors import NESTMLSimpleExpressionPrinterUnitsAsFactors from pynestml.frontend.frontend_configuration import FrontendConfiguration from pynestml.generated.PyNestMLLexer import PyNestMLLexer @@ -2225,82 +2226,6 @@ def remove_ode_definitions_from_equations_block(cls, model: ASTModel) -> None: for decl in decl_to_remove: equations_block.get_declarations().remove(decl) - - @classmethod - def get_delta_factors_(cls, neuron: ASTModel, equations_block: ASTEquationsBlock) -> dict: - r""" - For every occurrence of a convolution of the form `x^(n) = a * convolve(kernel, inport) + ...` where `kernel` is a delta function, add the element `(x^(n), inport) --> a` to the set. - - For every occurrence of a mention of the input port `x^(n) = a * inport`, add the element `(x^(n), inport) --> a` to the set. 
- """ - delta_factors = {} - - for ode_eq in equations_block.get_ode_equations(): - var = ode_eq.get_lhs() - expr = ode_eq.get_rhs() - - # add the convolutions - conv_calls = ASTUtils.get_convolve_function_calls(expr) - for conv_call in conv_calls: - assert len( - conv_call.args) == 2, "convolve() function call should have precisely two arguments: kernel and spike input port" - kernel = conv_call.args[0] - if cls.is_delta_kernel(neuron.get_kernel_by_name(kernel.get_variable().get_name())): - inport = conv_call.args[1].get_variable() - expr_str = str(expr) - sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str.replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) - sympy_expr = sympy.expand(sympy_expr) - sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(str(conv_call).replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) - factor_str = [] - for term in sympy.Add.make_args(sympy_expr): - if term.find(sympy_conv_expr): - factor_str.append(str(term.replace(sympy_conv_expr, 1))) - factor_str = " + ".join(factor_str) - delta_factors[(var, inport)] = factor_str.replace("__DOT__", ".") - - # add the mentions on an input port - input_port_terms = ASTUtils.get_spiking_input_port_terms(neuron, expr) - for input_port_term in input_port_terms: - expr_str = str(expr) - sympy_expr = sympy.parsing.sympy_parser.parse_expr(expr_str.replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) - sympy_expr = sympy.expand(sympy_expr) - sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(input_port_term.name.replace(".", "__DOT__"), global_dict=odetoolbox.Shape._sympy_globals) - factor_str = [] - for term in sympy.Add.make_args(sympy_expr): - if str(term).split("__DOT__")[0] == str(sympy_conv_expr): - # factor_str.append(str(term.replace(sympy_conv_expr, 1))) - factor_str.append("1") - - factor_str = " + ".join(factor_str) - factor_str = factor_str.replace("__DOT__", ".") - - if (var, input_port_term) in delta_factors.keys(): - 
delta_factors[(var, input_port_term)] += " + " + factor_str - else: - delta_factors[(var, input_port_term)] = factor_str - - - return delta_factors - - - - - - - - - - - - - - - - - - - - @classmethod def get_delta_factors_from_convolutions(cls, model: ASTModel) -> dict: @@ -2326,7 +2251,7 @@ def get_delta_factors_from_convolutions(cls, model: ASTModel) -> dict: return delta_factors @classmethod - def get_factor_str_from_expr_and_inport(cls, expr, sub_expr): + def get_factor_str_from_expr_and_inport(cls, expr, sub_expr, skip_if_in_convolve_call: bool = False): from sympy.physics.units import Quantity, Unit, siemens, milli, micro, nano, pico, femto, kilo, mega, volt, ampere, ohm, farad, second, meter, hertz from sympy import sympify @@ -2370,8 +2295,7 @@ def get_factor_str_from_expr_and_inport(cls, expr, sub_expr): } from pynestml.codegeneration.printers.constant_printer import ConstantPrinter - from pynestml.codegeneration.printers.cpp_variable_printer import CppVariablePrinter - from pynestml.codegeneration.printers.nest_cpp_function_call_printer import NESTCppFunctionCallPrinter + from pynestml.codegeneration.printers.nestml_function_call_printer import NESTMLFunctionCallPrinter from pynestml.codegeneration.printers.nestml_printer import NESTMLPrinter from pynestml.codegeneration.printers.ode_toolbox_expression_printer import ODEToolboxExpressionPrinter from pynestml.codegeneration.printers.ode_toolbox_variable_printer import ODEToolboxVariablePrinter @@ -2379,22 +2303,21 @@ def get_factor_str_from_expr_and_inport(cls, expr, sub_expr): printer = NESTMLPrinter() printer._expression_printer = ODEToolboxExpressionPrinter(simple_expression_printer=None) printer._constant_printer = ConstantPrinter() - printer._function_call_printer = NESTCppFunctionCallPrinter(expression_printer=printer._expression_printer) + printer._function_call_printer = NESTMLFunctionCallPrinter(expression_printer=printer._expression_printer) printer._variable_printer = 
ODEToolboxVariablePrinter(expression_printer=printer._expression_printer) printer._simple_expression_printer = NESTMLSimpleExpressionPrinterUnitsAsFactors(variable_printer=printer._variable_printer, function_call_printer=printer._function_call_printer, constant_printer=printer._constant_printer) printer._expression_printer._simple_expression_printer = printer._simple_expression_printer expr_str = printer.print(expr) - print("In get_delta_factors_from_input_port_references(): parsing " + expr_str) sympy_expr = sympify(expr_str, locals=units) sympy_expr = sympy.expand(sympy_expr) sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(sub_expr) factor_str = [] for term in sympy.Add.make_args(sympy_expr): - if term.find(sympy_conv_expr): - _expr = str(term.replace(sympy_conv_expr, 1)) - factor_str.append(_expr) + coeff = term.coeff(sympy_conv_expr) + if coeff: + factor_str.append(str(coeff)) factor_str = " + ".join(factor_str) @@ -2406,8 +2329,6 @@ def get_delta_factors_from_input_port_references(cls, model: ASTModel) -> dict: For every occurrence of a convolution of the form ``x^(n) = a * inport + ...``, add the element `(x^(n), inport) --> a` to the set. 
""" delta_factors = {} - print("-----") - print("get_delta_factors_from_input_port_references") spike_inports = model.get_spike_input_ports() for equations_block in model.get_equations_blocks(): @@ -2421,7 +2342,7 @@ def get_delta_factors_from_input_port_references(cls, model: ASTModel) -> dict: inport_var = ASTNodeFactory.create_ast_variable(inport_sym.name) inport_var.update_scope(equations_block.get_scope()) - factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, inport_var.name) + factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, inport_var.name, skip_if_in_convolve_call=True) if factor_str: delta_factors[(var, inport_var)] = factor_str @@ -2431,7 +2352,7 @@ def get_delta_factors_from_input_port_references(cls, model: ASTModel) -> dict: inport_var.attribute = param.get_name() - factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, inport_var.name + "__DOT__" + inport_var.attribute) + factor_str = ASTUtils.get_factor_str_from_expr_and_inport(expr, inport_var.name + "__DOT__" + inport_var.attribute, skip_if_in_convolve_call=True) if factor_str: delta_factors[(var, inport_var)] = factor_str @@ -2855,5 +2776,3 @@ def get_spiking_input_port_terms(model: ASTModel, expr): spiking_input_port_terms.append(var) return spiking_input_port_terms - - From 4b7c777b0e8a75cceca293c9e1718b41b168c51a Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 24 Mar 2025 09:24:57 +0100 Subject: [PATCH 45/68] add attributes to spiking input ports --- pynestml/utils/ast_utils.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index 03599f2db..c55b93fe3 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2357,26 +2357,8 @@ def get_delta_factors_from_input_port_references(cls, model: ASTModel) -> dict: if factor_str: delta_factors[(var, inport_var)] = factor_str - for k, v in delta_factors.items(): - print("var = " + str(k[0]) + ", inport = " + str(k[1]) + ", expr = " + str(v)) - print("-----") - return delta_factors - - - - - - - - - - - - - - @classmethod def remove_kernel_definitions_from_equations_block(cls, model: ASTModel) -> ASTDeclaration: r""" @@ -2767,7 +2749,6 @@ def get_spiking_input_port_terms(model: ASTModel, expr): r"""Collect all terms that refer to a spiking input inside ``expr``""" spiking_input_port_terms = [] - # print([str(s) for s in expr.get_variables()]) spike_inports = model.get_spike_input_ports() spike_inport_names = [inport.name for inport in spike_inports] From ad55c88f8df956cbdac8f078e8dedef01992dfc5 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 24 Mar 2025 09:41:17 +0100 Subject: [PATCH 46/68] add attributes to spiking input ports --- models/neurons/iaf_psc_exp_dend_neuron.nestml | 73 +++++++++---------- .../ConvolveSpikingNoAttributes.nestml | 49 ++++++------- ...rt_on_equation_rhs_outside_convolve.nestml | 37 ++++++---- ...t_on_equation_rhs_outside_convolve2.nestml | 37 ++++++---- tests/valid/CoCoInputPortsLegal.nestml | 61 ++++++++-------- 5 files changed, 138 insertions(+), 119 deletions(-) diff --git a/models/neurons/iaf_psc_exp_dend_neuron.nestml b/models/neurons/iaf_psc_exp_dend_neuron.nestml index 0751bb414..3f7bd5384 100644 --- a/models/neurons/iaf_psc_exp_dend_neuron.nestml +++ b/models/neurons/iaf_psc_exp_dend_neuron.nestml @@ -1,40 +1,39 @@ -""" -iaf_psc_exp_dend - Leaky integrate-and-fire neuron model with exponential PSCs -######################################################################### - -Description -+++++++++++ - -iaf_psc_exp is an implementation of a leaky integrate-and-fire model -with exponential-kernel postsynaptic currents (PSCs) according to [1]_. -Thus, postsynaptic currents have an infinitely short rise time. - -The threshold crossing is followed by an absolute refractory period (t_ref) -during which the membrane potential is clamped to the resting potential -and spiking is prohibited. - -.. note:: - If tau_m is very close to tau_syn_ex or tau_syn_in, numerical problems - may arise due to singularities in the propagator matrics. If this is - the case, replace equal-valued parameters by a single parameter. - - For details, please see ``IAF_neurons_singularity.ipynb`` in - the NEST source code (``docs/model_details``). - - -References -++++++++++ - -.. [1] Tsodyks M, Uziel A, Markram H (2000). Synchrony generation in recurrent - networks with frequency-dependent synapses. The Journal of Neuroscience, - 20,RC50:1-5. 
URL: https://infoscience.epfl.ch/record/183402 - - -See also -++++++++ - -iaf_cond_exp -""" +# iaf_psc_exp_dend - Leaky integrate-and-fire neuron model with exponential PSCs +# ######################################################################### +# +# Description +# +++++++++++ +# +# iaf_psc_exp is an implementation of a leaky integrate-and-fire model +# with exponential-kernel postsynaptic currents (PSCs) according to [1]_. +# Thus, postsynaptic currents have an infinitely short rise time. +# +# The threshold crossing is followed by an absolute refractory period (t_ref) +# during which the membrane potential is clamped to the resting potential +# and spiking is prohibited. +# +# .. note:: +# If tau_m is very close to tau_syn_ex or tau_syn_in, numerical problems +# may arise due to singularities in the propagator matrics. If this is +# the case, replace equal-valued parameters by a single parameter. +# +# For details, please see ``IAF_neurons_singularity.ipynb`` in +# the NEST source code (``docs/model_details``). +# +# +# References +# ++++++++++ +# +# .. [1] Tsodyks M, Uziel A, Markram H (2000). Synchrony generation in recurrent +# networks with frequency-dependent synapses. The Journal of Neuroscience, +# 20,RC50:1-5. URL: https://infoscience.epfl.ch/record/183402 +# +# +# See also +# ++++++++ +# +# iaf_cond_exp +# model iaf_psc_exp_dend_neuron: state: diff --git a/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml b/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml index 5add886a8..78f3b0bc7 100644 --- a/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml +++ b/tests/nest_tests/resources/ConvolveSpikingNoAttributes.nestml @@ -1,28 +1,27 @@ -""" -ConvolveSpikingNoAttributes - Test convolution with spiking input ports without attributes -########################################################################################## - - -Copyright statement -+++++++++++++++++++ - -This file is part of NEST. 
- -Copyright (C) 2004 The NEST Initiative - -NEST is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 2 of the License, or -(at your option) any later version. - -NEST is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with NEST. If not, see . -""" +# ConvolveSpikingNoAttributes - Test convolution with spiking input ports without attributes +# ########################################################################################## +# +# +# Copyright statement +# +++++++++++++++++++ +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +# model convolve_spiking_no_attributes_neuron: state: x real = 0. 
diff --git a/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml index 867e04d1c..d9638e5f9 100644 --- a/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml +++ b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve.nestml @@ -1,16 +1,27 @@ -""" -spiking_input_port_on_equation_rhs_outside_convolve -################################################### - -Description -+++++++++++ - -.... - -copyright - - -""" +# spiking_input_port_on_equation_rhs_outside_convolve +# ########################################################################################## +# +# +# Copyright statement +# +++++++++++++++++++ +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +# model spiking_input_port_on_equation_rhs_outside_convolve_neuron: state: x real = 0. 
diff --git a/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml index 807c276f7..4d40bb6c8 100644 --- a/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml +++ b/tests/resources/spiking_input_port_on_equation_rhs_outside_convolve2.nestml @@ -1,16 +1,27 @@ -""" -spiking_input_port_on_equation_rhs_outside_convolve -################################################### - -Description -+++++++++++ - -.... - -copyright - - -""" +# spiking_input_port_on_equation_rhs_outside_convolve +# ################################################### +# +# +# Copyright statement +# +++++++++++++++++++ +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +# model spiking_input_port_on_equation_rhs_outside_convolve2_neuron: state: x real = 0. diff --git a/tests/valid/CoCoInputPortsLegal.nestml b/tests/valid/CoCoInputPortsLegal.nestml index 1e735fe5f..15d964e37 100644 --- a/tests/valid/CoCoInputPortsLegal.nestml +++ b/tests/valid/CoCoInputPortsLegal.nestml @@ -1,34 +1,33 @@ -""" -CoCoInputPortsIllegal.nestml -############################ - - -Description -+++++++++++ - -This test is used to test the declaration of both vectorized and non-vectorized input ports. - - -Copyright statement -+++++++++++++++++++ - -This file is part of NEST. 
- -Copyright (C) 2004 The NEST Initiative - -NEST is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, either version 2 of the License, or -(at your option) any later version. - -NEST is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with NEST. If not, see . -""" +# CoCoInputPortsIllegal.nestml +# ############################ +# +# +# Description +# +++++++++++ +# +# This test is used to test the declaration of both vectorized and non-vectorized input ports. +# +# +# Copyright statement +# +++++++++++++++++++ +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +# model input_ports_legal_neuron: state: bar pA = 0 pA From ffdfec5c28dd9b5895878f58526b8b0011b50d03 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 24 Mar 2025 10:34:44 +0100 Subject: [PATCH 47/68] add attributes to spiking input ports --- ...ple_expression_printer_units_as_factors.py | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 pynestml/codegeneration/printers/nestml_simple_expression_printer_units_as_factors.py diff --git a/pynestml/codegeneration/printers/nestml_simple_expression_printer_units_as_factors.py b/pynestml/codegeneration/printers/nestml_simple_expression_printer_units_as_factors.py new file mode 100644 index 000000000..ce9710585 --- /dev/null +++ b/pynestml/codegeneration/printers/nestml_simple_expression_printer_units_as_factors.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# nestml_simple_expression_printer_units_as_factors.py +# +# This file is part of NEST. +# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . + +from pynestml.codegeneration.printers.nestml_simple_expression_printer import NESTMLSimpleExpressionPrinter +from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression + + +class NESTMLSimpleExpressionPrinterUnitsAsFactors(NESTMLSimpleExpressionPrinter): + r""" + Same as the NESTMLPrinter, except print unit literals with a multiplication operator between (for example "42 * ms" instead of "42 ms"). 
+ """ + + def print_simple_expression(self, node: ASTSimpleExpression) -> str: + if node.is_numeric_literal(): + if node.variable is not None: + # numeric literal + physical unit + return str(node.numeric_literal) + " * " + self.print(node.variable) + + return str(node.numeric_literal) + + return super().print_simple_expression(node) From 72efd8faaa16ab8f6b769ff67ea0cc737dfbbb08 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 25 Mar 2025 12:38:56 +0100 Subject: [PATCH 48/68] add attributes to spiking input ports --- pynestml/codegeneration/nest_code_generator.py | 2 +- .../printers/nestml_function_call_printer.py | 2 +- .../printers/nestml_variable_printer.py | 6 +++--- .../printers/ode_toolbox_variable_printer.py | 8 +++++++- .../point_neuron/common/NeuronClass.jinja2 | 2 ++ pynestml/meta_model/ast_node_factory.py | 1 + pynestml/utils/ast_utils.py | 4 +++- .../iaf_psc_exp_multisynapse_vectors.nestml | 18 +++++++++--------- ...r_time_invariant_input_port_optimisation.py | 2 +- tests/test_nestml_printer.py | 2 +- 10 files changed, 29 insertions(+), 18 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index d0f94c5ed..321e3f74f 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -115,7 +115,7 @@ class NESTCodeGenerator(CodeGenerator): - **continuous_state_buffering_method**: Which method to use for buffering state variables between neuron and synapse pairs. 
When a synapse has a "continuous" input port, connected to a postsynaptic neuron, either the value is obtained taking the synaptic (dendritic, that is, synapse-soma) delay into account, requiring a buffer to store the value at each timepoint (``continuous_state_buffering_method = "continuous_time_buffer"); or the value is obtained at the times of the somatic spikes of the postsynaptic neuron, ignoring the synaptic delay (``continuous_state_buffering_method == "post_spike_based"``). The former is more physically accurate but requires a large buffer and can require a long time to simulate. The latter ignores the dendritic delay but is much more computationally efficient. - **delay_variable**: A mapping identifying, for each synapse (the name of which is given as a key), the variable or parameter in the model that corresponds with the NEST ``Connection`` class delay property. - **weight_variable**: Like ``delay_variable``, but for synaptic weight. - - **linear_time_invariant_spiking_input_ports**: A list of spiking input ports which can be treated as linear and time-invariant; this implies that, for the given port(s), the weight of all spikes received within a timestep can be added together, improving memory consumption and runtime performance. Use with caution, for example, this is not compatible with using an input port as one processing inhibitory vs. excitatory spikes depending on the sign of the weight of the spike event. + - **linear_time_invariant_spiking_input_ports**: A list of spiking input ports which can be treated as linear and time-invariant; this implies that, for the given port(s), the weight of all spikes received within a timestep can be added together, improving memory consumption and runtime performance. Use with caution; for example, this is not compatible with using a single input port for, depending on the sign of the weight of the spike event, processing both inhibitory vs. excitatory spikes. 
- **redirect_build_output**: An optional boolean key for redirecting the build output. Setting the key to ``True``, two files will be created for redirecting the ``stdout`` and the ``stderr`. The ``target_path`` will be used as the default location for creating the two files. - **build_output_dir**: An optional string key representing the new path where the files corresponding to the output of the build phase will be created. This key requires that the ``redirect_build_output`` is set to ``True``. diff --git a/pynestml/codegeneration/printers/nestml_function_call_printer.py b/pynestml/codegeneration/printers/nestml_function_call_printer.py index 21efa320d..d0cce13a3 100644 --- a/pynestml/codegeneration/printers/nestml_function_call_printer.py +++ b/pynestml/codegeneration/printers/nestml_function_call_printer.py @@ -33,7 +33,7 @@ def print_function_call(self, node: ASTFunctionCall) -> str: for i in range(0, len(node.get_args())): ret += self._expression_printer.print(node.get_args()[i]) if i < len(node.get_args()) - 1: # in the case that it is not the last arg, print also a comma - ret += "," + ret += ", " ret += ")" diff --git a/pynestml/codegeneration/printers/nestml_variable_printer.py b/pynestml/codegeneration/printers/nestml_variable_printer.py index 04dfd611a..3ee6d5210 100644 --- a/pynestml/codegeneration/printers/nestml_variable_printer.py +++ b/pynestml/codegeneration/printers/nestml_variable_printer.py @@ -33,12 +33,12 @@ def print_variable(self, node: ASTVariable): ret = node.name - if node.get_attribute(): - ret += "." + node.get_attribute() - if node.get_vector_parameter(): ret += "[" + self._expression_printer.print(node.get_vector_parameter()) + "]" + if node.get_attribute(): + ret += "." 
+ node.get_attribute() + for i in range(1, node.differential_order + 1): ret += "'" diff --git a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py index e62be0458..e134e7b4f 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py @@ -37,10 +37,16 @@ def print_variable(self, node: ASTVariable) -> str: :return: string representation """ s = node.get_name().replace("$", "__DOLLAR") + print("PRINTING VARIABLE " + s) + + if node.get_vector_parameter(): + s += "_VEC_IDX_" + self._expression_printer.print(node.get_vector_parameter()) + print("\tPRINTING VARIABLE " + s) if node.get_attribute(): s += "__DOT__" + node.get_attribute() + print("\tPRINTING VARIABLE " + s) s += "__d" * node.get_differential_order() - + print("\tPRINTING VARIABLE " + s) return s diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 8971cf694..589c6f991 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1425,11 +1425,13 @@ void * Begin NESTML generated code for the onReceive() block statements **/ +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(False) }} {{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix(" ") }} {# prevent printing origin #} {% filter indent(4, True) -%} {%- include "directives_cpp/StmtsBody.jinja2" %} {%- endfilter %} +{{ printer._expression_printer._simple_expression_printer._variable_printer.set_buffers_to_zero(True) }} {{ printer._expression_printer._simple_expression_printer._variable_printer.set_cpp_variable_suffix("") }} {%- else %} {#- generic input port: use 
lists of spike events for each buffer slot #} diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index 7a1557179..af7bf9d59 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -354,6 +354,7 @@ def create_ast_update_block(cls, block, source_position): @classmethod def create_ast_variable(cls, name: str, differential_order: int = 0, vector_parameter=None, is_homogeneous=False, attribute: Optional[str] = None, source_position: Optional[ASTSourceLocation] = None, scope: Optional[Scope] = None) -> ASTVariable: var = ASTVariable(name, differential_order, vector_parameter=vector_parameter, is_homogeneous=is_homogeneous, attribute=attribute, source_position=source_position) + if scope: var.scope = scope diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index c55b93fe3..45ceea62e 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2312,7 +2312,7 @@ def get_factor_str_from_expr_and_inport(cls, expr, sub_expr, skip_if_in_convolve sympy_expr = sympify(expr_str, locals=units) sympy_expr = sympy.expand(sympy_expr) - sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(sub_expr) + sympy_conv_expr = sympy.parsing.sympy_parser.parse_expr(sub_expr.replace(".", "__DOT__")) factor_str = [] for term in sympy.Add.make_args(sympy_expr): coeff = term.coeff(sympy_conv_expr) @@ -2357,6 +2357,8 @@ def get_delta_factors_from_input_port_references(cls, model: ASTModel) -> dict: if factor_str: delta_factors[(var, inport_var)] = factor_str + # XXX: what about vectors????? 
+ return delta_factors @classmethod diff --git a/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml b/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml index 4c25a0ae1..af96f9766 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_multisynapse_vectors.nestml @@ -1,25 +1,25 @@ # iaf_psc_exp_multisynapse - Leaky integrate-and-fire neuron model with multiple ports # #################################################################################### -# +# # Description # +++++++++++ -# +# # Used in NESTML unit testing. -# +# # For more information about the model, see iaf_psc_exp in the ``models`` directory. -# +# # For more information about "multisynapse" models, please refer to the NESTML documentation. # model iaf_psc_exp_multisynapse_vectors_neuron: state: - V_m mV = E_L # membrane potential - refr_t ms = 0 ms # Refractory period timer + V_m mV = E_L # membrane potential + refr_t ms = 0 ms # Refractory period timer is_refractory boolean = false equations: - kernel I_kernel1 = exp(-1/tau_syn1*t) - kernel I_kernel2 = exp(-1/tau_syn2*t) - kernel I_kernel3 = -exp(-1/tau_syn3*t) + kernel I_kernel1 = exp(-t / tau_syn1) + kernel I_kernel2 = exp(-t / tau_syn2) + kernel I_kernel3 = -exp(-t / tau_syn3) inline I_syn pA = convolve(I_kernel1, spikes[0].weight) - convolve(I_kernel2, spikes[1].weight) + convolve(I_kernel3, spikes[2].weight) diff --git a/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py b/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py index 81f5df609..be65cc7e6 100644 --- a/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py +++ b/tests/nest_tests/test_linear_time_invariant_input_port_optimisation.py @@ -31,7 +31,7 @@ from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_nest_target 
-TestLinearTimeInvariantInputPortOptimisation_neuron_types = ["aeif_cond_exp", "iaf_psc_delta"] +TestLinearTimeInvariantInputPortOptimisation_neuron_types = ["iaf_psc_delta", "iaf_psc_exp"] @pytest.mark.skipif(NESTTools.detect_nest_version().startswith("v2"), diff --git a/tests/test_nestml_printer.py b/tests/test_nestml_printer.py index 4f3f4e3ed..b1b298a18 100644 --- a/tests/test_nestml_printer.py +++ b/tests/test_nestml_printer.py @@ -105,7 +105,7 @@ def test_function_without_comments(self): def test_function_call_with_comments(self): function_call = "# pre\n" \ - "min(1,2) # in\n" + "min(1, 2) # in\n" model = ModelParser.parse_stmts_body(function_call) model_printer = NESTMLPrinter() assert function_call == model_printer.print(model) From 174551cf5a740658fff4d6863dcefa1595e15ee8 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 25 Mar 2025 14:13:23 +0100 Subject: [PATCH 49/68] add attributes to spiking input ports --- tests/test_nestml_printer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_nestml_printer.py b/tests/test_nestml_printer.py index b1b298a18..b740484cd 100644 --- a/tests/test_nestml_printer.py +++ b/tests/test_nestml_printer.py @@ -111,7 +111,7 @@ def test_function_call_with_comments(self): assert function_call == model_printer.print(model) def test_function_call_without_comments(self): - function_call = "min(1,2)\n" + function_call = "min(1, 2)\n" model = ModelParser.parse_stmt(function_call) model_printer = NESTMLPrinter() assert function_call == model_printer.print(model) From ec7a2906c7d0077ec2fd908fcdcfc14f6f68ae82 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Tue, 25 Mar 2025 16:27:33 +0100 Subject: [PATCH 50/68] add attributes to spiking input ports --- models/neurons/izhikevich_neuron.nestml | 38 +++++++++---------- .../point_neuron/@NEURON_NAME@.py.jinja2 | 3 +- .../test_python_standalone_module.py.jinja2 | 7 +++- 3 files changed, 26 insertions(+), 22 deletions(-) diff --git a/models/neurons/izhikevich_neuron.nestml b/models/neurons/izhikevich_neuron.nestml index a003f257b..8bb08a4d1 100644 --- a/models/neurons/izhikevich_neuron.nestml +++ b/models/neurons/izhikevich_neuron.nestml @@ -1,38 +1,38 @@ # izhikevich - Izhikevich neuron model # #################################### -# +# # Description # +++++++++++ -# +# # Implementation of the simple spiking neuron model introduced by Izhikevich [1]_. The dynamics are given by: -# +# # .. math:: -# +# # dV_{m}/dt &= 0.04 V_{m}^2 + 5 V_{m} + 140 - U_{m} + I\\ # dU_{m}/dt &= a (b V_{m} - U_{m}) -# -# +# +# # .. math:: -# +# # &\text{if}\;\; V_{m} \geq V_{th}:\\ # &\;\;\;\; V_{m} \text{ is set to } c\\ # &\;\;\;\; U_{m} \text{ is incremented by } d\\ # & \, \\ # &V_{m} \text{ jumps on each spike arrival by the weight of the spike} -# +# # Incoming spikes cause an instantaneous jump in the membrane potential proportional to the strength of the synapse. -# +# # As published in [1]_, the numerics differs from the standard forward Euler technique in two ways: -# +# # 1) the new value of :math:`U_{m}` is calculated based on the new value of :math:`V_{m}`, rather than the previous value # 2) the variable :math:`V_{m}` is updated using a time step half the size of that used to update variable :math:`U_{m}`. -# +# # This model will instead be simulated using the numerical solver that is recommended by ODE-toolbox during code generation. -# -# +# +# # References # ++++++++++ -# +# # .. 
[1] Izhikevich, Simple Model of Spiking Neurons, IEEE Transactions on Neural Networks (2003) 14:1569-1572 # # @@ -42,8 +42,8 @@ model izhikevich_neuron: U_m real = b * V_m_init # Membrane potential recovery variable equations: - V_m' = ( 0.04 * V_m * V_m / mV + 5.0 * V_m + ( 140 - U_m ) * mV + ( (I_e + I_stim) * GOhm ) ) / ms - U_m' = a*(b*V_m-U_m * mV) / (mV*ms) + V_m' = (0.04 * V_m * V_m / mV + 5 * V_m + (140 - U_m) * mV + ((I_e + I_stim) * GOhm)) / ms + U_m' = a * (b * V_m - U_m * mV) / (mV * ms) parameters: a real = 0.02 # describes time scale of recovery variable @@ -58,15 +58,15 @@ model izhikevich_neuron: I_e pA = 0 pA input: - spikes <- spike(weight mV) + spike_in_port <- spike(weight mV) I_stim pA <- continuous output: spike - onReceive(spikes): + onReceive(spike_in_port): # Add synaptic contribution - V_m += spikes.weight + V_m += spike_in_port.weight # lower bound of membrane potential V_m = max(V_min, V_m) diff --git a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 index cb1e2cd77..72d72d4c4 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/@NEURON_NAME@.py.jinja2 @@ -313,7 +313,8 @@ class Neuron_{{neuronName}}(Neuron): {% for blk in neuron.get_on_receive_blocks() %} if self.B_.spike_received_{{ utils.port_name_printer(blk.get_input_port_variable()) }}: # loop over all spikes received since last timestep - for i in range(len(self.B_.{{ utils.port_name_printer(blk.get_input_port_variable()) }})): + n_spikes_received: int = len(self.B_.{{ utils.port_name_printer(blk.get_input_port_variable()) }}) + for i in range(n_spikes_received): self.on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}() {%- endfor %} diff --git 
a/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 b/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 index 2454c6a1f..a11e6ed5a 100644 --- a/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 +++ b/pynestml/codegeneration/resources_python_standalone/point_neuron/test_python_standalone_module.py.jinja2 @@ -42,12 +42,15 @@ class TestSimulator: sg_exc = simulator.add_neuron(SpikeGenerator(interval=10.)) sg_inh = simulator.add_neuron(SpikeGenerator(interval=50.)) {% for neuron in neurons %} - neuron = simulator.add_neuron(Neuron_{{neuron.get_name()}}(timestep=simulator.timestep)) + neuron = simulator.add_neuron(Neuron_{{ neuron.get_name() }}(timestep=simulator.timestep)) if "exc_spikes" in simulator.neurons[neuron].get_spiking_input_ports(): simulator.connect(sg_exc, neuron, "exc_spikes", w=1000.) simulator.connect(sg_inh, neuron, "inh_spikes", w=4000.) else: - simulator.connect(sg_exc, neuron, "spikes", w=1000.) + simulator.connect(sg_exc, neuron, "spike_in_port", w=1000.) +{%- if not "izhikevich" in neuron.get_name() %} + simulator.connect(sg_inh, neuron, "spike_in_port", w=-4000.) +{%- endif %} {% endfor %} simulator.run(t_stop) From b581928f6d267bbc4fe7e8eea84803c8b6766431 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sun, 30 Mar 2025 21:29:36 +0200 Subject: [PATCH 51/68] add attributes to spike events --- .../codegeneration/printers/ode_toolbox_variable_printer.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py index e134e7b4f..3d51f2731 100644 --- a/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py +++ b/pynestml/codegeneration/printers/ode_toolbox_variable_printer.py @@ -37,16 +37,13 @@ def print_variable(self, node: ASTVariable) -> str: :return: string representation """ s = node.get_name().replace("$", "__DOLLAR") - print("PRINTING VARIABLE " + s) if node.get_vector_parameter(): s += "_VEC_IDX_" + self._expression_printer.print(node.get_vector_parameter()) - print("\tPRINTING VARIABLE " + s) if node.get_attribute(): s += "__DOT__" + node.get_attribute() - print("\tPRINTING VARIABLE " + s) s += "__d" * node.get_differential_order() - print("\tPRINTING VARIABLE " + s) + return s From f6d637f394e8e550d1a7134cce143f139bca47bb Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Sat, 10 May 2025 18:08:42 +0200 Subject: [PATCH 52/68] update documentation about units --- .../nestml_language_concepts.rst | 146 +++++++++--------- 1 file changed, 69 insertions(+), 77 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 00cdcf553..3cc5495fb 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -38,91 +38,39 @@ Similar to Python, a single line can be split into multiple lines by using a bac Data types and physical units ----------------------------- -Data types define types of variables as well as parameters and return values of functions. 
NESTML provides the following primitive types and physical data types: +Data types define types of variables, as well as parameters and return values of functions. NESTML provides the following primitive types and physical data types, which can both be used to indicate types. Primitive data types ~~~~~~~~~~~~~~~~~~~~ -- ``real`` corresponds to the ``double`` data type in C++. Example literals are: ``42.0``, ``-0.42``, ``.44`` -- ``integer`` corresponds to the ``long`` data type in C++. Example literals are: ``42``, ``-7`` -- ``boolean`` corresponds to the ``bool`` data type in C++. Its only literals are ``true`` and ``false`` -- ``string`` corresponds to the ``std::string`` data type in C++. Example literals are: ``"Bob"``, ``""``, ``"Hello World!"`` -- ``void`` corresponds to the ``void`` data type in C++. No literals are possible and this can only be used in the declaration of a function without a return value. +- ``real`` indicates a real number. Example literals are: ``42.0``, ``-0.42``, ``.44`` +- ``integer`` indicates an integer (signed whole number). Example literals are: ``42``, ``-7`` +- ``boolean`` indicates a Boolean value. Its only literals are ``true`` and ``false`` +- ``string`` indicates a text string. Example literals are: ``"Bob"``, ``""``, ``"Hello World!"`` +- ``void`` can only be used in the declaration of a function to indicate that it does not return a value. Physical units ~~~~~~~~~~~~~~ -A physical unit in NESTML can be either a simple physical unit or a complex physical unit. A simple physical unit is composed of an optional magnitude prefix and the name of the unit. +A physical unit in NESTML can be either a base physical unit or a derived physical unit. The following table lists the seven base units as defined in `the SI standard `__. -The following table lists seven base units, which can be used to specify any physical unit. This idea is based on `the SI units `__. 
++-----------+--------+---------------------+ +| Name | Symbol | Quantity | +|===========|========|=====================| +| meter | m | length | +| kilogram | kg | mass | +| second | s | time | +| Ampère | A | electric current | +| Kelvin | K | temperature | +| mole | mol | amount of substance | +| candela | cd | luminous intensity | ++-----------+--------+---------------------+ -+-----------------------+-------------+------------------+ -| Quantity | Unit Name | NESTML/SI unit | -+=======================+=============+==================+ -| length | meter | m | -+-----------------------+-------------+------------------+ -| mass | kilogram | kg | -+-----------------------+-------------+------------------+ -| time | second | s | -+-----------------------+-------------+------------------+ -| electric current | ampere | A | -+-----------------------+-------------+------------------+ -| temperature | kelvin | K | -+-----------------------+-------------+------------------+ -| amount of substance | mole | mol | -+-----------------------+-------------+------------------+ -| luminous intensity | candela | cd | -+-----------------------+-------------+------------------+ +Any other physical unit can be expressed as a combination of these seven units. For this, the operators ``*`` (multiplication), ``/`` (division), ``**`` (power) and ``()`` (parentheses) can be used (see below for examples). -Any other physical unit can be expressed as a combination of these seven units. These other units are called derived units. NESTML provides a concept for the derivation of new physical units, i.e., by combining simple units (consisting of a prefix and an SI unit), the user is able to create arbitrary physical units. 
- -Units can have at most one of the following magnitude prefixes: - -+----------+-----------+-----------------+----------+-----------+-----------------+ -| Factor | SI Name | NESTML prefix | Factor | SI Name | NESTML prefix | -+==========+===========+=================+==========+===========+=================+ -| 10^-1 | deci | d | 10^1 | deca | da | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-2 | centi | c | 10^2 | hecto | h | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-3 | milli | m | 10^3 | kilo | k | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-6 | micro | u | 10^6 | mega | M | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-9 | nano | n | 10^9 | giga | G | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-12 | pico | p | 10^12 | tera | T | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-15 | femto | f | 10^15 | peta | P | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-18 | atto | a | 10^18 | exa | E | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-21 | zepto | z | 10^21 | zetta | Z | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-24 | yocto | y | 10^24 | yotta | Y | -+----------+-----------+-----------------+----------+-----------+-----------------+ - -Simple physical units can be combined to complex units. For this, the operators , ``*`` (multiplication), ``/`` (division), ``**`` (power) and ``()`` (parenthesis) can be used. An example could be - -.. code-block:: nestml - - mV*mV*nS**2/(mS*pA) - -Units of the form `` ** -1`` can also be expressed as ``1/``. For example - -.. code-block:: nestml - - (ms*mV)**-1 - -is equivalent to - -.. 
code-block:: nestml - - 1/(ms*mV) -NESTML also supports the usage of named derived-units such as Newton, Henry or lux: +NESTML also supports the usage of many named derived units such as Newton, Henry or lux. The following units are defined: .. list-table:: :header-rows: 1 @@ -184,14 +132,14 @@ NESTML also supports the usage of named derived-units such as Newton, Henry or l - C/V - kg\ :sup:`−1`\ ⋅ m\ :sup:`−2`\ ⋅ s\ :sup:`4`\ ⋅ A\ :sup:`2` * - Ohm - - Ω + - Ohm - resistance, impedance, reactance - V/A - kg⋅(m\ :sup:`2`\ ) ⋅ (s\ :sup:`−3`\ ) ⋅(A\ :sup:`−2`\ ) * - Siemens - S - electrical conductance - - Ω\ :sup:`−1` + - Ohm\ :sup:`−1` - (kg\ :sup:`−1`\ ) ⋅(m\ :sup:`−2`\ ) ⋅(s\ :sup:`3`\ ) ⋅ A\ :sup:`2` * - Weber - Wb @@ -240,17 +188,61 @@ NESTML also supports the usage of named derived-units such as Newton, Henry or l - mol⋅(s\ :sup:`−1`\ ) -Here, except for Ohm, the symbol of the unit has to be used in the model, e.g.: +These unit symbols can be used to define physical quantities, for instance: .. 
code-block:: nestml - x = 10 N * 22 Ohm / 0.5 V + x N*S = 10 N * 22 Ohm / 0.5 V + +Physical units can have at most one of the following magnitude prefixes: + ++----------+-----------+-----------------+----------+-----------+-----------------+ +| Factor | SI Name | NESTML prefix | Factor | SI Name | NESTML prefix | ++==========+===========+=================+==========+===========+=================+ +| 10^-1 | deci | d | 10^1 | deca | da | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-2 | centi | c | 10^2 | hecto | h | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-3 | milli | m | 10^3 | kilo | k | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-6 | micro | u | 10^6 | mega | M | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-9 | nano | n | 10^9 | giga | G | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-12 | pico | p | 10^12 | tera | T | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-15 | femto | f | 10^15 | peta | P | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-18 | atto | a | 10^18 | exa | E | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-21 | zepto | z | 10^21 | zetta | Z | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-24 | yocto | y | 10^24 | yotta | Y | ++----------+-----------+-----------------+----------+-----------+-----------------+ + +For example, the following defines a possible unit: + +.. code-block:: nestml + + mV*mV*nS**2/(mS*pA) + +Units of the form ``**-1`` can also be expressed as ``1/``. For example + +.. code-block:: nestml + + (ms*mV)**-1 + +is equivalent to + +.. 
code-block:: nestml + + 1/(ms*mV) Type and unit checks ~~~~~~~~~~~~~~~~~~~~ -NESTML checks type correctness of all expressions. This also applies to assignments, declarations with an initialization and function calls. NESTML supports conversion of ``integer``\ s to ``real``\ s. A conversion between ``unit``-typed and ``real``-typed variables is also possible. However, these conversions are reported as warnings. Finally, there is no conversion between numeric types and boolean or string types. +NESTML checks type correctness of all expressions. This also applies to assignments, declarations with an initialization and function calls. Conversion of ``integer``\ s to ``real``\ s is allowed. A conversion between ``unit``-typed and ``real``-typed variables is also allowed. However, these conversions are reported as warnings. No conversion is allowed between numeric types and Boolean or string types. Basic elements of the embedded programming language From d897b603b8a956176feb9d5ba28dc7e8ec11707a Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Sat, 10 May 2025 18:25:03 +0200 Subject: [PATCH 53/68] update documentation about operators --- .../nestml_language_concepts.rst | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 00cdcf553..3c24916ed 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -771,31 +771,34 @@ All variables, literals, and function calls are valid terms. Variables are names List of operators ~~~~~~~~~~~~~~~~~ -For any two valid numeric expressions ``a``, ``b``, boolean expressions ``c``,\ ``c1``,\ ``c2``, and an integer expression ``n`` the following operators produce valid expressions. 
+For any two valid numeric expressions ``x``, ``y``, boolean expressions ``b``,\ ``b1``,\ ``b2``, and integer expressions ``n``,\ ``i`` the following operators produce valid expressions. +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ | Operator | Description | Examples | +================================================+====================================================================+===========================+ -| ``()`` | Expressions with parentheses | ``(a)`` | +| ``()`` | Expressions with parentheses | ``(x)`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``**`` | Power operator. | ``a ** b`` | +| ``**`` | Power operator | ``x**y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``+``, ``-``, ``~`` | Unary plus, unary minus, bitwise negation | ``-a``, ``~c`` | +| ``+``, ``-``, ``~`` | Unary plus, unary minus | ``-x`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``*``, ``/``, ``%`` | Multiplication, division and modulo operator | ``a * b``, ``a % b`` | +| ``*``, ``/``, ``%`` | Multiplication, division and modulo operator | ``x * y``, ``x % y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``+``, ``-`` | Addition and subtraction | ``a + b``, ``a - b`` | +| ``+``, ``-`` | Addition and subtraction | ``x + y``, ``x - y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``<<``, ``>>`` | Left and right bit shifts | ``a << n``, ``a >> n`` | +| 
``<<``, ``>>`` | Left and right bit shifts | ``n << i``, ``n >> i`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``&``, ``|``, ``^`` | Bitwise ``and``, ``or`` and ``xor`` | ``a&b``, ``|``, ``a~b`` | +| ``~`` | Bitwise negation | ``~b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>`` | Comparison operators | ``a <= b``, ``a != b`` | +| ``&``, ``|``, ``^`` | Bitwise ``and``, ``or`` and ``xor`` | ``b1 & b2``, ``b1 ^ b2`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``not``, ``and``, ``or`` | Logical conjunction, disjunction and negation | ``not c``, ``c1 or c2`` | +| ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>`` | Comparison operators | ``x <= y``, ``x != y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``?:`` | Ternary operator (return ``a`` if ``c`` is ``true``, ``b`` else) | ``c ? a : b`` | +| ``not``, ``and``, ``or`` | Logical conjunction, disjunction and negation | ``not b``, ``b1 or b2`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ +| ``?:`` | Ternary operator (return ``x`` if ``b`` is true, ``y`` otherwise) | ``b ? x : y`` | ++------------------------------------------------+--------------------------------------------------------------------+---------------------------+ + Blocks ------ From 52b9006cd79762b6719351c8f76302ac0275d221 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sat, 10 May 2025 19:16:48 +0200 Subject: [PATCH 54/68] fix typesetting in the documentation --- .../nestml_language_concepts.rst | 31 +++++++++---------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 3c24916ed..33cc38c15 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -771,34 +771,31 @@ All variables, literals, and function calls are valid terms. Variables are names List of operators ~~~~~~~~~~~~~~~~~ -For any two valid numeric expressions ``x``, ``y``, boolean expressions ``b``,\ ``b1``,\ ``b2``, and integer expressions ``n``,\ ``i`` the following operators produce valid expressions. +For any two valid numeric expressions ``a``, ``b``, boolean expressions ``c``,\ ``c1``,\ ``c2``, and an integer expression ``n`` the following operators produce valid expressions. +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ | Operator | Description | Examples | +================================================+====================================================================+===========================+ -| ``()`` | Expressions with parentheses | ``(x)`` | +| ``()`` | Expressions with parentheses | ``(a)`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``**`` | Power operator | ``x**y`` | +| ``**`` | Power operator. 
| ``a ** b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``+``, ``-``, ``~`` | Unary plus, unary minus | ``-x`` | +| ``+``, ``-``, ``~`` | Unary plus, unary minus, bitwise negation | ``-a``, ``~c`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``*``, ``/``, ``%`` | Multiplication, division and modulo operator | ``x * y``, ``x % y`` | +| ``*``, ``/``, ``%`` | Multiplication, division and modulo operator | ``a * b``, ``a % b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``+``, ``-`` | Addition and subtraction | ``x + y``, ``x - y`` | +| ``+``, ``-`` | Addition and subtraction | ``a + b``, ``a - b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``<<``, ``>>`` | Left and right bit shifts | ``n << i``, ``n >> i`` | +| ``<<``, ``>>`` | Left and right bit shifts | ``a << n``, ``a >> n`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``~`` | Bitwise negation | ``~b`` | +| ``&``, ``|``, ``^`` | Bitwise ``and``, ``or`` and ``xor`` | ``a&b``, ``|``, ``a~b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``&``, ``|``, ``^`` | Bitwise ``and``, ``or`` and ``xor`` | ``b1 & b2``, ``b1 ^ b2`` | +| ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>`` | Comparison operators | ``a <= b``, ``a != b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``<``, 
``<=``, ``==``, ``!=``, ``>=``, ``>`` | Comparison operators | ``x <= y``, ``x != y`` | +| ``not``, ``and``, ``or`` | Logical conjunction, disjunction and negation | ``not c``, ``c1 or c2`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``not``, ``and``, ``or`` | Logical conjunction, disjunction and negation | ``not b``, ``b1 or b2`` | +| ``?:`` | Ternary operator (return ``a`` if ``c`` is ``true``, ``b`` else) | ``c ? a : b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``?:`` | Ternary operator (return ``x`` if ``b`` is true, ``y`` otherwise) | ``b ? x : y`` | -+------------------------------------------------+--------------------------------------------------------------------+---------------------------+ - Blocks ------ @@ -820,7 +817,7 @@ Block types - ``state`` - This block is composed of a list of variable declarations that describe parts of the model which may change over time. All the variables declared in this block must be initialized with a value. - ``equations`` - This block contains kernel definitions and differential equations. It will be explained in further detail `later on in the manual <#equations>`__. - ``input`` - This block is composed of one or more input ports. It will be explained in further detail `later on in the manual <#input>`__. -- ``output`` *````* - Defines which type of event the model can send. Currently, only ``spike`` is supported. +- ``output`` - Defines which type of event the model can send. - ``update`` - Contains statements that are executed once every simulation timestep (on a fixed grid or from event to event). - ``onReceive`` - Can be defined for each spiking input port; contains statements that are executed whenever an incoming spike event arrives. 
Optional event parameters, such as the weight, can be accessed by referencing the input port name. Priorities can optionally be defined for each ``onReceive`` block; these resolve ambiguity in the model specification of which event handler should be called after which, in case multiple events occur at the exact same moment in time on several input ports, triggering multiple event handlers. - ``onCondition`` - Contains statements that are executed when a particular condition holds. The condition is expressed as a (boolean typed) expression. The advantage of having conditions separate from the ``update`` block is that a root-finding algorithm can be used to find the precise time at which a condition holds (with a higher resolution than the simulation timestep). This makes the model more generic with respect to the simulator that is used. @@ -1166,7 +1163,7 @@ Integration order During simulation, the simulation kernel (for example, NEST Simulator) is responsible for invoking the model functions that update its state: those in ``update``, ``onReceive``, integrating the ODEs, etc. Different simulators may invoke these functions in a different sequence and with different steps of time, leading to different numerical results even though the same model was used. For example, "time-based" simulators take discrete steps of time of fixed duration (for example, 1 millisecond), whereas "event-based" simulators process events at their exact time of occurrence, without having to round off the time of occurrence of the event to the nearest timestep interval. The following section describes some of the variants of integration sequences that can be encountered and what this means for the outcome of a simulation. -The recommended update sequence for a spiking neuron model is shown below (panel B), which is optimal ("gives the fewest surprises") in the case the simulator uses a minimum synaptic transmission delay (this includes NEST). 
In this sequence, first the subthreshold dynamics are evaluated (that is, ``integrate_odes()`` is called; in the simplest case, all equations are solved simultaneously) and only afterwards, incoming spikes are processed. +The recommended update sequence for a spiking neuron model is shown below (panel A), which is optimal ("gives the fewest surprises") in the case the simulator uses a minimum synaptic transmission delay (this includes NEST). In this sequence, first the subthreshold dynamics are evaluated (that is, ``integrate_odes()`` is called; in the simplest case, all equations are solved simultaneously) and only afterwards, incoming spikes are processed. .. _label:fig_integration_order .. figure:: https://raw.githubusercontent.com/nest/nestml/master/doc/fig/integration_order.png @@ -1203,4 +1200,4 @@ References .. [1] Morrison A, Diesmann M (2008). Maintaining causality in discrete time neuronal network simulations. Lectures in Supercomputational Neurosciences: Dynamics in Complex Brain Networks, 267-278. -.. [2] Stefan Rotter and Markus Diesmann. Exact digital simulation of time-invariant linear systems with applications to neuronal modeling. Biol. Cybern. 81, 381±402 (1999) +.. [2] Stefan Rotter and Markus Diesmann. Exact digital simulation of time-invariant linear systems with applications to neuronal modeling. Biol. Cybern. 81, 381–402 (1999) From d5f1181db2314eccca94dc9af9bd62f4efbb26f4 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Sat, 10 May 2025 19:24:10 +0200 Subject: [PATCH 55/68] update documentation about time integration --- .../nestml_language_concepts.rst | 220 +++++++++--------- doc/running/running_nest.rst | 6 + 2 files changed, 111 insertions(+), 115 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 5f4a190ba..80a403877 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -38,91 +38,39 @@ Similar to Python, a single line can be split into multiple lines by using a bac Data types and physical units ----------------------------- -Data types define types of variables as well as parameters and return values of functions. NESTML provides the following primitive types and physical data types: +Data types define types of variables, as well as parameters and return values of functions. NESTML provides the following primitive types and physical data types, which can both be used to indicate types. Primitive data types ~~~~~~~~~~~~~~~~~~~~ -- ``real`` corresponds to the ``double`` data type in C++. Example literals are: ``42.0``, ``-0.42``, ``.44`` -- ``integer`` corresponds to the ``long`` data type in C++. Example literals are: ``42``, ``-7`` -- ``boolean`` corresponds to the ``bool`` data type in C++. Its only literals are ``true`` and ``false`` -- ``string`` corresponds to the ``std::string`` data type in C++. Example literals are: ``"Bob"``, ``""``, ``"Hello World!"`` -- ``void`` corresponds to the ``void`` data type in C++. No literals are possible and this can only be used in the declaration of a function without a return value. +- ``real`` indicates a real number. Example literals are: ``42.0``, ``-0.42``, ``.44`` +- ``integer`` indicates an integer (signed whole number). Example literals are: ``42``, ``-7`` +- ``boolean`` indicates a Boolean value. 
Its only literals are ``true`` and ``false`` +- ``string`` indicates a text string. Example literals are: ``"Bob"``, ``""``, ``"Hello World!"`` +- ``void`` can only be used in the declaration of a function to indicate that it does not return a value. Physical units ~~~~~~~~~~~~~~ -A physical unit in NESTML can be either a simple physical unit or a complex physical unit. A simple physical unit is composed of an optional magnitude prefix and the name of the unit. +A physical unit in NESTML can be either a base physical unit or a derived physical unit. The following table lists the seven base units as defined in `the SI standard `__. -The following table lists seven base units, which can be used to specify any physical unit. This idea is based on `the SI units `__. ++-----------+--------+---------------------+ +| Name | Symbol | Quantity | +|===========|========|=====================| +| meter | m | length | +| kilogram | kg | mass | +| second | s | time | +| Ampère | A | electric current | +| Kelvin | K | temperature | +| mole | mol | amount of substance | +| candela | cd | luminous intensity | ++-----------+--------+---------------------+ -+-----------------------+-------------+------------------+ -| Quantity | Unit Name | NESTML/SI unit | -+=======================+=============+==================+ -| length | meter | m | -+-----------------------+-------------+------------------+ -| mass | kilogram | kg | -+-----------------------+-------------+------------------+ -| time | second | s | -+-----------------------+-------------+------------------+ -| electric current | ampere | A | -+-----------------------+-------------+------------------+ -| temperature | kelvin | K | -+-----------------------+-------------+------------------+ -| amount of substance | mole | mol | -+-----------------------+-------------+------------------+ -| luminous intensity | candela | cd | -+-----------------------+-------------+------------------+ +Any other physical unit can be expressed as 
a combination of these seven units. For this, the operators ``*`` (multiplication), ``/`` (division), ``**`` (power) and ``()`` (parentheses) can be used (see below for examples). -Any other physical unit can be expressed as a combination of these seven units. These other units are called derived units. NESTML provides a concept for the derivation of new physical units, i.e., by combining simple units (consisting of a prefix and an SI unit), the user is able to create arbitrary physical units. - -Units can have at most one of the following magnitude prefixes: - -+----------+-----------+-----------------+----------+-----------+-----------------+ -| Factor | SI Name | NESTML prefix | Factor | SI Name | NESTML prefix | -+==========+===========+=================+==========+===========+=================+ -| 10^-1 | deci | d | 10^1 | deca | da | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-2 | centi | c | 10^2 | hecto | h | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-3 | milli | m | 10^3 | kilo | k | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-6 | micro | u | 10^6 | mega | M | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-9 | nano | n | 10^9 | giga | G | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-12 | pico | p | 10^12 | tera | T | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-15 | femto | f | 10^15 | peta | P | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-18 | atto | a | 10^18 | exa | E | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-21 | zepto | z | 10^21 | zetta | Z | -+----------+-----------+-----------------+----------+-----------+-----------------+ -| 10^-24 | yocto | y | 10^24 | yotta | Y | 
-+----------+-----------+-----------------+----------+-----------+-----------------+ - -Simple physical units can be combined to complex units. For this, the operators , ``*`` (multiplication), ``/`` (division), ``**`` (power) and ``()`` (parenthesis) can be used. An example could be - -.. code-block:: nestml - - mV*mV*nS**2/(mS*pA) - -Units of the form `` ** -1`` can also be expressed as ``1/``. For example - -.. code-block:: nestml - - (ms*mV)**-1 - -is equivalent to - -.. code-block:: nestml - - 1/(ms*mV) - -NESTML also supports the usage of named derived-units such as Newton, Henry or lux: +NESTML also supports the usage of many named derived units such as Newton, Henry or lux. The following units are defined: .. list-table:: :header-rows: 1 @@ -184,14 +132,14 @@ NESTML also supports the usage of named derived-units such as Newton, Henry or l - C/V - kg\ :sup:`−1`\ ⋅ m\ :sup:`−2`\ ⋅ s\ :sup:`4`\ ⋅ A\ :sup:`2` * - Ohm - - Ω + - Ohm - resistance, impedance, reactance - V/A - kg⋅(m\ :sup:`2`\ ) ⋅ (s\ :sup:`−3`\ ) ⋅(A\ :sup:`−2`\ ) * - Siemens - S - electrical conductance - - Ω\ :sup:`−1` + - Ohm\ :sup:`−1` - (kg\ :sup:`−1`\ ) ⋅(m\ :sup:`−2`\ ) ⋅(s\ :sup:`3`\ ) ⋅ A\ :sup:`2` * - Weber - Wb @@ -240,17 +188,61 @@ NESTML also supports the usage of named derived-units such as Newton, Henry or l - mol⋅(s\ :sup:`−1`\ ) -Here, except for Ohm, the symbol of the unit has to be used in the model, e.g.: +These unit symbols can be used to define physical quantites, for instance: + +.. 
code-block:: nestml + + x N*S = 10 N * 22 Ohm / 0.5 V + +Physical units can have at most one of the following magnitude prefixes: + ++----------+-----------+-----------------+----------+-----------+-----------------+ +| Factor | SI Name | NESTML prefix | Factor | SI Name | NESTML prefix | ++==========+===========+=================+==========+===========+=================+ +| 10^-1 | deci | d | 10^1 | deca | da | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-2 | centi | c | 10^2 | hecto | h | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-3 | milli | m | 10^3 | kilo | k | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-6 | micro | u | 10^6 | mega | M | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-9 | nano | n | 10^9 | giga | G | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-12 | pico | p | 10^12 | tera | T | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-15 | femto | f | 10^15 | peta | P | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-18 | atto | a | 10^18 | exa | E | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-21 | zepto | z | 10^21 | zetta | Z | ++----------+-----------+-----------------+----------+-----------+-----------------+ +| 10^-24 | yocto | y | 10^24 | yotta | Y | ++----------+-----------+-----------------+----------+-----------+-----------------+ + +For example, the following defines a possible unit: + +.. code-block:: nestml + + mV*mV*nS**2/(mS*pA) + +Units of the form ``**-1`` can also be expressed as ``1/``. For example .. code-block:: nestml - x = 10 N * 22 Ohm / 0.5 V + (ms*mV)**-1 + +is equivalent to + +.. 
code-block:: nestml + + 1/(ms*mV) Type and unit checks ~~~~~~~~~~~~~~~~~~~~ -NESTML checks type correctness of all expressions. This also applies to assignments, declarations with an initialization and function calls. NESTML supports conversion of ``integer``\ s to ``real``\ s. A conversion between ``unit``-typed and ``real``-typed variables is also possible. However, these conversions are reported as warnings. Finally, there is no conversion between numeric types and boolean or string types. +NESTML checks type correctness of all expressions. This also applies to assignments, declarations with an initialization and function calls. Conversion of ``integer``\ s to ``real``\ s is allowed. A conversion between ``unit``-typed and ``real``-typed variables is also allowed. However, these conversions are reported as warnings. No conversion is allowed between numeric types and Boolean or string types. Basic elements of the embedded programming language @@ -766,35 +758,37 @@ Expressions in NESTML can be specified in a recursive fashion. Terms ~~~~~ -All variables, literals, and function calls are valid terms. Variables are names of user-defined or predefined variables (``t``, ``e``). +All variables, literals, and function calls are valid terms. List of operators ~~~~~~~~~~~~~~~~~ -For any two valid numeric expressions ``a``, ``b``, boolean expressions ``c``,\ ``c1``,\ ``c2``, and an integer expression ``n`` the following operators produce valid expressions. +For any two valid numeric expressions ``x``, ``y``, boolean expressions ``b``,\ ``b1``,\ ``b2``, and integer expressions ``n``,\ ``i`` the following operators produce valid expressions. 
+------------------------------------------------+--------------------------------------------------------------------+---------------------------+ | Operator | Description | Examples | +================================================+====================================================================+===========================+ -| ``()`` | Expressions with parentheses | ``(a)`` | +| ``()`` | Expressions with parentheses | ``(x)`` | ++------------------------------------------------+--------------------------------------------------------------------+---------------------------+ +| ``**`` | Power operator | ``x**y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``**`` | Power operator. | ``a ** b`` | +| ``+``, ``-``, ``~`` | Unary plus, unary minus | ``-x`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``+``, ``-``, ``~`` | Unary plus, unary minus, bitwise negation | ``-a``, ``~c`` | +| ``*``, ``/``, ``%`` | Multiplication, division and modulo operator | ``x * y``, ``x % y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``*``, ``/``, ``%`` | Multiplication, division and modulo operator | ``a * b``, ``a % b`` | +| ``+``, ``-`` | Addition and subtraction | ``x + y``, ``x - y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``+``, ``-`` | Addition and subtraction | ``a + b``, ``a - b`` | +| ``<<``, ``>>`` | Left and right bit shifts | ``n << i``, ``n >> i`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``<<``, ``>>`` | Left and right bit 
shifts | ``a << n``, ``a >> n`` | +| ``~`` | Bitwise negation | ``~b`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``&``, ``|``, ``^`` | Bitwise ``and``, ``or`` and ``xor`` | ``a&b``, ``|``, ``a~b`` | +| ``&``, ``|``, ``^`` | Bitwise ``and``, ``or`` and ``xor`` | ``b1 & b2``, ``b1 ^ b2`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>`` | Comparison operators | ``a <= b``, ``a != b`` | +| ``<``, ``<=``, ``==``, ``!=``, ``>=``, ``>`` | Comparison operators | ``x <= y``, ``x != y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``not``, ``and``, ``or`` | Logical conjunction, disjunction and negation | ``not c``, ``c1 or c2`` | +| ``not``, ``and``, ``or`` | Logical conjunction, disjunction and negation | ``not b``, ``b1 or b2`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ -| ``?:`` | Ternary operator (return ``a`` if ``c`` is ``true``, ``b`` else) | ``c ? a : b`` | +| ``?:`` | Ternary operator (return ``x`` if ``b`` is true, ``y`` otherwise) | ``b ? x : y`` | +------------------------------------------------+--------------------------------------------------------------------+---------------------------+ Blocks @@ -1146,7 +1140,7 @@ Physical units such as millivolts (:math:`\text{mV}`) and picoamperes (:math:`\t x = -x / tau + spikes_in * pA -However, note that this not account for different spikes carrying different weight (which typically results in different postsynaptic currents or potentials). In this example, each spike will result in a change in :math:`x` of 1 pA. 
+However, note that this does not account for different spikes carrying different weight (which typically results in different postsynaptic currents or potentials). In this example, each spike will result in a change in :math:`x` of 1 pA. To read out the attributes from events, for example the weight of the spike, the dot notation can be used, for example: @@ -1174,7 +1168,7 @@ Note that again, the units are consistent if :math:`w_k` is assumed to be in uni Handling spiking input by event handlers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -An ``onReceive`` block can be defined for every spiking input port, for example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: +An ``onReceive`` block can be defined for every spiking input port. For example, if a port named ``pre_spikes`` is defined, the corresponding event handler has the general structure: .. code-block:: nestml @@ -1182,21 +1176,17 @@ An ``onReceive`` block can be defined for every spiking input port, for example, println("Info: processing a presynaptic spike at time t = {t}") # ... further statements go here ... -The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t=t^-`) to "just after" the event (at :math:`t=t^+`): - -.. math:: - - \int_{t^-}^{t^+} \dot{\mathbf{x}}(t) dt - -Because the statements in the ``onReceive`` block are executed "instantaneously" at the time of the spike, the units of 1/s due to the definition of the delta function drop out. For instance, when a port is defined with an attribute "psp" in units of mV, then the following has consistent units: +The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t-\epsilon`, for :math:`\epsilon\rightarrow 0`) to "just after" the event (at :math:`t=t+\epsilon`). 
Because the statements in the ``onReceive`` block are executed "instantaneously" at the time of the spike, the units of 1/s due to the definition of the delta function drop out. For instance, when a port is defined with an attribute "psp" in units of mV, then the following has consistent units: .. code-block:: nestml - onReceive(in_spikes): + state: V_m mV = 0 mV + + onReceive(in_spikes): V_m += in_spikes.psp # consistent units: lhs and rhs both in [mV] -To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority. For example: +To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority (handled earlier). For example: .. code-block:: nestml @@ -1225,17 +1215,17 @@ Vector input ports of constant size and with a constant numerical value for the Handling of time ---------------- -Inside the ``update`` block, the current time can be retrieved via the predefined, global variable ``t``. The statements executed in the block are responsible for updating the state of the model between timesteps or events. The statements in this block update the state of the model from the "current" time ``t``, to the next simulation timestep or time of next event ``t + timestep()``. The update step involves integration of the ODEs and corresponds to the "free-flight" or "subthreshold" integration; the events themselves are handled elsewhere, namely as a convolution with a kernel, or as an ``onReceive`` block. +Inside the ``update`` block, the current time can be retrieved via the predefined, global variable ``t``. The statements executed in the block are responsible for updating the state of the model between events. 
The statements in this block update the state of the model from the "current" time ``t``, to the next simulation timestep or time of next event ``t + timestep()``. The update step involves integration of the ODEs, corresponding to the "free-flight" or "subthreshold" integration; the events themselves are handled elsewhere, namely as a convolution with a kernel, or as an ``onReceive`` block. Integrating the ODEs ~~~~~~~~~~~~~~~~~~~~ -Integrating the ODEs needs to be triggered explicitly inside the ``update`` block by calling the ``integrate_odes()`` function. Making this call explicit allows subtle differences in integration sequence to be expressed, as well as making it explicit that some variables but not others are integrated; for example, if a neuron is in an absolute refractory state, we might want to skip integrating the differential equation for the membrane potential. +Numerical integration of the ODEs needs to be triggered explicitly inside the ``update`` block by calling the ``integrate_odes()`` function. Making this call explicit allows subtle differences in integration sequence to be expressed, as well as making it explicit that some variables but not others are integrated; for example, if a neuron is in an absolute refractory state, we might want to skip integrating the differential equation for the membrane potential. -The ``integrate_odes()`` function numerically integrates differential equations defined in the ``equations`` block. If no parameters are given, all ODEs defined in the model are integrated. Integration can be limited to a given set of ODEs by giving their left-hand side state variables as parameters to the function, for example ``integrate_odes(V_m, I_ahp)`` if ODEs exist for the variables ``V_m`` and ``I_ahp``. In this example, these variables are integrated simultaneously (as one single system of equations). 
This is different from calling ``integrate_odes(V_m)`` and then ``integrate_odes(I_ahp)``, in that the second call would use the already-updated state values from the first call. Variables not included in the call to ``integrate_odes()`` are assumed to remain constant (both inside the numeric solver stepping function as well as from before to after the call). +If ``integrate_odes()`` is called without parameters, all ODEs defined in the model are integrated. Integration can be limited to a given set of ODEs by giving their left-hand side state variables as parameters to the function, for example, ``integrate_odes(V_m, I_ahp)`` if ODEs exist for the variables ``V_m`` and ``I_ahp``. In this example, these variables are integrated simultaneously (as one single system of equations). This is different from calling ``integrate_odes(V_m)`` and then ``integrate_odes(I_ahp)``, in that the second call would use the already-updated state value from the first call. Variables not included in the call to ``integrate_odes()`` are assumed to remain constant (both inside the numeric solver stepping function as well as from before to after the call). -In case of higher-order ODEs of the form ``F(x'', x', x) = 0``, the solution ``x(t)`` is obtained by just providing the variable ``x`` to the ``integrate_odes`` function. For example, +In case of higher-order ODEs, calling ``integrate_odes()`` integrates variables of all order. For example, in case an ODE :math:`d^2x/dt^2` is defined, then calling ``integrate_odes(x)`` will integrate all variable orders related to ``x``: .. code-block:: nestml @@ -1249,7 +1239,7 @@ In case of higher-order ODEs of the form ``F(x'', x', x) = 0``, the solution ``x update: integrate_odes(x) -Here, ``integrate_odes(x)`` integrates variables of all order; in this case, ``x`` and ``x'``. 
The state variables affected by incoming events are updated at the end of each timestep, that is, within one timestep, the state as observed by statements in the ``update`` block will be those at :math:`t^-`, i.e. "just before" it has been updated due to the events. See also :ref:`Integrating spiking input` and :ref:`Integration order`. +Here, ``integrate_odes(x)`` integrates both ``x`` and ``x'``. ODEs that can be solved analytically are integrated to machine precision from one timestep to the next using the propagators obtained from `ODE-toolbox `_. In case a numerical solver is used (such as Runge-Kutta or forward Euler), the same ODEs are also evaluated numerically by the numerical solver to allow more precise values for analytically solvable ODEs *within* a timestep. In this way, the long-term dynamics obeys the analytic (more exact) equations, while the short-term (within one timestep) dynamics is evaluated to the precision of the numerical integrator. @@ -1257,13 +1247,13 @@ ODEs that can be solved analytically are integrated to machine precision from on Retrieving simulation timing parameters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -To retrieve timing parameters from the simulator kernel, two special functions are built into NESTML: +To retrieve timing parameters from the simulator kernel, three predefined functions are built into NESTML: -- ``resolution`` returns the current timestep taken. Can be used only inside the ``update`` block and in intialising expressions. The use of this function assumes that the simulator uses fixed resolution steps, therefore it is recommended to use ``timestep()`` instead in order to make the models more generic. -- ``timestep`` returns the current timestep taken. Can be used only inside the ``update`` block. -- ``steps`` takes one parameter of type ``ms`` and returns the number of simulation steps in the current simulation resolution. 
This only makes sense in case of a fixed simulation resolution (such as in NEST); hence, use of this function is not recommended, because it precludes the models from being compatible with other simulation platforms where a non-constant simulation timestep is used. +- ``resolution()`` can only be used in the context of fixed-timestep simulation. It returns the time resolution (duration of each timestep) taken by the simulator. It is only allowed to be called inside the ``update`` block and in intialising expressions. This only makes sense in case of a fixed-timestep simulation; hence, use of this function is not recommended, because it precludes the models from being compatible with other simulation platforms where a non-constant simulation timestep is used. Instead, ``timestep()`` should be preferred to make models more generic. +- ``timestep()`` returns the current timestep taken. It is only allowed inside the ``update`` block. +- ``steps()`` takes one parameter of type ``ms`` and returns the number of simulation steps in the current simulation resolution. This only makes sense in case of a fixed-timestep simulation; hence, use of this function is not recommended, because it precludes the models from being compatible with other simulation platforms where a non-constant simulation timestep is used. -When using ``resolution()``, it is recommended to use the function call directly in the code, rather than defining it as a parameter. This makes the model more robust in case the resolution is changed during the simulation. In some cases, as in the synapse ``update`` block, a step is made between spike events, unconstrained by the simulation resolution. For example: +When using ``resolution()``, it is recommended to use the function call directly in the code, rather than assigning it to a parameter. This makes the model more robust in case the resolution is changed during the simulation. 
In some cases, as in the ``update`` block, a step may be made between spike events, unconstrained by the simulation resolution. For example: .. code-block:: nestml @@ -1281,20 +1271,20 @@ When using ``resolution()``, it is recommended to use the function call directly Integration order ~~~~~~~~~~~~~~~~~ -During simulation, the simulation kernel (for example, NEST Simulator) is responsible for invoking the model functions that update its state: those in ``update``, ``onReceive``, integrating the ODEs, etc. Different simulators may invoke these functions in a different sequence and with different steps of time, leading to different numerical results even though the same model was used. For example, "time-based" simulators take discrete steps of time of fixed duration (for example, 1 millisecond), whereas "event-based" simulators process events at their exact time of occurrence, without having to round off the time of occurrence of the event to the nearest timestep interval. The following section describes some of the variants of integration sequences that can be encountered and what this means for the outcome of a simulation. +During simulation, the simulation kernel (for example, NEST Simulator) is responsible for invoking the model functions that update its state: those in ``update``, ``onReceive``, and ``onCondition`` blocks. Different simulators may invoke these functions in a different sequence and with different steps of time, leading to different numerical results even though the same model was used. For example, "time-based" simulators take discrete steps of time of fixed duration (for example, 1 millisecond), whereas "event-based" simulators process events at their exact time of occurrence, without having to round off the time of occurrence of the event to the nearest timestep interval. The following section describes some of the variants of integration sequences that can be encountered and what this means for the outcome of a simulation. 
-The recommended update sequence for a spiking neuron model is shown below (panel B), which is optimal ("gives the fewest surprises") in the case the simulator uses a minimum synaptic transmission delay (this includes NEST). In this sequence, first the subthreshold dynamics are evaluated (that is, ``integrate_odes()`` is called; in the simplest case, all equations are solved simultaneously) and only afterwards, incoming spikes are processed. +The recommended update sequence for a spiking neuron model is shown below (panel A), which is optimal ("gives the fewest surprises") in the case the simulator uses a minimum synaptic transmission delay (this includes NEST). In this sequence, first the subthreshold dynamics are evaluated (that is, ``integrate_odes()`` is called; in the simplest case, all equations are solved simultaneously) and only afterwards, incoming spikes are processed. .. _label:fig_integration_order .. figure:: https://raw.githubusercontent.com/nest/nestml/master/doc/fig/integration_order.png :alt: Different conventions for the integration sequence. Modified after [1]_, their Fig. 10.2. The precise sequence of operations depends on whether the simulation is considered to have synaptic propagation delays (A) or not (B). -The numeric results of a typical simulation run are shown below. Consider a leaky integrate-and-fire neuron with exponentially decaying postsynaptic currents :math:`I_\text{syn}`. The neuron is integrated using a fixed timestep of :math:`1~\text{ms}` (left) and using an event-based method (right): +The numeric results of a typical simulation run are shown below. Consider a leaky integrate-and-fire neuron with exponentially decaying postsynaptic currents :math:`I_\text{syn}`. The same neuron is integrated using a fixed timestep of :math:`1~\text{ms}` (left) and using an event-based method (right): .. 
figure:: https://raw.githubusercontent.com/nest/nestml/master/doc/fig/integration_order_example.png :alt: Numerical example for two different integration sequences. -On the left, both pre-synaptic spikes are only processed at the end of the interval in which they occur. The statements in the ``update`` block are run every timestep for a fixed timestep of :math:`1~\text{ms}`, alternating with the statements in the ``onReceive`` handler for the spiking input port. Note that this means that the effect of the spikes becomes visible at the end of the timestep in :math:`I_\text{syn}`, but it takes another timestep before ``integrate_odes()`` is called again and consequently for the effect of the spikes to become visible in the membrane potential. This results in a threshold crossing and the neuron firing a spike. On the right half of the figure, the same presynaptic spike timing is used, but events are processed at their exact time of occurrence. In this case, the ``update`` statements are called once to update the neuron from time 0 to :math:`1~\text{ms}`, then again to update from :math:`1~\text{ms}` to the time of the first spike, then the spike is processed by running the statements in its ``onReceive`` block, then ``update`` is called to update from the time of the first spike to the second spike, and so on. The time courses of :math:`I_\text{syn}` and :math:`V_\text{m}` are such that the threshold is not reached and the neuron does not fire, illustrating the numerical differences that can occur when the same model is simulated using different strategies. +On the left, both pre-synaptic spikes are only processed at the end of the interval in which they occur. The statements in the ``update`` block are run every timestep for a fixed timestep of :math:`1~\text{ms}`, alternating with the statements in the ``onReceive`` handler for the spiking input port. 
Note that this means that the effect of the spikes becomes visible at the end of the timestep in :math:`I_\text{syn}`, but it takes another timestep before ``integrate_odes()`` is called again and consequently for the effect of the spikes to become visible in the membrane potential. This results in a threshold crossing and the neuron firing a spike. In the right panel in the figure, the same presynaptic spike timing is used, but events are processed at their exact time of occurrence. In this case, the ``update`` statements are called once to update the neuron from time 0 to :math:`1~\text{ms}`, then again to update from :math:`1~\text{ms}` to the time of the first spike, then the spike is processed by running the statements in its ``onReceive`` block, then ``update`` is called to update from the time of the first spike to the second spike, and so on. The time courses of :math:`I_\text{syn}` and :math:`V_\text{m}` are such that the threshold is not reached and the neuron does not fire, illustrating the numerical differences that can occur when the same model is simulated using different strategies. Guards @@ -1320,4 +1310,4 @@ References .. [1] Morrison A, Diesmann M (2008). Maintaining causality in discrete time neuronal network simulations. Lectures in Supercomputational Neurosciences: Dynamics in Complex Brain Networks, 267-278. -.. [2] Stefan Rotter and Markus Diesmann. Exact digital simulation of time-invariant linear systems with applications to neuronal modeling. Biol. Cybern. 81, 381±402 (1999) +.. [2] Stefan Rotter and Markus Diesmann. Exact digital simulation of time-invariant linear systems with applications to neuronal modeling. Biol. Cybern. 
81, 381–402 (1999) diff --git a/doc/running/running_nest.rst b/doc/running/running_nest.rst index 4486283ea..e6e1ed199 100644 --- a/doc/running/running_nest.rst +++ b/doc/running/running_nest.rst @@ -12,6 +12,12 @@ After NESTML completes, the NEST extension module (by default called ``"nestmlmo Several code generator options are available; for an overview see :class:`pynestml.codegeneration.nest_code_generator.NESTCodeGenerator`. +Data types +---------- + +- The NESTML data type ``real`` will be rendered as ``double``. +- The NESTML data type ``integer`` will be rendered as ``long``. + Simulation loop --------------- From 3d3d2755e7a6bd39486f736435bc8ecb42b33984 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 13 May 2025 09:14:55 +0200 Subject: [PATCH 56/68] update documentation on linear_time_invariant_spiking_input_ports --- doc/running/running_nest.rst | 40 ++++++++++++++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 2 deletions(-) diff --git a/doc/running/running_nest.rst b/doc/running/running_nest.rst index e6e1ed199..153f9b25c 100644 --- a/doc/running/running_nest.rst +++ b/doc/running/running_nest.rst @@ -244,8 +244,6 @@ By default, the "continuous-time" based buffer is selected. This covers the most As a computationally more efficient alternative, a spike-based buffer can be selected. In this case, the third factor is not stored every timestep, but only upon the occurrence of postsynaptic (somatic) spikes. Because of the existence of a nonzero dendritic delay, the time at which the somatic spike is observed at the synapse is delayed, and the time at which the third factor is sampled should match the time of the spike at the synapse, rather than the soma. When the spike-based buffering method is used, the dendritic delay is therefore ignored, because the third factor is sampled instead at the time of the somatic spike. 
- - Dendritic delay and synaptic weight ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -297,6 +295,44 @@ Random numbers In case random numbers are needed inside the synapse, the random number generator belonging to the postsynaptic target is used. +Performance optimisations +------------------------- + +In neuron models, incoming spikes are by default buffered into a queue (a ``std::vector``) before being processed. This implementation is the most generic, allowing, for example, spikes with both positive and negative weights arriving at one and the same input port to be handled differently according to the sign. However, the queue can cause a degradation in runtime performance on the order of 10%. If no conditional processing of the incoming spikes is necessary, and all spikes are treated in the same, linear, time-invariant (LTI) manner, then no queue is necessary as all spike weights can be simply added together into a single floating-point variable. The code generator option ``linear_time_invariant_spiking_input_ports`` can be used to indicate for which ports the spikes can be treated in an LTI-manner. + +For instance, if spikes arriving at the same port are handled differently according to sign of the weight: + +.. code:: nestml + + input: + spike_in_port <- spike(weight pA) + + onReceive(spike_in_port): + # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike + if spike_in_port.weight > 0: + I_syn_exc += spike_in_port.weight + else: + I_syn_inh -= spike_in_port.weight + +then the system is not LTI and a queue is necessary. + +However, if two separate ports are used (and weights are subsequently processed in an LTI manner), the model can be formulated in a mathematically equivalent way: + +.. 
code:: nestml + + input: + spike_in_port_exc <- spike(weight pA) + spike_in_port_inh <- spike(weight pA) + + onReceive(spike_in_port_exc): + I_syn_exc += spike_in_port.weight + + onReceive(spike_in_port_exc): + I_syn_inh += spike_in_port.weight + +In this case, the ``linear_time_invariant_spiking_input_ports`` option can be used to specify that both ``spike_in_port_exc`` and ``spike_in_port_inh`` are LTI ports, for better runtime performance. + + References ---------- From 3068ce9674c533b35ce8b505701b8f64956146c3 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Mon, 19 May 2025 11:09:41 +0200 Subject: [PATCH 57/68] adapt to ODE-toolbox API change --- pynestml/codegeneration/nest_code_generator.py | 3 +-- pynestml/codegeneration/nest_compartmental_code_generator.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 2a4eccf8f..e8991acc5 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -913,12 +913,12 @@ def ode_toolbox_analysis(self, neuron: ASTModel, kernel_buffers: Mapping[ASTKern odetoolbox_indict = ASTUtils.transform_ode_and_kernels_to_json(neuron, neuron.get_parameters_blocks(), kernel_buffers, printer=self._ode_toolbox_printer) odetoolbox_indict["options"] = {} odetoolbox_indict["options"]["output_timestep_symbol"] = "__h" + odetoolbox_indict["options"]["simplify_expression"] = self.get_option("simplify_expression") disable_analytic_solver = self.get_option("solver") != "analytic" solver_result = odetoolbox.analysis(odetoolbox_indict, disable_stiffness_check=True, disable_analytic_solver=disable_analytic_solver, preserve_expressions=self.get_option("preserve_expressions"), - simplify_expression=self.get_option("simplify_expression"), log_level=FrontendConfiguration.logging_level) analytic_solver = None analytic_solvers = [x for x in solver_result if x["solver"] == 
"analytical"] @@ -936,7 +936,6 @@ def ode_toolbox_analysis(self, neuron: ASTModel, kernel_buffers: Mapping[ASTKern disable_stiffness_check=True, disable_analytic_solver=True, preserve_expressions=self.get_option("preserve_expressions"), - simplify_expression=self.get_option("simplify_expression"), log_level=FrontendConfiguration.logging_level) numeric_solvers = [x for x in solver_result if x["solver"].startswith("numeric")] assert len(numeric_solvers) <= 1, "More than one numeric solver not presently supported" diff --git a/pynestml/codegeneration/nest_compartmental_code_generator.py b/pynestml/codegeneration/nest_compartmental_code_generator.py index 00f7ed5c8..82be5734f 100644 --- a/pynestml/codegeneration/nest_compartmental_code_generator.py +++ b/pynestml/codegeneration/nest_compartmental_code_generator.py @@ -299,6 +299,8 @@ def create_ode_indict(self, neuron, parameters_block, kernel_buffers) odetoolbox_indict["options"] = {} odetoolbox_indict["options"]["output_timestep_symbol"] = "__h" + odetoolbox_indict["options"]["simplify_expression"] = self.get_option("simplify_expression") + return odetoolbox_indict def ode_solve_analytically(self, @@ -313,7 +315,6 @@ def ode_solve_analytically(self, odetoolbox_indict, disable_stiffness_check=True, preserve_expressions=self.get_option("preserve_expressions"), - simplify_expression=self.get_option("simplify_expression"), log_level=FrontendConfiguration.logging_level) analytic_solver = None @@ -360,7 +361,6 @@ def ode_toolbox_analysis(self, neuron: ASTModel, disable_stiffness_check=True, disable_analytic_solver=True, preserve_expressions=self.get_option("preserve_expressions"), - simplify_expression=self.get_option("simplify_expression"), log_level=FrontendConfiguration.logging_level) numeric_solvers = [ x for x in solver_result if x["solver"].startswith("numeric")] From ef8394f2a80c1e187ce0592d93434f6da688f35c Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Mon, 2 Jun 2025 17:02:39 +0200 Subject: [PATCH 58/68] fix CI after upstream Cython issue (see https://github.com/nest/nest-simulator/pull/3497) --- .github/workflows/continuous_benchmarking_master_branch.yml | 2 +- .github/workflows/continuous_benchmarking_pull_requests.yml | 2 +- .github/workflows/nestml-build.yml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/continuous_benchmarking_master_branch.yml b/.github/workflows/continuous_benchmarking_master_branch.yml index 20764e20e..1715490cf 100644 --- a/.github/workflows/continuous_benchmarking_master_branch.yml +++ b/.github/workflows/continuous_benchmarking_master_branch.yml @@ -37,7 +37,7 @@ jobs: # Install NEST simulator - name: NEST simulator run: | - python -m pip install cython + python -m pip install "cython<=3.0.10" echo "GITHUB_WORKSPACE = $GITHUB_WORKSPACE" NEST_SIMULATOR=$(pwd)/nest-simulator NEST_INSTALL=$(pwd)/nest_install diff --git a/.github/workflows/continuous_benchmarking_pull_requests.yml b/.github/workflows/continuous_benchmarking_pull_requests.yml index 5be31daf5..fbfcb6965 100644 --- a/.github/workflows/continuous_benchmarking_pull_requests.yml +++ b/.github/workflows/continuous_benchmarking_pull_requests.yml @@ -44,7 +44,7 @@ jobs: # Install NEST simulator - name: NEST simulator run: | - python -m pip install cython + python -m pip install "cython<=3.0.10" echo "GITHUB_WORKSPACE = $GITHUB_WORKSPACE" NEST_SIMULATOR=$(pwd)/nest-simulator NEST_INSTALL=$(pwd)/nest_install diff --git a/.github/workflows/nestml-build.yml b/.github/workflows/nestml-build.yml index c2d9b1a91..8974d2d17 100644 --- a/.github/workflows/nestml-build.yml +++ b/.github/workflows/nestml-build.yml @@ -213,7 +213,7 @@ jobs: # Install NEST simulator - name: NEST simulator run: | - python -m pip install cython + python -m pip install "cython<=3.0.10" echo "GITHUB_WORKSPACE = $GITHUB_WORKSPACE" NEST_SIMULATOR=$(pwd)/nest-simulator NEST_INSTALL=$(pwd)/nest_install @@ -331,7 
+331,7 @@ jobs: # Install NEST simulator - name: NEST simulator run: | - python -m pip install cython + python -m pip install "cython<=3.0.10" echo "GITHUB_WORKSPACE = $GITHUB_WORKSPACE" NEST_SIMULATOR=$(pwd)/nest-simulator NEST_INSTALL=$(pwd)/nest_install From 335e022ffabc028cda3347ab5a7e19376a7c07d5 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Mon, 2 Jun 2025 21:07:46 +0200 Subject: [PATCH 59/68] only generate code for unweighted spike buffer if necessary --- .../nestml_language_concepts.rst | 13 ++ models/neurons/aeif_cond_alpha_neuron.nestml | 45 ++--- models/neurons/aeif_cond_exp_neuron.nestml | 38 ++-- models/neurons/iaf_psc_exp_neuron.nestml | 2 +- ...only_in_equation_rhs_and_event_handlers.py | 10 +- .../codegeneration/nest_code_generator.py | 44 ++++- .../point_neuron/common/NeuronClass.jinja2 | 162 +++++++++--------- .../point_neuron/common/NeuronHeader.jinja2 | 29 ++-- .../BufferDeclarationValue.jinja2 | 2 +- .../directives_cpp/SpikeBufferGetter.jinja2 | 4 +- .../point_neuron/setup/CMakeLists.txt.jinja2 | 9 +- ...CoInputPortsIllegalMissingAttribute.nestml | 41 ----- tests/test_cocos.py | 8 - 13 files changed, 202 insertions(+), 205 deletions(-) delete mode 100644 tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index c3e4a91e5..c8862c6b2 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -1186,6 +1186,19 @@ The statements in the event handler will be executed when the event occurs and i onReceive(in_spikes): V_m += in_spikes.psp # consistent units: lhs and rhs both in [mV] +In ``onReceive`` blocks, a spiking input port may not appear without an attribute present: + +.. code-block:: nestml + + state: + x real = 0 + + input: + in_spikes <- spike(weight pA) + + onReceive(in_spikes): + x += in_spikes # error! 
+ To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority (handled earlier). For example: .. code-block:: nestml diff --git a/models/neurons/aeif_cond_alpha_neuron.nestml b/models/neurons/aeif_cond_alpha_neuron.nestml index 7a29d6c23..3408bbc27 100644 --- a/models/neurons/aeif_cond_alpha_neuron.nestml +++ b/models/neurons/aeif_cond_alpha_neuron.nestml @@ -1,48 +1,48 @@ # aeif_cond_alpha - Conductance based exponential integrate-and-fire neuron model # ############################################################################### -# +# # Description # +++++++++++ -# +# # aeif_cond_alpha is the adaptive exponential integrate and fire neuron according to Brette and Gerstner (2005), with post-synaptic conductances in the form of a bi-exponential ("alpha") function. -# +# # The membrane potential is given by the following differential equation: -# +# # .. math:: -# +# # C_m \frac{dV_m}{dt} = # -g_L(V_m-E_L)+g_L\Delta_T\exp\left(\frac{V_m-V_{th}}{\Delta_T}\right) - # g_e(t)(V_m-E_e) \\ # -g_i(t)(V_m-E_i)-w + I_e -# +# # and -# +# # .. math:: -# +# # \tau_w \frac{dw}{dt} = a(V_m-E_L) - w -# +# # Note that the membrane potential can diverge to positive infinity due to the exponential term. To avoid numerical instabilities, instead of :math:`V_m`, the value :math:`\min(V_m,V_{peak})` is used in the dynamical equations. -# +# # .. note:: -# +# # The default refractory period for ``aeif`` models is zero, consistent with the model definition in # Brette & Gerstner [1]_. Thus, an ``aeif`` neuron with default parameters can fire multiple spikes in a single # time step, which can lead to exploding spike numbers and extreme slow-down of simulations. # To avoid such unphysiological behavior, you should set a refractory time ``refr_t > 0``. -# -# +# +# # References # ++++++++++ -# +# # .. 
[1] Brette R and Gerstner W (2005). Adaptive exponential # integrate-and-fire model as an effective description of neuronal # activity. Journal of Neurophysiology. 943637-3642 # DOI: https://doi.org/10.1152/jn.00686.2005 -# -# +# +# # See also # ++++++++ -# +# # iaf_cond_alpha, aeif_cond_exp # # @@ -50,7 +50,7 @@ # +++++++++++++++++++ # # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify @@ -75,11 +75,11 @@ model aeif_cond_alpha_neuron: refr_t ms = 0 ms # Refractory period timer equations: - inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence kernel g_inh = (e / tau_syn_inh) * t * exp(-t / tau_syn_inh) kernel g_exc = (e / tau_syn_exc) * t * exp(-t / tau_syn_exc) # Add inlines to simplify the equation definition of V_m + inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence inline exp_arg real = (V_bounded - V_th) / Delta_T inline I_spike pA = g_L * Delta_T * exp(exp_arg) inline I_syn_exc pA = convolve(g_exc, exc_spikes.weight) * (V_bounded - E_exc) @@ -118,13 +118,6 @@ model aeif_cond_alpha_neuron: # Constant external input current I_e pA = 0 pA - internals: - # Impulse to add to DG_EXC on spike arrival to evoke unit-amplitude conductance excursion - PSConInit_E nS/ms = nS * e / tau_syn_exc - - # Impulse to add to DG_INH on spike arrival to evoke unit-amplitude conductance excursion - PSConInit_I nS/ms = nS * e / tau_syn_inh - input: exc_spikes <- spike(weight nS) inh_spikes <- spike(weight nS) diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index 46e7b66e6..57c2ce6b6 100644 --- a/models/neurons/aeif_cond_exp_neuron.nestml +++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -1,49 +1,49 @@ # aeif_cond_exp - Conductance based exponential integrate-and-fire neuron model # ############################################################################# -# +# # Description # +++++++++++ -# +# # 
aeif_cond_exp is the adaptive exponential integrate and fire neuron # according to Brette and Gerstner (2005), with post-synaptic # conductances in the form of truncated exponentials. -# +# # The membrane potential is given by the following differential equation: -# +# # .. math:: -# +# # C_m \frac{dV_m}{dt} = # -g_L(V_m-E_L)+g_L\Delta_T\exp\left(\frac{V_m-V_{th}}{\Delta_T}\right) - g_e(t)(V_m-E_e) \\ # -g_i(t)(V_m-E_i)-w +I_e -# +# # and -# +# # .. math:: -# +# # \tau_w \frac{dw}{dt} = a(V_m-E_L) - w -# +# # Note that the membrane potential can diverge to positive infinity due to the exponential term. To avoid numerical instabilities, instead of :math:`V_m`, the value :math:`\min(V_m,V_{peak})` is used in the dynamical equations. -# +# # .. note:: -# +# # The default refractory period for ``aeif`` models is zero, consistent with the model definition in # Brette & Gerstner [1]_. Thus, an ``aeif`` neuron with default parameters can fire multiple spikes in a single # time step, which can lead to exploding spike numbers and extreme slow-down of simulations. # To avoid such unphysiological behavior, you should set a refractory time ``refr_t > 0``. -# -# +# +# # References # ++++++++++ -# +# # .. [1] Brette R and Gerstner W (2005). Adaptive exponential # integrate-and-fire model as an effective description of neuronal # activity. Journal of Neurophysiology. 943637-3642 # DOI: https://doi.org/10.1152/jn.00686.2005 -# -# +# +# # See also # ++++++++ -# +# # iaf_cond_exp, aeif_cond_alpha # # @@ -51,7 +51,7 @@ # +++++++++++++++++++ # # This file is part of NEST. 
-# +# # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify @@ -78,11 +78,11 @@ model aeif_cond_exp_neuron: g_syn_inh nS = 0 nS equations: - inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence g_syn_exc' = -g_syn_exc / tau_syn_exc g_syn_inh' = -g_syn_inh / tau_syn_inh # Add inlines to simplify the equation definition of V_m + inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence inline exp_arg real = (V_bounded - V_th) / Delta_T inline I_spike pA = g_L * Delta_T * exp(exp_arg) inline I_syn_exc pA = g_syn_exc * (V_bounded - E_exc) diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index 7ce695906..0474eed46 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -57,7 +57,7 @@ # +++++++++++++++++++ # # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index 5a1a3c8e5..7b0edd37a 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -63,11 +63,11 @@ def visit_variable(self, node: ASTVariable): # input port appears inside the declaration of an onReceive block; everything is OK return - if in_port.parameters and not node.attribute: - # input port has parameters (for instance, ``x`` in ``foo <- spike(x real)`` but the variable reference is missing an attribute (``foo`` instead of ``foo.x``) - code, message = Messages.get_spike_input_port_attribute_missing(node.get_name()) - Logger.log_message(code=code, message=message, error_position=node.get_source_position(), - 
log_level=LoggingLevel.ERROR) + # if in_port.parameters and not node.attribute: + # # input port has parameters (for instance, ``x`` in ``foo <- spike(x real)`` but the variable reference is missing an attribute (``foo`` instead of ``foo.x``) + # code, message = Messages.get_spike_input_port_attribute_missing(node.get_name()) + # Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + # log_level=LoggingLevel.ERROR) _node = node while _node: diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 83afb7b1e..4333d35a2 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -52,11 +52,16 @@ from pynestml.codegeneration.printers.sympy_simple_expression_printer import SympySimpleExpressionPrinter from pynestml.frontend.frontend_configuration import FrontendConfiguration from pynestml.meta_model.ast_assignment import ASTAssignment +from pynestml.meta_model.ast_equations_block import ASTEquationsBlock +from pynestml.meta_model.ast_input_block import ASTInputBlock from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_kernel import ASTKernel from pynestml.meta_model.ast_model import ASTModel from pynestml.meta_model.ast_node_factory import ASTNodeFactory from pynestml.meta_model.ast_ode_equation import ASTOdeEquation +from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock +from pynestml.meta_model.ast_stmts_body import ASTStmtsBody +from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbol_table.symbol_table import SymbolTable from pynestml.symbols.real_type_symbol import RealTypeSymbol from pynestml.symbols.unit_type_symbol import UnitTypeSymbol @@ -77,6 +82,7 @@ from pynestml.visitors.ast_set_vector_parameter_in_update_expressions import ASTSetVectorParameterInUpdateExpressionVisitor from pynestml.visitors.ast_symbol_table_visitor import 
ASTSymbolTableVisitor from pynestml.visitors.ast_random_number_generator_visitor import ASTRandomNumberGeneratorVisitor +from pynestml.visitors.ast_visitor import ASTVisitor def find_spiking_post_port(synapse, namespace): @@ -87,6 +93,37 @@ def find_spiking_post_port(synapse, namespace): return None + +class NoAttributesSpikingInputPortNecessaryVisitor(ASTVisitor): + r"""This visitor checks whether any references occur in the model to a spiking input port without attributes. + + For instance, for a spiking input port: + + .. code:: nestml + + input: + spikes_in_port <- spike(weight pA) + + A reference to the port with attribute would be ``spikes_in_port.weight`` and a reference without attributes would be ``spikes_in_port``. + + If no references to the port without an attribute are present, then no code needs to be generated for the buffer, saving on runtime performance. + """ + def __init__(self, model: ASTModel): + super().__init__() + + self._attributes_spiking_input_port_necessary = False + self._spike_input_ports = model.get_spike_input_port_names() + + def visit_variable(self, node: ASTVariable): + # A reference to the port without an attribute (``foo`` rather than ``foo.weight``) + # means that the unweighted spike buffer has to be generated for this model. + if node.name in self._spike_input_ports \ + and node.get_attribute() is None \ + and not ASTUtils.find_parent_node_by_type(node, ASTInputBlock) \ + and not (ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock) and node.get_parent().input_port_variable.name == node.name): + # references inside the input block or as the onReceive handler argument are exempt + self._attributes_spiking_input_port_necessary = True + + class NESTCodeGenerator(CodeGenerator): r""" Code generator for a NEST Simulator C++ extension module. @@ -112,7 +149,7 @@ class NESTCodeGenerator(CodeGenerator): - **solver**: A string identifying the preferred ODE solver.
``"analytic"`` for propagator solver preferred; fallback to numeric solver in case ODEs are not analytically solvable. Use ``"numeric"`` to disable analytic solver. - **gsl_adaptive_step_size_controller**: For the numeric (GSL) solver: how to interpret the absolute and relative tolerance values. Can be changed to trade off integration accuracy with numerical stability. The default value is ``"with_respect_to_solution"``. Can also be set to ``"with_respect_to_derivative"``. (Tolerance values can be specified at runtime as parameters of the model instance.) For further details, see https://www.gnu.org/software/gsl/doc/html/ode-initval.html#adaptive-step-size-control. - **numeric_solver**: A string identifying the preferred numeric ODE solver. Supported are ``"rk45"`` and ``"forward-Euler"``. - - **continuous_state_buffering_method**: Which method to use for buffering state variables between neuron and synapse pairs. When a synapse has a "continuous" input port, connected to a postsynaptic neuron, either the value is obtained taking the synaptic (dendritic, that is, synapse-soma) delay into account, requiring a buffer to store the value at each timepoint (``continuous_state_buffering_method = "continuous_time_buffer"); or the value is obtained at the times of the somatic spikes of the postsynaptic neuron, ignoring the synaptic delay (``continuous_state_buffering_method == "post_spike_based"``). The former is more physically accurate but requires a large buffer and can require a long time to simulate. The latter ignores the dendritic delay but is much more computationally efficient. + - **continuous_state_buffering_method**: Which method to use for buffering state variables between neuron and synapse pairs. 
When a synapse has a "continuous" input port, connected to a postsynaptic neuron, either the value is obtained taking the synaptic (dendritic, that is, synapse-soma) delay into account, requiring a buffer to store the value at each timepoint (``continuous_state_buffering_method = "continuous_time_buffer"``); or the value is obtained at the times of the somatic spikes of the postsynaptic neuron, ignoring the synaptic delay (``continuous_state_buffering_method == "post_spike_based"``). The former is more physically accurate but requires a large buffer and can require a long time to simulate. The latter ignores the dendritic delay but is much more computationally efficient. - **delay_variable**: A mapping identifying, for each synapse (the name of which is given as a key), the variable or parameter in the model that corresponds with the NEST ``Connection`` class delay property. - **weight_variable**: Like ``delay_variable``, but for synaptic weight. - **linear_time_invariant_spiking_input_ports**: A list of spiking input ports which can be treated as linear and time-invariant; this implies that, for the given port(s), the weight of all spikes received within a timestep can be added together, improving memory consumption and runtime performance. Use with caution; for example, this is not compatible with using a single input port for, depending on the sign of the weight of the spike event, processing both inhibitory vs. excitatory spikes. 
@@ -557,8 +594,13 @@ def _get_model_namespace(self, astnode: ASTModel) -> Dict: if "continuous_post_ports" in dir(astnode): namespace["continuous_post_ports"] = astnode.continuous_post_ports + # input port/event handling options namespace["linear_time_invariant_spiking_input_ports"] = self.get_option("linear_time_invariant_spiking_input_ports") + v = NoAttributesSpikingInputPortNecessaryVisitor(astnode) + astnode.accept(v) + namespace["attributes_spiking_input_port_necessary"] = v._attributes_spiking_input_port_necessary + return namespace def _get_synapse_model_namespace(self, synapse: ASTModel) -> Dict: diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 589c6f991..43786a2bf 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -206,8 +206,10 @@ namespace nest , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- else %} +{%- if attributes_spiking_input_port_necessary %} {#- generic input port: use lists of spike events for each buffer slot #} - , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) +{%- endif %} {%- endif %} {%- endfor %} {%- endif %} @@ -219,8 +221,10 @@ namespace nest , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. 
) {%- else %} +{%- if attributes_spiking_input_port_necessary %} {#- generic input port: use lists of spike events for each buffer slot #} - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) +{%- endif %} {%- endif %} {%- endfor %} {%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} @@ -229,9 +233,11 @@ namespace nest , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) {%- else %} {#- generic input port: use lists of spike events for each buffer slot #} - , spike_input_{{ inputPort.name }}_( nest::ListRingBuffer() ) +{%- if attributes_spiking_input_port_necessary %} + , spike_input_{{ inputPort.name }}_( nest::VectorRingBuffer() ) +{%- endif %} {%- endif %} - , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_n_spikes_received_( nest::RingBuffer() ) {%- endif %} {%- endfor %} {%- endif %} @@ -261,7 +267,9 @@ namespace nest , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- else %} {#- generic input port: use lists of spike events for each buffer slot #} - , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) +{%- if attributes_spiking_input_port_necessary %} + , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) +{%- endif %} {%- endif %} {%- endfor %} {%- else %} @@ -279,7 +287,9 @@ namespace nest , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. 
) {%- else %} {#- generic input port: use lists of spike events for each buffer slot #} - , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::ListRingBuffer() ) +{%- if attributes_spiking_input_port_necessary %} + , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) +{%- endif %} {%- endif %} {%- endfor %} {%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} @@ -288,9 +298,11 @@ namespace nest , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) {%- else %} {#- generic input port: use lists of spike events for each buffer slot #} - , spike_input_{{ inputPort.name }}_( nest::ListRingBuffer() ) +{%- if attributes_spiking_input_port_necessary %} + , spike_input_{{ inputPort.name }}_( nest::VectorRingBuffer() ) +{%- endif %} {%- endif %} - , spike_input_{{ inputPort.name }}_spike_input_received_( nest::RingBuffer() ) + , spike_input_{{ inputPort.name }}_n_spikes_received_( nest::RingBuffer() ) {%- endif %} {%- endfor %} {%- endif %} @@ -508,15 +520,19 @@ void {{ neuronName }}::init_buffers_() B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} {%- endif %} +{%- if attributes_spiking_input_port_necessary %} B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_.clear(); - B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_.clear(); +{%- endif %} + B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_n_spikes_received_.clear(); {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} +{%- if attributes_spiking_input_port_necessary %} B_.spike_input_{{ inputPort.name }}_.clear(); - B_.spike_input_{{ inputPort.name }}_spike_input_received_.clear(); +{%- endif %} + B_.spike_input_{{ inputPort.name }}_n_spikes_received_.clear(); {%- endif %} {%- endfor %} {% endif %} @@ -813,66 +829,46 @@ void {{ 
neuronName }}::update(nest::Time const & origin, const long from, const {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} - - - {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - - -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_value(lag); const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; -{%- else %} -{#- generic input port: use lists of spike events for each buffer slot #} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); -{%- endif %} - - - - +{%- endif %} {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - - -{%- if inputPortSymbol.name in 
linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_value(lag); const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; -{%- else %} -{#- generic input port: use lists of spike events for each buffer slot #} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = std::accumulate(B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag).end(), 0.0); -{%- endif %} - - - - - +{%- endif %} {%- endfor %} - -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if attributes_spiking_input_port_necessary %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} B_.spike_input_{{ inputPort.name }}_grid_sum_ = B_.spike_input_{{ inputPort.name }}_.get_value(lag); const double __spike_input_{{ inputPort.name }} = B_.spike_input_{{ inputPort.name }}_grid_sum_; -{%- else %} +{%- else %} {#- generic input port: use lists of spike events for each buffer slot #} const 
double __spike_input_{{ inputPort.name }} = std::accumulate(B_.spike_input_{{ inputPort.name }}_.get_list(lag).begin(), B_.spike_input_{{ inputPort.name }}_.get_list(lag).end(), 0.0); -{%- endif %} - - - - +{%- endif %} +{%- endif %} {%- endif %} {%- endfor %} - {%- if has_delay_variables %} /** * delay variables @@ -924,7 +920,7 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const **/ {% for blk in neuron.get_on_receive_blocks() %} {%- set inport = utils.port_name_printer(blk.get_input_port_variable()) %} - if (B_.spike_input_{{ inport }}_spike_input_received_.get_value(lag)) + if (B_.spike_input_{{ inport }}_n_spikes_received_.get_value(lag)) { // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer resets the RingBuffer entries on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag); @@ -1041,19 +1037,21 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - std::list< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::vector< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::vector< double >& __spike_input_{{ 
inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} - std::list< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); +{%- if attributes_spiking_input_port_necessary %} + std::vector< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); __spike_input_{{ inputPort.name }}_list.clear(); +{%- endif %} {%- endif %} {%- endif %} @@ -1195,8 +1193,10 @@ std::cout << "[neuron " << this << "] Writing history at time " << nest::Time(ne B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) {%- endfor %} +{%- if attributes_spiking_input_port_necessary %} B_.spike_input_{{ inputPort.name }}_.clear(); - //, spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_( 0. ) +{%- endif %} + //, spike_input_{{ inputPort.name }}_n_spikes_received_grid_sum_( 0. 
) {%- endif %} {%- endfor %} {%- endif %} @@ -1313,45 +1313,36 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) { {%- if spike_in_port.get_parameters() %} {%- for attribute in spike_in_port.get_parameters() %} - - -{%- if spike_in_port_name in linear_time_invariant_spiking_input_ports %} +{%- if spike_in_port_name in linear_time_invariant_spiking_input_ports %} {#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); -{%- else %} +{%- else %} {#- generic input port: use lists of spike events for each buffer slot #} B_.spike_input_{{ spike_in_port_name }}__DOT__{{ attribute.name }}_.append_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_weight() * e.get_multiplicity() ); -{%- endif %} - - +{%- endif %} {%- endfor %} {%- endif %} - // add an unweighted spike to the general "train of delta pulses" input buffer - //std::cout << "\tappending spike at offset = " << e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()) << "; buffer size = " << B_.spike_input_{{ spike_in_port_name }}_.size() << "; nest::kernel().connection_manager.get_min_delay() = " << nest::kernel().connection_manager.get_min_delay() << "\n"; - //std::cout << "\tappending spike at offset = " << e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin()) << " to B_.spike_input_{{ spike_in_port_name }}_, before length = " << B_.spike_input_{{ spike_in_port_name }}_.get_list(e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin())).size() << "\n"; - // B_.spike_input_{{ spike_in_port_name }}_.resize(); - -{%- if spike_in_port_name in linear_time_invariant_spiking_input_ports %} +{%- if spike_in_port_name in 
linear_time_invariant_spiking_input_ports %} {#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} B_.spike_input_{{ spike_in_port_name }}_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_multiplicity() ); -{%- else %} +{%- else %} +{%- if attributes_spiking_input_port_necessary %} {#- generic input port: use lists of spike events for each buffer slot #} B_.spike_input_{{ spike_in_port_name }}_.append_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), e.get_multiplicity() ); -{%- endif %} - - //std::cout << "\tappending spike to B_.spike_input_{{ spike_in_port_name }}_, after length = " << B_.spike_input_{{ spike_in_port_name }}_.get_list(e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin())).size() << "\n"; +{%- endif %} +{%- endif %} // set the "spike received" flag - B_.spike_input_{{ spike_in_port_name }}_spike_input_received_.add_value( + B_.spike_input_{{ spike_in_port_name }}_n_spikes_received_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), 1. 
); } @@ -1441,26 +1432,28 @@ void {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - std::list< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); - std::list< double >::iterator __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.begin(); + std::vector< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::vector< double >::iterator __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.begin(); {%- endfor %} {%- endif %} {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - std::list< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); - std::list< double >::iterator __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.begin(); + std::vector< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); + std::vector< double >::iterator __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.begin(); {%- endfor %} +{%- endif %} +{%- if attributes_spiking_input_port_necessary %} + 
std::vector< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); + std::vector< double >::iterator __spike_input_{{ inputPort.name }}_list_iterator = __spike_input_{{ inputPort.name }}_list.begin(); {%- endif %} - std::list< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); - std::list< double >::iterator __spike_input_{{ inputPort.name }}_list_iterator = __spike_input_{{ inputPort.name }}_list.begin(); - - const size_t n_spikes = __spike_input_{{ inputPort.name }}_list.size(); + const size_t n_spikes = B_.spike_input_{{ inputPort.name }}_n_spikes_received_.get_value(lag); +std::cout << "PROCESSING " << n_spikes <<" SPIKES\n"; for (size_t spike_idx = 0; spike_idx < n_spikes; ++spike_idx) { /** - * Grab the actual spike event data from the buffers (for the current timepoint ``origin + / start rendered code for integrate_olag``) + * Grab the actual spike event data from the buffers (for the current timepoint ``origin + lag``) **/ {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} @@ -1476,7 +1469,9 @@ void const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = *__spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator; {%- endfor %} {%- endif %} +{%- if attributes_spiking_input_port_necessary %} const double __spike_input_{{ inputPort.name }} = *__spike_input_{{ inputPort.name }}_list_iterator; +{%- endif %} /** * Begin NESTML generated code for the onReceive() block statements @@ -1507,12 +1502,12 @@ void ++__spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator; {%- endfor %} {%- endif %} +{%- if attributes_spiking_input_port_necessary %} ++__spike_input_{{ inputPort.name }}_list_iterator; +{%- endif %} } /* // clear the processed spike events from the list -std::cout << "\tclearing spike buffers....\n"; - std::cout << 
"\tbefore clearing " << __spike_input_{{ inputPort.name }}_list.size() << " spikes\n"; {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} @@ -1529,12 +1524,9 @@ std::cout << "\tclearing spike buffers....\n"; {%- endfor %} {%- endif %} +{%- if attributes_spiking_input_port_necessary %} __spike_input_{{ inputPort.name }}_list.clear(); - - - - std::cout << "\tafter clearing " << __spike_input_{{ inputPort.name }}_list.size() << " spikes\n"; - std::cout << "\tafter clearing (orig list) " << B_.spike_input_{{ inputPort.name }}_.get_list(lag).size() << " spikes\n"; +{%- endif %} */ {%- endif %} } diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index a4ec4dfb5..0785a85e8 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -93,6 +93,7 @@ along with NEST. If not, see . 
#include "event.h" #include "nest_types.h" #include "ring_buffer.h" +#include "vector_ring_buffer.h" #include "universal_data_logger.h" // Includes from sli: @@ -802,8 +803,8 @@ private: {%- endif %} nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_; - nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; - double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_n_spikes_received_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_n_spikes_received_grid_sum_; {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} @@ -812,8 +813,8 @@ private: {%- endfor %} nest::RingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes double spike_input_{{ inputPort.name }}_grid_sum_; // buffer for unweighted spikes - nest::RingBuffer spike_input_{{ inputPort.name }}_spike_input_received_; // buffer for the "spike received" boolean flag - double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; // buffer for the "spike received" boolean flag + nest::RingBuffer spike_input_{{ inputPort.name }}_n_spikes_received_; // buffer for the "spike received" boolean flag + double spike_input_{{ inputPort.name }}_n_spikes_received_grid_sum_; // buffer for the "spike received" boolean flag {%- endif %} {%- else %} {#- generic input port: use lists of spike events for each buffer slot #} @@ -823,23 +824,27 @@ private: {%- for i in range(size) %} {%- if inputPort.get_parameters() %} {%- for parameter in inputPort.get_parameters() %} - nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; + nest::VectorRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ 
parameter.get_name() }}_grid_sum_; {%- endfor %} {%- endif %} - nest::ListRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; +{%- if attributes_spiking_input_port_necessary %} + nest::VectorRingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_; double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_grid_sum_; - nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_; - double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_spike_input_received_grid_sum_; +{%- endif %} + nest::RingBuffer spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_n_spikes_received_; + double spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_n_spikes_received_grid_sum_; {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} - nest::ListRingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; + nest::VectorRingBuffer spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_; double spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} - nest::ListRingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes - nest::RingBuffer spike_input_{{ inputPort.name }}_spike_input_received_; // buffer for the "spike received" boolean flag - double spike_input_{{ inputPort.name }}_spike_input_received_grid_sum_; // buffer for the "spike received" boolean flag +{%- if attributes_spiking_input_port_necessary %} + nest::VectorRingBuffer spike_input_{{ inputPort.name }}_; // buffer for unweighted spikes +{%- endif %} + nest::RingBuffer spike_input_{{ inputPort.name }}_n_spikes_received_; // buffer for the "spike received" boolean flag + double spike_input_{{ inputPort.name }}_n_spikes_received_grid_sum_; // buffer for the "spike received" boolean flag {%- endif %} {%- endif %} {%- endfor %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/BufferDeclarationValue.jinja2 
b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/BufferDeclarationValue.jinja2 index e2fdbad16..b849634ec 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/BufferDeclarationValue.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/BufferDeclarationValue.jinja2 @@ -2,7 +2,7 @@ {%- if tracing %}/* generated by {{self._TemplateReference__context.name}} */ {% endif %} {%- if node.has_vector_parameter() %} std::vector {{ node.name }}_grid_sum_; -std::vector {{ node.name }}_spike_input_received_grid_sum_; +std::vector {{ node.name }}_n_spikes_received_grid_sum_; {%- else %} double {{ node.name }}_grid_sum_; {%- endif %} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 index 530deb219..e171e23ae 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/directives_cpp/SpikeBufferGetter.jinja2 @@ -4,8 +4,8 @@ {% set _tuples = [ ("nest::RingBuffer", "spike_inputs_", "Buffer containing the incoming spikes"), ("double", "spike_inputs_grid_sum_", "Buffer containing the sum of all the incoming spikes"), - ("nest::RingBuffer", "spike_input_received_", "Buffer containing a flag whether incoming spikes have been received on a given port"), - ("double", "spike_input_received_grid_sum_", "Buffer containing a flag whether incoming spikes have been received on a given port") + ("nest::RingBuffer", "n_spikes_received_", "Buffer containing a flag whether incoming spikes have been received on a given port"), + ("double", "n_spikes_received_grid_sum_", "Buffer containing a flag whether incoming spikes have been received on a given port") ] %} {%- for data_type, variable_name, comment_string in _tuples %} diff --git 
a/pynestml/codegeneration/resources_nest/point_neuron/setup/CMakeLists.txt.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/CMakeLists.txt.jinja2 index 002ff05de..29ef9cee9 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/setup/CMakeLists.txt.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/CMakeLists.txt.jinja2 @@ -59,10 +59,11 @@ set( MODULE_NAME ${SHORT_NAME} ) # 2) Add all your sources here set( MODULE_SOURCES - {{moduleName}}.h {{moduleName}}.cpp - {% for neuron in neurons %} - {{neuron.get_name()}}.cpp {{neuron.get_name()}}.h - {% endfor %} + {{ moduleName }}.h {{ moduleName }}.cpp +{%- for neuron in neurons %} + {{ neuron.get_name() }}.cpp {{ neuron.get_name() }}.h +{%- endfor %} + vector_ring_buffer.h vector_ring_buffer.cpp ) # 3) We require a header name like this: diff --git a/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml b/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml deleted file mode 100644 index a64f01380..000000000 --- a/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml +++ /dev/null @@ -1,41 +0,0 @@ -# CoCoInputPortsIllegal.nestml -# ############################ -# -# Description -# +++++++++++ -# -# This test is used to test the declaration of both vectorized and non-vectorized input ports. -# -# -# Positive case. -# -# -# Copyright statement -# +++++++++++++++++++ -# -# This file is part of NEST. -# -# Copyright (C) 2004 The NEST Initiative -# -# NEST is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# NEST is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with NEST. If not, see . -# -model input_ports_illegal_neuron: - state: - foo real = 0. - - input: - spike_in_port <- spike(foo real) - - onReceive(spike_in_port): - foo += spike_in_port diff --git a/tests/test_cocos.py b/tests/test_cocos.py index de9044760..0c0b0a184 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -165,14 +165,6 @@ def test_valid_no_nest_collision(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoNestNamespaceCollision.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 - - def test_valid_co_co_spike_input_ports_illegal_missing_attribute(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 - def test_invalid_parameters_assigned_only_in_parameters_block(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 From bdfcdc7086f5d776fab624f5809ce6f4f5f289f6 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Tue, 3 Jun 2025 10:39:53 +0200 Subject: [PATCH 60/68] add attributes to spiking input ports --- .gitignore | 1 + .../point_neuron/common/NeuronClass.jinja2 | 17 ++++++++--------- .../point_neuron/common/NeuronHeader.jinja2 | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index d80c8cffd..951d6c1c1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +*.orig .settings/ target/ .idea/ diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index 43786a2bf..d20ef57ff 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -70,7 +70,7 @@ along with NEST. If not, see . #include "{{ neuronName }}.h" // uncomment the next line to enable printing of detailed debug information -// #define DEBUG +#define DEBUG {% if state_vars_that_need_continuous_buffering | length > 0 %} {%- if continuous_state_buffering_method == "continuous_time_buffer" %} @@ -920,10 +920,11 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const **/ {% for blk in neuron.get_on_receive_blocks() %} {%- set inport = utils.port_name_printer(blk.get_input_port_variable()) %} - if (B_.spike_input_{{ inport }}_n_spikes_received_.get_value(lag)) + const size_t n_spikes_received = B_.spike_input_{{ inport }}_n_spikes_received_.get_value(lag); + if (n_spikes_received) { // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer resets the RingBuffer entries - on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag); + on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag, n_spikes_received); } {%- endfor %} @@ -1341,7 +1342,7 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) 
{%- endif %} {%- endif %} - // set the "spike received" flag + // increment the number of spikes received B_.spike_input_{{ spike_in_port_name }}_n_spikes_received_.add_value( e.get_rel_delivery_steps( nest::kernel().simulation_manager.get_slice_origin() ), 1. ); @@ -1380,10 +1381,10 @@ void {{ neuronName }}::handle(nest::CurrentEvent& e) {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inport.split(".")[0]) %} {%- set inputPortSymbol = astnode.get_scope().resolve_to_symbol(inputPort.name, SymbolKind.VARIABLE) %} void -{{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag) +{{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag, const size_t n_spikes_received) { #ifdef DEBUG - std::cout << "[neuron " << this << "] {{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}()" << std::endl; + std::cout << "[neuron " << this << "] {{ neuronName }}::on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(lag=" << lag << ", n_spikes_received = " << n_spikes_received << ")" << std::endl; #endif const double __timestep = nest::Time::get_resolution().get_ms(); // do not remove, this is necessary for the timestep() function @@ -1448,9 +1449,7 @@ void std::vector< double >::iterator __spike_input_{{ inputPort.name }}_list_iterator = __spike_input_{{ inputPort.name }}_list.begin(); {%- endif %} - const size_t n_spikes = B_.spike_input_{{ inputPort.name }}_n_spikes_received_.get_value(lag); -std::cout << "PROCESSING " << n_spikes <<" SPIKES\n"; - for (size_t spike_idx = 0; spike_idx < n_spikes; ++spike_idx) + for (size_t spike_idx = 0; spike_idx < n_spikes_received; ++spike_idx) { /** * Grab the actual spike event data from the buffers (for the current timepoint ``origin + lag``) diff --git 
a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 0785a85e8..193e0d99e 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -477,7 +477,7 @@ public: {% filter indent(2, True) -%} {%- for blk in neuron.get_on_receive_blocks() %} - void on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag); + void on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(nest::Time const &origin, const long lag, const size_t n_spikes_received); {%- endfor %} {%- endfilter %} From 38e4e7d9a8501c27633b417e863188d02c8a4838 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 3 Jun 2025 11:27:57 +0200 Subject: [PATCH 61/68] add vector ring buffer --- .../setup/vector_ring_buffer.cpp.jinja2 | 71 +++++++++ .../setup/vector_ring_buffer.h.jinja2 | 142 ++++++++++++++++++ 2 files changed, 213 insertions(+) create mode 100644 pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.cpp.jinja2 create mode 100644 pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.h.jinja2 diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.cpp.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.cpp.jinja2 new file mode 100644 index 000000000..91b4694cf --- /dev/null +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.cpp.jinja2 @@ -0,0 +1,71 @@ +{# + * vector_ring_buffer.cpp.jinja2 + * + * This file is part of NEST. 
+ * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * +#} +/* + * vector_ring_buffer.cpp + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . 
+ * + */ + +#include "vector_ring_buffer.h" + + +nest::VectorRingBuffer::VectorRingBuffer() + : buffer_( kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_max_delay() ) +{ +} + +void +nest::VectorRingBuffer::resize() +{ + size_t size = kernel().connection_manager.get_min_delay() + kernel().connection_manager.get_max_delay(); + if ( buffer_.size() != size ) + { + buffer_.resize( size ); + } +} + +void +nest::VectorRingBuffer::clear() +{ + resize(); // does nothing if size is fine + // clear all elements + for ( unsigned int i = 0; i < buffer_.size(); i++ ) + { + buffer_[ i ].clear(); + } +} diff --git a/pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.h.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.h.jinja2 new file mode 100644 index 000000000..609681cc9 --- /dev/null +++ b/pynestml/codegeneration/resources_nest/point_neuron/setup/vector_ring_buffer.h.jinja2 @@ -0,0 +1,142 @@ +{# + * vector_ring_buffer.h.jinja2 + * + * This file is part of NEST. + * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * +#} +/* + * vector_ring_buffer.h + * + * This file is part of NEST. 
+ * + * Copyright (C) 2004 The NEST Initiative + * + * NEST is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 2 of the License, or + * (at your option) any later version. + * + * NEST is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with NEST. If not, see . + * + */ + +#ifndef VECTOR_RING_BUFFER_H +#define VECTOR_RING_BUFFER_H + +// C++ includes: +#include +#include +#include + +// Includes from nestkernel: +#include "kernel_manager.h" +#include "nest_time.h" +#include "nest_types.h" + +namespace nest +{ + +class VectorRingBuffer +{ +public: + VectorRingBuffer(); + + /** + * Append a value to the ring buffer list. + * + * @param offs Arrival time relative to beginning of slice. + * @param double Value to append. + */ + void append_value( const long offs, const double ); + + std::vector< double >& get_list( const long offs ); + + /** + * Initialize the buffer with empty lists. + * Also resizes the buffer if necessary. + */ + void clear(); + + /** + * Resize the buffer according to max_thread and max_delay. + * + * New elements are filled with empty lists. + * @note resize() has no effect if the buffer has the correct size. + */ + void resize(); + + /** + * Returns buffer size, for memory measurement. + */ + size_t + size() const + { + return buffer_.size(); + } + +private: + //! Buffered data + std::vector< std::vector< double > > buffer_; + + /** + * Obtain buffer index. + * + * @param delay delivery delay for event + * @returns index to buffer element into which event should be + * recorded. 
+ */ + size_t get_index_( const long d ) const; +}; + +inline void +VectorRingBuffer::append_value( const long offs, const double v ) +{ + buffer_[ get_index_( offs ) ].push_back( v ); +} + +inline std::vector< double >& +VectorRingBuffer::get_list( const long offs ) +{ + assert( 0 <= offs and static_cast< size_t >( offs ) < buffer_.size() ); + assert( offs < kernel().connection_manager.get_min_delay() ); + + // offs == 0 is beginning of slice, but we have to + // take modulo into account when indexing + long idx = get_index_( offs ); + return buffer_[ idx ]; +} + +inline size_t +VectorRingBuffer::get_index_( const long d ) const +{ + const long idx = kernel().event_delivery_manager.get_modulo( d ); + assert( 0 <= idx ); + assert( static_cast< size_t >( idx ) < buffer_.size() ); + return idx; +} + +} // namespace nest + + +#endif /* #ifndef VECTOR_RING_BUFFER_H */ From be889ddd7402fa86ef4d40a8df68d3f28937e87c Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 3 Jun 2025 21:16:56 +0200 Subject: [PATCH 62/68] add attributes to spiking input ports --- .../codegeneration/nest_code_generator.py | 22 +++++++++++-------- .../point_neuron/common/NeuronClass.jinja2 | 10 +++++---- .../nest_tests/nest_custom_templates_test.py | 5 ++++- .../resources/add_spikes_to_ode.nestml | 18 +++++++-------- .../iaf_psc_exp_resolution_test.nestml | 12 +++++----- 5 files changed, 38 insertions(+), 29 deletions(-) diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 4a38a57a4..0ea17706b 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -60,6 +60,7 @@ from pynestml.meta_model.ast_node_factory import ASTNodeFactory from pynestml.meta_model.ast_ode_equation import ASTOdeEquation from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock +from pynestml.meta_model.ast_simple_expression import ASTSimpleExpression from pynestml.meta_model.ast_stmts_body 
import ASTStmtsBody from pynestml.meta_model.ast_variable import ASTVariable from pynestml.symbol_table.symbol_table import SymbolTable @@ -114,13 +115,10 @@ def __init__(self, model: ASTModel): self._spike_input_ports = model.get_spike_input_port_names() def visit_variable(self, node: ASTVariable): - if node.name in self._spike_input_ports: - print("inspecting " + str(node) + " in " + str(node.get_parent().get_parent()) + " parent: " + str(node.get_parent().get_parent())) if node.name in self._spike_input_ports \ - and node.get_attribute() is None \ - and not ASTUtils.find_parent_node_by_type(node, ASTInputBlock) \ - and not (ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock) and node.get_parent().input_port_variable.name == node.name): - import pdb;pdb.set_trace() + and node.get_attribute() is None \ + and not ASTUtils.find_parent_node_by_type(node, ASTInputBlock) \ + and not (ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock) and ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock).input_port_variable.name == node.name): self._attributes_spiking_input_port_necessary = True @@ -379,6 +377,10 @@ def analyse_neuron(self, neuron: ASTModel) -> Tuple[Dict[str, ASTAssignment], Di code, message = Messages.get_start_processing_model(neuron.get_name()) Logger.log_message(neuron, code, message, neuron.get_source_position(), LoggingLevel.INFO) + v = NoAttributesSpikingInputPortNecessaryVisitor(neuron) + neuron.accept(v) + neuron._attributes_spiking_input_port_necessary = v._attributes_spiking_input_port_necessary + if not neuron.get_equations_blocks(): # add all declared state variables as none of them are used in equations block self.non_equations_state_variables[neuron.get_name()] = [] @@ -466,6 +468,10 @@ def analyse_synapse(self, synapse: ASTModel) -> Dict[str, ASTAssignment]: code, message = Messages.get_start_processing_model(synapse.get_name()) Logger.log_message(synapse, code, message, synapse.get_source_position(), LoggingLevel.INFO) + v = 
NoAttributesSpikingInputPortNecessaryVisitor(synapse) + synapse.accept(v) + synapse._attributes_spiking_input_port_necessary = v._attributes_spiking_input_port_necessary + spike_updates = {} if synapse.get_equations_blocks(): if len(synapse.get_equations_blocks()) > 1: @@ -597,9 +603,7 @@ def _get_model_namespace(self, astnode: ASTModel) -> Dict: # input port/event handling options namespace["linear_time_invariant_spiking_input_ports"] = self.get_option("linear_time_invariant_spiking_input_ports") - v = NoAttributesSpikingInputPortNecessaryVisitor(astnode) - astnode.accept(v) - namespace["attributes_spiking_input_port_necessary"] = v._attributes_spiking_input_port_necessary + namespace["attributes_spiking_input_port_necessary"] = astnode._attributes_spiking_input_port_necessary return namespace diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index d20ef57ff..e9f45a6ba 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -920,11 +920,13 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const **/ {% for blk in neuron.get_on_receive_blocks() %} {%- set inport = utils.port_name_printer(blk.get_input_port_variable()) %} - const size_t n_spikes_received = B_.spike_input_{{ inport }}_n_spikes_received_.get_value(lag); - if (n_spikes_received) { - // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the RingBuffer resets the RingBuffer entries - on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag, n_spikes_received); + const size_t n_spikes_received = B_.spike_input_{{ inport }}_n_spikes_received_.get_value(lag); + if (n_spikes_received) + { + // B_.spike_input_{{ inport }} = false; // no need to reset the flag -- reading from the 
RingBuffer resets the RingBuffer entries + on_receive_block_{{ utils.port_name_printer(blk.get_input_port_variable()) }}(origin, lag, n_spikes_received); + } } {%- endfor %} diff --git a/tests/nest_tests/nest_custom_templates_test.py b/tests/nest_tests/nest_custom_templates_test.py index c1a130dfc..8f13653e3 100644 --- a/tests/nest_tests/nest_custom_templates_test.py +++ b/tests/nest_tests/nest_custom_templates_test.py @@ -51,7 +51,10 @@ def test_custom_templates(self): "model_templates": {"neuron": ["@NEURON_NAME@.cpp.jinja2", "@NEURON_NAME@.h.jinja2"], "synapse": ["@SYNAPSE_NAME@.h.jinja2"]}, "module_templates": ["setup/CMakeLists.txt.jinja2", - "setup/@MODULE_NAME@.h.jinja2", "setup/@MODULE_NAME@.cpp.jinja2"]}} + "setup/@MODULE_NAME@.h.jinja2", + "setup/@MODULE_NAME@.cpp.jinja2", + "setup/vector_ring_buffer.h.jinja2", + "setup/vector_ring_buffer.cpp.jinja2"]}} generate_target(input_path, target_platform, target_path, logging_level=logging_level, diff --git a/tests/nest_tests/resources/add_spikes_to_ode.nestml b/tests/nest_tests/resources/add_spikes_to_ode.nestml index 814ead939..d8f6b30d0 100644 --- a/tests/nest_tests/resources/add_spikes_to_ode.nestml +++ b/tests/nest_tests/resources/add_spikes_to_ode.nestml @@ -1,25 +1,25 @@ # add_spikes_to_ode.nestml # ######################## -# +# # Test that spikes from an input port can be directly added to a linear (propagator-solved) and nonlinear (numeric solver) ODE. -# +# # Copyright statement # +++++++++++++++++++ -# +# # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative -# +# # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. -# +# # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. -# +# # You should have received a copy of the GNU General Public License # along with NEST. If not, see . # @@ -30,8 +30,8 @@ model add_spikes_to_ode: equations: kernel K = delta(t) - x' = -x + 42 * convolve(K, spikes) / s # linear eq # XXX: this should just read ``x' = -x + 42 * spikes``; this is a known issue (see https://github.com/nest/nestml/pull/1050). - y' = -y**2 + 123 * convolve(K, spikes) / s # nonlinear eq # XXX: this should just read ``x' = -x + 42 * spikes``; this is a known issue (see https://github.com/nest/nestml/pull/1050). + x' = -x + 42 * convolve(K, spikes) / s # linear eq + y' = -y**2 + 123 * convolve(K, spikes) / s # nonlinear eq input: spikes <- spike diff --git a/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml b/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml index 2f7a5bee2..dfc1b7a7a 100644 --- a/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml +++ b/tests/nest_tests/resources/iaf_psc_exp_resolution_test.nestml @@ -1,9 +1,9 @@ # iaf_psc_exp_resolution_test # ########################### -# +# # Description # +++++++++++ -# +# # Used to test resolution() function. # model iaf_psc_exp_resolution_test_neuron: @@ -38,7 +38,7 @@ model iaf_psc_exp_resolution_test_neuron: c ms = resolution() input: - spike_in_port <- spike + spike_in_port <- spike(weight pA) I_stim pA <- continuous output: @@ -46,10 +46,10 @@ model iaf_psc_exp_resolution_test_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - if spike_in_port < 0: - I_syn_inh += spike_in_port * pA * s + if spike_in_port.weight < 0: + I_syn_inh += spike_in_port.weight else: - I_syn_exc += spike_in_port * pA * s + I_syn_exc += spike_in_port.weight update: d ms = resolution() From 68757607436b7a1797d8bf490ee13a611129dee9 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Tue, 3 Jun 2025 22:35:45 +0200 Subject: [PATCH 63/68] add attributes to spiking input ports --- ...only_in_equation_rhs_and_event_handlers.py | 11 ++++---- .../codegeneration/nest_code_generator.py | 17 +++++++++--- ...ction_with_inline_expression_neuron.nestml | 26 +++++++++---------- tests/test_cocos.py | 4 +++ 4 files changed, 37 insertions(+), 21 deletions(-) diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index 7b0edd37a..996417cbf 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -63,11 +63,12 @@ def visit_variable(self, node: ASTVariable): # input port appears inside the declaration of an onReceive block; everything is OK return - # if in_port.parameters and not node.attribute: - # # input port has parameters (for instance, ``x`` in ``foo <- spike(x real)`` but the variable reference is missing an attribute (``foo`` instead of ``foo.x``) - # code, message = Messages.get_spike_input_port_attribute_missing(node.get_name()) - # Logger.log_message(code=code, message=message, error_position=node.get_source_position(), - # log_level=LoggingLevel.ERROR) + # this should only be checked in event handler statements + if in_port.parameters and not node.attribute: + # input port has parameters (for instance, ``x`` in ``foo <- spike(x real)`` but the variable reference is missing an attribute (``foo`` instead of ``foo.x``) + code, message = Messages.get_spike_input_port_attribute_missing(node.get_name()) + Logger.log_message(code=code, message=message, error_position=node.get_source_position(), + log_level=LoggingLevel.ERROR) _node = node while _node: diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 
0ea17706b..94b8f5084 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -108,17 +108,28 @@ class NoAttributesSpikingInputPortNecessaryVisitor(ASTVisitor): If no references to the port without an attribute are present, then no code needs to be generated for the buffer, saving on runtime performance. """ - def __init__(self, model: ASTModel): + def __init__(self, model: ASTModel, enable_on_receive_check: bool = True): super().__init__() + self._model = model self._attributes_spiking_input_port_necessary = False self._spike_input_ports = model.get_spike_input_port_names() + self.enable_on_receive_check = enable_on_receive_check def visit_variable(self, node: ASTVariable): if node.name in self._spike_input_ports \ and node.get_attribute() is None \ - and not ASTUtils.find_parent_node_by_type(node, ASTInputBlock) \ - and not (ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock) and ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock).input_port_variable.name == node.name): + and not ASTUtils.find_parent_node_by_type(node, ASTInputBlock): + + if self.enable_on_receive_check and ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock): + # parent is an onReceive block: ignore mentions in the onReceive block input_port_variable, check instead for occurrences in the body (statements) of the block + on_receive_block_stmts = ASTUtils.find_parent_node_by_type(node, ASTOnReceiveBlock).get_stmts_body() + v = NoAttributesSpikingInputPortNecessaryVisitor(self._model, enable_on_receive_check=False) + on_receive_block_stmts.accept(v) + self._attributes_spiking_input_port_necessary = v._attributes_spiking_input_port_necessary + + return + self._attributes_spiking_input_port_necessary = True diff --git a/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml b/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml index 51af5512b..95ccb6f6e 100644 --- 
a/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml +++ b/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml @@ -1,24 +1,24 @@ # beta_function_with_inline_expression_neuron # ########################################### -# +# # Description # +++++++++++ -# +# # Used for testing processing of inline expressions. -# -# +# +# # Copyright # +++++++++ -# +# # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative -# +# # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. -# +# # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the @@ -29,8 +29,8 @@ model beta_function_with_inline_expression_neuron: parameters: - tau1 ms = 20 ms ## decay time - tau2 ms = 10 ms ## rise time + tau1 ms = 20 ms # decay time + tau2 ms = 10 ms # rise time state: x_ pA/ms = 0 pA/ms @@ -41,12 +41,12 @@ model beta_function_with_inline_expression_neuron: equations: x' = x_ - x / tau2 - x_' = - x_ / tau1 + x_' = -x_ / tau1 recordable inline z pA = x input: - weighted_input_spikes <- spike + weighted_input_spikes <- spike(weight pA) output: spike @@ -55,4 +55,4 @@ model beta_function_with_inline_expression_neuron: integrate_odes() onReceive(weighted_input_spikes): - x_ += alpha * (1 / tau2 - 1 / tau1) * pA * weighted_input_spikes * s + x_ += alpha * (1 / tau2 - 1 / tau1) * weighted_input_spikes.weight diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 0c0b0a184..24e4b91a0 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -165,6 +165,10 @@ def test_valid_no_nest_collision(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 
'valid')), 'CoCoNestNamespaceCollision.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 + def test_invalid_parameters_assigned_only_in_parameters_block(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 1 From 48c1290898bcbb8162a22ff0ac7e742cee04d7d2 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Tue, 10 Jun 2025 17:19:16 +0200 Subject: [PATCH 64/68] add attributes to spiking input ports --- ...CoInputPortsIllegalMissingAttribute.nestml | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml diff --git a/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml b/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml new file mode 100644 index 000000000..fa5df5536 --- /dev/null +++ b/tests/invalid/CoCoInputPortsIllegalMissingAttribute.nestml @@ -0,0 +1,41 @@ +# CoCoInputPortsIllegal.nestml +# ############################ +# +# Description +# +++++++++++ +# +# This test is used to test the declaration of both vectorized and non-vectorized input ports. +# +# +# Positive case. +# +# +# Copyright statement +# +++++++++++++++++++ +# +# This file is part of NEST. 
+# +# Copyright (C) 2004 The NEST Initiative +# +# NEST is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 2 of the License, or +# (at your option) any later version. +# +# NEST is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with NEST. If not, see . +# +model input_ports_illegal_neuron: + state: + foo real = 0. + + input: + spike_in_port <- spike(foo real) + + onReceive(spike_in_port): + foo += spike_in_port From 9d87e4c326902a524a484fbf7d318460412abb52 Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Wed, 11 Jun 2025 17:26:04 +0200 Subject: [PATCH 65/68] add attributes to spiking input ports --- .gitignore | 1 - .../co_co_no_attributes_on_continuous_port.py | 6 - ...receive_vectors_should_be_constant_size.py | 5 - ...only_in_equation_rhs_and_event_handlers.py | 1 - .../point_neuron/common/NeuronClass.jinja2 | 248 +++++++++--------- .../visitors/ast_function_call_visitor.py | 1 - pynestml/visitors/ast_symbol_table_visitor.py | 8 - tests/nest_tests/nest_integration_test.py | 9 +- 8 files changed, 129 insertions(+), 150 deletions(-) diff --git a/.gitignore b/.gitignore index 951d6c1c1..d80c8cffd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,3 @@ -*.orig .settings/ target/ .idea/ diff --git a/pynestml/cocos/co_co_no_attributes_on_continuous_port.py b/pynestml/cocos/co_co_no_attributes_on_continuous_port.py index 86f8289de..26de84276 100644 --- a/pynestml/cocos/co_co_no_attributes_on_continuous_port.py +++ b/pynestml/cocos/co_co_no_attributes_on_continuous_port.py @@ -19,16 +19,10 @@ # You should have received a copy of the GNU General Public License # along with NEST. 
If not, see . -from typing import Optional - from pynestml.cocos.co_co import CoCo -from pynestml.meta_model.ast_function_call import ASTFunctionCall from pynestml.meta_model.ast_model import ASTModel -from pynestml.symbols.predefined_functions import PredefinedFunctions -from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import Logger, LoggingLevel from pynestml.utils.messages import Messages -from pynestml.visitors.ast_visitor import ASTVisitor class CoCoNoAttributesOnContinuousPort(CoCo): diff --git a/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py b/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py index 526e1b53e..89de214a8 100644 --- a/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py +++ b/pynestml/cocos/co_co_on_receive_vectors_should_be_constant_size.py @@ -20,13 +20,8 @@ # along with NEST. If not, see . from pynestml.cocos.co_co import CoCo -from pynestml.meta_model.ast_declaration import ASTDeclaration -from pynestml.meta_model.ast_expression import ASTExpression from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_model import ASTModel -from pynestml.meta_model.ast_variable import ASTVariable -from pynestml.symbols.integer_type_symbol import IntegerTypeSymbol -from pynestml.symbols.symbol import SymbolKind from pynestml.utils.logger import LoggingLevel, Logger from pynestml.utils.messages import Messages from pynestml.visitors.ast_visitor import ASTVisitor diff --git a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py index 996417cbf..176ca16e5 100644 --- a/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py +++ b/pynestml/cocos/co_co_spike_input_ports_appear_only_in_equation_rhs_and_event_handlers.py @@ -28,7 +28,6 @@ from pynestml.meta_model.ast_ode_equation import 
ASTOdeEquation from pynestml.meta_model.ast_on_receive_block import ASTOnReceiveBlock from pynestml.meta_model.ast_variable import ASTVariable -from pynestml.symbols.symbol import SymbolKind from pynestml.utils.ast_utils import ASTUtils from pynestml.utils.logger import Logger, LoggingLevel from pynestml.utils.messages import Messages diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index e9f45a6ba..eaa444f4a 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -70,7 +70,7 @@ along with NEST. If not, see . #include "{{ neuronName }}.h" // uncomment the next line to enable printing of detailed debug information -#define DEBUG +// #define DEBUG {% if state_vars_that_need_continuous_buffering | length > 0 %} {%- if continuous_state_buffering_method == "continuous_time_buffer" %} @@ -193,54 +193,58 @@ namespace nest {{ neuronName }}::Buffers_::Buffers_({{ neuronName }} &n): logger_(n) -{%- if neuron.get_spike_input_ports()|length > 0 %} {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if 
inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) -{%- else %} -{%- if attributes_spiking_input_port_necessary %} -{#- generic input port: use lists of spike events for each buffer slot #} +{%- else %} +{%- if attributes_spiking_input_port_necessary %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) -{%- endif %} -{%- endif %} +{%- endif %} +{%- endif %} {%- endfor %} {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. 
) -{%- else %} -{%- if attributes_spiking_input_port_necessary %} -{#- generic input port: use lists of spike events for each buffer slot #} +{%- else %} +{%- if attributes_spiking_input_port_necessary %} +{#- generic input port: use lists of spike events for each buffer slot #} , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) -{%- endif %} -{%- endif %} +{%- endif %} +{%- endif %} {%- endfor %} -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_grid_sum_( 0. ) -{%- else %} -{#- generic input port: use lists of spike events for each buffer slot #} -{%- if attributes_spiking_input_port_necessary %} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} +{%- if attributes_spiking_input_port_necessary %} , spike_input_{{ inputPort.name }}_( nest::VectorRingBuffer() ) -{%- endif %} -{%- endif %} +{%- endif %} +{%- endif %} , spike_input_{{ inputPort.name }}_n_spikes_received_( nest::RingBuffer() ) {%- endif %} {%- endfor %} -{%- endif %} {%- if uses_numeric_solver %} {%- if numeric_solver == "rk45" %} , __s( nullptr ), __c( nullptr ), __e( nullptr ) @@ -252,59 +256,59 @@ namespace nest {{ neuronName }}::Buffers_::Buffers_(const Buffers_ &, {{ neuronName }} &n): logger_(n) -{%- if neuron.get_spike_input_ports()|length > 0 %} -{%- for inputPortSymbol in neuron.get_spike_input_ports() %} -{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} -{%- if 
inputPortSymbol.has_vector_parameter() %} -{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} -{%- for i in range(size) %} -{%- if inputPort.get_parameters() %} -{%- for parameter in inputPort.get_parameters() %} - -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- if neuron.get_spike_input_ports() | length > 0 %} +{%- for inputPortSymbol in neuron.get_spike_input_ports() %} +{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} +{%- if inputPortSymbol.has_vector_parameter() %} +{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} +{%- for i in range(size) %} +{%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} +{%- for parameter in inputPort.get_parameters() %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) -{%- else %} -{#- generic input port: use lists of spike events for each buffer slot #} -{%- if attributes_spiking_input_port_necessary %} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} +{%- if attributes_spiking_input_port_necessary %} , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) -{%- endif %} -{%- endif %} -{%- endfor %} -{%- else %} - ????????????? 
+{%- endif %} +{%- endif %} +{%- endfor %} +{%- endif %} +{%- endfor %} +{%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} {%- endif %} -{%- endfor %} -{%- else %} -{%- for parameter in inputPort.get_parameters() %} - - - -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- for parameter in inputPort.get_parameters() %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) -{%- else %} -{#- generic input port: use lists of spike events for each buffer slot #} -{%- if attributes_spiking_input_port_necessary %} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} +{%- if attributes_spiking_input_port_necessary %} , spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_( nest::VectorRingBuffer() ) -{%- endif %} -{%- endif %} -{%- endfor %} -{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} -{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} +{%- endif %} +{%- endif %} +{%- endfor %} +{%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} +{#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} , spike_input_{{ inputPort.name }}_( nest::RingBuffer() ) , spike_input_{{ inputPort.name }}_grid_sum_( 0. 
) -{%- else %} -{#- generic input port: use lists of spike events for each buffer slot #} -{%- if attributes_spiking_input_port_necessary %} +{%- else %} +{#- generic input port: use lists of spike events for each buffer slot #} +{%- if attributes_spiking_input_port_necessary %} , spike_input_{{ inputPort.name }}_( nest::VectorRingBuffer() ) -{%- endif %} -{%- endif %} +{%- endif %} +{%- endif %} , spike_input_{{ inputPort.name }}_n_spikes_received_( nest::RingBuffer() ) -{%- endif %} -{%- endfor %} +{%- endif %} +{%- endfor %} {%- endif %} {%- if uses_numeric_solver %} {%- if numeric_solver == "rk45" %} @@ -516,6 +520,9 @@ void {{ neuronName }}::init_buffers_() {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} @@ -526,6 +533,9 @@ void {{ neuronName }}::init_buffers_() B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}_n_spikes_received_.clear(); {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); {%- endfor %} @@ -833,6 +843,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for 
parameter in inputPort.get_parameters() %} {%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} {#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} @@ -846,6 +859,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} {%- if inputPortSymbol.name in linear_time_invariant_spiking_input_ports %} {#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} @@ -1014,22 +1030,11 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endif %} {%- endfor %} - - - - - - - - - - /** * clear spike buffers at end of timestep (all spikes have been processed at this point) **/ -{#- if neuron.get_on_receive_blocks() | length == 0 and neuron.get_spike_input_ports() |length > 0 #} -{%- if neuron.get_spike_input_ports() |length > 0 %} +{%- if neuron.get_spike_input_ports() | length > 0 %} {%- for inputPortSymbol in neuron.get_spike_input_ports() %} {%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} @@ -1039,6 +1044,9 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} std::vector< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() 
}}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.clear(); @@ -1046,16 +1054,17 @@ void {{ neuronName }}::update(nest::Time const & origin, const long from, const {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} std::vector< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} - {%- if attributes_spiking_input_port_necessary %} std::vector< double >& __spike_input_{{ inputPort.name }}_list = B_.spike_input_{{ inputPort.name }}_.get_list(lag); __spike_input_{{ inputPort.name }}_list.clear(); {%- endif %} - {%- endif %} {%- endif %} @@ -1172,45 +1181,6 @@ std::cout << "[neuron " << this << "] Writing history at time " << nest::Time(ne return wfr_tol_exceeded; {%- endif %} - - - - - -/* -{%- if neuron.get_spike_input_ports()|length > 0 %} -{%- for inputPortSymbol in neuron.get_spike_input_ports() %} -{%- set inputPort = utils.get_input_port_by_name(astnode.get_input_blocks(), inputPortSymbol.name.split(".")[0]) %} -{%- if inputPortSymbol.has_vector_parameter() %} -{%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} -{%- for i in range(size) %} -{%- if inputPort.get_parameters() %} -{%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.clear(); - // , spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. 
) -{%- endfor %} -{%- endif %} -{%- endfor %} -{%- else %} -{%- for parameter in inputPort.get_parameters() %} - B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.clear(); - //, spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_( 0. ) -{%- endfor %} -{%- if attributes_spiking_input_port_necessary %} - B_.spike_input_{{ inputPort.name }}_.clear(); -{%- endif %} - //, spike_input_{{ inputPort.name }}_n_spikes_received_grid_sum_( 0. ) -{%- endif %} -{%- endfor %} -{%- endif %} -*/ - - - - - - - } {%- if state_vars_that_need_continuous_buffering | length > 0 %} @@ -1315,6 +1285,9 @@ void {{ neuronName }}::handle(nest::SpikeEvent &e) {%- endif %} { {%- if spike_in_port.get_parameters() %} +{%- if spike_in_port.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for attribute in spike_in_port.get_parameters() %} {%- if spike_in_port_name in linear_time_invariant_spiking_input_ports %} {#- linear, time-invariant input port: all spike events for a specific buffer slot can be added together into a single number #} @@ -1402,12 +1375,18 @@ void {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() 
%} const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_grid_sum_; {%- endfor %} @@ -1434,6 +1413,9 @@ void {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} std::vector< double >& __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_.get_list(lag); std::vector< double >::iterator __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.begin(); @@ -1442,6 +1424,9 @@ void {%- endfor %} {%- else %} {%- for parameter in inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} std::vector< double >& __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list = B_.spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_.get_list(lag); std::vector< double >::iterator __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator = __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.begin(); {%- endfor %} @@ -1460,12 +1445,18 @@ void {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for 
parameter in inputPort.get_parameters() %} const double __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }} = *__spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator; {%- endfor %} {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} const double __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }} = *__spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator; {%- endfor %} @@ -1493,12 +1484,18 @@ void {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} ++__spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list_iterator; {%- endfor %} {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} ++__spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list_iterator; {%- endfor %} @@ -1509,17 +1506,24 @@ void } /* // clear the processed spike events from the list + // this can be skipped as reading the spikes into the buffer (using ``get_value(lag)`` automatically clears the list) {%- if inputPortSymbol.has_vector_parameter() %} {%- set size = utils.get_numeric_vector_size(inputPortSymbol) %} {%- for i in range(size) %} {%- if inputPort.get_parameters() %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in 
NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} __spike_input_{{ inputPort.name }}_VEC_IDX_{{ i }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} {%- endif %} {%- endfor %} {%- else %} +{%- if inputPort.get_parameters() | length > 1 %} + {{ raise('Neuron models in NEST Simulator only support one spike event attribute ("weight")') }} +{%- endif %} {%- for parameter in inputPort.get_parameters() %} __spike_input_{{ inputPort.name }}__DOT__{{ parameter.get_name() }}_list.clear(); {%- endfor %} @@ -1697,12 +1701,6 @@ void {{ printer.print(utils.get_variable_by_name(astnode, spike_update.get_variable().get_complete_name())) }} += 1.; {%- endfor %} - /** - * updates due to spiking input ports that appear on the right-hand side of equations (but not inside convolutions) - **/ - - // sorry, this is not supported yet! See https://github.com/nest/nestml/pull/1050 - /** * push back history **/ diff --git a/pynestml/visitors/ast_function_call_visitor.py b/pynestml/visitors/ast_function_call_visitor.py index 0dc2b0be1..2e4eb714e 100644 --- a/pynestml/visitors/ast_function_call_visitor.py +++ b/pynestml/visitors/ast_function_call_visitor.py @@ -19,7 +19,6 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
-from typing import Optional from pynestml.meta_model.ast_equations_block import ASTEquationsBlock from pynestml.meta_model.ast_input_port import ASTInputPort from pynestml.meta_model.ast_model import ASTModel diff --git a/pynestml/visitors/ast_symbol_table_visitor.py b/pynestml/visitors/ast_symbol_table_visitor.py index 629bac41b..50cbd759f 100644 --- a/pynestml/visitors/ast_symbol_table_visitor.py +++ b/pynestml/visitors/ast_symbol_table_visitor.py @@ -471,14 +471,6 @@ def visit_variable(self, node: ASTVariable): node.get_vector_parameter().update_scope(node.get_scope()) node.get_vector_parameter().accept(self) - # if ASTUtils.vector_parameter_is_variable(node.get_vector_parameter()): - # symbol = VariableSymbol(element_reference=node, scope=node.get_scope(), name=node.get_name(), - # block_type=BlockType.INPUT, vector_parameter=node.get_size_parameter(), - # is_predefined=False, is_inline_expression=False, is_recordable=False, - # type_symbol=type_symbol, variable_type=VariableType.BUFFER) - # symbol.set_comment(node.get_comment()) - # node.get_scope().add_symbol(symbol) - if isinstance(node.get_vector_parameter(), ASTParameter): # vector parameter is a declaration symbol = VariableSymbol(element_reference=node, diff --git a/tests/nest_tests/nest_integration_test.py b/tests/nest_tests/nest_integration_test.py index d4f34ff87..04fd1e9ae 100644 --- a/tests/nest_tests/nest_integration_test.py +++ b/tests/nest_tests/nest_integration_test.py @@ -28,10 +28,13 @@ from pynestml.codegeneration.nest_tools import NESTTools from pynestml.frontend.pynestml_frontend import generate_nest_target -import matplotlib -import matplotlib.pyplot as plt +try: + import matplotlib + import matplotlib.pyplot as plt -TEST_PLOTS = True + TEST_PLOTS = True +except BaseException: + TEST_PLOTS = False def get_model_doc_title(model_fname: str): From 39fd9392e663f6112b53ac598a9932f5b5e5cd9a Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Wed, 10 Sep 2025 08:27:31 +0200 Subject: [PATCH 66/68] add attributes to spike events --- models/neurons/iaf_psc_delta_neuron.nestml | 2 +- .../codegeneration/nest_code_generator.py | 21 ++++++++++++++++++- .../nest_compartmental_code_generator.py | 4 ++++ 3 files changed, 25 insertions(+), 2 deletions(-) diff --git a/models/neurons/iaf_psc_delta_neuron.nestml b/models/neurons/iaf_psc_delta_neuron.nestml index 10d8d79e0..9361f8e02 100644 --- a/models/neurons/iaf_psc_delta_neuron.nestml +++ b/models/neurons/iaf_psc_delta_neuron.nestml @@ -44,7 +44,7 @@ # +++++++++++++++++++ # # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify diff --git a/pynestml/codegeneration/nest_code_generator.py b/pynestml/codegeneration/nest_code_generator.py index 94b8f5084..c65416448 100644 --- a/pynestml/codegeneration/nest_code_generator.py +++ b/pynestml/codegeneration/nest_code_generator.py @@ -162,6 +162,24 @@ class NESTCodeGenerator(CodeGenerator): - **delay_variable**: A mapping identifying, for each synapse (the name of which is given as a key), the variable or parameter in the model that corresponds with the NEST ``Connection`` class delay property. - **weight_variable**: Like ``delay_variable``, but for synaptic weight. - **linear_time_invariant_spiking_input_ports**: A list of spiking input ports which can be treated as linear and time-invariant; this implies that, for the given port(s), the weight of all spikes received within a timestep can be added together, improving memory consumption and runtime performance. Use with caution; for example, this is not compatible with using a single input port for, depending on the sign of the weight of the spike event, processing both inhibitory vs. excitatory spikes. + - **excitatory_inhibitory_combined_port**: A tuple containing the name of two spiking input ports. 
Without this option set, each input port would be assigned its own unique rport for connecting to in NEST, for instance: + + .. code-block:: python + + receptor_types = nest.GetStatus(neuron, "receptor_types")[0] + nest.Connect(sg_exc, neuron, syn_spec={"receptor_type": receptor_types["EXC_SPIKES"], "weight": 1.}) + nest.Connect(sg_inh, neuron, syn_spec={"receptor_type": receptor_types["INH_SPIKES"], "weight": 1.}) + + For compatibility with legacy NEST Simulator models, these ports can instead be externally represented as a single port, that interprets spikes as either "excitatory" or "inhibitory" depending on the sign of the weight. In this case, connections to the neuron are made while omitting an explicit rport; inhibitory connections are the indicated by weights with a negative sign: + + .. code-block:: python + + receptor_types = nest.GetStatus(neuron, "receptor_types")[0] + nest.Connect(sg_exc, neuron, syn_spec={"weight": 1.}) + nest.Connect(sg_inh, neuron, syn_spec={"weight": -1.}) + + This flag can only be used if there are exactly two spiking input ports in the model. + - **redirect_build_output**: An optional boolean key for redirecting the build output. Setting the key to ``True``, two files will be created for redirecting the ``stdout`` and the ``stderr`. The ``target_path`` will be used as the default location for creating the two files. - **build_output_dir**: An optional string key representing the new path where the files corresponding to the output of the build phase will be created. This key requires that the ``redirect_build_output`` is set to ``True``. 
@@ -194,7 +212,8 @@ class NESTCodeGenerator(CodeGenerator): "continuous_state_buffering_method": "continuous_time_buffer", "delay_variable": {}, "weight_variable": {}, - "linear_time_invariant_spiking_input_ports": [] + "linear_time_invariant_spiking_input_ports": [], + "excitatory_inhibitory_combined_port": () } def __init__(self, options: Optional[Mapping[str, Any]] = None): diff --git a/pynestml/codegeneration/nest_compartmental_code_generator.py b/pynestml/codegeneration/nest_compartmental_code_generator.py index 9ddfdd175..9cc3ad1bc 100644 --- a/pynestml/codegeneration/nest_compartmental_code_generator.py +++ b/pynestml/codegeneration/nest_compartmental_code_generator.py @@ -74,6 +74,7 @@ from pynestml.utils.model_parser import ModelParser from pynestml.utils.syns_info_enricher import SynsInfoEnricher from pynestml.utils.synapse_processing import SynapseProcessing +from pynestml.visitors.ast_parent_visitor import ASTParentVisitor from pynestml.visitors.ast_random_number_generator_visitor import ASTRandomNumberGeneratorVisitor from pynestml.visitors.ast_symbol_table_visitor import ASTSymbolTableVisitor @@ -417,6 +418,7 @@ def analyse_neuron(self, neuron: ASTModel) -> List[ASTAssignment]: assert len(neuron.get_equations_blocks()) == 1, "Only one equations block supported for now" assert len(neuron.get_state_blocks()) == 1, "Only one state block supported for now" + neuron.accept(ASTParentVisitor()) equations_block = neuron.get_equations_blocks()[0] @@ -527,6 +529,8 @@ def analyse_neuron(self, neuron: ASTModel) -> List[ASTAssignment]: spike_updates = self.get_spike_update_expressions( neuron, kernel_buffers, [analytic_solver, numeric_solver], delta_factors) + neuron.accept(ASTParentVisitor()) + return spike_updates def compute_name_of_generated_file(self, jinja_file_name, neuron): From 88a8e96574b73cd3afd3c29c45c705f75819c550 Mon Sep 17 00:00:00 2001 From: "C.A.P. 
Linssen" Date: Wed, 24 Sep 2025 14:14:13 +0200 Subject: [PATCH 67/68] add attributes to spike events --- .../nestml_language_concepts.rst | 26 +- models/neurons/aeif_psc_alpha_neuron.nestml | 117 -- .../point_neuron/common/NeuronClass.jinja2 | 4 +- .../point_neuron/common/NeuronHeader.jinja2 | 35 +- pynestml/generated/PyNestMLLexer.py | 4 +- pynestml/generated/PyNestMLParser.py | 1645 +++++++++-------- pynestml/generated/PyNestMLParserVisitor.py | 2 +- pynestml/utils/ast_utils.py | 22 +- tests/test_cocos.py | 10 +- 9 files changed, 901 insertions(+), 964 deletions(-) delete mode 100644 models/neurons/aeif_psc_alpha_neuron.nestml diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index 95ee99aa5..f5b29c72b 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -1177,7 +1177,16 @@ An ``onReceive`` block can be defined for every spiking input port. For example, println("Info: processing a presynaptic spike at time t = {t}") # ... further statements go here ... -The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t-\epsilon`, for :math:`\epsilon\rightarrow 0`) to "just after" the event (at :math:`t=t+\epsilon`). Because the statements in the ``onReceive`` block are executed "instantaneously" at the time of the spike, the units of 1/s due to the definition of the delta function drop out. For instance, when a port is defined with an attribute "psp" in units of mV, then the following has consistent units: +The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t-\epsilon`, for :math:`\epsilon\rightarrow 0`) to "just after" the event (at :math:`t=t+\epsilon`). 
Analogous to the ``update`` block, the predefined variable ``t`` indicates the time :math:`t-\epsilon` at the start of the interval, whereas the predefined function ``timestep()`` yields the duration of the interval :math:`2\epsilon` for :math:`\epsilon\rightarrow 0`. As the timestep() function would typically yield a numerical value equal to zero, its use inside an ``onReceive`` block only makes sense to integrate across delta pulses. + +Typically, the statements in the ``onReceive`` block integrate over the delta function across time, which yields the surface area of the pulse, which typically corresponds to the weight of the spike or to another spike event attribute. Integration across time causes the 1/s unit of the spike train to drop out, so that what remains are the units of the spike attribute itself. For instance, when a port is defined with an attribute "psp" in units of mV: + +.. code-block:: nestml + + input: + in_spikes(psp mV) <- spike + +then the following has consistent units: .. code-block:: nestml @@ -1185,9 +1194,16 @@ The statements in the event handler will be executed when the event occurs and i V_m mV = 0 mV onReceive(in_spikes): - V_m += in_spikes.psp # consistent units: lhs and rhs both in [mV] + V_m += integrate(in_spikes.psp, t, t + timestep()) # lhs and rhs both in [mV] + +A spiking input port (or any of its attributes) may not appear outside of a ``integrate()`` call, because the units will be inconsistent; for example: + +.. code-block:: nestml + + onReceive(in_spikes): + V_m += in_spikes.psp # error! lhs in [mV], rhs in [mV/s] -In ``onReceive`` blocks, a spiking input port may not appear without an attribute present: +A spiking input port may appear without an attribute present; this refers to the unweighted train of delta pulses (with surface area 1): .. 
code-block:: nestml @@ -1195,10 +1211,10 @@ In ``onReceive`` blocks, a spiking input port may not appear without an attribut x real = 0 input: - in_spikes <- spike(weight pA) + in_spikes <- spike(foo pA, bar mmol) onReceive(in_spikes): - x += in_spikes # error! + x += integrate(in_spikes, t, t + timestep()) # increments x by 1 To specify in which sequence the event handlers should be called in case multiple events are received at the exact same time, the ``priority`` parameter can be used, which can be given an integer value, where a larger value means higher priority (handled earlier). For example: diff --git a/models/neurons/aeif_psc_alpha_neuron.nestml b/models/neurons/aeif_psc_alpha_neuron.nestml deleted file mode 100644 index 8d313e445..000000000 --- a/models/neurons/aeif_psc_alpha_neuron.nestml +++ /dev/null @@ -1,117 +0,0 @@ -# aeif_psc_alpha - Conductance based exponential integrate-and-fire neuron model -# ############################################################################## -# -# Description -# +++++++++++ -# -# aeif_psc_alpha is the adaptive exponential integrate and fire neuron according to Brette and Gerstner (2005), with post-synaptic conductances in the form of a bi-exponential ("alpha") function. -# -# The membrane potential is given by the following differential equation: -# -# .. math:: -# -# C_m \frac{dV_m}{dt} = -# -g_L(V_m-E_L)+g_L\Delta_T\exp\left(\frac{V_m-V_{th}}{\Delta_T}\right) - -# g_e(t)(V_m-E_e) \\ -# -g_i(t)(V_m-E_i)-w + I_e -# -# and -# -# .. math:: -# -# \tau_w \frac{dw}{dt} = a(V_m-E_L) - w -# -# Note that the membrane potential can diverge to positive infinity due to the exponential term. To avoid numerical instabilities, instead of :math:`V_m`, the value :math:`\min(V_m,V_{peak})` is used in the dynamical equations. -# -# -# References -# ++++++++++ -# -# .. [1] Brette R and Gerstner W (2005). Adaptive exponential -# integrate-and-fire model as an effective description of neuronal -# activity. Journal of Neurophysiology. 
943637-3642 -# DOI: https://doi.org/10.1152/jn.00686.2005 -# -# -# See also -# ++++++++ -# -# iaf_psc_alpha, aeif_psc_exp -# -model aeif_psc_alpha_neuron: - - state: - V_m mV = E_L # Membrane potential - w pA = 0 pA # Spike-adaptation current - refr_t ms = 0 ms # Refractory period timer - I_syn_exc pA = 0 pA # AHP conductance - I_syn_exc' pA/ms = 0 pA/ms # AHP conductance - I_syn_inh pA = 0 pA # AHP conductance - I_syn_inh' pA/ms = 0 pA/ms # AHP conductance - - equations: - inline V_bounded mV = min(V_m, V_peak) # prevent exponential divergence - - I_syn_exc'' = -2 * I_syn_exc' / tau_exc - I_syn_exc / tau_exc**2 - I_syn_inh'' = -2 * I_syn_inh' / tau_inh - I_syn_inh / tau_inh**2 - - # Add inlines to simplify the equation definition of V_m - inline exp_arg real = (V_bounded - V_th) / Delta_T - inline I_spike pA = g_L * Delta_T * exp(exp_arg) - - V_m' = (-g_L * (V_bounded - E_L) + I_spike + I_syn_exc - I_syn_inh - w + I_e + I_stim) / C_m - w' = (a * (V_bounded - E_L) - w) / tau_w - - refr_t' = -1e3 * ms/s # refractoriness is implemented as an ODE, representing a timer counting back down to zero. 
XXX: TODO: This should simply read ``refr_t' = -1 / s`` (see https://github.com/nest/nestml/issues/984) - - parameters: - # membrane parameters - C_m pF = 281.0 pF # Membrane Capacitance - refr_T ms = 2 ms # Duration of refractory period - V_reset mV = -60.0 mV # Reset Potential - g_L nS = 30.0 nS # Leak Conductance - E_L mV = -70.6 mV # Leak reversal Potential (aka resting potential) - - # spike adaptation parameters - a nS = 4 nS # Subthreshold adaptation - b pA = 80.5 pA # Spike-triggered adaptation - Delta_T mV = 2.0 mV # Slope factor - tau_w ms = 144.0 ms # Adaptation time constant - V_th mV = -50.4 mV # Threshold Potential - V_peak mV = 0 mV # Spike detection threshold - - # synaptic parameters - tau_exc ms = 0.2 ms # Synaptic Time Constant Excitatory Synapse - tau_inh ms = 2.0 ms # Synaptic Time Constant for Inhibitory Synapse - - # constant external input current - I_e pA = 0 pA - - input: - exc_spikes <- excitatory spike - inh_spikes <- inhibitory spike - I_stim pA <- continuous - - output: - spike - - update: - if refr_t > 0 ms: - # neuron is absolute refractory, do not evolve V_m - integrate_odes(I_syn_exc, I_syn_inh, w, refr_t) - else: - # always evolve all ODEs; V_m - integrate_odes(I_syn_exc, I_syn_inh, w, V_m) - - onCondition(V_m >= V_th): - # threshold crossing - refr_t = refr_T # start of the refractory period - V_m = V_reset - w += b - emit_spike() - - onReceive(exc_spikes): # Spike input - I_syn_exc' += exc_spikes * (e / tau_exc) * pA * s - - onReceive(inh_spikes): # Spike input - I_syn_inh' += inh_spikes * (e / tau_inh) * pA * s diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 index cd5e2b85b..7209b002e 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronClass.jinja2 @@ -1291,9 +1291,9 @@ void {{ neuronName 
}}::handle(nest::SpikeEvent &e) #endif assert(e.get_delay_steps() > 0); - assert(e.get_rport() <= {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}); + assert(e.get_rport() <= {{ utils.nestml_spiking_input_port_to_nest_rport_dict(astnode) | length }}); -{%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} +{%- for spike_in_port_name, rport in utils.nestml_spiking_input_port_to_nest_rport_dict(astnode).items() %} {%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} {%- if astnode.get_body().get_spike_input_ports() | length > 1 or astnode.is_multisynapse_spikes() %} if (e.get_rport() == {{ rport }}) diff --git a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 index 809734eb7..3375fed86 100644 --- a/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 +++ b/pynestml/codegeneration/resources_nest/point_neuron/common/NeuronHeader.jinja2 @@ -535,32 +535,15 @@ private: {%- endif %} private: -{% if has_multiple_synapses -%} /** * Synapse types to connect to * @note Excluded lower and upper bounds are defined as MIN_, MAX_. * Excluding port 0 avoids accidental connections. 
**/ - const nest_port_t MAX_SPIKE_RECEPTOR = {{ utils.nestml_input_port_to_nest_rport_dict(astnode) | length }}; - enum ContinuousInput - { -{%- set ns.count = 0 -%} -{%- for port in neuron.get_continuous_input_ports() %} -{%- if port.has_vector_parameter() -%} -{% set size = utils.get_numeric_vector_size(port) | int %} -{%- for i in range(size) %} - {{port.get_symbol_name().upper()}}_{{i}} = {{ns.count}}, -{%- set ns.count = ns.count + 1 -%} -{%- endfor %} -{%- else %} - {{port.get_symbol_name().upper()}} = {{ns.count}}, -{%- set ns.count = ns.count + 1 -%} -{%- endif -%} -{%- endfor %} - NUM_CONTINUOUS_INPUT_PORTS = {{ ns.count }} - }; + const nest_port_t MAX_SPIKE_RECEPTOR = {{ utils.nestml_spiking_input_port_to_nest_rport_dict(astnode) | length }}; + + const nest_port_t NUM_CONTINUOUS_INPUT_PORTS = {{ utils.nestml_continuous_input_port_to_nest_rport_dict(astnode) | length }}; -{%- endif %} /** * Reset state of neuron. @@ -1101,19 +1084,19 @@ inline void {{neuronName}}::get_status(DictionaryDatum &__d) const // input ports to NEST rport number mapping DictionaryDatum __receptor_type = new Dictionary(); -{%- for spike_in_port_name, rport in utils.nestml_input_port_to_nest_rport_dict(astnode).items() %} +{%- for spike_in_port_name, rport in utils.nestml_spiking_input_port_to_nest_rport_dict(astnode).items() %} {%- set spike_in_port = utils.get_input_port_by_name(astnode.get_input_blocks(), spike_in_port_name.split("_VEC_IDX_")[0]) %} ( *__receptor_type )[ "{{spike_in_port_name.upper()}}" ] = {{ rport }}; {%- endfor %} (*__d)["receptor_types"] = __receptor_type; -{%- endif %} {%- if (neuron.get_continuous_input_ports())|length > 1 %} DictionaryDatum __continuous_input_type = new Dictionary(); -{%- for port in neuron.get_continuous_input_ports() %} - ( *__continuous_input_type )[ "{{port.get_symbol_name().upper()}}" ] = {{port.get_symbol_name().upper()}}; -{%- endfor %} - ( *__d )[ "continuous_inputs" ] = __continuous_input_type; +{%- for port in 
neuron.get_continuous_input_ports() %} + ( *__continuous_input_type )[ "{{port.get_symbol_name().upper()}}" ] = {{port.get_symbol_name().upper()}}; +{%- endfor %} + ( *__d )[ "continuous_inputs" ] = __continuous_input_type; +{%- endif %} (*__d)[nest::names::recordables] = recordablesMap_.get_list(); {%- if uses_numeric_solver %} diff --git a/pynestml/generated/PyNestMLLexer.py b/pynestml/generated/PyNestMLLexer.py index d9f1a8ff1..bbc234488 100644 --- a/pynestml/generated/PyNestMLLexer.py +++ b/pynestml/generated/PyNestMLLexer.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLLexer.g4 by ANTLR 4.13.2 +# Generated from PyNestMLLexer.g4 by ANTLR 4.13.1 from antlr4 import * from io import StringIO import sys @@ -416,7 +416,7 @@ class PyNestMLLexer(PyNestMLLexerBase): def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.13.2") + self.checkVersion("4.13.1") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 3d226e3fe..4ce33aed3 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.13.2 +# Generated from PyNestMLParser.g4 by ANTLR 4.13.1 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,89,666,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,87,676,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -29,234 +29,238 @@ def serializedATN(): 3,8,221,8,8,1,9,1,9,1,9,1,9,1,9,3,9,228,8,9,1,10,1,10,1,10,1,10, 
1,10,1,10,1,10,3,10,237,8,10,1,11,1,11,3,11,241,8,11,1,12,1,12,1, 12,1,12,1,12,3,12,248,8,12,1,12,5,12,251,8,12,10,12,12,12,254,9, - 12,1,13,1,13,1,13,1,13,1,13,5,13,261,8,13,10,13,12,13,264,9,13,3, - 13,266,8,13,1,13,1,13,1,14,3,14,271,8,14,1,14,1,14,1,14,1,14,1,14, - 1,14,3,14,279,8,14,1,14,5,14,282,8,14,10,14,12,14,285,9,14,1,14, - 1,14,1,15,1,15,1,15,1,15,3,15,293,8,15,1,15,5,15,296,8,15,10,15, - 12,15,299,9,15,1,15,1,15,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,16, - 1,16,5,16,312,8,16,10,16,12,16,315,9,16,1,16,3,16,318,8,16,1,16, - 1,16,1,17,1,17,3,17,324,8,17,1,18,1,18,1,18,3,18,329,8,18,1,19,1, - 19,1,19,1,19,3,19,335,8,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20,1, - 20,3,20,345,8,20,1,20,1,20,1,21,3,21,350,8,21,1,21,3,21,353,8,21, - 1,21,1,21,1,21,5,21,358,8,21,10,21,12,21,361,9,21,1,21,1,21,1,21, - 3,21,366,8,21,1,21,1,21,1,21,1,21,3,21,372,8,21,1,21,5,21,375,8, - 21,10,21,12,21,378,9,21,1,22,1,22,1,22,1,23,3,23,384,8,23,1,23,1, - 23,1,23,5,23,389,8,23,10,23,12,23,392,9,23,1,24,1,24,3,24,396,8, - 24,1,25,1,25,5,25,400,8,25,10,25,12,25,403,9,25,1,25,3,25,406,8, - 25,1,26,1,26,1,26,1,26,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1, - 27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,28,1,28,1,28,1,29,1,29,1, - 29,1,29,1,29,1,29,1,29,1,29,3,29,439,8,29,1,29,1,29,1,29,1,29,1, - 29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30,1,30,1,30,1,31,1,31,4, - 31,458,8,31,11,31,12,31,459,1,31,1,31,1,32,1,32,1,32,1,32,1,32,1, - 33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,479,8,33,11, - 33,12,33,480,1,33,1,33,1,34,1,34,1,34,1,34,1,34,5,34,490,8,34,10, - 34,12,34,493,9,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,35,1,35,1, - 35,1,35,1,35,5,35,507,8,35,10,35,12,35,510,9,35,1,35,1,35,1,35,1, - 35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,524,8,36,11,36,12, - 36,525,1,36,1,36,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,38,1,38,1, - 38,1,38,1,38,1,38,1,38,4,38,544,8,38,11,38,12,38,545,1,38,1,38,1, - 39,1,39,1,39,1,39,1,39,1,39,3,39,556,8,39,1,39,1,39,1,39,1,39,5, - 
39,562,8,39,10,39,12,39,565,9,39,3,39,567,8,39,1,39,3,39,570,8,39, - 4,39,572,8,39,11,39,12,39,573,1,39,1,39,1,40,1,40,1,40,1,40,1,40, - 3,40,583,8,40,1,40,1,40,5,40,587,8,40,10,40,12,40,590,9,40,1,40, - 1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,600,8,41,1,41,1,41,1,41, - 1,41,1,41,1,42,1,42,3,42,609,8,42,1,43,1,43,1,43,1,43,1,43,1,43, - 1,43,1,43,1,43,5,43,620,8,43,10,43,12,43,623,9,43,3,43,625,8,43, - 1,43,3,43,628,8,43,1,43,3,43,631,8,43,1,43,1,43,1,43,1,44,1,44,1, - 44,1,44,1,44,1,44,5,44,642,8,44,10,44,12,44,645,9,44,3,44,647,8, - 44,1,44,1,44,3,44,651,8,44,1,44,1,44,1,44,1,44,1,44,1,44,1,45,1, - 45,1,45,1,46,1,46,1,46,1,46,1,46,0,2,4,12,47,0,2,4,6,8,10,12,14, - 16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58, - 60,62,64,66,68,70,72,74,76,78,80,82,84,86,88,90,92,0,4,2,0,49,49, - 73,73,1,0,88,89,1,0,30,32,3,0,23,23,85,86,88,89,731,0,100,1,0,0, - 0,2,114,1,0,0,0,4,125,1,0,0,0,6,148,1,0,0,0,8,150,1,0,0,0,10,152, - 1,0,0,0,12,165,1,0,0,0,14,215,1,0,0,0,16,220,1,0,0,0,18,227,1,0, - 0,0,20,236,1,0,0,0,22,240,1,0,0,0,24,242,1,0,0,0,26,255,1,0,0,0, - 28,270,1,0,0,0,30,288,1,0,0,0,32,302,1,0,0,0,34,323,1,0,0,0,36,328, - 1,0,0,0,38,334,1,0,0,0,40,338,1,0,0,0,42,349,1,0,0,0,44,379,1,0, - 0,0,46,383,1,0,0,0,48,393,1,0,0,0,50,397,1,0,0,0,52,407,1,0,0,0, - 54,415,1,0,0,0,56,423,1,0,0,0,58,430,1,0,0,0,60,447,1,0,0,0,62,457, - 1,0,0,0,64,463,1,0,0,0,66,468,1,0,0,0,68,484,1,0,0,0,70,501,1,0, - 0,0,72,518,1,0,0,0,74,529,1,0,0,0,76,536,1,0,0,0,78,549,1,0,0,0, - 80,577,1,0,0,0,82,594,1,0,0,0,84,608,1,0,0,0,86,610,1,0,0,0,88,635, - 1,0,0,0,90,658,1,0,0,0,92,661,1,0,0,0,94,101,5,8,0,0,95,101,5,9, - 0,0,96,101,5,10,0,0,97,101,5,11,0,0,98,101,5,12,0,0,99,101,3,4,2, - 0,100,94,1,0,0,0,100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100, - 98,1,0,0,0,100,99,1,0,0,0,101,1,1,0,0,0,102,104,5,73,0,0,103,102, - 1,0,0,0,103,104,1,0,0,0,104,105,1,0,0,0,105,106,5,47,0,0,106,107, - 5,88,0,0,107,108,5,77,0,0,108,109,5,88,0,0,109,115,5,48,0,0,110, - 
112,7,0,0,0,111,110,1,0,0,0,111,112,1,0,0,0,112,113,1,0,0,0,113, - 115,7,1,0,0,114,103,1,0,0,0,114,111,1,0,0,0,115,3,1,0,0,0,116,117, - 6,2,-1,0,117,118,5,47,0,0,118,119,3,4,2,0,119,120,5,48,0,0,120,126, - 1,0,0,0,121,122,5,88,0,0,122,123,5,77,0,0,123,126,3,4,2,2,124,126, - 5,87,0,0,125,116,1,0,0,0,125,121,1,0,0,0,125,124,1,0,0,0,126,138, - 1,0,0,0,127,130,10,3,0,0,128,131,5,75,0,0,129,131,5,77,0,0,130,128, - 1,0,0,0,130,129,1,0,0,0,131,132,1,0,0,0,132,137,3,4,2,4,133,134, - 10,4,0,0,134,135,5,76,0,0,135,137,3,2,1,0,136,127,1,0,0,0,136,133, - 1,0,0,0,137,140,1,0,0,0,138,136,1,0,0,0,138,139,1,0,0,0,139,5,1, - 0,0,0,140,138,1,0,0,0,141,149,5,43,0,0,142,149,5,44,0,0,143,144, - 5,45,0,0,144,145,3,8,4,0,145,146,5,81,0,0,146,147,3,10,5,0,147,149, - 1,0,0,0,148,141,1,0,0,0,148,142,1,0,0,0,148,143,1,0,0,0,149,7,1, - 0,0,0,150,151,5,87,0,0,151,9,1,0,0,0,152,153,5,87,0,0,153,11,1,0, - 0,0,154,155,6,6,-1,0,155,156,5,47,0,0,156,157,3,12,6,0,157,158,5, - 48,0,0,158,166,1,0,0,0,159,160,3,16,8,0,160,161,3,12,6,9,161,166, - 1,0,0,0,162,163,5,26,0,0,163,166,3,12,6,4,164,166,3,14,7,0,165,154, - 1,0,0,0,165,159,1,0,0,0,165,162,1,0,0,0,165,164,1,0,0,0,166,203, - 1,0,0,0,167,168,10,10,0,0,168,169,5,76,0,0,169,202,3,12,6,10,170, - 174,10,8,0,0,171,175,5,75,0,0,172,175,5,77,0,0,173,175,5,78,0,0, - 174,171,1,0,0,0,174,172,1,0,0,0,174,173,1,0,0,0,175,176,1,0,0,0, - 176,202,3,12,6,9,177,180,10,7,0,0,178,181,5,49,0,0,179,181,5,73, - 0,0,180,178,1,0,0,0,180,179,1,0,0,0,181,182,1,0,0,0,182,202,3,12, - 6,8,183,184,10,6,0,0,184,185,3,18,9,0,185,186,3,12,6,7,186,202,1, - 0,0,0,187,188,10,5,0,0,188,189,3,20,10,0,189,190,3,12,6,6,190,202, - 1,0,0,0,191,192,10,3,0,0,192,193,3,22,11,0,193,194,3,12,6,4,194, - 202,1,0,0,0,195,196,10,2,0,0,196,197,5,79,0,0,197,198,3,12,6,0,198, - 199,5,80,0,0,199,200,3,12,6,3,200,202,1,0,0,0,201,167,1,0,0,0,201, - 170,1,0,0,0,201,177,1,0,0,0,201,183,1,0,0,0,201,187,1,0,0,0,201, - 191,1,0,0,0,201,195,1,0,0,0,202,205,1,0,0,0,203,201,1,0,0,0,203, - 
204,1,0,0,0,204,13,1,0,0,0,205,203,1,0,0,0,206,216,3,26,13,0,207, - 216,5,85,0,0,208,210,7,1,0,0,209,211,3,24,12,0,210,209,1,0,0,0,210, - 211,1,0,0,0,211,216,1,0,0,0,212,216,5,86,0,0,213,216,5,23,0,0,214, - 216,3,24,12,0,215,206,1,0,0,0,215,207,1,0,0,0,215,208,1,0,0,0,215, - 212,1,0,0,0,215,213,1,0,0,0,215,214,1,0,0,0,216,15,1,0,0,0,217,221, - 5,49,0,0,218,221,5,73,0,0,219,221,5,50,0,0,220,217,1,0,0,0,220,218, - 1,0,0,0,220,219,1,0,0,0,221,17,1,0,0,0,222,228,5,53,0,0,223,228, - 5,52,0,0,224,228,5,51,0,0,225,228,5,59,0,0,226,228,5,60,0,0,227, - 222,1,0,0,0,227,223,1,0,0,0,227,224,1,0,0,0,227,225,1,0,0,0,227, - 226,1,0,0,0,228,19,1,0,0,0,229,237,5,61,0,0,230,237,5,63,0,0,231, - 237,5,68,0,0,232,237,5,69,0,0,233,237,5,70,0,0,234,237,5,71,0,0, - 235,237,5,62,0,0,236,229,1,0,0,0,236,230,1,0,0,0,236,231,1,0,0,0, - 236,232,1,0,0,0,236,233,1,0,0,0,236,234,1,0,0,0,236,235,1,0,0,0, - 237,21,1,0,0,0,238,241,5,24,0,0,239,241,5,25,0,0,240,238,1,0,0,0, - 240,239,1,0,0,0,241,23,1,0,0,0,242,247,5,87,0,0,243,244,5,54,0,0, - 244,245,3,12,6,0,245,246,5,56,0,0,246,248,1,0,0,0,247,243,1,0,0, - 0,247,248,1,0,0,0,248,252,1,0,0,0,249,251,5,83,0,0,250,249,1,0,0, - 0,251,254,1,0,0,0,252,250,1,0,0,0,252,253,1,0,0,0,253,25,1,0,0,0, - 254,252,1,0,0,0,255,256,5,87,0,0,256,265,5,47,0,0,257,262,3,12,6, - 0,258,259,5,72,0,0,259,261,3,12,6,0,260,258,1,0,0,0,261,264,1,0, - 0,0,262,260,1,0,0,0,262,263,1,0,0,0,263,266,1,0,0,0,264,262,1,0, - 0,0,265,257,1,0,0,0,265,266,1,0,0,0,266,267,1,0,0,0,267,268,5,48, - 0,0,268,27,1,0,0,0,269,271,5,27,0,0,270,269,1,0,0,0,270,271,1,0, - 0,0,271,272,1,0,0,0,272,273,5,14,0,0,273,274,5,87,0,0,274,275,3, - 0,0,0,275,276,5,74,0,0,276,278,3,12,6,0,277,279,5,82,0,0,278,277, - 1,0,0,0,278,279,1,0,0,0,279,283,1,0,0,0,280,282,3,6,3,0,281,280, - 1,0,0,0,282,285,1,0,0,0,283,281,1,0,0,0,283,284,1,0,0,0,284,286, - 1,0,0,0,285,283,1,0,0,0,286,287,5,7,0,0,287,29,1,0,0,0,288,289,3, - 24,12,0,289,290,5,74,0,0,290,292,3,12,6,0,291,293,5,82,0,0,292,291, - 
1,0,0,0,292,293,1,0,0,0,293,297,1,0,0,0,294,296,3,6,3,0,295,294, - 1,0,0,0,296,299,1,0,0,0,297,295,1,0,0,0,297,298,1,0,0,0,298,300, - 1,0,0,0,299,297,1,0,0,0,300,301,5,7,0,0,301,31,1,0,0,0,302,303,5, - 28,0,0,303,304,3,24,12,0,304,305,5,74,0,0,305,313,3,12,6,0,306,307, - 5,3,0,0,307,308,3,24,12,0,308,309,5,74,0,0,309,310,3,12,6,0,310, - 312,1,0,0,0,311,306,1,0,0,0,312,315,1,0,0,0,313,311,1,0,0,0,313, - 314,1,0,0,0,314,317,1,0,0,0,315,313,1,0,0,0,316,318,5,82,0,0,317, - 316,1,0,0,0,317,318,1,0,0,0,318,319,1,0,0,0,319,320,5,7,0,0,320, - 33,1,0,0,0,321,324,3,38,19,0,322,324,3,36,18,0,323,321,1,0,0,0,323, - 322,1,0,0,0,324,35,1,0,0,0,325,329,3,50,25,0,326,329,3,58,29,0,327, - 329,3,60,30,0,328,325,1,0,0,0,328,326,1,0,0,0,328,327,1,0,0,0,329, - 37,1,0,0,0,330,335,3,40,20,0,331,335,3,26,13,0,332,335,3,42,21,0, - 333,335,3,48,24,0,334,330,1,0,0,0,334,331,1,0,0,0,334,332,1,0,0, - 0,334,333,1,0,0,0,335,336,1,0,0,0,336,337,5,7,0,0,337,39,1,0,0,0, - 338,344,3,24,12,0,339,345,5,74,0,0,340,345,5,64,0,0,341,345,5,65, - 0,0,342,345,5,66,0,0,343,345,5,67,0,0,344,339,1,0,0,0,344,340,1, - 0,0,0,344,341,1,0,0,0,344,342,1,0,0,0,344,343,1,0,0,0,345,346,1, - 0,0,0,346,347,3,12,6,0,347,41,1,0,0,0,348,350,5,27,0,0,349,348,1, - 0,0,0,349,350,1,0,0,0,350,352,1,0,0,0,351,353,5,14,0,0,352,351,1, - 0,0,0,352,353,1,0,0,0,353,354,1,0,0,0,354,359,3,24,12,0,355,356, - 5,72,0,0,356,358,3,24,12,0,357,355,1,0,0,0,358,361,1,0,0,0,359,357, - 1,0,0,0,359,360,1,0,0,0,360,362,1,0,0,0,361,359,1,0,0,0,362,365, - 3,0,0,0,363,364,5,74,0,0,364,366,3,12,6,0,365,363,1,0,0,0,365,366, - 1,0,0,0,366,371,1,0,0,0,367,368,5,57,0,0,368,369,3,12,6,0,369,370, - 5,58,0,0,370,372,1,0,0,0,371,367,1,0,0,0,371,372,1,0,0,0,372,376, - 1,0,0,0,373,375,3,6,3,0,374,373,1,0,0,0,375,378,1,0,0,0,376,374, - 1,0,0,0,376,377,1,0,0,0,377,43,1,0,0,0,378,376,1,0,0,0,379,380,3, - 42,21,0,380,381,5,7,0,0,381,45,1,0,0,0,382,384,5,7,0,0,383,382,1, - 0,0,0,383,384,1,0,0,0,384,385,1,0,0,0,385,390,3,34,17,0,386,389, - 
5,7,0,0,387,389,3,34,17,0,388,386,1,0,0,0,388,387,1,0,0,0,389,392, - 1,0,0,0,390,388,1,0,0,0,390,391,1,0,0,0,391,47,1,0,0,0,392,390,1, - 0,0,0,393,395,5,15,0,0,394,396,3,12,6,0,395,394,1,0,0,0,395,396, - 1,0,0,0,396,49,1,0,0,0,397,401,3,52,26,0,398,400,3,54,27,0,399,398, - 1,0,0,0,400,403,1,0,0,0,401,399,1,0,0,0,401,402,1,0,0,0,402,405, - 1,0,0,0,403,401,1,0,0,0,404,406,3,56,28,0,405,404,1,0,0,0,405,406, - 1,0,0,0,406,51,1,0,0,0,407,408,5,16,0,0,408,409,3,12,6,0,409,410, - 5,80,0,0,410,411,5,7,0,0,411,412,5,1,0,0,412,413,3,46,23,0,413,414, - 5,2,0,0,414,53,1,0,0,0,415,416,5,17,0,0,416,417,3,12,6,0,417,418, - 5,80,0,0,418,419,5,7,0,0,419,420,5,1,0,0,420,421,3,46,23,0,421,422, - 5,2,0,0,422,55,1,0,0,0,423,424,5,18,0,0,424,425,5,80,0,0,425,426, - 5,7,0,0,426,427,5,1,0,0,427,428,3,46,23,0,428,429,5,2,0,0,429,57, - 1,0,0,0,430,431,5,19,0,0,431,432,5,87,0,0,432,433,5,21,0,0,433,434, - 3,12,6,0,434,435,5,46,0,0,435,436,3,12,6,0,436,438,5,22,0,0,437, - 439,5,73,0,0,438,437,1,0,0,0,438,439,1,0,0,0,439,440,1,0,0,0,440, - 441,7,1,0,0,441,442,5,80,0,0,442,443,5,7,0,0,443,444,5,1,0,0,444, - 445,3,46,23,0,445,446,5,2,0,0,446,59,1,0,0,0,447,448,5,20,0,0,448, - 449,3,12,6,0,449,450,5,80,0,0,450,451,5,7,0,0,451,452,5,1,0,0,452, - 453,3,46,23,0,453,454,5,2,0,0,454,61,1,0,0,0,455,458,3,64,32,0,456, - 458,5,7,0,0,457,455,1,0,0,0,457,456,1,0,0,0,458,459,1,0,0,0,459, - 457,1,0,0,0,459,460,1,0,0,0,460,461,1,0,0,0,461,462,5,0,0,1,462, - 63,1,0,0,0,463,464,5,29,0,0,464,465,5,87,0,0,465,466,5,80,0,0,466, - 467,3,66,33,0,467,65,1,0,0,0,468,469,5,7,0,0,469,478,5,1,0,0,470, - 479,3,72,36,0,471,479,3,76,38,0,472,479,3,78,39,0,473,479,3,86,43, - 0,474,479,3,88,44,0,475,479,3,68,34,0,476,479,3,70,35,0,477,479, - 3,74,37,0,478,470,1,0,0,0,478,471,1,0,0,0,478,472,1,0,0,0,478,473, - 1,0,0,0,478,474,1,0,0,0,478,475,1,0,0,0,478,476,1,0,0,0,478,477, - 1,0,0,0,479,480,1,0,0,0,480,478,1,0,0,0,480,481,1,0,0,0,481,482, - 1,0,0,0,482,483,5,2,0,0,483,67,1,0,0,0,484,485,5,38,0,0,485,486, - 
5,47,0,0,486,491,5,87,0,0,487,488,5,72,0,0,488,490,3,92,46,0,489, - 487,1,0,0,0,490,493,1,0,0,0,491,489,1,0,0,0,491,492,1,0,0,0,492, - 494,1,0,0,0,493,491,1,0,0,0,494,495,5,48,0,0,495,496,5,80,0,0,496, - 497,5,7,0,0,497,498,5,1,0,0,498,499,3,46,23,0,499,500,5,2,0,0,500, - 69,1,0,0,0,501,502,5,39,0,0,502,503,5,47,0,0,503,508,3,12,6,0,504, - 505,5,72,0,0,505,507,3,92,46,0,506,504,1,0,0,0,507,510,1,0,0,0,508, - 506,1,0,0,0,508,509,1,0,0,0,509,511,1,0,0,0,510,508,1,0,0,0,511, - 512,5,48,0,0,512,513,5,80,0,0,513,514,5,7,0,0,514,515,5,1,0,0,515, - 516,3,46,23,0,516,517,5,2,0,0,517,71,1,0,0,0,518,519,7,2,0,0,519, - 520,5,80,0,0,520,521,5,7,0,0,521,523,5,1,0,0,522,524,3,44,22,0,523, - 522,1,0,0,0,524,525,1,0,0,0,525,523,1,0,0,0,525,526,1,0,0,0,526, - 527,1,0,0,0,527,528,5,2,0,0,528,73,1,0,0,0,529,530,5,33,0,0,530, - 531,5,80,0,0,531,532,5,7,0,0,532,533,5,1,0,0,533,534,3,46,23,0,534, - 535,5,2,0,0,535,75,1,0,0,0,536,537,5,34,0,0,537,538,5,80,0,0,538, - 539,5,7,0,0,539,543,5,1,0,0,540,544,3,28,14,0,541,544,3,30,15,0, - 542,544,3,32,16,0,543,540,1,0,0,0,543,541,1,0,0,0,543,542,1,0,0, - 0,544,545,1,0,0,0,545,543,1,0,0,0,545,546,1,0,0,0,546,547,1,0,0, - 0,547,548,5,2,0,0,548,77,1,0,0,0,549,550,5,35,0,0,550,551,5,80,0, - 0,551,552,5,7,0,0,552,571,5,1,0,0,553,556,3,80,40,0,554,556,3,82, - 41,0,555,553,1,0,0,0,555,554,1,0,0,0,556,569,1,0,0,0,557,566,5,47, - 0,0,558,563,3,90,45,0,559,560,5,72,0,0,560,562,3,90,45,0,561,559, - 1,0,0,0,562,565,1,0,0,0,563,561,1,0,0,0,563,564,1,0,0,0,564,567, - 1,0,0,0,565,563,1,0,0,0,566,558,1,0,0,0,566,567,1,0,0,0,567,568, - 1,0,0,0,568,570,5,48,0,0,569,557,1,0,0,0,569,570,1,0,0,0,570,572, - 1,0,0,0,571,555,1,0,0,0,572,573,1,0,0,0,573,571,1,0,0,0,573,574, - 1,0,0,0,574,575,1,0,0,0,575,576,5,2,0,0,576,79,1,0,0,0,577,582,5, - 87,0,0,578,579,5,54,0,0,579,580,3,12,6,0,580,581,5,56,0,0,581,583, - 1,0,0,0,582,578,1,0,0,0,582,583,1,0,0,0,583,584,1,0,0,0,584,588, - 5,55,0,0,585,587,3,84,42,0,586,585,1,0,0,0,587,590,1,0,0,0,588,586, - 
1,0,0,0,588,589,1,0,0,0,589,591,1,0,0,0,590,588,1,0,0,0,591,592, - 5,40,0,0,592,593,5,7,0,0,593,81,1,0,0,0,594,599,5,87,0,0,595,596, - 5,54,0,0,596,597,3,12,6,0,597,598,5,56,0,0,598,600,1,0,0,0,599,595, - 1,0,0,0,599,600,1,0,0,0,600,601,1,0,0,0,601,602,3,0,0,0,602,603, - 5,55,0,0,603,604,5,37,0,0,604,605,5,7,0,0,605,83,1,0,0,0,606,609, - 5,41,0,0,607,609,5,42,0,0,608,606,1,0,0,0,608,607,1,0,0,0,609,85, - 1,0,0,0,610,611,5,36,0,0,611,612,5,80,0,0,612,613,5,7,0,0,613,630, - 5,1,0,0,614,627,5,40,0,0,615,624,5,47,0,0,616,621,3,90,45,0,617, - 618,5,72,0,0,618,620,3,90,45,0,619,617,1,0,0,0,620,623,1,0,0,0,621, - 619,1,0,0,0,621,622,1,0,0,0,622,625,1,0,0,0,623,621,1,0,0,0,624, - 616,1,0,0,0,624,625,1,0,0,0,625,626,1,0,0,0,626,628,5,48,0,0,627, - 615,1,0,0,0,627,628,1,0,0,0,628,631,1,0,0,0,629,631,5,37,0,0,630, - 614,1,0,0,0,630,629,1,0,0,0,631,632,1,0,0,0,632,633,5,7,0,0,633, - 634,5,2,0,0,634,87,1,0,0,0,635,636,5,13,0,0,636,637,5,87,0,0,637, - 646,5,47,0,0,638,643,3,90,45,0,639,640,5,72,0,0,640,642,3,90,45, - 0,641,639,1,0,0,0,642,645,1,0,0,0,643,641,1,0,0,0,643,644,1,0,0, - 0,644,647,1,0,0,0,645,643,1,0,0,0,646,638,1,0,0,0,646,647,1,0,0, - 0,647,648,1,0,0,0,648,650,5,48,0,0,649,651,3,0,0,0,650,649,1,0,0, - 0,650,651,1,0,0,0,651,652,1,0,0,0,652,653,5,80,0,0,653,654,5,7,0, - 0,654,655,5,1,0,0,655,656,3,46,23,0,656,657,5,2,0,0,657,89,1,0,0, - 0,658,659,5,87,0,0,659,660,3,0,0,0,660,91,1,0,0,0,661,662,5,87,0, - 0,662,663,5,74,0,0,663,664,7,3,0,0,664,93,1,0,0,0,73,100,103,111, + 12,1,12,1,12,3,12,258,8,12,1,13,1,13,1,13,1,13,1,13,5,13,265,8,13, + 10,13,12,13,268,9,13,3,13,270,8,13,1,13,1,13,1,14,3,14,275,8,14, + 1,14,1,14,1,14,1,14,1,14,1,14,3,14,283,8,14,1,14,5,14,286,8,14,10, + 14,12,14,289,9,14,1,14,1,14,1,15,1,15,1,15,1,15,3,15,297,8,15,1, + 15,5,15,300,8,15,10,15,12,15,303,9,15,1,15,1,15,1,16,1,16,1,16,1, + 16,1,16,1,16,1,16,1,16,1,16,5,16,316,8,16,10,16,12,16,319,9,16,1, + 16,3,16,322,8,16,1,16,1,16,1,17,1,17,3,17,328,8,17,1,18,1,18,1,18, + 
3,18,333,8,18,1,19,1,19,1,19,1,19,3,19,339,8,19,1,19,1,19,1,20,1, + 20,1,20,1,20,1,20,1,20,3,20,349,8,20,1,20,1,20,1,21,3,21,354,8,21, + 1,21,3,21,357,8,21,1,21,1,21,1,21,5,21,362,8,21,10,21,12,21,365, + 9,21,1,21,1,21,1,21,3,21,370,8,21,1,21,1,21,1,21,1,21,3,21,376,8, + 21,1,21,5,21,379,8,21,10,21,12,21,382,9,21,1,22,1,22,1,22,1,23,3, + 23,388,8,23,1,23,1,23,1,23,5,23,393,8,23,10,23,12,23,396,9,23,1, + 24,1,24,3,24,400,8,24,1,25,1,25,5,25,404,8,25,10,25,12,25,407,9, + 25,1,25,3,25,410,8,25,1,26,1,26,1,26,1,26,1,26,1,26,1,26,1,26,1, + 27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,28,1, + 28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,29,1,29,3,29,443,8,29,1, + 29,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30,1, + 30,1,30,1,31,1,31,4,31,462,8,31,11,31,12,31,463,1,31,1,31,1,32,1, + 32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1,33,1, + 33,4,33,483,8,33,11,33,12,33,484,1,33,1,33,1,34,1,34,1,34,1,34,1, + 34,5,34,494,8,34,10,34,12,34,497,9,34,1,34,1,34,1,34,1,34,1,34,1, + 34,1,34,1,35,1,35,1,35,1,35,1,35,5,35,511,8,35,10,35,12,35,514,9, + 35,1,35,1,35,1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4, + 36,528,8,36,11,36,12,36,529,1,36,1,36,1,37,1,37,1,37,1,37,1,37,1, + 37,1,37,1,38,1,38,1,38,1,38,1,38,1,38,1,38,4,38,548,8,38,11,38,12, + 38,549,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39,4,39,560,8,39,11, + 39,12,39,561,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3,40,571,8,40,1, + 40,1,40,1,40,1,40,1,40,1,40,5,40,579,8,40,10,40,12,40,582,9,40,3, + 40,584,8,40,1,40,3,40,587,8,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41, + 3,41,596,8,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,5,41,605,8,41,10, + 41,12,41,608,9,41,3,41,610,8,41,1,41,3,41,613,8,41,1,41,1,41,1,42, + 1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,5,42,626,8,42,10,42,12,42, + 629,9,42,3,42,631,8,42,1,42,3,42,634,8,42,1,42,3,42,637,8,42,1,42, + 1,42,1,42,1,43,1,43,1,43,1,43,1,43,1,43,5,43,648,8,43,10,43,12,43, + 651,9,43,3,43,653,8,43,1,43,1,43,3,43,657,8,43,1,43,1,43,1,43,1, + 
43,1,43,1,43,1,44,1,44,1,44,1,45,1,45,3,45,670,8,45,1,46,1,46,1, + 46,1,46,1,46,0,2,4,12,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, + 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, + 74,76,78,80,82,84,86,88,90,92,0,4,2,0,47,47,71,71,1,0,86,87,1,0, + 30,32,3,0,23,23,83,84,86,87,744,0,100,1,0,0,0,2,114,1,0,0,0,4,125, + 1,0,0,0,6,148,1,0,0,0,8,150,1,0,0,0,10,152,1,0,0,0,12,165,1,0,0, + 0,14,215,1,0,0,0,16,220,1,0,0,0,18,227,1,0,0,0,20,236,1,0,0,0,22, + 240,1,0,0,0,24,242,1,0,0,0,26,259,1,0,0,0,28,274,1,0,0,0,30,292, + 1,0,0,0,32,306,1,0,0,0,34,327,1,0,0,0,36,332,1,0,0,0,38,338,1,0, + 0,0,40,342,1,0,0,0,42,353,1,0,0,0,44,383,1,0,0,0,46,387,1,0,0,0, + 48,397,1,0,0,0,50,401,1,0,0,0,52,411,1,0,0,0,54,419,1,0,0,0,56,427, + 1,0,0,0,58,434,1,0,0,0,60,451,1,0,0,0,62,461,1,0,0,0,64,467,1,0, + 0,0,66,472,1,0,0,0,68,488,1,0,0,0,70,505,1,0,0,0,72,522,1,0,0,0, + 74,533,1,0,0,0,76,540,1,0,0,0,78,553,1,0,0,0,80,565,1,0,0,0,82,590, + 1,0,0,0,84,616,1,0,0,0,86,641,1,0,0,0,88,664,1,0,0,0,90,669,1,0, + 0,0,92,671,1,0,0,0,94,101,5,8,0,0,95,101,5,9,0,0,96,101,5,10,0,0, + 97,101,5,11,0,0,98,101,5,12,0,0,99,101,3,4,2,0,100,94,1,0,0,0,100, + 95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100,99,1, + 0,0,0,101,1,1,0,0,0,102,104,5,71,0,0,103,102,1,0,0,0,103,104,1,0, + 0,0,104,105,1,0,0,0,105,106,5,45,0,0,106,107,5,86,0,0,107,108,5, + 75,0,0,108,109,5,86,0,0,109,115,5,46,0,0,110,112,7,0,0,0,111,110, + 1,0,0,0,111,112,1,0,0,0,112,113,1,0,0,0,113,115,7,1,0,0,114,103, + 1,0,0,0,114,111,1,0,0,0,115,3,1,0,0,0,116,117,6,2,-1,0,117,118,5, + 45,0,0,118,119,3,4,2,0,119,120,5,46,0,0,120,126,1,0,0,0,121,122, + 5,86,0,0,122,123,5,75,0,0,123,126,3,4,2,2,124,126,5,85,0,0,125,116, + 1,0,0,0,125,121,1,0,0,0,125,124,1,0,0,0,126,138,1,0,0,0,127,130, + 10,3,0,0,128,131,5,73,0,0,129,131,5,75,0,0,130,128,1,0,0,0,130,129, + 1,0,0,0,131,132,1,0,0,0,132,137,3,4,2,4,133,134,10,4,0,0,134,135, + 5,74,0,0,135,137,3,2,1,0,136,127,1,0,0,0,136,133,1,0,0,0,137,140, + 
1,0,0,0,138,136,1,0,0,0,138,139,1,0,0,0,139,5,1,0,0,0,140,138,1, + 0,0,0,141,149,5,41,0,0,142,149,5,42,0,0,143,144,5,43,0,0,144,145, + 3,8,4,0,145,146,5,79,0,0,146,147,3,10,5,0,147,149,1,0,0,0,148,141, + 1,0,0,0,148,142,1,0,0,0,148,143,1,0,0,0,149,7,1,0,0,0,150,151,5, + 85,0,0,151,9,1,0,0,0,152,153,5,85,0,0,153,11,1,0,0,0,154,155,6,6, + -1,0,155,156,5,45,0,0,156,157,3,12,6,0,157,158,5,46,0,0,158,166, + 1,0,0,0,159,160,3,16,8,0,160,161,3,12,6,9,161,166,1,0,0,0,162,163, + 5,26,0,0,163,166,3,12,6,4,164,166,3,14,7,0,165,154,1,0,0,0,165,159, + 1,0,0,0,165,162,1,0,0,0,165,164,1,0,0,0,166,203,1,0,0,0,167,168, + 10,10,0,0,168,169,5,74,0,0,169,202,3,12,6,10,170,174,10,8,0,0,171, + 175,5,73,0,0,172,175,5,75,0,0,173,175,5,76,0,0,174,171,1,0,0,0,174, + 172,1,0,0,0,174,173,1,0,0,0,175,176,1,0,0,0,176,202,3,12,6,9,177, + 180,10,7,0,0,178,181,5,47,0,0,179,181,5,71,0,0,180,178,1,0,0,0,180, + 179,1,0,0,0,181,182,1,0,0,0,182,202,3,12,6,8,183,184,10,6,0,0,184, + 185,3,18,9,0,185,186,3,12,6,7,186,202,1,0,0,0,187,188,10,5,0,0,188, + 189,3,20,10,0,189,190,3,12,6,6,190,202,1,0,0,0,191,192,10,3,0,0, + 192,193,3,22,11,0,193,194,3,12,6,4,194,202,1,0,0,0,195,196,10,2, + 0,0,196,197,5,77,0,0,197,198,3,12,6,0,198,199,5,78,0,0,199,200,3, + 12,6,3,200,202,1,0,0,0,201,167,1,0,0,0,201,170,1,0,0,0,201,177,1, + 0,0,0,201,183,1,0,0,0,201,187,1,0,0,0,201,191,1,0,0,0,201,195,1, + 0,0,0,202,205,1,0,0,0,203,201,1,0,0,0,203,204,1,0,0,0,204,13,1,0, + 0,0,205,203,1,0,0,0,206,216,3,26,13,0,207,216,5,83,0,0,208,210,7, + 1,0,0,209,211,3,24,12,0,210,209,1,0,0,0,210,211,1,0,0,0,211,216, + 1,0,0,0,212,216,5,84,0,0,213,216,5,23,0,0,214,216,3,24,12,0,215, + 206,1,0,0,0,215,207,1,0,0,0,215,208,1,0,0,0,215,212,1,0,0,0,215, + 213,1,0,0,0,215,214,1,0,0,0,216,15,1,0,0,0,217,221,5,47,0,0,218, + 221,5,71,0,0,219,221,5,48,0,0,220,217,1,0,0,0,220,218,1,0,0,0,220, + 219,1,0,0,0,221,17,1,0,0,0,222,228,5,51,0,0,223,228,5,50,0,0,224, + 228,5,49,0,0,225,228,5,57,0,0,226,228,5,58,0,0,227,222,1,0,0,0,227, + 
223,1,0,0,0,227,224,1,0,0,0,227,225,1,0,0,0,227,226,1,0,0,0,228, + 19,1,0,0,0,229,237,5,59,0,0,230,237,5,61,0,0,231,237,5,66,0,0,232, + 237,5,67,0,0,233,237,5,68,0,0,234,237,5,69,0,0,235,237,5,60,0,0, + 236,229,1,0,0,0,236,230,1,0,0,0,236,231,1,0,0,0,236,232,1,0,0,0, + 236,233,1,0,0,0,236,234,1,0,0,0,236,235,1,0,0,0,237,21,1,0,0,0,238, + 241,5,24,0,0,239,241,5,25,0,0,240,238,1,0,0,0,240,239,1,0,0,0,241, + 23,1,0,0,0,242,247,5,85,0,0,243,244,5,52,0,0,244,245,3,90,45,0,245, + 246,5,54,0,0,246,248,1,0,0,0,247,243,1,0,0,0,247,248,1,0,0,0,248, + 252,1,0,0,0,249,251,5,81,0,0,250,249,1,0,0,0,251,254,1,0,0,0,252, + 250,1,0,0,0,252,253,1,0,0,0,253,257,1,0,0,0,254,252,1,0,0,0,255, + 256,5,82,0,0,256,258,3,24,12,0,257,255,1,0,0,0,257,258,1,0,0,0,258, + 25,1,0,0,0,259,260,5,85,0,0,260,269,5,45,0,0,261,266,3,12,6,0,262, + 263,5,70,0,0,263,265,3,12,6,0,264,262,1,0,0,0,265,268,1,0,0,0,266, + 264,1,0,0,0,266,267,1,0,0,0,267,270,1,0,0,0,268,266,1,0,0,0,269, + 261,1,0,0,0,269,270,1,0,0,0,270,271,1,0,0,0,271,272,5,46,0,0,272, + 27,1,0,0,0,273,275,5,27,0,0,274,273,1,0,0,0,274,275,1,0,0,0,275, + 276,1,0,0,0,276,277,5,14,0,0,277,278,5,85,0,0,278,279,3,0,0,0,279, + 280,5,72,0,0,280,282,3,12,6,0,281,283,5,80,0,0,282,281,1,0,0,0,282, + 283,1,0,0,0,283,287,1,0,0,0,284,286,3,6,3,0,285,284,1,0,0,0,286, + 289,1,0,0,0,287,285,1,0,0,0,287,288,1,0,0,0,288,290,1,0,0,0,289, + 287,1,0,0,0,290,291,5,7,0,0,291,29,1,0,0,0,292,293,3,24,12,0,293, + 294,5,72,0,0,294,296,3,12,6,0,295,297,5,80,0,0,296,295,1,0,0,0,296, + 297,1,0,0,0,297,301,1,0,0,0,298,300,3,6,3,0,299,298,1,0,0,0,300, + 303,1,0,0,0,301,299,1,0,0,0,301,302,1,0,0,0,302,304,1,0,0,0,303, + 301,1,0,0,0,304,305,5,7,0,0,305,31,1,0,0,0,306,307,5,28,0,0,307, + 308,3,24,12,0,308,309,5,72,0,0,309,317,3,12,6,0,310,311,5,3,0,0, + 311,312,3,24,12,0,312,313,5,72,0,0,313,314,3,12,6,0,314,316,1,0, + 0,0,315,310,1,0,0,0,316,319,1,0,0,0,317,315,1,0,0,0,317,318,1,0, + 0,0,318,321,1,0,0,0,319,317,1,0,0,0,320,322,5,80,0,0,321,320,1,0, + 
0,0,321,322,1,0,0,0,322,323,1,0,0,0,323,324,5,7,0,0,324,33,1,0,0, + 0,325,328,3,38,19,0,326,328,3,36,18,0,327,325,1,0,0,0,327,326,1, + 0,0,0,328,35,1,0,0,0,329,333,3,50,25,0,330,333,3,58,29,0,331,333, + 3,60,30,0,332,329,1,0,0,0,332,330,1,0,0,0,332,331,1,0,0,0,333,37, + 1,0,0,0,334,339,3,40,20,0,335,339,3,26,13,0,336,339,3,42,21,0,337, + 339,3,48,24,0,338,334,1,0,0,0,338,335,1,0,0,0,338,336,1,0,0,0,338, + 337,1,0,0,0,339,340,1,0,0,0,340,341,5,7,0,0,341,39,1,0,0,0,342,348, + 3,24,12,0,343,349,5,72,0,0,344,349,5,62,0,0,345,349,5,63,0,0,346, + 349,5,64,0,0,347,349,5,65,0,0,348,343,1,0,0,0,348,344,1,0,0,0,348, + 345,1,0,0,0,348,346,1,0,0,0,348,347,1,0,0,0,349,350,1,0,0,0,350, + 351,3,12,6,0,351,41,1,0,0,0,352,354,5,27,0,0,353,352,1,0,0,0,353, + 354,1,0,0,0,354,356,1,0,0,0,355,357,5,14,0,0,356,355,1,0,0,0,356, + 357,1,0,0,0,357,358,1,0,0,0,358,363,3,24,12,0,359,360,5,70,0,0,360, + 362,3,24,12,0,361,359,1,0,0,0,362,365,1,0,0,0,363,361,1,0,0,0,363, + 364,1,0,0,0,364,366,1,0,0,0,365,363,1,0,0,0,366,369,3,0,0,0,367, + 368,5,72,0,0,368,370,3,12,6,0,369,367,1,0,0,0,369,370,1,0,0,0,370, + 375,1,0,0,0,371,372,5,55,0,0,372,373,3,12,6,0,373,374,5,56,0,0,374, + 376,1,0,0,0,375,371,1,0,0,0,375,376,1,0,0,0,376,380,1,0,0,0,377, + 379,3,6,3,0,378,377,1,0,0,0,379,382,1,0,0,0,380,378,1,0,0,0,380, + 381,1,0,0,0,381,43,1,0,0,0,382,380,1,0,0,0,383,384,3,42,21,0,384, + 385,5,7,0,0,385,45,1,0,0,0,386,388,5,7,0,0,387,386,1,0,0,0,387,388, + 1,0,0,0,388,389,1,0,0,0,389,394,3,34,17,0,390,393,5,7,0,0,391,393, + 3,34,17,0,392,390,1,0,0,0,392,391,1,0,0,0,393,396,1,0,0,0,394,392, + 1,0,0,0,394,395,1,0,0,0,395,47,1,0,0,0,396,394,1,0,0,0,397,399,5, + 15,0,0,398,400,3,12,6,0,399,398,1,0,0,0,399,400,1,0,0,0,400,49,1, + 0,0,0,401,405,3,52,26,0,402,404,3,54,27,0,403,402,1,0,0,0,404,407, + 1,0,0,0,405,403,1,0,0,0,405,406,1,0,0,0,406,409,1,0,0,0,407,405, + 1,0,0,0,408,410,3,56,28,0,409,408,1,0,0,0,409,410,1,0,0,0,410,51, + 1,0,0,0,411,412,5,16,0,0,412,413,3,12,6,0,413,414,5,78,0,0,414,415, + 
5,7,0,0,415,416,5,1,0,0,416,417,3,46,23,0,417,418,5,2,0,0,418,53, + 1,0,0,0,419,420,5,17,0,0,420,421,3,12,6,0,421,422,5,78,0,0,422,423, + 5,7,0,0,423,424,5,1,0,0,424,425,3,46,23,0,425,426,5,2,0,0,426,55, + 1,0,0,0,427,428,5,18,0,0,428,429,5,78,0,0,429,430,5,7,0,0,430,431, + 5,1,0,0,431,432,3,46,23,0,432,433,5,2,0,0,433,57,1,0,0,0,434,435, + 5,19,0,0,435,436,5,85,0,0,436,437,5,21,0,0,437,438,3,12,6,0,438, + 439,5,44,0,0,439,440,3,12,6,0,440,442,5,22,0,0,441,443,5,71,0,0, + 442,441,1,0,0,0,442,443,1,0,0,0,443,444,1,0,0,0,444,445,7,1,0,0, + 445,446,5,78,0,0,446,447,5,7,0,0,447,448,5,1,0,0,448,449,3,46,23, + 0,449,450,5,2,0,0,450,59,1,0,0,0,451,452,5,20,0,0,452,453,3,12,6, + 0,453,454,5,78,0,0,454,455,5,7,0,0,455,456,5,1,0,0,456,457,3,46, + 23,0,457,458,5,2,0,0,458,61,1,0,0,0,459,462,3,64,32,0,460,462,5, + 7,0,0,461,459,1,0,0,0,461,460,1,0,0,0,462,463,1,0,0,0,463,461,1, + 0,0,0,463,464,1,0,0,0,464,465,1,0,0,0,465,466,5,0,0,1,466,63,1,0, + 0,0,467,468,5,29,0,0,468,469,5,85,0,0,469,470,5,78,0,0,470,471,3, + 66,33,0,471,65,1,0,0,0,472,473,5,7,0,0,473,482,5,1,0,0,474,483,3, + 72,36,0,475,483,3,76,38,0,476,483,3,78,39,0,477,483,3,84,42,0,478, + 483,3,86,43,0,479,483,3,68,34,0,480,483,3,70,35,0,481,483,3,74,37, + 0,482,474,1,0,0,0,482,475,1,0,0,0,482,476,1,0,0,0,482,477,1,0,0, + 0,482,478,1,0,0,0,482,479,1,0,0,0,482,480,1,0,0,0,482,481,1,0,0, + 0,483,484,1,0,0,0,484,482,1,0,0,0,484,485,1,0,0,0,485,486,1,0,0, + 0,486,487,5,2,0,0,487,67,1,0,0,0,488,489,5,38,0,0,489,490,5,45,0, + 0,490,495,3,24,12,0,491,492,5,70,0,0,492,494,3,92,46,0,493,491,1, + 0,0,0,494,497,1,0,0,0,495,493,1,0,0,0,495,496,1,0,0,0,496,498,1, + 0,0,0,497,495,1,0,0,0,498,499,5,46,0,0,499,500,5,78,0,0,500,501, + 5,7,0,0,501,502,5,1,0,0,502,503,3,46,23,0,503,504,5,2,0,0,504,69, + 1,0,0,0,505,506,5,39,0,0,506,507,5,45,0,0,507,512,3,12,6,0,508,509, + 5,70,0,0,509,511,3,92,46,0,510,508,1,0,0,0,511,514,1,0,0,0,512,510, + 1,0,0,0,512,513,1,0,0,0,513,515,1,0,0,0,514,512,1,0,0,0,515,516, + 
5,46,0,0,516,517,5,78,0,0,517,518,5,7,0,0,518,519,5,1,0,0,519,520, + 3,46,23,0,520,521,5,2,0,0,521,71,1,0,0,0,522,523,7,2,0,0,523,524, + 5,78,0,0,524,525,5,7,0,0,525,527,5,1,0,0,526,528,3,44,22,0,527,526, + 1,0,0,0,528,529,1,0,0,0,529,527,1,0,0,0,529,530,1,0,0,0,530,531, + 1,0,0,0,531,532,5,2,0,0,532,73,1,0,0,0,533,534,5,33,0,0,534,535, + 5,78,0,0,535,536,5,7,0,0,536,537,5,1,0,0,537,538,3,46,23,0,538,539, + 5,2,0,0,539,75,1,0,0,0,540,541,5,34,0,0,541,542,5,78,0,0,542,543, + 5,7,0,0,543,547,5,1,0,0,544,548,3,28,14,0,545,548,3,30,15,0,546, + 548,3,32,16,0,547,544,1,0,0,0,547,545,1,0,0,0,547,546,1,0,0,0,548, + 549,1,0,0,0,549,547,1,0,0,0,549,550,1,0,0,0,550,551,1,0,0,0,551, + 552,5,2,0,0,552,77,1,0,0,0,553,554,5,35,0,0,554,555,5,78,0,0,555, + 556,5,7,0,0,556,559,5,1,0,0,557,560,3,80,40,0,558,560,3,82,41,0, + 559,557,1,0,0,0,559,558,1,0,0,0,560,561,1,0,0,0,561,559,1,0,0,0, + 561,562,1,0,0,0,562,563,1,0,0,0,563,564,5,2,0,0,564,79,1,0,0,0,565, + 570,5,85,0,0,566,567,5,52,0,0,567,568,3,12,6,0,568,569,5,54,0,0, + 569,571,1,0,0,0,570,566,1,0,0,0,570,571,1,0,0,0,571,572,1,0,0,0, + 572,573,5,53,0,0,573,586,5,40,0,0,574,583,5,45,0,0,575,580,3,88, + 44,0,576,577,5,70,0,0,577,579,3,88,44,0,578,576,1,0,0,0,579,582, + 1,0,0,0,580,578,1,0,0,0,580,581,1,0,0,0,581,584,1,0,0,0,582,580, + 1,0,0,0,583,575,1,0,0,0,583,584,1,0,0,0,584,585,1,0,0,0,585,587, + 5,46,0,0,586,574,1,0,0,0,586,587,1,0,0,0,587,588,1,0,0,0,588,589, + 5,7,0,0,589,81,1,0,0,0,590,595,5,85,0,0,591,592,5,52,0,0,592,593, + 3,12,6,0,593,594,5,54,0,0,594,596,1,0,0,0,595,591,1,0,0,0,595,596, + 1,0,0,0,596,597,1,0,0,0,597,598,3,0,0,0,598,599,5,53,0,0,599,612, + 5,37,0,0,600,609,5,45,0,0,601,606,3,88,44,0,602,603,5,70,0,0,603, + 605,3,88,44,0,604,602,1,0,0,0,605,608,1,0,0,0,606,604,1,0,0,0,606, + 607,1,0,0,0,607,610,1,0,0,0,608,606,1,0,0,0,609,601,1,0,0,0,609, + 610,1,0,0,0,610,611,1,0,0,0,611,613,5,46,0,0,612,600,1,0,0,0,612, + 613,1,0,0,0,613,614,1,0,0,0,614,615,5,7,0,0,615,83,1,0,0,0,616,617, + 
5,36,0,0,617,618,5,78,0,0,618,619,5,7,0,0,619,636,5,1,0,0,620,633, + 5,40,0,0,621,630,5,45,0,0,622,627,3,88,44,0,623,624,5,70,0,0,624, + 626,3,88,44,0,625,623,1,0,0,0,626,629,1,0,0,0,627,625,1,0,0,0,627, + 628,1,0,0,0,628,631,1,0,0,0,629,627,1,0,0,0,630,622,1,0,0,0,630, + 631,1,0,0,0,631,632,1,0,0,0,632,634,5,46,0,0,633,621,1,0,0,0,633, + 634,1,0,0,0,634,637,1,0,0,0,635,637,5,37,0,0,636,620,1,0,0,0,636, + 635,1,0,0,0,637,638,1,0,0,0,638,639,5,7,0,0,639,640,5,2,0,0,640, + 85,1,0,0,0,641,642,5,13,0,0,642,643,5,85,0,0,643,652,5,45,0,0,644, + 649,3,88,44,0,645,646,5,70,0,0,646,648,3,88,44,0,647,645,1,0,0,0, + 648,651,1,0,0,0,649,647,1,0,0,0,649,650,1,0,0,0,650,653,1,0,0,0, + 651,649,1,0,0,0,652,644,1,0,0,0,652,653,1,0,0,0,653,654,1,0,0,0, + 654,656,5,46,0,0,655,657,3,0,0,0,656,655,1,0,0,0,656,657,1,0,0,0, + 657,658,1,0,0,0,658,659,5,78,0,0,659,660,5,7,0,0,660,661,5,1,0,0, + 661,662,3,46,23,0,662,663,5,2,0,0,663,87,1,0,0,0,664,665,5,85,0, + 0,665,666,3,0,0,0,666,89,1,0,0,0,667,670,3,88,44,0,668,670,3,12, + 6,0,669,667,1,0,0,0,669,668,1,0,0,0,670,91,1,0,0,0,671,672,5,85, + 0,0,672,673,5,72,0,0,673,674,7,3,0,0,674,93,1,0,0,0,76,100,103,111, 114,125,130,136,138,148,165,174,180,201,203,210,215,220,227,236, - 240,247,252,262,265,270,278,283,292,297,313,317,323,328,334,344, - 349,352,359,365,371,376,383,388,390,395,401,405,438,457,459,478, - 480,491,508,525,543,545,555,563,566,569,573,582,588,599,608,621, - 624,627,630,643,646,650 + 240,247,252,257,266,269,274,282,287,296,301,317,321,327,332,338, + 348,353,356,363,369,375,380,387,392,394,399,405,409,442,461,463, + 482,484,495,512,529,547,549,559,561,570,580,583,586,595,606,609, + 612,627,630,633,636,649,652,656,669 ] class PyNestMLParser ( Parser ): @@ -277,14 +281,13 @@ class PyNestMLParser ( Parser ): "'and'", "'or'", "'not'", "'recordable'", "'kernel'", "'model'", "'state'", "'parameters'", "'internals'", "'update'", "'equations'", "'input'", "'output'", "'continuous'", - "'onReceive'", "'onCondition'", "'spike'", 
"'inhibitory'", - "'excitatory'", "'@homogeneous'", "'@heterogeneous'", - "'@'", "'...'", "'('", "')'", "'+'", "'~'", "'|'", - "'^'", "'&'", "'['", "'<-'", "']'", "'[['", "']]'", - "'<<'", "'>>'", "'<'", "'>'", "'<='", "'+='", "'-='", - "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", "','", - "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", "':'", - "'::'", "';'", "'''", "'.'" ] + "'onReceive'", "'onCondition'", "'spike'", "'@homogeneous'", + "'@heterogeneous'", "'@'", "'...'", "'('", "')'", "'+'", + "'~'", "'|'", "'^'", "'&'", "'['", "'<-'", "']'", "'[['", + "']]'", "'<<'", "'>>'", "'<'", "'>'", "'<='", "'+='", + "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", + "','", "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", + "':'", "'::'", "';'", "'''", "'.'" ] symbolicNames = [ "", "INDENT", "DEDENT", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "SL_COMMENT", "NEWLINE", "INTEGER_KEYWORD", @@ -297,20 +300,19 @@ class PyNestMLParser ( Parser ): "STATE_KEYWORD", "PARAMETERS_KEYWORD", "INTERNALS_KEYWORD", "UPDATE_KEYWORD", "EQUATIONS_KEYWORD", "INPUT_KEYWORD", "OUTPUT_KEYWORD", "CONTINUOUS_KEYWORD", "ON_RECEIVE_KEYWORD", - "ON_CONDITION_KEYWORD", "SPIKE_KEYWORD", "INHIBITORY_KEYWORD", - "EXCITATORY_KEYWORD", "DECORATOR_HOMOGENEOUS", "DECORATOR_HETEROGENEOUS", - "AT", "ELLIPSIS", "LEFT_PAREN", "RIGHT_PAREN", "PLUS", - "TILDE", "PIPE", "CARET", "AMPERSAND", "LEFT_SQUARE_BRACKET", - "LEFT_ANGLE_MINUS", "RIGHT_SQUARE_BRACKET", "LEFT_LEFT_SQUARE", - "RIGHT_RIGHT_SQUARE", "LEFT_LEFT_ANGLE", "RIGHT_RIGHT_ANGLE", - "LEFT_ANGLE", "RIGHT_ANGLE", "LEFT_ANGLE_EQUALS", - "PLUS_EQUALS", "MINUS_EQUALS", "STAR_EQUALS", "FORWARD_SLASH_EQUALS", - "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", - "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", - "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", - "COLON", "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", - "FULLSTOP", "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", - "UNSIGNED_INTEGER", "FLOAT" ] + 
"ON_CONDITION_KEYWORD", "SPIKE_KEYWORD", "DECORATOR_HOMOGENEOUS", + "DECORATOR_HETEROGENEOUS", "AT", "ELLIPSIS", "LEFT_PAREN", + "RIGHT_PAREN", "PLUS", "TILDE", "PIPE", "CARET", "AMPERSAND", + "LEFT_SQUARE_BRACKET", "LEFT_ANGLE_MINUS", "RIGHT_SQUARE_BRACKET", + "LEFT_LEFT_SQUARE", "RIGHT_RIGHT_SQUARE", "LEFT_LEFT_ANGLE", + "RIGHT_RIGHT_ANGLE", "LEFT_ANGLE", "RIGHT_ANGLE", + "LEFT_ANGLE_EQUALS", "PLUS_EQUALS", "MINUS_EQUALS", + "STAR_EQUALS", "FORWARD_SLASH_EQUALS", "EQUALS_EQUALS", + "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", + "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", + "PERCENT", "QUESTION", "COLON", "DOUBLE_COLON", "SEMICOLON", + "DIFFERENTIAL_ORDER", "FULLSTOP", "BOOLEAN_LITERAL", + "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", "FLOAT" ] RULE_dataType = 0 RULE_unitTypeExponent = 1 @@ -354,10 +356,10 @@ class PyNestMLParser ( Parser ): RULE_inputBlock = 39 RULE_spikeInputPort = 40 RULE_continuousInputPort = 41 - RULE_inputQualifier = 42 - RULE_outputBlock = 43 - RULE_function = 44 - RULE_parameter = 45 + RULE_outputBlock = 42 + RULE_function = 43 + RULE_parameter = 44 + RULE_expressionOrParameter = 45 RULE_constParameter = 46 ruleNames = [ "dataType", "unitTypeExponent", "unitType", "anyDecorator", @@ -371,7 +373,7 @@ class PyNestMLParser ( Parser ): "nestMLCompilationUnit", "model", "modelBody", "onReceiveBlock", "onConditionBlock", "blockWithVariables", "updateBlock", "equationsBlock", "inputBlock", "spikeInputPort", "continuousInputPort", - "inputQualifier", "outputBlock", "function", "parameter", + "outputBlock", "function", "parameter", "expressionOrParameter", "constParameter" ] EOF = Token.EOF @@ -415,59 +417,57 @@ class PyNestMLParser ( Parser ): ON_RECEIVE_KEYWORD=38 ON_CONDITION_KEYWORD=39 SPIKE_KEYWORD=40 - INHIBITORY_KEYWORD=41 - EXCITATORY_KEYWORD=42 - DECORATOR_HOMOGENEOUS=43 - DECORATOR_HETEROGENEOUS=44 - AT=45 - ELLIPSIS=46 - LEFT_PAREN=47 - RIGHT_PAREN=48 - PLUS=49 - TILDE=50 - PIPE=51 - 
CARET=52 - AMPERSAND=53 - LEFT_SQUARE_BRACKET=54 - LEFT_ANGLE_MINUS=55 - RIGHT_SQUARE_BRACKET=56 - LEFT_LEFT_SQUARE=57 - RIGHT_RIGHT_SQUARE=58 - LEFT_LEFT_ANGLE=59 - RIGHT_RIGHT_ANGLE=60 - LEFT_ANGLE=61 - RIGHT_ANGLE=62 - LEFT_ANGLE_EQUALS=63 - PLUS_EQUALS=64 - MINUS_EQUALS=65 - STAR_EQUALS=66 - FORWARD_SLASH_EQUALS=67 - EQUALS_EQUALS=68 - EXCLAMATION_EQUALS=69 - LEFT_ANGLE_RIGHT_ANGLE=70 - RIGHT_ANGLE_EQUALS=71 - COMMA=72 - MINUS=73 - EQUALS=74 - STAR=75 - STAR_STAR=76 - FORWARD_SLASH=77 - PERCENT=78 - QUESTION=79 - COLON=80 - DOUBLE_COLON=81 - SEMICOLON=82 - DIFFERENTIAL_ORDER=83 - FULLSTOP=84 - BOOLEAN_LITERAL=85 - STRING_LITERAL=86 - NAME=87 - UNSIGNED_INTEGER=88 - FLOAT=89 + DECORATOR_HOMOGENEOUS=41 + DECORATOR_HETEROGENEOUS=42 + AT=43 + ELLIPSIS=44 + LEFT_PAREN=45 + RIGHT_PAREN=46 + PLUS=47 + TILDE=48 + PIPE=49 + CARET=50 + AMPERSAND=51 + LEFT_SQUARE_BRACKET=52 + LEFT_ANGLE_MINUS=53 + RIGHT_SQUARE_BRACKET=54 + LEFT_LEFT_SQUARE=55 + RIGHT_RIGHT_SQUARE=56 + LEFT_LEFT_ANGLE=57 + RIGHT_RIGHT_ANGLE=58 + LEFT_ANGLE=59 + RIGHT_ANGLE=60 + LEFT_ANGLE_EQUALS=61 + PLUS_EQUALS=62 + MINUS_EQUALS=63 + STAR_EQUALS=64 + FORWARD_SLASH_EQUALS=65 + EQUALS_EQUALS=66 + EXCLAMATION_EQUALS=67 + LEFT_ANGLE_RIGHT_ANGLE=68 + RIGHT_ANGLE_EQUALS=69 + COMMA=70 + MINUS=71 + EQUALS=72 + STAR=73 + STAR_STAR=74 + FORWARD_SLASH=75 + PERCENT=76 + QUESTION=77 + COLON=78 + DOUBLE_COLON=79 + SEMICOLON=80 + DIFFERENTIAL_ORDER=81 + FULLSTOP=82 + BOOLEAN_LITERAL=83 + STRING_LITERAL=84 + NAME=85 + UNSIGNED_INTEGER=86 + FLOAT=87 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.13.2") + self.checkVersion("4.13.1") self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) self._predicates = None @@ -551,7 +551,7 @@ def dataType(self): self.state = 98 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass - elif token in [47, 87, 88]: + elif token in [45, 85, 86]: 
self.enterOuterAlt(localctx, 6) self.state = 99 localctx.unit = self.unitType(0) @@ -631,7 +631,7 @@ def unitTypeExponent(self): self.state = 103 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==73: + if _la==71: self.state = 102 localctx.negative = self.match(PyNestMLParser.MINUS) @@ -653,10 +653,10 @@ def unitTypeExponent(self): self.state = 111 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==49 or _la==73: + if _la==47 or _la==71: self.state = 110 _la = self._input.LA(1) - if not(_la==49 or _la==73): + if not(_la==47 or _la==71): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -665,7 +665,7 @@ def unitTypeExponent(self): self.state = 113 _la = self._input.LA(1) - if not(_la==88 or _la==89): + if not(_la==86 or _la==87): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -756,7 +756,7 @@ def unitType(self, _p:int=0): self.state = 125 self._errHandler.sync(self) token = self._input.LA(1) - if token in [47]: + if token in [45]: self.state = 117 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) self.state = 118 @@ -764,7 +764,7 @@ def unitType(self, _p:int=0): self.state = 119 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [88]: + elif token in [86]: self.state = 121 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) self.state = 122 @@ -772,7 +772,7 @@ def unitType(self, _p:int=0): self.state = 123 localctx.right = self.unitType(2) pass - elif token in [87]: + elif token in [85]: self.state = 124 localctx.unit = self.match(PyNestMLParser.NAME) pass @@ -802,11 +802,11 @@ def unitType(self, _p:int=0): self.state = 130 self._errHandler.sync(self) token = self._input.LA(1) - if token in [75]: + if token in [73]: self.state = 128 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [77]: + elif token in [75]: self.state = 129 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass 
@@ -892,17 +892,17 @@ def anyDecorator(self): self.state = 148 self._errHandler.sync(self) token = self._input.LA(1) - if token in [43]: + if token in [41]: self.enterOuterAlt(localctx, 1) self.state = 141 self.match(PyNestMLParser.DECORATOR_HOMOGENEOUS) pass - elif token in [44]: + elif token in [42]: self.enterOuterAlt(localctx, 2) self.state = 142 self.match(PyNestMLParser.DECORATOR_HETEROGENEOUS) pass - elif token in [45]: + elif token in [43]: self.enterOuterAlt(localctx, 3) self.state = 143 self.match(PyNestMLParser.AT) @@ -1110,7 +1110,7 @@ def expression(self, _p:int=0): self.state = 165 self._errHandler.sync(self) token = self._input.LA(1) - if token in [47]: + if token in [45]: self.state = 155 localctx.leftParentheses = self.match(PyNestMLParser.LEFT_PAREN) self.state = 156 @@ -1118,7 +1118,7 @@ def expression(self, _p:int=0): self.state = 157 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [49, 50, 73]: + elif token in [47, 48, 71]: self.state = 159 self.unaryOperator() self.state = 160 @@ -1130,7 +1130,7 @@ def expression(self, _p:int=0): self.state = 163 localctx.term = self.expression(4) pass - elif token in [23, 85, 86, 87, 88, 89]: + elif token in [23, 83, 84, 85, 86, 87]: self.state = 164 self.simpleExpression() pass @@ -1174,15 +1174,15 @@ def expression(self, _p:int=0): self.state = 174 self._errHandler.sync(self) token = self._input.LA(1) - if token in [75]: + if token in [73]: self.state = 171 localctx.timesOp = self.match(PyNestMLParser.STAR) pass - elif token in [77]: + elif token in [75]: self.state = 172 localctx.divOp = self.match(PyNestMLParser.FORWARD_SLASH) pass - elif token in [78]: + elif token in [76]: self.state = 173 localctx.moduloOp = self.match(PyNestMLParser.PERCENT) pass @@ -1204,11 +1204,11 @@ def expression(self, _p:int=0): self.state = 180 self._errHandler.sync(self) token = self._input.LA(1) - if token in [49]: + if token in [47]: self.state = 178 localctx.plusOp = 
self.match(PyNestMLParser.PLUS) pass - elif token in [73]: + elif token in [71]: self.state = 179 localctx.minusOp = self.match(PyNestMLParser.MINUS) pass @@ -1362,7 +1362,7 @@ def simpleExpression(self): self.enterOuterAlt(localctx, 3) self.state = 208 _la = self._input.LA(1) - if not(_la==88 or _la==89): + if not(_la==86 or _la==87): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1444,17 +1444,17 @@ def unaryOperator(self): self.state = 220 self._errHandler.sync(self) token = self._input.LA(1) - if token in [49]: + if token in [47]: self.enterOuterAlt(localctx, 1) self.state = 217 localctx.unaryPlus = self.match(PyNestMLParser.PLUS) pass - elif token in [73]: + elif token in [71]: self.enterOuterAlt(localctx, 2) self.state = 218 localctx.unaryMinus = self.match(PyNestMLParser.MINUS) pass - elif token in [50]: + elif token in [48]: self.enterOuterAlt(localctx, 3) self.state = 219 localctx.unaryTilde = self.match(PyNestMLParser.TILDE) @@ -1518,27 +1518,27 @@ def bitOperator(self): self.state = 227 self._errHandler.sync(self) token = self._input.LA(1) - if token in [53]: + if token in [51]: self.enterOuterAlt(localctx, 1) self.state = 222 localctx.bitAnd = self.match(PyNestMLParser.AMPERSAND) pass - elif token in [52]: + elif token in [50]: self.enterOuterAlt(localctx, 2) self.state = 223 localctx.bitXor = self.match(PyNestMLParser.CARET) pass - elif token in [51]: + elif token in [49]: self.enterOuterAlt(localctx, 3) self.state = 224 localctx.bitOr = self.match(PyNestMLParser.PIPE) pass - elif token in [59]: + elif token in [57]: self.enterOuterAlt(localctx, 4) self.state = 225 localctx.bitShiftLeft = self.match(PyNestMLParser.LEFT_LEFT_ANGLE) pass - elif token in [60]: + elif token in [58]: self.enterOuterAlt(localctx, 5) self.state = 226 localctx.bitShiftRight = self.match(PyNestMLParser.RIGHT_RIGHT_ANGLE) @@ -1610,37 +1610,37 @@ def comparisonOperator(self): self.state = 236 self._errHandler.sync(self) token = self._input.LA(1) 
- if token in [61]: + if token in [59]: self.enterOuterAlt(localctx, 1) self.state = 229 localctx.lt = self.match(PyNestMLParser.LEFT_ANGLE) pass - elif token in [63]: + elif token in [61]: self.enterOuterAlt(localctx, 2) self.state = 230 localctx.le = self.match(PyNestMLParser.LEFT_ANGLE_EQUALS) pass - elif token in [68]: + elif token in [66]: self.enterOuterAlt(localctx, 3) self.state = 231 localctx.eq = self.match(PyNestMLParser.EQUALS_EQUALS) pass - elif token in [69]: + elif token in [67]: self.enterOuterAlt(localctx, 4) self.state = 232 localctx.ne = self.match(PyNestMLParser.EXCLAMATION_EQUALS) pass - elif token in [70]: + elif token in [68]: self.enterOuterAlt(localctx, 5) self.state = 233 localctx.ne2 = self.match(PyNestMLParser.LEFT_ANGLE_RIGHT_ANGLE) pass - elif token in [71]: + elif token in [69]: self.enterOuterAlt(localctx, 6) self.state = 234 localctx.ge = self.match(PyNestMLParser.RIGHT_ANGLE_EQUALS) pass - elif token in [62]: + elif token in [60]: self.enterOuterAlt(localctx, 7) self.state = 235 localctx.gt = self.match(PyNestMLParser.RIGHT_ANGLE) @@ -1721,7 +1721,8 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.name = None # Token - self.vectorParameter = None # ExpressionContext + self.vectorParameter = None # ExpressionOrParameterContext + self.attribute = None # VariableContext def NAME(self): return self.getToken(PyNestMLParser.NAME, 0) @@ -1738,8 +1739,15 @@ def DIFFERENTIAL_ORDER(self, i:int=None): else: return self.getToken(PyNestMLParser.DIFFERENTIAL_ORDER, i) - def expression(self): - return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def FULLSTOP(self): + return self.getToken(PyNestMLParser.FULLSTOP, 0) + + def expressionOrParameter(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionOrParameterContext,0) + + + def variable(self): + return self.getTypedRuleContext(PyNestMLParser.VariableContext,0) def 
getRuleIndex(self): @@ -1769,7 +1777,7 @@ def variable(self): self.state = 243 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) self.state = 244 - localctx.vectorParameter = self.expression(0) + localctx.vectorParameter = self.expressionOrParameter() self.state = 245 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) @@ -1785,6 +1793,16 @@ def variable(self): self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,21,self._ctx) + self.state = 257 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,22,self._ctx) + if la_ == 1: + self.state = 255 + self.match(PyNestMLParser.FULLSTOP) + self.state = 256 + localctx.attribute = self.variable() + + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -1843,31 +1861,31 @@ def functionCall(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 255 + self.state = 259 localctx.calleeName = self.match(PyNestMLParser.NAME) - self.state = 256 + self.state = 260 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 265 + self.state = 269 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 1829587424116736) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & 126977) != 0): - self.state = 257 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 457396912652288) != 0) or ((((_la - 71)) & ~0x3f) == 0 and ((1 << (_la - 71)) & 126977) != 0): + self.state = 261 self.expression(0) - self.state = 262 + self.state = 266 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==72: - self.state = 258 + while _la==70: + self.state = 262 self.match(PyNestMLParser.COMMA) - self.state = 259 + self.state = 263 self.expression(0) - self.state = 264 + self.state = 268 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 267 + self.state = 271 self.match(PyNestMLParser.RIGHT_PAREN) except RecognitionException as re: localctx.exception = re @@ -1940,43 +1958,43 @@ 
def inlineExpression(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 270 + self.state = 274 self._errHandler.sync(self) _la = self._input.LA(1) if _la==27: - self.state = 269 + self.state = 273 localctx.recordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 272 + self.state = 276 self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 273 + self.state = 277 localctx.variableName = self.match(PyNestMLParser.NAME) - self.state = 274 + self.state = 278 self.dataType() - self.state = 275 + self.state = 279 self.match(PyNestMLParser.EQUALS) - self.state = 276 + self.state = 280 self.expression(0) - self.state = 278 + self.state = 282 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==82: - self.state = 277 + if _la==80: + self.state = 281 self.match(PyNestMLParser.SEMICOLON) - self.state = 283 + self.state = 287 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 280 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 15393162788864) != 0): + self.state = 284 localctx.decorator = self.anyDecorator() - self.state = 285 + self.state = 289 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 286 + self.state = 290 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2040,31 +2058,31 @@ def odeEquation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 288 + self.state = 292 localctx.lhs = self.variable() - self.state = 289 + self.state = 293 self.match(PyNestMLParser.EQUALS) - self.state = 290 + self.state = 294 localctx.rhs = self.expression(0) - self.state = 292 + self.state = 296 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==82: - self.state = 291 + if _la==80: + self.state = 295 self.match(PyNestMLParser.SEMICOLON) - self.state = 297 + self.state = 301 self._errHandler.sync(self) _la = self._input.LA(1) - while 
(((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 294 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 15393162788864) != 0): + self.state = 298 localctx.decorator = self.anyDecorator() - self.state = 299 + self.state = 303 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 300 + self.state = 304 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2136,39 +2154,39 @@ def kernel(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 302 + self.state = 306 self.match(PyNestMLParser.KERNEL_KEYWORD) - self.state = 303 + self.state = 307 self.variable() - self.state = 304 + self.state = 308 self.match(PyNestMLParser.EQUALS) - self.state = 305 + self.state = 309 self.expression(0) - self.state = 313 + self.state = 317 self._errHandler.sync(self) _la = self._input.LA(1) while _la==3: - self.state = 306 + self.state = 310 self.match(PyNestMLParser.KERNEL_JOINING) - self.state = 307 + self.state = 311 self.variable() - self.state = 308 + self.state = 312 self.match(PyNestMLParser.EQUALS) - self.state = 309 + self.state = 313 self.expression(0) - self.state = 315 + self.state = 319 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 317 + self.state = 321 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==82: - self.state = 316 + if _la==80: + self.state = 320 self.match(PyNestMLParser.SEMICOLON) - self.state = 319 + self.state = 323 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2211,17 +2229,17 @@ def stmt(self): localctx = PyNestMLParser.StmtContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_stmt) try: - self.state = 323 + self.state = 327 self._errHandler.sync(self) token = self._input.LA(1) - if token in [14, 15, 27, 87]: + if token in [14, 15, 27, 85]: self.enterOuterAlt(localctx, 1) - self.state = 321 + self.state = 325 self.smallStmt() pass elif 
token in [16, 19, 20]: self.enterOuterAlt(localctx, 2) - self.state = 322 + self.state = 326 self.compoundStmt() pass else: @@ -2272,22 +2290,22 @@ def compoundStmt(self): localctx = PyNestMLParser.CompoundStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 36, self.RULE_compoundStmt) try: - self.state = 328 + self.state = 332 self._errHandler.sync(self) token = self._input.LA(1) if token in [16]: self.enterOuterAlt(localctx, 1) - self.state = 325 + self.state = 329 self.ifStmt() pass elif token in [19]: self.enterOuterAlt(localctx, 2) - self.state = 326 + self.state = 330 self.forStmt() pass elif token in [20]: self.enterOuterAlt(localctx, 3) - self.state = 327 + self.state = 331 self.whileStmt() pass else: @@ -2346,31 +2364,31 @@ def smallStmt(self): self.enterRule(localctx, 38, self.RULE_smallStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 334 + self.state = 338 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,33,self._ctx) + la_ = self._interp.adaptivePredict(self._input,34,self._ctx) if la_ == 1: - self.state = 330 + self.state = 334 self.assignment() pass elif la_ == 2: - self.state = 331 + self.state = 335 self.functionCall() pass elif la_ == 3: - self.state = 332 + self.state = 336 self.declaration() pass elif la_ == 4: - self.state = 333 + self.state = 337 self.returnStmt() pass - self.state = 336 + self.state = 340 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2435,35 +2453,35 @@ def assignment(self): self.enterRule(localctx, 40, self.RULE_assignment) try: self.enterOuterAlt(localctx, 1) - self.state = 338 + self.state = 342 localctx.lhs_variable = self.variable() - self.state = 344 + self.state = 348 self._errHandler.sync(self) token = self._input.LA(1) - if token in [74]: - self.state = 339 + if token in [72]: + self.state = 343 localctx.directAssignment = self.match(PyNestMLParser.EQUALS) pass - elif token in [64]: - self.state = 340 + elif token in 
[62]: + self.state = 344 localctx.compoundSum = self.match(PyNestMLParser.PLUS_EQUALS) pass - elif token in [65]: - self.state = 341 + elif token in [63]: + self.state = 345 localctx.compoundMinus = self.match(PyNestMLParser.MINUS_EQUALS) pass - elif token in [66]: - self.state = 342 + elif token in [64]: + self.state = 346 localctx.compoundProduct = self.match(PyNestMLParser.STAR_EQUALS) pass - elif token in [67]: - self.state = 343 + elif token in [65]: + self.state = 347 localctx.compoundQuotient = self.match(PyNestMLParser.FORWARD_SLASH_EQUALS) pass else: raise NoViableAltException(self) - self.state = 346 + self.state = 350 self.expression(0) except RecognitionException as re: localctx.exception = re @@ -2551,67 +2569,67 @@ def declaration(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 349 + self.state = 353 self._errHandler.sync(self) _la = self._input.LA(1) if _la==27: - self.state = 348 + self.state = 352 localctx.isRecordable = self.match(PyNestMLParser.RECORDABLE_KEYWORD) - self.state = 352 + self.state = 356 self._errHandler.sync(self) _la = self._input.LA(1) if _la==14: - self.state = 351 + self.state = 355 localctx.isInlineExpression = self.match(PyNestMLParser.INLINE_KEYWORD) - self.state = 354 + self.state = 358 self.variable() - self.state = 359 + self.state = 363 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==72: - self.state = 355 + while _la==70: + self.state = 359 self.match(PyNestMLParser.COMMA) - self.state = 356 + self.state = 360 self.variable() - self.state = 361 + self.state = 365 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 362 + self.state = 366 self.dataType() - self.state = 365 + self.state = 369 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==74: - self.state = 363 + if _la==72: + self.state = 367 self.match(PyNestMLParser.EQUALS) - self.state = 364 + self.state = 368 localctx.rhs = self.expression(0) - self.state = 371 + self.state = 375 
self._errHandler.sync(self) _la = self._input.LA(1) - if _la==57: - self.state = 367 + if _la==55: + self.state = 371 self.match(PyNestMLParser.LEFT_LEFT_SQUARE) - self.state = 368 + self.state = 372 localctx.invariant = self.expression(0) - self.state = 369 + self.state = 373 self.match(PyNestMLParser.RIGHT_RIGHT_SQUARE) - self.state = 376 + self.state = 380 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 61572651155456) != 0): - self.state = 373 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 15393162788864) != 0): + self.state = 377 localctx.decorator = self.anyDecorator() - self.state = 378 + self.state = 382 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2656,9 +2674,9 @@ def declaration_newline(self): self.enterRule(localctx, 44, self.RULE_declaration_newline) try: self.enterOuterAlt(localctx, 1) - self.state = 379 + self.state = 383 self.declaration() - self.state = 380 + self.state = 384 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -2708,35 +2726,35 @@ def stmtsBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 383 + self.state = 387 self._errHandler.sync(self) _la = self._input.LA(1) if _la==7: - self.state = 382 + self.state = 386 self.match(PyNestMLParser.NEWLINE) - self.state = 385 + self.state = 389 self.stmt() - self.state = 390 + self.state = 394 self._errHandler.sync(self) _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & 135905408) != 0) or _la==87: - self.state = 388 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & 135905408) != 0) or _la==85: + self.state = 392 self._errHandler.sync(self) token = self._input.LA(1) if token in [7]: - self.state = 386 + self.state = 390 self.match(PyNestMLParser.NEWLINE) pass - elif token in [14, 15, 16, 19, 20, 27, 87]: - self.state = 387 + elif token in [14, 15, 16, 19, 20, 27, 85]: + self.state = 391 self.stmt() pass else: raise 
NoViableAltException(self) - self.state = 392 + self.state = 396 self._errHandler.sync(self) _la = self._input.LA(1) @@ -2782,13 +2800,13 @@ def returnStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 393 + self.state = 397 self.match(PyNestMLParser.RETURN_KEYWORD) - self.state = 395 + self.state = 399 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 1829587424116736) != 0) or ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & 126977) != 0): - self.state = 394 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 457396912652288) != 0) or ((((_la - 71)) & ~0x3f) == 0 and ((1 << (_la - 71)) & 126977) != 0): + self.state = 398 self.expression(0) @@ -2842,23 +2860,23 @@ def ifStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 397 - self.ifClause() self.state = 401 + self.ifClause() + self.state = 405 self._errHandler.sync(self) _la = self._input.LA(1) while _la==17: - self.state = 398 + self.state = 402 self.elifClause() - self.state = 403 + self.state = 407 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 405 + self.state = 409 self._errHandler.sync(self) _la = self._input.LA(1) if _la==18: - self.state = 404 + self.state = 408 self.elseClause() @@ -2919,19 +2937,19 @@ def ifClause(self): self.enterRule(localctx, 52, self.RULE_ifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 407 + self.state = 411 self.match(PyNestMLParser.IF_KEYWORD) - self.state = 408 + self.state = 412 self.expression(0) - self.state = 409 + self.state = 413 self.match(PyNestMLParser.COLON) - self.state = 410 + self.state = 414 self.match(PyNestMLParser.NEWLINE) - self.state = 411 + self.state = 415 self.match(PyNestMLParser.INDENT) - self.state = 412 + self.state = 416 self.stmtsBody() - self.state = 413 + self.state = 417 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -2990,19 +3008,19 @@ def 
elifClause(self): self.enterRule(localctx, 54, self.RULE_elifClause) try: self.enterOuterAlt(localctx, 1) - self.state = 415 + self.state = 419 self.match(PyNestMLParser.ELIF_KEYWORD) - self.state = 416 + self.state = 420 self.expression(0) - self.state = 417 + self.state = 421 self.match(PyNestMLParser.COLON) - self.state = 418 + self.state = 422 self.match(PyNestMLParser.NEWLINE) - self.state = 419 + self.state = 423 self.match(PyNestMLParser.INDENT) - self.state = 420 + self.state = 424 self.stmtsBody() - self.state = 421 + self.state = 425 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3057,17 +3075,17 @@ def elseClause(self): self.enterRule(localctx, 56, self.RULE_elseClause) try: self.enterOuterAlt(localctx, 1) - self.state = 423 + self.state = 427 self.match(PyNestMLParser.ELSE_KEYWORD) - self.state = 424 + self.state = 428 self.match(PyNestMLParser.COLON) - self.state = 425 + self.state = 429 self.match(PyNestMLParser.NEWLINE) - self.state = 426 + self.state = 430 self.match(PyNestMLParser.INDENT) - self.state = 427 + self.state = 431 self.stmtsBody() - self.state = 428 + self.state = 432 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3155,45 +3173,45 @@ def forStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 430 + self.state = 434 self.match(PyNestMLParser.FOR_KEYWORD) - self.state = 431 + self.state = 435 localctx.var = self.match(PyNestMLParser.NAME) - self.state = 432 + self.state = 436 self.match(PyNestMLParser.IN_KEYWORD) - self.state = 433 + self.state = 437 localctx.start_from = self.expression(0) - self.state = 434 + self.state = 438 self.match(PyNestMLParser.ELLIPSIS) - self.state = 435 + self.state = 439 localctx.end_at = self.expression(0) - self.state = 436 + self.state = 440 self.match(PyNestMLParser.STEP_KEYWORD) - self.state = 438 + self.state = 442 self._errHandler.sync(self) _la = self._input.LA(1) - 
if _la==73: - self.state = 437 + if _la==71: + self.state = 441 localctx.negative = self.match(PyNestMLParser.MINUS) - self.state = 440 + self.state = 444 _la = self._input.LA(1) - if not(_la==88 or _la==89): + if not(_la==86 or _la==87): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 441 + self.state = 445 self.match(PyNestMLParser.COLON) - self.state = 442 + self.state = 446 self.match(PyNestMLParser.NEWLINE) - self.state = 443 + self.state = 447 self.match(PyNestMLParser.INDENT) - self.state = 444 + self.state = 448 self.stmtsBody() - self.state = 445 + self.state = 449 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3252,19 +3270,19 @@ def whileStmt(self): self.enterRule(localctx, 60, self.RULE_whileStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 447 + self.state = 451 self.match(PyNestMLParser.WHILE_KEYWORD) - self.state = 448 + self.state = 452 self.expression(0) - self.state = 449 + self.state = 453 self.match(PyNestMLParser.COLON) - self.state = 450 + self.state = 454 self.match(PyNestMLParser.NEWLINE) - self.state = 451 + self.state = 455 self.match(PyNestMLParser.INDENT) - self.state = 452 + self.state = 456 self.stmtsBody() - self.state = 453 + self.state = 457 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3317,31 +3335,31 @@ def nestMLCompilationUnit(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 457 + self.state = 461 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 457 + self.state = 461 self._errHandler.sync(self) token = self._input.LA(1) if token in [29]: - self.state = 455 + self.state = 459 self.model() pass elif token in [7]: - self.state = 456 + self.state = 460 self.match(PyNestMLParser.NEWLINE) pass else: raise NoViableAltException(self) - self.state = 459 + self.state = 463 self._errHandler.sync(self) _la = 
self._input.LA(1) if not (_la==7 or _la==29): break - self.state = 461 + self.state = 465 self.match(PyNestMLParser.EOF) except RecognitionException as re: localctx.exception = re @@ -3390,13 +3408,13 @@ def model(self): self.enterRule(localctx, 64, self.RULE_model) try: self.enterOuterAlt(localctx, 1) - self.state = 463 + self.state = 467 self.match(PyNestMLParser.MODEL_KEYWORD) - self.state = 464 + self.state = 468 self.match(PyNestMLParser.NAME) - self.state = 465 + self.state = 469 self.match(PyNestMLParser.COLON) - self.state = 466 + self.state = 470 self.modelBody() except RecognitionException as re: localctx.exception = re @@ -3498,59 +3516,59 @@ def modelBody(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 468 + self.state = 472 self.match(PyNestMLParser.NEWLINE) - self.state = 469 + self.state = 473 self.match(PyNestMLParser.INDENT) - self.state = 478 + self.state = 482 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 478 + self.state = 482 self._errHandler.sync(self) token = self._input.LA(1) if token in [30, 31, 32]: - self.state = 470 + self.state = 474 self.blockWithVariables() pass elif token in [34]: - self.state = 471 + self.state = 475 self.equationsBlock() pass elif token in [35]: - self.state = 472 + self.state = 476 self.inputBlock() pass elif token in [36]: - self.state = 473 + self.state = 477 self.outputBlock() pass elif token in [13]: - self.state = 474 + self.state = 478 self.function() pass elif token in [38]: - self.state = 475 + self.state = 479 self.onReceiveBlock() pass elif token in [39]: - self.state = 476 + self.state = 480 self.onConditionBlock() pass elif token in [33]: - self.state = 477 + self.state = 481 self.updateBlock() pass else: raise NoViableAltException(self) - self.state = 480 + self.state = 484 self._errHandler.sync(self) _la = self._input.LA(1) if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 960998940672) != 0)): break - self.state = 482 + self.state = 
486 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3567,7 +3585,7 @@ class OnReceiveBlockContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - self.inputPortName = None # Token + self.inputPortVariable = None # VariableContext def ON_RECEIVE_KEYWORD(self): return self.getToken(PyNestMLParser.ON_RECEIVE_KEYWORD, 0) @@ -3594,8 +3612,9 @@ def stmtsBody(self): def DEDENT(self): return self.getToken(PyNestMLParser.DEDENT, 0) - def NAME(self): - return self.getToken(PyNestMLParser.NAME, 0) + def variable(self): + return self.getTypedRuleContext(PyNestMLParser.VariableContext,0) + def COMMA(self, i:int=None): if i is None: @@ -3629,35 +3648,35 @@ def onReceiveBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 484 + self.state = 488 self.match(PyNestMLParser.ON_RECEIVE_KEYWORD) - self.state = 485 + self.state = 489 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 486 - localctx.inputPortName = self.match(PyNestMLParser.NAME) - self.state = 491 + self.state = 490 + localctx.inputPortVariable = self.variable() + self.state = 495 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==72: - self.state = 487 + while _la==70: + self.state = 491 self.match(PyNestMLParser.COMMA) - self.state = 488 + self.state = 492 self.constParameter() - self.state = 493 + self.state = 497 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 494 + self.state = 498 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 495 + self.state = 499 self.match(PyNestMLParser.COLON) - self.state = 496 + self.state = 500 self.match(PyNestMLParser.NEWLINE) - self.state = 497 + self.state = 501 self.match(PyNestMLParser.INDENT) - self.state = 498 + self.state = 502 self.stmtsBody() - self.state = 499 + self.state = 503 self.match(PyNestMLParser.DEDENT) except 
RecognitionException as re: localctx.exception = re @@ -3737,35 +3756,35 @@ def onConditionBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 501 + self.state = 505 self.match(PyNestMLParser.ON_CONDITION_KEYWORD) - self.state = 502 + self.state = 506 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 503 + self.state = 507 localctx.condition = self.expression(0) - self.state = 508 + self.state = 512 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==72: - self.state = 504 + while _la==70: + self.state = 508 self.match(PyNestMLParser.COMMA) - self.state = 505 + self.state = 509 self.constParameter() - self.state = 510 + self.state = 514 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 511 + self.state = 515 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 512 + self.state = 516 self.match(PyNestMLParser.COLON) - self.state = 513 + self.state = 517 self.match(PyNestMLParser.NEWLINE) - self.state = 514 + self.state = 518 self.match(PyNestMLParser.INDENT) - self.state = 515 + self.state = 519 self.stmtsBody() - self.state = 516 + self.state = 520 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3831,7 +3850,7 @@ def blockWithVariables(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 518 + self.state = 522 localctx.blockType = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 7516192768) != 0)): @@ -3839,25 +3858,25 @@ def blockWithVariables(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 519 + self.state = 523 self.match(PyNestMLParser.COLON) - self.state = 520 + self.state = 524 self.match(PyNestMLParser.NEWLINE) - self.state = 521 + self.state = 525 self.match(PyNestMLParser.INDENT) - self.state = 523 + self.state = 527 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 522 + self.state = 526 
self.declaration_newline() - self.state = 525 + self.state = 529 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==14 or _la==27 or _la==87): + if not (_la==14 or _la==27 or _la==85): break - self.state = 527 + self.state = 531 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3912,17 +3931,17 @@ def updateBlock(self): self.enterRule(localctx, 74, self.RULE_updateBlock) try: self.enterOuterAlt(localctx, 1) - self.state = 529 + self.state = 533 self.match(PyNestMLParser.UPDATE_KEYWORD) - self.state = 530 + self.state = 534 self.match(PyNestMLParser.COLON) - self.state = 531 + self.state = 535 self.match(PyNestMLParser.NEWLINE) - self.state = 532 + self.state = 536 self.match(PyNestMLParser.INDENT) - self.state = 533 + self.state = 537 self.stmtsBody() - self.state = 534 + self.state = 538 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3995,43 +4014,43 @@ def equationsBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 536 + self.state = 540 self.match(PyNestMLParser.EQUATIONS_KEYWORD) - self.state = 537 + self.state = 541 self.match(PyNestMLParser.COLON) - self.state = 538 + self.state = 542 self.match(PyNestMLParser.NEWLINE) - self.state = 539 + self.state = 543 self.match(PyNestMLParser.INDENT) - self.state = 543 + self.state = 547 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 543 + self.state = 547 self._errHandler.sync(self) token = self._input.LA(1) if token in [14, 27]: - self.state = 540 + self.state = 544 self.inlineExpression() pass - elif token in [87]: - self.state = 541 + elif token in [85]: + self.state = 545 self.odeEquation() pass elif token in [28]: - self.state = 542 + self.state = 546 self.kernel() pass else: raise NoViableAltException(self) - self.state = 545 + self.state = 549 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and 
((1 << _la) & 402669568) != 0) or _la==87): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 402669568) != 0) or _la==85): break - self.state = 547 + self.state = 551 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4078,31 +4097,6 @@ def continuousInputPort(self, i:int=None): return self.getTypedRuleContext(PyNestMLParser.ContinuousInputPortContext,i) - def LEFT_PAREN(self, i:int=None): - if i is None: - return self.getTokens(PyNestMLParser.LEFT_PAREN) - else: - return self.getToken(PyNestMLParser.LEFT_PAREN, i) - - def RIGHT_PAREN(self, i:int=None): - if i is None: - return self.getTokens(PyNestMLParser.RIGHT_PAREN) - else: - return self.getToken(PyNestMLParser.RIGHT_PAREN, i) - - def parameter(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) - else: - return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) - - - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(PyNestMLParser.COMMA) - else: - return self.getToken(PyNestMLParser.COMMA, i) - def getRuleIndex(self): return PyNestMLParser.RULE_inputBlock @@ -4122,69 +4116,39 @@ def inputBlock(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 549 + self.state = 553 self.match(PyNestMLParser.INPUT_KEYWORD) - self.state = 550 + self.state = 554 self.match(PyNestMLParser.COLON) - self.state = 551 + self.state = 555 self.match(PyNestMLParser.NEWLINE) - self.state = 552 + self.state = 556 self.match(PyNestMLParser.INDENT) - self.state = 571 + self.state = 559 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 555 + self.state = 559 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,57,self._ctx) + la_ = self._interp.adaptivePredict(self._input,58,self._ctx) if la_ == 1: - self.state = 553 + self.state = 557 self.spikeInputPort() pass elif la_ == 2: - self.state = 554 + self.state = 558 
self.continuousInputPort() pass - self.state = 569 + self.state = 561 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==47: - self.state = 557 - self.match(PyNestMLParser.LEFT_PAREN) - self.state = 566 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==87: - self.state = 558 - self.parameter() - self.state = 563 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==72: - self.state = 559 - self.match(PyNestMLParser.COMMA) - self.state = 560 - self.parameter() - self.state = 565 - self._errHandler.sync(self) - _la = self._input.LA(1) - - - - self.state = 568 - self.match(PyNestMLParser.RIGHT_PAREN) - - - self.state = 573 - self._errHandler.sync(self) - _la = self._input.LA(1) - if not (_la==87): + if not (_la==85): break - self.state = 575 + self.state = 563 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4222,17 +4186,29 @@ def LEFT_SQUARE_BRACKET(self): def RIGHT_SQUARE_BRACKET(self): return self.getToken(PyNestMLParser.RIGHT_SQUARE_BRACKET, 0) - def inputQualifier(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PyNestMLParser.InputQualifierContext) - else: - return self.getTypedRuleContext(PyNestMLParser.InputQualifierContext,i) + def LEFT_PAREN(self): + return self.getToken(PyNestMLParser.LEFT_PAREN, 0) + def RIGHT_PAREN(self): + return self.getToken(PyNestMLParser.RIGHT_PAREN, 0) def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def parameter(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return self.getToken(PyNestMLParser.COMMA, i) + def getRuleIndex(self): return PyNestMLParser.RULE_spikeInputPort @@ -4252,35 +4228,55 @@ def spikeInputPort(self): self._la = 0 # Token 
type try: self.enterOuterAlt(localctx, 1) - self.state = 577 + self.state = 565 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 582 + self.state = 570 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==54: - self.state = 578 + if _la==52: + self.state = 566 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 579 + self.state = 567 localctx.sizeParameter = self.expression(0) - self.state = 580 + self.state = 568 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 584 + self.state = 572 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 588 + self.state = 573 + self.match(PyNestMLParser.SPIKE_KEYWORD) + self.state = 586 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==41 or _la==42: - self.state = 585 - self.inputQualifier() - self.state = 590 + if _la==45: + self.state = 574 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 583 self._errHandler.sync(self) _la = self._input.LA(1) + if _la==85: + self.state = 575 + self.parameter() + self.state = 580 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==70: + self.state = 576 + self.match(PyNestMLParser.COMMA) + self.state = 577 + self.parameter() + self.state = 582 + self._errHandler.sync(self) + _la = self._input.LA(1) - self.state = 591 - self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 592 + + + self.state = 585 + self.match(PyNestMLParser.RIGHT_PAREN) + + + self.state = 588 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4322,10 +4318,29 @@ def LEFT_SQUARE_BRACKET(self): def RIGHT_SQUARE_BRACKET(self): return self.getToken(PyNestMLParser.RIGHT_SQUARE_BRACKET, 0) + def LEFT_PAREN(self): + return self.getToken(PyNestMLParser.LEFT_PAREN, 0) + + def RIGHT_PAREN(self): + return self.getToken(PyNestMLParser.RIGHT_PAREN, 0) + def expression(self): return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + def parameter(self, i:int=None): + if i is None: + return 
self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return self.getToken(PyNestMLParser.COMMA, i) + def getRuleIndex(self): return PyNestMLParser.RULE_continuousInputPort @@ -4345,85 +4360,58 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 594 + self.state = 590 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 599 + self.state = 595 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==54: - self.state = 595 + if _la==52: + self.state = 591 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 596 + self.state = 592 localctx.sizeParameter = self.expression(0) - self.state = 597 + self.state = 593 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 601 + self.state = 597 self.dataType() - self.state = 602 + self.state = 598 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 603 + self.state = 599 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 604 - self.match(PyNestMLParser.NEWLINE) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class InputQualifierContext(ParserRuleContext): - __slots__ = 'parser' - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - self.isInhibitory = None # Token - self.isExcitatory = None # Token - - def INHIBITORY_KEYWORD(self): - return self.getToken(PyNestMLParser.INHIBITORY_KEYWORD, 0) - - def EXCITATORY_KEYWORD(self): - return self.getToken(PyNestMLParser.EXCITATORY_KEYWORD, 0) - - def getRuleIndex(self): - return PyNestMLParser.RULE_inputQualifier - - def accept(self, 
visitor:ParseTreeVisitor): - if hasattr( visitor, "visitInputQualifier" ): - return visitor.visitInputQualifier(self) - else: - return visitor.visitChildren(self) + self.state = 612 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==45: + self.state = 600 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 609 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==85: + self.state = 601 + self.parameter() + self.state = 606 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==70: + self.state = 602 + self.match(PyNestMLParser.COMMA) + self.state = 603 + self.parameter() + self.state = 608 + self._errHandler.sync(self) + _la = self._input.LA(1) + self.state = 611 + self.match(PyNestMLParser.RIGHT_PAREN) - def inputQualifier(self): - - localctx = PyNestMLParser.InputQualifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 84, self.RULE_inputQualifier) - try: - self.state = 608 - self._errHandler.sync(self) - token = self._input.LA(1) - if token in [41]: - self.enterOuterAlt(localctx, 1) - self.state = 606 - localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) - pass - elif token in [42]: - self.enterOuterAlt(localctx, 2) - self.state = 607 - localctx.isExcitatory = self.match(PyNestMLParser.EXCITATORY_KEYWORD) - pass - else: - raise NoViableAltException(self) + self.state = 614 + self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -4501,65 +4489,65 @@ def accept(self, visitor:ParseTreeVisitor): def outputBlock(self): localctx = PyNestMLParser.OutputBlockContext(self, self._ctx, self.state) - self.enterRule(localctx, 86, self.RULE_outputBlock) + self.enterRule(localctx, 84, self.RULE_outputBlock) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 610 + self.state = 616 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 611 + self.state = 617 self.match(PyNestMLParser.COLON) - 
self.state = 612 + self.state = 618 self.match(PyNestMLParser.NEWLINE) - self.state = 613 + self.state = 619 self.match(PyNestMLParser.INDENT) - self.state = 630 + self.state = 636 self._errHandler.sync(self) token = self._input.LA(1) if token in [40]: - self.state = 614 + self.state = 620 localctx.isSpike = self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 627 + self.state = 633 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==47: - self.state = 615 + if _la==45: + self.state = 621 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 624 + self.state = 630 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==87: - self.state = 616 + if _la==85: + self.state = 622 localctx.attribute = self.parameter() - self.state = 621 + self.state = 627 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==72: - self.state = 617 + while _la==70: + self.state = 623 self.match(PyNestMLParser.COMMA) - self.state = 618 + self.state = 624 localctx.attribute = self.parameter() - self.state = 623 + self.state = 629 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 626 + self.state = 632 self.match(PyNestMLParser.RIGHT_PAREN) pass elif token in [37]: - self.state = 629 + self.state = 635 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 632 + self.state = 638 self.match(PyNestMLParser.NEWLINE) - self.state = 633 + self.state = 639 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4638,55 +4626,55 @@ def accept(self, visitor:ParseTreeVisitor): def function(self): localctx = PyNestMLParser.FunctionContext(self, self._ctx, self.state) - self.enterRule(localctx, 88, self.RULE_function) + self.enterRule(localctx, 86, self.RULE_function) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 635 + self.state = 641 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 636 + self.state = 
642 self.match(PyNestMLParser.NAME) - self.state = 637 + self.state = 643 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 646 + self.state = 652 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==87: - self.state = 638 + if _la==85: + self.state = 644 self.parameter() - self.state = 643 + self.state = 649 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==72: - self.state = 639 + while _la==70: + self.state = 645 self.match(PyNestMLParser.COMMA) - self.state = 640 + self.state = 646 self.parameter() - self.state = 645 + self.state = 651 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 648 + self.state = 654 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 650 + self.state = 656 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 140737488363264) != 0) or _la==87 or _la==88: - self.state = 649 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 35184372096768) != 0) or _la==85 or _la==86: + self.state = 655 localctx.returnType = self.dataType() - self.state = 652 + self.state = 658 self.match(PyNestMLParser.COLON) - self.state = 653 + self.state = 659 self.match(PyNestMLParser.NEWLINE) - self.state = 654 + self.state = 660 self.match(PyNestMLParser.INDENT) - self.state = 655 + self.state = 661 self.stmtsBody() - self.state = 656 + self.state = 662 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4726,12 +4714,12 @@ def accept(self, visitor:ParseTreeVisitor): def parameter(self): localctx = PyNestMLParser.ParameterContext(self, self._ctx, self.state) - self.enterRule(localctx, 90, self.RULE_parameter) + self.enterRule(localctx, 88, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 658 + self.state = 664 self.match(PyNestMLParser.NAME) - self.state = 659 + self.state = 665 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4742,6 +4730,63 @@ def parameter(self): return localctx + 
class ExpressionOrParameterContext(ParserRuleContext): + __slots__ = 'parser' + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def parameter(self): + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,0) + + + def expression(self): + return self.getTypedRuleContext(PyNestMLParser.ExpressionContext,0) + + + def getRuleIndex(self): + return PyNestMLParser.RULE_expressionOrParameter + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExpressionOrParameter" ): + return visitor.visitExpressionOrParameter(self) + else: + return visitor.visitChildren(self) + + + + + def expressionOrParameter(self): + + localctx = PyNestMLParser.ExpressionOrParameterContext(self, self._ctx, self.state) + self.enterRule(localctx, 90, self.RULE_expressionOrParameter) + try: + self.state = 669 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,75,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 667 + self.parameter() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 668 + self.expression(0) + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class ConstParameterContext(ParserRuleContext): __slots__ = 'parser' @@ -4791,14 +4836,14 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 661 + self.state = 671 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 662 + self.state = 672 self.match(PyNestMLParser.EQUALS) - self.state = 663 + self.state = 673 localctx.value = self._input.LT(1) _la = self._input.LA(1) - if not(_la==23 or ((((_la - 85)) & ~0x3f) == 0 and ((1 << (_la - 85)) & 27) != 0)): + if not(_la==23 or ((((_la - 83)) & ~0x3f) == 0 and ((1 << (_la - 83)) & 
27) != 0)): localctx.value = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/pynestml/generated/PyNestMLParserVisitor.py b/pynestml/generated/PyNestMLParserVisitor.py index 6d80e839d..d021a3f89 100644 --- a/pynestml/generated/PyNestMLParserVisitor.py +++ b/pynestml/generated/PyNestMLParserVisitor.py @@ -1,4 +1,4 @@ -# Generated from PyNestMLParser.g4 by ANTLR 4.13.2 +# Generated from PyNestMLParser.g4 by ANTLR 4.13.1 from antlr4 import * if "." in __name__: from .PyNestMLParser import PyNestMLParser diff --git a/pynestml/utils/ast_utils.py b/pynestml/utils/ast_utils.py index ba00bc2a3..e783db248 100644 --- a/pynestml/utils/ast_utils.py +++ b/pynestml/utils/ast_utils.py @@ -2710,7 +2710,7 @@ def initial_value_or_zero(cls, astnode: ASTModel, var): return "0" @classmethod - def nestml_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, int]: + def nestml_spiking_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, int]: input_port_to_rport = {} rport = 1 # if there is more than one spiking input port, count begins at 1 for input_block in astnode.get_input_blocks(): @@ -2728,6 +2728,24 @@ def nestml_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, in return input_port_to_rport + @classmethod + def nestml_continuous_input_port_to_nest_rport_dict(cls, astnode: ASTModel) -> Dict[str, int]: + input_port_to_rport = {} + rport = 1 # if there is more than one spiking input port, count begins at 1 + for input_block in astnode.get_input_blocks(): + for input_port in input_block.get_input_ports(): + if not input_port.is_continuous(): + continue + + if input_port.get_size_parameter(): + for i in range(int(str(input_port.size_parameter))): # XXX: should be able to convert size_parameter expression to an integer more generically (allowing for e.g. 
parameters) + input_port_to_rport[input_port.name + "_VEC_IDX_" + str(i)] = rport + rport += 1 + else: + input_port_to_rport[input_port.name] = rport + rport += 1 + + return input_port_to_rport @classmethod def find_parent_node_by_type(cls, node: ASTNode, type_to_find: Any) -> Optional[Any]: @@ -2743,7 +2761,7 @@ def find_parent_node_by_type(cls, node: ASTNode, type_to_find: Any) -> Optional[ @classmethod def nestml_input_port_to_nest_rport(cls, astnode: ASTModel, spike_in_port: ASTInputPort): - return ASTUtils.nestml_input_port_to_nest_rport_dict(astnode)[spike_in_port] + return ASTUtils.nestml_spiking_input_port_to_nest_rport_dict(astnode)[spike_in_port] @classmethod def port_name_printer(cls, variable: ASTVariable) -> str: diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 223005869..eb78138fa 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -132,11 +132,7 @@ def test_valid_inline_expression_has_several_lhs(self): def test_invalid_no_values_assigned_to_input_ports(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoValueAssignedToInputPort.nestml')) - assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 2 - - def test_valid_no_values_assigned_to_input_ports(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoValueAssignedToInputPort.nestml')) - assert len(Logger.get_messages(model, LoggingLevel.ERROR)) == 0 + assert len(Logger.get_messages(model, LoggingLevel.ERROR)) == 2 def test_invalid_order_of_equations_correct(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoNoOrderOfEquations.nestml')) @@ -206,10 +202,6 @@ def test_invalid_co_co_spike_input_ports_illegal_missing_attribute(self): model = 
self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInputPortsIllegalMissingAttribute.nestml')) assert len(Logger.get_messages(model, LoggingLevel.ERROR)) == 1 - def test_valid_redundant_input_port_keywords_detected(self): - model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInputPortWithRedundantTypes.nestml')) - assert len(Logger.get_messages(model, LoggingLevel.ERROR)) == 0 - def test_invalid_parameters_assigned_only_in_parameters_block(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoParameterAssignedOutsideBlock.nestml')) assert len(Logger.get_messages(model, LoggingLevel.ERROR)) == 1 From bb2430135b50beebc51935f3b51c3b3d3ac58a6b Mon Sep 17 00:00:00 2001 From: "C.A.P. Linssen" Date: Thu, 25 Sep 2025 12:34:10 +0200 Subject: [PATCH 68/68] add attributes to spike events --- .../nestml_language_concepts.rst | 26 ++++++------ doc/nestml_language/neurons_in_nestml.rst | 2 +- ...af_psc_exp_nonlineardendrite_neuron.nestml | 5 +-- models/neurons/aeif_cond_alpha_neuron.nestml | 4 +- models/neurons/aeif_cond_exp_neuron.nestml | 7 ++-- ...iaf_psc_delta_fixed_timestep_neuron.nestml | 31 +++++++------- models/neurons/iaf_psc_exp_neuron.nestml | 7 ++-- models/neurons/izhikevich_neuron.nestml | 5 ++- pynestml/symbols/predefined_functions.py | 16 ++++++++ .../aeif_cond_alpha_alt_neuron.nestml | 41 ++++++++++--------- ...alpha_function_2nd_order_ode_neuron.nestml | 17 ++++---- ...ction_with_inline_expression_neuron.nestml | 4 +- 12 files changed, 93 insertions(+), 72 deletions(-) diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index f5b29c72b..acf1c852b 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -1115,7 +1115,7 @@ 
The spiking input port name ``spikes_in`` can be used directly in the right-hand \frac{dx}{dt} = -\frac{x}{\tau} + \mathrm{spikes\_in}(t) -If ``x`` is a real number, then the units here are consistent (in 1/s). This can be written in NESTML as: +If ``x`` is a real number, then the units here are consistent (in 1/s) on left- and right-hand side of the equation. This can be written in NESTML as: .. code-block:: nestml @@ -1125,23 +1125,23 @@ If ``x`` is a real number, then the units here are consistent (in 1/s). This can .. math:: - \frac{dx}{dt} = -\frac{x}{\tau} + (K \ast \mathrm{spikes\_in}) / s + \frac{dx}{dt} = -\frac{x}{\tau} + \frac{1}{C} \left(K \ast \mathrm{spikes\_in}\right) -Note that applying the convolution means integrating over time, hence dropping the [1/s] unit, leaving a unitless quantity. To make the units consistent in this case, an explicit division by seconds is required. +Note that applying the convolution means integrating over time, hence dropping the [1/s] unit, leaving a unitless quantity (the function of time :math:`K \ast \mathrm{spikes\_in}`). To make the units consistent in this case, an explicit division by time (such as by a constant :math:`C` with units [s]) is required. This can be written in NESTML as: .. code-block:: nestml - x' = -x / tau + convolve(K, spikes_in) / s + x' = -x / tau + convolve(K, spikes_in) / C -Physical units such as millivolts (:math:`\text{mV}`) and picoamperes (:math:`\text{pA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current.
In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity to obtain a quantity with units of volts or amperes, for instance, if ``x`` is in ``pA``, then we can write: +Physical units such as millivolts (:math:`\text{mV}`) and picoamperes (:math:`\text{pA}`) can be directly combined with the Dirac delta function to model an impulse with a physical quantity such as voltage or current. In such cases, the Dirac delta function is multiplied by the appropriate unit of the physical quantity to obtain a quantity with units of volts or amperes, for instance, if ``I`` is in ``pA``, then we can write: .. code-block:: nestml - x = -x / tau + spikes_in * pA + I' = -I / tau + spikes_in * (1 pA) -However, note that this does not account for different spikes carrying different weight (which typically results in different postsynaptic currents or potentials). In this example, each spike will result in a change in :math:`x` of 1 pA. +However, note that this does not account for different spikes carrying different weight (which typically results in different postsynaptic currents or potentials). In this example, each spike will result in a change in :math:`I` of 1 pA. To read out the attributes from events, for example the weight of the spike, the dot notation can be used, for example: @@ -1155,13 +1155,13 @@ If ``spikes_in.w`` is defined as a real number, the units here are consistent (i .. code-block:: nestml state: - y mV = 0 mV + V mV = 0 mV input: spikes_in <- spike(w mV) equations: - y' = -y / tau + spikes_in.w + V' = -V / tau + spikes_in.w Note that again, the units are consistent if :math:`w_k` is assumed to be in units of mV; in combination with the 1/s unit of the delta train, the units of ``spikes_in.w`` are in mV/s. @@ -1177,16 +1177,16 @@ An ``onReceive`` block can be defined for every spiking input port. For example, println("Info: processing a presynaptic spike at time t = {t}") # ... further statements go here ... 
-The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t-\epsilon`, for :math:`\epsilon\rightarrow 0`) to "just after" the event (at :math:`t=t+\epsilon`). Analogous to the ``update`` block, the predefined variable ``t`` indicates the time :math:`t-\epsilon` at the start of the interval, whereas the predefined function ``timestep()`` yields the duration of the interval :math:`2\epsilon` for :math:`\epsilon\rightarrow 0`. As the timestep() function would typically yield a numerical value equal to zero, its use inside an ``onReceive`` block only makes sense to integrate across delta pulses. +The statements in the event handler will be executed when the event occurs and integrate the state of the system from "just before" the event (at :math:`t-\epsilon`, for :math:`\epsilon\rightarrow 0`) to "just after" the event (at :math:`t=t+\epsilon`). Analogous to the ``update`` block, the predefined variable ``t`` indicates the time :math:`t-\epsilon` at the start of the interval, whereas the predefined function ``timestep()`` yields the duration of the interval :math:`2\epsilon` for :math:`\epsilon\rightarrow 0`. As the timestep() function would typically yield a numerical value equal to zero, its use inside an ``onReceive`` block only makes sense to integrate over (trains of) delta pulses to obtain the area under the curve. -Typically, the statements in the ``onReceive`` block integrate over the delta function across time, which yields the surface area of the pulse, which typically corresponds to the weight of the spike or to another spike event attribute. Integration across time causes the 1/s unit of the spike train to drop out, so that what remains are the units of the spike attribute itself. 
For instance, when a port is defined with an attribute "psp" in units of mV: +Typically, the statements in the ``onReceive`` block integrate the delta function across time, which yields the surface area under the curve, which typically corresponds to the weight of the spike, or to another spike event attribute. Integration across time causes the 1/s unit of the spike train to drop out, so that what remains are the units of the spike attribute itself. For instance, when a port is defined with an attribute "psp" in units of mV: .. code-block:: nestml input: in_spikes(psp mV) <- spike -then the following has consistent units: +then the following has consistent units: ``in_spikes.psp`` is in mV/s as it consists of the unit given in the spiking input port definition, multiplied with the 1/s from the delta pulses, and after integration the 1/s drops out leaving a unit of mV. .. code-block:: nestml @@ -1196,7 +1196,7 @@ then the following has consistent units: onReceive(in_spikes): V_m += integrate(in_spikes.psp, t, t + timestep()) # lhs and rhs both in [mV] -A spiking input port (or any of its attributes) may not appear outside of a ``integrate()`` call, because the units will be inconsistent; for example: +In ``onReceive`` blocks, a spiking input port (or any of its attributes) may not appear outside of an ``integrate()`` call, because the units will be inconsistent; for example: ..
code-block:: nestml diff --git a/doc/nestml_language/neurons_in_nestml.rst b/doc/nestml_language/neurons_in_nestml.rst index 14f01cfbb..e8a486a5c 100644 --- a/doc/nestml_language/neurons_in_nestml.rst +++ b/doc/nestml_language/neurons_in_nestml.rst @@ -87,7 +87,7 @@ The incoming spikes could have been equivalently handled with an ``onReceive`` e I_syn' = -I_syn / tau_syn onReceive(spikes): - I_syn += spikes.weight + I_syn += integrate(spikes.weight, t, t + timestep()) (Re)setting synaptic integration state diff --git a/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml b/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml index b044db974..a3ab1e242 100644 --- a/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml +++ b/doc/tutorials/sequence_learning/iaf_psc_exp_nonlineardendrite_neuron.nestml @@ -77,7 +77,6 @@ model iaf_psc_exp_nonlineardendrite_neuron: I_dend_incr pA/ms = pA * exp(1) / tau_syn2 - input: I_1 <- spike(weight pA) I_2 <- spike(weight real) @@ -87,7 +86,8 @@ model iaf_psc_exp_nonlineardendrite_neuron: spike onReceive(I_2): - I_dend$ += I_2.weight * I_dend_incr + spike_weight pA = integral(I_2.weight, t, t + timestep()) + I_dend$ += spike_weight * I_dend_incr update: # solve ODEs @@ -136,4 +136,3 @@ model iaf_psc_exp_nonlineardendrite_neuron: active_dendrite_readout = 0. 
dAP_counts = 0 I_dend = 0 pA - diff --git a/models/neurons/aeif_cond_alpha_neuron.nestml b/models/neurons/aeif_cond_alpha_neuron.nestml index 3408bbc27..9a65a3d41 100644 --- a/models/neurons/aeif_cond_alpha_neuron.nestml +++ b/models/neurons/aeif_cond_alpha_neuron.nestml @@ -119,8 +119,8 @@ model aeif_cond_alpha_neuron: I_e pA = 0 pA input: - exc_spikes <- spike(weight nS) - inh_spikes <- spike(weight nS) + exc_spikes <- spike(weight nS/s) + inh_spikes <- spike(weight nS/s) I_stim pA <- continuous output: diff --git a/models/neurons/aeif_cond_exp_neuron.nestml b/models/neurons/aeif_cond_exp_neuron.nestml index 57c2ce6b6..a3b0445bc 100644 --- a/models/neurons/aeif_cond_exp_neuron.nestml +++ b/models/neurons/aeif_cond_exp_neuron.nestml @@ -127,10 +127,11 @@ model aeif_cond_exp_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - if spike_in_port.weight > 0: - g_syn_exc += spike_in_port.weight + spike_weight nS = integral(spike_in_port.weight, t, t + timestep()) + if spike_weight > 0 nS: + g_syn_exc += spike_weight else: - g_syn_inh -= spike_in_port.weight + g_syn_inh -= spike_weight update: if refr_t > 0 ms: diff --git a/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml b/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml index 12779fe1b..e1c5975fb 100644 --- a/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml +++ b/models/neurons/iaf_psc_delta_fixed_timestep_neuron.nestml @@ -1,19 +1,19 @@ # iaf_psc_delta_fixed_timestep - Current-based leaky integrate-and-fire neuron model with delta-kernel post-synaptic currents # ########################################################################################################################### -# +# # Description # +++++++++++ -# +# # An implementation of a leaky integrate-and-fire model where the potential jumps on each spike arrival. 
The threshold crossing is followed by an absolute refractory period during which the membrane potential is clamped to the resting potential. Spikes arriving while the neuron is refractory are discarded. -# +# # The general framework for the consistent formulation of systems with neuron-like dynamics interacting by point events is described in [1]_. A flow chart can be found in [2]_. -# +# # This model differs from ``iaf_psc_delta`` in that it assumes a fixed-timestep simulator, so the functions ``resolution()`` and ``steps()`` can be used. -# -# +# +# # References # ++++++++++ -# +# # .. [1] Rotter S, Diesmann M (1999). Exact simulation of # time-invariant linear systems with applications to neuronal # modeling. Biologial Cybernetics 81:381-402. @@ -22,11 +22,11 @@ # space analysis of synchronous spiking in cortical neural # networks. Neurocomputing 38-40:565-571. # DOI: https://doi.org/10.1016/S0925-2312(01)00409-X -# -# +# +# # See also # ++++++++ -# +# # iaf_psc_alpha, iaf_psc_exp # # @@ -34,7 +34,7 @@ # +++++++++++++++++++ # # This file is part of NEST. 
-# +# # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify @@ -73,16 +73,17 @@ model iaf_psc_delta_fixed_timestep_neuron: refr_counts integer = steps(refr_T) input: - spikes <- spike(weight mV) + spike_in_port <- spike(weight mV) I_stim pA <- continuous output: spike - onReceive(spikes): - # discard spikes if neuron is refractory + onReceive(spike_in_port): + # process spike only if neuron is not refractory if refr_counter == 0: - V_m += spikes.weight + spike_weight mV = integrate(spike_in_port.weight, t, t + timestep()) + V_m += spike_weight update: if refr_counter > 0: diff --git a/models/neurons/iaf_psc_exp_neuron.nestml b/models/neurons/iaf_psc_exp_neuron.nestml index 0474eed46..3c4ceecbb 100644 --- a/models/neurons/iaf_psc_exp_neuron.nestml +++ b/models/neurons/iaf_psc_exp_neuron.nestml @@ -110,10 +110,11 @@ model iaf_psc_exp_neuron: onReceive(spike_in_port): # route the incoming spike on the basis of the weight: less than zero means an inhibitory spike; greater than zero means an excitatory spike - if spike_in_port.weight > 0 pA: - I_syn_exc += spike_in_port.weight + weight pA = integral(spike_in_port.weight, t, t + timestep()) # integrate the incoming spike train (in pA/s) from "just before" the spike to "just after" the spike to obtain weight in pA + if weight > 0 pA: + I_syn_exc += weight else: - I_syn_inh -= spike_in_port.weight + I_syn_inh -= weight update: if refr_t > 0 ms: diff --git a/models/neurons/izhikevich_neuron.nestml b/models/neurons/izhikevich_neuron.nestml index a7b908cd0..ac8688ac4 100644 --- a/models/neurons/izhikevich_neuron.nestml +++ b/models/neurons/izhikevich_neuron.nestml @@ -40,7 +40,7 @@ # +++++++++++++++++++ # # This file is part of NEST. 
-# +# # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify @@ -87,7 +87,8 @@ model izhikevich_neuron: onReceive(spike_in_port): # Add synaptic contribution - V_m += spike_in_port.weight + spike_weight real = integrate(spike_in_port.weight, t, t + timestep()) + V_m += spike_weight # lower bound of membrane potential V_m = max(V_min, V_m) diff --git a/pynestml/symbols/predefined_functions.py b/pynestml/symbols/predefined_functions.py index 7fd273032..f2a8e6785 100644 --- a/pynestml/symbols/predefined_functions.py +++ b/pynestml/symbols/predefined_functions.py @@ -64,6 +64,7 @@ class PredefinedFunctions: DELTA = "delta" INTEGRATE_ODES = "integrate_odes" CONVOLVE = "convolve" + INTEGRAL = "integral" name2function = {} # type: Mapping[str, FunctionSymbol] @classmethod @@ -106,6 +107,7 @@ def register_functions(cls): cls.__register_floor_function() cls.__register_round_function() cls.__register_convolve() + cls.__register_integral() @classmethod def register_function(cls, name, params, return_type, element_reference): @@ -519,6 +521,20 @@ def __register_convolve(cls): element_reference=None, is_predefined=True) cls.name2function[cls.CONVOLVE] = symbol + @classmethod + def __register_integral(cls): + """ + Registers the integral function into the system. 
+ """ + params = list() + params.append(PredefinedTypes.get_real_type()) # function to integrate + params.append(PredefinedTypes.get_type("ms")) # from time + params.append(PredefinedTypes.get_type("ms")) # to time + symbol = FunctionSymbol(name=cls.INTEGRAL, param_types=params, + return_type=PredefinedTypes.get_template_type(0), + element_reference=None, is_predefined=True) + cls.name2function[cls.INTEGRAL] = symbol + @classmethod def get_function_symbols(cls): """ diff --git a/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml b/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml index ce35aa4b2..b1e7f1ad5 100644 --- a/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml +++ b/tests/nest_tests/resources/aeif_cond_alpha_alt_neuron.nestml @@ -1,41 +1,41 @@ # aeif_cond_alpha - Conductance based exponential integrate-and-fire neuron model # ############################################################################### -# +# # Description # +++++++++++ -# +# # aeif_psc_alpha is the adaptive exponential integrate and fire neuron according to Brette and Gerstner (2005), with post-synaptic conductances in the form of a bi-exponential ("alpha") function. -# +# # The membrane potential is given by the following differential equation: -# +# # .. math:: -# +# # C_m \frac{dV_m}{dt} = # -g_L(V_m-E_L)+g_L\Delta_T\exp\left(\frac{V_m-V_{th}}{\Delta_T}\right) - # g_e(t)(V_m-E_e) \\ # -g_i(t)(V_m-E_i)-w + I_e -# +# # and -# +# # .. math:: -# +# # \tau_w \frac{dw}{dt} = a(V_m-E_L) - w -# +# # Note that the membrane potential can diverge to positive infinity due to the exponential term. To avoid numerical instabilities, instead of :math:`V_m`, the value :math:`\min(V_m,V_{peak})` is used in the dynamical equations. -# -# +# +# # References # ++++++++++ -# +# # .. [1] Brette R and Gerstner W (2005). Adaptive exponential # integrate-and-fire model as an effective description of neuronal # activity. Journal of Neurophysiology. 
943637-3642 # DOI: https://doi.org/10.1152/jn.00686.2005 -# -# +# +# # See also # ++++++++ -# +# # iaf_psc_alpha, aeif_psc_exp # model aeif_cond_alpha_alt_neuron: @@ -90,7 +90,7 @@ model aeif_cond_alpha_alt_neuron: I_e pA = 0 pA input: - in_spikes <- spike(weight nS) + spike_in_port <- spike(weight nS) I_stim pA <- continuous output: @@ -104,11 +104,12 @@ model aeif_cond_alpha_alt_neuron: # neuron not refractory integrate_odes(g_exc, g_inh, V_m, w) - onReceive(in_spikes): - if in_spikes.weight > 0: - g_exc' += in_spikes.weight * (e / tau_syn_exc) + onReceive(spike_in_port): + spike_weight nS = integrate(spike_in_port.weight, t, t + timestep()) + if spike_weight > 0: + g_exc' += spike_weight * (e / tau_syn_exc) else: - g_inh' -= in_spikes.weight * (e / tau_syn_inh) + g_inh' -= spike_weight * (e / tau_syn_inh) onCondition(refr_t <= 0 ms and V_m >= V_th): # threshold crossing diff --git a/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml b/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml index 5a5694b54..af9991d7c 100644 --- a/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml +++ b/tests/nest_tests/resources/alpha_function_2nd_order_ode_neuron.nestml @@ -1,25 +1,25 @@ # alpha_function_2nd_order_ode_neuron.nestml # ########################################## -# +# # Tests that for a system of higher-oder ODEs of the form F(x'',x',x)=0, integrate_odes(x) includes the integration of all the higher-order variables involved of the system. -# +# # Copyright statement # +++++++++++++++++++ -# +# # This file is part of NEST. -# +# # Copyright (C) 2004 The NEST Initiative -# +# # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. 
-# +# # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. -# +# # You should have received a copy of the GNU General Public License # along with NEST. If not, see . # @@ -40,4 +40,5 @@ model alpha_function_2nd_order_ode_neuron: integrate_odes(x, y) onReceive(fX): - x' += e * fX.weight / ms + spike_weight real = integrate(fX.weight, t, t + timestep()) + x' += e * spike_weight / ms diff --git a/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml b/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml index 95ccb6f6e..16cd12c2c 100644 --- a/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml +++ b/tests/nest_tests/resources/beta_function_with_inline_expression_neuron.nestml @@ -46,7 +46,7 @@ model beta_function_with_inline_expression_neuron: recordable inline z pA = x input: - weighted_input_spikes <- spike(weight pA) + spike_in_port <- spike(weight pA) output: spike @@ -55,4 +55,4 @@ model beta_function_with_inline_expression_neuron: integrate_odes() onReceive(weighted_input_spikes): - x_ += alpha * (1 / tau2 - 1 / tau1) * weighted_input_spikes.weight + x_ += alpha * (1 / tau2 - 1 / tau1) * integrate(spike_in_port.weight, t, t + timestep())