diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index cf55b9b03b..6d43825ccd 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -36,10 +36,10 @@ jobs: python3-dev python3-pip shell: bash - name: Set up Python3 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 2 - name: Install Python3 dependencies @@ -48,7 +48,7 @@ jobs: pip3 install -U pip setuptools pip3 install --user -r requirements.txt - name: Restore compiler cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{runner.workspace}}/ccache @@ -82,7 +82,7 @@ jobs: lcov --capture --directory . --no-external --output-file build/coverage-run.info --exclude "*/ext/*" (cd build; lcov --add-tracefile coverage-base.info --add-tracefile coverage-run.info --output-file coverage.info) (cd build; lcov --list coverage.info) - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 with: files: ./build/coverage.info fail_ci_if_error: true diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index e347bf4fd7..3b7bfd7693 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -15,7 +15,7 @@ jobs: name: C/C++, CMake and Python runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Update submodule working-directory: ${{runner.workspace}}/nmodl run: git submodule update --init cmake/hpc-coding-conventions diff --git a/.github/workflows/nmodl-ci.yml b/.github/workflows/nmodl-ci.yml index 1a66988d20..0341cedef1 100644 --- a/.github/workflows/nmodl-ci.yml +++ b/.github/workflows/nmodl-ci.yml @@ -73,11 +73,11 @@ jobs: shell: bash - name: Set up Python3 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install Python3 dependencies working-directory: ${{runner.workspace}}/nmodl @@ -141,7 +141,7 @@ jobs: echo ----- - name: Restore compiler cache - uses: pat-s/always-upload-cache@v3 + uses: actions/cache@v4 with: path: | ${{runner.workspace}}/ccache @@ -149,6 +149,7 @@ jobs: restore-keys: | ${{hashfiles('matrix.json')}}-${{github.ref}}- ${{hashfiles('matrix.json')}}- + save-always: true - name: Build shell: bash @@ -183,7 +184,7 @@ jobs: run: | cmake --build . 
--target install - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ctest-results-${{hashfiles('matrix.json')}} path: ${{runner.workspace}}/nmodl/build/Testing/*/Test.xml diff --git a/.github/workflows/nmodl-doc.yml b/.github/workflows/nmodl-doc.yml index 56a8df8e86..22ab1192b3 100644 --- a/.github/workflows/nmodl-doc.yml +++ b/.github/workflows/nmodl-doc.yml @@ -47,11 +47,11 @@ jobs: shell: bash - name: Set up Python3 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -68,7 +68,7 @@ jobs: uses: mxschmitt/action-tmate@v3 - name: Restore compiler cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ${{runner.workspace}}/ccache diff --git a/.github/workflows/sonarsource.yml b/.github/workflows/sonarsource.yml index 220280be8d..45b09e7457 100644 --- a/.github/workflows/sonarsource.yml +++ b/.github/workflows/sonarsource.yml @@ -22,10 +22,10 @@ jobs: python3-dev python3-pip shell: bash - name: Set up Python3 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis - name: Install Python3 dependencies @@ -49,6 +49,7 @@ jobs: run: | build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/ - name: Run sonar-scanner + continue-on-error: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} diff --git a/INSTALL.rst b/INSTALL.rst index 28098deab1..72fff92bde 100644 --- a/INSTALL.rst +++ b/INSTALL.rst @@ -224,4 +224,4 @@ You can build the entire documentation simply by using sphinx from .. code:: sh - python3 setup.py build_ext --inplace docs -G "Unix Makefiles" + python3 setup.py build_ext --inplace docs
diff --git a/docs/contents/ions.rst b/docs/contents/ions.rst
new file mode 100644
index 0000000000..898bad69e5
--- /dev/null
+++ b/docs/contents/ions.rst
@@ -0,0 +1,109 @@
+Ions
+====
+
+NEURON supports computing ion currents and ion concentrations. For each segment
+there can be a separate current for each ion, e.g. one for sodium ions and
+another for calcium ions.
+
+There are five variables associated with ions: the current (``ina``), the
+concentration inside the segment adjacent to the membrane (``nai``), the
+concentration outside the segment adjacent to the membrane (``nao``), the
+reversal potential (``ena``) and the derivative of the current w.r.t. the
+voltage (``dinadv``). The names should be read as ``i{na}`` and therefore
+refer to the value for sodium; for calcium it would have been ``ica``.
+
+These variables are physical properties of the segment. Therefore, there exists
+one mechanism per ion. MOD files can include code to read or write these
+variables.
+
+NMODL Keywords
+--------------
+A MOD file seeking to use ions should use ``USEION`` as follows:
+
+.. code::
+
+  NEURON {
+    USEION na READ ina WRITE ena
+  }
+
+  ASSIGNED {
+    ena (mV)
+    ina (mA / cm2)
+  }
+
+Multiple ions are expressed by one line of ``USEION`` per ion.
+
+The ion name (``na`` in the example above) is a string identifying the ion.
+For sodium it's ``na`` and for calcium ``ca``. If no other mechanism has
+defined an ion with this name, a new ion mechanism is created.
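+
+To make the naming convention concrete, the sketch below is a small,
+self-contained C++ example; it is not part of NMODL and the function name is
+made up for illustration. It maps an ion variable name onto its slot in the
+ion mechanism, following the NRN ordering shown later in Figure 1
+(``ena``, ``nai``, ``nao``, ``ina``, ``dinadv``).
+
+.. code:: cpp
+
+  #include <iostream>
+  #include <stdexcept>
+  #include <string>
+
+  // For an ion called `name` (e.g. "na"), return the slot of `var` in the
+  // ion mechanism: 0: e{na}, 1: {na}i, 2: {na}o, 3: i{na}, 4: di{na}dv.
+  int ion_variable_index(const std::string& name, const std::string& var) {
+      if (var == "e" + name)         return 0;  // reversal potential
+      if (var == name + "i")         return 1;  // inside concentration
+      if (var == name + "o")         return 2;  // outside concentration
+      if (var == "i" + name)         return 3;  // ionic current
+      if (var == "di" + name + "dv") return 4;  // derivative of current w.r.t. voltage
+      throw std::runtime_error("not an ion variable: " + var);
+  }
+
+  int main() {
+      std::cout << ion_variable_index("na", "ena") << '\n';     // prints 0
+      std::cout << ion_variable_index("na", "dinadv") << '\n';  // prints 4
+      std::cout << ion_variable_index("ca", "ica") << '\n';     // prints 3
+  }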
+
+Both ``READ`` and ``WRITE`` are optional and are followed by a comma-separated
+list of ion variable names, e.g. ``ina, ena``.
+
+Keyword: WRITE
+~~~~~~~~~~~~~~
+
+Writing Ion Currents
+^^^^^^^^^^^^^^^^^^^^
+
+In a MOD file one can set the value of ion variables.
+
+.. code::
+
+  BREAKPOINT {
+    ina = gna*(v - ena)
+  }
+
+Semantically, this states that the contribution of the Hodgkin-Huxley model to
+the overall sodium current in that segment is ``gna*(v - ena)``. Since
+everything is at the level of a segment, we'll not keep repeating "for that
+segment". Similarly, each mechanism computes a `local` contribution, i.e. the
+contribution due to this mechanism to the actual `global` ion current.
+
+Therefore, code for the following must be generated:
+
+1. Compute the local contribution to the sodium current.
+2. Increment the total, local, current contribution by ``ina``.
+3. Increment the global sodium current by ``ina``.
+4. Compute the local derivative of ``ina`` w.r.t. the voltage.
+5. Increment the global derivative of the sodium current w.r.t. the voltage.
+
+The global current must also be updated as usual. However, this isn't ion
+specific and is hence omitted here.
+
+
+Storage
+-------
+
+Each mechanism that specifies ``USEION na`` contains a copy of all used ion
+variables and pointers to the shared values in the ion mechanism, see Figure 1.
+
+The pointer to the variable in the ion mechanism is prefixed with ``ion_``,
+e.g. during initialization we might copy the shared value ``*ion_ena[i]`` to
+``ena[i]`` (the copy local to the mechanism using the ion).
+
+.. figure:: ../images/ion_storage.svg
+
+   Figure 1: Ion mechanism for sodium (``na``) and its use in the
+   Hodgkin-Huxley mechanism. This figure shows the NEURON memory layout.
+
+
+Optimizing Storage
+~~~~~~~~~~~~~~~~~~
+
+Since the common pattern is to only access the values of a particular instance,
+the local copy isn't needed. It might facilitate SIMD, but it can be replaced
+by local variables, see Figure 2.
+
+.. figure:: ../images/ion_storage-opt.svg
+
+   Figure 2: Optimized ion storage layout.
+
+
+This optimization is implemented in NMODL. It can be activated on the CLI via
+
+.. code:: sh
+
+   nmodl ... codegen --opt-ionvar-copy
+
+
diff --git a/docs/contents/pointers.rst b/docs/contents/pointers.rst
new file mode 100644
index 0000000000..9add4b8e32
--- /dev/null
+++ b/docs/contents/pointers.rst
@@ -0,0 +1,30 @@
+NMODL "pointers"
+================
+
+Mechanisms can refer to values in other mechanisms, e.g. the sodium current
+``ina``. Therefore, there is a notion of "pointer", called ``Datum``. A
+``Datum`` can store a pointer to a double, a stable pointer to a double, an
+integer, or a pointer to anything else.
+
+Integer Variables
+-----------------
+One important subset of Datums is pointers to RANGE variables, meaning pointers
+to parameters in other mechanisms or to the parameters associated with each
+node, e.g. the voltage. Since the storage of RANGE variables is controlled by
+NEURON/CoreNEURON, these pointers have stronger semantics than a ``double*``.
+
+These make up the majority of use cases for Datums and are considered the
+well-mannered subset.
+
+In CoreNEURON this subset of Datums is treated differently from other Datums.
+Because CoreNEURON stores the values these Datums can point to in a single
+contiguous array of doubles, the "pointers" can be expressed as indices into
+this array.
+
+Therefore, this subset of Datums is referred to as "integer variables".
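+
+As a minimal illustration of the idea (the types and layout below are invented
+for this sketch and are not CoreNEURON's actual data structures), such a
+"pointer" is simply an offset into the contiguous array of doubles:
+
+.. code:: cpp
+
+  #include <cstddef>
+  #include <iostream>
+  #include <vector>
+
+  // All values that integer variables can point to live in one contiguous
+  // array of doubles owned by the simulator.
+  struct MechanismData {
+      std::vector<double> data;
+  };
+
+  // The well-mannered subset of Datum: just an offset into `data`.
+  using IntegerVariable = std::size_t;
+
+  double read(const MechanismData& m, IntegerVariable iv) {
+      return m.data[iv];  // "dereference" by indexing instead of chasing a raw pointer
+  }
+
+  int main() {
+      MechanismData m{{-65.0, 0.0, 50.0}};  // made-up layout: v, ina, ena
+      IntegerVariable ena_index = 2;        // the "pointer" is just an index
+      std::cout << read(m, ena_index) << '\n';  // prints 50
+  }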
+ +In NEURON these pointers are a ``data_handle`` to the value they point to. +Before the simulation phase they are "resolved" and a cache stores a list of +``double*`` to the appropriate values. + diff --git a/docs/images/ion_storage-opt.svg b/docs/images/ion_storage-opt.svg new file mode 100644 index 0000000000..d933b4eaab --- /dev/null +++ b/docs/images/ion_storage-opt.svg @@ -0,0 +1,516 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + hh + ina + ena + + + + + + + + + + + + + + + + + ion_ina + ion_ena + nrn_init_hh + + no-op + +nrn_cur_hh + + ions.ena = *ion_ena[i] + ions.ina = f(*ion_ena[i], v+dv) + ions.ina = f(*ion_ena[i], v) + + *ion_dinadv[i] += f(ions) + *ion_ina[i] += ions.ina + +nrn_state_hh + + no-op + + + + + + + + + Allocated, but not used + + + + RANGE variable + Pointer to RANGE + + + diff --git a/docs/images/ion_storage.svg b/docs/images/ion_storage.svg new file mode 100644 index 0000000000..28af536e69 --- /dev/null +++ b/docs/images/ion_storage.svg @@ -0,0 +1,829 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 0: ena + 1: nai + 2: nao + 3: ina + 4: dinadv + hh + ina + ena + ion_ina + ion_ena + nrn_init_hh + + ena[i] = *ion_ena[i] + +nrn_cur_hh + + ena[i] = *ion_ena[i] + ina[i] = f(*ion_ena[i], v+dv) + ina[i] = f(*ion_ena[i], v) + + *ion_dinadv[i] += f(ina[i]) + *ion_ina[i] += ions.ina + +nrn_state_hh + + ena[i] = *ion_ena[i] + + + + + + + + + RANGE variable + Pointer to RANGE + + na + + + diff --git a/docs/index.rst b/docs/index.rst index 4a494781f7..f923c6723b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,6 +19,8 @@ About NMODL :caption: Contents: contents/visitors + contents/ions + contents/pointers .. toctree:: :maxdepth: 3 diff --git a/src/codegen/codegen_acc_visitor.cpp b/src/codegen/codegen_acc_visitor.cpp index 68ff009471..20624b6df7 100644 --- a/src/codegen/codegen_acc_visitor.cpp +++ b/src/codegen/codegen_acc_visitor.cpp @@ -62,17 +62,8 @@ void CodegenAccVisitor::print_atomic_reduction_pragma() { void CodegenAccVisitor::print_backend_includes() { - /** - * Artificial cells are executed on CPU. As Random123 is allocated on GPU by default, - * we have to disable GPU allocations using `DISABLE_OPENACC` macro. - */ - if (info.artificial_cell) { - printer->add_line("#undef DISABLE_OPENACC"); - printer->add_line("#define DISABLE_OPENACC"); - } else { - printer->add_line("#include "); - printer->add_line("#include "); - } + printer->add_line("#include "); + printer->add_line("#include "); } diff --git a/src/codegen/codegen_coreneuron_cpp_visitor.cpp b/src/codegen/codegen_coreneuron_cpp_visitor.cpp index 08ec11a658..ed919fa887 100644 --- a/src/codegen/codegen_coreneuron_cpp_visitor.cpp +++ b/src/codegen/codegen_coreneuron_cpp_visitor.cpp @@ -87,140 +87,6 @@ int CodegenCoreneuronCppVisitor::position_of_int_var(const std::string& name) co } -/** - * \details Current variable used in breakpoint block could be local variable. - * In this case, neuron has already renamed the variable name by prepending - * "_l". In our implementation, the variable could have been renamed by - * one of the pass. And hence, we search all local variables and check if - * the variable is renamed. Note that we have to look into the symbol table - * of statement block and not breakpoint. 
- */ -std::string CodegenCoreneuronCppVisitor::breakpoint_current(std::string current) const { - auto breakpoint = info.breakpoint_node; - if (breakpoint == nullptr) { - return current; - } - auto symtab = breakpoint->get_statement_block()->get_symbol_table(); - auto variables = symtab->get_variables_with_properties(NmodlType::local_var); - for (const auto& var: variables) { - auto renamed_name = var->get_name(); - auto original_name = var->get_original_name(); - if (current == original_name) { - current = renamed_name; - break; - } - } - return current; -} - - -/** - * \details Depending upon the block type, we have to print read/write ion variables - * during code generation. Depending on block/procedure being printed, this - * method return statements as vector. As different code backends could have - * different variable names, we rely on backend-specific read_ion_variable_name - * and write_ion_variable_name method which will be overloaded. - */ -std::vector CodegenCoreneuronCppVisitor::ion_read_statements(BlockType type) const { - if (optimize_ion_variable_copies()) { - return ion_read_statements_optimized(type); - } - std::vector statements; - for (const auto& ion: info.ions) { - auto name = ion.name; - for (const auto& var: ion.reads) { - auto const iter = std::find(ion.implicit_reads.begin(), ion.implicit_reads.end(), var); - if (iter != ion.implicit_reads.end()) { - continue; - } - auto variable_names = read_ion_variable_name(var); - auto first = get_variable_name(variable_names.first); - auto second = get_variable_name(variable_names.second); - statements.push_back(fmt::format("{} = {};", first, second)); - } - for (const auto& var: ion.writes) { - if (ion.is_ionic_conc(var)) { - auto variables = read_ion_variable_name(var); - auto first = get_variable_name(variables.first); - auto second = get_variable_name(variables.second); - statements.push_back(fmt::format("{} = {};", first, second)); - } - } - } - return statements; -} - - -std::vector CodegenCoreneuronCppVisitor::ion_read_statements_optimized( - BlockType type) const { - std::vector statements; - for (const auto& ion: info.ions) { - for (const auto& var: ion.writes) { - if (ion.is_ionic_conc(var)) { - auto variables = read_ion_variable_name(var); - auto first = "ionvar." 
+ variables.first; - const auto& second = get_variable_name(variables.second); - statements.push_back(fmt::format("{} = {};", first, second)); - } - } - } - return statements; -} - -// NOLINTNEXTLINE(readability-function-cognitive-complexity) -std::vector CodegenCoreneuronCppVisitor::ion_write_statements(BlockType type) { - std::vector statements; - for (const auto& ion: info.ions) { - std::string concentration; - auto name = ion.name; - for (const auto& var: ion.writes) { - auto variable_names = write_ion_variable_name(var); - if (ion.is_ionic_current(var)) { - if (type == BlockType::Equation) { - auto current = breakpoint_current(var); - auto lhs = variable_names.first; - auto op = "+="; - auto rhs = get_variable_name(current); - if (info.point_process) { - auto area = get_variable_name(naming::NODE_AREA_VARIABLE); - rhs += fmt::format("*(1.e2/{})", area); - } - statements.push_back(ShadowUseStatement{lhs, op, rhs}); - } - } else { - if (!ion.is_rev_potential(var)) { - concentration = var; - } - auto lhs = variable_names.first; - auto op = "="; - auto rhs = get_variable_name(variable_names.second); - statements.push_back(ShadowUseStatement{lhs, op, rhs}); - } - } - - if (type == BlockType::Initial && !concentration.empty()) { - int index = 0; - if (ion.is_intra_cell_conc(concentration)) { - index = 1; - } else if (ion.is_extra_cell_conc(concentration)) { - index = 2; - } else { - /// \todo Unhandled case in neuron implementation - throw std::logic_error(fmt::format("codegen error for {} ion", ion.name)); - } - auto ion_type_name = fmt::format("{}_type", ion.name); - auto lhs = fmt::format("int {}", ion_type_name); - auto op = "="; - auto rhs = get_variable_name(ion_type_name); - statements.push_back(ShadowUseStatement{lhs, op, rhs}); - auto statement = conc_write_statement(ion.name, concentration, index); - statements.push_back(ShadowUseStatement{statement, "", ""}); - } - } - return statements; -} - - /** * \details Often top level verbatim blocks use variables with old names. * Here we process if we are processing verbatim block at global scope. @@ -256,11 +122,6 @@ std::string CodegenCoreneuronCppVisitor::process_verbatim_token(const std::strin } -bool CodegenCoreneuronCppVisitor::ion_variable_struct_required() const { - return optimize_ion_variable_copies() && info.ion_has_write_variable(); -} - - /** * \details This can be override in the backend. For example, parameters can be constant * except in INITIAL block where they are set to 0. As initial block is/can be @@ -1189,18 +1050,6 @@ std::string CodegenCoreneuronCppVisitor::register_mechanism_arguments() const { } -std::pair CodegenCoreneuronCppVisitor::read_ion_variable_name( - const std::string& name) { - return {name, naming::ION_VARNAME_PREFIX + name}; -} - - -std::pair CodegenCoreneuronCppVisitor::write_ion_variable_name( - const std::string& name) { - return {naming::ION_VARNAME_PREFIX + name, name}; -} - - std::string CodegenCoreneuronCppVisitor::conc_write_statement(const std::string& ion_name, const std::string& concentration, int index) { @@ -1223,28 +1072,6 @@ std::string CodegenCoreneuronCppVisitor::conc_write_statement(const std::string& } -/** - * If mechanisms dependency level execution is enabled then certain updates - * like ionic current contributions needs to be atomically updated. In this - * case we first update current mechanism's shadow vector and then add statement - * to queue that will be used in reduction queue. 
- */ -std::string CodegenCoreneuronCppVisitor::process_shadow_update_statement( - const ShadowUseStatement& statement, - BlockType /* type */) { - // when there is no operator or rhs then that statement doesn't need shadow update - if (statement.op.empty() && statement.rhs.empty()) { - auto text = statement.lhs + ";"; - return text; - } - - // return regular statement - auto lhs = get_variable_name(statement.lhs); - auto text = fmt::format("{} {} {};", lhs, statement.op, statement.rhs); - return text; -} - - /****************************************************************************************/ /* Code-specific printing routines for code generation */ /****************************************************************************************/ @@ -1259,6 +1086,15 @@ void CodegenCoreneuronCppVisitor::print_first_pointer_var_index_getter() { } +void CodegenCoreneuronCppVisitor::print_first_random_var_index_getter() { + printer->add_newline(2); + print_device_method_annotation(); + printer->push_block("static inline int first_random_var_index()"); + printer->fmt_line("return {};", info.first_random_var_index); + printer->pop_block(); +} + + void CodegenCoreneuronCppVisitor::print_num_variable_getter() { printer->add_newline(2); print_device_method_annotation(); @@ -1435,24 +1271,6 @@ std::string CodegenCoreneuronCppVisitor::global_variable_name(const SymbolType& } -std::string CodegenCoreneuronCppVisitor::update_if_ion_variable_name( - const std::string& name) const { - std::string result(name); - if (ion_variable_struct_required()) { - if (info.is_ion_read_variable(name)) { - result = naming::ION_VARNAME_PREFIX + name; - } - if (info.is_ion_write_variable(name)) { - result = "ionvar." + name; - } - if (info.is_current(name)) { - result = "ionvar." + name; - } - } - return result; -} - - std::string CodegenCoreneuronCppVisitor::get_variable_name(const std::string& name, bool use_instance) const { const std::string& varname = update_if_ion_variable_name(name); @@ -2293,6 +2111,19 @@ void CodegenCoreneuronCppVisitor::print_instance_variable_setup() { printer->fmt_push_block("static void {}(NrnThread* nt, Memb_list* ml, int type)", method_name(naming::NRN_PRIVATE_DESTRUCTOR_METHOD)); cast_inst_and_assert_validity(); + + // delete random streams + if (info.random_variables.size()) { + printer->add_line("int pnodecount = ml->_nodecount_padded;"); + printer->add_line("int nodecount = ml->nodecount;"); + printer->add_line("Datum* indexes = ml->pdata;"); + printer->push_block("for (int id = 0; id < nodecount; id++)"); + for (const auto& var: info.random_variables) { + const auto& name = get_variable_name(var->get_name()); + printer->fmt_line("nrnran123_deletestream((nrnran123_State*){});", name); + } + printer->pop_block(); + } print_instance_struct_delete_from_device(); printer->add_multi_line(R"CODE( delete inst; @@ -3561,6 +3392,7 @@ void CodegenCoreneuronCppVisitor::print_namespace_end() { void CodegenCoreneuronCppVisitor::print_common_getters() { print_first_pointer_var_index_getter(); + print_first_random_var_index_getter(); print_net_receive_arg_size_getter(); print_thread_getters(); print_num_variable_getter(); diff --git a/src/codegen/codegen_coreneuron_cpp_visitor.hpp b/src/codegen/codegen_coreneuron_cpp_visitor.hpp index f09c5bf9c2..9c43a4cbe0 100644 --- a/src/codegen/codegen_coreneuron_cpp_visitor.hpp +++ b/src/codegen/codegen_coreneuron_cpp_visitor.hpp @@ -112,42 +112,6 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { int position_of_int_var(const std::string& name) const 
override; - /** - * Determine the variable name for the "current" used in breakpoint block taking into account - * intermediate code transformations. - * \param current The variable name for the current used in the model - * \return The name for the current to be printed in C++ - */ - std::string breakpoint_current(std::string current) const; - - - /** - * For a given output block type, return statements for all read ion variables - * - * \param type The type of code block being generated - * \return A \c vector of strings representing the reading of ion variables - */ - std::vector ion_read_statements(BlockType type) const; - - - /** - * For a given output block type, return minimal statements for all read ion variables - * - * \param type The type of code block being generated - * \return A \c vector of strings representing the reading of ion variables - */ - std::vector ion_read_statements_optimized(BlockType type) const; - - - /** - * For a given output block type, return statements for writing back ion variables - * - * \param type The type of code block being generated - * \return A \c vector of strings representing the write-back of ion variables - */ - std::vector ion_write_statements(BlockType type); - - /** * Process a token in a verbatim block for possible variable renaming * \param token The verbatim token to be processed @@ -156,13 +120,6 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { std::string process_verbatim_token(const std::string& token); - /** - * Check if a structure for ion variables is required - * \return \c true if a structure fot ion variables must be generated - */ - bool ion_variable_struct_required() const; - - /** * Check if variable is qualified as constant * \param name The name of variable @@ -331,7 +288,7 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { /** * Check if ion variable copies should be avoided */ - bool optimize_ion_variable_copies() const; + bool optimize_ion_variable_copies() const override; /** @@ -552,22 +509,6 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { std::string register_mechanism_arguments() const override; - /** - * Return ion variable name and corresponding ion read variable name - * \param name The ion variable name - * \return The ion read variable name - */ - static std::pair read_ion_variable_name(const std::string& name); - - - /** - * Return ion variable name and corresponding ion write variable name - * \param name The ion variable name - * \return The ion write variable name - */ - static std::pair write_ion_variable_name(const std::string& name); - - /** * Generate Function call statement for nrn_wrote_conc * \param ion_name The name of the ion variable @@ -577,21 +518,7 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { */ std::string conc_write_statement(const std::string& ion_name, const std::string& concentration, - int index); - - /** - * Process shadow update statement - * - * If the statement requires reduction then add it to vector of reduction statement and return - * statement using shadow update - * - * \param statement The statement that might require shadow updates - * \param type The target backend code block type - * \return The generated target backend code - */ - std::string process_shadow_update_statement(const ShadowUseStatement& statement, - BlockType type); - + int index) override; /****************************************************************************************/ /* Code-specific printing routines for code generations */ @@ 
-605,6 +532,13 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { void print_first_pointer_var_index_getter(); + /** + * Print the getter method for index position of first RANDOM variable + * + */ + void print_first_random_var_index_getter(); + + /** * Print the getter methods for float and integer variables count * @@ -657,14 +591,6 @@ class CodegenCoreneuronCppVisitor: public CodegenCppVisitor { /****************************************************************************************/ - /** - * Determine the updated name if the ion variable has been optimized - * \param name The ion variable name - * \return The updated name of the variable has been optimized (e.g. \c ena --> \c ion_ena) - */ - std::string update_if_ion_variable_name(const std::string& name) const; - - /** * Determine the name of a \c float variable given its symbol * diff --git a/src/codegen/codegen_cpp_visitor.cpp b/src/codegen/codegen_cpp_visitor.cpp index 4fbf7e6fe2..e22abc221c 100644 --- a/src/codegen/codegen_cpp_visitor.cpp +++ b/src/codegen/codegen_cpp_visitor.cpp @@ -10,6 +10,7 @@ #include "codegen/codegen_helper_visitor.hpp" #include "codegen/codegen_utils.hpp" #include "visitors/rename_visitor.hpp" +#include "visitors/visitor_utils.hpp" namespace nmodl { namespace codegen { @@ -25,6 +26,10 @@ using symtab::syminfo::NmodlType; /* Common helper routines accross codegen functions */ /****************************************************************************************/ +bool CodegenCppVisitor::ion_variable_struct_required() const { + return optimize_ion_variable_copies() && info.ion_has_write_variable(); +} + std::string CodegenCppVisitor::get_parameter_str(const ParamVector& params) { std::string str; @@ -145,7 +150,6 @@ bool CodegenCppVisitor::defined_method(const std::string& name) const { return function && function->has_any_property(properties); } - int CodegenCppVisitor::float_variables_size() const { return codegen_float_variables.size(); } @@ -209,6 +213,191 @@ bool CodegenCppVisitor::need_semicolon(const Statement& node) { return true; } +/** + * \details Depending upon the block type, we have to print read/write ion variables + * during code generation. Depending on block/procedure being printed, this + * method return statements as vector. As different code backends could have + * different variable names, we rely on backend-specific read_ion_variable_name + * and write_ion_variable_name method which will be overloaded. 
+ */ +std::vector CodegenCppVisitor::ion_read_statements(BlockType type) const { + if (optimize_ion_variable_copies()) { + return ion_read_statements_optimized(type); + } + std::vector statements; + for (const auto& ion: info.ions) { + auto name = ion.name; + for (const auto& var: ion.reads) { + auto const iter = std::find(ion.implicit_reads.begin(), ion.implicit_reads.end(), var); + if (iter != ion.implicit_reads.end()) { + continue; + } + auto variable_names = read_ion_variable_name(var); + auto first = get_variable_name(variable_names.first); + auto second = get_variable_name(variable_names.second); + statements.push_back(fmt::format("{} = {};", first, second)); + } + for (const auto& var: ion.writes) { + if (ion.is_ionic_conc(var)) { + auto variables = read_ion_variable_name(var); + auto first = get_variable_name(variables.first); + auto second = get_variable_name(variables.second); + statements.push_back(fmt::format("{} = {};", first, second)); + } + } + } + return statements; +} + + +std::vector CodegenCppVisitor::ion_read_statements_optimized(BlockType type) const { + std::vector statements; + for (const auto& ion: info.ions) { + for (const auto& var: ion.writes) { + if (ion.is_ionic_conc(var)) { + auto variables = read_ion_variable_name(var); + auto first = "ionvar." + variables.first; + const auto& second = get_variable_name(variables.second); + statements.push_back(fmt::format("{} = {};", first, second)); + } + } + } + return statements; +} + +// NOLINTNEXTLINE(readability-function-cognitive-complexity) +std::vector CodegenCppVisitor::ion_write_statements(BlockType type) { + std::vector statements; + for (const auto& ion: info.ions) { + std::string concentration; + auto name = ion.name; + for (const auto& var: ion.writes) { + auto variable_names = write_ion_variable_name(var); + if (ion.is_ionic_current(var)) { + if (type == BlockType::Equation) { + auto current = breakpoint_current(var); + auto lhs = variable_names.first; + auto op = "+="; + auto rhs = get_variable_name(current); + if (info.point_process) { + auto area = get_variable_name(naming::NODE_AREA_VARIABLE); + rhs += fmt::format("*(1.e2/{})", area); + } + statements.push_back(ShadowUseStatement{lhs, op, rhs}); + } + } else { + if (!ion.is_rev_potential(var)) { + concentration = var; + } + auto lhs = variable_names.first; + auto op = "="; + auto rhs = get_variable_name(variable_names.second); + statements.push_back(ShadowUseStatement{lhs, op, rhs}); + } + } + + if (type == BlockType::Initial && !concentration.empty()) { + int index = 0; + if (ion.is_intra_cell_conc(concentration)) { + index = 1; + } else if (ion.is_extra_cell_conc(concentration)) { + index = 2; + } else { + /// \todo Unhandled case in neuron implementation + throw std::logic_error(fmt::format("codegen error for {} ion", ion.name)); + } + auto ion_type_name = fmt::format("{}_type", ion.name); + auto lhs = fmt::format("int {}", ion_type_name); + auto op = "="; + auto rhs = get_variable_name(ion_type_name); + statements.push_back(ShadowUseStatement{lhs, op, rhs}); + auto statement = conc_write_statement(ion.name, concentration, index); + statements.push_back(ShadowUseStatement{statement, "", ""}); + } + } + return statements; +} + +/** + * If mechanisms dependency level execution is enabled then certain updates + * like ionic current contributions needs to be atomically updated. In this + * case we first update current mechanism's shadow vector and then add statement + * to queue that will be used in reduction queue. 
+ */ +std::string CodegenCppVisitor::process_shadow_update_statement(const ShadowUseStatement& statement, + BlockType /* type */) { + // when there is no operator or rhs then that statement doesn't need shadow update + if (statement.op.empty() && statement.rhs.empty()) { + auto text = statement.lhs + ";"; + return text; + } + + // return regular statement + auto lhs = get_variable_name(statement.lhs); + auto text = fmt::format("{} {} {};", lhs, statement.op, statement.rhs); + return text; +} + + +/** + * \details Current variable used in breakpoint block could be local variable. + * In this case, neuron has already renamed the variable name by prepending + * "_l". In our implementation, the variable could have been renamed by + * one of the pass. And hence, we search all local variables and check if + * the variable is renamed. Note that we have to look into the symbol table + * of statement block and not breakpoint. + */ +std::string CodegenCppVisitor::breakpoint_current(std::string current) const { + auto breakpoint = info.breakpoint_node; + if (breakpoint == nullptr) { + return current; + } + auto symtab = breakpoint->get_statement_block()->get_symbol_table(); + auto variables = symtab->get_variables_with_properties(NmodlType::local_var); + for (const auto& var: variables) { + auto renamed_name = var->get_name(); + auto original_name = var->get_original_name(); + if (current == original_name) { + current = renamed_name; + break; + } + } + return current; +} + + +/****************************************************************************************/ +/* Routines for returning variable name */ +/****************************************************************************************/ + +std::string CodegenCppVisitor::update_if_ion_variable_name(const std::string& name) const { + std::string result(name); + if (ion_variable_struct_required()) { + if (info.is_ion_read_variable(name)) { + result = naming::ION_VARNAME_PREFIX + name; + } + if (info.is_ion_write_variable(name)) { + result = "ionvar." + name; + } + if (info.is_current(name)) { + result = "ionvar." + name; + } + } + return result; +} + + +std::pair CodegenCppVisitor::read_ion_variable_name( + const std::string& name) { + return {name, naming::ION_VARNAME_PREFIX + name}; +} + + +std::pair CodegenCppVisitor::write_ion_variable_name( + const std::string& name) { + return {naming::ION_VARNAME_PREFIX + name, name}; +} + /****************************************************************************************/ /* Main printing routines for code generation */ @@ -235,7 +424,20 @@ void CodegenCppVisitor::print_global_var_struct_decl() { void CodegenCppVisitor::print_function_call(const FunctionCall& node) { const auto& name = node.get_node_name(); - auto function_name = name; + + // return C++ function name for RANDOM construct function + // e.g. nrnran123_negexp for random_negexp + auto get_renamed_random_function = + [&](const std::string& name) -> std::pair { + if (codegen::naming::RANDOM_FUNCTIONS_MAPPING.count(name)) { + return {codegen::naming::RANDOM_FUNCTIONS_MAPPING[name], true}; + } + return {name, false}; + }; + std::string function_name; + bool is_random_function; + std::tie(function_name, is_random_function) = get_renamed_random_function(name); + if (defined_method(name)) { function_name = method_name(name); } @@ -265,6 +467,12 @@ void CodegenCppVisitor::print_function_call(const FunctionCall& node) { } } + // first argument to random functions need to be type casted + // from void* to nrnran123_State*. 
+ if (is_random_function && !arguments.empty()) { + printer->add_text("(nrnran123_State*)"); + } + print_vector_elements(arguments, ", "); printer->add_text(')'); } @@ -684,6 +892,15 @@ void CodegenCppVisitor::update_index_semantics() { index += size; } + for (auto& var: info.random_variables) { + if (info.first_random_var_index == -1) { + info.first_random_var_index = index; + } + int size = var->get_length(); + info.semantics.emplace_back(index, naming::RANDOM_SEMANTIC, size); + index += size; + } + if (info.diam_used) { info.semantics.emplace_back(index++, naming::DIAM_VARIABLE, 1); } @@ -863,6 +1080,12 @@ std::vector CodegenCppVisitor::get_int_variables() { } } + for (const auto& var: info.random_variables) { + auto name = var->get_name(); + variables.emplace_back(make_symbol(name), true); + variables.back().symbol->add_properties(NmodlType::random_var); + } + if (info.diam_used) { variables.emplace_back(make_symbol(naming::DIAM_VARIABLE)); } diff --git a/src/codegen/codegen_cpp_visitor.hpp b/src/codegen/codegen_cpp_visitor.hpp index 549519d396..b3d5511da3 100644 --- a/src/codegen/codegen_cpp_visitor.hpp +++ b/src/codegen/codegen_cpp_visitor.hpp @@ -406,6 +406,12 @@ class CodegenCppVisitor: public visitor::ConstAstVisitor { /* Common helper routines accross codegen functions */ /****************************************************************************************/ + /** + * Check if a structure for ion variables is required + * \return \c true if a structure fot ion variables must be generated + */ + bool ion_variable_struct_required() const; + /** * Generate the string representing the procedure parameter declaration @@ -590,6 +596,56 @@ class CodegenCppVisitor: public visitor::ConstAstVisitor { std::vector get_int_variables(); + /** + * For a given output block type, return statements for all read ion variables + * + * \param type The type of code block being generated + * \return A \c vector of strings representing the reading of ion variables + */ + std::vector ion_read_statements(BlockType type) const; + + + /** + * For a given output block type, return minimal statements for all read ion variables + * + * \param type The type of code block being generated + * \return A \c vector of strings representing the reading of ion variables + */ + std::vector ion_read_statements_optimized(BlockType type) const; + + + /** + * For a given output block type, return statements for writing back ion variables + * + * \param type The type of code block being generated + * \return A \c vector of strings representing the write-back of ion variables + */ + std::vector ion_write_statements(BlockType type); + + + /** + * Process shadow update statement + * + * If the statement requires reduction then add it to vector of reduction statement and return + * statement using shadow update + * + * \param statement The statement that might require shadow updates + * \param type The target backend code block type + * \return The generated target backend code + */ + std::string process_shadow_update_statement(const ShadowUseStatement& statement, + BlockType type); + + + /** + * Determine the variable name for the "current" used in breakpoint block taking into account + * intermediate code transformations. 
+ * \param current The variable name for the current used in the model + * \return The name for the current to be printed in C++ + */ + std::string breakpoint_current(std::string current) const; + + /****************************************************************************************/ /* Backend specific routines */ /****************************************************************************************/ @@ -609,6 +665,10 @@ class CodegenCppVisitor: public visitor::ConstAstVisitor { */ virtual void print_global_var_struct_decl(); + /** + * Check if ion variable copies should be avoided + */ + virtual bool optimize_ion_variable_copies() const = 0; /****************************************************************************************/ /* Printing routines for code generation */ @@ -808,6 +868,16 @@ class CodegenCppVisitor: public visitor::ConstAstVisitor { return std::make_shared(name, ModToken()); } + /** + * Generate Function call statement for nrn_wrote_conc + * \param ion_name The name of the ion variable + * \param concentration The name of the concentration variable + * \param index + * \return The string representing the function call + */ + virtual std::string conc_write_statement(const std::string& ion_name, + const std::string& concentration, + int index) = 0; /****************************************************************************************/ /* Code-specific printing routines for code generations */ @@ -830,6 +900,13 @@ class CodegenCppVisitor: public visitor::ConstAstVisitor { /* Routines for returning variable name */ /****************************************************************************************/ + /** + * Determine the updated name if the ion variable has been optimized + * \param name The ion variable name + * \return The updated name of the variable has been optimized (e.g. \c ena --> \c ion_ena) + */ + std::string update_if_ion_variable_name(const std::string& name) const; + /** * Determine the name of a \c float variable given its symbol @@ -887,6 +964,30 @@ class CodegenCppVisitor: public visitor::ConstAstVisitor { bool use_instance = true) const = 0; + /** + * Return ion variable name and corresponding ion read variable name. 
+ * + * Example: + * {"ena", "ion_ena"} = read_ion_variable_name("ena"); + * + * \param name The ion variable name + * \return The ion read variable name + */ + static std::pair read_ion_variable_name(const std::string& name); + + + /** + * Return ion variable name and corresponding ion write variable name + * + * Example: + * {"ion_ena", "ena"} = write_ion_variable_name("ena"); + * + * \param name The ion variable name + * \return The ion write variable name + */ + static std::pair write_ion_variable_name(const std::string& name); + + /****************************************************************************************/ /* Main printing routines for code generation */ /****************************************************************************************/ diff --git a/src/codegen/codegen_helper_visitor.cpp b/src/codegen/codegen_helper_visitor.cpp index fa9e4438b6..72dc2508f2 100644 --- a/src/codegen/codegen_helper_visitor.cpp +++ b/src/codegen/codegen_helper_visitor.cpp @@ -269,6 +269,12 @@ void CodegenHelperVisitor::find_non_range_variables() { // clang-format on info.pointer_variables = psymtab->get_variables_with_properties(properties); + /// find RANDOM variables + // clang-format off + properties = NmodlType::random_var; + // clang-format on + info.random_variables = psymtab->get_variables_with_properties(properties); + // find special variables like diam, area // clang-format off properties = NmodlType::assigned_definition diff --git a/src/codegen/codegen_info.hpp b/src/codegen/codegen_info.hpp index c8e25e2e39..2f626e031b 100644 --- a/src/codegen/codegen_info.hpp +++ b/src/codegen/codegen_info.hpp @@ -12,6 +12,7 @@ * \brief Various types to store code generation specific information */ +#include #include #include #include @@ -20,6 +21,7 @@ #include "ast/ast.hpp" #include "symtab/symbol_table.hpp" + namespace nmodl { namespace codegen { @@ -109,12 +111,55 @@ struct Ion { return is_intra_cell_conc(text) || is_extra_cell_conc(text); } + /// Is the variable name `text` related to this ion? + /// + /// Example: For sodium this is true for any of `"ena"`, `"ina"`, `"nai"` + /// and `"nao"`; but not `ion_ina`, etc. + bool is_ionic_variable(const std::string& text) const { + return is_ionic_conc(text) || is_ionic_current(text) || is_rev_potential(text); + } + + bool is_current_derivative(const std::string& text) const { + return text == ("di" + name + "dv"); + } + /// for a given ion, return different variable names/properties /// like internal/external concentration, reversial potential, /// ionic current etc. static std::vector get_possible_variables(const std::string& ion_name) { return {"i" + ion_name, ion_name + "i", ion_name + "o", "e" + ion_name}; } + + /// Variable index in the ion mechanism. + /// + /// For sodium (na), the `var_name` must be one of `ina`, `ena`, `nai`, + /// `nao` or `dinadv`. Replace `na` with the analogous for other ions. 
+ /// + /// In NRN the order is: + /// 0: ena + /// 1: nai + /// 2: nao + /// 3: ina + /// 4: dinadv + int variable_index(const std::string& var_name) const { + if (is_rev_potential(var_name)) { + return 0; + } + if (is_intra_cell_conc(var_name)) { + return 1; + } + if (is_extra_cell_conc(var_name)) { + return 2; + } + if (is_ionic_current(var_name)) { + return 3; + } + if (is_current_derivative(var_name)) { + return 4; + } + + throw std::runtime_error(fmt::format("Invalid `var_name == {}`.", var_name)); + } }; @@ -327,9 +372,15 @@ struct CodegenInfo { /// pointer or bbcore pointer variables std::vector pointer_variables; + /// RANDOM variables + std::vector random_variables; + /// index/offset for first pointer variable if exist int first_pointer_var_index = -1; + /// index/offset for first RANDOM variable if exist + int first_random_var_index = -1; + /// tqitem index in integer variables /// note that if tqitem doesn't exist then the default value should be 0 int tqitem_index = 0; diff --git a/src/codegen/codegen_naming.hpp b/src/codegen/codegen_naming.hpp index e8240b7df2..78f2b25ead 100644 --- a/src/codegen/codegen_naming.hpp +++ b/src/codegen/codegen_naming.hpp @@ -7,8 +7,8 @@ #pragma once -#include #include +#include namespace nmodl { @@ -119,6 +119,9 @@ static constexpr char POINTER_SEMANTIC[] = "pointer"; /// semantic type for core pointer variable static constexpr char CORE_POINTER_SEMANTIC[] = "bbcorepointer"; +/// semantic type for RANDOM variable +static constexpr char RANDOM_SEMANTIC[] = "random"; + /// semantic type for net send call static constexpr char NET_SEND_SEMANTIC[] = "netsend"; @@ -174,7 +177,7 @@ static constexpr char NRN_POINTERINDEX[] = "hoc_nrnpointerindex"; /// commonly used variables in verbatim block and how they /// should be mapped to new code generation backends // clang-format off - const std::map VERBATIM_VARIABLES_MAPPING{ + static const std::unordered_map VERBATIM_VARIABLES_MAPPING{ {"_nt", "nt"}, {"_p", "data"}, {"_ppvar", "indexes"}, @@ -183,8 +186,18 @@ static constexpr char NRN_POINTERINDEX[] = "hoc_nrnpointerindex"; {"_cntml_padded", "pnodecount"}, {"_cntml", "nodecount"}, {"_tqitem", "tqitem"}}; -// clang-format on + // Functions available in NMODL with RANDOM construct and their mapping to + // C++ functions for Random123 interface. 
+ static std::unordered_map RANDOM_FUNCTIONS_MAPPING{ + {"random_setseq", "nrnran123_setseq"}, + {"random_setids", "nrnran123_setids"}, + {"random_uniform", "nrnran123_uniform"}, + {"random_negexp", "nrnran123_negexp"}, + {"random_normal", "nrnran123_normal"}, + {"random_ipick", "nrnran123_ipick"}, + {"random_dpick", "nrnran123_dblpick"}}; +// clang-format on } // namespace naming } // namespace codegen } // namespace nmodl diff --git a/src/codegen/codegen_neuron_cpp_visitor.cpp b/src/codegen/codegen_neuron_cpp_visitor.cpp index 4f108e8414..e734550b64 100644 --- a/src/codegen/codegen_neuron_cpp_visitor.cpp +++ b/src/codegen/codegen_neuron_cpp_visitor.cpp @@ -83,6 +83,13 @@ void CodegenNeuronCppVisitor::print_atomic_reduction_pragma() { return; } +bool CodegenNeuronCppVisitor::optimize_ion_variable_copies() const { + if (optimize_ionvar_copies) { + throw std::runtime_error("Not implemented."); + } + return false; +} + /****************************************************************************************/ /* Printing routines for code generation */ @@ -279,6 +286,13 @@ void CodegenNeuronCppVisitor::print_namespace_stop() { } +std::string CodegenNeuronCppVisitor::conc_write_statement(const std::string& ion_name, + const std::string& concentration, + int index) { + // throw std::runtime_error("Not implemented."); + return ""; +} + /****************************************************************************************/ /* Routines for returning variable name */ /****************************************************************************************/ @@ -309,7 +323,30 @@ std::string CodegenNeuronCppVisitor::float_variable_name(const SymbolType& symbo std::string CodegenNeuronCppVisitor::int_variable_name(const IndexVariableInfo& symbol, const std::string& name, bool use_instance) const { - return name; + auto position = position_of_int_var(name); + if (symbol.is_index) { + if (use_instance) { + throw std::runtime_error("Not implemented. [wiejo]"); + // return fmt::format("inst->{}[{}]", name, position); + } + throw std::runtime_error("Not implemented. [ncuwi]"); + // return fmt::format("indexes[{}]", position); + } + if (symbol.is_integer) { + if (use_instance) { + throw std::runtime_error("Not implemented. [cnuoe]"); + // return fmt::format("inst->{}[{}*pnodecount+id]", name, position); + } + throw std::runtime_error("Not implemented. [u32ow]"); + // return fmt::format("indexes[{}*pnodecount+id]", position); + } + if (use_instance) { + return fmt::format("(*inst.{}[id])", name); + } + + throw std::runtime_error("Not implemented. [nvueir]"); + // auto data = symbol.is_vdata ? 
"_vdata" : "_data"; + // return fmt::format("nt->{}[indexes[{}*pnodecount + id]]", data, position); } @@ -325,8 +362,7 @@ std::string CodegenNeuronCppVisitor::global_variable_name(const SymbolType& symb std::string CodegenNeuronCppVisitor::get_variable_name(const std::string& name, bool use_instance) const { - // const std::string& varname = update_if_ion_variable_name(name); - const std::string& varname = name; + const std::string& varname = update_if_ion_variable_name(name); auto symbol_comparator = [&varname](const SymbolType& sym) { return varname == sym->get_name(); @@ -659,43 +695,39 @@ void CodegenNeuronCppVisitor::print_make_instance() const { info.mod_suffix); printer->fmt_push_block("return {}", instance_struct()); + std::vector make_instance_args; + const auto codegen_float_variables_size = codegen_float_variables.size(); - const auto codegen_int_variables_size = codegen_int_variables.size(); for (int i = 0; i < codegen_float_variables_size; ++i) { const auto& float_var = codegen_float_variables[i]; - /// print comma only if there are codegen_int_variables needed to be printer afterwards - const auto print_comma = - i < codegen_float_variables_size - 1 || - (codegen_int_variables_size > 0 && - codegen_int_variables[0].symbol->get_name() != naming::POINT_PROCESS_VARIABLE) || - (codegen_int_variables_size > 1 && - codegen_int_variables[0].symbol->get_name() == naming::POINT_PROCESS_VARIABLE); if (float_var->is_array()) { - printer->fmt_line("_ml.template data_array<{0}, {1}>(0){2} /* {3} */", - i, - float_var->get_length(), - print_comma ? "," : "", - float_var->get_name()); + make_instance_args.push_back( + fmt::format("_ml.template data_array_ptr<{}, {}>()", i, float_var->get_length())); } else { - printer->fmt_line("&_ml.template fpfield<{0}>(0){1} /* {2} */", - i, - print_comma ? "," : "", - float_var->get_name()); + make_instance_args.push_back(fmt::format("_ml.template fpfield_ptr<{}>()", i)); } } - for (int i = 0; i < codegen_int_variables_size; ++i) { - const auto& int_var_name = codegen_int_variables[i].symbol->get_name(); - if (int_var_name == naming::POINT_PROCESS_VARIABLE) { - continue; + + const auto codegen_int_variables_size = codegen_int_variables.size(); + for (size_t i = 0; i < codegen_int_variables_size; ++i) { + const auto& var = codegen_int_variables[i]; + auto name = var.symbol->get_name(); + auto const variable = [&var, i]() -> std::string { + if (var.is_index || var.is_integer) { + return ""; + } else if (var.is_vdata) { + return ""; + } else { + return fmt::format("_ml.template dptr_field_ptr<{}>()", i); + } + }(); + if (variable != "") { + make_instance_args.push_back(variable); } - const auto print_comma = i < codegen_int_variables_size - 1 && - codegen_int_variables[i + 1].symbol->get_name() != - naming::POINT_PROCESS_VARIABLE; - printer->fmt_line("_ml.template dptr_field<{0}>(0){1} /* {2} */", - i, - print_comma ? 
"," : "", - int_var_name); } + + printer->add_multi_line(fmt::format("{}", fmt::join(make_instance_args, ",\n"))); + printer->pop_block(";"); printer->pop_block(); } @@ -763,66 +795,67 @@ void CodegenNeuronCppVisitor::print_mechanism_register() { /// TODO: More things to add here printer->add_line("_nrn_mechanism_register_data_fields(mech_type,"); printer->increase_indent(); + const auto codegen_float_variables_size = codegen_float_variables.size(); - const auto codegen_int_variables_size = codegen_int_variables.size(); + std::vector mech_register_args; + for (int i = 0; i < codegen_float_variables_size; ++i) { const auto& float_var = codegen_float_variables[i]; - const auto print_comma = i < codegen_float_variables_size - 1 || - codegen_int_variables_size > 0 || info.emit_cvode; if (float_var->is_array()) { - printer->fmt_line( - "_nrn_mechanism_field{{\"{0}\", {1}}}{2} /* float var index {3} */", - float_var->get_name(), - float_var->get_length(), - print_comma ? "," : "", - i); + mech_register_args.push_back( + fmt::format("_nrn_mechanism_field{{\"{}\", {}}} /* {} */", + float_var->get_name(), + float_var->get_length(), + i)); } else { - printer->fmt_line( - "_nrn_mechanism_field{{\"{0}\"}}{1} /* float var index {2} */", - float_var->get_name(), - print_comma ? "," : "", - i); + mech_register_args.push_back(fmt::format( + "_nrn_mechanism_field{{\"{}\"}} /* {} */", float_var->get_name(), i)); } } + + const auto codegen_int_variables_size = codegen_int_variables.size(); for (int i = 0; i < codegen_int_variables_size; ++i) { const auto& int_var = codegen_int_variables[i]; - const auto& int_var_name = int_var.symbol->get_name(); - const auto print_comma = i < codegen_int_variables_size - 1 || info.emit_cvode; - auto nrn_name = int_var_name; - if (nrn_name == naming::NODE_AREA_VARIABLE) { - nrn_name = naming::AREA_VARIABLE; - } else if (nrn_name == naming::POINT_PROCESS_VARIABLE) { - nrn_name = "pntproc"; + const auto& name = int_var.symbol->get_name(); + if (i != info.semantics[i].index) { + throw std::runtime_error("Broken logic."); } - printer->fmt_line( - "_nrn_mechanism_field<{0}>{{\"{1}\", \"{2}\"}}{3} /* int var index {4} */", - int_var_name == naming::POINT_PROCESS_VARIABLE ? "Point_process*" : "double*", - int_var_name, - nrn_name, - print_comma ? "," : "", - i); + + auto type = (name == naming::POINT_PROCESS_VARIABLE) ? 
"Point_process*" : "double*"; + mech_register_args.push_back( + fmt::format("_nrn_mechanism_field<{}>{{\"{}\", \"{}\"}} /* {} */", + type, + name, + info.semantics[i].name, + i)); } if (info.emit_cvode) { - printer->add_line("_nrn_mechanism_field{\"_cvode_ieq\", \"cvodeieq\"} /* 0 */"); + mech_register_args.push_back( + "_nrn_mechanism_field{\"_cvode_ieq\", \"cvodeieq\"} /* 0 */"); } + + printer->add_multi_line(fmt::format("{}", fmt::join(mech_register_args, ",\n"))); + printer->decrease_indent(); printer->add_line(");"); printer->add_newline(); printer->fmt_line("hoc_register_prop_size(mech_type, {}, {});", - float_variables_size(), - int_variables_size()); - for (auto i = 0; i < codegen_int_variables.size(); ++i) { + codegen_float_variables_size, + codegen_int_variables_size); + + for (int i = 0; i < codegen_int_variables_size; ++i) { const auto& int_var = codegen_int_variables[i]; - const auto& int_var_name = int_var.symbol->get_name(); - auto nrn_name = int_var_name; - if (nrn_name == naming::NODE_AREA_VARIABLE) { - nrn_name = naming::AREA_VARIABLE; - } else if (nrn_name == naming::POINT_PROCESS_VARIABLE) { - nrn_name = "pntproc"; + const auto& name = int_var.symbol->get_name(); + if (i != info.semantics[i].index) { + throw std::runtime_error("Broken logic."); } - printer->fmt_line("hoc_register_dparam_semantics(mech_type, {}, \"{}\");", i, nrn_name); + + printer->fmt_line("hoc_register_dparam_semantics(mech_type, {}, \"{}\");", + i, + info.semantics[i].name); } + printer->pop_block(); } @@ -852,11 +885,11 @@ void CodegenNeuronCppVisitor::print_mechanism_range_var_structure(bool print_ini continue; } else if (var.is_index || var.is_integer) { auto qualifier = var.is_constant ? "const " : ""; - printer->fmt_line("{}{}* {}{};", qualifier, int_type, name, value_initialize); + printer->fmt_line("{}{}* const* {}{};", qualifier, int_type, name, value_initialize); } else { auto qualifier = var.is_constant ? "const " : ""; auto type = var.is_vdata ? 
"void*" : default_float_data_type(); - printer->fmt_line("{}{}* {}{};", qualifier, type, name, value_initialize); + printer->fmt_line("{}{}* const* {}{};", qualifier, type, name, value_initialize); } } @@ -870,11 +903,24 @@ void CodegenNeuronCppVisitor::print_mechanism_range_var_structure(bool print_ini void CodegenNeuronCppVisitor::print_initial_block(const InitialBlock* node) { + // read ion statements + auto read_statements = ion_read_statements(BlockType::Initial); + for (auto& statement: read_statements) { + printer->add_line(statement); + } + // initial block if (node != nullptr) { const auto& block = node->get_statement_block(); print_statement_block(*block, false, false); } + + // write ion statements + auto write_statements = ion_write_statements(BlockType::Initial); + for (auto& statement: write_statements) { + auto text = process_shadow_update_statement(statement, BlockType::Initial); + printer->add_line(text); + } } @@ -930,12 +976,14 @@ void CodegenNeuronCppVisitor::print_nrn_destructor() { /// TODO: Print the equivalent of `nrn_alloc_` void CodegenNeuronCppVisitor::print_nrn_alloc() { printer->add_newline(2); + auto method = method_name(naming::NRN_ALLOC_METHOD); printer->fmt_push_block("static void {}(Prop* _prop)", method); printer->add_multi_line(R"CODE( Prop *prop_ion{}; Datum *_ppvar{}; )CODE"); + if (info.point_process) { printer->push_block("if (nrn_point_prop_)"); printer->add_multi_line(R"CODE( @@ -954,7 +1002,7 @@ void CodegenNeuronCppVisitor::print_nrn_alloc() { auto* const _ml = &_ml_real; size_t const _iml{}; )CODE"); - printer->fmt_line("assert(_nrn_mechanism_get_num_vars(_prop) == {});", float_variables_size()); + printer->fmt_line("assert(_nrn_mechanism_get_num_vars(_prop) == {});", codegen_float_variables_size()); if (float_variables_size()) { printer->add_line("/*initialize range parameters*/"); for (const auto& var: info.range_parameter_vars) { @@ -972,8 +1020,6 @@ void CodegenNeuronCppVisitor::print_nrn_alloc() { printer->pop_block(); } - printer->fmt_line("assert(_nrn_mechanism_get_num_vars(_prop) == {});", float_variables_size()); - if (info.semantic_variable_count) { printer->add_line("_nrn_mechanism_access_dparam(_prop) = _ppvar;"); } @@ -986,7 +1032,38 @@ void CodegenNeuronCppVisitor::print_nrn_alloc() { throw std::runtime_error("Area allocation not implemented."); } - /// TODO: IONs setup and CONSTRUCTOR call + const auto codegen_int_variables_size = codegen_int_variables.size(); + + // TODO number of datum is the number of integer vars. 
+ printer->fmt_line("Datum *_ppvar = nrn_prop_datum_alloc(mech_type, {}, _prop);", + codegen_int_variables_size); + printer->fmt_line("_nrn_mechanism_access_dparam(_prop) = _ppvar;"); + + for (const auto& ion: info.ions) { + printer->fmt_line("Symbol * {}_sym = hoc_lookup(\"{}_ion\");", ion.name, ion.name); + printer->fmt_line("Prop * {}_prop = need_memb({}_sym);", ion.name, ion.name); + + for (size_t i = 0; i < codegen_int_variables_size; ++i) { + const auto& var = codegen_int_variables[i]; + + // if(var.symbol->has_any_property(NmodlType::useion)) { + const std::string& var_name = var.symbol->get_name(); + if (var_name.rfind("ion_", 0) != 0) { + continue; + } + + std::string ion_var_name = std::string(var_name.begin() + 4, var_name.end()); + if (ion.is_ionic_variable(ion_var_name)) { + printer->fmt_line("_ppvar[{}] = _nrn_mechanism_get_param_handle({}_prop, {});", + i, + ion.name, + ion.variable_index(ion_var_name)); + } + //} + } + } + + /// TODO: CONSTRUCTOR call printer->pop_block(); } @@ -1008,6 +1085,19 @@ void CodegenNeuronCppVisitor::print_nrn_state() { printer->push_block("for (int id = 0; id < nodecount; id++)"); + /** + * \todo Eigen solver node also emits IonCurVar variable in the functor + * but that shouldn't update ions in derivative block + */ + if (ion_variable_struct_required()) { + throw std::runtime_error("Not implemented."); + } + + auto read_statements = ion_read_statements(BlockType::State); + for (auto& statement: read_statements) { + printer->add_line(statement); + } + if (info.nrn_state_block) { info.nrn_state_block->visit_children(*this); } @@ -1017,6 +1107,12 @@ void CodegenNeuronCppVisitor::print_nrn_state() { print_statement_block(*block, false, false); } + const auto& write_statements = ion_write_statements(BlockType::State); + for (auto& statement: write_statements) { + const auto& text = process_shadow_update_statement(statement, BlockType::State); + printer->add_line(text); + } + printer->pop_block(); printer->pop_block(); } diff --git a/src/codegen/codegen_neuron_cpp_visitor.hpp b/src/codegen/codegen_neuron_cpp_visitor.hpp index 2eaab92a3e..c15c36d68e 100644 --- a/src/codegen/codegen_neuron_cpp_visitor.hpp +++ b/src/codegen/codegen_neuron_cpp_visitor.hpp @@ -116,6 +116,11 @@ class CodegenNeuronCppVisitor: public CodegenCppVisitor { virtual void print_atomic_reduction_pragma() override; + /** + * Check if ion variable copies should be avoided + */ + bool optimize_ion_variable_copies() const override; + /****************************************************************************************/ /* Printing routines for code generation */ /****************************************************************************************/ @@ -254,6 +259,9 @@ class CodegenNeuronCppVisitor: public CodegenCppVisitor { */ std::string register_mechanism_arguments() const override; + std::string conc_write_statement(const std::string& ion_name, + const std::string& concentration, + int index) override; /****************************************************************************************/ /* Code-specific printing routines for code generations */ diff --git a/src/language/code_generator.cmake b/src/language/code_generator.cmake index 9d9705edc1..90705ac3df 100644 --- a/src/language/code_generator.cmake +++ b/src/language/code_generator.cmake @@ -139,6 +139,8 @@ set(AST_GENERATED_SOURCES ${PROJECT_BINARY_DIR}/src/ast/procedure_block.hpp ${PROJECT_BINARY_DIR}/src/ast/program.hpp ${PROJECT_BINARY_DIR}/src/ast/protect_statement.hpp + 
${PROJECT_BINARY_DIR}/src/ast/random_var.hpp + ${PROJECT_BINARY_DIR}/src/ast/random_var_list.hpp ${PROJECT_BINARY_DIR}/src/ast/range.hpp ${PROJECT_BINARY_DIR}/src/ast/range_var.hpp ${PROJECT_BINARY_DIR}/src/ast/react_var_name.hpp diff --git a/src/language/nmodl.yaml b/src/language/nmodl.yaml index e2686fd9f8..60919df1b1 100644 --- a/src/language/nmodl.yaml +++ b/src/language/nmodl.yaml @@ -394,6 +394,14 @@ type: Name node_name: true + - RandomVar: + brief: "Single variable of type RANDOM. pointer to a nrnran123_State" + members: + - name: + brief: "Name of the a RANDOM variable" + type: Name + node_name: true + - BbcorePointerVar: members: - name: @@ -415,7 +423,7 @@ - Block: brief: "Base class for all block scoped nodes" description: | - NMODL has different local and global block scoped nodes like + NMODL has different local and globals block scoped nodes like ast::NeuronBlock, ast::ParamBlock, ast::IfStatement etc. Ast::Block is base class and defines common interface for these nodes. @@ -1731,6 +1739,25 @@ separator: ", " brief: "Represents GLOBAL statement in NMODL" + - RandomVarList: + brief: "Represents RANDOM statement in NMODL" + nmodl: "RANDOM " + members: + - variables: + brief: "Vector of random variables" + type: RandomVar + vector: true + separator: ", " + description: | + Here is an example of RANDOM statement + + \code{.mod} + NEURON { + THREADSAFE + POINT_PROCESS NetStim + RANDOM ranvar + \endcode + - Pointer: nmodl: "POINTER " members: @@ -1738,6 +1765,7 @@ brief: "Vector of pointer variables" type: PointerVar vector: true + add: true separator: ", " brief: "Represents POINTER statement in NMODL" diff --git a/src/language/node_info.py b/src/language/node_info.py index 466895b3f9..ff6d72faf5 100644 --- a/src/language/node_info.py +++ b/src/language/node_info.py @@ -66,6 +66,7 @@ "GlobalVar", "PointerVar", "BbcorePointerVar", + "RandomVar", "ExternVar", "PrimeName", "ConstantVar", diff --git a/src/lexer/nmodl_utils.cpp b/src/lexer/nmodl_utils.cpp index 6ba9bffd09..7b6941cc06 100644 --- a/src/lexer/nmodl_utils.cpp +++ b/src/lexer/nmodl_utils.cpp @@ -233,6 +233,8 @@ SymbolType token_symbol(const std::string& key, PositionType& pos, TokenType typ return Parser::make_PROCEDURE(token, pos); case Token::PROTECT: return Parser::make_PROTECT(token, pos); + case Token::RANDOM: + return Parser::make_RANDOM(token, pos); case Token::RANGE: return Parser::make_RANGE(token, pos); case Token::READ: diff --git a/src/lexer/token_mapping.cpp b/src/lexer/token_mapping.cpp index c3c0fb116f..734d965837 100644 --- a/src/lexer/token_mapping.cpp +++ b/src/lexer/token_mapping.cpp @@ -97,6 +97,7 @@ const static std::map keywords = { {"CHARGE", Token::VALENCE}, {"GLOBAL", Token::GLOBAL}, {"POINTER", Token::POINTER}, + {"RANDOM", Token::RANDOM}, {"BBCOREPOINTER", Token::BBCOREPOINTER}, {"EXTERNAL", Token::EXTERNAL}, {"INCLUDE", Token::INCLUDE1}, diff --git a/src/main.cpp b/src/main.cpp index cbd1e62c37..eed968bef1 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -25,6 +25,7 @@ #include "visitors/after_cvode_to_cnexp_visitor.hpp" #include "visitors/ast_visitor.hpp" #include "visitors/constant_folder_visitor.hpp" +#include "visitors/function_callpath_visitor.hpp" #include "visitors/global_var_visitor.hpp" #include "visitors/implicit_argument_visitor.hpp" #include "visitors/indexedname_visitor.hpp" @@ -528,6 +529,12 @@ int main(int argc, const char* argv[]) { SymtabVisitor(update_symtab).visit_program(*ast); } + { + FunctionCallpathVisitor{}.visit_program(*ast); + ast_to_nmodl(*ast, 
filepath("FunctionCallpathVisitor")); + SymtabVisitor(update_symtab).visit_program(*ast); + } + { if (coreneuron_code && oacc_backend) { logger->info("Running OpenACC backend code generator for CoreNEURON"); diff --git a/src/parser/nmodl.yy b/src/parser/nmodl.yy index 8d9b7bd5fb..c07bd961a3 100644 --- a/src/parser/nmodl.yy +++ b/src/parser/nmodl.yy @@ -127,6 +127,7 @@ %token PROCEDURE %token PROTECT %token RANGE +%token RANDOM %token REACT1 %token REACTION %token READ @@ -293,6 +294,7 @@ %type global_var_list %type pointer_var_list %type bbcore_pointer_var_list +%type random_var_list %type external_var_list %type valence %type initial_statement @@ -1987,6 +1989,11 @@ neuron_statement : $1.emplace_back(new ast::BbcorePointer($3)); $$ = $1; } + | neuron_statement RANDOM random_var_list + { + $1.emplace_back(new ast::RandomVarList($3)); + $$ = $1; + } | neuron_statement EXTERNAL external_var_list { $1.emplace_back(new ast::External($3)); @@ -2195,6 +2202,23 @@ bbcore_pointer_var_list : NAME_PTR ; +random_var_list : NAME_PTR + { + $$ = ast::RandomVarVector(); + $$.emplace_back(new ast::RandomVar($1)); + } + | random_var_list "," NAME_PTR + { + $1.emplace_back(new ast::RandomVar($3)); + $$ = $1; + } + | error + { + error(scanner.loc, "random_var_list"); + } + ; + + external_var_list : NAME_PTR { $$ = ast::ExternVarVector(); diff --git a/src/symtab/symbol_properties.cpp b/src/symtab/symbol_properties.cpp index 8782332ee2..c65a8bdd19 100644 --- a/src/symtab/symbol_properties.cpp +++ b/src/symtab/symbol_properties.cpp @@ -6,8 +6,10 @@ */ #include +#include #include +#include "codegen/codegen_naming.hpp" #include "symtab/symbol_properties.hpp" #include "utils/string_utils.hpp" @@ -159,6 +161,14 @@ std::vector to_string_vector(const NmodlType& obj) { properties.emplace_back("codegen_var"); } + if (has_property(obj, NmodlType::use_range_ptr_var)) { + properties.emplace_back("use_range_ptr_var"); + } + + if (has_property(obj, NmodlType::random_var)) { + properties.emplace_back("random_var"); + } + return properties; } diff --git a/src/symtab/symbol_properties.hpp b/src/symtab/symbol_properties.hpp index dad35d8b60..8bf6359194 100644 --- a/src/symtab/symbol_properties.hpp +++ b/src/symtab/symbol_properties.hpp @@ -55,25 +55,25 @@ enum class Status : enum_type { empty = 0, /// converted to local - localized = 1L << 0, + localized = 1LL << 0, /// converted to global - globalized = 1L << 1, + globalized = 1LL << 1, /// inlined - inlined = 1L << 2, + inlined = 1LL << 2, /// renamed - renamed = 1L << 3, + renamed = 1LL << 3, /// created - created = 1L << 4, + created = 1LL << 4, /// derived from state - from_state = 1L << 5, + from_state = 1LL << 5, /// variable marked as thread safe - thread_safe = 1L << 6 + thread_safe = 1LL << 6 }; /// usage of mod file as array or scalar @@ -88,10 +88,10 @@ enum class VariableType : enum_type { /// variable usage within a mod file enum class Access : enum_type { /// variable is ready only - read = 1L << 0, + read = 1LL << 0, /// variable is written only - write = 1L << 1 + write = 1LL << 1 }; @@ -118,106 +118,112 @@ enum class NmodlType : enum_type { empty = 0, /// Local Variable - local_var = 1L << 0, + local_var = 1LL << 0, /// Global Variable - global_var = 1L << 1, + global_var = 1LL << 1, /// Range Variable - range_var = 1L << 2, + range_var = 1LL << 2, /// Parameter Variable - param_assign = 1L << 3, + param_assign = 1LL << 3, /// Pointer Type - pointer_var = 1L << 4, + pointer_var = 1LL << 4, /// Bbcorepointer Type - bbcore_pointer_var = 1L << 5, + 
bbcore_pointer_var = 1LL << 5, /// Extern Type - extern_var = 1L << 6, + extern_var = 1LL << 6, /// Prime Type - prime_name = 1L << 7, + prime_name = 1LL << 7, /// Assigned Definition - assigned_definition = 1L << 8, + assigned_definition = 1LL << 8, /// Unit Def - unit_def = 1L << 9, + unit_def = 1LL << 9, /// Read Ion - read_ion_var = 1L << 10, + read_ion_var = 1LL << 10, /// Write Ion - write_ion_var = 1L << 11, + write_ion_var = 1LL << 11, /// Non Specific Current - nonspecific_cur_var = 1L << 12, + nonspecific_cur_var = 1LL << 12, /// Electrode Current - electrode_cur_var = 1L << 13, + electrode_cur_var = 1LL << 13, /// Argument Type - argument = 1L << 14, + argument = 1LL << 14, /// Function Type - function_block = 1L << 15, + function_block = 1LL << 15, /// Procedure Type - procedure_block = 1L << 16, + procedure_block = 1LL << 16, /// Derivative Block - derivative_block = 1L << 17, + derivative_block = 1LL << 17, /// Linear Block - linear_block = 1L << 18, + linear_block = 1LL << 18, /// NonLinear Block - non_linear_block = 1L << 19, + non_linear_block = 1LL << 19, /// constant variable - constant_var = 1L << 20, + constant_var = 1LL << 20, /// Kinetic Block - kinetic_block = 1L << 21, + kinetic_block = 1LL << 21, /// FunctionTable Block - function_table_block = 1L << 22, + function_table_block = 1LL << 22, /// factor in unit block - factor_def = 1L << 23, + factor_def = 1LL << 23, /// neuron variable accessible in mod file - extern_neuron_variable = 1L << 24, + extern_neuron_variable = 1LL << 24, /// neuron solver methods and math functions - extern_method = 1L << 25, + extern_method = 1LL << 25, /// state variable - state_var = 1L << 26, + state_var = 1LL << 26, /// need to solve : used in solve statement - to_solve = 1L << 27, + to_solve = 1LL << 27, /// ion type - useion = 1L << 28, + useion = 1LL << 28, /// variable is used in table statement - table_statement_var = 1L << 29, + table_statement_var = 1LL << 29, /// variable is used in table as assigned - table_assigned_var = 1L << 30, + table_assigned_var = 1LL << 30, /// Discrete Block - discrete_block = 1L << 31, + discrete_block = 1LL << 31, /// Define variable / macro - define = 1L << 32, + define = 1LL << 32, /// Codegen specific variable - codegen_var = 1L << 33 + codegen_var = 1LL << 33, + + /// Randomvar Type + random_var = 1LL << 34, + + /// FUNCTION or PROCEDURE needs setdata check + use_range_ptr_var = 1LL << 35 }; template @@ -250,7 +256,6 @@ inline T& operator&=(T& lhs, T rhs) { return lhs; } - /// check if any property is set inline bool has_property(const NmodlType& obj, NmodlType property) { return static_cast(obj & property); diff --git a/src/visitors/CMakeLists.txt b/src/visitors/CMakeLists.txt index 8871c9f80d..03b98d228c 100644 --- a/src/visitors/CMakeLists.txt +++ b/src/visitors/CMakeLists.txt @@ -12,6 +12,7 @@ add_library( after_cvode_to_cnexp_visitor.cpp constant_folder_visitor.cpp defuse_analyze_visitor.cpp + function_callpath_visitor.cpp global_var_visitor.cpp implicit_argument_visitor.cpp indexedname_visitor.cpp diff --git a/src/visitors/function_callpath_visitor.cpp b/src/visitors/function_callpath_visitor.cpp new file mode 100644 index 0000000000..e9dc69ce42 --- /dev/null +++ b/src/visitors/function_callpath_visitor.cpp @@ -0,0 +1,95 @@ + +/* + * Copyright 2024 Blue Brain Project, EPFL. + * See the top-level LICENSE file for details. 
+ * + * SPDX-License-Identifier: Apache-2.0 + */ + +#include "visitors/function_callpath_visitor.hpp" + +namespace nmodl { +namespace visitor { + +using symtab::Symbol; +using symtab::syminfo::NmodlType; + +void FunctionCallpathVisitor::visit_var_name(const ast::VarName& node) { + if (visited_functions_or_procedures.empty()) { + return; + } + /// If node is either a RANGE var, a POINTER or a BBCOREPOINTER then + /// the FUNCTION or PROCEDURE it's used in should have the `use_range_ptr_var` + /// property + auto sym = psymtab->lookup(node.get_node_name()); + const auto properties = NmodlType::range_var | NmodlType::pointer_var | + NmodlType::bbcore_pointer_var; + if (sym && sym->has_any_property(properties)) { + const auto top = visited_functions_or_procedures.back(); + const auto caller_func_name = + top->is_function_block() + ? dynamic_cast(top)->get_node_name() + : dynamic_cast(top)->get_node_name(); + auto caller_func_proc_sym = psymtab->lookup(caller_func_name); + caller_func_proc_sym->add_properties(NmodlType::use_range_ptr_var); + } +} + +void FunctionCallpathVisitor::visit_function_call(const ast::FunctionCall& node) { + if (visited_functions_or_procedures.empty()) { + return; + } + const auto name = node.get_node_name(); + const auto func_symbol = psymtab->lookup(name); + if (!func_symbol || + !func_symbol->has_any_property(NmodlType::function_block | NmodlType::procedure_block) || + func_symbol->get_nodes().empty()) { + return; + } + /// Visit the called FUNCTION/PROCEDURE AST node to check whether + /// it has `use_range_ptr_var` property. If it does the currently called + /// function needs to have it too. + const auto func_block = func_symbol->get_nodes()[0]; + func_block->accept(*this); + if (func_symbol->has_any_property(NmodlType::use_range_ptr_var)) { + const auto top = visited_functions_or_procedures.back(); + auto caller_func_name = + top->is_function_block() + ? dynamic_cast(top)->get_node_name() + : dynamic_cast(top)->get_node_name(); + auto caller_func_proc_sym = psymtab->lookup(caller_func_name); + caller_func_proc_sym->add_properties(NmodlType::use_range_ptr_var); + } +} + +void FunctionCallpathVisitor::visit_procedure_block(const ast::ProcedureBlock& node) { + /// Avoid recursive calls + if (std::find(visited_functions_or_procedures.begin(), + visited_functions_or_procedures.end(), + &node) != visited_functions_or_procedures.end()) { + return; + } + visited_functions_or_procedures.push_back(&node); + node.visit_children(*this); + visited_functions_or_procedures.pop_back(); +} + +void FunctionCallpathVisitor::visit_function_block(const ast::FunctionBlock& node) { + // Avoid recursive calls + if (std::find(visited_functions_or_procedures.begin(), + visited_functions_or_procedures.end(), + &node) != visited_functions_or_procedures.end()) { + return; + } + visited_functions_or_procedures.push_back(&node); + node.visit_children(*this); + visited_functions_or_procedures.pop_back(); +} + +void FunctionCallpathVisitor::visit_program(const ast::Program& node) { + psymtab = node.get_symbol_table(); + node.visit_children(*this); +} + +} // namespace visitor +} // namespace nmodl diff --git a/src/visitors/function_callpath_visitor.hpp b/src/visitors/function_callpath_visitor.hpp new file mode 100644 index 0000000000..92666841b1 --- /dev/null +++ b/src/visitors/function_callpath_visitor.hpp @@ -0,0 +1,65 @@ +/* + * Copyright 2024 Blue Brain Project, EPFL. + * See the top-level LICENSE file for details. 
+ * + * SPDX-License-Identifier: Apache-2.0 + */ + +#pragma once + +/** + * \file + * \brief \copybrief nmodl::visitor::FunctionCallpathVisitor + */ + +#include "ast/all.hpp" +#include "symtab/decl.hpp" +#include "visitors/ast_visitor.hpp" + +namespace nmodl { +namespace visitor { + +/** + * \addtogroup visitor_classes + * \{ + */ + +/** + * \class FunctionCallpathVisitor + * \brief %Visitor for traversing \c FunctionBlock s and \c ProcedureBlocks through + * their \c FunctionCall s + * + * This visitor is used to traverse the \c FUNCTION s and \c PROCEDURE s in the NMODL files. + * It visits the \c FunctionBlock s and \c ProcedureBlock s and if there is a \c FunctionCall + * in those, it visits the \c FunctionBlock or \c ProcedureBlock of the \c FunctionCall. + * Currently it only checks whether in this path of function calls there is any use of \c RANGE , + * \c POINTER or \c BBCOREPOINTER variable. In case there is it adds the \c use_range_ptr_var + * property in the \c Symbol of the function or procedure in the program \c SymbolTable and does the + * same recursively for all the caller functions. The \c use_range_ptr_var property is used later in + * the \c CodegenNeuronCppVisitor . + * + */ +class FunctionCallpathVisitor: public ConstAstVisitor { + private: + /// Vector of currently visited functions or procedures (used as a searchable stack) + std::vector visited_functions_or_procedures; + + /// symbol table for the program + symtab::SymbolTable* psymtab = nullptr; + + public: + void visit_var_name(const ast::VarName& node) override; + + void visit_function_call(const ast::FunctionCall& node) override; + + void visit_function_block(const ast::FunctionBlock& node) override; + + void visit_procedure_block(const ast::ProcedureBlock& node) override; + + void visit_program(const ast::Program& node) override; +}; + +/** \} */ // end of visitor_classes + +} // namespace visitor +} // namespace nmodl diff --git a/src/visitors/perf_visitor.cpp b/src/visitors/perf_visitor.cpp index 078c8300e0..4e83faa0fc 100644 --- a/src/visitors/perf_visitor.cpp +++ b/src/visitors/perf_visitor.cpp @@ -256,6 +256,11 @@ void PerfVisitor::count_variables() { variables = current_symtab->get_variables_with_properties(property); num_pointer_variables = static_cast(variables.size()); + /// RANDOM variables have NmodlType::random_var + property = NmodlType::random_var; + variables = current_symtab->get_variables_with_properties(property); + num_random_variables = static_cast(variables.size()); + /// number of global variables : parameters and pointers could appear also /// as range variables and hence need to filter out. 
But if anything declared @@ -293,6 +298,7 @@ void PerfVisitor::print_memory_usage() { stream << " STATE : " << num_state_variables; stream << " POINTER : " << num_pointer_variables << std::endl; + stream << " RANDOM : " << num_random_variables << std::endl; if (printer) { printer->push_block("MemoryInfo"); @@ -317,6 +323,10 @@ void PerfVisitor::print_memory_usage() { printer->add_node(std::to_string(num_pointer_variables), "total"); printer->pop_block(); + printer->push_block("RANDOM"); + printer->add_node(std::to_string(num_random_variables), "total"); + printer->pop_block(); + printer->pop_block(); } } diff --git a/src/visitors/perf_visitor.hpp b/src/visitors/perf_visitor.hpp index 7fa0c01d3f..a503391971 100644 --- a/src/visitors/perf_visitor.hpp +++ b/src/visitors/perf_visitor.hpp @@ -119,6 +119,9 @@ class PerfVisitor: public ConstAstVisitor { /// count of pointer / bbcorepointer variables int num_pointer_variables = 0; + /// count of RANDOM variables + int num_random_variables = 0; + /// keys used in map to track var usage std::string const_memr_key = "cm_r_u"; std::string const_memw_key = "cm_w_u"; diff --git a/src/visitors/semantic_analysis_visitor.cpp b/src/visitors/semantic_analysis_visitor.cpp index 22220f3b8d..ff83a36ead 100644 --- a/src/visitors/semantic_analysis_visitor.cpp +++ b/src/visitors/semantic_analysis_visitor.cpp @@ -8,10 +8,12 @@ #include "visitors/semantic_analysis_visitor.hpp" #include "ast/breakpoint_block.hpp" #include "ast/function_block.hpp" +#include "ast/function_call.hpp" #include "ast/function_table_block.hpp" #include "ast/independent_block.hpp" #include "ast/procedure_block.hpp" #include "ast/program.hpp" +#include "ast/statement_block.hpp" #include "ast/string.hpp" #include "ast/suffix.hpp" #include "ast/table_statement.hpp" @@ -24,6 +26,7 @@ namespace visitor { bool SemanticAnalysisVisitor::check(const ast::Program& node) { check_fail = false; + program_symtab = node.get_symbol_table(); /// <-- This code is for check 2 const auto& suffix_node = collect_nodes(node, {ast::AstNodeType::SUFFIX}); @@ -89,6 +92,92 @@ void SemanticAnalysisVisitor::visit_function_block(const ast::FunctionBlock& nod /// --> } +void SemanticAnalysisVisitor::visit_name(const ast::Name& node) { + /// <-- This code is a portion of check 9 + // There are only two contexts where a random_var is allowed. As the first arg of a random + // function or as an item in the RANDOM declaration. + // Only the former needs checking. + bool ok = true; + auto name = node.get_node_name(); + + // only check for variables exist in the symbol table (e.g. 
SUFFIX has type Name but it's not + // variable) + // if variable is not RANDOM then nothing to check for it + auto symbol = program_symtab->lookup(name); + if (!symbol || !symbol->has_any_property(symtab::syminfo::NmodlType::random_var)) { + return; + } + + auto parent = node.get_parent(); + + // if it's RANDOM var declaration in NEURON block then nothing to do + if (parent && parent->is_random_var()) { + return; + } + + if (parent && parent->is_var_name()) { + parent = parent->get_parent(); + if (parent && parent->is_function_call()) { + auto fname = parent->get_node_name(); + // if function is a random function then check if the current + // name is the function's first argument + if (is_random_construct_function(fname)) { + auto rfun = dynamic_cast(parent); + const auto& arguments = rfun->get_arguments(); + if (!arguments.empty() && arguments.front()->is_var_name() && + arguments.front()->get_node_name() == name) { + // if this is a first argument to function then there + // is no problem + node.visit_children(*this); + return; + } + } + } + } + + // Otherwise, we have an error + auto position = node.get_token()->position(); + logger->critical( + fmt::format("SemanticAnalysisVisitor :: RANDOM variable {} at {}" + " can be used only as the first arg of a random function", + node.get_node_name(), + position)); + check_fail = true; + + node.visit_children(*this); + /// --> +} + +void SemanticAnalysisVisitor::visit_function_call(const ast::FunctionCall& node) { + /// <-- This code is a portion of check 9 + // The first arg of a RANDOM function must be a random_var + // Otherwise it's an error + auto fname = node.get_node_name(); + if (is_random_construct_function(fname)) { + const auto& arguments = node.get_arguments(); + if (!arguments.empty()) { + auto arg0 = arguments.front(); + if (arg0->is_var_name()) { + auto name = arg0->get_node_name(); + auto symbol = program_symtab->lookup(name); + if (symbol->has_any_property(symtab::syminfo::NmodlType::random_var)) { + node.visit_children(*this); + return; + } + } + } + auto position = node.get_name()->get_token()->position(); + logger->critical( + fmt::format("SemanticAnalysisVisitor :: random function {} at {} :: The first arg must " + "be a random variable", + fname, + position)); + check_fail = true; + } + node.visit_children(*this); + /// --> +} + void SemanticAnalysisVisitor::visit_table_statement(const ast::TableStatement& tableStmt) { /// <-- This code is for check 1 if ((in_function || in_procedure) && !one_arg_in_procedure_function) { diff --git a/src/visitors/semantic_analysis_visitor.hpp b/src/visitors/semantic_analysis_visitor.hpp index 1f9c2b7e98..93c0958cef 100644 --- a/src/visitors/semantic_analysis_visitor.hpp +++ b/src/visitors/semantic_analysis_visitor.hpp @@ -32,6 +32,7 @@ * 6. Check that mutex are not badly use * 7. Check than function table got at least one argument. * 8. Check that at most one derivative block is present. + * 9. Check that RANDOM variable is mentioned only as first arg in random function. 
*/ #include "ast/ast.hpp" #include "visitors/ast_visitor.hpp" @@ -41,8 +42,10 @@ namespace visitor { class SemanticAnalysisVisitor: public ConstAstVisitor { private: + // if semantic analysis check has failed bool check_fail = false; - + // symbol table for the program + symtab::SymbolTable* program_symtab = nullptr; /// true if accelerator backend is used for code generation bool accel_backend = false; /// true if the procedure or the function contains only one argument @@ -86,6 +89,12 @@ class SemanticAnalysisVisitor: public ConstAstVisitor { /// Look if MUTEXUNLOCK is outside a locked block void visit_mutex_unlock(const ast::MutexUnlock& node) override; + /// Only use of random_var is as first arg in random function. + void visit_name(const ast::Name& node) override; + + /// random function first arg must be random_var + void visit_function_call(const ast::FunctionCall& node) override; + public: SemanticAnalysisVisitor(bool accel_backend = false) : accel_backend(accel_backend) {} diff --git a/src/visitors/visitor_utils.cpp b/src/visitors/visitor_utils.cpp index 7044a88a66..8bc49703c9 100644 --- a/src/visitors/visitor_utils.cpp +++ b/src/visitors/visitor_utils.cpp @@ -12,6 +12,7 @@ #include #include "ast/all.hpp" +#include "codegen/codegen_naming.hpp" #include "parser/nmodl_driver.hpp" #include "utils/string_utils.hpp" #include "visitors/json_visitor.hpp" @@ -281,4 +282,8 @@ std::string get_full_var_name(const ast::VarName& node) { return full_var_name; } +bool is_random_construct_function(const std::string& name) { + return codegen::naming::RANDOM_FUNCTIONS_MAPPING.count(name) != 0; +} + } // namespace nmodl diff --git a/src/visitors/visitor_utils.hpp b/src/visitors/visitor_utils.hpp index a817ad0123..9e7163fdb0 100644 --- a/src/visitors/visitor_utils.hpp +++ b/src/visitors/visitor_utils.hpp @@ -139,4 +139,7 @@ std::string get_indexed_name(const ast::IndexedName& node); /// Given a VarName node, return the full var name including index std::string get_full_var_name(const ast::VarName& node); +/// Is given name a one of the function for RANDOM construct +bool is_random_construct_function(const std::string& name); + } // namespace nmodl diff --git a/test/unit/codegen/codegen_neuron_cpp_visitor.cpp b/test/unit/codegen/codegen_neuron_cpp_visitor.cpp index 7d66bd2591..525e9fba84 100644 --- a/test/unit/codegen/codegen_neuron_cpp_visitor.cpp +++ b/test/unit/codegen/codegen_neuron_cpp_visitor.cpp @@ -12,6 +12,7 @@ #include "codegen/codegen_neuron_cpp_visitor.hpp" #include "parser/nmodl_driver.hpp" #include "test/unit/utils/test_utils.hpp" +#include "visitors/function_callpath_visitor.hpp" #include "visitors/inline_visitor.hpp" #include "visitors/neuron_solve_visitor.hpp" #include "visitors/solve_block_visitor.hpp" @@ -25,6 +26,7 @@ using namespace codegen; using nmodl::parser::NmodlDriver; using nmodl::test_utils::reindent_text; +using symtab::syminfo::NmodlType; /// Helper for creating C codegen visitor std::shared_ptr create_neuron_cpp_visitor( @@ -38,6 +40,7 @@ std::shared_ptr create_neuron_cpp_visitor( InlineVisitor().visit_program(*ast); NeuronSolveVisitor().visit_program(*ast); SolveBlockVisitor().visit_program(*ast); + FunctionCallpathVisitor().visit_program(*ast); /// create C code generation visitor auto cv = std::make_shared("_test", ss, "double", false); @@ -47,8 +50,7 @@ std::shared_ptr create_neuron_cpp_visitor( /// print entire code -std::string get_neuron_cpp_code(const std::string& nmodl_text, - const bool generate_gpu_code = false) { +std::string get_neuron_cpp_code(const 
std::string& nmodl_text) { const auto& ast = NmodlDriver().parse_string(nmodl_text); std::stringstream ss; auto cvisitor = create_neuron_cpp_visitor(ast, nmodl_text, ss); @@ -194,9 +196,9 @@ void _nrn_mechanism_register_data_fields(Args&&... args) { double* Ds{}; double* v_unused{}; double* g_unused{}; - const double* ion_ena{}; - double* ion_ina{}; - double* ion_dinadv{}; + const double* const* ion_ena{}; + double* const* ion_ina{}; + double* const* ion_dinadv{}; pas_test_Store* global{&pas_test_global}; };)"; @@ -254,25 +256,25 @@ void _nrn_mechanism_register_data_fields(Args&&... args) { mech_type = nrn_get_mechtype(mechanism_info[1]); _nrn_mechanism_register_data_fields(mech_type, - _nrn_mechanism_field{"g"}, /* float var index 0 */ - _nrn_mechanism_field{"e"}, /* float var index 1 */ - _nrn_mechanism_field{"i"}, /* float var index 2 */ - _nrn_mechanism_field{"ar", 2}, /* float var index 3 */ - _nrn_mechanism_field{"s"}, /* float var index 4 */ - _nrn_mechanism_field{"ena"}, /* float var index 5 */ - _nrn_mechanism_field{"ina"}, /* float var index 6 */ - _nrn_mechanism_field{"Ds"}, /* float var index 7 */ - _nrn_mechanism_field{"v_unused"}, /* float var index 8 */ - _nrn_mechanism_field{"g_unused"}, /* float var index 9 */ - _nrn_mechanism_field{"ion_ena", "ion_ena"}, /* int var index 0 */ - _nrn_mechanism_field{"ion_ina", "ion_ina"}, /* int var index 1 */ - _nrn_mechanism_field{"ion_dinadv", "ion_dinadv"} /* int var index 2 */ + _nrn_mechanism_field{"g"} /* 0 */, + _nrn_mechanism_field{"e"} /* 1 */, + _nrn_mechanism_field{"i"} /* 2 */, + _nrn_mechanism_field{"ar", 2} /* 3 */, + _nrn_mechanism_field{"s"} /* 4 */, + _nrn_mechanism_field{"ena"} /* 5 */, + _nrn_mechanism_field{"ina"} /* 6 */, + _nrn_mechanism_field{"Ds"} /* 7 */, + _nrn_mechanism_field{"v_unused"} /* 8 */, + _nrn_mechanism_field{"g_unused"} /* 9 */, + _nrn_mechanism_field{"ion_ena", "na_ion"} /* 0 */, + _nrn_mechanism_field{"ion_ina", "na_ion"} /* 1 */, + _nrn_mechanism_field{"ion_dinadv", "na_ion"} /* 2 */ ); hoc_register_prop_size(mech_type, 10, 3); - hoc_register_dparam_semantics(mech_type, 0, "ion_ena"); - hoc_register_dparam_semantics(mech_type, 1, "ion_ina"); - hoc_register_dparam_semantics(mech_type, 2, "ion_dinadv"); + hoc_register_dparam_semantics(mech_type, 0, "na_ion"); + hoc_register_dparam_semantics(mech_type, 1, "na_ion"); + hoc_register_dparam_semantics(mech_type, 2, "na_ion"); })CODE"; REQUIRE_THAT(generated, @@ -298,3 +300,47 @@ void _nrn_mechanism_register_data_fields(Args&&... 
args) { } } } + + +SCENARIO("Check whether PROCEDURE and FUNCTION need setdata call", "[codegen][needsetdata]") { + GIVEN("mod file with GLOBAL and RANGE variables used in FUNC and PROC") { + std::string input_nmodl = R"( + NEURON { + SUFFIX test + RANGE x + GLOBAL s + } + PARAMETER { + s = 2 + } + ASSIGNED { + x + } + PROCEDURE a() { + x = get_42() + } + FUNCTION b() { + a() + } + FUNCTION get_42() { + get_42 = 42 + } + )"; + const auto& ast = NmodlDriver().parse_string(input_nmodl); + std::stringstream ss; + auto cvisitor = create_neuron_cpp_visitor(ast, input_nmodl, ss); + cvisitor->visit_program(*ast); + const auto symtab = ast->get_symbol_table(); + THEN("use_range_ptr_var property is added to needed FUNC and PROC") { + auto use_range_ptr_var_funcs = symtab->get_variables_with_properties( + NmodlType::use_range_ptr_var); + REQUIRE(use_range_ptr_var_funcs.size() == 2); + const auto a = symtab->lookup("a"); + REQUIRE(a->has_any_property(NmodlType::use_range_ptr_var)); + const auto b = symtab->lookup("b"); + REQUIRE(b->has_any_property(NmodlType::use_range_ptr_var)); + const auto get_42 = symtab->lookup("get_42"); + REQUIRE(!get_42->has_any_property(NmodlType::use_range_ptr_var)); + } + } +} diff --git a/test/unit/modtoken/modtoken.cpp b/test/unit/modtoken/modtoken.cpp index ceaa025510..d473b3307b 100644 --- a/test/unit/modtoken/modtoken.cpp +++ b/test/unit/modtoken/modtoken.cpp @@ -47,14 +47,14 @@ TEST_CASE("NMODL Lexer returning valid ModToken object", "[token][modtoken]") { std::stringstream ss; symbol_type("text", value); ss << *(value.get_token()); - REQUIRE(ss.str() == " text at [1.1-4] type 342"); + REQUIRE(ss.str() == " text at [1.1-4] type 343"); } { std::stringstream ss; symbol_type(" some_text", value); ss << *(value.get_token()); - REQUIRE(ss.str() == " some_text at [1.3-11] type 342"); + REQUIRE(ss.str() == " some_text at [1.3-11] type 343"); } } @@ -64,7 +64,7 @@ TEST_CASE("NMODL Lexer returning valid ModToken object", "[token][modtoken]") { std::stringstream ss; symbol_type("h'' = ", value); ss << *(value.get_token()); - REQUIRE(ss.str() == " h'' at [1.1-3] type 349"); + REQUIRE(ss.str() == " h'' at [1.1-3] type 350"); REQUIRE(value.get_order()->eval() == 2); } } diff --git a/test/unit/utils/nmodl_constructs.cpp b/test/unit/utils/nmodl_constructs.cpp index 4faa30d843..8079fb5e47 100644 --- a/test/unit/utils/nmodl_constructs.cpp +++ b/test/unit/utils/nmodl_constructs.cpp @@ -478,6 +478,8 @@ std::map const nmodl_valid_constructs{ POINTER rng1, rng2 BBCOREPOINTER rng3 EXTERNAL extvar + RANDOM r1 + RANDOM r2, r3 THREADSAFE } )" diff --git a/test/unit/visitor/semantic_analysis.cpp b/test/unit/visitor/semantic_analysis.cpp index df3a6f4452..210b5ec63c 100644 --- a/test/unit/visitor/semantic_analysis.cpp +++ b/test/unit/visitor/semantic_analysis.cpp @@ -192,3 +192,69 @@ SCENARIO("At most one DERIVATIVE block", "[visitor][semantic_analysis]") { } } } + +SCENARIO("RANDOM Construct", "[visitor][semantic_analysis]") { + GIVEN("A mod file with correct RANDOM variable usage") { + std::string nmodl_text = R"( + NEURON { + RANDOM r + } + PROCEDURE rates() { + LOCAL x + random_setseq(r, 1) + x = 1 + random_negexp(r) + x = x + exp(random_negexp(r)) + } + FUNCTION erand() { + erand = random_negexp(r) + } + )"; + THEN("Semantic analysis should pass") { + REQUIRE_FALSE(run_semantic_analysis_visitor(nmodl_text)); + } + } + + GIVEN("A mod file with incorrect usage of RANDOM variable as function arguments") { + std::string nmodl_text = R"( + NEURON { + RANDOM r + } + PROCEDURE rates() { + 
random_setseq(1, r) + } + )"; + THEN("Semantic analysis should fail") { + REQUIRE(run_semantic_analysis_visitor(nmodl_text)); + } + } + + GIVEN("A mod file with incorrect usage of RANDOM variable in an expression") { + std::string nmodl_text = R"( + NEURON { + RANDOM r + } + PROCEDURE rates() { + LOCAL x + x = r + 1 + } + )"; + THEN("Semantic analysis should fail") { + REQUIRE(run_semantic_analysis_visitor(nmodl_text)); + } + } + + GIVEN("A mod file with incorrect usage of RANDOM variable in non-random function") { + std::string nmodl_text = R"( + NEURON { + RANDOM r + } + PROCEDURE rates() { + LOCAL x + x = exp(r) + 1 + } + )"; + THEN("Semantic analysis should fail") { + REQUIRE(run_semantic_analysis_visitor(nmodl_text)); + } + } +} diff --git a/test/usecases/ionic/ionic.mod b/test/usecases/ionic/ionic.mod new file mode 100644 index 0000000000..a0075b716d --- /dev/null +++ b/test/usecases/ionic/ionic.mod @@ -0,0 +1,13 @@ +NEURON { + SUFFIX ionic + USEION na READ ina WRITE ena +} + +ASSIGNED { + ina (mA/cm2) + ena (mV) +} + +BREAKPOINT { + ena = 42.0 +} diff --git a/test/usecases/ionic/simulate.py b/test/usecases/ionic/simulate.py new file mode 100644 index 0000000000..fcbd4d37b7 --- /dev/null +++ b/test/usecases/ionic/simulate.py @@ -0,0 +1,28 @@ +import numpy as np + +from neuron import h, gui +from neuron.units import ms + +nseg = 1 + +s = h.Section() +s.insert("ionic") +s.nseg = nseg + +x_hoc = h.Vector().record(s(0.5)._ref_ena) +t_hoc = h.Vector().record(h._ref_t) + +h.stdinit() +h.tstop = 5.0 * ms +h.run() + +x = np.array(x_hoc.as_numpy()) +t = np.array(t_hoc.as_numpy()) + +x_exact = np.full(t.shape, 42.0) +x_exact[0] = 0.0 + +abs_err = np.abs(x - x_exact) + +assert np.all(abs_err < 1e-12), abs_err +print("ionic: success")
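
Note: the FunctionCallpathVisitor / use_range_ptr_var machinery introduced in this patch is exercised only through the Catch2 scenario in test/unit/codegen/codegen_neuron_cpp_visitor.cpp. Below is a minimal standalone sketch of the same check, not part of the patch itself; it assumes the entry points keep the signatures used in that test (NmodlDriver::parse_string, SymtabVisitor, FunctionCallpathVisitor, Program::get_symbol_table) and that the symbol-table pass is available via "visitors/symtab_visitor.hpp". The file name check_setdata.cpp, the mod fragment and the printed messages are illustrative only.

    // check_setdata.cpp -- hypothetical standalone driver, not part of this patch.
    #include <iostream>
    #include <string>

    #include "parser/nmodl_driver.hpp"
    #include "symtab/symbol_properties.hpp"
    #include "visitors/function_callpath_visitor.hpp"
    #include "visitors/symtab_visitor.hpp"  // assumed header name for SymtabVisitor

    int main() {
        // Illustrative mod fragment: use_me() writes a RANGE variable, so both it
        // and its caller wrapper() should end up tagged with use_range_ptr_var.
        const std::string nmodl_text = R"(
            NEURON {
                SUFFIX demo
                RANGE x
            }
            ASSIGNED { x }
            PROCEDURE use_me() { x = 42 }
            FUNCTION wrapper() { use_me() }
        )";

        const auto ast = nmodl::parser::NmodlDriver().parse_string(nmodl_text);

        // Pass ordering matters: the symbol table must be populated before
        // FunctionCallpathVisitor runs, because the visitor only adds the
        // use_range_ptr_var property to FUNCTION/PROCEDURE symbols that are
        // already present in the table.
        nmodl::visitor::SymtabVisitor().visit_program(*ast);
        nmodl::visitor::FunctionCallpathVisitor().visit_program(*ast);

        const auto symtab = ast->get_symbol_table();
        for (const auto* name: {"use_me", "wrapper"}) {
            const auto sym = symtab->lookup(name);
            const bool tagged =
                sym && sym->has_any_property(nmodl::symtab::syminfo::NmodlType::use_range_ptr_var);
            std::cout << name << (tagged ? ": needs setdata check\n" : ": no setdata check\n");
        }
        return 0;
    }

Under these assumptions both use_me and wrapper would be reported as needing the setdata check, mirroring the assertions in the new unit test, where a() and b() are tagged while get_42() is not.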