diff --git a/.gitignore b/.gitignore index 06205f5..1cd18b9 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,7 @@ __pycache__ *application_generated_data_files/ *reports/ .mypy_cache/ +**/*png +**/*pdf +**/*npz +**/*zip diff --git a/.ratexcludes b/.ratexcludes index f63e490..e8f90c8 100644 --- a/.ratexcludes +++ b/.ratexcludes @@ -1,5 +1,6 @@ **/*.colour_map **/*.col +**/*.csv **/SpiNNUtils/** **/SpiNNMachine/** **/SpiNNMan/** diff --git a/examples/icub_vor_examples/cerebellum.py b/examples/icub_vor_examples/cerebellum.py new file mode 100644 index 0000000..d4cd779 --- /dev/null +++ b/examples/icub_vor_examples/cerebellum.py @@ -0,0 +1,393 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# import socket +import pyNN.spiNNaker as sim +import numpy as np +import matplotlib.pyplot as plt + +from pyNN.random import RandomDistribution, NumpyRNG + +# cerebellum with simulated input +RETINA_X_SIZE = 304 +RETINA_Y_SIZE = 240 +RETINA_BASE_KEY = 0x00000000 +RETINA_MASK = 0xFF000000 +RETINA_Y_BIT_SHIFT = 9 + +# Synapsis parameters +gc_pc_weights = 0.005 +mf_vn_weights = 0.001 +pc_vn_weights = -0.00002 +io_pc_weights = 0.0 +mf_gc_weights = 0.0006 +go_gc_weights = -0.0002 +input_weights = 0.00025 +mf_go_weights = 0.0006 + +# Network parameters +num_MF_neurons = 100 +num_GC_neurons = 2000 +num_GOC_neurons = 100 +num_PC_neurons = 200 +num_VN_neurons = 200 +num_IO_neurons = 200 + +# Random distribution for synapses delays and weights (MF and GO) +delay_distr = RandomDistribution('uniform', (1.0, 10.0), + rng=NumpyRNG(seed=85524)) +weight_distr_MF = RandomDistribution( + 'uniform', (mf_gc_weights*0.8, mf_gc_weights*1.2), + rng=NumpyRNG(seed=85524)) +weight_distr_GO = RandomDistribution( + 'uniform', (go_gc_weights*0.8, go_gc_weights*1.2), + rng=NumpyRNG(seed=24568)) + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input +} + +# Learning parameters cos rule (MF to VN) +min_weight_c = 0 +max_weight_c = 0.1 +pot_alpha_c = 0.01 # this is alpha in the paper +beta_c = 11 +sigma_c = 201 +initial_weight_c = 0.005 +initial_weight_c = 0.05 +plastic_delay_c = 4 + +# Learning parameters sin rule (GrC to PC) +min_weight_s = 0 +max_weight_s = 0.1 +pot_alpha_s = 0.01 +t_peak_s = 100 +initial_weight_s = 0.05 +plastic_delay_s = 4 + +sim.setup(timestep=1.) 
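As a reviewer's aside (not part of the patch): the ±20 % jitter that `weight_distr_MF` applies around `mf_gc_weights` can be checked host-side with plain PyNN before anything is loaded onto the board. A minimal sketch, mirroring the values above; the sample size is arbitrary:

```python
import numpy as np
from pyNN.random import RandomDistribution, NumpyRNG

mf_gc_weights = 0.0006
weight_distr_MF = RandomDistribution(
    'uniform', (mf_gc_weights * 0.8, mf_gc_weights * 1.2),
    rng=NumpyRNG(seed=85524))

samples = np.asarray(weight_distr_MF.next(10000))
assert samples.min() >= mf_gc_weights * 0.8   # lower bound of the jitter
assert samples.max() <= mf_gc_weights * 1.2   # upper bound of the jitter
print(samples.mean())                         # close to the nominal 0.0006
```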
+ + +# Sensorial Activity: input activity from vestibulus (will come from the head +# IMU, now it is a test bench) +def sensorial_activity(pt): + MAX_AMPLITUDE = 0.8 + RELATIVE_AMPLITUDE = 1.0 + _head_pos = [] + _head_vel = [] + + i = np.arange(0, 2, 0.01) + for t in i: + desired_speed = -np.cos( + t * 2 * np.pi) * MAX_AMPLITUDE * RELATIVE_AMPLITUDE * 2.0 * np.pi + desired_pos = -np.sin( + t * 2 * np.pi) * MAX_AMPLITUDE * RELATIVE_AMPLITUDE + _head_pos.append(desired_pos) + _head_vel.append(desired_speed) + + # single point over time + head_pos = _head_pos[pt] + head_vel = _head_vel[pt] + + head_pos = ((head_pos + 0.8) / 1.6) + head_vel = ((head_vel + 0.8 * 2 * np.pi) / (1.6 * 2 * np.pi)) + + if head_pos > 1.0: + head_pos = 1.0 + elif head_pos < 0.0: + head_pos = 0.0 + if head_vel > 1.0: + head_vel = 1.0 + elif head_vel < 0.0: + head_vel = 0.0 + + min_rate = 0.0 + max_rate = 600.0 + sigma = 0.02 + MF_pos_activity = np.zeros((50)) + MF_vel_activity = np.zeros((50)) + + for i in range(50): + mean = float(i) / 50.0 + 0.01 + gaussian = np.exp( + -((head_pos - mean) * (head_pos - mean))/(2.0 * sigma * sigma)) + MF_pos_activity[i] = min_rate + gaussian * (max_rate - min_rate) + + for i in range(50): + mean = float(i) / 50.0 + 0.01 + gaussian = np.exp( + -((head_vel - mean) * (head_vel - mean))/(2.0 * sigma * sigma)) + MF_vel_activity[i] = min_rate + gaussian * (max_rate - min_rate) + + # sa_mean_freq = np.arange(0,1000,10) + sa_mean_freq = np.concatenate((MF_pos_activity, MF_vel_activity)) + out = [sa_mean_freq, head_pos, head_vel] + return out + + +# Error Activity: error from eye and head encoders +def error_activity(pt): + def compute_P_error(kp, head_position, eye_position): + error = kp * (head_position + eye_position) + return error + + def compute_D_error(kd, head_velocity, eye_velocity): + error = kd * (head_velocity + eye_velocity) + return error + + MAX_AMPLITUDE = 0.8 + MAX_AMPLITUDE_EYE = 0.35 + RELATIVE_AMPLITUDE_EYE = 1.0 + phaseShift = 1.0 * np.pi + # simulated error between eye and head signals, error is zero if the waves + # are in opposite phase + _eye_pos = [] + _eye_vel = [] + ea_rate = [] + i = np.arange(0, 2, 0.01) + for t_eye in i: + desired_speed = -np.cos( + t_eye * 2 * np.pi+phaseShift) * MAX_AMPLITUDE_EYE * \ + RELATIVE_AMPLITUDE_EYE * 2.0 * np.pi + desired_pos = -np.sin( + t_eye * 2 * np.pi + phaseShift) * MAX_AMPLITUDE_EYE * \ + RELATIVE_AMPLITUDE_EYE + _eye_pos.append(desired_pos) + _eye_vel.append(desired_speed) + + # single point over time + eye_pos = _eye_pos[pt] + eye_vel = _eye_vel[pt] + # print 'eye_pos ea',eye_pos + + head = sensorial_activity(pt) + head_pos = head[1] + head_vel = head[2] + + # print(head_pos, eye_pos) + kp = 15.0 + position_error = compute_P_error(kp, head_pos, eye_pos) + kd = 15.0 + velocity_error = compute_D_error(kd, head_vel, eye_vel) + + error = (position_error * 0.1 + ( + velocity_error / (2.0 * np.pi)) * 0.9) / (MAX_AMPLITUDE * 5) + + # print(position_error, velocity_error, error) + + min_rate = 1.0 + max_rate = 25.0 + err = np.linspace(-2.0, 2.0, 20) +# err = [1] + for j in range(len(err)): + error_ = err[j] + # print(error) + low_neuron_ID_threshold = abs(error_) * 100.0 + up_neuron_ID_threshold = low_neuron_ID_threshold - 100.0 + IO_agonist = np.zeros((100)) + IO_antagonist = np.zeros((100)) + + rate = [] + for i in range(100): + if (i < up_neuron_ID_threshold): + rate.append(max_rate) + elif (i < low_neuron_ID_threshold): + aux_rate = max_rate - (max_rate - min_rate) * ( + (i - up_neuron_ID_threshold)/( + low_neuron_ID_threshold - 
up_neuron_ID_threshold)) + rate.append(aux_rate) + else: + rate.append(min_rate) + + if error_ >= 0.0: + IO_agonist[0:100] = min_rate + IO_antagonist = rate + else: + IO_antagonist[0:100] = min_rate + IO_agonist = rate + + ea_rate = np.concatenate((IO_agonist, IO_antagonist)) + + low_neuron_ID_threshold = abs(error) * 100.0 + up_neuron_ID_threshold = low_neuron_ID_threshold - 100.0 + IO_agonist = np.zeros((100)) + IO_antagonist = np.zeros((100)) + + rate = [] + for i in range(100): + if (i < up_neuron_ID_threshold): + rate.append(max_rate) + elif (i < low_neuron_ID_threshold): + aux_rate = max_rate - (max_rate - min_rate) * ( + (i - up_neuron_ID_threshold)/( + low_neuron_ID_threshold - up_neuron_ID_threshold)) + rate.append(aux_rate) + else: + rate.append(min_rate) + + if error >= 0.0: + IO_agonist[0:100] = min_rate + IO_antagonist = rate + else: + IO_antagonist[0:100] = min_rate + IO_agonist = rate + + ea_rate = np.concatenate((IO_agonist, IO_antagonist)) + + return ea_rate + + +for j in range(200): + x = error_activity(j) + plt.plot(x) + +plt.show() + +SA_population = sim.Population( + num_MF_neurons, # number of sources + sim.SpikeSourcePoisson, # source type + # {'rate': sa_mean_freq}, # source spike times + {'rate': sensorial_activity(10)[0]}, # source spike times + label="sa_population" # identifier + ) + +# Create MF population +MF_population = sim.Population(num_MF_neurons, sim.IF_curr_exp(), + label='MFLayer') + +# Create GOC population +GOC_population = sim.Population(num_GOC_neurons, sim.IF_cond_exp(), + label='GOCLayer') + +# Create MF-GO connections +mf_go_connections = sim.Projection( + MF_population, GOC_population, sim.OneToOneConnector(), + sim.StaticSynapse(delay=1.0, weight=mf_go_weights)) + +# Create GrC population +GC_population = sim.Population(num_GC_neurons, sim.IF_curr_exp(), + label='GCLayer') + +# create PC population +PC_population = sim.Population( + num_PC_neurons, # number of neurons + sim.IF_cond_exp(**neuron_params), # Neuron model + label="Purkinje Cell" # identifier + ) + +# create VN population +VN_population = sim.Population( + num_VN_neurons, # number of neurons + sim.IF_cond_exp(**neuron_params), # Neuron model + label="Vestibular Nuclei" # identifier + ) + +# Create IO population +IO_population = sim.Population(num_IO_neurons, sim.IF_curr_exp(), + label='IOLayer') + +# Create connections + +# Create MF-GC and GO-GC connections +float_num_MF_neurons = float(num_MF_neurons) + +list_GOC_GC = [] +list_MF_GC = [] +list_GOC_GC_2 = [] +# projections to subpopulations +# https://github.com/SpiNNakerManchester/sPyNNaker8/issues/168) +for i in range(num_MF_neurons): + GC_medium_index = int( + round((i / float_num_MF_neurons) * num_GC_neurons)) + GC_lower_index = GC_medium_index - 40 + GC_upper_index = GC_medium_index + 60 + + if (GC_lower_index < 0): + GC_lower_index = 0 + + elif (GC_upper_index > num_GC_neurons): + GC_upper_index = num_GC_neurons + + for j in range(GC_medium_index - GC_lower_index): + list_GOC_GC.append((i, GC_lower_index + j)) + + for j in range(GC_medium_index + 20 - GC_medium_index): + list_MF_GC.append((i, GC_medium_index + j)) + + for j in range(GC_upper_index - GC_medium_index - 20): + list_GOC_GC_2.append((i, GC_medium_index + 20 + j)) + +GO_GC_con1 = sim.Projection( + GOC_population, GC_population, + sim.FromListConnector(list_GOC_GC, weight_distr_GO, delay_distr)) + +MF_GC_con2 = sim.Projection( + MF_population, GC_population, + sim.FromListConnector(list_MF_GC, weight_distr_MF, delay_distr)) + +GO_GC_con3 = sim.Projection( + 
GOC_population, GC_population, + sim.FromListConnector(list_GOC_GC_2, weight_distr_GO, delay_distr)) + +# Create PC-VN connections +pc_vn_connections = sim.Projection( + PC_population, VN_population, sim.OneToOneConnector(), + # receptor_type='GABA', + synapse_type=sim.StaticSynapse(delay=1.0, weight=pc_vn_weights)) + +# Create MF-VN learning rule - cos +mfvn_plas = sim.STDPMechanism( + timing_dependence=sim.extra_models.TimingDependenceMFVN(beta=beta_c, + sigma=sigma_c), + weight_dependence=sim.extra_models.WeightDependenceMFVN( + w_min=min_weight_c, w_max=max_weight_c, pot_alpha=pot_alpha_c), + weight=initial_weight_c, delay=plastic_delay_c) + +# Create MF to VN connections +mf_vn_connections = sim.Projection( + MF_population, VN_population, sim.AllToAllConnector(), + synapse_type=mfvn_plas, receptor_type="excitatory") + +# Create projection from PC to VN -- replaces "TEACHING SIGNAL" +pc_vn_connections = sim.Projection( + PC_population, VN_population, sim.OneToOneConnector(), + sim.StaticSynapse(weight=0.0, delay=1), receptor_type="excitatory") + +# create PF-PC learning rule - sin +pfpc_plas = sim.STDPMechanism( + timing_dependence=sim.extra_models.TimingDependencePFPC(t_peak=t_peak_s), + weight_dependence=sim.extra_models.WeightDependencePFPC( + w_min=min_weight_s, w_max=max_weight_s, pot_alpha=pot_alpha_s), + weight=initial_weight_s, delay=plastic_delay_s) + +# Create PF-PC connections +pf_pc_connections = sim.Projection( + GC_population, PC_population, sim.AllToAllConnector(), + synapse_type=pfpc_plas, receptor_type="excitatory") + +# Create IO-PC connections. This synapse with "receptor_type=COMPLEX_SPIKE" +# propagates the learning signals that drive the plasticity mechanisms in +# GC-PC synapses +io_pc_connections = sim.Projection( + IO_population, PC_population, sim.OneToOneConnector(), + # receptor_type='COMPLEX_SPIKE', + synapse_type=sim.StaticSynapse(delay=1.0, weight=io_pc_weights)) + +lif_pop = sim.Population(1024, sim.IF_curr_exp(), label='pop_lif') + +out_pop = sim.Population(128, sim.IF_curr_exp(), label='pop_out') diff --git a/examples/icub_vor_examples/cerebellum_tb.py b/examples/icub_vor_examples/cerebellum_tb.py new file mode 100644 index 0000000..993b87b --- /dev/null +++ b/examples/icub_vor_examples/cerebellum_tb.py @@ -0,0 +1,428 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import pyNN.spiNNaker as sim
+import numpy as np
+import matplotlib.pyplot as plt
+
+from pyNN.utility.plotting import Figure, Panel
+from pyNN.random import RandomDistribution
+
+L_RATE = 2
+H_RATE = 20
+
+# cerebellum test bench
+runtime = 1000
+
+# Synapse parameters
+gc_pc_weights = 0.005
+mf_vn_weights = 0.0005
+pc_vn_weights = 0.01
+cf_pc_weights = 0.0
+mf_gc_weights = 0.5
+go_gc_weights = 0.002
+input_weights = 0.0025
+mf_go_weights = 0.1
+
+# Network parameters
+num_MF_neurons = 100
+num_GC_neurons = 2000
+num_GOC_neurons = 100
+num_PC_neurons = 200
+num_VN_neurons = 200
+num_CF_neurons = 200
+
+# Random distribution for synapses delays and weights (MF and GO)
+delay_distr = RandomDistribution('uniform', (1.0, 10.0))
+weight_distr_MF = RandomDistribution(
+    'uniform', (mf_gc_weights*0.8, mf_gc_weights*1.2))
+seed_MF = 85524
+weight_distr_GO = RandomDistribution(
+    'uniform', (go_gc_weights*0.8, go_gc_weights*1.2))
+seed_GO = 24568
+
+
+# Post-synapse population
+neuron_params = {
+    "v_thresh": -50,
+    "v_reset": -70,
+    "v_rest": -65,
+    "i_offset": 0  # DC input
+}
+
+# Learning parameters cos rule (MF to VN)
+min_weight_c = 0
+max_weight_c = 0.005
+pot_alpha_c = 0.001  # this is alpha in the paper
+beta_c = 11
+sigma_c = 201
+initial_weight_c = 0.001  # max_weight_c # 0.0005
+# initial_weight_c = 0.05
+plastic_delay_c = 4
+
+# Learning parameters sin rule (GrC to PC)
+min_weight_s = 0
+max_weight_s = 0.01
+pot_alpha_s = 0.01
+t_peak_s = 100
+initial_weight_s = max_weight_s  # 0.0001
+plastic_delay_s = 4
+weight_dist_pfpc = RandomDistribution('uniform',
+                                      (initial_weight_s*0.8,
+                                       initial_weight_s*1.2))
+seed_PFPC = 24534
+
+sim.setup(timestep=1.)
+
+# Sensorial Activity: input activity from the vestibular system (this will
+# come from the head IMU; for now it is a test bench).
+# We simulate the output of the head encoders with a sinusoidal function. Each
+# "sensorial activity" value is derived from the head position and velocity.
+# From that value, we generate the mean firing rate of the MF neurons (later
+# this will be an input that will come from the robot, through the spinnLink).
+# The neurons that are active depend on the value of the sensorial activity:
+# for each value a Gaussian is created, centred on a specific neuron.
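To make the population code described in the comment above concrete: each normalised head position (or velocity) in [0, 1] is mapped to 50 mean Poisson rates through Gaussian tuning curves. A self-contained sketch of that mapping, using the same neuron count, sigma and maximum rate as `sensorial_activity` below (not part of the test bench itself):

```python
import numpy as np


def gaussian_rates(value, n_neurons=50, sigma=0.02, max_rate=600.0):
    # one tuning curve per neuron, centred at i / n_neurons + 0.01
    centres = np.arange(n_neurons) / n_neurons + 0.01
    return max_rate * np.exp(-((value - centres) ** 2) / (2.0 * sigma ** 2))


rates = gaussian_rates(0.37)
print(np.argmax(rates))  # neuron 18, whose centre (0.37) matches the input
```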
+ +# Prepare variables once at beginning +MAX_RATE = 600.0 +MAX_AMPLITUDE = 0.8 +RELATIVE_AMPLITUDE = 1.0 +_head_pos = [] +_head_vel = [] + +i = np.arange(0, 1000, 0.001) +for t in i: + desired_speed = -np.cos( + t * 2 * np.pi) * MAX_AMPLITUDE * RELATIVE_AMPLITUDE * 2.0 * np.pi + desired_pos = -np.sin( + t * 2 * np.pi) * MAX_AMPLITUDE * RELATIVE_AMPLITUDE + _head_pos.append(desired_pos) + _head_vel.append(desired_speed) + + +def sensorial_activity(pt): + # pt is a single time point at which we measure the head encoder's output + # single point over time + head_pos = _head_pos[pt] + head_vel = _head_vel[pt] + + head_pos = ((head_pos + 0.8) / 1.6) + head_vel = ((head_vel + 0.8 * 2 * np.pi) / (1.6 * 2 * np.pi)) + + if head_pos > 1.0: + head_pos = 1.0 + elif head_pos < 0.0: + head_pos = 0.0 + if head_vel > 1.0: + head_vel = 1.0 + elif head_vel < 0.0: + head_vel = 0.0 + + min_rate = 0.0 + max_rate = MAX_RATE + sigma = 0.02 + MF_pos_activity = np.zeros((50)) + MF_vel_activity = np.zeros((50)) + + # generate gaussian distributions around the neuron tuned to a given + # sensorial activity + for i in range(50): + mean = float(i) / 50.0 + 0.01 + gaussian = np.exp( + -((head_pos - mean) * (head_pos - mean))/(2.0 * sigma * sigma)) + MF_pos_activity[i] = min_rate + gaussian * (max_rate - min_rate) + + for i in range(50): + mean = float(i) / 50.0 + 0.01 + gaussian = np.exp( + -((head_vel - mean) * (head_vel - mean))/(2.0 * sigma * sigma)) + MF_vel_activity[i] = min_rate + gaussian * (max_rate - min_rate) + + sa_mean_freq = np.concatenate((MF_pos_activity, MF_vel_activity)) + out = [sa_mean_freq, head_pos, head_vel] + return out + + +# Error Activity: error from eye and head encoders +def error_activity(error_): + + IO_agonist = np.zeros((100)) + IO_antagonist = np.zeros((100)) + + IO_agonist[:] = H_RATE + IO_antagonist[:] = L_RATE + + ea_rate = np.concatenate((IO_agonist, IO_antagonist)) + + return ea_rate + + +def process_VN_spiketrains(VN_spikes, t_start): + total_spikes = 0 + for spiketrain in VN_spikes.segments[0].spiketrains: + s = spiketrain.as_array()[ + np.where(spiketrain.as_array() >= t_start)[0]] + total_spikes += len(s) + + return total_spikes + + +############################################################### +# Create Populations +############################################################### + +# Create MF population - fake input population that will be substituted by +# external input from robot + +MF_population = sim.Population( + num_MF_neurons, # number of sources + sim.SpikeSourcePoisson, # source type + # {'rate': sa_mean_freq}, # source spike times + {'rate': sensorial_activity(0)[0]}, # source spike times + label="MFLayer", # identifier + additional_parameters={"max_rate": MAX_RATE, "seed": seed_MF} + ) + +# Create GOC population +GOC_population = sim.Population(num_GOC_neurons, sim.IF_cond_exp(), + label='GOCLayer', + additional_parameters={"seed": seed_GO}) + +# create PC population +PC_population = sim.Population( + num_PC_neurons, # number of neurons + sim.IF_cond_exp(**neuron_params), # Neuron model + label="Purkinje Cell", # identifier + additional_parameters={"seed": seed_PFPC} + ) + +# create VN population +VN_population = sim.Population( + num_VN_neurons, # number of neurons + sim.IF_cond_exp(**neuron_params), # Neuron model + label="Vestibular Nuclei", # identifier + additional_parameters={"seed": seed_PFPC} + ) + +# Create GrC population +GC_population = sim.Population(num_GC_neurons, sim.IF_curr_exp(), + label='GCLayer', + additional_parameters={"seed": seed_GO}) + 
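+# NOTE: in this test bench error_activity() ignores its argument and returns
+# constant agonist/antagonist rates (H_RATE and L_RATE), so the value of `err`
+# below documents the intended error but does not change the CF drive.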
+# generate fake error (it should be calculated from sensorial activity in error +# activity, but we skip it and just generate an error from -1.5 to 1.5) +err = -0.7 # other values to test: -0.3 0 0.3 0.7 + +# Create CF population - fake input population that will be substituted by +# external input from robot +CF_population = sim.Population( + num_CF_neurons, # number of sources + sim.SpikeSourcePoisson, # source type + # {'rate': sa_mean_freq}, # source spike times + {'rate': error_activity(err)}, # source spike times + label="CFLayer", # identifier + additional_parameters={"max_rate": MAX_RATE} + ) + +############################################################### +# Create connections +############################################################### + +# Create MF-GO connections +mf_go_connections = sim.Projection(MF_population, + GOC_population, + sim.OneToOneConnector(), + sim.StaticSynapse(delay=delay_distr, + weight=mf_go_weights), + receptor_type='excitatory') + +# Create MF-GC and GO-GC connections +float_num_MF_neurons = float(num_MF_neurons) + +list_GOC_GC = [] +list_MF_GC = [] +list_GOC_GC_2 = [] +# projections to subpopulations +# https://github.com/SpiNNakerManchester/sPyNNaker8/issues/168) +for i in range(num_MF_neurons): + GC_medium_index = int( + round((i / float_num_MF_neurons) * num_GC_neurons)) + GC_lower_index = GC_medium_index - 40 + GC_upper_index = GC_medium_index + 60 + + if (GC_lower_index < 0): + GC_lower_index = 0 + + elif (GC_upper_index > num_GC_neurons): + GC_upper_index = num_GC_neurons + + for j in range(GC_medium_index - GC_lower_index): + list_GOC_GC.append( + (i, GC_lower_index + j, # go_gc_weights, 1) + weight_distr_GO.next(), delay_distr.next())) + + for j in range(GC_medium_index + 20 - GC_medium_index): + list_MF_GC.append( + (i, GC_medium_index + j, # mf_gc_weights, 1) + weight_distr_MF.next(), delay_distr.next())) + + for j in range(GC_upper_index - GC_medium_index - 20): + list_GOC_GC_2.append( + (i, GC_medium_index + 20 + j, # go_gc_weights, 1) + weight_distr_GO.next(), delay_distr.next())) + +GO_GC_con1 = sim.Projection( + GOC_population, GC_population, sim.FromListConnector(list_GOC_GC), + receptor_type='inhibitory') # this should be inhibitory + +MF_GC_con2 = sim.Projection( + MF_population, GC_population, sim.FromListConnector(list_MF_GC), + receptor_type='excitatory') + +GO_GC_con3 = sim.Projection( + GOC_population, GC_population, sim.FromListConnector(list_GOC_GC_2), + receptor_type='inhibitory') + + +# Create PC-VN connections +pc_vn_connections = sim.Projection( + PC_population, VN_population, sim.OneToOneConnector(), + # receptor_type='GABA', # Should these be inhibitory? 
+ synapse_type=sim.StaticSynapse(delay=delay_distr, weight=pc_vn_weights), + receptor_type='inhibitory') + +# Create MF-VN learning rule - cos +mfvn_plas = sim.STDPMechanism( + timing_dependence=sim.extra_models.TimingDependenceMFVN(beta=beta_c, + sigma=sigma_c), + weight_dependence=sim.extra_models.WeightDependenceMFVN( + w_min=min_weight_c, w_max=max_weight_c, pot_alpha=pot_alpha_c), + weight=initial_weight_c, delay=delay_distr) + +# Create MF to VN connections +mf_vn_connections = sim.Projection( + MF_population, VN_population, sim.AllToAllConnector(), + # Needs mapping as FromListConnector to make efficient + synapse_type=mfvn_plas, receptor_type="excitatory") + +# Create projection from PC to VN -- replaces "TEACHING SIGNAL" +pc_vn_connections = sim.Projection( + PC_population, VN_population, sim.OneToOneConnector(), + sim.StaticSynapse(weight=0.0, delay=1.0), + receptor_type="excitatory") # "TEACHING SIGNAL" + +# create PF-PC learning rule - sin +pfpc_plas = sim.STDPMechanism( + timing_dependence=sim.extra_models.TimingDependencePFPC(t_peak=t_peak_s), + weight_dependence=sim.extra_models.WeightDependencePFPC( + w_min=min_weight_s, w_max=max_weight_s, pot_alpha=pot_alpha_s), + weight=initial_weight_s, delay=delay_distr + ) + +# Create PF-PC connections +pf_pc_connections = sim.Projection( + GC_population, PC_population, sim.AllToAllConnector(), + synapse_type=pfpc_plas, receptor_type="excitatory") + +# Create IO-PC connections. This synapse with "receptor_type=COMPLEX_SPIKE" +# propagates the learning signals that drive the plasticity mechanisms in +# GC-PC synapses +cf_pc_connections = sim.Projection( + CF_population, PC_population, sim.OneToOneConnector(), + # receptor_type='COMPLEX_SPIKE', + synapse_type=sim.StaticSynapse(delay=1.0, weight=cf_pc_weights), + receptor_type='excitatory') + +MF_population.record(['spikes']) +CF_population.record(['spikes']) +GC_population.record('all') +GOC_population.record(['spikes']) +VN_population.record('all') # VN_population.record(['spikes']) +PC_population.record(['spikes']) + +samples_in_repeat = 99 +sample_time = 10 +repeats = 1 +total_runtime = 0 +VN_transfer_func = [] + +for i in range(samples_in_repeat): + sim.run(sample_time) + + VN_spikes = VN_population.get_data('spikes') + VN_transfer_func.append(process_VN_spiketrains(VN_spikes, total_runtime)) + + total_runtime += sample_time + + print(total_runtime) + + MF_population.set(rate=sensorial_activity(total_runtime)[0]) + +total_runtime = runtime*repeats + +MF_spikes = MF_population.get_data('spikes') +CF_spikes = CF_population.get_data('spikes') +GC_spikes = GC_population.get_data('all') +GOC_spikes = GOC_population.get_data('spikes') +VN_spikes = VN_population.get_data('all') # VN_population.get_data('spikes') +PC_spikes = PC_population.get_data('spikes') + +mfvn_weights = mf_vn_connections.get('weight', 'list', with_address=False) +pfpc_weights = pf_pc_connections.get('weight', 'list', with_address=False) + +# Plot +F = Figure( + Panel(MF_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='MF_spikes'), + Panel(CF_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='CF_spikes'), + Panel(GC_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='GC_spikes'), + Panel(GOC_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='GOC_spikes'), + Panel(PC_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, 
total_runtime), + xlabel='PC_spikes'), + Panel(VN_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='VN_spikes'), + Panel(VN_spikes.segments[0].filter(name='gsyn_inh')[0], + ylabel="Membrane potential (mV)", yticks=True, + xlim=(0, total_runtime)) + ) +plt.show() + +plt.figure() +plt.plot(mfvn_weights, + label='mf-vn weights (init: {})'.format(initial_weight_c)) +plt.legend() + +plt.figure() +plt.plot(pfpc_weights, color='orange', + label='pf-pc weights (init: {})'.format(initial_weight_s)) +plt.legend() + +print(VN_transfer_func) + +plt.figure() +plt.plot(VN_transfer_func) + +plt.show() + +sim.end() +print("job done") diff --git a/examples/icub_vor_examples/dataflow.py b/examples/icub_vor_examples/dataflow.py new file mode 100644 index 0000000..9088c03 --- /dev/null +++ b/examples/icub_vor_examples/dataflow.py @@ -0,0 +1,151 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# import socket +import pyNN.spiNNaker as sim +# import numpy as np +# import logging +# import matplotlib.pyplot as plt + +from pacman.model.constraints.key_allocator_constraints import ( + FixedKeyAndMaskConstraint) +from pacman.model.graphs.application import ApplicationSpiNNakerLinkVertex +from pacman.model.routing_info import BaseKeyAndMask +from spinn_front_end_common.abstract_models.\ + abstract_provides_outgoing_partition_constraints import ( + AbstractProvidesOutgoingPartitionConstraints) +from spinn_utilities.overrides import overrides +from spinn_front_end_common.abstract_models.\ + abstract_provides_incoming_partition_constraints import ( + AbstractProvidesIncomingPartitionConstraints) +from pacman.operations.routing_info_allocator_algorithms.\ + malloc_based_routing_allocator.utils import get_possible_masks +from spinn_front_end_common.utility_models.command_sender_machine_vertex \ + import CommandSenderMachineVertex + +from spinn_front_end_common.abstract_models \ + import AbstractSendMeMulticastCommandsVertex +from spinn_front_end_common.utility_models.multi_cast_command \ + import MultiCastCommand + +# from pyNN.utility import Timer +# from pyNN.utility.plotting import Figure, Panel +# from pyNN.random import RandomDistribution, NumpyRNG + +NUM_NEUR_IN = 1024 # 1024 # 2x240x304 mask -> 0xFFFE0000 +MASK_IN = 0xFFFFFC00 # 0xFFFFFC00 +NUM_NEUR_OUT = 1024 +# MASK_OUT =0xFFFFFC00 + + +class ICUBInputVertex( + ApplicationSpiNNakerLinkVertex, + AbstractProvidesOutgoingPartitionConstraints, + AbstractProvidesIncomingPartitionConstraints, + AbstractSendMeMulticastCommandsVertex): + + def __init__(self, spinnaker_link_id, board_address=None, + constraints=None, label=None): + + ApplicationSpiNNakerLinkVertex.__init__( + self, n_atoms=NUM_NEUR_IN, spinnaker_link_id=spinnaker_link_id, + board_address=board_address, label=label) + + # AbstractProvidesNKeysForPartition.__init__(self) + AbstractProvidesOutgoingPartitionConstraints.__init__(self) + AbstractSendMeMulticastCommandsVertex.__init__(self) + + 
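+    # The two constraint methods below fix the routing keys used over the
+    # SpiNNaker link: outgoing retina traffic is pinned to base_key=0 with
+    # MASK_IN = 0xFFFFFC00 (the low 10 bits address the 1024 input neurons),
+    # while traffic coming back in from each pre-vertex is keyed as
+    # (0x1000 + index) << 16.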
@overrides(AbstractProvidesOutgoingPartitionConstraints. + get_outgoing_partition_constraints) + def get_outgoing_partition_constraints(self, partition): + return [FixedKeyAndMaskConstraint( + keys_and_masks=[BaseKeyAndMask( + base_key=0, # upper part of the key, + mask=MASK_IN)])] + + @overrides(AbstractProvidesIncomingPartitionConstraints. + get_incoming_partition_constraints) + def get_incoming_partition_constraints(self, partition): + if isinstance(partition.pre_vertex, CommandSenderMachineVertex): + return [] + # index = graph_mapper.get_machine_vertex_index(partition.pre_vertex) + # vertex_slice = graph_mapper.get_slice(partition.pre_vertex) + index = partition.pre_vertex.index + vertex_slice = partition.pre_vertex.vertex_slice + mask = get_possible_masks(vertex_slice.n_atoms)[0] + key = (0x1000 + index) << 16 + return [FixedKeyAndMaskConstraint( + keys_and_masks=[BaseKeyAndMask(key, mask)])] + + @property + @overrides(AbstractSendMeMulticastCommandsVertex.start_resume_commands) + def start_resume_commands(self): + return [MultiCastCommand( + key=0x80000000, payload=0, repeat=5, delay_between_repeats=100)] + + @property + @overrides(AbstractSendMeMulticastCommandsVertex.pause_stop_commands) + def pause_stop_commands(self): + return [MultiCastCommand( + key=0x40000000, payload=0, repeat=5, delay_between_repeats=100)] + + @property + @overrides(AbstractSendMeMulticastCommandsVertex.timed_commands) + def timed_commands(self): + return [] + + +sim.setup(timestep=1.0) +# sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 32) + +# set up populations, +pop = sim.Population(None, ICUBInputVertex(spinnaker_link_id=0), + label='pop_in') + +# neural population , +neuron_pop = sim.Population(NUM_NEUR_OUT, sim.IF_curr_exp(), + label='neuron_pop') + +sim.Projection(pop, neuron_pop, sim.OneToOneConnector(), + sim.StaticSynapse(weight=20.0)) + +# pop_out = sim.Population(None, ICUBOutputVertex(spinnaker_link_id=0), +# label='pop_out') + +sim.external_devices.activate_live_output_to(neuron_pop, pop) + +# recordings and simulations +# neuron_pop.record("spikes") + +# simtime = 30000 #ms, +# sim.run(simtime) + +# continuous run until key press +# remember: do NOT record when running in this mode +sim.external_devices.run_forever() +input('Press enter to stop') + +# exc_spikes = neuron_pop.get_data("spikes") +# +# Figure( +# # raster plot of the neuron_pop +# Panel(exc_spikes.segments[0].spiketrains, xlabel="Time/ms", xticks=True, +# yticks=True, markersize=0.2, xlim=(0, simtime)), +# title="neuron_pop: spikes" +# ) +# plt.show() + +sim.end() + +print("finished") diff --git a/examples/icub_vor_examples/pb_cerebellum_tb.py b/examples/icub_vor_examples/pb_cerebellum_tb.py new file mode 100644 index 0000000..56f1bf0 --- /dev/null +++ b/examples/icub_vor_examples/pb_cerebellum_tb.py @@ -0,0 +1,810 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import pyNN.spiNNaker as sim
+import numpy as np
+import pylab as plt
+import matplotlib as mlib
+
+from pyNN.utility.plotting import Figure, Panel
+from pyNN.random import RandomDistribution
+
+# PAB imports
+import traceback
+import os
+import neo
+
+# ====================== MAKING THINGS LOOK CONSISTENT ========================
+# ensure we use viridis as the default cmap
+plt.viridis()
+
+# ensure we use the same rc parameters for all matplotlib outputs
+mlib.rcParams.update({'font.size': 24})
+mlib.rcParams.update({'errorbar.capsize': 5})
+mlib.rcParams.update({'figure.autolayout': True})
+viridis_cmap = mlib.cm.get_cmap('viridis')
+
+PREFERRED_ORDER = [
+    'mossy_fibers',
+    'climbing_fibers',
+    'granule',
+    'golgi',
+    'purkinje',
+    'vn'
+]
+
+
+# PAB UTILS
+def get_plot_order(for_keys):
+    # Compute plot order
+    plot_order = []
+    # only focus on keys for pops that have spikes
+    key_duplicate = list(for_keys)
+    key_duplicate.sort()
+    for pref in PREFERRED_ORDER:
+        for i, key in enumerate(key_duplicate):
+            if pref in key:
+                plot_order.append(key)
+                key_duplicate.pop(i)
+
+    # add remaining keys to the end of plot order
+    plot_order += key_duplicate
+    print("Plot order:", plot_order)
+    return plot_order
+
+
+def color_for_index(index, size, cmap=viridis_cmap):
+    return cmap(index / (size + 1))
+
+
+def convert_spiketrains(spiketrains):
+    """ Converts a list of spiketrains into sPyNNaker format
+
+    :param spiketrains: List of SpikeTrains
+    :rtype: numpy.ndarray
+    """
+    if len(spiketrains) == 0:
+        return np.empty(shape=(0, 2))
+
+    neurons = np.concatenate([
+        np.repeat(x.annotations['source_index'], len(x))
+        for x in spiketrains])
+    spikes = np.concatenate([x.magnitude for x in spiketrains])
+    return np.column_stack((neurons, spikes))
+
+
+def convert_spikes(neo, run=0):
+    """ Extracts the spikes for one run from a Neo object
+
+    :param neo: neo Object including Spike Data
+    :param run: Zero based index of the run to extract data for
+    :type run: int
+    :rtype: numpy.ndarray
+    """
+    if len(neo.segments) <= run:
+        raise ValueError(
+            "Data only contains {} so unable to run {}. 
Note run is the " + "zero based index.".format(len(neo.segments), run)) + return convert_spiketrains(neo.segments[run].spiketrains) + + +def write_short_msg(msg, value): + print("{:40}:{:39}".format(msg, str(value))) + + +def save_figure(plt, name, extensions=(".png",), **kwargs): + for ext in extensions: + write_short_msg("Plotting", name + ext) + plt.savefig(name + ext, **kwargs) + + +fig_folder = "figures/" +# Check if the folders exist +if not os.path.isdir(fig_folder) and not os.path.exists(fig_folder): + os.mkdir(fig_folder) + +# Record SCRIPT start time (wall clock) +start_time = plt.datetime.datetime.now() + +# Starting to record additional parameters + +L_RATE = 2 +H_RATE = 20 + +# cerebellum test bench +runtime = 1000 + +# Synapse parameters +gc_pc_weights = 0.005 +mf_vn_weights = 0.0005 +pc_vn_weights = 0.01 +cf_pc_weights = 0.0 +mf_gc_weights = 0.5 +go_gc_weights = 0.002 +input_weights = 0.0025 +mf_go_weights = 0.1 + +# Network parameters +num_MF_neurons = 100 +num_GC_neurons = 2000 +num_GOC_neurons = 100 +num_PC_neurons = 200 +num_VN_neurons = 200 +num_CF_neurons = 200 + +# Random distribution for synapses delays and weights (MF and GO) +delay_distr = RandomDistribution( + 'uniform', (1.0, 10.0)) +weight_distr_MF = RandomDistribution( + 'uniform', (mf_gc_weights * 0.8, mf_gc_weights * 1.2)) +weight_distr_GO = RandomDistribution( + 'uniform', (go_gc_weights * 0.8, go_gc_weights * 1.2)) + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input +} + +all_neurons = { + "mossy_fibers": num_MF_neurons, + "granule": num_GC_neurons, + "golgi": num_GOC_neurons, + "purkinje": num_PC_neurons, + "vn": num_VN_neurons, + "climbing_fibers": num_CF_neurons +} + +all_populations = { + +} + +initial_connectivity = { + +} + +final_connectivity = { + +} + +all_projections = { + +} + +# Learning parameters cos rule +# ==============(MF to VN)============== +min_weight_c = 0 +max_weight_c = 0.005 +pot_alpha_c = 0.001 # this is alpha in the paper +beta_c = 11 +sigma_c = 201 +initial_weight_c = 0.001 # max_weight_c #0.0005 +plastic_delay_c = 4 + +# Learning parameters sin rule +# ==============(GrC to PC)============== +min_weight_s = 0 +max_weight_s = 0.01 +pot_alpha_s = 0.01 +t_peak_s = 100 +initial_weight_s = max_weight_s # 0.0001 +plastic_delay_s = 4 +weight_dist_pfpc = RandomDistribution('uniform', + (initial_weight_s * 0.8, + initial_weight_s * 1.2)) + +sim.setup(timestep=1., min_delay=1, max_delay=15) +global_n_neurons_per_core = 64 +sim.set_number_of_neurons_per_core( + sim.SpikeSourcePoisson, global_n_neurons_per_core) +sim.set_number_of_neurons_per_core( + sim.SpikeSourceArray, global_n_neurons_per_core) +sim.set_number_of_neurons_per_core(sim.IF_cond_exp, global_n_neurons_per_core) +sim.set_number_of_neurons_per_core( + sim.IF_cond_exp, global_n_neurons_per_core) + +# Sensorial Activity: input activity from vestibulus (will come from the head +# IMU, now it is a test bench) +# We simulate the output of the head encoders with a sinusoidal function. +# Each "sensorial activity" value is derived from the head position and +# velocity. From that value, we generate the mean firing rate of the MF neurons +# (later this will be an input that will come from the robot, through the +# spinnLink) the neurons that are active depend on the value of the sensorial +# activity. For each a gaussian is created centered on a specific neuron. 
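Before the model construction continues below, a brief note on the `convert_spiketrains` / `convert_spikes` helpers defined earlier in this file: they flatten Neo spike data into the two-column (neuron id, spike time) array that the raster plots at the end of the script expect. A minimal check with a hand-built spike train; the `source_index` annotation is normally added by sPyNNaker when recording, so setting it by hand here is purely illustrative:

```python
from quantities import ms
from neo import SpikeTrain

st = SpikeTrain([5.0, 12.0] * ms, t_stop=100.0 * ms)
st.annotations['source_index'] = 3  # hypothetical neuron id
print(convert_spiketrains([st]))
# [[ 3.  5.]
#  [ 3. 12.]]
```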
+ +# Prepare variables once at beginning +MAX_RATE = 600.0 +MAX_AMPLITUDE = 0.8 +RELATIVE_AMPLITUDE = 1.0 +_head_pos = [] +_head_vel = [] + +i = np.arange(0, 1000, 0.001) +for t in i: + desired_speed = -np.cos( + t * 2 * np.pi) * MAX_AMPLITUDE * RELATIVE_AMPLITUDE * 2.0 * np.pi + desired_pos = -np.sin( + t * 2 * np.pi) * MAX_AMPLITUDE * RELATIVE_AMPLITUDE + _head_pos.append(desired_pos) + _head_vel.append(desired_speed) + +f = plt.figure(1, figsize=(9, 9), dpi=400) +plt.plot(_head_pos) +plt.xlim([0, 1000]) +plt.title("Pre-computed head positions") +plt.tight_layout() +save_figure(plt, os.path.join(fig_folder, "head_positions"), + extensions=['.png', ]) +plt.close(f) + +f = plt.figure(1, figsize=(9, 9), dpi=400) +plt.plot(_head_vel) +plt.xlim([0, 1000]) +plt.title("Pre-computed head velocities") +plt.tight_layout() +save_figure(plt, os.path.join(fig_folder, "head_velocities"), + extensions=['.png', ]) +plt.close(f) + +normalised_head_pos = (np.asarray(_head_pos) + 0.8) / 1.6 +normalised_head_vel = ( + np.asarray(_head_vel) + 0.8 * 2 * np.pi) / (1.6 * 2 * np.pi) + +f = plt.figure(1, figsize=(9, 9), dpi=400) +plt.plot(((np.asarray(_head_pos) + 0.8) / 1.6)) +plt.xlim([0, 1000]) +plt.title("Head positions") +plt.xlabel("Time (ms)") +plt.ylabel("Proportion of max") +plt.tight_layout() +save_figure(plt, os.path.join(fig_folder, "head_positions_processed"), + extensions=['.png', '.pdf']) +plt.close(f) + +f = plt.figure(1, figsize=(9, 9), dpi=400) +plt.plot(((np.asarray(_head_vel) + 0.8 * 2 * np.pi) / (1.6 * 2 * np.pi))) +plt.xlim([0, 1000]) +plt.title("Head velocities") +plt.ylabel("Proportion of max") +plt.xlabel("Time (ms)") +plt.tight_layout() +save_figure(plt, os.path.join(fig_folder, "head_velocities_processed"), + extensions=['.png', '.pdf']) +plt.close(f) + +np.savetxt("normalised_head_positions.csv", normalised_head_pos, delimiter=",") +np.savetxt("normalised_head_velocities.csv", normalised_head_vel, + delimiter=",") + + +def sensorial_activity(pt): + """ + :param pt: a single time point at which we measure head encoder's output + :return: + """ + + # Head position and velocity seem to be retrieve from a look-up table then + # updated single point over time + head_pos = _head_pos[pt] + head_vel = _head_vel[pt] + + head_pos = ((head_pos + 0.8) / 1.6) + head_vel = ((head_vel + 0.8 * 2 * np.pi) / (1.6 * 2 * np.pi)) + + if head_pos > 1.0: + head_pos = 1.0 + elif head_pos < 0.0: + head_pos = 0.0 + if head_vel > 1.0: + head_vel = 1.0 + elif head_vel < 0.0: + head_vel = 0.0 + + min_rate = 0.0 + max_rate = MAX_RATE + sigma = 0.02 + MF_pos_activity = np.zeros((50)) + MF_vel_activity = np.zeros((50)) + + # generate gaussian distributions around the neuron tuned to a given + # sensorial activity + for i in range(50): + mean = float(i) / 50.0 + 0.01 + gaussian = np.exp( + -((head_pos - mean) * (head_pos - mean)) / (2.0 * sigma * sigma)) + MF_pos_activity[i] = min_rate + gaussian * (max_rate - min_rate) + + for i in range(50): + mean = float(i) / 50.0 + 0.01 + gaussian = np.exp( + -((head_vel - mean) * (head_vel - mean)) / (2.0 * sigma * sigma)) + MF_vel_activity[i] = min_rate + gaussian * (max_rate - min_rate) + + sa_mean_freq = np.concatenate((MF_pos_activity, MF_vel_activity)) + out = [sa_mean_freq, head_pos, head_vel] + return out + + +# Error Activity: error from eye and head encoders +def error_activity(error_): + IO_agonist = np.zeros((100)) + IO_antagonist = np.zeros((100)) + + IO_agonist[:] = H_RATE + IO_antagonist[:] = L_RATE + + ea_rate = np.concatenate((IO_agonist, IO_antagonist)) + + 
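+    # the first 100 entries drive the "agonist" CF channel at H_RATE and the
+    # last 100 drive the "antagonist" channel at L_RATE; error_ is not used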
return ea_rate + + +def process_VN_spiketrains(VN_spikes, t_start): + total_spikes = 0 + for spiketrain in VN_spikes.segments[0].spiketrains: + s = spiketrain.as_array()[ + np.where(spiketrain.as_array() >= t_start)[0]] + total_spikes += len(s) + + return total_spikes + + +############################################################################## +# ============================ Create populations ============================ +############################################################################## + +# Create MF population - fake input population that will be substituted by +# external input from robot + +MF_population = sim.Population( + num_MF_neurons, # number of sources + sim.SpikeSourcePoisson, # source type + # {'rate': sa_mean_freq}, # source spike times + {'rate': sensorial_activity(0)[0]}, # source spike times + label="MFLayer", # identifier + additional_parameters={"max_rate": MAX_RATE, 'seed': 24534} + ) + +all_populations["mossy_fibers"] = MF_population + +# Create GOC population +GOC_population = sim.Population(num_GOC_neurons, sim.IF_cond_exp(), + label='GOCLayer', + additional_parameters={'seed': 24534}) +all_populations["golgi"] = GOC_population + +# create PC population +PC_population = sim.Population( + num_PC_neurons, # number of neurons + sim.IF_cond_exp(**neuron_params), # Neuron model + label="Purkinje Cell", # identifier + additional_parameters={'seed': 24534}) +all_populations["purkinje"] = PC_population + +# create VN population +VN_population = sim.Population( + num_VN_neurons, # number of neurons + sim.IF_cond_exp(**neuron_params), # Neuron model + label="Vestibular Nuclei", # identifier + additional_parameters={'seed': 24534}) +all_populations["vn"] = VN_population + +# Create GrC population +GC_population = sim.Population(num_GC_neurons, sim.IF_curr_exp(), + label='GCLayer', + additional_parameters={'seed': 24534}) +all_populations["granule"] = GC_population + +# generate fake error (it should be calculated from sensorial activity in error +# activity, but we skip it and just generate an error from -1.5 to 1.5) +err = -0.7 # other values to test: -0.3 0 0.3 0.7 + +# Create CF population - fake input population that will be substituted by +# external input from robot +CF_population = sim.Population( + num_CF_neurons, # number of sources + sim.SpikeSourcePoisson, # source type + # {'rate': sa_mean_freq}, # source spike times + {'rate': error_activity(err)}, # source spike times + label="CFLayer", # identifier + additional_parameters={"max_rate": MAX_RATE, 'seed': 24534} + ) +all_populations["climbing_fibers"] = CF_population + +############################################################################## +# ============================ Create connections ============================ +############################################################################## + +# Create MF-GO connections +mf_go_connections = sim.Projection(MF_population, + GOC_population, + sim.OneToOneConnector(), + sim.StaticSynapse(delay=delay_distr, + weight=mf_go_weights), + receptor_type='excitatory') +all_projections["mf_goc"] = mf_go_connections + +# Create MF-GC and GO-GC connections +float_num_MF_neurons = float(num_MF_neurons) + +list_GOC_GC = [] +list_MF_GC = [] +list_GOC_GC_2 = [] +# projections to subpopulations +# https://github.com/SpiNNakerManchester/sPyNNaker8/issues/168) +for i in range(num_MF_neurons): + GC_medium_index = int(round((i / float_num_MF_neurons) * num_GC_neurons)) + GC_lower_index = GC_medium_index - 40 + GC_upper_index = GC_medium_index + 60 + + if 
(GC_lower_index < 0): + GC_lower_index = 0 + + elif (GC_upper_index > num_GC_neurons): + GC_upper_index = num_GC_neurons + + for j in range(GC_medium_index - GC_lower_index): + list_GOC_GC.append( + (i, GC_lower_index + j, + # go_gc_weights, 1) + weight_distr_GO.next(), delay_distr.next()) + ) + + for j in range(GC_medium_index + 20 - GC_medium_index): + list_MF_GC.append( + (i, GC_medium_index + j, + # mf_gc_weights, 1) + weight_distr_MF.next(), delay_distr.next()) + ) + + for j in range(GC_upper_index - GC_medium_index - 20): + list_GOC_GC_2.append( + (i, GC_medium_index + 20 + j, + # go_gc_weights, 1) + weight_distr_GO.next(), delay_distr.next()) + ) + +GO_GC_con1 = sim.Projection(GOC_population, + GC_population, + sim.FromListConnector(list_GOC_GC), + receptor_type='inhibitory') +all_projections["goc_grc_1"] = GO_GC_con1 + +MF_GC_con2 = sim.Projection(MF_population, + GC_population, + sim.FromListConnector(list_MF_GC), + receptor_type='excitatory') +all_projections["mf_grc"] = MF_GC_con2 + +GO_GC_con3 = sim.Projection(GOC_population, + GC_population, + sim.FromListConnector(list_GOC_GC_2), + receptor_type='inhibitory') +all_projections["goc_grc_2"] = GO_GC_con3 + +# Create PC-VN connections +pc_vn_connections = sim.Projection( + PC_population, VN_population, sim.OneToOneConnector(), + # receptor_type='GABA', # Should these be inhibitory? + synapse_type=sim.StaticSynapse(delay=delay_distr, weight=pc_vn_weights), + receptor_type='inhibitory') +all_projections["pc_vn"] = pc_vn_connections + +# Create MF-VN learning rule - cos +mfvn_plas = sim.STDPMechanism( + timing_dependence=sim.extra_models.TimingDependenceMFVN(beta=beta_c, + sigma=sigma_c), + weight_dependence=sim.extra_models.WeightDependenceMFVN( + w_min=min_weight_c, w_max=max_weight_c, pot_alpha=pot_alpha_c), + weight=initial_weight_c, delay=delay_distr) + +# Create MF to VN connections +mf_vn_connections = sim.Projection( + MF_population, VN_population, sim.AllToAllConnector(), + # Needs mapping as FromListConnector to make efficient + synapse_type=mfvn_plas, + receptor_type="excitatory") +all_projections["mf_vn"] = mf_vn_connections + +# Create projection from PC to VN -- replaces "TEACHING SIGNAL" +pc_vn_connections = sim.Projection( + PC_population, VN_population, sim.OneToOneConnector(), + sim.StaticSynapse(weight=0.0, delay=1.0), + receptor_type="excitatory") # "TEACHING SIGNAL" +all_projections["pc_vn_teaching"] = pc_vn_connections + +# create PF-PC learning rule - sin +pfpc_plas = sim.STDPMechanism( + timing_dependence=sim.extra_models.TimingDependencePFPC(t_peak=t_peak_s), + weight_dependence=sim.extra_models.WeightDependencePFPC( + w_min=min_weight_s, w_max=max_weight_s, pot_alpha=pot_alpha_s), + weight=initial_weight_s, + delay=delay_distr +) + +# Create PF-PC connections +pf_pc_connections = sim.Projection( + GC_population, PC_population, sim.AllToAllConnector(), + synapse_type=pfpc_plas, + receptor_type="excitatory") +all_projections["pf_pc"] = pf_pc_connections + +# Create IO-PC connections. 
This synapse with "receptor_type=COMPLEX_SPIKE" +# propagates the learning signals that drive the plasticity mechanisms in +# GC-PC synapses +cf_pc_connections = sim.Projection( + CF_population, PC_population, sim.OneToOneConnector(), + # receptor_type='COMPLEX_SPIKE', + synapse_type=sim.StaticSynapse(delay=1.0, weight=cf_pc_weights), + receptor_type='excitatory') +all_projections["cf_pc"] = cf_pc_connections + +# ============================ Set up recordings ============================ + +MF_population.record(['spikes']) +CF_population.record(['spikes']) +GC_population.record('all') +# GOC_population.record(['spikes']) +GOC_population.record('all') +VN_population.record('all') # VN_population.record(['spikes']) +# PC_population.record(['spikes']) +PC_population.record('all') + +# ============================ Run simulation ============================ + +samples_in_repeat = 99 +sample_time = 10 +repeats = 1 +total_runtime = 0 +VN_transfer_func = [] + +print("=" * 80) +print("Running simulation for", runtime, " ms split into", samples_in_repeat, + "chunks.") +all_spikes_first_trial = {} +# Record simulation start time (wall clock) +sim_start_time = plt.datetime.datetime.now() +for i in range(samples_in_repeat): + sim.run(sample_time) + + VN_spikes = VN_population.get_data('spikes') + VN_transfer_func.append(process_VN_spiketrains(VN_spikes, total_runtime)) + + total_runtime += sample_time + + print(total_runtime) + + MF_population.set(rate=sensorial_activity(total_runtime)[0]) + +end_time = plt.datetime.datetime.now() +total_time = end_time - start_time +sim_total_time = end_time - sim_start_time + +total_runtime = runtime * repeats + +# ==================== Retrieving data from simulation ======================= + +MF_spikes = MF_population.get_data('spikes') +CF_spikes = CF_population.get_data('spikes') +GC_spikes = GC_population.get_data('all') +GOC_spikes = GOC_population.get_data('spikes') +VN_spikes = VN_population.get_data('spikes') +VN_gsyninh = VN_population.get_data('gsyn_inh') +PC_spikes = PC_population.get_data('spikes') + +mfvn_weights = mf_vn_connections.get('weight', 'list', with_address=False) +pfpc_weights = pf_pc_connections.get('weight', 'list', with_address=False) + +# Retrieve recordings +all_spikes = {} +for label, pop in all_populations.items(): + if pop is not None: + print("Retrieving recordings for ", label, "...") + all_spikes[label] = pop.get_data(['spikes']) +other_recordings = {} +for label, pop in all_populations.items(): + if label in ["mossy_fibers", "climbing_fibers"]: + continue + print("Retrieving recordings for ", label, "...") + other_recordings[label] = {} + + other_recordings[label]['current'] = np.array( + pop.get_data(['gsyn_inh']).filter(name='gsyn_inh'))[0].T + + other_recordings[label]['gsyn'] = np.array( + pop.get_data(['gsyn_exc']).filter(name='gsyn_exc'))[0].T + + other_recordings[label]['v'] = np.array( + pop.get_data(['v']).segments[0].filter(name='v'))[0].T + +# Retrieve final network connectivity +try: + final_connectivity = {} + for label, p in all_projections.items(): + if p is None: + print("Projection", label, "is not implemented!") + continue + print("Retrieving connectivity for projection ", label, "...") + try: + conn = \ + np.array(p.get(('weight', 'delay'), + format="list")._get_data_items()) + except Exception as e: + print("Careful! 
Something happened when retrieving the " + "connectivity:", e, "\nRetrying...") + conn = \ + np.array(p.get(('weight', 'delay'), format="list")) + + conn = np.array(conn.tolist()) + final_connectivity[label] = conn +except Exception: + # This simulator might not support the way this is done + final_connectivity = [] + traceback.print_exc() + +sim.end() +print("job done") +# Report time taken +print("Total time elapsed -- " + str(total_time)) +# ============================ Plotting some stuff =========================== +# ============================ PAB ANALYSIS ============================ + +# Compute plot order +plot_order = get_plot_order(all_spikes.keys()) +n_plots = float(len(plot_order)) + +# Check if using neo blocks +neo_all_spikes = {} +for pop, potential_neo_block in all_spikes.items(): + if isinstance(potential_neo_block, neo.Block): + # make a copy of the spikes dict + neo_all_spikes[pop] = potential_neo_block + all_spikes[pop] = convert_spikes(potential_neo_block) + +# Report useful parameters +print("=" * 80) +print("Analysis report") +print("-" * 80) +print("Current time", + plt.datetime.datetime.now().strftime("%H:%M:%S on %d.%m.%Y")) +# Report number of neurons +print("=" * 80) +print("Number of neurons in each population") +print("-" * 80) +for pop in plot_order: + print("\t{:20} -> {:10} neurons".format(pop, all_neurons[pop])) + +# Report weights values +print("Average weight per projection") +print("-" * 80) +conn_dict = {} +for key in final_connectivity: + # Connection holder annoyance here: + conn = np.asarray(final_connectivity[key]) + if final_connectivity[key] is None or conn.size == 0: + print("Skipping analysing connection", key) + continue + conn_exists = True + if len(conn.shape) == 1 or conn.shape[1] != 4: + try: + x = np.concatenate(conn) + conn = x + except Exception: + traceback.print_exc() + names = [('source', 'int_'), + ('target', 'int_'), + ('weight', 'float_'), + ('delay', 'float_')] + useful_conn = np.zeros((conn.shape[0], 4), dtype=np.float) + for i, (n, _) in enumerate(names): + useful_conn[:, i] = conn[n].astype(np.float) + final_connectivity[key] = useful_conn.astype(np.float) + conn = useful_conn.astype(np.float) + conn_dict[key] = conn + mean = np.mean(conn[:, 2]) + print("{:27} -> {:4.6f} uS".format( + key, mean)) + +print("Average Delay per projection") +print("-" * 80) +for key in final_connectivity: + conn = conn_dict[key] + mean = np.mean(conn[:, 3]) + print("{:27} -> {:4.2f} ms".format( + key, mean)) + +print("Plotting spiking raster plot for all populations") +f, axes = plt.subplots(len(all_spikes.keys()), 1, + figsize=(14, 20), sharex=True, dpi=400) +for index, pop in enumerate(plot_order): + curr_ax = axes[index] + # spike raster + _times = all_spikes[pop][:, 1] + _ids = all_spikes[pop][:, 0] + + curr_ax.scatter(_times, + _ids, + color=viridis_cmap(index / (n_plots + 1)), + s=.5, rasterized=True) + curr_ax.set_title(pop) +plt.xlabel("Time (ms)") +# plt.suptitle((use_display_name(simulator)+"\n") +f.tight_layout() +save_figure(plt, os.path.join(fig_folder, "raster_plots"), + extensions=['.png', '.pdf']) +plt.close(f) + +if not isinstance(final_connectivity, list): + for proj, conn in final_connectivity.items(): + f = plt.figure(1, figsize=(9, 9), dpi=400) + plt.hist(conn[:, 2], bins=20) + plt.title(proj) + plt.xlabel("Weight") + plt.ylabel("Count") + save_figure(plt, os.path.join( + fig_folder, "{}_weight_histogram".format(proj)), + extensions=['.png', ]) + plt.close(f) + +F = Figure( + Panel(MF_spikes.segments[0].spiketrains, + 
yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='MF_spikes'), + Panel(CF_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='CF_spikes'), + Panel(GC_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='GC_spikes'), + Panel(GOC_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='GOC_spikes'), + Panel(PC_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='PC_spikes'), + Panel(VN_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, total_runtime), + xlabel='VN_spikes'), + Panel(VN_gsyninh.segments[0].filter(name='gsyn_inh')[0], + ylabel="gsyn_inh", yticks=True, xlim=(0, total_runtime)) +) +save_figure(plt, os.path.join(fig_folder, "collections"), + extensions=['.png', ]) +# plt.show(block=False) + +plt.figure() +plt.plot(mfvn_weights, + label='mf-vn weights (init: {})'.format(initial_weight_c)) +plt.title("mfvn_weights") +plt.legend() +save_figure(plt, os.path.join(fig_folder, "mfvn_weights"), + extensions=['.png', ]) + +plt.figure() +plt.title("pfpc_weights") +plt.plot(pfpc_weights, color='orange', + label='pf-pc weights (init: {})'.format(initial_weight_s)) +plt.legend() +save_figure(plt, os.path.join(fig_folder, "pfpc_weights"), + extensions=['.png', ]) + +print(VN_transfer_func) +plt.figure() +plt.plot(VN_transfer_func) +plt.title("vn_transfer_function") +save_figure(plt, os.path.join(fig_folder, "VN_transfer_func"), + extensions=['.png', ]) + +# plt.show() diff --git a/examples/icub_vor_examples/readme.md b/examples/icub_vor_examples/readme.md new file mode 100644 index 0000000..73c929b --- /dev/null +++ b/examples/icub_vor_examples/readme.md @@ -0,0 +1,13 @@ +# Project's resources: + +### Telluride implementation of STDP for cerebellar model on pyNN: + +https://github.com/neworderofjamie/jamie_telluride_2016/tree/master/cerebellum/cerebellum + +### Network model (NEST) + +https://github.com/EduardoRosLab/VOR_in_neurorobotics + +### Neurons and synapse models (NEST) + +https://github.com/jgarridoalcazar/SpikingCerebellum diff --git a/examples/icub_vor_examples/receptive_fields_for_motion.py b/examples/icub_vor_examples/receptive_fields_for_motion.py new file mode 100644 index 0000000..4b6fade --- /dev/null +++ b/examples/icub_vor_examples/receptive_fields_for_motion.py @@ -0,0 +1,515 @@ +#!usr/bin/python + +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# import socket +import pyNN.spiNNaker as sim +# import numpy as np +# import logging +import matplotlib.pyplot as plt + +# (the files for these imports don't appear to be on the icub_vor branch) +# from decode_events import * +# from functions import * +# import yarp + +from pacman.model.constraints.key_allocator_constraints import ( + FixedKeyAndMaskConstraint) +from pacman.model.graphs.application import ApplicationSpiNNakerLinkVertex +from pacman.model.routing_info import BaseKeyAndMask +from spinn_front_end_common.abstract_models.\ + abstract_provides_outgoing_partition_constraints import ( + AbstractProvidesOutgoingPartitionConstraints) +from spinn_utilities.overrides import overrides +from spinn_front_end_common.abstract_models.\ + abstract_provides_incoming_partition_constraints import ( + AbstractProvidesIncomingPartitionConstraints) +from pacman.operations.routing_info_allocator_algorithms.\ + malloc_based_routing_allocator.utils import get_possible_masks +from spinn_front_end_common.utility_models.command_sender_machine_vertex \ + import CommandSenderMachineVertex + +from spinn_front_end_common.abstract_models \ + import AbstractSendMeMulticastCommandsVertex +from spinn_front_end_common.utility_models.multi_cast_command \ + import MultiCastCommand + +from pyNN.utility.plotting import Figure, Panel + +# yarp.Network.init() + +NUM_NEUR_IN = 1024 # 1024 # 2x240x304 mask -> 0xFFFE0000 +MASK_IN = 0xFFFFFC00 # 0xFFFFFC00 +NUM_NEUR_OUT = 1024 +MASK_OUT = 0xFFFFFFFC + + +class ICUBInputVertex( + ApplicationSpiNNakerLinkVertex, + AbstractProvidesOutgoingPartitionConstraints, + AbstractProvidesIncomingPartitionConstraints, + AbstractSendMeMulticastCommandsVertex): + + def __init__(self, spinnaker_link_id, board_address=None, + constraints=None, label=None): + + ApplicationSpiNNakerLinkVertex.__init__( + self, n_atoms=NUM_NEUR_IN, spinnaker_link_id=spinnaker_link_id, + board_address=board_address, label=label) + + # AbstractProvidesNKeysForPartition.__init__(self) + AbstractProvidesOutgoingPartitionConstraints.__init__(self) + AbstractSendMeMulticastCommandsVertex.__init__(self) + + @overrides(AbstractProvidesOutgoingPartitionConstraints. + get_outgoing_partition_constraints) + def get_outgoing_partition_constraints(self, partition): + return [FixedKeyAndMaskConstraint( + keys_and_masks=[BaseKeyAndMask( + base_key=0, # upper part of the key, + mask=MASK_IN)])] + + @overrides(AbstractProvidesIncomingPartitionConstraints. 
+ get_incoming_partition_constraints) + def get_incoming_partition_constraints(self, partition): + if isinstance(partition.pre_vertex, CommandSenderMachineVertex): + return [] + # index = graph_mapper.get_machine_vertex_index(partition.pre_vertex) + # vertex_slice = graph_mapper.get_slice(partition.pre_vertex) + index = partition.pre_vertex.index + vertex_slice = partition.pre_vertex.vertex_slice + mask = get_possible_masks(vertex_slice.n_atoms)[0] + key = (0x1000 + index) << 16 + return [FixedKeyAndMaskConstraint( + keys_and_masks=[BaseKeyAndMask(key, mask)])] + + @property + @overrides(AbstractSendMeMulticastCommandsVertex.start_resume_commands) + def start_resume_commands(self): + return [MultiCastCommand( + key=0x80000000, payload=0, repeat=5, delay_between_repeats=100)] + + @property + @overrides(AbstractSendMeMulticastCommandsVertex.pause_stop_commands) + def pause_stop_commands(self): + return [MultiCastCommand( + key=0x40000000, payload=0, repeat=5, delay_between_repeats=100)] + + @property + @overrides(AbstractSendMeMulticastCommandsVertex.timed_commands) + def timed_commands(self): + return [] + + +def convert_data_mapping(x, y, down_sample_x, down_sample_y): + if simulate: + x /= (304 / down_sample_x) + y /= (240 / down_sample_y) + address = (y * 304) + x + else: + address = (int(y) << 12) + (int(x) << 1) + 1 + return int(address) + + +def convert_data(data): + converted_data = [[] for i in range(304*240)] + for item in data: + converted_data[convert_data_mapping( + item[2], item[3], width, length)].append(item[0]*1000) + return converted_data + + +def convert_pixel_mapping(pixel, down_sample_x, down_sample_y): + x = pixel % 304 + y = (pixel - x) / 304 + if simulate: + x /= (304 / down_sample_x) + y /= (240 / down_sample_y) + address = (y * down_sample_x) + x + else: + address = (y << 12) + (x << 1) + 1 + return address + + +def map_neurons_to_field( + length, x_segments, y_segments, scale_down, layers, + x_size=304, y_size=240, central_x=304, central_y=240): + # base-filter = width x (dimension / segments) + # Y*8 E*2 X*9 P*1 + # x_segments /= 2 + # y_segments /= 2 + field_size_x = [] + field_size_y = [] + # define the field size for each layer + for i in range(layers): + field_size_length_x = length * (scale_down ** layers) + field_size_width_x = (x_size / x_segments) * (scale_down ** layers) + field_size_x.append([field_size_length_x, field_size_width_x]) + field_size_length_y = length * (scale_down ** layers) + field_size_width_y = (y_size / y_segments) * (scale_down ** layers) + field_size_y.append([field_size_length_y, field_size_width_y]) + + # map the field size to real pixels + pixel_mapping = [[[[-1, -1] for x in range(y_size)] for y in range(x_size)] + for i in range(layers + 1)] + field_size = [[[0 for i in range(y_segments)] for j in range(x_segments)] + for k in range(layers+1)] + x_border = int(x_size - central_x) # / 2 + y_border = int(y_size - central_y) # / 2 + for x in range(int(central_x / 2)): + for y in range(int(central_y / 2)): + x_segment = int(x / ((x_size - (x_border)) / x_segments)) + y_segment = int(y / ((y_size - (y_border)) / y_segments)) + pixel_mapping[layers][(x_size - (x_border // 2)) - x - 1][ + y + y_border] = [(x_segments - 1) - x_segment, y_segment] + pixel_mapping[layers][x + x_border][ + (y_size - (y_border // 2)) - y - 1] = [ + x_segment, (y_segments - 1) - y_segment] + pixel_mapping[layers][(x_size - (x_border // 2)) - x - 1][ + (y_size - (y_border // 2)) - y - 1] = [ + (x_segments - 1) - x_segment, (y_segments - 1) - y_segment] + 
pixel_mapping[layers][x + x_border][y + y_border] = [ + x_segment, y_segment] + # for layer in range(layers): + # x_border = int(x_size - (x_size * (scale_down ** layer))) # / 2 + # y_border = int(y_size - (y_size * (scale_down ** layer))) # / 2 + # scale_val = scale_down ** layer + # if x_border < x_size / 2 or y_border < y_size / 2: + # for x in range(0, (x_size / 2) - x_border): + # for y in range(0, (y_size / 2) - y_border): + # if x < length * (scale_val) or y < length * (scale_val): + # x_segment = int( + # x / ((x_size - (x_border * 2)) / x_segments)) + # y_segment = int( + # y / ((y_size - (y_border * 2)) / y_segments)) + # pixel_mapping[layer][x + x_border][y + y_border] = \ + # [x_segment, y_segment] + # field_size[layer][x_segment][y_segment] += 1 + # pixel_mapping[layer][((x_size - 1) - x_border) - x][ + # ((y_size - 1) - y_border) - y] = [ + # (x_segments - 1) - x_segment, + # (y_segments - 1) - y_segment] + # field_size[layer][(x_segments - 1) - x_segment][ + # (y_segments - 1) - y_segment] += 1 + # pixel_mapping[layer][x + x_border][ + # ((y_size - 1) - y_border) - y] = [ + # x_segment, (y_segments - 1) - y_segment] + # field_size[layer][x_segment][ + # (y_segments - 1) - y_segment] += 1 + # pixel_mapping[layer][((x_size - 1) - x_border) - x][ + # y + y_border] = [ + # (x_segments - 1) - x_segment, y_segment] + # field_size[layer][(x_segments - 1) - x_segment][ + # y_segment] += 1 + # # (print x, x_segment, x_segments, y, y_segment, + # # y_segments) + # # print(x, y) + # print("finished layer", layer, "with border", x_border, y_border) + + print("done mapping") + return pixel_mapping, field_size + + +def convert_xy_field_to_id(field_x, field_y, width, length): + # receptive_fields = width * 2 + length * 2 - 2 + id_val = (field_y * width) + field_x + return id_val + + +def map_to_from_list( + mapping, width, length, scale_down, pixel_weight=5, x_size=304, + y_size=240, motor_weight=1.5): + # motor_weight *= width * length + mapping, field_size = mapping + layers = len(mapping) + print("layer", layers, "width", width, "length", length) + from_list_connections = [[] for i in range(layers)] + motor_conn = [] + for layer in range(layers): + motor_control_e = [] + motor_control_i = [] + for x in range(x_size): + for y in range(y_size): + if mapping[layer][x][y][0] != -1: + if layer < layers - 1: + weight = pixel_weight / float( + field_size[layer][mapping[layer][x][y][0]][ + mapping[layer][x][y][1]]) + else: + weight = pixel_weight # / 16. 
+ connection = [ + convert_pixel_mapping(x + (304 * y), width, length), + convert_xy_field_to_id( + mapping[layer][x][y][0], mapping[layer][x][y][1], + width, length), + weight, 1] + if connection not in from_list_connections: + from_list_connections[layer].append(connection) + print(x, y, x + (304 * y), convert_pixel_mapping( + x + (304 * y), width, length), convert_xy_field_to_id( + mapping[layer][x][y][0], mapping[layer][x][y][1], + width, length)) + if convert_pixel_mapping( + x + (304 * y), width, length) >= 304*240 or \ + convert_xy_field_to_id( + mapping[layer][x][y][0], mapping[layer][x][y][1], + width, length) >= width*length: + print("oops") + for connection in from_list_connections[layer]: + if connection[1] % width < width / 2: + if [connection[1], 0, motor_weight, 1] not in motor_control_e: + motor_control_e.append([connection[1], 0, motor_weight, 1]) + motor_control_i.append([connection[1], 1, motor_weight, 1]) + if connection[1] % width > width / 2: + if [connection[1], 1, motor_weight, 1] not in motor_control_e: + motor_control_e.append([connection[1], 1, motor_weight, 1]) + motor_control_i.append([connection[1], 0, motor_weight, 1]) + if connection[1] / width < length / 2: + if [connection[1], 2, motor_weight, 1] not in motor_control_e: + motor_control_e.append([connection[1], 2, motor_weight, 1]) + motor_control_i.append([connection[1], 3, motor_weight, 1]) + if connection[1] / width > length / 2: + if [connection[1], 3, motor_weight, 1] not in motor_control_e: + motor_control_e.append([connection[1], 3, motor_weight, 1]) + motor_control_i.append([connection[1], 2, motor_weight, 1]) + motor_conn.append([motor_control_e, motor_control_i]) + print("created connections") + return from_list_connections, motor_conn + + +def split_the_from_list( + input, output, from_list, receptor_type="excitatory", max_conn=255): + # from_list_segments = [from_list[x:x + max_conn] for x in xrange( + # 0, len(from_list), max_conn)] + # file_name = "connections.txt" + # with open(file_name, 'w') as f: + # for segment in from_list_segments: + # connections_file = open(file_name, "w") + # for connection in segment: + # for item in connection: + # f.write("%s " % item) + # f.write("\n") + # connections_file.close() + # sim.Projection(input, output, sim.FromFileConnector(file_name), + # receptor_type=receptor_type) + + from_list_segments = [from_list[x:x + max_conn] for x in range( + 0, len(from_list), max_conn)] + for connection in from_list_segments: + sim.Projection(input, output, sim.FromListConnector(connection), + receptor_type=receptor_type) + + +def segment_hidden_pop(from_list, width, length, pre): + # hidden_pops = [] + list_of_lists = [[] for i in range(width*length)] + for connection in from_list: + if pre: + list_of_lists[connection[0]].append( + [0, connection[1], connection[2], connection[3]]) + else: + list_of_lists[connection[1]].append( + [connection[0], 0, connection[2], connection[3]]) + return list_of_lists + + +def connect_it_up(visual_input, motor_output, connections, width, length): + visual_connections, motor_conn = connections + layers = len(motor_conn) + all_pops = [] + hidden_pops = [] + for layer in range(layers): + motor_conn_e, motor_conn_i = motor_conn[layer] + # hidden_pop = sim.Population(width * length, sim.IF_curr_exp(), + # label="hidden_pop_{}".format(layer)) + # hidden_pops.append(hidden_pop) + for i in range(width*length): + hidden_pop = sim.Population( + 1, sim.IF_curr_exp(tau_refrac=3), + label="hidden_pop_{}_{}".format(layer, i)) + if simulate: + 
hidden_pop.record(["spikes", "v"]) + hidden_pops.append(hidden_pop) + list_of_lists = segment_hidden_pop( + visual_connections[layer], width, length, False) + for i in range(len(list_of_lists)): + split_the_from_list(visual_input, hidden_pops[i], list_of_lists[i]) + list_of_lists = segment_hidden_pop(motor_conn_e, width, length, True) + for i in range(len(list_of_lists)): + split_the_from_list(hidden_pops[i], motor_output, list_of_lists[i]) + list_of_lists = segment_hidden_pop(motor_conn_i, width, length, True) + for i in range(len(list_of_lists)): + split_the_from_list(hidden_pops[i], motor_output, list_of_lists[i], + receptor_type="inhibitory") + # split_the_from_list(visual_input, hidden_pop, + # visual_connections[layer]) + # split_the_from_list(hidden_pop, motor_output, motor_conn_e) + # split_the_from_list(hidden_pops[layer], motor_output, motor_conn_i, + # receptor_type="inhibitory") + all_pops.append(hidden_pops) + print("finished connecting") + return all_pops + + +simulate = True + +sim.setup(timestep=1.0) +# sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 255) + +width = 2 +length = 2 + +# mapping_1 = map_neurons_to_field(64, 19, 15, 0.9, 5) +mapping_2 = map_neurons_to_field(20, width, length, 0.9, 0) +# mapping_3 = map_neurons_to_field(50, 19, 15, 0.9, 2) +# mapping_4 = map_neurons_to_field(128, 2, 2, 0.9, 5) +# mapping_5 = map_neurons_to_field(128, 2, 2, 0.9, 7) +print("created mapping") +# connections_1 = map_to_from_list(mapping_1, 19, 15, 0.9) +connections_2 = map_to_from_list(mapping_2, width, length, 0.9) +# connections_3 = map_to_from_list(mapping_3, 19, 15, 0.9) +# connections_4 = map_to_from_list(mapping_4, 2, 2, 0.9) +# connections_5 = map_to_from_list(mapping_5, 2, 2, 0.9) +print("created all connections") + + +sim.setup(timestep=1.0) +# sim.set_number_of_neurons_per_core(sim.IF_curr_exp, 32) + +# set up populations, +if simulate: + # Assuming channel 0 as left camera and channel 1 as right camera + # Import data.log and Decode events + # (the files for these imports don't appear to be on the icub_vor branch) + # dm = DataManager() + # dm.load_AE_from_yarp('acquisitions10042019/circle10042019') + # + # # Loading decoded events; data(timestamp, channel, x, y, polarity) + # stereo_data = np.loadtxt( + # 'acquisitions10042019/circle10042019/decoded_events.txt', + # delimiter=',') + # [left_data, right_data] = split_stereo_data(stereo_data) + [left_data, right_data] = [[], []] + # left_data.tolist() + # right_data.tolist() + + new_left = convert_data(left_data) + new_right = convert_data(right_data) + + print('ATIS data processing ended') + + # This currently crashes because width*length=4 is not the same as the + # value used in convert_data (304*240) + vis_pop = sim.Population(width*length, sim.SpikeSourceArray(new_left), + label='pop_in') +else: + vis_pop = sim.Population(None, ICUBInputVertex(spinnaker_link_id=0), + label='pop_in') + +pop_out = sim.Population(4, sim.IF_curr_exp(tau_refrac=3), + label="motor_control") +if simulate: + pop_out.record(['spikes', 'v']) + +# pop_out = sim.Population(None, ICUBOutputVertex(spinnaker_link_id=0), +# label='pop_out') + +# hidden_pops = connect_it_up(vis_pop, pop_out, connections_1, 19, 15) +hidden_pops = connect_it_up(vis_pop, pop_out, connections_2, width, length) +# connect_it_up(vis_pop, pop_out, connections_3, 19, 15) +# connect_it_up(vis_pop, pop_out, connections_4, 2, 2) +# connect_it_up(vis_pop, pop_out, connections_5, 2, 2) + +# test_input = sim.Population(304*240, sim.IF_curr_exp(), label="readout") +# 
test_input.record(['spikes', 'v']) +# sim.Projection(vis_pop, test_input, sim.OneToOneConnector(), +# sim.StaticSynapse(weight=0.1)) + +# sim.Projection(pop, neuron_pop, sim.OneToOneConnector(), +# sim.StaticSynapse(weight=20.0)) + +sim.external_devices.activate_live_output_to(pop_out, vis_pop) + +# out_port = yarp.BufferedPortBottle() +# out_port.open('/spinn:o') +# # bottle = out_port.prepare() +# # bottle.clear() +# # bottle.addInt32(2) +# # out_port.write() +# # out_port +# # b.addString("thing") +# while True: +# bottle = out_port.prepare() +# bottle.clear() +# bottle.addInt32(2) +# out_port.write() + +# recordings and simulations, +# neuron_pop.record("spikes") + +simtime = 30000 # ms +if simulate: + sim.run(simtime) +else: + sim.external_devices.run_forever() + input('Press enter to stop') + +# continuous run until key press +# remember: do NOT record when running in this mode + +if simulate: + exc_spikes = pop_out.get_data("spikes") + exc_v = pop_out.get_data("v") + hidden_spikes = [] + hidden_v = [] + for i in range(len(hidden_pops[0])): + hidden_spikes.append(hidden_pops[0][i].get_data("spikes")) + hidden_v.append(hidden_pops[0][i].get_data("v")) + # input_spikes = test_input.get_data("spikes") + # input_v = test_input.get_data("v") + + Figure( + # raster plot of the neuron_pop + Panel(exc_spikes.segments[0].spiketrains, xlabel="Time/ms", + xticks=True, yticks=True, markersize=0.2, xlim=(0, simtime)), + Panel(exc_v.segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", yticks=True), + Panel(hidden_spikes[0].segments[0].spiketrains, xlabel="Time/ms", + xticks=True, yticks=True, markersize=0.2, xlim=(0, simtime)), + Panel(hidden_v[0].segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", yticks=True), + Panel(hidden_spikes[1].segments[0].spiketrains, xlabel="Time/ms", + xticks=True, yticks=True, markersize=0.2, xlim=(0, simtime)), + Panel(hidden_v[1].segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", yticks=True), + Panel(hidden_spikes[2].segments[0].spiketrains, xlabel="Time/ms", + xticks=True, yticks=True, markersize=0.2, xlim=(0, simtime)), + Panel(hidden_v[2].segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", yticks=True), + Panel(hidden_spikes[3].segments[0].spiketrains, xlabel="Time/ms", + xticks=True, yticks=True, markersize=0.2, xlim=(0, simtime)), + Panel(hidden_v[3].segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", yticks=True), + # Panel(input_spikes.segments[0].spiketrains, xlabel="Time/ms", + # xticks=True, yticks=True, markersize=0.2, xlim=(0, simtime)), + # Panel(input_v.segments[0].filter(name='v')[0], + # ylabel="Membrane potential (mV)", yticks=True), + title="neuron_pop: spikes" + ) + plt.show() + +sim.end() + +print("finished") diff --git a/examples/icub_vor_examples/single_purkinje_cell_pf_windowing.py b/examples/icub_vor_examples/single_purkinje_cell_pf_windowing.py new file mode 100644 index 0000000..99ad32c --- /dev/null +++ b/examples/icub_vor_examples/single_purkinje_cell_pf_windowing.py @@ -0,0 +1,116 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pyNN.spiNNaker as p +from pyNN.utility.plotting import Figure, Panel +import matplotlib.pyplot as plt + +p.setup(1) # simulation timestep (ms) +runtime = 500 + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input +} + +# Learning parameters +min_weight = 0 +max_weight = 0.1 +pot_alpha = 0.01 +t_peak = 100 +initial_weight = 0.05 +plastic_delay = 4 + +purkinje_cell = p.Population( + 1, # number of neurons + p.IF_cond_exp(**neuron_params), # Neuron model + label="Purkinje Cell" # identifier + ) + + +# Spike source to send spike via synapse +pf_spike_times = [50, 60, 65, 85, 101, 400] +# 150, 175, 180, 190, 240, 250, 255, +# 270, 300, 345, 350, 360, 370, 400, 422, 425, 427, 429] + +granular_cell = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': pf_spike_times}, # source spike times + label="src1" # identifier + ) + +# Spike source to send spike via synapse from climbing fibre +cf_spike_times = [55, 80, 90, 95, 96, 201] # 104, 107, 246] +climbing_fibre = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': cf_spike_times}, # source spike times + label="src2" # identifier + ) + +# Create projection from GC to PC +pfpc_plas = p.STDPMechanism( + timing_dependence=p.extra_models.TimingDependencePFPC(t_peak=t_peak), + weight_dependence=p.extra_models.WeightDependencePFPC(w_min=min_weight, + w_max=max_weight, + pot_alpha=pot_alpha), + weight=initial_weight, delay=plastic_delay) + +synapse_pfpc = p.Projection( + granular_cell, purkinje_cell, p.AllToAllConnector(), + synapse_type=pfpc_plas, receptor_type="excitatory") + +# Create projection from CF to PC +synapse = p.Projection( + climbing_fibre, purkinje_cell, p.OneToOneConnector(), + p.StaticSynapse(weight=0.0, delay=1), receptor_type="excitatory") + +granular_cell.record('spikes') +climbing_fibre.record('spikes') +purkinje_cell.record("all") + +p.run(runtime) + +granluar_cell_spikes = granular_cell.get_data('spikes') +climbing_fibre_spikes = climbing_fibre.get_data('spikes') +purkinje_data = purkinje_cell.get_data() + +pf_weights = synapse_pfpc.get('weight', 'list', with_address=False) +print(pf_weights) + +# Plot +F = Figure( + # plot data for postsynaptic neuron + Panel(granluar_cell_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(climbing_fibre_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", + data_labels=[purkinje_cell.label], yticks=True, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].filter(name='gsyn_exc')[0], + ylabel="gsyn excitatory (mV)", + data_labels=[purkinje_cell.label], yticks=True, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + ) + +plt.show() +p.end() + +print("Job Complete") diff --git a/examples/icub_vor_examples/single_purkinje_cell_potentiation_test.py 
b/examples/icub_vor_examples/single_purkinje_cell_potentiation_test.py new file mode 100644 index 0000000..8c90143 --- /dev/null +++ b/examples/icub_vor_examples/single_purkinje_cell_potentiation_test.py @@ -0,0 +1,100 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pyNN.spiNNaker as p +from pyNN.utility.plotting import Figure, Panel +import matplotlib.pyplot as plt + +p.setup(1) # simulation timestep (ms) +runtime = 1000 + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input +} + +# Learning parameters +min_weight = 0 +max_weight = 0.1 +pot_alpha = 0.002 # this is alpha in the paper + +t_peak = 101 # ms + +initial_weight = 0.05 +plastic_delay = 4 + +purkinje_cell = p.Population( + 1, # number of neurons + p.IF_cond_exp(**neuron_params), # Neuron model + label="Purkinje Cell" # identifier + ) + + +# Spike source to send spike via synapse +spike_times = [101, 201, 301, 401, 501, 601, 701, 801, 901] + +granular_cell = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': spike_times}, # source spike times + label="src1" # identifier + ) + +# Create projection from GC to PC +pfpc_plas = p.STDPMechanism( + timing_dependence=p.extra_models.TimingDependencePFPC(t_peak=t_peak), + weight_dependence=p.extra_models.WeightDependencePFPC(w_min=min_weight, + w_max=max_weight, + pot_alpha=pot_alpha), + weight=initial_weight, delay=plastic_delay) + +synapse_pfpc = p.Projection( + granular_cell, purkinje_cell, p.AllToAllConnector(), + synapse_type=pfpc_plas, receptor_type="excitatory") + +granular_cell.record('spikes') +purkinje_cell.record("all") + +pf_weights = [] +for i in range(len(spike_times)): + p.run(100) + pf_weights.append(synapse_pfpc.get('weight', 'list', with_address=False)) + +granluar_cell_spikes = granular_cell.get_data('spikes') +purkinje_data = purkinje_cell.get_data() + +for i in pf_weights: + print(i) +# Plot +F = Figure( + # plot data for postsynaptic neuron + Panel(granluar_cell_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", + data_labels=[purkinje_cell.label], yticks=True, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].filter(name='gsyn_exc')[0], + ylabel="gsyn excitatory (mV)", + data_labels=[purkinje_cell.label], yticks=True, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + ) + +plt.show() +p.end() + +print("Job Complete") diff --git a/examples/icub_vor_examples/single_purkinje_cell_test.py b/examples/icub_vor_examples/single_purkinje_cell_test.py new file mode 100644 index 0000000..22cf932 --- /dev/null +++ b/examples/icub_vor_examples/single_purkinje_cell_test.py @@ -0,0 +1,114 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pyNN.spiNNaker as p +from pyNN.utility.plotting import Figure, Panel +import matplotlib.pyplot as plt + +p.setup(1) # simulation timestep (ms) +runtime = 500 + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input + } + +# Learning parameters +min_weight = 0 +max_weight = 0.1 +pot_alpha = 0.01 +t_peak = 100 +initial_weight = 0.05 +plastic_delay = 4 + +purkinje_cell = p.Population( + 1, # number of neurons + p.IF_cond_exp(**neuron_params), # Neuron model + label="Purkinje Cell" # identifier + ) + +# Spike source to send spike via synapse +spike_times = [50, 150, 270] + +granular_cell = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': spike_times}, # source spike times + label="src1" # identifier + ) + +# Spike source to send spike via synapse from climbing fibre +spike_times_2 = [100, 104, 107, 246] +climbing_fibre = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': spike_times_2}, # source spike times + label="src2" # identifier + ) + +# Create projection from GC to PC +pfpc_plas = p.STDPMechanism( + timing_dependence=p.extra_models.TimingDependencePFPC(t_peak=t_peak), + weight_dependence=p.extra_models.WeightDependencePFPC(w_min=min_weight, + w_max=max_weight, + pot_alpha=pot_alpha), + weight=initial_weight, delay=plastic_delay) + +synapse_pfpc = p.Projection( + granular_cell, purkinje_cell, p.AllToAllConnector(), + synapse_type=pfpc_plas, receptor_type="excitatory") + +# Create projection from CF to PC +synapse = p.Projection( + climbing_fibre, purkinje_cell, p.OneToOneConnector(), + p.StaticSynapse(weight=0.0, delay=1), receptor_type="excitatory") + +granular_cell.record('spikes') +climbing_fibre.record('spikes') +purkinje_cell.record("all") + +p.run(runtime) + +granluar_cell_spikes = granular_cell.get_data('spikes') +climbing_fibre_spikes = climbing_fibre.get_data('spikes') +purkinje_data = purkinje_cell.get_data() + +pf_weights = synapse_pfpc.get('weight', 'list', with_address=False) +print(pf_weights) + +# Plot +F = Figure( + # plot data for postsynaptic neuron + Panel(granluar_cell_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(climbing_fibre_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].filter(name='v')[0], + ylabel="PC membrane potential (mV)", + data_labels=[purkinje_cell.label], yticks=True, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].filter(name='gsyn_exc')[0], + ylabel="PC excitatory current (mV)", + data_labels=[purkinje_cell.label], yticks=True, xlim=(0, runtime)), + Panel(purkinje_data.segments[0].spiketrains, + ylabel="PC spikes", + xlabel="Time (ms)", + yticks=True, markersize=2, xlim=(0, runtime)), + ) + +plt.savefig("single_pc_test.png", dpi=600) +plt.show() +p.end() diff --git a/examples/icub_vor_examples/single_vestibular_nuclei_mf_windowing.py b/examples/icub_vor_examples/single_vestibular_nuclei_mf_windowing.py new 
file mode 100644 index 0000000..feefc24 --- /dev/null +++ b/examples/icub_vor_examples/single_vestibular_nuclei_mf_windowing.py @@ -0,0 +1,122 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pyNN.spiNNaker as p +from pyNN.utility.plotting import Figure, Panel +import matplotlib.pyplot as plt + +p.setup(1) # simulation timestep (ms) +runtime = 500 + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input + } + +# Learning parameters +min_weight = 0 +max_weight = 0.1 +pot_alpha = 0.01 # this is alpha in the paper + +beta = 11 +sigma = 201 +initial_weight = 0.005 + +initial_weight = 0.05 +plastic_delay = 4 + +vestibular_nuclei = p.Population( + 1, # number of neurons + p.IF_cond_exp(**neuron_params), # Neuron model + label="Vestibular Nuclei" # identifier + ) + +# Spike source to send spike via synapse +mf_spike_times = [50, 60, 65, 85, 101, 400] +# 150, 175, 180, 190, 240, 250, 255, +# 270, 300, 345, 350, 360, 370, 400, 422, 425, 427, 429] + +mossy_fibre_src = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': mf_spike_times}, # source spike times + label="src1" # identifier + ) + +# Spike source to send spike via synapse from climbing fibre +pc_spike_times = [55, 80, 90, 95, 96, 201] # 104, 107, 246] +purkinje_cell_src = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': pc_spike_times}, # source spike times + label="src2" # identifier + ) + +# Create projection from GC to PC +mfvn_plas = p.STDPMechanism( + timing_dependence=p.extra_models.TimingDependenceMFVN(beta=beta, + sigma=sigma), + weight_dependence=p.extra_models.WeightDependenceMFVN(w_min=min_weight, + w_max=max_weight, + pot_alpha=pot_alpha), + weight=initial_weight, delay=plastic_delay) + +synapse_mfvn = p.Projection( + mossy_fibre_src, vestibular_nuclei, p.AllToAllConnector(), + synapse_type=mfvn_plas, receptor_type="excitatory") + +# Create projection from PC to VN +synapse = p.Projection( + purkinje_cell_src, vestibular_nuclei, p.OneToOneConnector(), + p.StaticSynapse(weight=0.0, delay=1), receptor_type="excitatory") + +mossy_fibre_src.record('spikes') +purkinje_cell_src.record('spikes') +vestibular_nuclei.record("all") + +p.run(runtime) + +mossy_fibre_src_spikes = mossy_fibre_src.get_data('spikes') +purkinje_cell_src_spikes = purkinje_cell_src.get_data('spikes') +vestibular_nuclei_data = vestibular_nuclei.get_data() + +mf_weights = synapse_mfvn.get('weight', 'list', with_address=False) +print(mf_weights) + +# Plot +F = Figure( + # plot data for postsynaptic neuron + Panel(mossy_fibre_src_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(purkinje_cell_src_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(vestibular_nuclei_data.segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", + data_labels=[vestibular_nuclei.label], 
yticks=True, xlim=(0, + runtime)), + Panel(vestibular_nuclei_data.segments[0].filter(name='gsyn_exc')[0], + ylabel="gsyn excitatory (mV)", + data_labels=[vestibular_nuclei.label], yticks=True, xlim=(0, + runtime)), + Panel(vestibular_nuclei_data.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + ) + +plt.show() +p.end() + +print("Job Complete") diff --git a/examples/icub_vor_examples/single_vestibular_nuclei_mf_windowing_3_spikes.py b/examples/icub_vor_examples/single_vestibular_nuclei_mf_windowing_3_spikes.py new file mode 100644 index 0000000..947a104 --- /dev/null +++ b/examples/icub_vor_examples/single_vestibular_nuclei_mf_windowing_3_spikes.py @@ -0,0 +1,122 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pyNN.spiNNaker as p +from pyNN.utility.plotting import Figure, Panel +import matplotlib.pyplot as plt + +p.setup(1) # simulation timestep (ms) +runtime = 500 + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input + } + +# Learning parameters +pot_alpha = 0.01 +beta = 11 +sigma = 201 + +pot_alpha = 0.01 +min_weight = 0 +max_weight = 0.1 + +initial_weight = 0.01 +plastic_delay = 4 + +vestibular_nuclei = p.Population( + 1, # number of neurons + p.IF_cond_exp(**neuron_params), # Neuron model + label="Vestibular Nuclei" # identifier + ) + + +# Spike source to send spike via synapse +mf_spike_times = [50, 60, 65] +# 150, 175, 180, 190, 240, 250, 255, +# 270, 300, 345, 350, 360, 370, 400, 422, 425, 427, 429] + +mossy_fibre_src = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': mf_spike_times}, # source spike times + label="src1" # identifier + ) + +# Spike source to send spike via synapse from climbing fibre +pc_spike_times = [60] # 104, 107, 246] +purkinje_cell_src = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': pc_spike_times}, # source spike times + label="purkinje_cell_src" # identifier + ) + +# Create projection from GC to PC +mfvn_plas = p.STDPMechanism( + timing_dependence=p.extra_models.TimingDependenceMFVN(beta=beta, + sigma=sigma), + weight_dependence=p.extra_models.WeightDependenceMFVN(w_min=min_weight, + w_max=max_weight, + pot_alpha=pot_alpha), + weight=initial_weight, delay=plastic_delay) + +synapse_mfvn = p.Projection( + mossy_fibre_src, vestibular_nuclei, p.AllToAllConnector(), + synapse_type=mfvn_plas, receptor_type="excitatory") + +# Create projection from PC to VN +synapse = p.Projection( + purkinje_cell_src, vestibular_nuclei, p.OneToOneConnector(), + p.StaticSynapse(weight=0.0, delay=1), receptor_type="excitatory") + +mossy_fibre_src.record('spikes') +purkinje_cell_src.record('spikes') +vestibular_nuclei.record("all") + +p.run(runtime) + +mossy_fibre_src_spikes = mossy_fibre_src.get_data('spikes') +purkinje_cell_src_spikes = purkinje_cell_src.get_data('spikes') +vestibular_nuclei_data = vestibular_nuclei.get_data() + 
+mf_weights = synapse_mfvn.get('weight', 'list', with_address=False) +print("\n {}".format(mf_weights)) + +# Plot +F = Figure( + # plot data for postsynaptic neuron + Panel(mossy_fibre_src_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(purkinje_cell_src_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(vestibular_nuclei_data.segments[0].filter(name='v')[0], + ylabel="Membrane potential (mV)", + data_labels=[vestibular_nuclei.label], yticks=True, xlim=(0, + runtime)), + Panel(vestibular_nuclei_data.segments[0].filter(name='gsyn_exc')[0], + ylabel="gsyn excitatory (mV)", + data_labels=[vestibular_nuclei.label], yticks=True, xlim=(0, + runtime)), + Panel(vestibular_nuclei_data.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + ) + +plt.show() +p.end() +print("Job Complete") diff --git a/examples/icub_vor_examples/single_vestibular_nuclei_potentiation_test.py b/examples/icub_vor_examples/single_vestibular_nuclei_potentiation_test.py new file mode 100644 index 0000000..a527ff9 --- /dev/null +++ b/examples/icub_vor_examples/single_vestibular_nuclei_potentiation_test.py @@ -0,0 +1,109 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pyNN.spiNNaker as p +from pyNN.utility.plotting import Figure, Panel +import matplotlib.pyplot as plt + + +p.setup(1) # simulation timestep (ms) +runtime = 1000 + +# Post-synapse population +neuron_params = { + "v_thresh": -50, + "v_reset": -70, + "v_rest": -65, + "i_offset": 0 # DC input +} + +# Learning parameters +min_weight = 0 +max_weight = 0.1 +pot_alpha = 0.01 # this is alpha in the paper + +beta = 11 +sigma = 201 +initial_weight = 0.005 + +plastic_delay = 4 + +vestibular_neuclei = p.Population( + 1, # number of neurons + p.IF_cond_exp(**neuron_params), # Neuron model + label="Vestibular Nuclei" # identifier + ) + +# Spike source to send spike via synapse +spike_times = [1, 101, 201, 301, 401, 501, 601, 701, 801, 901] + +mossy_fibre_src = p.Population( + 1, # number of sources + p.SpikeSourceArray, # source type + {'spike_times': spike_times}, # source spike times + label="src1" # identifier + ) + +# Create projection from MF to VN +mfvn_plas = p.STDPMechanism( + timing_dependence=p.extra_models.TimingDependenceMFVN(beta=beta, + sigma=sigma), + weight_dependence=p.extra_models.WeightDependenceMFVN(w_min=min_weight, + w_max=max_weight, + pot_alpha=pot_alpha), + weight=initial_weight, delay=plastic_delay) + +synapse_mfvn = p.Projection( + mossy_fibre_src, vestibular_neuclei, p.AllToAllConnector(), + synapse_type=mfvn_plas, receptor_type="excitatory") + +mossy_fibre_src.record('spikes') +vestibular_neuclei.record("all") + +mf_weights = [] +for i in range(len(spike_times)): + p.run(100) + mf_weights.append(synapse_mfvn.get('weight', 'list', with_address=False)) + +mossy_fibre_src_spikes = mossy_fibre_src.get_data('spikes') +vestibular_neuclei_data = vestibular_neuclei.get_data() + +# print weight history +for i in mf_weights: + print(i) + +# Plot +F = Figure( + # plot data for postsynaptic neuron + Panel(mossy_fibre_src_spikes.segments[0].spiketrains, + yticks=True, markersize=2, xlim=(0, runtime)), + Panel(vestibular_neuclei_data.segments[0].filter(name='v')[0], + ylabel="VN membrane potential (mV)", + data_labels=[vestibular_neuclei.label], yticks=True, xlim=(0, + runtime)), + Panel(vestibular_neuclei_data.segments[0].filter(name='gsyn_exc')[0], + ylabel="VN excitatory current (mV)", + data_labels=[vestibular_neuclei.label], yticks=True, xlim=(0, + runtime)), + Panel(vestibular_neuclei_data.segments[0].spiketrains, + xlabel="Time (ms)", + yticks=True, markersize=2, xlim=(0, runtime)), + ) + + +plt.savefig("single_vn_test.png", dpi=600) +plt.show() +p.end() + +print("Job Complete") diff --git a/examples/icub_vor_examples/test_mfvn_lut.py b/examples/icub_vor_examples/test_mfvn_lut.py new file mode 100644 index 0000000..48b2728 --- /dev/null +++ b/examples/icub_vor_examples/test_mfvn_lut.py @@ -0,0 +1,30 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
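The two LUT test scripts that follow (test_mfvn_lut.py and test_pfpc_lut.py) plot the lookup tables used by the MF-VN and PF-PC plasticity rules. For intuition only, here is a generic sketch of sampling an arbitrary time kernel into a fixed-point table; tabulate_kernel and the 100 ms exponential are illustrative assumptions, not the actual write_mfvn_lut/write_pfpc_lut implementation:

import numpy as np


def tabulate_kernel(kernel, lut_size=256, frac_bits=15, scaling=1.0):
    # sample the kernel once per timestep and quantise it to int16 fixed point
    times = np.arange(lut_size)
    values = scaling * kernel(times)
    fixed = np.clip(np.round(values * (1 << frac_bits)),
                    -(1 << frac_bits), (1 << frac_bits) - 1).astype(np.int16)
    return times, values, fixed


# e.g. an exponential decay with a 100 ms time constant (illustrative only)
t, as_float, as_fixed = tabulate_kernel(lambda t: np.exp(-t / 100.0))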
+ +import matplotlib.pyplot as plt +from spynnaker.pyNN.models.neuron.plasticity.stdp.common \ + import write_mfvn_lut + +beta = 10 +sigma = 200 + +_comp_times, out_float, plot_times = write_mfvn_lut( + spec=None, sigma=sigma, beta=beta, lut_size=256, shift=0, time_probe=22, + kernel_scaling=0.8) + +plt.plot(plot_times, out_float, label='float') +# plt.plot(t,out_fixed, label='fixed') +plt.legend() +plt.title("mf-VN LUT") +plt.savefig("figures/write_mfvn_lut.png") diff --git a/examples/icub_vor_examples/test_pfpc_lut.py b/examples/icub_vor_examples/test_pfpc_lut.py new file mode 100644 index 0000000..9e24441 --- /dev/null +++ b/examples/icub_vor_examples/test_pfpc_lut.py @@ -0,0 +1,33 @@ +# Copyright (c) 2021 The University of Manchester +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import matplotlib.pyplot as plt +from spynnaker.pyNN.models.neuron.plasticity.stdp.common \ + import write_pfpc_lut + +t_peak = 100 + +_comp_times, out_float, out_fixed, t = write_pfpc_lut( + spec=None, peak_time=t_peak, lut_size=256, shift=0, time_probe=t_peak, + kernel_scaling=0.8) + +plt.plot(t, out_float, label='float') +plt.legend() +plt.title("pf-PC LUT") +plt.savefig("figures/write_pfpc_lut.png") + +plt.plot(t, out_fixed, label='fixed int16') +plt.legend() +plt.title("pf-PC LUT") +plt.savefig("figures/write_pfpc_lut_final_exp_fix.png") diff --git a/integration_tests/script_builder.py b/integration_tests/script_builder.py index dd0f398..81350a5 100644 --- a/integration_tests/script_builder.py +++ b/integration_tests/script_builder.py @@ -27,6 +27,12 @@ def build_scripts(self): exceptions = {} exceptions["pushbot_ethernet_example.py"] = "Needs a physical pushbot" exceptions["pushbot_light_follower.py"] = "Runs forever" + exceptions["dataflow.py"] = "Vertex tested elsewhere" + exceptions["receptive_fields_for_motion.py"] = "Duplication" + exceptions["cerebellum.py"] = "Script has no run stage" + exceptions["cerebellum_tb.py"] = "Old version of pb_cerebellum_tb.py" + exceptions["test_mfvn_lut.py"] = "Only a test (no machine needed)" + exceptions["test_pfpc_lut.py"] = "Only a test (no machine needed)" # For branches these raise a SkipTest quoting the time given # For cron and manual runs these just and a warning
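The hunk above only registers the new scripts in the exceptions mapping; the context comments describe what the test builder does with them. As a rough sketch (hypothetical helper, not the actual script_builder.py code), such a mapping is typically consumed like this when the generated tests run:

import unittest
import warnings


def run_or_skip(script, exceptions, on_branch=True):
    # scripts listed in `exceptions` are skipped with the recorded reason
    if script in exceptions:
        reason = exceptions[script]
        if on_branch:
            raise unittest.SkipTest("{}: {}".format(script, reason))
        warnings.warn("Skipping {}: {}".format(script, reason))
        return False
    return True  # the caller goes on to execute the script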