
Commit

Making HippoUnit Python 3 compatible (while keeping Python 2 compatibility)
sasaray committed Aug 28, 2019
1 parent c7de8c2 commit 26f3d49
Showing 26 changed files with 1,280 additions and 1,206 deletions.
10 changes: 10 additions & 0 deletions README.md
@@ -131,6 +131,16 @@ Test Platforms
- neuron 7.4


4. Ubuntu 16.04.6 LTS
- python 3.5.2
- sciunit 0.2.1.1
- efel 3.0.58
- numpy 1.16.4
- quantities 0.12.1
- scipy 1.3.0
- matplotlib 3.0.3
- neuron 7.6.2

Acknowledgments
-----------------

2 changes: 1 addition & 1 deletion hippounit/capabilities/__init__.py
@@ -12,4 +12,4 @@
modules = [ basename(f)[:-3] for f in files if isfile(f)]

for module in modules:
exec("from %s import *" % module)
exec("from .%s import *" % module)
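
The only change in this package initializer is the leading dot: Python 3 dropped implicit relative imports, so the dynamic re-export loop has to name its sibling modules relative to the package. A minimal sketch of the same pattern, assuming one capability per `cap_*.py` file (the `__init__` exclusion is added here only for illustration):

```python
# Sketch of the dynamic re-export pattern made Python 3 compatible above.
# "from cap_Foo import *" relied on Python 2's implicit relative import;
# "from .cap_Foo import *" is explicit and works on both interpreters.
from os.path import dirname, basename, isfile
import glob

files = glob.glob(dirname(__file__) + "/*.py")
modules = [basename(f)[:-3] for f in files
           if isfile(f) and not f.endswith("__init__.py")]  # skip the initializer itself

for module in modules:
    exec("from .%s import *" % module)  # explicit relative import, Python 2 and 3
```
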
32 changes: 16 additions & 16 deletions hippounit/capabilities/cap_ProvidesGoodObliques.py
@@ -5,22 +5,22 @@


class ProvidesGoodObliques(sciunit.Capability):
""" Indicates that the model provides a list of oblique dendrites and locations to be tested"""
""" Indicates that the model provides a list of oblique dendrites and locations to be tested"""

def find_good_obliques(self):
""" Must provide a list of oblique dendrites
that meet the criteria of the experimental protocol (Losonczy, Magee 2006),
and also proximal and distal locations on them.
Criteria: originate from the trunk, have no child, close to the soma (at most 120 microns)
The form must be: dend_loc = [['name_of_dend1',prox_location, "prox"],['name_of_dend1',dist_location, "dist"],['name_of_dend2',prox_location, "prox"] ['name_of_dend2',dist_location, "dist"]]
E.g. : [['CCell[0].apic[47]', 0.5, "prox"], ['CCell[0].apic[47]', 0.8333333333333333, "dist"]] """
def find_good_obliques(self):
""" Must provide a list of oblique dendrites
that meet the criteria of the experimental protocol (Losonczy, Magee 2006),
and also proximal and distal locations on them.
Criteria: originate from the trunk, have no child, close to the soma (at most 120 microns)
The form must be: dend_loc = [['name_of_dend1',prox_location, "prox"],['name_of_dend1',dist_location, "dist"],['name_of_dend2',prox_location, "prox"] ['name_of_dend2',dist_location, "dist"]]
E.g. : [['CCell[0].apic[47]', 0.5, "prox"], ['CCell[0].apic[47]', 0.8333333333333333, "dist"]] """

raise NotImplementedError()
raise NotImplementedError()

def find_obliques_multiproc(self):
""" Used to keep all NEURON related tasks in independent processes, to avoid errors like 'template can not be redefined'"""
pool_obl = multiprocessing.Pool(1, maxtasksperchild = 1)
self.dend_loc = pool_obl.apply(self.find_good_obliques) # this way model.dend_loc gets the values
pool_obl.terminate()
pool_obl.join()
del pool_obl
def find_obliques_multiproc(self):
""" Used to keep all NEURON related tasks in independent processes, to avoid errors like 'template can not be redefined'"""
pool_obl = multiprocessing.Pool(1, maxtasksperchild = 1)
self.dend_loc = pool_obl.apply(self.find_good_obliques) # this way model.dend_loc gets the values
pool_obl.terminate()
pool_obl.join()
del pool_obl
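
The changes to this capability (and to the other `cap_*` files below) appear to be whitespace-only, since the old and new lines read identically here, but the docstrings describe the recurring pattern: every NEURON task runs in a throwaway child process so templates are never redefined in the parent. A generic sketch of that pattern, with a hypothetical helper name (`run_in_fresh_process` is not part of HippoUnit):

```python
# Hypothetical helper illustrating the single-use worker pattern used by
# find_obliques_multiproc and the other *_multiproc capability methods:
# NEURON keeps templates and sections in process-global state, so each task
# gets a fresh child process and the pool is torn down immediately afterwards.
import multiprocessing

def run_in_fresh_process(func, *args):
    """Run func(*args) in a one-shot worker process and return its result."""
    pool = multiprocessing.Pool(1, maxtasksperchild=1)  # one worker, retired after one task
    try:
        result = pool.apply(func, args)  # blocks until the child finishes
    finally:
        pool.terminate()
        pool.join()
    return result
```
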
26 changes: 13 additions & 13 deletions hippounit/capabilities/cap_ProvidesRandomDendriticLocations.py
@@ -5,18 +5,18 @@


class ProvidesRandomDendriticLocations(sciunit.Capability):
""" Indicates that the model provides a list of randomly selected locations on the trunk (primary apical dendrite) to be tested"""
""" Indicates that the model provides a list of randomly selected locations on the trunk (primary apical dendrite) to be tested"""

def get_random_locations(self, num, seed, dist_range):
""" Must provide a list of lists [dendrite, seg.x]. Eg. : [['dendrite[31]', 0.5], ['dendrite[117]', 0.8333333333333333], ['dendrite[117]', 0.16666666666666666], ['dendrite[77]', 0.5], ['dendrite[99]', 0.5]],
and a dictionary where the keys are the locations, the value is the actual distance of the location from the soma. Eg.: {('dendrite[95]', 0.5): 191.4537639215934, ('dendrite[91]', 0.5): 186.10161451767556}"""
raise NotImplementedError()
def get_random_locations(self, num, seed, dist_range):
""" Must provide a list of lists [dendrite, seg.x]. Eg. : [['dendrite[31]', 0.5], ['dendrite[117]', 0.8333333333333333], ['dendrite[117]', 0.16666666666666666], ['dendrite[77]', 0.5], ['dendrite[99]', 0.5]],
and a dictionary where the keys are the locations, the value is the actual distance of the location from the soma. Eg.: {('dendrite[95]', 0.5): 191.4537639215934, ('dendrite[91]', 0.5): 186.10161451767556}"""
raise NotImplementedError()

def get_random_locations_multiproc(self, num, seed, dist_range):
""" Used to keep all NEURON related tasks in independent processes, to avoid errors like 'template can not be redefined'"""
pool = multiprocessing.Pool(1, maxtasksperchild = 1)
self.dend_locations, actual_distances = pool.apply(self.get_random_locations, (num, seed, dist_range,)) # this way model.dend_loc gets the values
pool.terminate()
pool.join()
del pool
return self.dend_locations, actual_distances
def get_random_locations_multiproc(self, num, seed, dist_range):
""" Used to keep all NEURON related tasks in independent processes, to avoid errors like 'template can not be redefined'"""
pool = multiprocessing.Pool(1, maxtasksperchild = 1)
self.dend_locations, actual_distances = pool.apply(self.get_random_locations, (num, seed, dist_range,)) # this way model.dend_loc gets the values
pool.terminate()
pool.join()
del pool
return self.dend_locations, actual_distances
30 changes: 15 additions & 15 deletions hippounit/capabilities/cap_ProvidesRecordingLocationsOnTrunk.py
@@ -5,21 +5,21 @@


class ProvidesRecordingLocationsOnTrunk(sciunit.Capability):
""" Indicates that the model provides a list of locations on the trunk (primary apical dendrite) to be tested"""
""" Indicates that the model provides a list of locations on the trunk (primary apical dendrite) to be tested"""

def find_trunk_locations(self,distances, tolerance):
""" Must provide a dictionary - keys: distances, values: corresponding locations on the trunk (primary apical dendrite) in list
at 50, 105, 250, 350 um distances from the soma
The form must be: dend_loc = (dist1, ['trunk_segment1_1',location],['trunk_segment1_2',location]), (dist2, ['trunk_segment2',location]),(dist3, ['trunk_segment3',location]), (dist4, ['trunk_segment4',location])
E.g. : OrderedDict([(50, ['dendrite[0]', 0.6956994222486329]), (150, ['dendrite[81]', 0.5557523508251703]), (250, ['dendrite[109]', 0.33250043844278565])]) """
def find_trunk_locations(self,distances, tolerance):
""" Must provide a dictionary - keys: distances, values: corresponding locations on the trunk (primary apical dendrite) in list
at 50, 105, 250, 350 um distances from the soma
The form must be: dend_loc = (dist1, ['trunk_segment1_1',location],['trunk_segment1_2',location]), (dist2, ['trunk_segment2',location]),(dist3, ['trunk_segment3',location]), (dist4, ['trunk_segment4',location])
E.g. : OrderedDict([(50, ['dendrite[0]', 0.6956994222486329]), (150, ['dendrite[81]', 0.5557523508251703]), (250, ['dendrite[109]', 0.33250043844278565])]) """

raise NotImplementedError()
raise NotImplementedError()

def find_trunk_locations_multiproc(self, distances, tolerance):
""" Used to keep all NEURON related tasks in independent processes, to avoid errors like 'template can not be redefined'"""
pool_trunk = multiprocessing.Pool(1, maxtasksperchild = 1)
self.dend_locations, actual_distances = pool_trunk.apply(self.find_trunk_locations, (distances, tolerance,)) # this way model.dend_loc gets the values
pool_trunk.terminate()
pool_trunk.join()
del pool_trunk
return self.dend_locations, actual_distances
def find_trunk_locations_multiproc(self, distances, tolerance):
""" Used to keep all NEURON related tasks in independent processes, to avoid errors like 'template can not be redefined'"""
pool_trunk = multiprocessing.Pool(1, maxtasksperchild = 1)
self.dend_locations, actual_distances = pool_trunk.apply(self.find_trunk_locations, (distances, tolerance,)) # this way model.dend_loc gets the values
pool_trunk.terminate()
pool_trunk.join()
del pool_trunk
return self.dend_locations, actual_distances
hippounit/capabilities/cap_ReceivesSquareCurrent_ProvidesResponse.py
@@ -3,14 +3,14 @@


class ReceivesSquareCurrent_ProvidesResponse(sciunit.Capability):
"""Indicates that current can be injected into the model as
"""Indicates that current can be injected into the model as
a square pulse. """

def inject_current(self, amp, delay, dur, section_stim, loc_stim, section_rec, loc_rec):
""" Must return numpy arrays containing the time and voltage values"""
raise NotImplementedError()
def inject_current(self, amp, delay, dur, section_stim, loc_stim, section_rec, loc_rec):
""" Must return numpy arrays containing the time and voltage values"""
raise NotImplementedError()

def get_vm(self, amp, delay, dur, section_stim, loc_stim, section_rec, loc_rec):
def get_vm(self, amp, delay, dur, section_stim, loc_stim, section_rec, loc_rec):

t, v = self.inject_current(amp, delay, dur, section_stim, loc_stim, section_rec, loc_rec)
return t, v
t, v = self.inject_current(amp, delay, dur, section_stim, loc_stim, section_rec, loc_rec)
return t, v
hippounit/capabilities/cap_ReceivesSquareCurrent_ProvidesResponse_MultipleLocations.py
@@ -3,14 +3,14 @@


class ReceivesSquareCurrent_ProvidesResponse_MultipleLocations(sciunit.Capability):
"""Indicates that current can be injected into the model as
"""Indicates that current can be injected into the model as
a square pulse. And records at multiple locations."""

def inject_current_record_respons_multiple_loc(self, amp, delay, dur, section_stim, loc_stim, dend_locations):
""" Must return numpy arrays containing the time and voltage values"""
raise NotImplementedError()
def inject_current_record_respons_multiple_loc(self, amp, delay, dur, section_stim, loc_stim, dend_locations):
""" Must return numpy arrays containing the time and voltage values"""
raise NotImplementedError()

def get_multiple_vm(self, amp, delay, dur, section_stim, loc_stim, dend_locations):
# v : dictionary - keys: dendritic location, values: the voltage trace for each recording locations
t, v_stim, v = self.inject_current_record_respons_multiple_loc(amp, delay, dur, section_stim, loc_stim, dend_locations)
return t, v_stim, v
def get_multiple_vm(self, amp, delay, dur, section_stim, loc_stim, dend_locations):
# v : dictionary - keys: dendritic location, values: the voltage trace for each recording locations
t, v_stim, v = self.inject_current_record_respons_multiple_loc(amp, delay, dur, section_stim, loc_stim, dend_locations)
return t, v_stim, v
14 changes: 7 additions & 7 deletions hippounit/capabilities/cap_ReceivesEPSCstim.py
@@ -2,14 +2,14 @@
from sciunit import Capability

class ReceivesEPSCstim(sciunit.Capability):
"""Indicates that the model receives synapse"""
"""Indicates that the model receives synapse"""

def run_EPSCstim(self, dend_loc, weight, tau1, tau2):
""" Must return numpy arrays containing the time and voltage values (at the soma and at the synaptic location )"""
raise NotImplementedError()
def run_EPSCstim(self, dend_loc, weight, tau1, tau2):
""" Must return numpy arrays containing the time and voltage values (at the soma and at the synaptic location )"""
raise NotImplementedError()

def run_EPSC_stim_get_vm(self, dend_loc, weight, tau1, tau2):
def run_EPSC_stim_get_vm(self, dend_loc, weight, tau1, tau2):

t, v, v_dend = self.run_EPSCstim(dend_loc, weight, tau1, tau2)
t, v, v_dend = self.run_EPSCstim(dend_loc, weight, tau1, tau2)

return t, v, v_dend
return t, v, v_dend
14 changes: 7 additions & 7 deletions hippounit/capabilities/cap_ReceivesSynapse.py
@@ -2,14 +2,14 @@
from sciunit import Capability

class ReceivesSynapse(sciunit.Capability):
"""Indicates that the model receives synapse"""
"""Indicates that the model receives synapse"""

def run_syn(self, dend_loc, interval, number, AMPA_weight):
""" Must return numpy arrays containing the time and voltage values (at the soma and at the synaptic location )"""
raise NotImplementedError()
def run_syn(self, dend_loc, interval, number, AMPA_weight):
""" Must return numpy arrays containing the time and voltage values (at the soma and at the synaptic location )"""
raise NotImplementedError()

def run_synapse_get_vm(self, dend_loc, interval, number, AMPA_weight):
def run_synapse_get_vm(self, dend_loc, interval, number, AMPA_weight):

t, v, v_dend = self.run_syn(dend_loc, interval, number, AMPA_weight)
t, v, v_dend = self.run_syn(dend_loc, interval, number, AMPA_weight)

return t, v, v_dend
return t, v, v_dend
7 changes: 4 additions & 3 deletions hippounit/classify_apical_sections.py
@@ -1,3 +1,4 @@
from builtins import range
import neurom as nm
from neurom.core.dataformat import COLS
from neurom.morphmath import point_dist2
@@ -43,7 +44,7 @@ def get_apical_point(neurite, morph, tuft_percent=27):
min_distance2 = max_distance2 * (1 - tuft_percent / 100.) ** 2

common_parents = set(nm.iter_sections(apical))
#Iterator to the sections in a neurite, neuron or neuron population.
#Iterator to the sections in a neurite, neuron or neuron population.
all_parents = set([])

for leaf in apical.root_node.ileaf():
@@ -54,7 +55,7 @@ def get_apical_point(neurite, morph, tuft_percent=27):
common_parents &= set_parents
all_parents |= set_parents

apical_point_section = None
apical_point_section = None
for parent_section in nm.iter_sections(apical):
if parent_section in common_parents:
common_parents.remove(parent_section)
@@ -105,7 +106,7 @@ def multiple_apical_points(morphology):
#apical_point.append(point)
#dist_apical_point.append(nm.morphmath.point_dist(morph.soma.center, point.points[-1, COLS.XYZ]))
#a, b = min(point[1] for idx, point in enumerate(apical_points_and_distances))
mn,idx = min( (apical_points_and_distances[i][1],i) for i in xrange(len(apical_points_and_distances)) )
mn,idx = min( (apical_points_and_distances[i][1],i) for i in range(len(apical_points_and_distances)) )
#print mn, idx
#print apical_points_and_distances
new_apical_points = []
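
The substantive changes in `classify_apical_sections.py` are the added `from builtins import range` import and the `xrange` → `range` swap, so the lazy-range idiom keeps working on Python 2 via the `future` backport while being valid Python 3. A small illustration of the min-with-index idiom from `multiple_apical_points`, with made-up section names and distances:

```python
# Illustrative data only; real entries are (apical point, distance from soma).
from builtins import range  # provided by the "future" backport on Python 2; native range on Python 3

apical_points_and_distances = [("apic[10]", 120.5), ("apic[31]", 87.2), ("apic[47]", 201.0)]

# Same idiom as in the hunk above: smallest distance and its index in one pass.
mn, idx = min((apical_points_and_distances[i][1], i)
              for i in range(len(apical_points_and_distances)))
print(mn, idx)  # -> 87.2 1
```
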
7 changes: 5 additions & 2 deletions hippounit/plottools.py
@@ -1,6 +1,9 @@
#!/usr/bin/env python
"""DocString"""
from __future__ import division

from builtins import range
from past.utils import old_div
from itertools import cycle
import matplotlib
import matplotlib.colors as mplcol
@@ -18,7 +21,7 @@ def adjust_spines(ax, spines, color='k', d_out=10, d_down=[]):
ax.set_frame_on(True)
ax.patch.set_visible(False)

for loc, spine in ax.spines.iteritems():
for loc, spine in ax.spines.items():
if loc in spines:
if loc == 'bottom':
spine.set_position(('outward', d_down)) # outward by 10 points
@@ -129,7 +132,7 @@ def tiled_figure(figname='', frames=1, columns=2, figs=collections.OrderedDict()
gs.update(top=top, bottom=bottom, left=left, right=right, hspace=hspace, wspace=wspace)

for fi in range(frames):
axs.append(fig.add_subplot(gs[int(fi/columns), int(fi%columns)]))
axs.append(fig.add_subplot(gs[int(old_div(fi,columns)), int(fi%columns)]))
adjust_spines(axs[-1], ['left', 'bottom'], d_out=0)

figs[figname]['axs'] = axs
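
`plottools.py` picks up two Python 3 fixes: dictionary iteration switches from `iteritems()` to `items()`, and the subplot row index goes through `old_div` because `from __future__ import division` makes `/` true division on both interpreters. A short sketch of the grid-index arithmetic, assuming the same frame/column layout as `tiled_figure`:

```python
from __future__ import division     # "/" is now true division on Python 2 as well
from past.utils import old_div      # "future" backport: floor-divides two ints like Python 2's "/"

columns = 2
for fi in range(5):
    row = int(old_div(fi, columns))  # 0, 0, 1, 1, 2 -- the gridspec row
    col = int(fi % columns)          # 0, 1, 0, 1, 0 -- the gridspec column
    print(fi, row, col)
```
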
2 changes: 1 addition & 1 deletion hippounit/scores/__init__.py
@@ -11,4 +11,4 @@
modules = [ basename(f)[:-3] for f in files if isfile(f)]

for module in modules:
exec("from %s import *" % module)
exec("from .%s import *" % module)
2 changes: 2 additions & 0 deletions hippounit/scores/score_P_Value_ObliqueIntegration.py
@@ -1,3 +1,5 @@
from __future__ import division
from builtins import range
from sciunit import Score
import numpy
from sciunit.utils import assert_dimensionless
13 changes: 7 additions & 6 deletions hippounit/scores/score_ZScore_ObliqueIntegration.py
@@ -1,3 +1,4 @@
from __future__ import division
from sciunit import Score
import numpy
import collections
@@ -11,10 +12,10 @@ class ZScore_ObliqueIntegration(Score):

def __init__(self, score, related_data={}):

if not isinstance(score, Exception) and not isinstance(score, float):
raise InvalidScoreError("Score must be a float.")
else:
super(ZScore_ObliqueIntegration,self).__init__(score, related_data=related_data)
if not isinstance(score, Exception) and not isinstance(score, float):
raise InvalidScoreError("Score must be a float.")
else:
super(ZScore_ObliqueIntegration,self).__init__(score, related_data=related_data)

@classmethod
def compute(cls, observation, prediction):
@@ -26,7 +27,7 @@ def compute(cls, observation, prediction):
errors_dict=collections.OrderedDict()
errors=[]

for feat_name, value in observation.iteritems():
for feat_name, value in observation.items():
if 'mean' in feat_name:
p_mean = prediction['model_' + feat_name]
o_mean = observation[feat_name]
@@ -49,4 +50,4 @@

def __str__(self):

return 'ZScore_avg = %.2f' % self.score
return 'ZScore_avg = %.2f' % self.score
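
Besides re-indentation, this score's `compute` method swaps `observation.iteritems()` for `observation.items()`: Python 3 has no `iteritems()`, and the `items()` view (a list on Python 2) is a drop-in replacement for read-only loops. A sketch of the loop shape with hypothetical feature names and a simplified error formula:

```python
# Hypothetical observation/prediction entries; the error formula is simplified.
import collections

observation = collections.OrderedDict([("mean_peak_deriv", 2.1), ("std_peak_deriv", 0.5)])
prediction = {"model_mean_peak_deriv": 1.8}

errors = collections.OrderedDict()
for feat_name, value in observation.items():   # .iteritems() would fail on Python 3
    if "mean" in feat_name:
        p_mean = prediction["model_" + feat_name]
        o_mean = observation[feat_name]
        o_std = observation["std" + feat_name[len("mean"):]]
        errors[feat_name] = abs(p_mean - o_mean) / o_std
print(errors)  # OrderedDict([('mean_peak_deriv', 0.6...)])
```
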
5 changes: 4 additions & 1 deletion hippounit/scores/score_ZScore_PSPAttenuation.py
@@ -1,3 +1,6 @@
from __future__ import division
from builtins import str
from builtins import range
from sciunit import Score
import numpy
from sciunit.utils import assert_dimensionless
@@ -36,7 +39,7 @@ def compute(cls, observation, prediction, distances):
errors['error_attenuation_soma/dend_'+str(distances[i])+'_um'] = error

error_list = []
for key, value in errors.iteritems():
for key, value in errors.items():
error_list.append(value)

score_avg = numpy.nanmean(error_list)
10 changes: 7 additions & 3 deletions hippounit/scores/score_ZScore_backpropagatingAP.py
@@ -1,3 +1,7 @@
from __future__ import division
from builtins import str
from builtins import range

from sciunit import Score
import numpy
from sciunit.utils import assert_dimensionless
@@ -23,7 +27,7 @@ def compute(cls, observation, prediction, distances):
errors = collections.OrderedDict()

for i in range (0, len(distances)):
if 'mean_AP1_amp_strong_propagating_at_'+str(distances[i])+'um' in observation.keys() or 'mean_AP1_amp_weak_propagating_at_'+str(distances[i])+'um' in observation.keys():
if 'mean_AP1_amp_strong_propagating_at_'+str(distances[i])+'um' in list(observation.keys()) or 'mean_AP1_amp_weak_propagating_at_'+str(distances[i])+'um' in list(observation.keys()):
p_value = prediction['model_AP1_amp_at_'+str(distances[i])+'um']['mean']
o_mean = observation['mean_AP1_amp_strong_propagating_at_'+str(distances[i])+'um']
o_std = observation['std_AP1_amp_strong_propagating_at_'+str(distances[i])+'um']
@@ -73,10 +77,10 @@ def compute(cls, observation, prediction, distances):
score_strong_propagating = []
score_weak_propagating = []

for key, value in errors.iteritems():
for key, value in errors.items():
if 'strong' not in key: # everything except 'strong'
score_weak_propagating.append(value)
for key, value in errors.iteritems():
for key, value in errors.items():
if 'weak' not in key:
score_strong_propagating.append(value)

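
In `score_ZScore_backpropagatingAP.py` the membership test wraps `observation.keys()` in `list(...)`, and the two result loops move from `iteritems()` to `items()`. Python 3's `keys()` returns a view; `key in observation.keys()` (or simply `key in observation`) still works, but the `list()` wrapper is the conservative futurize-style rewrite that keeps full Python 2 list behaviour. A small sketch with hypothetical observation entries:

```python
# Hypothetical observation entries for a single recording distance.
observation = {
    "mean_AP1_amp_strong_propagating_at_50um": 60.0,
    "std_AP1_amp_strong_propagating_at_50um": 5.0,
}

distance = 50
key = "mean_AP1_amp_strong_propagating_at_" + str(distance) + "um"

# The ported form; "key in observation" would be an equivalent, cheaper test here.
if key in list(observation.keys()):
    print("strong-propagating data available at %d um" % distance)
```
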
9 changes: 5 additions & 4 deletions hippounit/scores/score_ZScore_depolblock.py
@@ -1,3 +1,4 @@
from __future__ import division
from sciunit import Score
import numpy
import math
@@ -13,10 +14,10 @@ class ZScore_depolblock(Score):
def __init__(self, score, related_data={}):

#self.score_l=[]
if not isinstance(score, Exception) and not isinstance(score, float):
raise InvalidScoreError("Score must be a float.")
else:
super(ZScore_depolblock,self).__init__(score, related_data=related_data)
if not isinstance(score, Exception) and not isinstance(score, float):
raise InvalidScoreError("Score must be a float.")
else:
super(ZScore_depolblock,self).__init__(score, related_data=related_data)

@classmethod
def compute(cls, observation, prediction):