Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions hippounit/scores/score_ZScore_backpropagatingAP.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ def compute(cls, observation, prediction, distances):
"""Computes average of z-scores from observation and prediction for back-propagating AP amplitudes"""

errors = collections.OrderedDict()
if not prediction:
never_fired_penalty = 250
return [never_fired_penalty, never_fired_penalty], errors

for i in range (0, len(distances)):
if 'mean_AP1_amp_strong_propagating_at_'+str(distances[i])+'um' in list(observation.keys()) or 'mean_AP1_amp_weak_propagating_at_'+str(distances[i])+'um' in list(observation.keys()):
Expand Down
64 changes: 44 additions & 20 deletions hippounit/tests/test_BackpropagatingAPTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,8 @@ class BackpropagatingAPTest(Test):
If False, only the JSON files containing the absolute feature values, the feature error scores and the final scores, and a log file are saved, but the figures and pickle files are not.
trunk_origin : list
first element : name of the section from which the trunk originates, second element : position on section (E.g. ['soma[5]', 1]). If not set by the user, the end of the default soma section is used.
serialized : boolean
If True, the simulation is run serially in the current process instead of being parallelized via multiprocessing
"""

def __init__(self, config = {},
Expand Down Expand Up @@ -131,7 +133,8 @@ def __init__(self, config = {},
base_directory= None,
show_plot=True,
save_all = True,
trunk_origin = None):
trunk_origin = None,
serialized = False):

observation = self.format_data(observation)

Expand All @@ -158,6 +161,7 @@ def __init__(self, config = {},
self.npool = multiprocessing.cpu_count() - 1

self.config = config
self.serialized = serialized

description = "Tests the mode and efficacy of back-propagating action potentials on the apical trunk."

Expand Down Expand Up @@ -213,12 +217,14 @@ def binsearch(self, model, stim_range, delay, dur, section_stim, loc_stim, secti

result=[]

pool = multiprocessing.Pool(1, maxtasksperchild = 1) # I use multiprocessing to keep every NEURON related task in independent processes

traces= pool.apply(self.run_cclamp_on_soma, args = (model, amplitude, delay, dur, section_stim, loc_stim, section_rec, loc_rec))
pool.terminate()
pool.join()
del pool
if self.serialized:
traces = self.run_cclamp_on_soma(model, amplitude, delay, dur, section_stim, loc_stim, section_rec, loc_rec)
else:
pool = multiprocessing.Pool(1, maxtasksperchild = 1) # I use multiprocessing to keep every NEURON related task in independent processes
traces = pool.apply(self.run_cclamp_on_soma, args = (model, amplitude, delay, dur, section_stim, loc_stim, section_rec, loc_rec))
pool.terminate()
pool.join()
del pool

spikecount = self.spikecount(delay, dur, traces)

Expand Down Expand Up @@ -290,15 +296,17 @@ def find_current_amp(self, model, delay, dur, section_stim, loc_stim, section_re
#amps=[0.0,0.3,0.8]
#amps=[0.0,0.2, 0.9]

pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)


run_cclamp_on_soma_ = functools.partial(self.run_cclamp_on_soma, model, delay=delay, dur=dur, section_stim=section_stim, loc_stim=loc_stim, section_rec=section_rec, loc_rec=loc_rec)
traces = pool.map(run_cclamp_on_soma_, amps, chunksize=1)

pool.terminate()
pool.join()
del pool
if self.serialized:
traces = []
for amp in amps:
traces.append(self.run_cclamp_on_soma(model, amp, delay=delay, dur=dur, section_stim=section_stim, loc_stim=loc_stim, section_rec=section_rec, loc_rec=loc_rec))
else:
pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
run_cclamp_on_soma_ = functools.partial(self.run_cclamp_on_soma, model, delay=delay, dur=dur, section_stim=section_stim, loc_stim=loc_stim, section_rec=section_rec, loc_rec=loc_rec)
traces = pool.map(run_cclamp_on_soma_, amps, chunksize=1)
pool.terminate()
pool.join()
del pool

spikecounts = []
_spikecounts = []
Expand All @@ -311,6 +319,13 @@ def find_current_amp(self, model, delay, dur, section_stim, loc_stim, section_re
for i in range(len(traces)):
spikecounts.append(self.spikecount(delay, dur, traces[i]))

if not spikecounts:
message_to_logFile += 'The model did not fire at all for any current injection amplitude.\n'
message_to_logFile += "---------------------------------------------------------------------------------------------------\n"

print('The model did not fire at all for any current injection amplitude')
amplitude = None
return amplitude, message_to_logFile

if amps[0] == 0.0 and spikecounts[0] > 0:

Expand All @@ -319,7 +334,7 @@ def find_current_amp(self, model, delay, dur, section_stim, loc_stim, section_re

print('Spontaneous firing')
amplitude = None
"""TODO: stop the whole thing"""
return amplitude, message_to_logFile

elif max(spikecounts) < 10:

Expand All @@ -328,6 +343,7 @@ def find_current_amp(self, model, delay, dur, section_stim, loc_stim, section_re

print('The model fired at ' + str(max(spikecounts)[0]) + ' Hz to ' + str(amps[-1]) + ' nA current step, and did not reach 10 Hz firing rate as supposed (according to Bianchi et al 2012 Fig. 1 B eg.)')
amplitude = None
return amplitude, message_to_logFile

else:
for i in range(len(spikecounts)):
Expand Down Expand Up @@ -750,7 +766,10 @@ def generate_prediction(self, model, verbose=False):
distances = self.config['recording']['distances']
tolerance = self.config['recording']['tolerance']

dend_locations, actual_distances = model.find_trunk_locations_multiproc(distances, tolerance, self.trunk_origin)
if self.serialized:
dend_locations, actual_distances = model.find_trunk_locations(distances, tolerance, self.trunk_origin)
else:
dend_locations, actual_distances = model.find_trunk_locations_multiproc(distances, tolerance, self.trunk_origin)
#print dend_locations, actual_distances

print('Dendritic locations to be tested (with their actual distances):', actual_distances)
Expand All @@ -766,9 +785,14 @@ def generate_prediction(self, model, verbose=False):
plt.close('all') #needed to avoid overlapping of saved images when the test is run on multiple models

amplitude, message_to_logFile = self.find_current_amp(model, delay, duration, "soma", 0.5, "soma", 0.5)
if amplitude is None:
return prediction

pool = multiprocessing.Pool(1, maxtasksperchild = 1)
traces = pool.apply(self.cclamp, args = (model, amplitude, delay, duration, "soma", 0.5, dend_locations))
if self.serialized:
traces = self.cclamp(model, amplitude, delay, duration, "soma", 0.5, dend_locations)
else:
pool = multiprocessing.Pool(1, maxtasksperchild = 1)
traces = pool.apply(self.cclamp, args = (model, amplitude, delay, duration, "soma", 0.5, dend_locations))

filepath = self.path_results + self.test_log_filename
self.logFile = open(filepath, 'w') # must be opened after the multiprocessing pools are created, otherwise multiprocessing won't work under python3
Expand Down
32 changes: 19 additions & 13 deletions hippounit/tests/test_DepolarizationBlockTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,15 +93,18 @@ class DepolarizationBlockTest(Test):
If False, plots are not displayed but still saved
save_all : boolean
If False, only the JSON files containing the absolute feature values, the feature error scores and the final scores, and a log file are saved, but the figures and pickle files are not.
serialized : boolean
If True, the simulation is run serially in the current process instead of being parallelized via multiprocessing
"""

def __init__(self,
observation = {'mean_Ith':None, 'Ith_std':None, 'mean_Veq': None, 'Veq_std': None} ,
name="Depolarization block test" ,
force_run=False,
base_directory= None,
show_plot=True,
save_all=True):
show_plot=True,
save_all=True,
serialized=False):

observation = self.format_data(observation)

Expand All @@ -125,6 +128,7 @@ def __init__(self,

self.logFile = None
self.test_log_filename = 'test_log.txt'
self.serialized = serialized

description = "Tests if the model enters depolarization block under current injection of increasing amplitudes."

Expand Down Expand Up @@ -567,17 +571,19 @@ def generate_prediction(self, model, verbose=False):

efel.reset()

pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
#amps = numpy.arange(0,3.55,0.05)
amps = numpy.arange(0,1.65,0.05)

cclamp_ = functools.partial(self.cclamp, model, delay = 500, dur = 1000)
results = pool.map(cclamp_, amps, chunksize=1)
#results = result.get()

pool.terminate()
pool.join()
del pool
amps = numpy.arange(0, 1.65, 0.05)
if self.serialized:
results = []
for amp in amps:
cclamp = self.cclamp(model, amp, delay=500, dur=1000)
results.append(cclamp)
else:
pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
cclamp_ = functools.partial(self.cclamp, model, delay = 500, dur = 1000)
results = pool.map(cclamp_, amps, chunksize=1)
pool.terminate()
pool.join()
del pool

plt.close('all') #needed to avoid overlapping of saved images when the test is run on multiple models in a for loop

Expand Down
52 changes: 30 additions & 22 deletions hippounit/tests/test_ObliqueIntegrationTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,8 @@ class ObliqueIntegrationTest(Test):
If False, only the JSON files containing the absolute feature values, the feature error scores and the final scores, and a log file are saved, but the figures and pickle files are not.
trunk_origin : list
first element : name of the section from which the trunk originates, second element : position on section (E.g. ['soma[5]', 1]). If not set by the user, the end of the default soma section is used.
serialized : boolean
If True, the simulation is run serially in the current process instead of being parallelized via multiprocessing
"""

def __init__(self,
Expand All @@ -137,7 +139,8 @@ def __init__(self,
base_directory= None,
show_plot=True,
save_all = True,
trunk_origin = None):
trunk_origin = None,
serialized=False):

observation = self.format_data(observation)
observation = self.add_std_to_observation(observation)
Expand Down Expand Up @@ -165,6 +168,7 @@ def __init__(self,
self.max_num_syn = 10

self.threshold_index = 5 #threshold input number for dendritic spike generation - index 0 is 0 input
self.serialized = serialized

description = "Tests the signal integration in oblique dendrites for increasing number of synchronous and asynchronous inputs"

Expand Down Expand Up @@ -323,13 +327,14 @@ def binsearch(self, model, dend_loc0):

for n in num:


pool_syn = multiprocessing.Pool(1, maxtasksperchild = 1) # I use multiprocessing to keep every NEURON related task in independent processes

t, v, v_dend = pool_syn.apply(self.syn_binsearch, args = (model, dend_loc0, interval, n, c_stim[midpoint]))
pool_syn.terminate()
pool_syn.join()
del pool_syn
if self.serialized:
t, v, v_dend = self.syn_binsearch(model, dend_loc0, interval, n, c_stim[midpoint])
else:
pool_syn = multiprocessing.Pool(1, maxtasksperchild = 1) # I use multiprocessing to keep every NEURON related task in independent processes
t, v, v_dend = pool_syn.apply(self.syn_binsearch, args = (model, dend_loc0, interval, n, c_stim[midpoint]))
pool_syn.terminate()
pool_syn.join()
del pool_syn

result.append(self.analyse_syn_traces(model, t, v, v_dend, model.threshold))
#print result
Expand Down Expand Up @@ -1687,8 +1692,10 @@ def generate_prediction(self, model, verbose=False):
raise
pass


model.find_obliques_multiproc(self.trunk_origin)
if self.serialized:
model.dend_loc = model.find_good_obliques(self.trunk_origin)
else:
model.find_obliques_multiproc(self.trunk_origin)

print('Dendrites and locations to be tested: ', model.dend_loc)

Expand All @@ -1706,18 +1713,19 @@ def generate_prediction(self, model, verbose=False):
print('The default NMDA model of HippoUnit is used with Jahr, Stevens voltage dependence.')
print('')

#pool0 = multiprocessing.pool.ThreadPool(self.npool) # multiprocessing.pool.ThreadPool is used because a nested multiprocessing is used in the function called here (to keep every NEURON related task in independent processes)
pool0 = NonDaemonPool(self.npool, maxtasksperchild = 1)

print("Adjusting synaptic weights on all the locations ...")

binsearch_ = functools.partial(self.binsearch, model)
results0 = pool0.map(binsearch_, model.dend_loc, chunksize=1) #model.dend_loc[0:2] - no need for info if it is distal or proximal
#results0 = result0.get()

pool0.terminate()
pool0.join()
del pool0
##pool0 = multiprocessing.pool.ThreadPool(self.npool) # multiprocessing.pool.ThreadPool is used because a nested multiprocessing is used in the function called here (to keep every NEURON related task in independent processes)
#pool0 = NonDaemonPool(self.npool, maxtasksperchild = 1)
#print("Adjusting synaptic weights on all the locations ...")
#binsearch_ = functools.partial(self.binsearch, model)
#results0 = pool0.map(binsearch_, model.dend_loc, chunksize=1) #model.dend_loc[0:2] - no need for info if it is distal or proximal
results0 = []
for dend_loc in model.dend_loc:
result0 = self.binsearch(model, dend_loc)
results0.append(result0)
##results0 = result0.get()
#pool0.terminate()
#pool0.join()
#del pool0


num = numpy.arange(0,self.max_num_syn+1)
Expand Down
54 changes: 38 additions & 16 deletions hippounit/tests/test_PSPAttenuationTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,8 @@ class PSPAttenuationTest(Test):
random seed for random dendritic location selection
trunk_origin : list
first element : name of the section from which the trunk originates, second element : position on section (E.g. ['soma[5]', 1]). If not set by the user, the end of the default soma section is used.
serialized : boolean
If True, the simulation is run serially in the current process instead of being parallelized via multiprocessing
"""

def __init__(self, config = {},
Expand All @@ -116,7 +118,8 @@ def __init__(self, config = {},
num_of_dend_locations = 15,
random_seed = 1,
save_all = True,
trunk_origin = None):
trunk_origin = None,
serialized=False):

observation = self.format_data(observation)

Expand Down Expand Up @@ -144,6 +147,7 @@ def __init__(self, config = {},

self.num_of_dend_locations = num_of_dend_locations
self.random_seed = random_seed
self.serialized = serialized

description = "Tests how much synaptic potential attenuates from the dendrite (different distances) to the soma."

Expand Down Expand Up @@ -481,7 +485,15 @@ def generate_prediction(self, model, verbose=False):
EPSC_amp = self.config['EPSC_amplitude']

model.SecList_name = model.TrunkSecList_name
locations, locations_distances = model.get_random_locations_multiproc(self.num_of_dend_locations, self.random_seed, dist_range, self.trunk_origin) # number of random locations , seed
if self.serialized:
locations, locations_distances = model.get_random_locations(self.num_of_dend_locations,
self.random_seed, dist_range,
self.trunk_origin)
else:
locations, locations_distances = model.get_random_locations_multiproc(self.num_of_dend_locations,
self.random_seed, dist_range,
self.trunk_origin) # number of random locations , seed

#print dend_locations, actual_distances
print('Dendritic locations to be tested (with their actual distances):', locations_distances)

Expand All @@ -494,25 +506,35 @@ def generate_prediction(self, model, verbose=False):
#print locations_weights

""" run model without an input"""
pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
run_stimulus_ = functools.partial(self.run_stimulus, model, tau1 = tau1, tau2 = tau2)
traces_no_input = pool.map(run_stimulus_, locations_weights, chunksize=1)

pool.terminate()
pool.join()
del pool
if self.serialized:
traces_no_input = []
for locations_weight in locations_weights:
trace = self.run_stimulus(model, locations_weight, tau1=tau1, tau2=tau2)
traces_no_input.append(trace)
else:
pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
run_stimulus_ = functools.partial(self.run_stimulus, model, tau1 = tau1, tau2 = tau2)
traces_no_input = pool.map(run_stimulus_, locations_weights, chunksize=1)
pool.terminate()
pool.join()
del pool
traces_dict_no_input = dict(list(i.items())[0] for i in traces_no_input) # merge list of dicts into single dict

locations_weights = self.calculate_weights(traces_dict_no_input, EPSC_amp)

"""run model with inputs"""
pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
run_stimulus_ = functools.partial(self.run_stimulus, model, tau1 = tau1, tau2 = tau2)
traces = pool.map(run_stimulus_, locations_weights, chunksize=1)

pool.terminate()
pool.join()
del pool
if self.serialized:
traces = []
for locations_weight in locations_weights:
trace = self.run_stimulus(model, locations_weight, tau1=tau1, tau2=tau2)
traces.append(trace)
else:
pool = multiprocessing.Pool(self.npool, maxtasksperchild=1)
run_stimulus_ = functools.partial(self.run_stimulus, model, tau1 = tau1, tau2 = tau2)
traces = pool.map(run_stimulus_, locations_weights, chunksize=1)
pool.terminate()
pool.join()
del pool
traces_dict = dict(list(i.items())[0] for i in traces) # merge list of dicts into single dict


Expand Down
Loading