This article collects typical usage examples of the Python class mozaik.tools.mozaik_parametrized.MozaikParametrized. If you have been wondering what MozaikParametrized is for or how to use it, the hand-picked examples below should help.
The following 15 code examples of the MozaikParametrized class are shown, ordered by popularity by default.
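A recurring idiom in the examples below is parsing a stimulus-id string back into a MozaikParametrized instance with MozaikParametrized.idd, reading or changing its parameters as plain attributes, and serializing it again with str(). A minimal sketch of that round trip, assuming a Neo segment seg whose annotations carry a stimulus-id string as in Examples 5 and 11 (seg is a placeholder, not defined here):

from mozaik.tools.mozaik_parametrized import MozaikParametrized

sid = MozaikParametrized.idd(seg.annotations['stimulus'])  # parse the id string
print(sid.name, sid.trial)                # parameters are plain attributes
sid.trial = None                          # and can be rewritten in place
seg.annotations['stimulus'] = str(sid)    # serialize back to a string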
Example 1: __init__
def __init__(self, datastore, single_trial=False, **params):
    self.single_trial = single_trial
    PerDSVPlot.__init__(self, datastore, **params)
    ss = self._get_stimulus_ids()
    assert ss != [], "Error, empty datastore!"
    if self.title_style == "Clever":
        stimulus = MozaikParametrized.idd(ss[0])
        for s in ss:
            s = MozaikParametrized.idd(s)
            if s.name != stimulus.name:
                logger.warning('Datastore does not contain the same type of stimuli: changing title_style from Clever to Standard')
                self.title_style = "Standard"
                break

    # find the parameters that vary, in case the 'Clever' title style is used
    if self.title_style == "Clever":
        self.varied = varying_parameters([MozaikParametrized.idd(s) for s in ss])
        if not self.single_trial:
            self.varied = [x for x in self.varied if x != 'trial']

    if self.title_style == "Standard":
        self.extra_space_top = 0.07
    if self.title_style == "Clever":
        self.extra_space_top = len(self.varied) * 0.005
Example 2: _ploter
def _ploter(self, idx, gs):
    pair = self.pairs[idx]
    # let's figure out the varying parameters
    p1 = varying_parameters(pair)
    if pair[0].stimulus_id == None or pair[1].stimulus_id == None:
        p2 = []
    elif MozaikParametrized.idd(pair[0].stimulus_id).name != MozaikParametrized.idd(pair[1].stimulus_id).name:
        p2 = ['name']
    else:
        p2 = varying_parameters([MozaikParametrized.idd(p.stimulus_id) for p in pair])
    p1 = [x for x in p1 if ((x != 'value_name') and (x != 'stimulus_id'))]

    x_label = pair[0].value_name + '(' + pair[0].value_units.dimensionality.latex + ')'
    y_label = pair[1].value_name + '(' + pair[1].value_units.dimensionality.latex + ')'

    for p in p1:
        x_label += '\n' + str(p) + " = " + str(getattr(pair[0], p))
        y_label += '\n' + str(p) + " = " + str(getattr(pair[1], p))

    for p in p2:
        x_label += '\n' + str(p) + " = " + str(getattr(MozaikParametrized.idd(pair[0].stimulus_id), p))
        y_label += '\n' + str(p) + " = " + str(getattr(MozaikParametrized.idd(pair[1].stimulus_id), p))

    params = {}
    params["x_label"] = x_label
    params["y_label"] = y_label
    params["title"] = self.sheets[idx]
    if pair[0].value_units != pair[1].value_units or pair[1].value_units == pq.dimensionless:
        params["equal_aspect_ratio"] = False

    ids = list(set(pair[0].ids) & set(pair[1].ids))
    return [("ScatterPlot", ScatterPlot(pair[0].get_value_by_id(ids), pair[1].get_value_by_id(ids)), gs, params)]
Example 3: perform_analysis
def perform_analysis(self):
    dsv1 = queries.param_filter_query(self.datastore, st_name='FullfieldDriftingSinusoidalGrating')
    for sheet in dsv1.sheets():
        dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
        segs1, stids = colapse(dsv.get_segments(), dsv.get_stimuli(), parameter_list=['trial'], allow_non_identical_objects=True)
        for segs, st in zip(segs1, stids):
            first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
            duration = first_analog_signal.t_stop - first_analog_signal.t_start
            frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
            period = 1 / frequency
            period = period.rescale(first_analog_signal.t_start.units)
            cycles = duration / period
            first_har = round(cycles)

            e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
            i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
            v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_vm_ids()]
            e_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten() / len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_esyn_ids()]
            i_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten() / len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_isyn_ids()]
            v_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten() / len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_vm_ids()]

            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0, segs[0].get_stored_esyn_ids(), first_analog_signal.units, value_name='F0_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0, segs[0].get_stored_isyn_ids(), first_analog_signal.units, value_name='F0_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0, segs[0].get_stored_vm_ids(), first_analog_signal.units, value_name='F0_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1, segs[0].get_stored_esyn_ids(), first_analog_signal.units, value_name='F1_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1, segs[0].get_stored_isyn_ids(), first_analog_signal.units, value_name='F1_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1, segs[0].get_stored_vm_ids(), first_analog_signal.units, value_name='F1_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
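The F0/F1 computation above is plain Fourier analysis: F0 is the mean of the trial-averaged trace (|X[0]|/N), and F1 is the amplitude of the component at the stimulus temporal frequency (2|X[k]|/N, where k is the number of stimulus cycles in the recording). A self-contained numpy illustration on a synthetic trace (not mozaik code; the trace and its parameters are made up):

import numpy

# Synthetic trace: 2 s sampled at 1 kHz, mean 5.0, 2 Hz modulation of amplitude 3.0.
dt, duration, freq = 0.001, 2.0, 2.0
t = numpy.arange(0, duration, dt)
signal = 5.0 + 3.0 * numpy.sin(2 * numpy.pi * freq * t)

n = len(signal)
first_har = int(round(duration * freq))      # FFT bin of the stimulus frequency
spectrum = numpy.fft.fft(signal)
f0 = abs(spectrum[0]) / n                    # ~5.0, the mean (DC) component
f1 = 2 * abs(spectrum[first_har]) / n        # ~3.0, the modulation amplitude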
Example 4: __init__
def __init__(self, **params):
    MozaikParametrized.__init__(self, **params)
    assert self.size_x > 0 and self.size_y > 0

    half_width = self.size_x / 2.0
    half_height = self.size_y / 2.0
    self.left = self.location_x - half_width
    self.right = self.location_x + half_width
    self.top = self.location_y + half_height
    self.bottom = self.location_y - half_height
    self.width = self.right - self.left
    self.height = self.top - self.bottom
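A tiny worked check of the bounding-box arithmetic above, using plain numbers instead of MozaikParametrized parameters (no mozaik involved):

size_x, size_y = 4.0, 2.0
location_x, location_y = 1.0, -1.0
left, right = location_x - size_x / 2.0, location_x + size_x / 2.0
bottom, top = location_y - size_y / 2.0, location_y + size_y / 2.0
assert (right - left, top - bottom) == (size_x, size_y)   # width and height are recovered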
Example 5: save_data
def save_data(dirname, dsv, name):
    try:
        os.mkdir(dirname)
    except OSError:
        pass  # directory already exists

    # analog_ids is defined at module level (see Example 11)
    for neuron_id in analog_ids:
        mat_vm = []
        mat_exc = []
        mat_inh = []
        for seg in dsv.get_segments():
            sid = MozaikParametrized.idd(seg.annotations['stimulus'])

            a = seg.get_vm(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_vm.append(a)

            a = seg.get_esyn(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_exc.append(a)

            a = seg.get_isyn(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_inh.append(a)

        numpy.savetxt(dirname + '/' + 'VM_' + name + str(neuron_id) + '.csv', numpy.array(mat_vm))
        numpy.savetxt(dirname + '/' + 'ExcC' + name + str(neuron_id) + '.csv', numpy.array(mat_exc))
        numpy.savetxt(dirname + '/' + 'InhC' + name + str(neuron_id) + '.csv', numpy.array(mat_inh))
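Each exported row, as assembled above, starts with the stimulus y position, then the trial index, followed by the analog-signal samples. A hedged sketch of reading one such file back (the file name is hypothetical, following the naming scheme in save_data):

import numpy

rows = numpy.loadtxt('data_dir/VM_protocolA42.csv')      # hypothetical path
for row in numpy.atleast_2d(rows):
    y, trial, samples = row[0], row[1], row[2:]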
Example 6: ads_with_equal_stimulus_type
def ads_with_equal_stimulus_type(dsv, allow_None=False):
    """
    This function tests whether the DSV contains only ADS associated
    with the same stimulus type.

    Parameters
    ----------
    allow_None : bool
        If True, ADS that are not associated with any stimulus are allowed (and ignored by the check).
    """
    if allow_None:
        return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results if ads.stimulus_id != None], params=['name'])
    else:
        if len([0 for ads in dsv.analysis_results if ads.stimulus_id == None]) > 0:
            return False
        return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results], params=['name'])
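A hedged usage sketch of this query helper, mirroring how Examples 7 and 8 use it as a precondition (dsv is assumed to be a mozaik datastore view):

if not ads_with_equal_stimulus_type(dsv, allow_None=True):
    raise ValueError("DSV mixes ADS associated with different stimulus types")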
Example 7: __init__
def __init__(self, datastore, parameters, plot_file_name=None, fig_param=None):
    Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
    self.st = []
    self.tc_dict = []
    self.pnvs = []
    self.max_mean_response_indexes = []
    assert queries.ads_with_equal_stimulus_type(datastore)
    assert len(self.parameters.neurons) > 0, "ERROR, empty list of neurons specified"
    dsvs = queries.partition_analysis_results_by_parameters_query(self.datastore, parameter_list=['value_name'], excpt=True)
    for dsv in dsvs:
        dsv = queries.param_filter_query(dsv, identifier='PerNeuronValue', sheet_name=self.parameters.sheet_name)
        assert matching_parametrized_object_params(dsv.get_analysis_result(), params=['value_name'])
        self.pnvs.append(dsv.get_analysis_result())
        # get the stimuli
        st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs[-1]]
        self.st.append(st)
        # transform the pnvs into a dictionary of tuning curves along parameter_name,
        # also making sure the values are ordered according to the ids in the first pnv
        dic = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs[-1]], st, self.parameters.parameter_name)
        # sort the entries in the dictionary according to the parameter_name values
        for k in dic:
            (b, a) = dic[k]
            par, val = zip(*sorted(zip(b, numpy.array(a))))
            dic[k] = (par, numpy.array(val))
        self.tc_dict.append(dic)
        if self.parameters.centered:
            self.max_mean_response_indexes.append(numpy.argmax(sum([a[1] for a in dic.values()]), axis=0))
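The zip(*sorted(zip(...))) line above sorts the tuning-curve values by their parameter value while keeping the two sequences aligned. A standalone illustration with made-up numbers (a key function is added for safety when the values are arrays):

import numpy

params = [0.8, 0.2, 0.4]                         # parameter values, unsorted
values = numpy.array([[3.0], [1.0], [2.0]])      # one row of responses per parameter value
par, val = zip(*sorted(zip(params, values), key=lambda pv: pv[0]))
# par == (0.2, 0.4, 0.8) and the rows of val follow the same order
val = numpy.array(val)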
Example 8: perform_analysis
def perform_analysis(self):
    dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue', sheet_name=self.parameters.sheet_name, st_name='DriftingSinusoidalGratingCenterSurroundStimulus')
    if len(dsv.get_analysis_result()) == 0:
        return
    assert queries.ads_with_equal_stimulus_type(dsv)
    assert queries.equal_ads(dsv, except_params=['stimulus_id'])
    self.pnvs = dsv.get_analysis_result()

    # get the stimuli
    self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]

    # transform the pnvs into a dictionary of tuning curves along the 'surround_orientation' parameter,
    # also making sure they are ordered according to the first pnv's ids
    self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs], self.st, "surround_orientation")

    for k in self.tc_dict.keys():
        sis = []
        surround_tuning = []
        # do the calculation neuron by neuron
        for i in xrange(0, len(self.parameters.neurons)):
            ors = self.tc_dict[k][0]
            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
            d = {}
            for o, v in zip(ors, values):
                d[o] = v
            sis.append(d[0] / d[numpy.pi / 2])

        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis, self.parameters.neurons, None, value_name='Suppression index of ' + self.pnvs[0].value_name, sheet_name=self.parameters.sheet_name, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(k)))
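A worked illustration of the ratio computed for each neuron above: the response with the surround at orientation 0 divided by the response with the surround at pi/2 (the numbers are made up):

import numpy

d = {0.0: 12.0, numpy.pi / 2: 30.0}            # surround_orientation -> response of one neuron
suppression_index = d[0.0] / d[numpy.pi / 2]   # 0.4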
Example 9: ads_with_equal_stimuli
def ads_with_equal_stimuli(dsv, params=None, except_params=None):
    """
    This function returns True if the DSV contains only ADS associated with stimuli
    of the same kind whose parameters have identical values, either for the parameters
    listed in *params* or for all parameters except those listed in *except_params*.
    Otherwise it returns False.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results], params=params, except_params=except_params)
Example 10: print_content
def print_content(self, full_recordings=False, full_ADS=False):
    """
    Prints the content of the data store (specifically the list of recordings and ADSs in the DSV).

    Parameters
    ----------
    full_recordings : bool (optional)
        If True, each contained recording will be printed.
        Otherwise only an overview of the recordings, grouped by stimulus type, is shown.

    full_ADS : bool (optional)
        If True, each contained ADS will be printed (listing the set of its mozaik parameters together with their values).
        Otherwise only an overview of the ADSs, grouped by identifier, is shown.
    """
    logger.info("DSV info:")
    logger.info("   Number of recordings: " + str(len(self.block.segments)))
    d = {}
    for st in [s.annotations['stimulus'] for s in self.block.segments]:
        d[MozaikParametrized.idd(st).name] = d.get(MozaikParametrized.idd(st).name, 0) + 1

    for k in d.keys():
        logger.info("     " + str(k) + " : " + str(d[k]))

    logger.info("   Number of ADS: " + str(len(self.analysis_results)))
    d = {}
    for ads in self.analysis_results:
        d[ads.identifier] = d.get(ads.identifier, 0) + 1

    for k in d.keys():
        logger.info("     " + str(k) + " : " + str(d[k]))

    if full_recordings:
        logger.info('RECORDING RESULTS')
        for s in [s.annotations['stimulus'] for s in self.block.segments]:
            logger.info(str(s))

    if full_ADS:
        logger.info('ANALYSIS RESULTS')
        for a in self.analysis_results:
            logger.info(str(a))
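A hedged usage sketch (data_store is assumed to be a datastore or datastore view such as the PickledDataStore opened in Example 11):

data_store.print_content()                     # counts of recordings and ADS only
data_store.print_content(full_ADS=True)        # additionally list every ADS with its parameters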
Example 11: setup_logging
import sys
import numpy
from mozaik.storage import queries
from mozaik.controller import Global

Global.root_directory = sys.argv[1] + '/'

setup_logging()

data_store = PickledDataStore(load=True, parameters=ParameterSet({'root_directory': sys.argv[1], 'store_stimuli': False}), replace=True)
NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()

analog_ids = queries.param_filter_query(data_store, sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()

# normalize the x position of all FlashedBar stimuli to 0, both in the analysis results and in the recorded segments
dsv = queries.param_filter_query(data_store, st_name='FlashedBar')
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x = 0
    ads.stimulus_id = str(sid)
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
for seg in dsv.get_segments(null=True):
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)

def save_data(dirname, dsv, name):
    try:
Example 12: __init__
def __init__(self, tags=[], **params):
    MozaikParametrized.__init__(self, **params)
    self.tags = tags
Example 13: __init__
def __init__(self, **params):
    MozaikParametrized.__init__(self, **params)
    self.input = None
    self._frames = self.frames()
    self.n_frames = numpy.inf  # possibly very dangerous. Don't do 'for i in range(stim.n_frames)'!
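Since n_frames is set to numpy.inf here, the comment's warning matters: range(stim.n_frames) would fail (range needs an integer), and any loop driven by it would be unbounded anyway. A hedged sketch of taking a bounded number of frames from the frame generator instead (stim and process are placeholders, not defined in this example):

from itertools import islice

for frame in islice(stim.frames(), 100):   # take at most 100 frames
    process(frame)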
Example 14: process_input
def process_input(self, visual_space, stimulus, duration=None, offset=0):
    """
    Present a visual stimulus to the model and create the LGN output
    (relay) neurons.

    Parameters
    ----------
    visual_space : VisualSpace
        The visual space to which the stimuli are presented.

    stimulus : VisualStimulus
        The visual stimulus to be shown.

    duration : int (ms)
        The time for which the stimulus will be simulated.

    offset : int (ms)
        The time (in absolute time of the whole simulation) at which the stimulus starts.

    Returns
    -------
    retinal_input : list(ndarray)
        List of 2D arrays containing the frames of luminances that were presented to the retina.
    """
    logger.debug("Presenting visual stimulus from visual space %s" % visual_space)
    visual_space.set_duration(duration)
    self.input = visual_space
    st = MozaikParametrized.idd(stimulus)
    st.trial = None  # to avoid recalculating the RF responses to multiple trials of the same stimulus

    cached = self.get_cache(st)

    if cached == None:
        logger.debug("Generating output spikes...")
        (input_currents, retinal_input) = self._calculate_input_currents(visual_space, duration)
    else:
        logger.debug("Retrieved spikes from cache...")
        (input_currents, retinal_input) = cached

    ts = self.model.sim.get_time_step()
    #import pylab
    #pylab.figure()
    for rf_type in self.rf_types:
        assert isinstance(input_currents[rf_type], list)
        for i, (lgn_cell, input_current, scs, ncs) in enumerate(
                zip(self.sheets[rf_type].pop,
                    input_currents[rf_type],
                    self.scs[rf_type],
                    self.ncs[rf_type])):
            assert isinstance(input_current, dict)
            #if i == 0:
            #    pylab.plot(self.parameters.linear_scaler * input_current['amplitudes'])
            t = input_current['times'] + offset
            a = self.parameters.linear_scaler * input_current['amplitudes']
            scs.set_parameters(times=t, amplitudes=a)
            if self.parameters.mpi_reproducible_noise:
                t = numpy.arange(0, duration, ts) + offset
                amplitudes = (self.parameters.noise.mean
                              + self.parameters.noise.stdev
                              * self.ncs_rng[rf_type][i].randn(len(t)))
                ncs.set_parameters(times=t, amplitudes=amplitudes)

    # for debugging/testing, doesn't work with MPI !!!!!!!!!!!!
    #input_current_array = numpy.zeros((self.shape[1], self.shape[0], len(visual_space.time_points(duration))))
    #update_factor = int(visual_space.update_interval/self.parameters.receptive_field.temporal_resolution)
    #logger.debug("input_current_array.shape = %s, update_factor = %d, p.dim = %s" % (input_current_array.shape, update_factor, self.shape))
    #k = 0
    #for i in range(self.shape[1]):  # self.shape gives (x,y), so self.shape[1] is the height
    #    for j in range(self.shape[0]):
    #        # where the kernel temporal resolution is finer than the frame update interval,
    #        # we only keep the current values at the start of each frame
    #        input_current_array[i,j, :] = input_currents['X_ON'][k]['amplitudes'][::update_factor]
    #        k += 1

    # if record() has already been called, set up the recording now
    self._built = True
    self.write_cache(st, input_currents, retinal_input)
    return retinal_input
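The key trick in process_input is clearing the trial parameter before consulting the cache, so every trial of an otherwise identical stimulus reuses the same retinal-input computation. A minimal standalone sketch of that idiom, not mozaik's actual get_cache/write_cache implementation (compute_retinal_input is a placeholder):

from mozaik.tools.mozaik_parametrized import MozaikParametrized

_cache = {}

def cached_retinal_input(stimulus, compute_retinal_input):
    # Clear 'trial' so that all trials of the same stimulus share one cache entry.
    st = MozaikParametrized.idd(stimulus)
    st.trial = None
    key = str(st)
    if key not in _cache:
        _cache[key] = compute_retinal_input(stimulus)
    return _cache[key]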
Example 15: equal_stimulus
def equal_stimulus(dsv, except_params):
    """
    This function returns True if the DSV contains only recordings associated
    with stimuli of identical parameter values, with the exception of the parameters in *except_params*.
    """
    # Note: as written, *except_params* is not forwarded to the underlying check;
    # only the 'name' parameter is excluded from the comparison.
    return matching_parametrized_object_params([MozaikParametrized.idd(s) for s in dsv.get_stimuli()], except_params=['name'])