This article collects typical usage examples of the Python method mozaik.tools.mozaik_parametrized.MozaikParametrized.idd. If you are wondering what MozaikParametrized.idd does and how to use it, the curated code examples below may help. You can also read more about the class it belongs to, mozaik.tools.mozaik_parametrized.MozaikParametrized.
The following presents 15 code examples of MozaikParametrized.idd, ordered by popularity by default.
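Before the examples, here is a minimal sketch of the pattern they all share: MozaikParametrized.idd turns a stimulus-id string (or an existing instance) into a parametrized object whose stimulus parameters can be read and modified, and str() serializes it back into an id string. (The datastore view dsv and the printed parameters are assumptions used only for illustration.)

from mozaik.tools.mozaik_parametrized import MozaikParametrized

for s in dsv.get_stimuli():            # stimulus ids are stored as strings
    st = MozaikParametrized.idd(s)     # parse into a MozaikParametrized instance
    print(st.name, st.trial)           # stimulus type name and its 'trial' parameter
    st.trial = None                    # parameters can be rewritten ...
    key = str(st)                      # ... and serialized back into an id string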
Example 1: __init__

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def __init__(self, datastore, single_trial=False, **params):
    self.single_trial = single_trial
    PerDSVPlot.__init__(self, datastore, **params)
    ss = self._get_stimulus_ids()
    assert ss != [], "Error, empty datastore!"
    if self.title_style == "Clever":
        stimulus = MozaikParametrized.idd(ss[0])
        for s in ss:
            s = MozaikParametrized.idd(s)
            if s.name != stimulus.name:
                logger.warning('Datastore does not contain the same type of stimuli: changing title_style from Clever to Standard')
                self.title_style = "Standard"
                break

    # let's find the parameter indexes that vary, in case we need the 'Clever' title style
    if self.title_style == "Clever":
        self.varied = varying_parameters([MozaikParametrized.idd(s) for s in ss])
        if not self.single_trial:
            self.varied = [x for x in self.varied if x != 'trial']

    if self.title_style == "Standard":
        self.extra_space_top = 0.07
    if self.title_style == "Clever":
        self.extra_space_top = len(self.varied) * 0.005
Example 2: _ploter

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def _ploter(self, idx, gs):
    pair = self.pairs[idx]
    # Let's figure out the varying parameters
    p1 = varying_parameters(pair)
    if pair[0].stimulus_id == None or pair[1].stimulus_id == None:
        p2 = []
    elif MozaikParametrized.idd(pair[0].stimulus_id).name != MozaikParametrized.idd(pair[1].stimulus_id).name:
        p2 = ['name']
    else:
        p2 = varying_parameters([MozaikParametrized.idd(p.stimulus_id) for p in pair])
    p1 = [x for x in p1 if ((x != 'value_name') and (x != 'stimulus_id'))]

    x_label = pair[0].value_name + '(' + pair[0].value_units.dimensionality.latex + ')'
    y_label = pair[1].value_name + '(' + pair[1].value_units.dimensionality.latex + ')'
    for p in p1:
        x_label += '\n' + str(p) + " = " + str(getattr(pair[0], p))
        y_label += '\n' + str(p) + " = " + str(getattr(pair[1], p))
    for p in p2:
        x_label += '\n' + str(p) + " = " + str(getattr(MozaikParametrized.idd(pair[0].stimulus_id), p))
        y_label += '\n' + str(p) + " = " + str(getattr(MozaikParametrized.idd(pair[1].stimulus_id), p))

    params = {}
    params["x_label"] = x_label
    params["y_label"] = y_label
    params["title"] = self.sheets[idx]
    if pair[0].value_units != pair[1].value_units or pair[1].value_units == pq.dimensionless:
        params["equal_aspect_ratio"] = False

    ids = list(set(pair[0].ids) & set(pair[1].ids))
    return [("ScatterPlot", ScatterPlot(pair[0].get_value_by_id(ids), pair[1].get_value_by_id(ids)), gs, params)]
Example 3: perform_analysis

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def perform_analysis(self):
    dsv1 = queries.param_filter_query(self.datastore, st_name='FullfieldDriftingSinusoidalGrating')
    for sheet in dsv1.sheets():
        dsv = queries.param_filter_query(dsv1, sheet_name=sheet)
        segs1, stids = colapse(dsv.get_segments(), dsv.get_stimuli(), parameter_list=['trial'], allow_non_identical_objects=True)
        for segs, st in zip(segs1, stids):
            first_analog_signal = segs[0].get_esyn(segs[0].get_stored_esyn_ids()[0])
            duration = first_analog_signal.t_stop - first_analog_signal.t_start
            frequency = MozaikParametrized.idd(st).temporal_frequency * MozaikParametrized.idd(st).params()['temporal_frequency'].units
            period = 1 / frequency
            period = period.rescale(first_analog_signal.t_start.units)
            cycles = duration / period
            first_har = round(cycles)

            e_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_esyn_ids()]
            i_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_isyn_ids()]
            v_f0 = [abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten())[0] / len(segs[0].get_esyn(idd))) for idd in segs[0].get_stored_vm_ids()]
            e_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_esyn(idd) for seg in segs], axis=0).flatten() / len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_esyn_ids()]
            i_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_isyn(idd) for seg in segs], axis=0).flatten() / len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_isyn_ids()]
            v_f1 = [2 * abs(numpy.fft.fft(numpy.mean([seg.get_vm(idd) for seg in segs], axis=0).flatten() / len(segs[0].get_esyn(idd)))[first_har]) for idd in segs[0].get_stored_vm_ids()]

            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f0, segs[0].get_stored_esyn_ids(), first_analog_signal.units, value_name='F0_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f0, segs[0].get_stored_isyn_ids(), first_analog_signal.units, value_name='F0_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f0, segs[0].get_stored_vm_ids(), first_analog_signal.units, value_name='F0_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(e_f1, segs[0].get_stored_esyn_ids(), first_analog_signal.units, value_name='F1_Exc_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(i_f1, segs[0].get_stored_isyn_ids(), first_analog_signal.units, value_name='F1_Inh_Cond', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
            self.datastore.full_datastore.add_analysis_result(PerNeuronValue(v_f1, segs[0].get_stored_vm_ids(), first_analog_signal.units, value_name='F1_Vm', sheet_name=sheet, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(st)))
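As an aside, the F0/F1 extraction above amounts to reading two bins of the FFT of a trial-averaged trace. A self-contained numpy sketch of the same arithmetic on a synthetic signal (no mozaik objects involved):

import numpy

n, n_cycles = 10000, 4                 # samples in the trace, stimulus cycles it contains (n_cycles plays the role of first_har)
sig = 2.0 + 1.5 * numpy.sin(2 * numpy.pi * n_cycles * numpy.arange(n) / n)

spec = numpy.fft.fft(sig) / n          # normalized spectrum
f0 = abs(spec[0])                      # mean level (DC component), ~2.0 here
f1 = 2 * abs(spec[n_cycles])           # amplitude at the stimulus frequency, ~1.5 here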
Example 4: perform_analysis

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def perform_analysis(self):
    dsv = queries.param_filter_query(self.datastore, identifier='PerNeuronValue', sheet_name=self.parameters.sheet_name, st_name='DriftingSinusoidalGratingCenterSurroundStimulus')
    if len(dsv.get_analysis_result()) == 0:
        return
    assert queries.ads_with_equal_stimulus_type(dsv)
    assert queries.equal_ads(dsv, except_params=['stimulus_id'])
    self.pnvs = dsv.get_analysis_result()
    # get the stimuli
    self.st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs]
    # transform the pnvs into a dictionary of tuning curves along the 'surround_orientation' parameter,
    # making sure the values are ordered according to the ids of the first pnv
    self.tc_dict = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs], self.st, "surround_orientation")
    for k in self.tc_dict.keys():
        sis = []
        surround_tuning = []
        # we will do the calculation neuron by neuron
        for i in xrange(0, len(self.parameters.neurons)):
            ors = self.tc_dict[k][0]
            values = numpy.array([a[i] for a in self.tc_dict[k][1]])
            d = {}
            for o, v in zip(ors, values):
                d[o] = v
            sis.append(d[0] / d[numpy.pi / 2])
        self.datastore.full_datastore.add_analysis_result(PerNeuronValue(sis, self.parameters.neurons, None, value_name='Suppression index of ' + self.pnvs[0].value_name, sheet_name=self.parameters.sheet_name, tags=self.tags, period=None, analysis_algorithm=self.__class__.__name__, stimulus_id=str(k)))
Example 5: ads_with_equal_stimulus_type

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def ads_with_equal_stimulus_type(dsv, allow_None=False):
    """
    This function tests whether the DSV contains only ADSs associated
    with the same stimulus type.

    Parameters
    ----------
    allow_None : bool
        If True, ADSs that are not associated with any stimulus are also allowed.
    """
    if allow_None:
        return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results if ads.stimulus_id != None], params=['name'])
    else:
        if len([0 for ads in dsv.analysis_results if ads.stimulus_id == None]) > 0:
            return False
        return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results], params=['name'])
Example 6: __init__

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def __init__(self, datastore, parameters, plot_file_name=None, fig_param=None):
    Plotting.__init__(self, datastore, parameters, plot_file_name, fig_param)
    self.st = []
    self.tc_dict = []
    self.pnvs = []
    self.max_mean_response_indexes = []
    assert queries.ads_with_equal_stimulus_type(datastore)
    assert len(self.parameters.neurons) > 0, "ERROR, empty list of neurons specified"
    dsvs = queries.partition_analysis_results_by_parameters_query(self.datastore, parameter_list=['value_name'], excpt=True)
    for dsv in dsvs:
        dsv = queries.param_filter_query(dsv, identifier='PerNeuronValue', sheet_name=self.parameters.sheet_name)
        assert matching_parametrized_object_params(dsv.get_analysis_result(), params=['value_name'])
        self.pnvs.append(dsv.get_analysis_result())
        # get the stimuli
        st = [MozaikParametrized.idd(s.stimulus_id) for s in self.pnvs[-1]]
        self.st.append(st)
        # transform the pnvs into a dictionary of tuning curves along the parameter_name
        # also make sure the values are ordered according to ids in the first pnv
        dic = colapse_to_dictionary([z.get_value_by_id(self.parameters.neurons) for z in self.pnvs[-1]], st, self.parameters.parameter_name)
        # sort the entries in dic according to the values of parameter_name
        for k in dic:
            (b, a) = dic[k]
            par, val = zip(*sorted(zip(b, numpy.array(a))))
            dic[k] = (par, numpy.array(val))
        self.tc_dict.append(dic)
        if self.parameters.centered:
            self.max_mean_response_indexes.append(numpy.argmax(sum([a[1] for a in dic.values()]), axis=0))
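The zip(*sorted(zip(...))) idiom above can be hard to read at a glance; here is a small standalone sketch (hypothetical values, plain numpy) of what it does to one tuning-curve entry:

import numpy

par = (0.8, 0.2, 0.4)                           # parameter values, e.g. orientations
val = numpy.array([[1, 2], [5, 6], [3, 4]])     # responses of two neurons, one row per parameter value

# sort the rows of 'val' by the corresponding parameter value
par_sorted, val_sorted = zip(*sorted(zip(par, val), key=lambda t: t[0]))
# par_sorted == (0.2, 0.4, 0.8); the rows of val_sorted follow the same reordering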
Example 7: save_data

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def save_data(dirname, dsv, name):
    try:
        os.mkdir(dirname)
    except:
        'all good'

    for neuron_id in analog_ids:
        mat_vm = []
        mat_exc = []
        mat_inh = []
        for seg in dsv.get_segments():
            sid = MozaikParametrized.idd(seg.annotations['stimulus'])

            a = seg.get_vm(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_vm.append(a)

            a = seg.get_esyn(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_exc.append(a)

            a = seg.get_isyn(neuron_id).magnitude
            a = numpy.insert(a, 0, sid.trial)
            a = numpy.insert(a, 0, sid.y)
            mat_inh.append(a)

        numpy.savetxt(dirname + '/' + 'VM_' + name + str(neuron_id) + '.csv', numpy.array(mat_vm))
        numpy.savetxt(dirname + '/' + 'ExcC' + name + str(neuron_id) + '.csv', numpy.array(mat_exc))
        numpy.savetxt(dirname + '/' + 'InhC' + name + str(neuron_id) + '.csv', numpy.array(mat_inh))
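For completeness, a hedged sketch of how the CSV files written above could be read back; the path and neuron id are hypothetical, and the first two columns hold the stimulus y position and the trial number (in that order, because trial is inserted first and y is then inserted in front of it), followed by the signal samples:

import numpy

mat = numpy.loadtxt('./exported/VM_spont123.csv')   # hypothetical file produced by save_data
y_positions = mat[:, 0]                             # stimulus y parameter
trials = mat[:, 1]                                  # trial number
traces = mat[:, 2:]                                 # the recorded samples themselves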
Example 8: ads_with_equal_stimuli

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def ads_with_equal_stimuli(dsv, params=None, except_params=None):
    """
    This function returns True if the DSV contains only ADSs associated with stimuli
    of the same kind and with the same values of the parameters listed in *params*
    (or of all parameters except those listed in *except_params*). Otherwise it returns False.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results], params=params, except_params=except_params)
Example 9: print_content

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def print_content(self, full_recordings=False, full_ADS=False):
    """
    Prints the content of the data store (specifically the list of recordings and ADSs in the DSV).

    Parameters
    ----------
    full_recordings : bool (optional)
        If True, each contained recording will be printed.
        Otherwise only an overview of the recordings, grouped by stimulus type, will be shown.

    full_ADS : bool (optional)
        If True, each contained ADS will be printed (for each, the set of its mozaik parameters together with their values).
        Otherwise only an overview of the ADSs, grouped by their identifier, will be shown.
    """
    logger.info("DSV info:")
    logger.info("   Number of recordings: " + str(len(self.block.segments)))
    d = {}
    for st in [s.annotations['stimulus'] for s in self.block.segments]:
        d[MozaikParametrized.idd(st).name] = d.get(MozaikParametrized.idd(st).name, 0) + 1
    for k in d.keys():
        logger.info("       " + str(k) + " : " + str(d[k]))

    logger.info("   Number of ADS: " + str(len(self.analysis_results)))
    d = {}
    for ads in self.analysis_results:
        d[ads.identifier] = d.get(ads.identifier, 0) + 1
    for k in d.keys():
        logger.info("       " + str(k) + " : " + str(d[k]))

    if full_recordings:
        logger.info('RECORDING RESULTS')
        for s in [s.annotations['stimulus'] for s in self.block.segments]:
            logger.info(str(s))

    if full_ADS:
        logger.info('ANALYSIS RESULTS')
        for a in self.analysis_results:
            logger.info(str(a))
Example 10: perform_analysis_and_visualization

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def perform_analysis_and_visualization(data_store):
    analog_ids = param_filter_query(data_store, sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()
    analog_ids_inh = param_filter_query(data_store, sheet_name="V1_Inh_L4").get_segments()[0].get_stored_esyn_ids()
    spike_ids = param_filter_query(data_store, sheet_name="V1_Exc_L4").get_segments()[0].get_stored_spike_train_ids()

    number_of_cells = len(analog_ids)
    numberOfCells_str = str(number_of_cells)
    print 'NUMBER OF CELLS' + numberOfCells_str

    stimuli_list = list(('SparseNoise', 'DenseNoise'))
    #stimuli_list = ['SparseNoise']
    save_to = './Data/'
    print stimuli_list

    for stimuli_type in stimuli_list:
        print 'Getting voltage and images for ' + stimuli_type
        # Saving parameters
        format = '.pickle'
        quality = '_15000_21_'  # the number of images followed by the interval (in ms) each image is shown for

        # Load the segments
        dsv = queries.param_filter_query(data_store, sheet_name="V1_Exc_L4", st_name=stimuli_type)
        segments = dsv.get_segments()
        stimuli = [MozaikParametrized.idd(s) for s in dsv.get_stimuli()]
        # Take the seeds
        seeds = [s.experiment_seed for s in stimuli]
        print seeds, segments, stimuli
        # Sort them based on their seeds
        seeds, segments, stimuli = zip(*sorted(zip(seeds, segments, stimuli)))
        segment_length = segments[0].get_spiketrain(spike_ids[0]).t_stop

        # Values to obtain
        spikes = [[] for i in segments[0].get_spiketrain(spike_ids)]
        images = []

        ## Extract images
        print 'Extracting and processing images'
        for i, seg in enumerate(segments):
            """
            First we take the stimulus class and shrink the stimulus as much as we can,
            so that one pixel is assigned to each value of luminance. In order to do so,
            we re-adjust its parameters (size 1.0 x 1.0 and st.density = st.grid_size)
            and then successively call the class to extract the image frames.
            """
            # First we take the class
            st = MozaikParametrized.idd_to_instance(stimuli[i])
            st.size_x = 1.0
            st.size_y = 1.0
            st.density = st.grid_size

            fr = st.frames()
            # Pull as many frames as we need (total time / time per frame = total number of frames)
            ims = [fr.next()[0] for i in xrange(0, st.duration / st.frame_duration)]
            # Now keep only the first frame of each image (frames repeat while an image is shown)
            ims = [ims[i] for i in xrange(0, len(ims)) if ((i % (st.time_per_image / st.frame_duration)) == 0)]
            images.append(ims)

        # Saving images
        print 'Saving Images '
        # Concatenate and save
        ims = numpy.concatenate(images, axis=0)
        images_filename = save_to + 'images' + quality + stimuli_type + format
        f = open(images_filename, 'wb')
        cPickle.dump(ims, f)

        ## Get the voltage for all the cells
        for cell_number in range(number_of_cells):
            print 'Extracting Voltage for cell ', cell_number
            vm = []  # Initialize voltage list
            for i, seg in enumerate(segments):
                # get vm
                v = seg.get_vm(analog_ids[cell_number])
                # Check that the voltage between segments matches
                if vm != []:
                    assert vm[-1][-1] == v.magnitude[0]
                # Append
                vm.append(v.magnitude)

            # Concatenate the experiments
            print 'Concatenating Voltage'
            vm = [v[:-1] for v in vm]  # Drop the last element of each segment
            vm = numpy.concatenate(vm, axis=0)
            print 'voltage shape=', numpy.shape(vm)

            # Save the voltage
            print 'Saving Voltage for cell', cell_number
            voltage_filename = save_to + 'vm' + '_cell_' + str(cell_number) + quality + stimuli_type + '.pickle'
            f = open(voltage_filename, 'wb')
            cPickle.dump(vm, f)
Example 11: setup_logging

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
import numpy
from mozaik.storage import queries
from mozaik.controller import Global

Global.root_directory = sys.argv[1] + '/'
setup_logging()
data_store = PickledDataStore(load=True, parameters=ParameterSet({'root_directory': sys.argv[1], 'store_stimuli': False}), replace=True)
NeuronAnnotationsToPerNeuronValues(data_store, ParameterSet({})).analyse()

analog_ids = queries.param_filter_query(data_store, sheet_name="V1_Exc_L4").get_segments()[0].get_stored_esyn_ids()

dsv = queries.param_filter_query(data_store, st_name='FlashedBar')
for ads in dsv.get_analysis_result():
    sid = MozaikParametrized.idd(ads.stimulus_id)
    sid.x = 0
    ads.stimulus_id = str(sid)
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)
for seg in dsv.get_segments(null=True):
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    sid.x = 0
    seg.annotations['stimulus'] = str(sid)

def save_data(dirname, dsv, name):
    try:
Example 12: process_input

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def process_input(self, visual_space, stimulus, duration=None, offset=0):
    """
    Present a visual stimulus to the model, and create the LGN output
    (relay) neurons.

    Parameters
    ----------
    visual_space : VisualSpace
        The visual space to which the stimuli are presented.
    stimulus : VisualStimulus
        The visual stimulus to be shown.
    duration : int (ms)
        The time for which we will simulate the stimulus.
    offset : int (ms)
        The time (in absolute time of the whole simulation) at which the stimulus starts.

    Returns
    -------
    retinal_input : list(ndarray)
        List of 2D arrays containing the frames of luminances that were presented to the retina.
    """
    logger.debug("Presenting visual stimulus from visual space %s" % visual_space)
    visual_space.set_duration(duration)
    self.input = visual_space
    st = MozaikParametrized.idd(stimulus)
    st.trial = None  # to avoid recalculating the RF responses to multiple trials of the same stimulus
    cached = self.get_cache(st)

    if cached == None:
        logger.debug("Generating output spikes...")
        (input_currents, retinal_input) = self._calculate_input_currents(visual_space, duration)
    else:
        logger.debug("Retrieved spikes from cache...")
        (input_currents, retinal_input) = cached

    ts = self.model.sim.get_time_step()
    #import pylab
    #pylab.figure()
    for rf_type in self.rf_types:
        assert isinstance(input_currents[rf_type], list)
        for i, (lgn_cell, input_current, scs, ncs) in enumerate(
                zip(self.sheets[rf_type].pop,
                    input_currents[rf_type],
                    self.scs[rf_type],
                    self.ncs[rf_type])):
            assert isinstance(input_current, dict)
            #if i == 0:
            #    pylab.plot(self.parameters.linear_scaler * input_current['amplitudes'])
            t = input_current['times'] + offset
            a = self.parameters.linear_scaler * input_current['amplitudes']
            scs.set_parameters(times=t, amplitudes=a)
            if self.parameters.mpi_reproducible_noise:
                t = numpy.arange(0, duration, ts) + offset
                amplitudes = (self.parameters.noise.mean
                              + self.parameters.noise.stdev
                              * self.ncs_rng[rf_type][i].randn(len(t)))
                ncs.set_parameters(times=t, amplitudes=amplitudes)

    # for debugging/testing, doesn't work with MPI !!!!!!!!!!!!
    #input_current_array = numpy.zeros((self.shape[1], self.shape[0], len(visual_space.time_points(duration))))
    #update_factor = int(visual_space.update_interval/self.parameters.receptive_field.temporal_resolution)
    #logger.debug("input_current_array.shape = %s, update_factor = %d, p.dim = %s" % (input_current_array.shape, update_factor, self.shape))
    #k = 0
    #for i in range(self.shape[1]):  # self.shape gives (x,y), so self.shape[1] is the height
    #    for j in range(self.shape[0]):
    #        # where the kernel temporal resolution is finer than the frame update interval,
    #        # we only keep the current values at the start of each frame
    #        input_current_array[i, j, :] = input_currents['X_ON'][k]['amplitudes'][::update_factor]
    #        k += 1

    # if record() has already been called, set up the recording now
    self._built = True
    self.write_cache(st, input_currents, retinal_input)
    return retinal_input
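The trick of clearing st.trial before the cache lookup is worth isolating. Below is a minimal sketch of the same idea with a plain dictionary cache (get_cache/write_cache above are methods of the retina model; the dictionary and compute callback here are only an illustration):

from mozaik.tools.mozaik_parametrized import MozaikParametrized

_cache = {}                          # hypothetical stand-in for the model's cache

def cached_response(stimulus_id, compute):
    st = MozaikParametrized.idd(stimulus_id)
    st.trial = None                  # all trials of the same stimulus share one cache entry
    key = str(st)                    # the string form of a MozaikParametrized is a stable key
    if key not in _cache:
        _cache[key] = compute()      # expensive computation, e.g. the retinal input currents
    return _cache[key]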
Example 13: equal_stimulus

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def equal_stimulus(dsv, except_params):
    """
    This function returns True if the DSV contains only recordings associated
    with stimuli of identical parameter values, with the exception of the parameters listed in *except_params*.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(s) for s in dsv.get_stimuli()], except_params=['name'])
Example 14: equal_stimulus_type

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def equal_stimulus_type(dsv):
    """
    This function returns True if the DSV contains only recordings associated
    with the same stimulus type. Otherwise it returns False.
    """
    return matching_parametrized_object_params([MozaikParametrized.idd(s) for s in dsv.get_stimuli()], params=['name'])
Example 15: param_filter_query

# Required import: from mozaik.tools.mozaik_parametrized import MozaikParametrized [as alias]
# Alternatively: from mozaik.tools.mozaik_parametrized.MozaikParametrized import idd [as alias]
def param_filter_query(dsv, ads_unique=False, rec_unique=False, **kwargs):
    """
    Returns a DSV containing only those recordings and ADSs whose mozaik parameters
    match the parameter value combinations provided in `kwargs`.

    To restrict the mozaik parameters of the stimuli associated with the ADSs or recordings,
    prepend 'st_' to the parameter name.

    For recordings, the parameter sheet_name refers to the sheet for which the recording was done.

    Parameters
    ----------
    dsv : DataStoreView
        The input DSV.
    ads_unique : bool, optional
        If True the query will raise an exception if it does not identify a unique ADS.
    rec_unique : bool, optional
        If True the query will raise an exception if it does not identify a unique recording.
    \*\*kwargs : dict
        Remaining keyword arguments will be interpreted as mozaik parameter names and the associated values that all ADSs
        or recordings have to match. The value of each parameter should be either the value to match or a list of values, in which
        case the list is interpreted as *one of* the values that each returned recording or ADS has to match (thus effectively there
        is an *and* operation between the different parameters and an *or* operation between the values specified for a given mozaik parameter).

    Examples
    --------
    >>> datastore.param_filter_query(datastore,identifier=['PerNeuronValue','SingleValue'],sheet_name=sheet,value_name='orientation preference')

    This command should return a DSV containing all recordings and ADSs whose identifier is *PerNeuronValue* or *SingleValue*, that are associated with the sheet named *sheet* and whose value name is 'orientation preference'.
    Note that since recordings do not have these parameters, this query would return a DSV containing only ADSs.

    >>> datastore.param_filter_query(datastore,st_orientation=0.5)

    This command should return a DSV containing all recordings and ADSs that are associated with stimuli whose mozaik parameter orientation has the value 0.5.
    """
    new_dsv = dsv.fromDataStoreView()

    st_kwargs = dict([(k[3:], kwargs[k]) for k in kwargs.keys() if k[0:3] == 'st_'])
    kwargs = dict([(k, kwargs[k]) for k in kwargs.keys() if k[0:3] != 'st_'])

    seg_st = [MozaikParametrized.idd(seg.annotations['stimulus']) for seg in dsv.block.segments]
    ads_st = [MozaikParametrized.idd(ads.stimulus_id) for ads in dsv.analysis_results if ads.stimulus_id != None]

    if 'sheet_name' in set(kwargs):
        if len(kwargs) == 1:
            # This means that the only 'non-stimulus' parameter is sheet_name, so we keep just the
            # recordings associated with that sheet (otherwise no recordings would be passed along)
            kw = kwargs['sheet_name'] if isinstance(kwargs['sheet_name'], list) else [kwargs['sheet_name']]
            seg_filtered = set([s for s in dsv.block.segments if s.annotations['sheet_name'] in kw])
        else:
            seg_filtered = set([])
    else:
        seg_filtered = set(dsv.block.segments)

    ads_filtered = set(filter_query(dsv.analysis_results, **kwargs))

    if st_kwargs != {}:
        seg_filtered_st = set(filter_query(seg_st, extra_data_list=dsv.block.segments, **st_kwargs)[1])
        ads_filtered_st = set(filter_query(ads_st, extra_data_list=[ads for ads in dsv.analysis_results if ads.stimulus_id != None], **st_kwargs)[1])
    else:
        ads_filtered_st = set(dsv.analysis_results)
        seg_filtered_st = set(dsv.block.segments)

    seg = seg_filtered_st & seg_filtered
    ads = ads_filtered_st & ads_filtered

    new_dsv.sensory_stimulus = dsv.sensory_stimulus_copy()
    new_dsv.block.segments = list(seg)
    new_dsv.analysis_results = list(ads)

    if ads_unique and len(ads) != 1:
        raise ValueError("Result was expected to have only single ADS, it contains %d" % len(ads))
    if rec_unique and len(seg) != 1:
        raise ValueError("Result was expected to have only single Segment, it contains %d" % len(seg))

    return new_dsv
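Finally, a hedged usage sketch that ties param_filter_query and MozaikParametrized.idd together: counting how many recordings exist per trial number for one stimulus type. It assumes a loaded data_store as in Example 11, and the stimulus name is only an illustration.

from mozaik.storage import queries
from mozaik.tools.mozaik_parametrized import MozaikParametrized

dsv = queries.param_filter_query(data_store, st_name='FullfieldDriftingSinusoidalGrating')
counts = {}
for seg in dsv.get_segments():
    sid = MozaikParametrized.idd(seg.annotations['stimulus'])
    counts[sid.trial] = counts.get(sid.trial, 0) + 1
print(counts)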