本文整理汇总了Python中mantid.api.AnalysisDataService类的典型用法代码示例。如果您正苦于以下问题:Python AnalysisDataService类的具体用法?Python AnalysisDataService怎么用?Python AnalysisDataService使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了AnalysisDataService类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_that_can_load_isis_nexus_file_with_event_data_and_multi_period
def test_that_can_load_isis_nexus_file_with_event_data_and_multi_period(self):
    """Load a multi-period ISIS NeXus event file together with a calibration
    file, then verify the outputs and that the calibration workspace was
    published to the ADS."""
    # -- Arrange --
    state = SANSLoadTest._get_simple_state(sample_scatter="LARMOR00013065.nxs",
                                           calibration="80tubeCalibration_18-04-2016_r9330-9335.nxs")

    # -- Act --
    names = {"SampleScatterWorkspace": "sample_scatter",
             "SampleScatterMonitorWorkspace": "sample_monitor_scatter"}
    load_alg = self._run_load(state, publish_to_cache=True, use_cached=True, move_workspace=False,
                              output_workspace_names=names)

    # -- Assert --
    expected_workspace_counts = [4, 0, 0, 0, 0, 0]
    expected_ads_count = 1
    expected_types = [EventWorkspace, None, None, None, None, None]
    self._do_test_output(load_alg, expected_workspace_counts, expected_ads_count, expected_types)

    # The calibration must have been applied to the loaded data ...
    self.assertTrue(SANSLoadTest._has_calibration_been_applied(load_alg))

    # ... and the calibration workspace itself must be retrievable from the ADS.
    try:
        AnalysisDataService.retrieve("80tubeCalibration_18-04-2016_r9330-9335")
        on_ads = True
    except RuntimeError:
        on_ads = False
    self.assertTrue(on_ads)

    # -- Cleanup --
    remove_all_workspaces_from_ads()
示例2: get_weighted_peak_centres
def get_weighted_peak_centres(self):
    """Get the peak centres found in the peak workspace.

    Guarantees: the peak centres and their weights (detector counts) are exported.
    :return: 2-tuple: list of 3-tuples (Qx, Qy, Qz),
                      list of floats (peak intensities)
    :raises RuntimeError: if the peaks workspace does not exist on the ADS
    """
    # get the PeaksWorkspace from the analysis data service
    if AnalysisDataService.doesExist(self._myPeakWorkspaceName) is False:
        # error message fixed: was "does ot exit"
        raise RuntimeError('PeaksWorkspace %s does not exist.' % self._myPeakWorkspaceName)
    peak_ws = AnalysisDataService.retrieve(self._myPeakWorkspaceName)

    # collect each peak's Q-sample centre and its intensity
    peak_center_list = list()
    peak_intensity_list = list()
    num_peaks = peak_ws.getNumberPeaks()
    # range instead of xrange: works on both Python 2 and 3
    for i_peak in range(num_peaks):
        peak_i = peak_ws.getPeak(i_peak)
        center_i = peak_i.getQSampleFrame()
        intensity_i = peak_i.getIntensity()
        peak_center_list.append((center_i.X(), center_i.Y(), center_i.Z()))
        peak_intensity_list.append(intensity_i)
    # END-FOR

    return peak_center_list, peak_intensity_list
示例3: retrieve_hkl_from_spice_table
def retrieve_hkl_from_spice_table(self):
    """Get averaged HKL from the SPICE table.

    HKL is averaged over all Pt. rows of the SPICE table, assuming the values
    recorded in SPICE are reasonable.  The result is stored in self._spiceHKL.
    :return: None
    :raises RuntimeError: if the SPICE table workspace is not on the ADS
    """
    # get SPICE table; explicit exception instead of assert because asserts
    # are stripped when Python runs with -O
    spice_table_name = get_spice_table_name(self._myExpNumber, self._myScanNumber)
    if not AnalysisDataService.doesExist(spice_table_name):
        raise RuntimeError('Spice table for exp %d scan %d cannot be found.'
                           % (self._myExpNumber, self._myScanNumber))
    spice_table_ws = AnalysisDataService.retrieve(spice_table_name)

    # get HKL column indexes (getColumnNames() hoisted: called once, not three times)
    column_names = spice_table_ws.getColumnNames()
    h_col_index = column_names.index('h')
    k_col_index = column_names.index('k')
    l_col_index = column_names.index('l')

    # accumulate HKL over each Pt. row, then average
    hkl = numpy.array([0., 0., 0.])
    num_rows = spice_table_ws.rowCount()
    # range instead of xrange: works on both Python 2 and 3
    for row_index in range(num_rows):
        mi_h = spice_table_ws.cell(row_index, h_col_index)
        mi_k = spice_table_ws.cell(row_index, k_col_index)
        mi_l = spice_table_ws.cell(row_index, l_col_index)
        hkl += numpy.array([mi_h, mi_k, mi_l])
    # END-FOR

    self._spiceHKL = hkl / num_rows

    return
示例4: _createTwoCurves
def _createTwoCurves(self, datawsname):
    """Create a two-spectrum Workspace2D (a Gaussian and a sinusoid) and
    register it on the ADS under *datawsname*.

    :param datawsname: name under which the workspace is added to the ADS
    :return: 5-tuple (E, I, err, I2, err2) of the generated numpy arrays
    """
    E = np.arange(-50, 50, 1.0)
    # curve 1: Gaussian peak with Poisson (sqrt) errors
    I = 1000 * np.exp(-E**2/10**2)
    err = I ** .5
    # curve 2: sinusoid
    I2 = 1000 * (1+np.sin(E/5*np.pi))
    # BUGFIX: err2 was computed from curve 1 (err2 = I ** .5); the Poisson
    # errors for curve 2 must come from its own intensities
    err2 = I2 ** .5
    # workspace with two spectra
    ws = WorkspaceFactory.create(
        "Workspace2D", NVectors=2,
        XLength=E.size, YLength=I.size
    )
    # curve1
    ws.dataX(0)[:] = E
    ws.dataY(0)[:] = I
    ws.dataE(0)[:] = err
    # curve2
    ws.dataX(1)[:] = E
    ws.dataY(1)[:] = I2
    ws.dataE(1)[:] = err2
    # Add to data service so callers can retrieve it by name
    AnalysisDataService.addOrReplace(datawsname, ws)
    return E, I, err, I2, err2
示例5: test_LoadHKLFile
def test_LoadHKLFile(self):
    """Load a Fullprof .hkl file and verify the resulting peak-parameter table."""
    # 1. Write a temporary test file
    hklfilename = "test.hkl"
    self._createHKLFile(hklfilename)

    # 2. Run the algorithm
    alg_test = run_algorithm("LoadFullprofFile", Filename = hklfilename,
                             OutputWorkspace = "Foo", PeakParameterWorkspace = "PeakParameterTable")
    self.assertTrue(alg_test.isExecuted())

    # 3. Spot-check the output table
    tablews = AnalysisDataService.retrieve("PeakParameterTable")
    self.assertEqual(4, tablews.rowCount())
    # alpha of (11 5 1)/Row 0
    self.assertEqual(0.34252, tablews.cell(0, 3))

    # 4. Clean up: delete the test hkl file and the ADS entries
    os.remove(hklfilename)
    for ws_name in ("PeakParameterTable", "Foo"):
        AnalysisDataService.remove(ws_name)

    return
示例6: test_LoadPRFFile
def test_LoadPRFFile(self):
    """Load a Fullprof .prf file and verify the resulting data workspace."""
    # 1. Write a temporary test file
    prffilename = "test.prf"
    self._createPrfFile(prffilename)

    # 2. Run the algorithm
    alg_test = run_algorithm("LoadFullprofFile",
                             Filename = prffilename,
                             OutputWorkspace = "Data",
                             PeakParameterWorkspace = "Info")
    self.assertTrue(alg_test.isExecuted())

    # 3. Spot-check the output data: shape first, then individual values
    dataws = AnalysisDataService.retrieve("Data")
    self.assertEqual(dataws.getNumberHistograms(), 4)
    self.assertEqual(len(dataws.readX(0)), 36)
    self.assertEqual(dataws.readX(0)[13], 5026.3223)
    self.assertEqual(dataws.readY(1)[30], 0.3819)

    # 4. Clean up: delete the test prf file and the ADS entries
    os.remove(prffilename)
    for ws_name in ("Data", "Info"):
        AnalysisDataService.remove(ws_name)

    return
示例7: edit_matrix_workspace
def edit_matrix_workspace(sq_name, scale_factor, shift, edited_sq_name=None):
    """
    Edit the matrix workspace of S(Q) by scaling and shifting: S'(Q) = S(Q) * scale_factor + shift
    :param sq_name: name of the SofQ workspace on the ADS
    :param scale_factor: multiplicative factor applied to the workspace
    :param shift: additive offset applied to the workspace
    :param edited_sq_name: workspace name for the edited S(Q); if None, the result is
                           left in the automatically named workspace created by the arithmetic
    :return: None
    :raises RuntimeError: if the input workspace does not exist on the ADS
    """
    # get the workspace
    if AnalysisDataService.doesExist(sq_name) is False:
        raise RuntimeError('S(Q) workspace {0} cannot be found in ADS.'.format(sq_name))

    if edited_sq_name is not None:
        # work on a clone so the original S(Q) workspace is preserved
        simpleapi.CloneWorkspace(InputWorkspace=sq_name, OutputWorkspace=edited_sq_name)
        sq_ws = AnalysisDataService.retrieve(edited_sq_name)
    else:
        sq_ws = AnalysisDataService.retrieve(sq_name)

    # scale and shift; workspace arithmetic produces new (auto-named) workspaces
    sq_ws = sq_ws * scale_factor
    sq_ws = sq_ws + shift

    # BUGFIX: guard on edited_sq_name being None — previously DeleteWorkspace and
    # RenameWorkspace were called with OutputWorkspace=None in that case, which fails
    if edited_sq_name is not None and sq_ws.name() != edited_sq_name:
        simpleapi.DeleteWorkspace(Workspace=edited_sq_name)
        simpleapi.RenameWorkspace(InputWorkspace=sq_ws, OutputWorkspace=edited_sq_name)

    assert sq_ws is not None, 'S(Q) workspace cannot be None.'
    print('[DB...BAT] S(Q) workspace that is edit is {0}'.format(sq_ws))
示例8: cleanup
def cleanup(self):
    """Delete the input, output and correction workspaces from the ADS,
    skipping any that do not exist."""
    for wksp in (self._input_wksp, self._output_wksp, self._correction_wksp):
        if AnalysisDataService.doesExist(wksp):
            DeleteWorkspace(wksp)
示例9: test_setTitle
def test_setTitle(self):
    """A title set via setTitle must be returned by getTitle."""
    run_algorithm('CreateWorkspace', OutputWorkspace='ws1', DataX=[1., 2., 3.],
                  DataY=[2., 3.], DataE=[2., 3.], UnitX='TOF')
    ws1 = AnalysisDataService['ws1']
    title = 'test_title'
    ws1.setTitle(title)
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12),
    # and the rest of this file already uses assertEqual
    self.assertEqual(title, ws1.getTitle())
    AnalysisDataService.remove(ws1.getName())
示例10: test_add_raises_error_if_name_exists
def test_add_raises_error_if_name_exists(self):
    """Adding a workspace under an already-taken name must raise RuntimeError."""
    values = [1.0, 2.0, 3.0]
    alg = run_algorithm('CreateWorkspace', DataX=values, DataY=values, NSpec=1,
                        UnitX='Wavelength', child=True)
    name = "testws"
    ws = alg.getProperty("OutputWorkspace").value
    # first add succeeds; a second add with the same name must fail
    AnalysisDataService.addOrReplace(name, ws)
    self.assertRaises(RuntimeError, AnalysisDataService.add, name, ws)
示例11: test_batch_reduction_on_multiperiod_file
def test_batch_reduction_on_multiperiod_file(self):
    """Batch-reduce a multi-period SANS2D run and check that one reduced
    workspace per period ends up on the ADS."""
    # -- Arrange --
    # Build the data information
    data_builder = get_data_builder(SANSFacility.ISIS)
    data_builder.set_sample_scatter("SANS2D0005512")
    data_info = data_builder.build()
    # Take the rest of the state from the user file, with the reduction mode forced to LAB
    user_file_director = StateDirectorISIS(data_info)
    user_file_director.set_user_file("MASKSANS2Doptions.091A")
    user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
    state = user_file_director.construct()

    # -- Act --
    self._run_batch_reduction([state], use_optimizations=False)

    # -- Assert --
    # We only assert that the expected workspaces (one per period, 1..13) exist on the ADS
    expected_workspaces = ["5512p{0}rear_1D_2.0_14.0Phi-45.0_45.0".format(period)
                           for period in range(1, 14)]
    for ws_name in expected_workspaces:
        self.assertTrue(AnalysisDataService.doesExist(ws_name))

    # -- Clean up --
    for ws_name in expected_workspaces:
        AnalysisDataService.remove(ws_name)
示例12: test_len_increases_when_item_added
def test_len_increases_when_item_added(self):
    """len(AnalysisDataService) must grow by one when a workspace is created."""
    wsname = 'ADSTest_test_len_increases_when_item_added'
    current_len = len(AnalysisDataService)
    self._run_createws(wsname)
    # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12),
    # and the rest of this file already uses assertEqual
    self.assertEqual(len(AnalysisDataService), current_len + 1)
    # Remove to clean the test up
    AnalysisDataService.remove(wsname)
示例13: test_saveGSS
def test_saveGSS(self):
    """Test saving a GSAS file that matches V-drive via SaveVulcanGSS."""
    # Create a test binning file and an input workspace
    binfilename = "testbin.dat"
    self._createBinFile(binfilename)

    datawsname = "TestInputWorkspace"
    self._createDataWorkspace(datawsname)

    # Execute
    alg_test = run_algorithm("SaveVulcanGSS",
                             InputWorkspace = datawsname,
                             BinFilename = binfilename,
                             OutputWorkspace = datawsname+"_rebinned",
                             GSSFilename = "tempout.gda")
    self.assertTrue(alg_test.isExecuted())

    # Verify that the rebinned workspace was produced (retrieve raises if missing)
    outputws = AnalysisDataService.retrieve(datawsname+"_rebinned")
    self.assertTrue(outputws is not None)

    # Delete the test binning file and clean the ADS
    os.remove(binfilename)
    # BUGFIX: previously removed the literal name "InputWorkspace", which left
    # the actual input workspace (datawsname) behind on the ADS
    AnalysisDataService.remove(datawsname)
    AnalysisDataService.remove(datawsname+"_rebinned")

    return
示例14: _createDataWorkspace
def _createDataWorkspace(self, datawsname):
    """Create a single-spectrum TOF histogram workspace with geometrically
    growing bins and register it on the ADS under *datawsname*.

    :return: the created workspace
    """
    import math
    tof0 = 4900.
    delta = 0.001
    numpts = 200

    # Histogram arrays: numpts Y/E values, numpts+1 bin boundaries.
    vecx = []
    vecy = []
    vece = []
    tof = tof0
    for _ in range(numpts):
        vecx.append(tof)
        # NOTE(review): sin(tof0) is constant, so Y is flat — confirm whether
        # sin(tof) was intended
        vecy.append(math.sin(tof0))
        vece.append(1.)
        tof = tof * (1+delta)
    vecx.append(tof)  # final bin boundary

    dataws = api.CreateWorkspace(DataX = vecx, DataY = vecy, DataE = vece, NSpec = 1,
                                 UnitX = "TOF")

    # Add to data service so callers can retrieve it by name
    AnalysisDataService.addOrReplace(datawsname, dataws)

    return dataws
示例15: test_that_can_find_can_reduction_if_it_exists
def test_that_can_find_can_reduction_if_it_exists(self):
    """A previously reduced can (plus its count and norm parts) must be
    retrievable from the ADS."""
    # -- Arrange --
    test_director = TestDirector()
    state = test_director.construct()
    tagged_workspace_names = {None: "test_ws",
                              OutputParts.Count: "test_ws_count",
                              OutputParts.Norm: "test_ws_norm"}
    SANSFunctionsTest._prepare_workspaces(number_of_workspaces=4,
                                          tagged_workspace_names=tagged_workspace_names,
                                          state=state,
                                          reduction_mode=ISISReductionMode.LAB)

    # -- Act --
    workspace, workspace_count, workspace_norm = get_reduced_can_workspace_from_ads(state, output_parts=True,
                                                                                    reduction_mode=ISISReductionMode.LAB)  # noqa

    # -- Assert --
    # Each returned workspace must exist and match the one stored on the ADS
    for ads_name, found in (("test_ws", workspace),
                            ("test_ws_count", workspace_count),
                            ("test_ws_norm", workspace_norm)):
        self.assertTrue(found is not None)
        self.assertTrue(found.name() == AnalysisDataService.retrieve(ads_name).name())

    # -- Clean up --
    SANSFunctionsTest._remove_workspaces()