本文整理汇总了Python中xia2.Handlers.Streams.Chatter.banner方法的典型用法代码示例。如果您正苦于以下问题:Python Chatter.banner方法的具体用法?Python Chatter.banner怎么用?Python Chatter.banner使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类xia2.Handlers.Streams.Chatter的用法示例。
在下文中一共展示了Chatter.banner方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: scale
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def scale(self):
    '''Actually perform the scaling - this is delegated to the
    implementation.

    Loops over the prepare/scale/finish "done" flags until each stage
    reports completion (an implementation may reset a flag to force a
    retry), then returns the scaling result.

    Returns:
        Whatever self._scale() produced (stored in self._scalr_result).

    Raises:
        RuntimeError: if no Integrater implementations were assigned.
    '''
    # Idiomatic emptiness test (was: == { }); also use the exception
    # call form instead of the Python-2-only "raise E, msg" statement.
    if not self._scalr_integraters:
        raise RuntimeError(
            'no Integrater implementations assigned for scaling')

    xname = self._scalr_xcrystal.get_name()

    while not self.get_scaler_finish_done():
        while not self.get_scaler_done():
            while not self.get_scaler_prepare_done():
                Chatter.banner('Preparing %s' % xname)

                # set the flag first: _scale_prepare() may clear it again
                # to request another pass around the loop
                self._scalr_prepare_done = True
                self._scale_prepare()

            Chatter.banner('Scaling %s' % xname)

            self._scalr_done = True
            self._scalr_result = self._scale()

        self._scalr_finish_done = True
        self._scale_finish()

    return self._scalr_result
示例2: integrate
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def integrate(self):
    '''Actually perform integration until we think we are done...

    Implements the prepare/integrate retry state machine: inner steps may
    reset the "done" flags (e.g. after rejecting a lattice) to force
    another pass around the loops.
    '''
    while not self.get_integrater_finish_done():
        while not self.get_integrater_done():
            while not self.get_integrater_prepare_done():
                Debug.write('Preparing to do some integration...')
                self.set_integrater_prepare_done(True)

                # if this raises an exception, perhaps the autoindexing
                # solution has too high symmetry. if this the case, then
                # perform a self._intgr_indexer.eliminate() - this should
                # reset the indexing system
                try:
                    self._integrate_prepare()
                # "except E as e" (was Py2-only "except E, e")
                except BadLatticeError as e:
                    Journal.banner('eliminated this lattice', size = 80)
                    Chatter.write('Rejecting bad lattice %s' % str(e))
                    self._intgr_refiner.eliminate()
                    self._integrater_reset()

            # FIXME x1698 - may be the case that _integrate() returns the
            # raw intensities, _integrate_finish() returns intensities
            # which may have been adjusted or corrected. See #1698 below.

            Debug.write('Doing some integration...')

            self.set_integrater_done(True)

            template = self.get_integrater_sweep().get_template()

            if self._intgr_sweep_name:
                if PhilIndex.params.xia2.settings.show_template:
                    Chatter.banner('Integrating %s (%s)' % \
                                   (self._intgr_sweep_name, template))
                else:
                    Chatter.banner('Integrating %s' % \
                                   (self._intgr_sweep_name))

            try:
                #1698
                self._intgr_hklout_raw = self._integrate()
            except BadLatticeError as e:
                Chatter.write('Rejecting bad lattice %s' % str(e))
                Journal.banner('eliminated this lattice', size = 80)
                self._intgr_refiner.eliminate()
                self._integrater_reset()
示例3: _index_prepare
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def _index_prepare(self):
    """Run spotfinding for this sweep and report per-image spot counts.

    Delegates the actual preparation to the parent indexer, then converts
    the resulting SPOT.XDS payload into a reflection table and prints an
    ASCII-art spot-count plot.
    """
    sweep_name = self.get_indexer_sweep_name()
    Chatter.banner('Spotfinding %s' % sweep_name)

    # parent class does the real preparation work
    super(XDSIndexerII, self)._index_prepare()

    from dials.array_family import flex
    from dials.util.ascii_art import spot_counts_per_image_plot

    # turn the XDS spot list into a DIALS reflection table so we can
    # visualise spot counts per image
    pickle_path = spot_xds_to_reflection_pickle(
        self._indxr_payload['SPOT.XDS'],
        working_directory=self.get_working_directory())
    reflections = flex.reflection_table.from_pickle(pickle_path)
    Chatter.write(spot_counts_per_image_plot(reflections), strip=False)
示例4: index
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def index(self):
    """Drive autoindexing to completion via the prepare/do/finish flag loops.

    Inner steps may reset the "done" flags (e.g. when a chosen lattice is
    rejected) which sends control back around the corresponding loop.
    NOTE(review): this excerpt is truncated - the finish-up stage is not
    visible here.
    """
    if not self.get_indexer_finish_done():
        # debug aid: record which caller triggered this indexing run
        f = inspect.currentframe().f_back.f_back
        m = f.f_code.co_filename
        l = f.f_lineno
        Debug.write('Index in %s called from %s %d' %
                    (self.__class__.__name__, m, l))
    while not self.get_indexer_finish_done():
        while not self.get_indexer_done():
            while not self.get_indexer_prepare_done():
                # --------------
                # call prepare()
                # --------------
                self.set_indexer_prepare_done(True)
                self._index_prepare()

            # --------------------------------------------
            # then do the proper indexing - using the best
            # solution already stored if available (c/f
            # eliminate above)
            # --------------------------------------------
            self.set_indexer_done(True)

            if self.get_indexer_sweeps():
                xsweeps = [ s.get_name() for s in self.get_indexer_sweeps() ]
                if len(xsweeps) > 1:
                    # find "SWEEPn, SWEEP(n+1), (..), SWEEPm" and aggregate to "SWEEPS n-m"
                    # NOTE: Python 2 idioms - map() returns a list and
                    # reduce() is a builtin here.
                    xsweeps = map(lambda x: (int(x[5:]), int(x[5:])) if x.startswith('SWEEP') else x, xsweeps)
                    # wrap the first element in a list so it acts as the
                    # accumulator seed for reduce() below
                    xsweeps[0] = [xsweeps[0]]
                    def compress(seen, nxt):
                        # merge adjacent numeric ranges: (a,b)+(b+1,c) -> (a,c)
                        if isinstance(seen[-1], tuple) and isinstance(nxt, tuple) and (seen[-1][1] + 1 == nxt[0]):
                            seen[-1] = (seen[-1][0], nxt[1])
                        else:
                            seen.append(nxt)
                        return seen
                    xsweeps = reduce(compress, xsweeps)
                    # render ranges back to display strings
                    xsweeps = map(lambda x: ('SWEEP%d' % x[0] if x[0] == x[1] else
                                             'SWEEPS %d to %d' % (x[0], x[1])) if isinstance(x, tuple)
                                  else x, xsweeps)
                if len(xsweeps) > 1:
                    sweep_names = ', '.join(xsweeps[:-1])
                    sweep_names += ' & ' + xsweeps[-1]
                else:
                    sweep_names = xsweeps[0]

                if PhilIndex.params.xia2.settings.show_template:
                    template = self.get_indexer_sweep().get_template()
                    Chatter.banner(
                        'Autoindexing %s (%s)' %(sweep_names, template))
                else:
                    Chatter.banner('Autoindexing %s' %sweep_names)

            if not self._indxr_helper:
                result = self._index()

                if not self._indxr_done:
                    Debug.write(
                        'Looks like indexing failed - try again!')
                    continue

                solutions = { }
                for k in self._indxr_other_lattice_cell.keys():
                    solutions[k] = self._indxr_other_lattice_cell[k][
                        'cell']

                # create a helper for the indexer to manage solutions
                self._indxr_helper = _IndexerHelper(solutions)

                solution = self._indxr_helper.get()

                # compare these against the final solution, if different
                # reject solution and return - correct solution will
                # be used next cycle
                if self._indxr_lattice != solution[0] and \
                   not self._indxr_input_cell and \
                   not PhilIndex.params.xia2.settings.integrate_p1:
                    Chatter.write('Rerunning indexing lattice %s to %s' %
                                  (self._indxr_lattice, solution[0]))
                    Debug.write(
                        'Rerunning indexing with target lattice %s' % \
                        solution[0])
                    self.set_indexer_done(False)

            else:
                # rerun autoindexing with the best known current solution
                solution = self._indxr_helper.get()
                self._indxr_input_lattice = solution[0]
                self._indxr_input_cell = solution[1]
                result = self._index()
    #......... part of the code omitted here .........
示例5: index
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def index(self):
    """Drive autoindexing to completion via the prepare/do/finish flag loops.

    Simpler variant of index() that joins sweep names directly instead of
    compressing consecutive SWEEPn names into ranges.
    NOTE(review): this excerpt is truncated after the trailing comment.
    """
    if not self.get_indexer_finish_done():
        # debug aid: record which caller triggered this indexing run
        f = inspect.currentframe().f_back.f_back
        m = f.f_code.co_filename
        l = f.f_lineno
        Debug.write('Index in %s called from %s %d' %
                    (self.__class__.__name__, m, l))
    while not self.get_indexer_finish_done():
        while not self.get_indexer_done():
            while not self.get_indexer_prepare_done():
                # --------------
                # call prepare()
                # --------------
                self.set_indexer_prepare_done(True)
                self._index_prepare()

            # --------------------------------------------
            # then do the proper indexing - using the best
            # solution already stored if available (c/f
            # eliminate above)
            # --------------------------------------------
            self.set_indexer_done(True)

            if self.get_indexer_sweeps():
                xsweeps = self.get_indexer_sweeps()
                if len(xsweeps) > 1:
                    sweep_names = ', '.join(x.get_name() for x in xsweeps[:-1])
                    sweep_names += ' & ' + xsweeps[-1].get_name()
                else:
                    sweep_names = xsweeps[0].get_name()

                if PhilIndex.params.xia2.settings.show_template:
                    template = self.get_indexer_sweep().get_template()
                    Chatter.banner(
                        'Autoindexing %s (%s)' %(sweep_names, template))
                else:
                    Chatter.banner('Autoindexing %s' %sweep_names)

            if not self._indxr_helper:
                result = self._index()

                if not self._indxr_done:
                    Debug.write(
                        'Looks like indexing failed - try again!')
                    continue

                solutions = { }
                for k in self._indxr_other_lattice_cell.keys():
                    solutions[k] = self._indxr_other_lattice_cell[k][
                        'cell']

                # create a helper for the indexer to manage solutions
                self._indxr_helper = _IndexerHelper(solutions)

                solution = self._indxr_helper.get()

                # compare these against the final solution, if different
                # reject solution and return - correct solution will
                # be used next cycle
                if self._indxr_lattice != solution[0] and \
                   not self._indxr_input_cell and \
                   not PhilIndex.params.xia2.settings.integrate_p1:
                    Chatter.write('Rerunning indexing lattice %s to %s' %
                                  (self._indxr_lattice, solution[0]))
                    Debug.write(
                        'Rerunning indexing with target lattice %s' % \
                        solution[0])
                    self.set_indexer_done(False)

            else:
                # rerun autoindexing with the best known current solution
                solution = self._indxr_helper.get()
                self._indxr_input_lattice = solution[0]
                self._indxr_input_cell = solution[1]
                result = self._index()

        # next finish up...
        self.set_indexer_finish_done(True)
        self._index_finish()

    if self._indxr_print:
        Chatter.write(self.show_indexer_solutions())

    # FIXED 23/OCT/06 at this stage I need to look at the list of
    # reasonable solutions and try to figure out if the indexing
    # program has picked the highest - if not, then constrain the
    # unit cell (need to implement this somewhere, sure it's
    # around!) then rerun the autoindexing (perhaps?) with this
    # new target - this means that we are always working from the
    #......... part of the code omitted here .........
示例6: get_unit_cell_errors
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def get_unit_cell_errors(stop_after=None):
    '''Actually process something...

    Estimate unit cell parameters and their uncertainties by Monte Carlo
    resampling: load all sweeps with common indexing, discard the outermost
    1% of reflections as outliers, then repeatedly refine the cell against
    random reflection subsets to build a distribution of cell parameters.

    :param stop_after: not used in the visible part of this function
                       (kept for interface compatibility).
    '''
    wd = os.getcwd()
    all_miller_indices, all_two_thetas_obs, reference_cell, reference_lattice, reference_wavelength = load_sweeps_with_common_indexing()

    Chatter.banner('Unit cell sampling')
    Debug.banner('Unit cell sampling')
    span = miller.index_span(all_miller_indices)
    Chatter.write("Found %d reflections in 2theta range %.3f - %.3f deg" % (len(all_miller_indices), min(all_two_thetas_obs), max(all_two_thetas_obs)))
    Chatter.write("Miller index range: %s - %s" % (str(span.min()), str(span.max())))
    unit_cell_info = { 'reflections':
        { 'count': len(all_miller_indices),
          'min_2theta': min(all_two_thetas_obs),
          'max_2theta': max(all_two_thetas_obs),
          'min_miller': list(span.min()),
          'max_miller': list(span.max())
        } }

    # Exclude 1% of reflections to remove potential outliers
    # eg. indexed/integrated high angle noise
    two_theta_cutoff = sorted(all_two_thetas_obs)[-int(len(all_two_thetas_obs) * 0.01)-1]
    Chatter.write("Excluding outermost 1%% of reflections (2theta >= %.3f)" % two_theta_cutoff)
    two_thetas_select = all_two_thetas_obs < two_theta_cutoff
    all_two_thetas_obs = all_two_thetas_obs.select(two_thetas_select)
    all_miller_indices = all_miller_indices.select(two_thetas_select)
    Chatter.write("Kept %d reflections in 2theta range %.3f - %.3f deg" % (len(all_miller_indices), min(all_two_thetas_obs), max(all_two_thetas_obs)))
    span = miller.index_span(all_miller_indices)
    unit_cell_info['reflections_filtered'] = \
        { 'count': len(all_miller_indices),
          'min_2theta': min(all_two_thetas_obs),
          'max_2theta': max(all_two_thetas_obs),
          'min_miller': list(span.min()),
          'max_miller': list(span.max())
        }

    # prepare MonteCarlo sampling
    mc_runs = 50
    sample_size = min(len(all_miller_indices) // 2, 100)
    unit_cell_info['sampling'] = { 'method': 'montecarlo', 'runs': mc_runs, 'used_per_run': sample_size }
    unit_cell_info['reference'] = { 'cell': reference_cell.parameters(), 'cell_volume': reference_cell.volume(),
                                    'lattice': reference_lattice, 'wavelength': reference_wavelength }
    Chatter.write("\nRandomly sampling %d x %d reflections for Monte Carlo iterations" % (mc_runs, sample_size))
    Debug.write("Refinements start with reference unit cell: %s" % reference_cell)

    MC = []
    MCconstrained = []
    used_index_range = flex.miller_index()
    used_two_theta_range_min = 1e300
    used_two_theta_range_max = 0
    used_reflections = set()

    for n in range(mc_runs): # MC sampling
        # Select sample_size reflections
        sample = flex.size_t(random.sample(range(len(all_miller_indices)), sample_size))
        used_reflections = used_reflections.union(set(sample))
        miller_indices = all_miller_indices.select(sample)
        two_thetas_obs = all_two_thetas_obs.select(sample)

        # Record the overall coverage of the sampled subsets
        span = miller.index_span(miller_indices)
        used_index_range.append(span.min())
        used_index_range.append(span.max())
        used_two_theta_range_min = min(used_two_theta_range_min, min(two_thetas_obs))
        used_two_theta_range_max = max(used_two_theta_range_max, max(two_thetas_obs))

        refined = _refinery(two_thetas_obs, miller_indices, reference_wavelength, reference_cell)
        MC.append(refined.unit_cell().parameters() + (refined.unit_cell().volume(),))
        Debug.write('Run %d refined to: %s' % (n, str(refined.unit_cell())))

        # BUGFIX: was "reference_lattice is not 'aP'" - identity comparison
        # against a string literal (undefined w.r.t. interning); a value
        # comparison is what was intended.
        if reference_lattice is not None and reference_lattice != 'aP':
            refined = _refinery(two_thetas_obs, miller_indices, reference_wavelength, reference_cell, reference_lattice[0])
            MCconstrained.append(refined.unit_cell().parameters() + (refined.unit_cell().volume(),))
            Debug.write('Run %d (constrained %s) refined to: %s' % (n, reference_lattice[0], str(refined.unit_cell())))

        # simple progress indicator on stdout
        if (n % 50) == 0:
            sys.stdout.write("\n%5s ." % (str(n) if n > 0 else ''))
        else:
            sys.stdout.write(".")
        sys.stdout.flush()

    assert used_two_theta_range_min < used_two_theta_range_max

    def stats_summary(l):
        # mean / variance / population variance / stddev / standard error
        mean = sum(l) / len(l)
        var = 0
        for y in l:
            var = var + ((y - mean) ** 2)
        popvar = var / (len(l)-1)
        popstddev = math.sqrt(popvar)
        stderr = popstddev / math.sqrt(len(l))
        return { 'mean': mean, 'variance': var, 'population_variance': popvar,
                 'population_standard_deviation': popstddev, 'standard_error': stderr }

    # was a bare Python-2 "print" statement; print('') produces the same
    # blank line and is valid Python 3
    print('')
    Chatter.write("")
    Chatter.write("Unit cell estimation based on %d Monte Carlo runs," % len(MC))
    span = miller.index_span(used_index_range)
    Chatter.write("drawn from miller indices between %s and %s" % (str(span.min()), str(span.max())))
    Chatter.write("with associated 2theta angles between %.3f and %.3f deg" % (used_two_theta_range_min, used_two_theta_range_max))
    #......... part of the code omitted here .........
示例7: _update_scaled_unit_cell
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def _update_scaled_unit_cell(self):
    """Refresh self._scalr_cell / self._scalr_cell_esd after scaling.

    With the DIALS integrater (and fast_mode off) the unit cell is
    re-refined by two-theta refinement, per project/crystal/wavelength
    group and - if there are several groups - over everything together;
    the results are also written into CIF/mmCIF output blocks.
    Otherwise the cells of the scaled reflection files are simply averaged.
    """
    # FIXME this could be brought in-house
    fast_mode = PhilIndex.params.dials.fast_mode
    if PhilIndex.params.xia2.settings.integrater == 'dials' and not fast_mode:
        from xia2.Wrappers.Dials.TwoThetaRefine import TwoThetaRefine
        from xia2.lib.bits import auto_logfiler
        Chatter.banner('Unit cell refinement')

        # Collect a list of all sweeps, grouped by project, crystal, wavelength
        groups = {}
        self._scalr_cell_dict = {}
        tt_refine_experiments, tt_refine_pickles = [], []
        for epoch in self._sweep_handler.get_epochs():
            si = self._sweep_handler.get_sweep_information(epoch)
            pi = '_'.join(si.get_project_info())
            intgr = si.get_integrater()
            groups[pi] = groups.get(pi, []) + \
                [(intgr.get_integrated_experiments(), intgr.get_integrated_reflections())]

        # Two theta refine the unit cell for each group
        for pi in groups.keys():
            tt_grouprefiner = TwoThetaRefine()
            tt_grouprefiner.set_working_directory(self.get_working_directory())
            auto_logfiler(tt_grouprefiner)
            # NOTE: Python 2 - zip() returns a subscriptable list here
            files = zip(*groups[pi])
            tt_grouprefiner.set_experiments(files[0])
            tt_grouprefiner.set_pickles(files[1])
            tt_refine_experiments.extend(files[0])
            tt_refine_pickles.extend(files[1])
            tt_grouprefiner.set_reindex_operator(self._spacegroup_reindex_operator)
            tt_grouprefiner.run()
            Chatter.write('%s: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f' % \
                tuple([''.join(pi.split('_')[2:])] + list(tt_grouprefiner.get_unit_cell())))
            self._scalr_cell_dict[pi] = (tt_grouprefiner.get_unit_cell(), tt_grouprefiner.get_unit_cell_esd(), tt_grouprefiner.import_cif(), tt_grouprefiner.import_mmcif())
            # with several groups, also record each group's cell in its own
            # CIF/mmCIF block
            if len(groups) > 1:
                cif_in = tt_grouprefiner.import_cif()
                cif_out = CIF.get_block(pi)
                for key in sorted(cif_in.keys()):
                    cif_out[key] = cif_in[key]
                mmcif_in = tt_grouprefiner.import_mmcif()
                mmcif_out = mmCIF.get_block(pi)
                for key in sorted(mmcif_in.keys()):
                    mmcif_out[key] = mmcif_in[key]

        # Two theta refine everything together
        if len(groups) > 1:
            tt_refiner = TwoThetaRefine()
            tt_refiner.set_working_directory(self.get_working_directory())
            auto_logfiler(tt_refiner)
            tt_refiner.set_experiments(tt_refine_experiments)
            tt_refiner.set_pickles(tt_refine_pickles)
            tt_refiner.set_reindex_operator(self._spacegroup_reindex_operator)
            tt_refiner.run()
            self._scalr_cell = tt_refiner.get_unit_cell()
            Chatter.write('Overall: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f' % tt_refiner.get_unit_cell())
            self._scalr_cell_esd = tt_refiner.get_unit_cell_esd()
            cif_in = tt_refiner.import_cif()
            mmcif_in = tt_refiner.import_mmcif()
        else:
            # single group: reuse its result (NOTE: Python 2 - values() is a
            # list, so [0] indexing works)
            self._scalr_cell, self._scalr_cell_esd, cif_in, mmcif_in = self._scalr_cell_dict.values()[0]

        import dials.util.version
        cif_out = CIF.get_block('xia2')
        mmcif_out = mmCIF.get_block('xia2')
        cif_out['_computing_cell_refinement'] = mmcif_out['_computing.cell_refinement'] = 'DIALS 2theta refinement, %s' % dials.util.version.dials_version()
        for key in sorted(cif_in.keys()):
            cif_out[key] = cif_in[key]
        for key in sorted(mmcif_in.keys()):
            mmcif_out[key] = mmcif_in[key]
        Debug.write('Unit cell obtained by two-theta refinement')
    else:
        # non-DIALS (or fast-mode) path: average the cells of the scaled files
        ami = AnalyseMyIntensities()
        ami.set_working_directory(self.get_working_directory())
        average_unit_cell, ignore_sg = ami.compute_average_cell(
            [self._scalr_scaled_refl_files[key] for key in
             self._scalr_scaled_refl_files])
        Debug.write('Computed average unit cell (will use in all files)')
        self._scalr_cell = average_unit_cell
        self._scalr_cell_esd = None

        # Write average unit cell to .cif
        cif_out = CIF.get_block('xia2')
        cif_out['_computing_cell_refinement'] = 'AIMLESS averaged unit cell'
        for cell, cifname in zip(self._scalr_cell,
                                 ['length_a', 'length_b', 'length_c', 'angle_alpha', 'angle_beta', 'angle_gamma']):
            cif_out['_cell_%s' % cifname] = cell

        Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
            self._scalr_cell)
示例8: _index_prepare
# 需要导入模块: from xia2.Handlers.Streams import Chatter [as 别名]
# 或者: from xia2.Handlers.Streams.Chatter import banner [as 别名]
def _index_prepare(self):
    """Run DIALS spotfinding on every sweep before indexing.

    For each (imageset, sweep) pair this writes a datablock JSON, optionally
    estimates the detector gain, configures and runs the DIALS spotfinder
    from the PHIL parameters, and collects the strong-spot pickle and output
    datablock filenames. NOTE(review): this excerpt is truncated inside the
    trailing fast-mode check.
    """
    from xia2.Handlers.Citations import Citations
    Citations.cite('dials')

    #all_images = self.get_matching_images()
    #first = min(all_images)
    #last = max(all_images)

    spot_lists = []
    datablocks = []

    for imageset, xsweep in zip(self._indxr_imagesets, self._indxr_sweeps):
        Chatter.banner('Spotfinding %s' %xsweep.get_name())
        first, last = imageset.get_scan().get_image_range()

        # at this stage, break out to run the DIALS code: this sets itself up
        # now cheat and pass in some information... save re-reading all of the
        # image headers

        # FIXME need to adjust this to allow (say) three chunks of images

        from dxtbx.serialize import dump
        from dxtbx.datablock import DataBlock
        sweep_filename = os.path.join(
            self.get_working_directory(), '%s_datablock.json' %xsweep.get_name())
        dump.datablock(DataBlock([imageset]), sweep_filename)

        gain = PhilIndex.params.xia2.settings.input.gain
        if gain is libtbx.Auto:
            # estimate the gain once and cache it back into the PHIL params
            gain_estimater = self.EstimateGain()
            gain_estimater.set_sweep_filename(sweep_filename)
            gain_estimater.run()
            gain = gain_estimater.get_gain()
            Chatter.write('Estimated gain: %.2f' %gain)
            PhilIndex.params.xia2.settings.input.gain = gain

        # FIXME this should really use the assigned spot finding regions
        #offset = self.get_frame_offset()
        spotfinder = self.Spotfinder()
        if last - first > 10:
            spotfinder.set_write_hot_mask(True)
        spotfinder.set_input_sweep_filename(sweep_filename)
        spotfinder.set_output_sweep_filename(
            '%s_%s_datablock.json' %(spotfinder.get_xpid(), xsweep.get_name()))
        spotfinder.set_input_spot_filename(
            '%s_%s_strong.pickle' %(spotfinder.get_xpid(), xsweep.get_name()))
        if PhilIndex.params.dials.fast_mode:
            # fast mode: only spotfind on a selection of image wedges
            wedges = self._index_select_images_i(imageset)
            spotfinder.set_scan_ranges(wedges)
        else:
            spotfinder.set_scan_ranges([(first, last)])
        if PhilIndex.params.dials.find_spots.phil_file is not None:
            spotfinder.set_phil_file(PhilIndex.params.dials.find_spots.phil_file)
        min_spot_size = PhilIndex.params.dials.find_spots.min_spot_size
        if min_spot_size is libtbx.Auto:
            # pixel-array detectors get a smaller default minimum spot size
            if imageset.get_detector()[0].get_type() == 'SENSOR_PAD':
                min_spot_size = 3
            else:
                min_spot_size = None
        if min_spot_size is not None:
            spotfinder.set_min_spot_size(min_spot_size)
        min_local = PhilIndex.params.dials.find_spots.min_local
        if min_local is not None:
            spotfinder.set_min_local(min_local)
        sigma_strong = PhilIndex.params.dials.find_spots.sigma_strong
        if sigma_strong:
            spotfinder.set_sigma_strong(sigma_strong)
        gain = PhilIndex.params.xia2.settings.input.gain
        if gain:
            spotfinder.set_gain(gain)
        filter_ice_rings = PhilIndex.params.dials.find_spots.filter_ice_rings
        if filter_ice_rings:
            spotfinder.set_filter_ice_rings(filter_ice_rings)
        kernel_size = PhilIndex.params.dials.find_spots.kernel_size
        if kernel_size:
            spotfinder.set_kernel_size(kernel_size)
        global_threshold = PhilIndex.params.dials.find_spots.global_threshold
        if global_threshold is not None:
            spotfinder.set_global_threshold(global_threshold)
        spotfinder.run()

        spot_filename = spotfinder.get_spot_filename()
        if not os.path.exists(spot_filename):
            raise RuntimeError("Spotfinding failed: %s does not exist."
                               %os.path.basename(spot_filename))

        spot_lists.append(spot_filename)
        datablocks.append(spotfinder.get_output_sweep_filename())

        from libtbx import easy_pickle
        from dials.util.ascii_art import spot_counts_per_image_plot
        refl = easy_pickle.load(spot_filename)
        if not len(refl):
            raise RuntimeError('No spots found in sweep %s' %xsweep.get_name())
        Chatter.write(spot_counts_per_image_plot(refl), strip=False)

        if not PhilIndex.params.dials.fast_mode:
            #......... part of the code omitted here .........