This article collects typical usage examples of the Python function veles.compat.from_none. If you are wondering what from_none does, how to call it, or what real-world usage looks like, the curated examples below should help.
The following shows 15 code examples of from_none, sorted by popularity by default.
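All 15 examples share the same pattern: inside an except handler, an exception is passed through from_none before being raised, which suppresses Python 3's implicit exception chaining (the "During handling of the above exception, another exception occurred" context). As a rough sketch of the idea — the actual veles.compat implementation may differ in detail — from_none can be written as:

def from_none(exc):
    # Assigning __cause__ also sets __suppress_context__ (PEP 415),
    # so the original handler's context is hidden from the traceback.
    exc.__cause__ = None
    return exc

# Hypothetical usage, mirroring the examples below:
try:
    {}["missing"]
except KeyError:
    raise from_none(ValueError("lookup failed, no chained context shown"))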
Example 1: initialize
def initialize(self, device, **kwargs):
    super(FullBatchLoader, self).initialize(device=device, **kwargs)
    assert self.total_samples > 0
    self.analyze_original_dataset()
    self._map_original_labels()
    if isinstance(self.device, NumpyDevice):
        return
    self.info("Will try to store the entire dataset on the device")
    try:
        self.init_vectors(self.original_data, self.minibatch_data)
    except CLRuntimeError as e:
        if e.code == CL_MEM_OBJECT_ALLOCATION_FAILURE:
            self.warning("Failed to store the entire dataset on the device")
            self.force_numpy = True
            self.device = NumpyDevice()
            return
        else:
            raise from_none(e)
    except CUDARuntimeError as e:
        if e.code == CUDA_ERROR_OUT_OF_MEMORY:
            self.warning("Failed to store the entire dataset on the device")
            self.force_numpy = True
            self.device = NumpyDevice()
            return
        else:
            raise from_none(e)
    if self.has_labels:
        self.init_vectors(self._mapped_original_labels_,
                          self.minibatch_labels)
    if not self.shuffled_indices:
        self.shuffled_indices.mem = numpy.arange(
            self.total_samples, dtype=Loader.LABEL_DTYPE)
    self.init_vectors(self.shuffled_indices, self.minibatch_indices)
Example 2: _generate_source
def _generate_source(self, defines, include_dirs, dtype, suffix,
                     template_kwargs):
    if defines and not isinstance(defines, dict):
        raise RuntimeError("defines must be a dictionary")
    jsuffix = ".j" + suffix
    suffix = "." + suffix
    lines = []

    def define(cdefs, undef=False):
        for key, value in sorted(cdefs.items()):
            if not undef:
                lines.append("#define %(key)s %(value)s\n" % locals())
            else:
                lines.append("#undef %(key)s\n" % locals())

    my_defines = copy(defines) if defines else {}
    self._adjust_defines(my_defines, dtype)
    define(my_defines)
    for name, defs in sorted(self.sources_.items()):
        define(defs)
        if len(template_kwargs) == 0:
            # No templating
            lines.append("#include \"%s%s\"\n" % (name, suffix))
        else:
            try:
                self._include_file(include_dirs, name + jsuffix, lines)
            except IncludeError:
                try:
                    self._include_file(include_dirs, name + suffix, lines)
                except IncludeError:
                    raise from_none(
                        IncludeError("Unable to include \"%s(%s|%s)\"" %
                                     (name, jsuffix, suffix)))
        define(defs, undef=True)
        lines.append("\n")
    source = "".join(lines)
    if len(template_kwargs) == 0:
        return source, my_defines
    include_re = re.compile(
        r'^\s*#\s*include\s*(<(\w+%(sfx)s)>|"(\w+%(sfx)s)")\s*$' %
        {"sfx": "\\" + jsuffix}, flags=re.MULTILINE)
    match = include_re.search(source)
    while match is not None:
        file = match.group(2) or match.group(3)
        lines = []
        self._include_file(include_dirs, file, lines)
        source = include_re.sub("\n" + "".join(lines), source, count=1)
        match = include_re.search(source)
    try:
        source = Template(source).render(**template_kwargs)
    except TemplateError as e:
        self.error(
            "Failed to render the template. Here is the source:\n%s\n",
            "".join("%04d\t%s" % (i + 1, l)
                    for i, l in enumerate(lines)))
        raise from_none(e)
    return source, my_defines
Example 3: initialize
def initialize(self, **kwargs):
    """Loads the data, initializes indices, shuffles the training set.
    """
    if self.testing:
        self.shuffle_limit = 0
        self.global_offset = 0
        del self.failed_minibatches[:]
    try:
        super(Loader, self).initialize(**kwargs)
    except AttributeError:
        pass
    try:
        self.load_data()
    except AttributeError as e:
        self.exception("Failed to load the data")
        raise from_none(e)
    if self.class_lengths[TRAIN] > 0:
        self.reset_normalization()
    self.max_minibatch_size = kwargs.get("minibatch_size",
                                         self.max_minibatch_size)
    self.on_before_create_minibatch_data()
    self._calc_class_end_offsets()
    sn_log_str = "Samples number: test: %d, validation: %d, train: %d"
    if self.train_ratio == 1.0:
        self.info(sn_log_str, *self.class_lengths)
    else:
        self.info(sn_log_str + " (used: %d)", *(self.class_lengths + [
            self.effective_class_end_offsets[TRAIN] -
            self.effective_class_end_offsets[VALID]]))
    self.minibatch_labels.reset(numpy.zeros(
        self.max_minibatch_size, dtype=Loader.LABEL_DTYPE)
        if self.has_labels else None)
    self.raw_minibatch_labels[:] = (None,) * self.max_minibatch_size
    self.minibatch_indices.reset(numpy.zeros(
        self.max_minibatch_size, dtype=Loader.INDEX_DTYPE))
    try:
        self.create_minibatch_data()
    except Exception as e:
        self.error("Failed to create minibatch data")
        raise from_none(e)
    if not self.minibatch_data:
        raise error.BadFormatError("minibatch_data MUST be initialized in "
                                   "create_minibatch_data()")
    self.analyze_dataset()
    if self.testing:
        self.shuffled_indices.mem = None
    if not self.restored_from_snapshot or self.testing:
        self.shuffle()
Example 4: max_supposed
def max_supposed(self, value):
    try:
        1.0 + value
    except TypeError:
        raise from_none(TypeError(
            "max_value must be set to floating point number"))
    self._max_value = value
Example 5: evaluate
def evaluate(self, chromo):
    for tune, val in zip(self.tuneables, chromo.numeric):
        tune <<= val
    chromo.config = copy.deepcopy(self.config)
    with NamedTemporaryFile(mode="wb", prefix="veles-optimization-config-",
                            suffix=".%d.pickle" % best_protocol) as fcfg:
        pickle.dump(self.config, fcfg)
        fcfg.flush()
        with NamedTemporaryFile(
                mode="r", prefix="veles-optimization-result-",
                suffix=".%d.pickle" % best_protocol) as fres:
            argv = ["--result-file", fres.name, "--stealth", "--log-id",
                    self.launcher.log_id] + self._filtered_argv_ + \
                ["root.common.disable.publishing=True"]
            if self.plotters_are_disabled:
                argv = ["-p", ""] + argv
            i = -1
            while "=" in argv[i]:
                i -= 1
            argv[i] = fcfg.name
            result = self._exec(argv, fres)
            if result is None:
                raise EvaluationError()
            try:
                chromo.fitness = result["EvaluationFitness"]
            except KeyError:
                raise from_none(EvaluationError(
                    "Failed to find \"EvaluationFitness\" in the evaluation "
                    "results"))
            chromo.snapshot = result.get("Snapshot")
            self.info("Chromosome #%d was evaluated to %f",
                      self._chromosome_index, chromo.fitness)
Example 6: upload
def upload(self, token, metadata, reader):
    name = metadata["name"]
    version = metadata["version"]
    rep = self.repos.get(name)
    if rep is None:
        where = os.path.join(self.root, name)
        need_init = True
        if os.path.exists(where):
            self.warning("%s exists - cleared", where)
            shutil.rmtree(where)
        os.mkdir(where)
    else:
        where = dirname(rep.path)
        need_init = False
    with TarFile.open(mode="r|gz", fileobj=reader) as tar:
        tar.extractall(where)
    if not need_init:
        self.add_version(rep, version)
    else:
        self.repos[name] = rep = pygit2.init_repository(where)
        try:
            self.add_version(rep, version)
        except Exception as e:
            shutil.rmtree(where)
            del self.repos[name]
            self.error("Failed to initialize %s", name)
            raise from_none(e)
    rep.config["forge.tokens"] = self.scramble(token)
    self._generate_images(metadata, rep)
Example 7: run
def run(self, loop=True):
    forge = root.common.forge
    self.application = web.Application([
        (self.uri(forge.service_name), ServiceHandler, {"server": self}),
        (self.uri(forge.upload_name), UploadHandler, {"server": self}),
        (self.uri(forge.fetch_name), FetchHandler, {"server": self}),
        (self.uri("forge.html"), ForgeHandler, {"server": self}),
        (self.uri("image.html"), ImagePageHandler),
        (self.uri("thumbnails/(.*)"), ThumbnailHandler,
         {"path": self.root}),
        (self.uri("images/(.*)"), ImageStaticHandler, {"path": self.root}),
        (self.uri("((js|css|fonts|img|maps)/.*)"),
         web.StaticFileHandler, {'path': root.common.web.root}),
        (self.suburi, web.RedirectHandler,
         {"url": self.uri("forge.html"), "permanent": True}),
        (self.suburi[:-1], web.RedirectHandler,
         {"url": self.uri("forge.html"), "permanent": True}),
    ], template_loader=ForgeTemplateLoader(
        root.common.web.templates, root.common.forge.email_templates))
    try:
        self.application.listen(self.port)
    except OSError as e:
        self.error("Failed to open port %d", self.port)
        raise from_none(e)
    self.info("Listening on port %d, suburi %s" % (self.port, self.suburi))
    if loop:
        IOLoop.instance().start()
Example 8: __init__
def __init__(cls, name, bases, clsdict):
    super(BackendRegistry, cls).__init__(name, bases, clsdict)
    try:
        BackendRegistry.backends[clsdict["BACKEND"]] = cls
    except KeyError:
        raise from_none(KeyError("%s does not define BACKEND" % cls))
    assert "PRIORITY" in clsdict, "%s does not define PRIORITY" % cls
Example 9: _connectOrBind
def _connectOrBind(self, endpoints):
    """
    Connect and/or bind socket to endpoints.
    """
    rnd_vals = []
    for endpoint in endpoints:
        if endpoint.type == ZmqEndpointType.connect:
            self.debug("Connecting to %s...", endpoint)
            self.socket.connect(endpoint.address)
        elif endpoint.type == ZmqEndpointType.bind:
            self.debug("Binding to %s...", endpoint)
            if endpoint.address.startswith("rndtcp://") or \
                    endpoint.address.startswith("rndepgm://"):
                try:
                    endpos = endpoint.address.find("://") + 3
                    proto = endpoint.address[3:endpos]
                    splitted = endpoint.address[endpos:].split(":")
                    min_port, max_port, max_tries = splitted[-3:]
                    addr = ":".join(splitted[:-3])
                except ValueError:
                    raise from_none(ValueError("Failed to parse %s" %
                                               endpoint.address))
                rnd_vals.append(self.socket.bind_to_random_port(
                    proto + addr, int(min_port), int(max_port),
                    int(max_tries)))
            elif endpoint.address.startswith("rndipc://"):
                prefix, suffix = endpoint.address[9:].split(":")
                ipc_fd, ipc_fn = mkstemp(suffix, prefix)
                self.socket.bind("ipc://" + ipc_fn)
                rnd_vals.append(ipc_fn)
                os.close(ipc_fd)
            else:
                self.socket.bind(endpoint.address)
        else:
            assert False, "Unknown endpoint type %r" % endpoint
    return rnd_vals
Example 10: _get_some_device
def _get_some_device(self, **kwargs):
    """Gets some device from the available CUDA devices.
    Returns True if any device was selected, otherwise, False.
    """
    device = self.parse_device(**kwargs)
    try:
        devices = cu.Devices()
    except (OSError, cu.CUDARuntimeError):
        devices = None
    if devices is None or not len(devices):
        raise DeviceNotFoundError("No CUDA devices were found")
    self._id = device
    if device == "":
        context = devices.create_some_context()
    else:
        try:
            device = devices[int(device)]
        except IndexError:
            raise from_none(DeviceNotFoundError(
                "CUDA device %s was not found." % device))
        context = device.create_context()
    self._context_ = context
    device = self.context.device
    self.device_info = DeviceInfo(
        desc=device.name, memsize=device.total_mem,
        memalign=4096, version=device.compute_capability,
        device_type="CUDA",
        max_work_group_size=device.max_grid_dims,
        max_work_item_sizes=device.max_block_dims,
        local_memsize=device.max_shared_memory_per_block)
    return True
Example 11: initialize
def initialize(self, device, **kwargs):
    super(Deconv, self).initialize(device, **kwargs)
    self._dtype = self.input.dtype
    self.weights_shape = (tuple(reversed(self.weights.shape))
                          if self.weights_transposed
                          else self.weights.shape)
    if hasattr(self, "bias"):
        raise ValueError("bias should not be set")
    if (len(self.input.shape) != 4 or
            self.input.shape[3] != self.n_kernels):
        raise ValueError("Incorrectly shaped input encountered")
    if (len(self.weights_shape) != 2 or
            self.weights_shape[0] != self.n_kernels or
            self.weights_shape[1] % (self.kx * self.ky) != 0):
        raise ValueError("Incorrectly shaped weights encountered")
    output_shape = tuple(self.output_shape_source.shape)
    if len(output_shape) != 4:
        raise ValueError("Incorrect output_shape_source shape")
    if output_shape[0] != self.input.shape[0]:
        raise ValueError(
            "output_shape_source.shape[0] != input.shape[0]")
    try:
        self.check_padding_is_safe(self.kx, self.ky, self.sliding)
    except ValueError as e:
        if not self.unsafe_padding:
            raise from_none(e)
        self.warning("The padding will be unsafe")
        self._create_hits(output_shape)
    padding = Deconv.compute_padding(
        output_shape[2], output_shape[1], self.kx, self.ky, self.sliding)
    if self.padding is None:  # pylint: disable=E0203
        self.padding = padding
    elif self.padding != padding:
        if not self.unsafe_padding:
            raise ValueError(
                "Expected padding %s but got %s" % (padding, self.padding))
        self._create_hits(output_shape)
    if self.output:
        assert self.output.shape[1:] == output_shape[1:]
    if not self.output or self.output.shape[0] != output_shape[0]:
        self.output.reset(numpy.zeros(output_shape, dtype=self._dtype))
    self._output_shape = output_shape
    self._sy, self._sx, self._n_channels = self._output_shape[1:]
    self._kernel_size = self.kx * self.ky * self._n_channels
    self._kernel_app_per_image = self.input.sample_size // self.n_kernels
    self._kernel_app_total = (self._kernel_app_per_image *
                              self.input.shape[0])
    self.init_vectors(self.input, self.weights, self.output, self.hits)
Example 12: sigint_handler
def sigint_handler(sign, frame):
    """
    Private method - handler for SIGINT.
    """
    ThreadPool.interrupted = True
    ThreadPool.shutdown_pools(execute_remaining=False, force=True)
    log = logging.getLogger("ThreadPool")
    try:
        # ThreadPool.sigint_initial(sign, frame) does not work on Python 2
        sigint_initial = ThreadPool.__dict__['sigint_initial']
        if sigint_initial == ThreadPool.sigint_handler:
            log.warning("Prevented an infinite recursion: sigint_initial")
        else:
            sigint_initial(sign, frame)
    except KeyboardInterrupt:
        if not reactor.running:
            if not ThreadPool.sigint_printed:
                log.warning("Raising KeyboardInterrupt since "
                            "Twisted reactor is not running")
                ThreadPool.sigint_printed = True
                raise from_none(KeyboardInterrupt())
            ThreadPool._warn_about_sigint_hysteria(log)
        else:
            if not ThreadPool.sigint_printed:
                log.critical("KeyboardInterrupt")
                ThreadPool.debug_deadlocks()
                ThreadPool.sigint_printed = True
            else:
                if not is_interactive():
                    ThreadPool._warn_about_sigint_hysteria(log)
                else:
                    ThreadPool._warn_about_sigint_interactive_reactor(log)
Example 13: load_pickles
def load_pickles(self, index, pickles, pbar):
    unpickled = []
    for pick in pickles:
        try:
            with open(pick, "rb") as fin:
                self.debug("Loading %s...", pick)
                if six.PY3:
                    loaded = pickle.load(fin, encoding='charmap')
                else:
                    loaded = pickle.load(fin)
                unpickled.append(loaded)
                pbar.inc()
        except Exception as e:
            self.warning(
                "Failed to load %s (part of %s set)" %
                (pick, CLASS_NAME[index]))
            raise from_none(e)
    data = []
    labels = []
    for obj, pick in zip(unpickled, pickles):
        if not isinstance(obj, dict):
            raise TypeError(
                "%s has the wrong format (part of %s set)" %
                (pick, CLASS_NAME[index]))
        try:
            data.append(obj["data"])
            labels.append(
                numpy.array(obj["labels"], dtype=Loader.LABEL_DTYPE))
        except KeyError as e:
            self.error("%s has the wrong format (part of %s set)",
                       pick, CLASS_NAME[index])
            raise from_none(e)
    lengths = [0, sum(len(l) for l in labels)]
    for arr in data:
        lengths[0] += arr.shape[0]
        if arr.shape[1:] != data[0].shape[1:]:
            raise error.BadFormatError(
                "Array has a different shape: expected %s, got %s "
                "(%s set)" % (data[0].shape[1:],
                              arr.shape[1:], CLASS_NAME[index]))
    if lengths[0] != lengths[1]:
        # "%" formatting requires a tuple, not a list
        raise error.BadFormatError(
            "Data and labels have different numbers of samples (data %d,"
            " labels %d)" % tuple(lengths))
    length = lengths[0]
    self.class_lengths[index] = length
    return length, data, labels
Example 14: initialize_workflow
def initialize_workflow():
    try:
        self.workflow.initialize(device=self.device, **kwargs)
    except Exception as ie:
        self.error("Failed to initialize the workflow")
        self._stop_graphics()
        self.device_thread_pool_detach()
        raise from_none(ie)
Example 15: _after_backend_init
def _after_backend_init(self):
    try:
        self.fill_indices(0, min(self.max_minibatch_size,
                                 self.total_samples))
    except CLRuntimeError as e:
        if e.code == CL_MEM_OBJECT_ALLOCATION_FAILURE:
            self.warning("Failed to store the entire dataset on the device")
            self.force_numpy = True
            self.device = NumpyDevice()
        else:
            raise from_none(e)
    except CUDARuntimeError as e:
        if e.code == CUDA_ERROR_OUT_OF_MEMORY:
            self.warning("Failed to store the entire dataset on the device")
            self.force_numpy = True
            self.device = NumpyDevice()
        else:
            raise from_none(e)