This article collects typical usage examples of the getargspec function from tensorflow.python.util.tf_inspect. If you are wondering what getargspec does, how to call it, or what real code that uses it looks like, the curated examples below may help.
The following shows 15 code examples of the getargspec function, sorted by popularity by default.
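Before the individual examples, here is a minimal sketch of what tf_inspect.getargspec returns (assuming TensorFlow is installed; the scale function is illustrative). It mirrors the legacy inspect.getargspec interface, returning an ArgSpec namedtuple with args, varargs, keywords and defaults fields, while additionally unwrapping TensorFlow decorators and, as the tests below show, handling functools.partial objects.

from tensorflow.python.util import tf_inspect

def scale(x, factor=2.0, *args, **kwargs):
  return x * factor

spec = tf_inspect.getargspec(scale)
print(spec.args)      # ['x', 'factor']
print(spec.varargs)   # 'args'
print(spec.keywords)  # 'kwargs'
print(spec.defaults)  # (2.0,)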
Example 1: fn_args
def fn_args(fn):
  """Get argument names for function-like object.

  Args:
    fn: Function, or function-like object (e.g., result of `functools.partial`).

  Returns:
    `tuple` of string argument names.

  Raises:
    ValueError: if partial function has positionally bound arguments
  """
  _, fn = tf_decorator.unwrap(fn)

  # Handle callables.
  if hasattr(fn, '__call__') and tf_inspect.ismethod(fn.__call__):
    return tuple(tf_inspect.getargspec(fn.__call__).args)

  # Handle functools.partial and similar objects.
  if hasattr(fn, 'func') and hasattr(fn, 'keywords') and hasattr(fn, 'args'):
    # Handle nested partial.
    original_args = fn_args(fn.func)
    if not original_args:
      return tuple()

    return tuple([
        arg for arg in original_args[len(fn.args):]
        if arg not in set((fn.keywords or {}).keys())
    ])

  # Handle function.
  return tuple(tf_inspect.getargspec(fn).args)
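A hypothetical usage sketch of fn_args, assuming the helper above and its imports (tf_decorator, tf_inspect) are in scope: positionally bound and keyword-bound arguments of a functools.partial are dropped from the result.

import functools

def train(features, labels, learning_rate=0.1):
  return features, labels, learning_rate

print(fn_args(train))                                        # ('features', 'labels', 'learning_rate')
print(fn_args(functools.partial(train, learning_rate=0.5)))  # ('features', 'labels')
print(fn_args(functools.partial(train, [1.0, 2.0])))         # ('labels', 'learning_rate')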
Example 2: testGetArgSpecOnPartialArgumentWithConvertibleToFalse
def testGetArgSpecOnPartialArgumentWithConvertibleToFalse(self):
  """Tests getargspec on partial function with args that convert to False."""

  def func(m, n):
    return 2 * m + n

  partial_func = functools.partial(func, m=0)
  exception_message = (r"Some arguments \['n'\] do not have default value, "
                       "but they are positioned after those with default "
                       "values. This can not be expressed with ArgSpec.")
  with self.assertRaisesRegexp(ValueError, exception_message):
    tf_inspect.getargspec(partial_func)
Example 3: testGetArgSpecOnPartialInvalidArgspec
def testGetArgSpecOnPartialInvalidArgspec(self):
  """Tests getargspec on partial function that doesn't have valid argspec."""

  def func(m, n, l, k=4):
    return 2 * m + l + n * k

  partial_func = functools.partial(func, n=7)
  exception_message = (r"Some arguments \['l'\] do not have default value, "
                       "but they are positioned after those with default "
                       "values. This can not be expressed with ArgSpec.")
  with self.assertRaisesRegexp(ValueError, exception_message):
    tf_inspect.getargspec(partial_func)
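The two tests above assert the same failure mode. As a standalone sketch (assuming TensorFlow is installed), keyword-binding an earlier parameter gives it a default while a later parameter still has none, an ordering that the ArgSpec tuple cannot express, so getargspec raises ValueError:

import functools
from tensorflow.python.util import tf_inspect

def func(m, n, l, k=4):
  return 2 * m + l + n * k

try:
  tf_inspect.getargspec(functools.partial(func, n=7))
except ValueError as e:
  print(e)  # Some arguments ['l'] do not have default value, ...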
Example 4: _loop_fn_has_config
def _loop_fn_has_config(loop_fn):
  """Test if `loop_fn` has a `pfor_config` argument."""
  if tf_inspect.isfunction(loop_fn):
    argspec = tf_inspect.getargspec(loop_fn)
    return PFOR_CONFIG_ARG in argspec.args
  elif isinstance(loop_fn, functools.partial):
    fn = loop_fn.func
    argspec = tf_inspect.getargspec(fn)
    return (PFOR_CONFIG_ARG in argspec.args and
            PFOR_CONFIG_ARG not in loop_fn.keywords)
  else:
    loop_class = tf_decorator.unwrap(loop_fn)[1]
    if not hasattr(loop_class, "__call__"):
      raise ValueError("loop_fn object did not have a __call__ method")
    argspec = tf_inspect.getargspec(loop_class.__call__)
    return PFOR_CONFIG_ARG in argspec.args
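A hypothetical sketch of how the check above behaves for the common cases (in the pfor sources PFOR_CONFIG_ARG is the string 'pfor_config'; the loop functions below are made up):

import functools
from tensorflow.python.util import tf_inspect

PFOR_CONFIG_ARG = 'pfor_config'

def loop_fn_plain(i):
  return i

def loop_fn_with_config(i, pfor_config):
  return i

print(PFOR_CONFIG_ARG in tf_inspect.getargspec(loop_fn_plain).args)        # False
print(PFOR_CONFIG_ARG in tf_inspect.getargspec(loop_fn_with_config).args)  # True

# A partial only counts if pfor_config has not been bound already.
bound = functools.partial(loop_fn_with_config, pfor_config=None)
print(PFOR_CONFIG_ARG not in bound.keywords)  # False -> _loop_fn_has_config(bound) is False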
Example 5: check_params
def check_params(self, params):
  """Checks for user typos in "params".

  Arguments:
    params: dictionary; the parameters to be checked

  Raises:
    ValueError: if any member of `params` is not a valid argument.
  """
  legal_params_fns = [
      Sequential.fit, Sequential.predict, Sequential.predict_classes,
      Sequential.evaluate
  ]
  if self.build_fn is None:
    legal_params_fns.append(self.__call__)
  elif (not isinstance(self.build_fn, types.FunctionType) and
        not isinstance(self.build_fn, types.MethodType)):
    legal_params_fns.append(self.build_fn.__call__)
  else:
    legal_params_fns.append(self.build_fn)

  legal_params = []
  for fn in legal_params_fns:
    legal_params += tf_inspect.getargspec(fn)[0]
  legal_params = set(legal_params)

  for params_name in params:
    if params_name not in legal_params:
      if params_name != 'nb_epoch':
        raise ValueError('{} is not a legal parameter'.format(params_name))
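A minimal standalone sketch of the same idea using only the standard library (the fit and predict functions below are made up; the real code collects the argument names of the Keras Sequential methods and of build_fn):

import inspect

def fit(x, y, epochs=1, batch_size=32):
  pass

def predict(x, batch_size=32):
  pass

legal_params = set()
for fn in (fit, predict):
  legal_params.update(inspect.getfullargspec(fn).args)

print(sorted(legal_params))       # ['batch_size', 'epochs', 'x', 'y']
print('verbose' in legal_params)  # False -> check_params would raise ValueError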
Example 6: _make_prediction_gan_model
def _make_prediction_gan_model(input_data, input_data_domain_label,
                               generator_fn, generator_scope):
  """Make a `StarGANModel` from just the generator."""
  # If `generator_fn` has an argument `mode`, pass mode to it.
  if 'mode' in inspect.getargspec(generator_fn).args:
    generator_fn = functools.partial(
        generator_fn, mode=model_fn_lib.ModeKeys.PREDICT)
  with variable_scope.variable_scope(generator_scope) as gen_scope:
    # pylint:disable=protected-access
    input_data = tfgan_train._convert_tensor_or_l_or_d(input_data)
    input_data_domain_label = tfgan_train._convert_tensor_or_l_or_d(
        input_data_domain_label)
    # pylint:enable=protected-access
    generated_data = generator_fn(input_data, input_data_domain_label)
  generator_variables = variable_lib.get_trainable_variables(gen_scope)

  return tfgan_tuples.StarGANModel(
      input_data=input_data,
      input_data_domain_label=None,
      generated_data=generated_data,
      generated_data_domain_target=input_data_domain_label,
      reconstructed_data=None,
      discriminator_input_data_source_predication=None,
      discriminator_generated_data_source_predication=None,
      discriminator_input_data_domain_predication=None,
      discriminator_generated_data_domain_predication=None,
      generator_variables=generator_variables,
      generator_scope=generator_scope,
      generator_fn=generator_fn,
      discriminator_variables=None,
      discriminator_scope=None,
      discriminator_fn=None)
Example 7: assert_stmt
def assert_stmt(expression1, expression2):
  """Functional form of an assert statement.

  This follows the semantics of the Python assert statement, however the
  concrete implementations may deviate from it. See the respective
  implementation for details.

  In general, the assert statement should not be used for control flow.
  Furthermore, it is encouraged that the assertion expressions should not have
  side effects.

  Args:
    expression1: Any
    expression2: Callable[[], Any], returns the expression to include in the
      error message when expression1 evaluates to False. When expression1 is
      True, the result of expression2 will not be evaluated, however,
      expression2 itself may be evaluated in some implementations.

  Returns:
    Any, implementation-dependent.

  Raises:
    ValueError: if any arguments are illegal.
  """
  if not callable(expression2):
    raise ValueError('{} must be a callable'.format(expression2))
  args, _, keywords, _ = tf_inspect.getargspec(expression2)
  if args or keywords:
    raise ValueError('{} may not have any arguments'.format(expression2))

  if tensor_util.is_tensor(expression1):
    return _tf_assert_stmt(expression1, expression2)
  else:
    return _py_assert_stmt(expression1, expression2)
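The validation above unpacks the ArgSpec as (args, varargs, keywords, defaults) and rejects any callable that declares positional arguments or **kwargs. A standalone sketch of the same check using the standard library (the helper name is illustrative):

import inspect

def _check_no_arg_callable(fn):
  spec = inspect.getfullargspec(fn)
  if spec.args or spec.varkw:
    raise ValueError('{} may not have any arguments'.format(fn))

_check_no_arg_callable(lambda: 'assertion message')  # passes
# _check_no_arg_callable(lambda x: x)                # would raise ValueError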
Example 8: _recompute_grad
def _recompute_grad(fn, args, use_data_dep=_USE_DEFAULT, tupleize_grads=False):
  """See recompute_grad."""
  has_is_recompute_kwarg = "is_recomputing" in tf_inspect.getargspec(fn).args
  for arg in args:
    if not isinstance(arg, framework_ops.Tensor):
      raise ValueError("All inputs to function must be Tensors")
  use_data_dep_ = use_data_dep
  if use_data_dep_ == _USE_DEFAULT:
    use_data_dep_ = _is_on_tpu()

  # Use custom_gradient and return a grad_fn that recomputes on the backwards
  # pass.
  @custom_gradient.custom_gradient
  def fn_with_recompute(*args):
    """Wrapper for fn."""
    # Capture the variable and arg scopes so we can re-enter them when
    # recomputing.
    vs = variable_scope.get_variable_scope()
    arg_scope = contrib_framework_ops.current_arg_scope()
    # Track all variables touched in the function.
    with backprop.GradientTape() as tape:
      fn_kwargs = {}
      if has_is_recompute_kwarg:
        fn_kwargs["is_recomputing"] = False
      outputs = fn(*args, **fn_kwargs)
    original_vars = set(tape.watched_variables())

    def _grad_fn(output_grads, variables=None):
      # Validate that custom_gradient passes the right variables into grad_fn.
      if original_vars:
        assert variables, ("Fn created variables but the variables were not "
                           "passed to the gradient fn.")
        if set(variables) != original_vars:
          raise ValueError(_WRONG_VARS_ERR)

      return _recomputing_grad_fn(
          compute_fn=fn,
          original_args=args,
          original_vars=original_vars,
          output_grads=output_grads,
          grad_fn_variables=variables,
          use_data_dep=use_data_dep_,
          tupleize_grads=tupleize_grads,
          arg_scope=arg_scope,
          var_scope=vs,
          has_is_recompute_kwarg=has_is_recompute_kwarg)

    # custom_gradient inspects the signature of the function to determine
    # whether the user expects variables passed in the grad_fn. If the function
    # created variables, the grad_fn should accept the "variables" kwarg.
    if original_vars:
      def grad_fn(*output_grads, **kwargs):
        return _grad_fn(output_grads, kwargs["variables"])
    else:
      def grad_fn(*output_grads):
        return _grad_fn(output_grads)

    return outputs, grad_fn

  return fn_with_recompute(*args)
Example 9: export
def export(self,
           estimator,
           export_path,
           checkpoint_path=None,
           eval_result=None):
  """Exports the given Estimator to a specific format.

  Args:
    estimator: the Estimator to export.
    export_path: A string containing a directory where to write the export.
    checkpoint_path: The checkpoint path to export. If None (the default),
      the strategy may locate a checkpoint (e.g. the most recent) by itself.
    eval_result: The output of Estimator.evaluate on this checkpoint. This
      should be set only if checkpoint_path is provided (otherwise it is
      unclear which checkpoint this eval refers to).

  Returns:
    The string path to the exported directory.

  Raises:
    ValueError: if the export_fn does not have the required signature
  """
  # don't break existing export_fns that don't accept checkpoint_path and
  # eval_result
  export_fn_args = tf_inspect.getargspec(self.export_fn).args
  kwargs = {}
  if 'checkpoint_path' in export_fn_args:
    kwargs['checkpoint_path'] = checkpoint_path
  if 'eval_result' in export_fn_args:
    if 'checkpoint_path' not in export_fn_args:
      raise ValueError('An export_fn accepting eval_result must also accept '
                       'checkpoint_path.')
    kwargs['eval_result'] = eval_result
  return self.export_fn(estimator, export_path, **kwargs)
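Because export() inspects self.export_fn, an export function only receives checkpoint_path or eval_result if it declares them. A hypothetical export_fn that opts into both (the name and body are illustrative):

def my_export_fn(estimator, export_path, checkpoint_path=None, eval_result=None):
  # Declaring both names is required: an export_fn that accepts eval_result
  # without checkpoint_path makes export() above raise ValueError.
  del estimator, checkpoint_path, eval_result  # unused in this sketch
  return export_path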
Example 10: _check_method_supports_args
def _check_method_supports_args(method, kwargs):
  """Checks that the given method supports the given args."""
  supported_args = tuple(tf_inspect.getargspec(method).args)
  for kwarg in kwargs:
    if kwarg not in supported_args:
      raise ValueError(
          'Argument `{}` is not supported in method {}.'.format(kwarg, method))
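Hypothetical usage of the check above (the Trainer class and argument names are made up; _check_method_supports_args and tf_inspect are assumed to be in scope):

class Trainer(object):

  def minimize(self, loss, var_list=None):
    pass

_check_method_supports_args(Trainer().minimize, {'var_list': None})  # passes
_check_method_supports_args(Trainer().minimize, {'clip_norm': 1.0})  # raises ValueError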
Example 11: _get_arg_infos
def _get_arg_infos(func, elementwise_args):
  """Returns `_ArgInfo`s for each `func` arg specified by `elementwise_args`.

  Args:
    func: The function whose arguments should be described.
    elementwise_args: The names of the arguments to get info for.

  Returns:
    A dictionary that maps both names and positions of arguments to
    `_ArgInfo` tuples.
  """
  arg_infos = {}

  # Inspect the func's argspec to find the position of each arg.
  arg_spec = tf_inspect.getargspec(func)
  for argname in elementwise_args:
    assert isinstance(argname, str)
    is_list = argname.startswith('[') and argname.endswith(']')
    if is_list:
      argname = argname[1:-1]
    assert argname in arg_spec.args, (func, argname, arg_spec.args)
    arg_info = _ArgInfo(argname, arg_spec.args.index(argname), is_list)
    arg_infos[arg_info.name] = arg_info
    arg_infos[arg_info.position] = arg_info
  return arg_infos
Example 12: end
def end(self, session):
  self._last_step = None
  for m in self._monitors:
    if "session" in tf_inspect.getargspec(m.end).args:
      m.end(session=session)
    else:
      m.end()
Example 13: call
def call(self, inputs, training=None, mask=None):
  kwargs = {}
  func_args = tf_inspect.getargspec(self.layer.call).args
  if 'training' in func_args:
    kwargs['training'] = training
  if 'mask' in func_args:
    kwargs['mask'] = mask

  y = self.forward_layer.call(inputs, **kwargs)
  y_rev = self.backward_layer.call(inputs, **kwargs)
  if self.return_sequences:
    y_rev = K.reverse(y_rev, 1)
  if self.merge_mode == 'concat':
    output = K.concatenate([y, y_rev])
  elif self.merge_mode == 'sum':
    output = y + y_rev
  elif self.merge_mode == 'ave':
    output = (y + y_rev) / 2
  elif self.merge_mode == 'mul':
    output = y * y_rev
  elif self.merge_mode is None:
    output = [y, y_rev]

  # Properly set learning phase
  if 0 < self.layer.dropout + self.layer.recurrent_dropout:
    if self.merge_mode is None:
      for out in output:
        out._uses_learning_phase = True
    else:
      output._uses_learning_phase = True
  return output
Example 14: check_accepts
def check_accepts(f):
  """Check the types."""
  spec = tf_inspect.getargspec(f)

  num_function_arguments = len(spec.args)
  if len(types) != num_function_arguments:
    raise Error(
        "Function %r has %d arguments but only %d types were provided in the "
        "annotation." % (f, num_function_arguments, len(types)))

  if spec.defaults:
    num_defaults = len(spec.defaults)
    for (name, a, t) in zip(spec.args[-num_defaults:],
                            spec.defaults,
                            types[-num_defaults:]):
      allowed_type = _replace_forward_references(t, f.__globals__)
      if not isinstance(a, allowed_type):
        raise Error("default argument value %r of type %r is not an instance "
                    "of the allowed type %s for the %s argument to %r"
                    % (a, type(a), _type_repr(allowed_type), name, f))

  @functools.wraps(f)
  def new_f(*args, **kwds):
    """A helper function."""
    for (a, t) in zip(args, types):
      allowed_type = _replace_forward_references(t, f.__globals__)
      if not isinstance(a, allowed_type):
        raise Error("%r of type %r is not an instance of the allowed type %s "
                    "for %r" % (a, type(a), _type_repr(allowed_type), f))
    return f(*args, **kwds)

  return new_f
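check_accepts is the inner function of a decorator factory: types, Error, _replace_forward_references and _type_repr come from an enclosing scope that is not shown here. A simplified, self-contained sketch of the same pattern, without the forward-reference handling of the original:

import functools

def accepts(*types):
  """Returns a decorator that type-checks positional arguments against types."""

  def check_accepts(f):

    @functools.wraps(f)
    def new_f(*args, **kwds):
      for a, t in zip(args, types):
        if not isinstance(a, t):
          raise TypeError('%r of type %r is not an instance of %s for %r' %
                          (a, type(a), t, f))
      return f(*args, **kwds)

    return new_f

  return check_accepts

@accepts(int, str)
def repeat(count, text):
  return text * count

repeat(3, 'ab')      # 'ababab'
# repeat('3', 'ab')  # would raise TypeError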
Example 15: get_args
def get_args(symbol):
  if hasattr(inspect, "signature"):
    signature = inspect.signature(symbol)
    # Ignore *args and **kwargs for now.
    return [param.name for param in signature.parameters.values()
            if param.kind == param.POSITIONAL_OR_KEYWORD]
  return tf_inspect.getargspec(symbol)[0]
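Hypothetical usage of the helper above (assumes get_args and its imports are in scope): on Python 3 the inspect.signature branch is taken, so *args, **kwargs and keyword-only parameters are ignored.

def example(a, b, *args, c=1, **kwargs):
  return a

print(get_args(example))  # ['a', 'b']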