This article collects typical usage examples of the Python function tensorflow.python.util.tf_decorator.make_decorator. If you are wondering what exactly make_decorator does, how to use it, or where to find examples of it in practice, the curated code samples below should help.
The following shows 20 code examples of the make_decorator function, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help our system recommend better Python examples.
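Before the examples, here is a minimal sketch of the common calling pattern (the `target`/`wrapper` names are hypothetical, not taken from any snippet below): `make_decorator(target, decorator_func)` returns `decorator_func` with the target's metadata attached, so introspection tools such as `tf_inspect` can see through the wrapper.
from tensorflow.python.util import tf_decorator

def target(a, b=1):
  """Adds two numbers."""
  return a + b

def wrapper(*args, **kwargs):
  # Forward everything to the wrapped target.
  return target(*args, **kwargs)

decorated = tf_decorator.make_decorator(target, wrapper)
print(decorated.__name__)              # "target" -- copied from the target
print(decorated.__wrapped__ is target)  # True -- the original stays reachable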
Example 1: decorated
def decorated(function):
  try:
    name = function.__name__
  except AttributeError:
    name = "function"
  return tf_decorator.make_decorator(
      function, named_defun(function, name, compiled=compiled))
Developer: KiaraStarlab, Project: tensorflow, Lines: 7, Source: function.py
Example 2: must_use_result_or_fatal
def must_use_result_or_fatal(fn):
  """Function wrapper that ensures the function's output is used.

  If the output is not used, a `tf.compat.v1.logging.fatal` error is raised.

  An output is marked as used if any of its attributes are read, modified, or
  updated. Examples when the output is a `Tensor` include:

  - Using it in any capacity (e.g. `y = t + 0`, `sess.run(t)`)
  - Accessing a property (e.g. getting `t.name` or `t.op`).

  Note, certain behaviors cannot be tracked - for these the object may not
  be marked as used. Examples include:

  - `t != 0`. In this case, comparison is done on types / ids.
  - `isinstance(t, tf.Tensor)`. Similar to above.

  Args:
    fn: The function to wrap.

  Returns:
    The wrapped function.
  """

  def wrapped(*args, **kwargs):
    return _add_should_use_warning(fn(*args, **kwargs), fatal_error=True)

  return tf_decorator.make_decorator(
      fn, wrapped, 'must_use_result_or_fatal',
      ((fn.__doc__ or '') +
       ('\n\n  '
        '**NOTE** The output of this function must be used. If it is not, '
        'a fatal error will be raised. To mark the output as used, '
        'call its .mark_used() method.')))
Developer: aritratony, Project: tensorflow, Lines: 32, Source: tf_should_use.py
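A hedged usage sketch (the `read_value` function and variable `v` are hypothetical); per the docstring above, calling `.mark_used()` on the output silences the check explicitly:
@must_use_result_or_fatal
def read_value(v):
  return v.read_value()

t = read_value(v)
t.mark_used()  # Mark the output as used so no fatal error is raised.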
Example 3: testCompatibleWithNamelessCallables
def testCompatibleWithNamelessCallables(self):

  class Callable(object):

    def __call__(self):
      pass

  callable_object = Callable()
  # Smoke test: This should not raise an exception, even though
  # `callable_object` does not have a `__name__` attribute.
  _ = tf_decorator.make_decorator(callable_object, test_wrapper)

  partial = functools.partial(test_function, x=1)
  # Smoke test: This should not raise an exception, even though `partial` does
  # not have `__name__`, `__module__`, and `__doc__` attributes.
  _ = tf_decorator.make_decorator(partial, test_wrapper)
Developer: adit-chandra, Project: tensorflow, Lines: 16, Source: tf_decorator_test.py
Example 4: testUpdatesDict_doesNotOverridePresentEntries
def testUpdatesDict_doesNotOverridePresentEntries(self):
  test_function.foobar = True
  test_wrapper.foobar = False
  decorated = tf_decorator.make_decorator(test_function, test_wrapper)
  self.assertFalse(decorated.foobar)
  del test_function.foobar
  del test_wrapper.foobar
Developer: adit-chandra, Project: tensorflow, Lines: 7, Source: tf_decorator_test.py
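Judging from the test name (`testUpdatesDict...`), the converse behavior is that attributes missing from the wrapper are copied over from the target; a sketch under that assumption, reusing the test module's names:
test_function.baz = 42  # hypothetical attribute, present only on the target
decorated = tf_decorator.make_decorator(test_function, test_wrapper)
assert decorated.baz == 42  # copied, because test_wrapper had no `baz`
del test_function.baz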
Example 5: no_automatic_dependency_tracking
def no_automatic_dependency_tracking(method):
  """Disables automatic dependency tracking on attribute assignment.

  Use to decorate any method of a Checkpointable object. Attribute assignment
  in that method will not add dependencies (also respected in Model). Harmless
  if used in a class which does not do automatic dependency tracking (which
  means it's safe to use in base classes which may have subclasses which also
  inherit from Checkpointable).

  Args:
    method: The method to decorate.

  Returns:
    A decorated method which sets and un-sets automatic dependency tracking
    for the object the method is called on (not thread safe).
  """

  def _method_wrapper(self, *args, **kwargs):
    previous_value = getattr(self, "_setattr_tracking", True)
    self._setattr_tracking = False  # pylint: disable=protected-access
    try:
      method(self, *args, **kwargs)
    finally:
      self._setattr_tracking = previous_value  # pylint: disable=protected-access

  return tf_decorator.make_decorator(
      target=method, decorator_func=_method_wrapper)
Developer: neilireson, Project: tensorflow, Lines: 26, Source: base.py
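A hypothetical usage sketch (assuming some Checkpointable subclass): inside the decorated method, attribute assignment skips checkpoint dependency tracking.
class MyModel(Checkpointable):  # any class with automatic dependency tracking

  @no_automatic_dependency_tracking
  def set_scratch(self, tensor):
    self.scratch = tensor  # not recorded as a checkpoint dependency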
Example 6: update_state_wrapper
def update_state_wrapper(update_state_fn):
  """Decorator to wrap metric `update_state()` with `defun()`, `add_update()`.

  Args:
    update_state_fn: function that accumulates metric statistics.

  Returns:
    If eager execution is enabled, returns None.
    If graph execution is enabled, returns an update op. This op should be
    executed to update the metric state with the given inputs.
  """

  def decorated(metric_obj, *args, **kwargs):
    """Decorated function with `defun()` and `add_update()`."""
    # Converting update_state_fn() into a graph function, so that
    # we can return a single op that performs all of the variable updates.
    # Assigning to a different method name to avoid reference cycle.
    defuned_update_state_fn = function.defun(update_state_fn)
    update_op = defuned_update_state_fn(*args, **kwargs)
    if update_op is not None:  # update_op will be None in eager execution.
      metric_obj.add_update(update_op, inputs=True)
    check_is_tensor_or_operation(
        update_op, 'Metric {0}\'s update'.format(metric_obj.name))
    return update_op

  return tf_decorator.make_decorator(update_state_fn, decorated)
Developer: StephenOman, Project: tensorflow, Lines: 27, Source: metrics.py
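Worth noting as a design choice: returning `decorated` through `make_decorator` rather than bare presumably keeps `update_state_fn`'s name, docstring, and signature visible on the resulting `update_state` method, which matters when Keras tooling introspects metric methods via `tf_inspect`.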
Example 7: _defun_with_scope
def _defun_with_scope(self, scope):
  """Creates a defun wrapped inside a variable creator scope."""

  weak_wrapped_fn = None
  def wrapped_fn(*args, **kwds):
    """Wraps `self._python_function` in a variable creator scope."""
    # We register a variable creator with reduced priority. If an outer
    # variable creator is just modifying keyword arguments to the variable
    # constructor, this will work harmoniously. Since the `scope` registered
    # here actually creates the variable, it taking priority would otherwise
    # ignore the outer creator.
    #
    # If an outer variable creator calls the variable constructor manually,
    # for example creating a MirroredVariable, then they won't call our
    # creator. This means we won't be able to trace the initialization graph,
    # and so variable initializers can't depend on function arguments. This
    # is better than the alternative, tracing the initialization graph but
    # giving the user a variable type they didn't want.
    with ops.get_default_graph()._variable_creator_scope(scope, priority=50):  # pylint: disable=protected-access
      # __wrapped__ allows AutoGraph to swap in a converted function. We give
      # the function a weak reference to itself to avoid a reference cycle.
      return weak_wrapped_fn().__wrapped__(*args, **kwds)
  weak_wrapped_fn = weakref.ref(wrapped_fn)

  # TODO(mdan): Pipe self._experimental_autograph_options through.
  return function_lib.defun(
      tf_decorator.make_decorator(self._python_function, wrapped_fn),
      input_signature=self._input_signature,
      autograph=self._autograph,
      experimental_autograph_options=self._experimental_autograph_options)
Developer: kylin9872, Project: tensorflow, Lines: 30, Source: def_function.py
Example 8: with_name_scope
def with_name_scope(cls, method):
  """Decorator to automatically enter the module name scope.

  ```
  class MyModule(tf.Module):
    @tf.Module.with_name_scope
    def __call__(self, x):
      if not hasattr(self, 'w'):
        self.w = tf.Variable(tf.random.normal([x.shape[1], 64]))
      return tf.matmul(x, self.w)
  ```

  Using the above module would produce `tf.Variable`s and `tf.Tensor`s whose
  names included the module name:

  ```
  mod = MyModule()
  mod(tf.ones([8, 32]))
  # ==> <tf.Tensor: ...>
  mod.w
  # ==> <tf.Variable ...'my_module/w:0'>
  ```

  Args:
    method: The method to wrap.

  Returns:
    The original method wrapped such that it enters the module's name scope.
  """

  def method_with_name_scope(self, *args, **kwargs):
    with self.name_scope:
      return method(self, *args, **kwargs)

  return tf_decorator.make_decorator(method, method_with_name_scope)
Developer: aritratony, Project: tensorflow, Lines: 34, Source: module.py
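Note that this example and Example 10 below both return the wrapper through `make_decorator` rather than `functools.wraps`; the practical difference (compare Example 14) is that `tf_inspect` can resolve the original method's signature through the attached `_tf_decorator`.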
Example 9: custom_gradient
def custom_gradient(f):
  """Decorator to define a function with a custom gradient.

  The input function is expected to return the tuple
  (results, gradient_function).

  The output function will return results while possibly recording the
  gradient_function and inputs in the tape.

  Args:
    f: function to be decorated.

  Returns:
    decorated function.
  """

  def decorated(*args, **kwargs):
    """Decorated function with custom gradient."""
    if context.in_graph_mode():
      if kwargs:
        raise ValueError(
            "custom_gradient in graph mode doesn't support keyword arguments.")
      name = "CustomGradient-%s" % tf_ops.uid()
      args = [tf_ops.convert_to_tensor(x) for x in args]
      result, grad_fn = f(*args)
      flat_result = nest.flatten(result)
      all_tensors = flat_result + args

      @tf_ops.RegisterGradient(name)
      def internal_grad_fn(unused_op, *result_grads):  # pylint: disable=unused-variable
        gradients = nest.flatten(grad_fn(*result_grads[:len(flat_result)]))
        # Need to return one value per input to the IdentityN, so pad the
        # gradients of the inputs of the custom_gradient function with the
        # gradients of the outputs as well.
        return ([None] * len(flat_result)) + gradients

      with tf_ops.get_default_graph().gradient_override_map(
          {"IdentityN": name}):
        all_tensors = array_ops.identity_n(all_tensors)
      return nest.pack_sequence_as(
          structure=result, flat_sequence=all_tensors[:len(flat_result)])

    input_tensors = [tf_ops.convert_to_tensor(x) for x in args]
    with tape.stop_recording():
      result, grad_fn = f(*args, **kwargs)

    def actual_grad_fn(*outputs):
      return nest.flatten(grad_fn(*outputs))

    flat_result = nest.flatten(result)
    tape.record_operation(
        f.__name__,
        flat_result,
        input_tensors,
        actual_grad_fn)
    flat_result = list(flat_result)
    return result

  return tf_decorator.make_decorator(f, decorated)
Developer: DjangoPeng, Project: tensorflow, Lines: 60, Source: custom_gradient.py
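For context, a usage sketch in the shape this decorator expects, where the decorated function returns the tuple `(results, gradient_function)` — the classic numerically stable `log1pexp` (a sketch assuming the TF 1.x-era API this snippet targets, with `tf` imported):
@custom_gradient
def log1pexp(x):
  e = tf.exp(x)
  def grad(dy):
    # d/dx log(1 + exp(x)) = 1 - 1 / (1 + exp(x)); avoids overflow for large x.
    return dy * (1 - 1 / (1 + e))
  return tf.log(1 + e), grad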
Example 10: with_name_scope
def with_name_scope(unbound_method):
  """Patches the given method so it enters the module's name scope."""

  def enter_name_scope(self, *args, **kwargs):
    """Decorator that calls the given function in the module name scope.

    Args:
      self: Module instance.
      *args: Positional arguments to `unbound_method`.
      **kwargs: Keyword arguments to `unbound_method`.

    Returns:
      `with self.name_scope: return unbound_method(self, *args, **kwargs)`
    """
    try:
      module_name_scope = self.name_scope
    except AttributeError as exc_value_from:
      exc_value = AttributeError(
          "The super constructor must be called before any other methods in "
          "your constructor. If this is not possible then annotate all the "
          "methods called with `@no_module_name_scope`.")
      six.raise_from(exc_value, exc_value_from)

    with module_name_scope:
      # tf.Module enters the module name scope for all methods. To disable
      # this for a particular method annotate it with `@no_module_name_scope`.
      return unbound_method(self, *args, **kwargs)

  return tf_decorator.make_decorator(unbound_method, enter_name_scope)
Developer: Wajih-O, Project: tensorflow, Lines: 28, Source: module.py
Example 11: kwarg_only
def kwarg_only(f):
  """A wrapper that throws away all non-kwarg arguments."""

  def wrapper(**kwargs):
    return f(**kwargs)

  return tf_decorator.make_decorator(
      f, wrapper, decorator_argspec=tf_inspect.getargspec(f))
Developer: ziky90, Project: tensorflow, Lines: 7, Source: tf_export.py
Example 12: _defun_with_scope
def _defun_with_scope(scope, fn, input_signature):

  def wrapped_fn(*args, **kwds):
    with variable_scope.variable_creator_scope(scope):
      return fn(*args, **kwds)

  return function_lib.defun(tf_decorator.make_decorator(fn, wrapped_fn),
                            input_signature=input_signature)
Developer: becster, Project: tensorflow, Lines: 8, Source: def_function.py
Example 13: defun
def defun(func):
  """Decorator to compile func into graph_mode.

  `defun` converts a function that constructs a TensorFlow graph into a
  function that executes the graph. TensorFlow graphs typically execute faster
  and with a lower memory-footprint than executing each of the operations that
  make up the function individually as the TensorFlow runtime can optimize the
  graph and execute sub-operations in parallel.

  func must be a Python function that constructs a TensorFlow graph,
  typically using functions in the tensorflow module.

  Arguments to func can be either Tensor objects or Python
  objects. Non-Tensor python objects are treated as constants, and new function
  definitions are created internally based on their values.

  func must return a tf.Tensor (NOT a Tensor) or a list of tf.Tensor (NOT a
  Tensor).

  Control flow constructs (e.g., `if`, `while`) are not yet compatible with
  `defun`.

  Example:
  ```python
  def f(x, y):
    return tf.reduce_mean(tf.multiply(x ** 2, 3) + y)

  @tfe.defun
  def g(x, y):
    return tf.reduce_mean(tf.multiply(x ** 2, 3) + y)

  x = tf.constant([[2.0, 3.0]])
  y = tf.constant([[3.0, -2.0]])
  # The plain function and defun-compiled function should return the same
  # value.
  assert f(x, y).numpy() == g(x, y).numpy()

  # After the first invocation, the defun-compiled (graph) function runs
  # faster than the plain function because the defun-compiled function does
  # not involve Python interpreter overhead during the execution.
  %time print(f(x, y))
  %time print(g(x, y))
  ```

  Args:
    func: function to be compiled.

  Returns:
    A callable that will execute the compiled function (and return zero
    or more Tensor objects).
  """
  # TODO(apassos): deal with captured global state. Deal with control flow.
  try:
    name = func.__name__
  except AttributeError:
    name = "function"
  return tf_decorator.make_decorator(func, named_defun(func, name))
Developer: AndrewTwinz, Project: tensorflow, Lines: 56, Source: function.py
Example 14: testSetsTFDecoratorArgSpec
def testSetsTFDecoratorArgSpec(self):
  argspec = tf_inspect.ArgSpec(
      args=['a', 'b', 'c'],
      varargs=None,
      keywords=None,
      defaults=(1, 'hello'))
  decorated = tf_decorator.make_decorator(test_function, test_wrapper, '', '',
                                          argspec)
  decorator = getattr(decorated, '_tf_decorator')
  self.assertEqual(argspec, decorator.decorator_argspec)
Developer: adit-chandra, Project: tensorflow, Lines: 10, Source: tf_decorator_test.py
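As a follow-up, the practical effect (my reading of this test, not part of the original snippet) is that `tf_inspect` reports the overridden argspec rather than the wrapper's own:
print(tf_inspect.getargspec(decorated))
# ArgSpec(args=['a', 'b', 'c'], varargs=None, keywords=None, defaults=(1, 'hello'))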
Example 15: decorated
def decorated(inner_function):
  try:
    name = inner_function.__name__
  except AttributeError:
    name = "function"
  return tf_decorator.make_decorator(
      inner_function,
      PolymorphicFunction(
          inner_function,
          name,
          input_signature=input_signature))
Developer: becster, Project: tensorflow, Lines: 11, Source: def_function.py
Example 16: contextmanager
def contextmanager(target):
  """A tf_decorator-aware wrapper for `contextlib.contextmanager`.

  Usage is identical to `contextlib.contextmanager`.

  Args:
    target: A callable to be wrapped in a contextmanager.

  Returns:
    A callable that can be used inside of a `with` statement.
  """
  context_manager = _contextlib.contextmanager(target)
  return tf_decorator.make_decorator(target, context_manager, 'contextmanager')
Developer: 1000sprites, Project: tensorflow, Lines: 12, Source: tf_contextlib.py
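Since usage is identical to `contextlib.contextmanager`, a hypothetical sketch (`log_scope` is an invented name):
@contextmanager
def log_scope(name):
  print('enter', name)
  try:
    yield
  finally:
    print('exit', name)

with log_scope('step'):
  pass  # body runs between enter/exit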
Example 17: wrap_keras_model_for_export
def wrap_keras_model_for_export(model, batch_input_shape,
                                set_hparams, default_hparams):
  """Wraps `model` for saving and loading as SavedModel."""
  if default_hparams is None: default_hparams = {}
  hparam_keys = list(default_hparams.keys())
  hparam_defaults = tuple(default_hparams.values())

  # The goal is to save a function with this argspec...
  argspec = tf_inspect.FullArgSpec(
      args=(['inputs', 'training'] + hparam_keys),
      defaults=((False,) + hparam_defaults),
      varargs=None, varkw=None,
      kwonlyargs=[], kwonlydefaults=None,
      annotations={})

  # ...and this behavior:
  def call_fn(inputs, training, *args):
    if FLAGS.export_print_hparams:
      args = [tf.keras.backend.print_tensor(args[i], 'training=%s and %s='
                                            % (training, hparam_keys[i]))
              for i in range(len(args))]
    kwargs = dict(zip(hparam_keys, args))
    if kwargs: set_hparams(model, **kwargs)
    return model(inputs, training=training)

  # We cannot spell out `args` in def statement for call_fn, but since
  # tf.function uses tf_inspect, we can use tf_decorator to wrap it with
  # the desired argspec.
  def wrapped(*args, **kwargs):  # TODO(arnoegw): Can we use call_fn itself?
    return call_fn(*args, **kwargs)
  traced_call_fn = tf.function(autograph=False)(
      tf_decorator.make_decorator(call_fn, wrapped, decorator_argspec=argspec))

  # Now we need to trigger traces for
  #   - training set to Python values True or False (hence two traces),
  #   - tensor inputs of the expected nesting, shape and dtype,
  #   - tensor-valued kwargs for hparams, with caller-side defaults.
  # Tracing with partially determined shapes requires an input signature,
  # so we initiate tracing from a helper function with only tensor inputs.
  @tf.function(autograph=False)
  def trigger_traces(inputs, **kwargs):
    return tuple(traced_call_fn(inputs, training=training, **kwargs)
                 for training in (True, False))
  inputs_spec = tf.TensorSpec(shape=batch_input_shape, dtype=tf.float32)
  hparams_spec = {name: tf.TensorSpec.from_tensor(tf.constant(value))
                  for name, value in default_hparams.items()}
  _ = trigger_traces.get_concrete_function(inputs_spec, **hparams_spec)

  # Assemble the output object.
  obj = tf.train.Checkpoint()
  obj.__call__ = traced_call_fn
  obj.trainable_variables = model.trainable_variables
  obj.variables = model.trainable_variables + model.non_trainable_variables
  obj.regularization_losses = [_get_traced_loss(model, i)
                               for i in range(len(model.losses))]
  return obj
Developer: Albert-Z-Guo, Project: tensorflow, Lines: 52, Source: export_mnist_cnn.py
Example 18: decorated
def decorated(inner_function):
  try:
    name = inner_function.__name__
  except AttributeError:
    name = "function"
  return tf_decorator.make_decorator(
      inner_function,
      Function(
          inner_function,
          name,
          input_signature=input_signature,
          autograph=autograph,
          experimental_autograph_options=experimental_autograph_options))
Developer: kylin9872, Project: tensorflow, Lines: 13, Source: def_function.py
Example 19: _defun_with_scope
def _defun_with_scope(self, scope):
  """Creates a defun wrapped inside a variable creator scope."""

  def wrapped_fn(*args, **kwds):
    with variable_scope.variable_creator_scope(scope):
      # __wrapped__ allows AutoGraph to swap in a converted function.
      return wrapped_fn.__wrapped__(*args, **kwds)

  # TODO(mdan): Pipe self._experimental_autograph_options through.
  return function_lib.defun(
      tf_decorator.make_decorator(self._python_function, wrapped_fn),
      input_signature=self._input_signature,
      autograph=self._autograph)
Developer: aeverall, Project: tensorflow, Lines: 13, Source: def_function.py
Example 20: kwarg_only
def kwarg_only(f):
  """A wrapper that throws away all non-kwarg arguments."""
  f_argspec = tf_inspect.getargspec(f)

  def wrapper(*args, **kwargs):
    if args:
      raise TypeError(
          '{f} only takes keyword args (possible keys: {kwargs}). '
          'Please pass these args as kwargs instead.'
          .format(f=f.__name__, kwargs=f_argspec.args))
    return f(**kwargs)

  return tf_decorator.make_decorator(f, wrapper, decorator_argspec=f_argspec)
Developer: adit-chandra, Project: tensorflow, Lines: 13, Source: tf_export.py
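A hypothetical usage sketch of this stricter variant (`make_box` is an invented name):
@kwarg_only
def make_box(width, height):
  return (width, height)

make_box(width=3, height=4)  # OK
make_box(3, 4)  # TypeError: make_box only takes keyword args (possible keys: ['width', 'height']). ...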
Note: The tensorflow.python.util.tf_decorator.make_decorator examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors, and distribution or use should follow the corresponding project's License. Do not reproduce without permission.