This article collects typical usage examples of the Python function tensorflow.python.eager.graph_only_ops.graph_placeholder. If you are wondering what graph_placeholder does, how to call it, or what real-world uses of it look like, the curated code samples below should help.
The following presents 14 code examples of the graph_placeholder function, sorted by popularity by default.
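Before the examples, here is a minimal usage sketch of the call pattern they all share: graph_placeholder(dtype, shape, name=None) creates a Placeholder op directly in the current default graph, which is why TensorFlow's eager function tracing uses it to build function inputs and captured values. Note that graph_only_ops is a TensorFlow-internal module; the import path and two-argument signature below are taken from the examples on this page, and the surrounding Graph/Session scaffolding is illustrative only (TF 1.x style) and may need adjusting for your TensorFlow version.

import numpy as np

from tensorflow.python.client import session
from tensorflow.python.eager import graph_only_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops

with ops.Graph().as_default() as g:
  # Build a Placeholder op directly in the default graph, without going
  # through the public tf.placeholder wrapper.
  x = graph_only_ops.graph_placeholder(dtypes.float32, shape=(2,), name="x")
  y = math_ops.square(x)
  with session.Session(graph=g) as sess:
    # Feed the placeholder like any other placeholder tensor.
    print(sess.run(y, feed_dict={x: np.array([1.0, 2.0], dtype=np.float32)}))  # [1. 4.]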
Example 1: capture_value
def capture_value(tensor_map, value, dtype, name):
  """Capture a value from outside the function, to pass in as an extra arg."""
  captured_value = tensor_map.get(ops.tensor_id(value), None)
  if captured_value is None:
    captured_value = graph_placeholder(
        dtype=dtype or value.dtype, shape=value.shape, name=name)
    if captured_value.dtype == dtypes_module.resource:
      handle_data = value._handle_data  # pylint: disable=protected-access
      captured_value._handle_data = handle_data  # pylint: disable=protected-access
      if handle_data is not None and handle_data.is_set:
        # Ensure that shapes and dtypes are propagated.
        shapes, types = zip(*[(pair.shape, pair.dtype)
                              for pair in handle_data.shape_and_type])
        ranks = [len(s.dim) if not s.unknown_rank else -1 for s in shapes]
        shapes = [[d.size for d in s.dim]
                  if not s.unknown_rank else None for s in shapes]
        with errors.raise_exception_on_not_ok_status() as status:
          pywrap_tensorflow.TF_GraphSetOutputHandleShapesAndTypes_wrapper(
              captured_value._op._graph._c_graph,  # pylint: disable=protected-access
              captured_value._as_tf_output(),  # pylint: disable=protected-access
              shapes,
              ranks,
              types,
              status)

    tensor_map[ops.tensor_id(value)] = (value, captured_value)
  else:
    captured_value = captured_value[1]
  tape.record_operation("captured_value", [captured_value], [value],
                        lambda x: [x])
  return captured_value
Developer ID: AndrewTwinz, Project: tensorflow, Lines of code: 31, Source: function.py
Example 2: testGraphPlaceholder
def testGraphPlaceholder(self):
  x_tf = graph_only_ops.graph_placeholder(dtypes.int32, shape=(1,))
  y_tf = math_ops.square(x_tf)
  with self.cached_session() as sess:
    x = np.array([42])
    y = sess.run(y_tf, feed_dict={x_tf: np.array([42])})
    self.assertAllClose(np.square(x), y)
Developer ID: JonathanRaiman, Project: tensorflow, Lines of code: 7, Source: graph_only_ops_test.py
Example 3: _compute_backprop
def _compute_backprop(self):
  """Computes the backprop function object for this function."""
  self._has_backprop = True
  with self._graph.as_default(), context.graph_mode():
    c = _CapturingContext()
    with c:
      filtered_outputs = [
          x for x in self._returns if x is not None
      ]
      self._out_grad_placeholders = [
          graph_placeholder(x.dtype, x.shape) for x in filtered_outputs
      ]
      in_gradients = gradients_impl.gradients(
          filtered_outputs,
          self._input_placeholders,
          grad_ys=self._out_grad_placeholders)
      shapes = [x.shape for x in in_gradients if x is not None]
  captures = list(sorted(c.captured_tensors, key=lambda x: x.name))
  forward_function_def = make_function_def(
      self._graph, self._ops, self._input_placeholders,
      filtered_outputs + captures)
  self._forward_fdef = _DefinedFunction(forward_function_def)
  _register_with_name(_forward_name(self._func_name), forward_function_def)
  backward_outputs = [x for x in in_gradients if x is not None]
  all_inputs = self._out_grad_placeholders + captures
  backward_function_def = make_function_def(
      self._graph, [x.op for x in self._out_grad_placeholders
                   ] + list(sorted(c.known_ops, key=lambda x: x.name)),
      all_inputs, backward_outputs)
  _register_with_name(_backward_name(self._func_name), backward_function_def)
  self._backward_function = _GraphModeFunction(
      all_inputs, [], backward_function_def, self._graph, c.known_ops,
      in_gradients, _map_sequence_obj_to_idx(backward_outputs), shapes)
Developer ID: SylChan, Project: tensorflow, Lines of code: 33, Source: function.py
Example 4: _convert_to_graph_tensor
def _convert_to_graph_tensor(value, dtype=None, name=None, as_ref=False):
  """Captures a Tensor while building a graph mode function.

  Arguments:
    value: A Tensor object.
    dtype: The datatype of the value produced by the node in the graph.
    name: Name of the node in the graph.
    as_ref: Ignored (required by register_tensor_conversion_function).

  Returns:
    Returns a constant (the current value of the tensor) if capturing
    is not enabled. A placeholder which will have the value of the
    tensor at runtime otherwise.
  """
  if context.in_eager_mode():
    return value
  _ = as_ref
  tensor_map = _scoped_captures.tensors
  if tensor_map is None:
    # Capturing is not enabled.
    return constant_op.constant(value.numpy())
  captured_value = tensor_map.get(ops.tensor_id(value), None)
  if captured_value is None:
    captured_value = graph_placeholder(
        dtype=dtype or value.dtype, shape=value.shape, name=name)
    if captured_value.dtype == dtypes.resource:
      captured_value._handle_data = value._handle_data  # pylint: disable=protected-access
    tensor_map[ops.tensor_id(value)] = (value, captured_value)
  else:
    captured_value = captured_value[1]
  tape.record_operation("captured_value", [captured_value], [value], [],
                        lambda x: x)
  return captured_value
Developer ID: Mazecreator, Project: tensorflow, Lines of code: 33, Source: function.py
Example 5: _compute_backprop
def _compute_backprop(self):
  """Computes the backprop function object for this function."""
  self._has_backprop = True
  with self._graph.as_default(), context.graph_mode():
    c = _CapturingContext()
    with c:
      filtered_outputs = [x for x in self._returns if x is not None]
      self._out_grad_placeholders = [
          graph_placeholder(x.dtype, x.shape) for x in filtered_outputs]
      in_gradients = gradients_impl.gradients(
          filtered_outputs,
          self._input_placeholders,
          grad_ys=self._out_grad_placeholders)
  shapes = tuple(x.shape for x in in_gradients if x is not None)
  captures = list(sorted(c.captured_tensors, key=lambda x: x.name))
  forward_name = _forward_name(self._func_name)
  self._forward_fdef = _EagerDefinedFunction(
      forward_name, self._graph, self._ops, self._input_placeholders,
      filtered_outputs + captures)
  backward_outputs = tuple(x for x in in_gradients if x is not None)
  all_inputs = self._out_grad_placeholders + captures
  # Excluding input ops from the body as we do not intend to execute these
  # operations when the function is executed.
  all_ignored_ops = frozenset(x.op for x in all_inputs)
  # Enforce a deterministic order of operations in the generated graph. This
  # means rerunning the function-defining code will always define the same
  # function, which is useful if we serialize this etc.
  function_def_ops = tuple(x
                           for x in sorted(c.known_ops, key=lambda x: x.name)
                           if x not in all_ignored_ops)
  bname = _backward_name(self._func_name)
  self._backward_function = GraphModeFunction(
      bname, all_inputs, [], self._graph, function_def_ops,
      backward_outputs, in_gradients, shapes)
Developer ID: AbhinavJain13, Project: tensorflow, Lines of code: 34, Source: function.py
Example 6: _convert_to_graph_constant
def _convert_to_graph_constant(value, dtype=None, name=None, as_ref=False):
  """Captures a tfe Tensor while building a graph mode function.

  Creates a placeholder to pass the tensor as an argument.

  Arguments:
    value: A tfe.Tensor object
    dtype: The datatype of the value produced by the node in the graph.
    name: Name of the node in the graph.
    as_ref: Ignored (required by register_tensor_conversion_function).

  Returns:
    A placeholder which will, at runtime, have the value of this tensor.

  Raises:
    ValueError: if called outside a defun context.
  """
  if context.in_eager_mode():
    return value
  _ = as_ref
  tensor_map = _scoped_captures.tensors
  if tensor_map is None:
    raise ValueError(
        "Trying to use tfe.Tensor objects in a graph outside graph mode. "
        "To build a graph use tfe.defun or tfe.make_template.")
  captured_value = tensor_map.get(ops.tensor_id(value), None)
  if captured_value is None:
    captured_value = graph_placeholder(
        dtype=dtype or value.dtype, shape=value.shape, name=name)
    if captured_value.dtype == dtypes.resource:
      captured_value._handle_data = value._handle_data  # pylint: disable=protected-access
    tensor_map[ops.tensor_id(value)] = (value, captured_value)
  else:
    captured_value = captured_value[1]
  return captured_value
Developer ID: chdinh, Project: tensorflow, Lines of code: 35, Source: function.py
Example 7: _create_substitute_placeholder
def _create_substitute_placeholder(value, name=None, dtype=None):
  """Creates a placeholder for `value` and propagates shape info to it."""
  # Note: setting ops.control_dependencies(None) ensures we always put
  # capturing placeholders outside of any control flow context.
  with ops.control_dependencies(None):
    placeholder = graph_placeholder(
        dtype=dtype or value.dtype, shape=value.shape, name=name)
  custom_gradient.copy_handle_data(value, placeholder)
  return placeholder
Developer ID: rmlarsen, Project: tensorflow, Lines of code: 9, Source: func_graph.py
Example 8: _get_defun_inputs
def _get_defun_inputs(args):
  """Maps the inputs args to graph inputs."""
  ret = []
  flat_args = nest.flatten(args)
  for a in flat_args:
    if isinstance(a, ops.Tensor):
      ret.append(graph_placeholder(a.dtype, a.shape))
    else:
      ret.append(a)
  return nest.pack_sequence_as(args, ret)
Developer ID: Jackiefan, Project: tensorflow, Lines of code: 10, Source: function.py
Example 9: _get_defun_inputs
def _get_defun_inputs(args):
  """Maps the inputs args to graph inputs."""
  ret = []
  for a in args:
    if isinstance(a, ops.Tensor):
      ret.append(graph_placeholder(a.dtype, a.shape))
    elif type(a) in (tuple, list):
      ret.append(_get_defun_inputs(a))
    else:
      ret.append(a)
  return tuple(ret) if type(args) is tuple else ret
Developer ID: SylChan, Project: tensorflow, Lines of code: 11, Source: function.py
Example 10: capture_value
def capture_value(tensor_map, value, dtype, name):
  """Capture a value from outside the function, to pass in as an extra arg."""
  captured_value = tensor_map.get(ops.tensor_id(value), None)
  if captured_value is None:
    captured_value = graph_placeholder(
        dtype=dtype or value.dtype, shape=value.shape, name=name)
    if captured_value.dtype == dtypes.resource:
      captured_value._handle_data = value._handle_data  # pylint: disable=protected-access
    tensor_map[ops.tensor_id(value)] = (value, captured_value)
  else:
    captured_value = captured_value[1]
  tape.record_operation("captured_value", [captured_value], [value],
                        lambda x: [x])
  return captured_value
Developer ID: SylChan, Project: tensorflow, Lines of code: 14, Source: function.py
Example 11: _construct_backprop_function
def _construct_backprop_function(self):
  """Constructs the backprop function object for this function."""
  with self._graph.as_default(), context.graph_mode():
    c_known_ops = set()
    c_captured_tensors = set()

    def add_op_internal(op):
      if op.type in ["Variable", "VariableV2", "VarHandleOp"]:
        raise ValueError("tfe.defun cannot capture variables created without "
                         "using tf.get_variable. Op: %s" % op)
      c_known_ops.add(op)
      for i in op.inputs:
        if i.op not in c_known_ops:
          c_captured_tensors.add(i)

    c = HelperContext(add_op_internal)

    with c:
      filtered_outputs = [x for x in self._returns if x is not None]
      self._out_grad_placeholders = [
          graph_placeholder(x.dtype, x.shape) for x in filtered_outputs]
      in_gradients = gradients_impl.gradients(
          filtered_outputs,
          self._input_placeholders,
          grad_ys=self._out_grad_placeholders)

  backward_outputs = tuple(
      grad for grad in _flatten(in_gradients) if grad is not None)
  output_shapes = tuple(grad.shape for grad in backward_outputs)

  captures = list(sorted(c_captured_tensors, key=lambda x: x.name))
  forward_name = _forward_name(self._func_name)
  self._forward_fdef = _EagerDefinedFunction(
      forward_name, self._graph, self._ops, self._input_placeholders,
      filtered_outputs + captures)
  all_inputs = self._out_grad_placeholders + captures
  # Excluding input ops from the body as we do not intend to execute these
  # operations when the function is executed.
  all_ignored_ops = frozenset(x.op for x in all_inputs)
  # Enforce a deterministic order of operations in the generated graph. This
  # means rerunning the function-defining code will always define the same
  # function, which is useful if we serialize this etc.
  function_def_ops = tuple(x
                           for x in sorted(c_known_ops, key=lambda x: x.name)
                           if x not in all_ignored_ops)
  bname = _backward_name(self._func_name)
  self._backward_function = GraphModeFunction(
      bname, all_inputs, [], self._graph, function_def_ops,
      backward_outputs, in_gradients, output_shapes)
Developer ID: AndrewTwinz, Project: tensorflow, Lines of code: 49, Source: function.py
Example 12: _get_defun_inputs
def _get_defun_inputs(flat_args, names, structure):
  """Maps python function args to graph-construction inputs.

  Args:
    flat_args: A flat list of user-specified arguments.
    names: A list of strings with user-specified argument names, same length as
      `flat_args`. May be `None`, in which case a generic name is used.
    structure: The original argument list or dictionary.

  Returns:
    Placeholders with the same structure as `structure`.
  """
  func_graph = ops.get_default_graph()
  function_inputs = []
  if names is None:
    names = [None] * len(flat_args)
  for arg_value, name in zip(flat_args, names):
    for arg in nest.flatten(arg_value):
      if isinstance(arg, (ops.Tensor, tensor_spec.TensorSpec)):
        if isinstance(arg, tensor_spec.TensorSpec) and arg.name:
          requested_name = arg.name
        else:
          requested_name = name
        placeholder = graph_placeholder(
            arg.dtype, arg.shape,
            name=requested_name)
        if name is not None:
          # Record the requested/user-specified name in case it's different than
          # the uniquified name, for validation when exporting signatures.
          placeholder.op._set_attr(  # pylint: disable=protected-access
              "_user_specified_name",
              attr_value_pb2.AttrValue(s=compat.as_bytes(requested_name)))
        function_inputs.append(placeholder)
      elif isinstance(arg, resource_variable_ops.ResourceVariable):
        # Capture arg variables to create placeholders for them. These will be
        # removed as captures after the function is traced (since otherwise we'd
        # just add it back with a new placeholder when the variable was
        # referenced).
        placeholder = func_graph.capture(arg.handle, name=name)
        placeholder.op._set_attr(  # pylint: disable=protected-access
            "_user_specified_name",
            attr_value_pb2.AttrValue(s=compat.as_bytes(name)))
        function_inputs.append(arg)
      else:
        function_inputs.append(arg)
  return nest.pack_sequence_as(structure, function_inputs)
Developer ID: rmlarsen, Project: tensorflow, Lines of code: 46, Source: func_graph.py
Example 13: capture_value
def capture_value(tensor_map, value, dtype, name):
  """Capture a value from outside the function, to pass in as an extra arg."""
  captured_value = tensor_map.get(ops.tensor_id(value), None)
  if captured_value is None:
    captured_value = graph_placeholder(
        dtype=dtype or value.dtype, shape=value.shape, name=name)
    if captured_value.dtype == dtypes_module.resource:
      if ops._USE_C_SHAPES:  # pylint: disable=protected-access
        if isinstance(value, ops.EagerTensor):
          handle_data = value._handle_data  # pylint: disable=protected-access
        else:
          handle_data = resource_variable_ops.get_resource_handle_data(value)
      else:
        handle_data = value._handle_data  # pylint: disable=protected-access
      if handle_data is not None and handle_data.is_set:
        # pylint: disable=protected-access
        if ops._USE_C_SHAPES:
          pywrap_tensorflow.SetResourceHandleShapeAndType(
              captured_value.graph._c_graph, captured_value._as_tf_output(),
              handle_data.SerializeToString())
        else:
          captured_value._handle_data = handle_data
        # pylint: enable=protected-access
        # Ensure that shapes and dtypes are propagated.
        shapes, types = zip(*[(pair.shape, pair.dtype)
                              for pair in handle_data.shape_and_type])
        ranks = [len(s.dim) if not s.unknown_rank else -1 for s in shapes]
        shapes = [[d.size for d in s.dim]
                  if not s.unknown_rank else None for s in shapes]
        pywrap_tensorflow.TF_GraphSetOutputHandleShapesAndTypes_wrapper(
            captured_value._op._graph._c_graph,  # pylint: disable=protected-access
            captured_value._as_tf_output(),  # pylint: disable=protected-access
            shapes, ranks, types)

    tensor_map[ops.tensor_id(value)] = (value, captured_value)
  else:
    captured_value = captured_value[1]
  tape.record_operation("captured_value", [captured_value], [value],
                        lambda x: [x])
  return captured_value
Developer ID: Jackiefan, Project: tensorflow, Lines of code: 40, Source: function.py
Example 14: _get_defun_inputs
def _get_defun_inputs(args, names, structure, flat_shapes=None):
  """Maps python function args to graph-construction inputs.

  Args:
    args: A flat list of user-specified arguments.
    names: A list of strings with user-specified argument names, same length as
      `args`. May be `None`, in which case a generic name is used.
    structure: The original argument list or dictionary.
    flat_shapes: A flat list of values that are either `None` or
      instances of `TensorShape`. If provided, then length must match
      that of `nest.flatten(args)`; and locations where `args` are
      instances of `Tensor` must have a corresponding `TensorShape` in
      `flat_shapes`. May be `None`, in which case exact shapes are read
      directly from the args.

  Returns:
    Placeholders with the same structure as `structure`.

  Raises:
    RuntimeError: if `flat_shapes` is provided, but
      `len(flat_shapes) != len(nest.flatten(args))`.
    RuntimeError: if a shape from `flat_shapes` is not None
      for an argument that is not a `Tensor`, `TensorSpec`,
      or `ResourceVariable`.
  """
  func_graph = ops.get_default_graph()
  function_inputs = []
  if names is None:
    names = [None] * len(args)
  if flat_shapes is None:
    shapes_iter = itertools.repeat(None)
  else:
    len_flat_args = len(nest.flatten(args))
    if len_flat_args != len(flat_shapes):
      raise RuntimeError(
          "Length of fully flat shapes (%d) must match that of "
          "flatten(args) (%d). args: %s, flat_shapes: %s"
          % (len(flat_shapes),
             len_flat_args,
             args,
             flat_shapes))
    shapes_iter = iter(flat_shapes)
  for arg_value, name in zip(args, names):
    flattened = nest.flatten(arg_value)
    tensor_specs = [
        arg for arg in flattened if isinstance(arg, tensor_spec.TensorSpec)
    ]
    specified_names = [arg.name for arg in tensor_specs if arg.name]
    if specified_names and len(specified_names) < len(tensor_specs):
      raise ValueError("If specifying TensorSpec names for nested structures, "
                       "either zero or all names have to be specified.")

    for arg in flattened:
      # We have a shape entry for each arg, regardless of whether it's a real
      # Tensor or not. For non-tensor entries it should be None.
      shape = next(shapes_iter)
      if isinstance(arg, (ops.Tensor, tensor_spec.TensorSpec)):
        if isinstance(arg, tensor_spec.TensorSpec) and arg.name:
          requested_name = arg.name
        else:
          requested_name = name
        placeholder_shape = shape if shape is not None else arg.shape
        try:
          placeholder = graph_placeholder(
              arg.dtype, placeholder_shape,
              name=requested_name)
        except ValueError:
          # Sometimes parameter names are not valid op names, so fall back to
          # unnamed placeholders.
          placeholder = graph_placeholder(arg.dtype, placeholder_shape)
        if name is not None:
          # Record the requested/user-specified name in case it's different than
          # the uniquified name, for validation when exporting signatures.
          placeholder.op._set_attr(  # pylint: disable=protected-access
              "_user_specified_name",
              attr_value_pb2.AttrValue(s=compat.as_bytes(requested_name)))
        function_inputs.append(placeholder)
      elif isinstance(arg, resource_variable_ops.ResourceVariable):
        # Capture arg variables to create placeholders for them. These will be
        # removed as captures after the function is traced (since otherwise we'd
        # just add it back with a new placeholder when the variable was
        # referenced).
        placeholder = func_graph.capture(arg.handle, name=name)
        placeholder.op._set_attr(  # pylint: disable=protected-access
            "_user_specified_name",
            attr_value_pb2.AttrValue(s=compat.as_bytes(name)))
        function_inputs.append(arg)
      else:
        if shape is not None:
          raise RuntimeError(
              "Expected provided shape override to be None for arg that isn't "
              "a Tensor, but saw arg: '%s', shape: '%s'. args: %s"
              % (arg, shape, args))
        function_inputs.append(arg)
  return nest.pack_sequence_as(structure, function_inputs)
Developer ID: kylin9872, Project: tensorflow, Lines of code: 95, Source: func_graph.py
Note: The tensorflow.python.eager.graph_only_ops.graph_placeholder examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by many developers; copyright remains with the original authors, and distribution or use should follow the corresponding project's license. Do not repost without permission.