This article collects typical usage examples of the Python function tensorflow.python.framework.ops.prepend_name_scope. If you have been wondering what prepend_name_scope does, how to call it, or what real-world usage looks like, the hand-picked examples below should help.
Twelve code examples of prepend_name_scope are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Python code examples.
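Before diving into the examples, here is a minimal sketch of the behavior they all rely on (a hedged illustration assuming a TF 1.x graph-mode environment; the names used are made up): prepend_name_scope prefixes a name stored in a proto with the scope it is being imported into, so the name can be resolved in the destination graph, and strip_name_scope, used by the export paths in Examples 9-12, is its inverse.

from tensorflow.python.framework import ops

# Prefix a saved node/tensor name with the scope it is imported under.
scoped = ops.prepend_name_scope("my_var", import_scope="imported")
# scoped is expected to be "imported/my_var"

# With no import scope, the name is returned unchanged.
unscoped = ops.prepend_name_scope("my_var", import_scope=None)

# strip_name_scope is the inverse used when exporting a scoped graph.
original = ops.strip_name_scope(scoped, export_scope="imported")
# original is expected to be "my_var" again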
Example 1: _init_from_proto
def _init_from_proto(self, variable_def, import_scope=None):
  """Initializes from `VariableDef` proto."""
  assert isinstance(variable_def, variable_pb2.VariableDef)
  if not variable_def.is_resource:
    raise ValueError("Trying to restore Variable as ResourceVariable.")
  # Create from variable_def.
  g = ops.get_default_graph()
  self._handle = g.as_graph_element(
      ops.prepend_name_scope(variable_def.variable_name,
                             import_scope=import_scope))
  self._initialize_op = g.as_graph_element(
      ops.prepend_name_scope(variable_def.initializer_name,
                             import_scope=import_scope))
  if variable_def.snapshot_name:
    self._cached_value = g.as_graph_element(
        ops.prepend_name_scope(variable_def.snapshot_name,
                               import_scope=import_scope))
  else:
    self._cached_value = None
  if variable_def.HasField("save_slice_info_def"):
    self._save_slice_info = variables.Variable.SaveSliceInfo(
        save_slice_info_def=variable_def.save_slice_info_def)
  else:
    self._save_slice_info = None
  self._caching_device = None
  self._dtype = dtypes.as_dtype(self._handle.op.get_attr("dtype"))
Contributor: chenjun0210 | Project: tensorflow | Lines: 27 | Source: resource_variable_ops.py
Example 2: _init_from_proto
def _init_from_proto(self, variable_def, import_scope=None):
  """Creates a new variable from `VariableDef` protocol buffer.

  Args:
    variable_def: `VariableDef` protocol buffer.
    import_scope: Optional `string`. Name scope to add.
  """
  assert isinstance(variable_def, variable_pb2.VariableDef)
  # Create from variable_def.
  g = ops.get_default_graph()
  self._variable = g.as_graph_element(
      ops.prepend_name_scope(variable_def.variable_name,
                             import_scope=import_scope))
  self._initializer_op = g.as_graph_element(
      ops.prepend_name_scope(variable_def.initializer_name,
                             import_scope=import_scope))
  self._snapshot = g.as_graph_element(
      ops.prepend_name_scope(variable_def.snapshot_name,
                             import_scope=import_scope))
  if variable_def.HasField("save_slice_info_def"):
    self._save_slice_info = Variable.SaveSliceInfo(
        save_slice_info_def=variable_def.save_slice_info_def)
  else:
    self._save_slice_info = None
  self._caching_device = None
Contributor: LugarkPirog | Project: tensorflow | Lines: 25 | Source: variables.py
Example 3: _init_from_proto
def _init_from_proto(self, queue_runner_def, import_scope=None):
  """Create a QueueRunner from `QueueRunnerDef`.

  Args:
    queue_runner_def: Optional `QueueRunnerDef` protocol buffer.
    import_scope: Optional `string`. Name scope to add.
  """
  assert isinstance(queue_runner_def, queue_runner_pb2.QueueRunnerDef)
  g = ops.get_default_graph()
  self._queue = g.as_graph_element(
      ops.prepend_name_scope(queue_runner_def.queue_name, import_scope))
  self._enqueue_ops = [g.as_graph_element(
      ops.prepend_name_scope(op, import_scope))
                       for op in queue_runner_def.enqueue_op_name]
  self._close_op = g.as_graph_element(ops.prepend_name_scope(
      queue_runner_def.close_op_name, import_scope))
  self._cancel_op = g.as_graph_element(ops.prepend_name_scope(
      queue_runner_def.cancel_op_name, import_scope))
  self._queue_closed_exception_types = tuple(
      errors.exception_type_from_error_code(code)
      for code in queue_runner_def.queue_closed_exception_types)
  # Legacy support for old QueueRunnerDefs created before this field
  # was added.
  if not self._queue_closed_exception_types:
    self._queue_closed_exception_types = (errors.OutOfRangeError,)
Contributor: AutumnQYN | Project: tensorflow | Lines: 25 | Source: queue_runner_impl.py
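To see where the _init_from_proto above gets exercised, here is a rough round-trip sketch (the queue name and the use of the TF 1.x / tf.compat.v1 queue API are assumptions, not part of Example 3): QueueRunner.to_proto serializes op names into a QueueRunnerDef, and QueueRunner.from_proto rebuilds the runner, resolving each stored name through prepend_name_scope.

import tensorflow as tf
from tensorflow.python.training import queue_runner_impl

queue = tf.FIFOQueue(10, tf.float32, name="my_queue")  # hypothetical queue
enqueue_op = queue.enqueue(tf.constant(1.0))
qr = queue_runner_impl.QueueRunner(queue, [enqueue_op])

# Serialize to a QueueRunnerDef proto, then rebuild from it; from_proto
# delegates to _init_from_proto, which looks each op up by its (scoped) name.
proto = qr.to_proto()
restored = queue_runner_impl.QueueRunner.from_proto(proto)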
Example 4: _init_from_proto
def _init_from_proto(self, variable_def, import_scope=None):
  """Initializes from `VariableDef` proto."""
  # Note that init_from_proto is currently not supported in Eager mode.
  assert not context.executing_eagerly()
  self._in_graph_mode = True
  assert isinstance(variable_def, variable_pb2.VariableDef)
  if not variable_def.is_resource:
    raise ValueError("Trying to restore Variable as ResourceVariable.")
  # Create from variable_def.
  g = ops.get_default_graph()
  self._handle = g.as_graph_element(
      ops.prepend_name_scope(
          variable_def.variable_name, import_scope=import_scope))
  self._shape = tensor_shape.TensorShape(
      self._handle.op.get_attr("shape"))
  self._handle_name = self._handle.name
  self._unique_id = self._handle_name
  self._initializer_op = g.as_graph_element(
      ops.prepend_name_scope(
          variable_def.initializer_name, import_scope=import_scope))
  # Check whether initial_value_name exists for backwards compatibility.
  if (hasattr(variable_def, "initial_value_name") and
      variable_def.initial_value_name):
    self._initial_value = g.as_graph_element(
        ops.prepend_name_scope(variable_def.initial_value_name,
                               import_scope=import_scope))
  else:
    self._initial_value = None
  self._trainable = getattr(variable_def, "trainable", True)
  if variable_def.snapshot_name:
    snapshot = g.as_graph_element(
        ops.prepend_name_scope(
            variable_def.snapshot_name, import_scope=import_scope))
    self._cached_value = snapshot
    while snapshot.op.type != "ReadVariableOp":
      snapshot = snapshot.op.inputs[0]
    self._graph_element = snapshot
  else:
    self._cached_value = None
    # Legacy case for protos without the snapshot name; assume it's the
    # following.
    self._graph_element = g.get_tensor_by_name(
        self._handle.op.name + "/Read/ReadVariableOp:0")
  if variable_def.HasField("save_slice_info_def"):
    self._save_slice_info = variables.Variable.SaveSliceInfo(
        save_slice_info_def=variable_def.save_slice_info_def,
        import_scope=import_scope)
  else:
    self._save_slice_info = None
  self._caching_device = None
  self._dtype = dtypes.as_dtype(self._handle.op.get_attr("dtype"))
  self._constraint = None
  self._cached_shape_as_list = None
Contributor: LiuCKind | Project: tensorflow | Lines: 54 | Source: resource_variable_ops.py
Example 5: __init__
def __init__(self,
             full_name=None,
             full_shape=None,
             var_offset=None,
             var_shape=None,
             save_slice_info_def=None,
             import_scope=None):
  """Create a `SaveSliceInfo`.

  Args:
    full_name: Name of the full variable of which this `Variable` is a
      slice.
    full_shape: Shape of the full variable, as a list of int.
    var_offset: Offset of this `Variable` into the full variable, as a
      list of int.
    var_shape: Shape of this `Variable`, as a list of int.
    save_slice_info_def: `SaveSliceInfoDef` protocol buffer. If not `None`,
      recreates the SaveSliceInfo object from its contents.
      `save_slice_info_def` and other arguments are mutually exclusive.
    import_scope: Optional `string`. Name scope to add. Only used
      when initializing from protocol buffer.
  """
  if save_slice_info_def:
    assert isinstance(save_slice_info_def, variable_pb2.SaveSliceInfoDef)
    self.full_name = ops.prepend_name_scope(
        save_slice_info_def.full_name, import_scope=import_scope)
    self.full_shape = [i for i in save_slice_info_def.full_shape]
    self.var_offset = [i for i in save_slice_info_def.var_offset]
    self.var_shape = [i for i in save_slice_info_def.var_shape]
  else:
    self.full_name = full_name
    self.full_shape = full_shape
    self.var_offset = var_offset
    self.var_shape = var_shape
Contributor: LugarkPirog | Project: tensorflow | Lines: 35 | Source: variables.py
Example 6: _init_from_proto
def _init_from_proto(self, variable_def, import_scope=None):
  """Initializes from `VariableDef` proto."""
  # Note that init_from_proto is currently not supported in Eager mode.
  assert context.in_graph_mode()
  self._in_graph_mode = True
  assert isinstance(variable_def, variable_pb2.VariableDef)
  if not variable_def.is_resource:
    raise ValueError("Trying to restore Variable as ResourceVariable.")
  # Create from variable_def.
  g = ops.get_default_graph()
  self._handle = g.as_graph_element(
      ops.prepend_name_scope(
          variable_def.variable_name, import_scope=import_scope))
  self._shape = tensor_shape.TensorShape(
      self._handle.op.get_attr("shape"))
  self._handle_device = self._handle.device
  self._handle_name = self._handle.name
  self._initializer_op = g.as_graph_element(
      ops.prepend_name_scope(
          variable_def.initializer_name, import_scope=import_scope))
  # Check whether initial_value_name exists for backwards compatibility.
  if (hasattr(variable_def, "initial_value_name") and
      variable_def.initial_value_name):
    self._initial_value = g.as_graph_element(
        ops.prepend_name_scope(variable_def.initial_value_name,
                               import_scope=import_scope))
  else:
    self._initial_value = None
  if variable_def.snapshot_name:
    self._cached_value = g.as_graph_element(
        ops.prepend_name_scope(
            variable_def.snapshot_name, import_scope=import_scope))
  else:
    self._cached_value = None
  if variable_def.HasField("save_slice_info_def"):
    self._save_slice_info = variables.Variable.SaveSliceInfo(
        save_slice_info_def=variable_def.save_slice_info_def,
        import_scope=import_scope)
  else:
    self._save_slice_info = None
  self._caching_device = None
  self._dtype = dtypes.as_dtype(self._handle.op.get_attr("dtype"))
  self._graph_element = self.value()
  self._constraint = None
Contributor: keithc61 | Project: tensorflow | Lines: 45 | Source: resource_variable_ops.py
Example 7: _restore_collections
def _restore_collections(dest_graph, src_meta_graph_def, collection_keys):
  """Restores collections that we need to keep."""
  scope = ""
  for key in collection_keys:
    collection_def = src_meta_graph_def.collection_def[key]
    kind = collection_def.WhichOneof("kind")
    if kind is None:
      tf_logging.error(
          "Cannot identify data type for collection %s. Skipping.", key)
      continue
    from_proto = ops.get_from_proto_function(key)
    if from_proto and kind == "bytes_list":
      proto_type = ops.get_collection_proto_type(key)
      # It is assumed that there are no Variables keys in the collections.
      for value in collection_def.bytes_list.value:
        proto = proto_type()
        proto.ParseFromString(value)
        try:
          new_value = from_proto(proto, import_scope=scope)
        except:
          continue
        dest_graph.add_to_collection(key, new_value)
    else:
      field = getattr(collection_def, kind)
      if kind == "node_list":
        for value in field.value:
          name = ops.prepend_name_scope(value, scope)
          # Since the graph has been optimized, the node may no longer
          # exist.
          try:
            col_op = dest_graph.as_graph_element(name)
          except (TypeError, ValueError, KeyError) as e:
            continue
          dest_graph.add_to_collection(key, col_op)
      elif kind == "int64_list":
        # NOTE(opensource): This force conversion is to work around the
        # fact that Python2 distinguishes between int and long, while
        # Python3 has only int.
        for value in field.value:
          dest_graph.add_to_collection(key, int(value))
      else:
        for value in field.value:
          dest_graph.add_to_collection(key,
                                       ops.prepend_name_scope(value, scope))
Contributor: aritratony | Project: tensorflow | Lines: 44 | Source: trt_convert.py
Example 8: get_element_from_tensor_info
def get_element_from_tensor_info(tensor_info, graph=None, import_scope=None):
  """Returns the element in the graph described by a TensorInfo proto.

  Args:
    tensor_info: A TensorInfo proto describing an Op or Tensor by name.
    graph: The tf.Graph in which tensors are looked up. If None, the current
      default graph is used.
    import_scope: If not None, names in `tensor_info` are prefixed with this
      string before lookup.

  Returns:
    Op or tensor in `graph` described by `tensor_info`.

  Raises:
    KeyError: If `tensor_info` does not correspond to an op or tensor in
      `graph`.
  """
  graph = graph or ops.get_default_graph()
  return graph.as_graph_element(
      ops.prepend_name_scope(tensor_info.name, import_scope=import_scope))
Contributor: Wajih-O | Project: tensorflow | Lines: 19 | Source: utils_impl.py
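A hedged usage sketch for Example 8 follows; the SavedModel directory, tag, and the signature/input keys are placeholders rather than values taken from the example, and a TF 1.x session is assumed:

import tensorflow as tf
from tensorflow.python.saved_model import utils_impl

with tf.Session(graph=tf.Graph()) as sess:
  # loader.load returns the MetaGraphDef that was loaded into the session graph.
  meta_graph_def = tf.saved_model.loader.load(
      sess, ["serve"], "/tmp/my_saved_model")
  tensor_info = meta_graph_def.signature_def["serving_default"].inputs["x"]
  # Resolve the TensorInfo name back to the tensor in the loaded graph.
  x = utils_impl.get_element_from_tensor_info(
      tensor_info, graph=sess.graph, import_scope=None)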
Example 9: import_scoped_meta_graph
#......... part of the code omitted here .........
        kind = col_def.WhichOneof("kind")
        field = getattr(col_def, kind)
        if field.value and (
            not input_map or
            sorted([compat.as_str(v) for v in field.value]) !=
            sorted(input_map)):
          raise ValueError("Graph contains unbound inputs: %s. Must "
                           "provide these inputs through input_map." %
                           ",".join([compat.as_str(v) for v in field.value
                                     if not input_map or v not in input_map]))
        break

  # Sets graph to default graph if it's not passed in.
  graph = graph or ops.get_default_graph()

  # Gathers the list of nodes we are interested in.
  with graph.as_default():
    producer_op_list = None
    if meta_graph_def.meta_info_def.HasField("stripped_op_list"):
      producer_op_list = meta_graph_def.meta_info_def.stripped_op_list
    input_graph_def = meta_graph_def.graph_def
    # Remove all the explicit device specifications for this node. This helps
    # to make the graph more portable.
    if clear_devices:
      for node in input_graph_def.node:
        node.device = ""

    scope_to_prepend_to_names = graph.unique_name(
        import_scope or "", mark_as_used=False)

    importer.import_graph_def(
        input_graph_def,
        name=(import_scope or scope_to_prepend_to_names),
        input_map=input_map,
        producer_op_list=producer_op_list)

    # Restores all the other collections.
    variable_objects = {}
    for key, col_def in sorted(meta_graph_def.collection_def.items()):
      # Don't add unbound_inputs to the new graph.
      if key == unbound_inputs_col_name:
        continue
      if not restore_collections_predicate(key):
        continue

      kind = col_def.WhichOneof("kind")
      if kind is None:
        logging.error("Cannot identify data type for collection %s. Skipping.",
                      key)
        continue
      from_proto = ops.get_from_proto_function(key)
      if from_proto and kind == "bytes_list":
        proto_type = ops.get_collection_proto_type(key)
        if key in ops.GraphKeys._VARIABLE_COLLECTIONS:  # pylint: disable=protected-access
          for value in col_def.bytes_list.value:
            variable = variable_objects.get(value, None)
            if variable is None:
              proto = proto_type()
              proto.ParseFromString(value)
              variable = from_proto(
                  proto, import_scope=scope_to_prepend_to_names)
              variable_objects[value] = variable
            graph.add_to_collection(key, variable)
        else:
          for value in col_def.bytes_list.value:
            proto = proto_type()
            proto.ParseFromString(value)
            graph.add_to_collection(
                key, from_proto(
                    proto, import_scope=scope_to_prepend_to_names))
      else:
        field = getattr(col_def, kind)
        if key in _COMPAT_COLLECTION_LIST:
          logging.warning(
              "The saved meta_graph is possibly from an older release:\n"
              "'%s' collection should be of type 'byte_list', but instead "
              "is of type '%s'.", key, kind)
        if kind == "node_list":
          for value in field.value:
            col_op = graph.as_graph_element(
                ops.prepend_name_scope(value, scope_to_prepend_to_names))
            graph.add_to_collection(key, col_op)
        elif kind == "int64_list":
          # NOTE(opensource): This force conversion is to work around the fact
          # that Python2 distinguishes between int and long, while Python3 has
          # only int.
          for value in field.value:
            graph.add_to_collection(key, int(value))
        else:
          for value in field.value:
            graph.add_to_collection(
                key, ops.prepend_name_scope(value, scope_to_prepend_to_names))

    var_list = {}
    variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
                                     scope=scope_to_prepend_to_names)
    for v in variables:
      var_list[ops.strip_name_scope(v.name, scope_to_prepend_to_names)] = v

  return var_list
Contributor: AndrewTwinz | Project: tensorflow | Lines: 101 | Source: meta_graph.py
Example 10: import_scoped_meta_graph
#......... part of the code omitted here .........
    meta_graph_or_file: `MetaGraphDef` protocol buffer or filename (including
      the path) containing a `MetaGraphDef`.
    clear_devices: Boolean which controls whether to clear device information
      from graph_def. Default false.
    graph: The `Graph` to import into. If `None`, use the default graph.
    import_scope: Optional `string`. Name scope into which to import the
      subgraph. If `None`, the graph is imported to the root name scope.
    input_map: A dictionary mapping input names (as strings) in `graph_def` to
      `Tensor` objects. The values of the named input tensors in the imported
      graph will be re-mapped to the respective `Tensor` values.
    unbound_inputs_col_name: Collection name for looking up unbound inputs.

  Returns:
    A dictionary of all the `Variables` imported into the name scope.

  Raises:
    ValueError: If the graph_def contains unbound inputs.
  """
  if isinstance(meta_graph_or_file, meta_graph_pb2.MetaGraphDef):
    meta_graph_def = meta_graph_or_file
  else:
    meta_graph_def = read_meta_graph_file(meta_graph_or_file)

  if unbound_inputs_col_name:
    for key, col_def in meta_graph_def.collection_def.items():
      if key == unbound_inputs_col_name:
        kind = col_def.WhichOneof("kind")
        field = getattr(col_def, kind)
        if field.value and (
            not input_map or
            sorted([compat.as_str(v) for v in field.value]) !=
            sorted(input_map)):
          raise ValueError("Graph contains unbound inputs: %s. Must "
                           "provide these inputs through input_map." %
                           ",".join([compat.as_str(v) for v in field.value]))
        break

  # Sets graph to default graph if it's not passed in.
  graph = graph or ops.get_default_graph()

  # Gathers the list of nodes we are interested in.
  with graph.as_default():
    producer_op_list = None
    if meta_graph_def.meta_info_def.HasField("stripped_op_list"):
      producer_op_list = meta_graph_def.meta_info_def.stripped_op_list
    input_graph_def = meta_graph_def.graph_def
    # Remove all the explicit device specifications for this node. This helps
    # to make the graph more portable.
    if clear_devices:
      for node in input_graph_def.node:
        node.device = ""
    importer.import_graph_def(
        input_graph_def, name=(import_scope or ""), input_map=input_map,
        producer_op_list=producer_op_list)

    # Restores all the other collections.
    for key, col_def in meta_graph_def.collection_def.items():
      # Don't add unbound_inputs to the new graph.
      if key == unbound_inputs_col_name:
        continue

      kind = col_def.WhichOneof("kind")
      if kind is None:
        logging.error("Cannot identify data type for collection %s. Skipping.",
                      key)
        continue
      from_proto = ops.get_from_proto_function(key)
      if from_proto:
        assert kind == "bytes_list"
        proto_type = ops.get_collection_proto_type(key)
        for value in col_def.bytes_list.value:
          proto = proto_type()
          proto.ParseFromString(value)
          graph.add_to_collection(
              key, from_proto(proto, import_scope=import_scope))
      else:
        field = getattr(col_def, kind)
        if kind == "node_list":
          for value in field.value:
            col_op = graph.as_graph_element(
                ops.prepend_name_scope(value, import_scope))
            graph.add_to_collection(key, col_op)
        elif kind == "int64_list":
          # NOTE(opensource): This force conversion is to work around the fact
          # that Python2 distinguishes between int and long, while Python3 has
          # only int.
          for value in field.value:
            graph.add_to_collection(key, int(value))
        else:
          for value in field.value:
            graph.add_to_collection(
                key, ops.prepend_name_scope(value, import_scope))

    var_list = {}
    variables = graph.get_collection(ops.GraphKeys.VARIABLES,
                                     scope=import_scope)
    for v in variables:
      var_list[ops.strip_name_scope(v.name, import_scope)] = v

  return var_list
Contributor: DavidNemeskey | Project: tensorflow | Lines: 101 | Source: meta_graph.py
Example 11: _get_tensor
def _get_tensor(name):
  return graph.get_tensor_by_name(
      ops.prepend_name_scope(name, import_scope=import_scope))
Contributor: ZhangXinNan | Project: tensorflow | Lines: 3 | Source: utils_impl.py
Example 12: export_scoped_meta_graph
#......... part of the code omitted here .........
  graph = graph or ops.get_default_graph()

  exclude_nodes = None
  unbound_inputs = []
  if export_scope or clear_extraneous_savers or clear_devices:
    if graph_def:
      new_graph_def = graph_pb2.GraphDef()
      new_graph_def.versions.CopyFrom(graph_def.versions)
      new_graph_def.library.CopyFrom(graph_def.library)

      if clear_extraneous_savers:
        exclude_nodes = _find_extraneous_saver_nodes(graph_def, saver_def)

      for node_def in graph_def.node:
        if _should_include_node(node_def.name, export_scope, exclude_nodes):
          new_node_def = _node_def(node_def, export_scope, unbound_inputs,
                                   clear_devices=clear_devices)
          new_graph_def.node.extend([new_node_def])
      graph_def = new_graph_def
    else:
      # Only do this complicated work if we want to remove a name scope.
      graph_def = graph_pb2.GraphDef()
      # pylint: disable=protected-access
      graph_def.versions.CopyFrom(graph.graph_def_versions)
      bytesize = 0

      if clear_extraneous_savers:
        exclude_nodes = _find_extraneous_saver_nodes(graph.as_graph_def(),
                                                     saver_def)

      for key in sorted(graph._nodes_by_id):
        if _should_include_node(graph._nodes_by_id[key].name,
                                export_scope,
                                exclude_nodes):
          value = graph._nodes_by_id[key]
          # pylint: enable=protected-access
          node_def = _node_def(value.node_def, export_scope, unbound_inputs,
                               clear_devices=clear_devices)
          graph_def.node.extend([node_def])
          if value.outputs:
            assert "_output_shapes" not in graph_def.node[-1].attr
            graph_def.node[-1].attr["_output_shapes"].list.shape.extend([
                output.get_shape().as_proto() for output in value.outputs])
          bytesize += value.node_def.ByteSize()
          if bytesize >= (1 << 31) or bytesize < 0:
            raise ValueError("GraphDef cannot be larger than 2GB.")

      graph._copy_functions_to_graph_def(graph_def, bytesize)  # pylint: disable=protected-access

  # It's possible that not all the inputs are in the export_scope.
  # If we would like such information included in the exported meta_graph,
  # add them to a special unbound_inputs collection.
  if unbound_inputs_col_name:
    # Clears the unbound_inputs collections.
    graph.clear_collection(unbound_inputs_col_name)
    for k in unbound_inputs:
      graph.add_to_collection(unbound_inputs_col_name, k)

  var_list = {}
  variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
                                   scope=export_scope)
  for v in variables:
    if _should_include_node(v, export_scope, exclude_nodes):
      var_list[ops.strip_name_scope(v.name, export_scope)] = v

  scoped_meta_graph_def = create_meta_graph_def(
      graph_def=graph_def,
      graph=graph,
      export_scope=export_scope,
      exclude_nodes=exclude_nodes,
      clear_extraneous_savers=clear_extraneous_savers,
      saver_def=saver_def,
      strip_default_attrs=strip_default_attrs,
      **kwargs)

  if filename:
    graph_io.write_graph(
        scoped_meta_graph_def,
        os.path.dirname(filename),
        os.path.basename(filename),
        as_text=as_text)
    if save_debug_info:
      name, _ = os.path.splitext(filename)
      debug_filename = "{name}{ext}".format(name=name, ext=".debug")

      # Gets the operation from the graph by the name.
      ops_to_export = set()  # a set, so the .add() calls below work
      for node in scoped_meta_graph_def.graph_def.node:
        scoped_op_name = ops.prepend_name_scope(node.name, export_scope)
        ops_to_export.add(graph.get_operation_by_name(scoped_op_name))

      graph_debug_info = create_graph_debug_info_def(ops_to_export)

      graph_io.write_graph(
          graph_debug_info,
          os.path.dirname(debug_filename),
          os.path.basename(debug_filename),
          as_text=as_text)

  return scoped_meta_graph_def, var_list
Contributor: terrytangyuan | Project: tensorflow | Lines: 101 | Source: meta_graph.py
Note: the tensorflow.python.framework.ops.prepend_name_scope examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets come from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution and use are subject to each project's license. Do not republish without permission.