本文整理汇总了Python中tensorflow.python.training.slot_creator.create_slot函数的典型用法代码示例。如果您正苦于以下问题:Python create_slot函数的具体用法?Python create_slot怎么用?Python create_slot使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了create_slot函数的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: apply
def apply(self, var_list=None):
    """Create shadow (moving-average) variables for ``var_list`` and return
    a single op that updates all of them.

    Args:
        var_list: Optional list of ``Variable``/``Tensor`` objects; defaults
            to all trainable variables. Dtypes must be float32 or float64.

    Returns:
        A grouped ``Operation`` that bumps the update counter and refreshes
        every moving average.

    Raises:
        TypeError: if a variable is neither float32 nor float64.
        ValueError: if a moving average was already created for a variable.
    """
    # TODO(touts): op_scope
    if var_list is None:
        var_list = variables.trainable_variables()
    for v in var_list:
        if v.dtype.base_dtype not in (dtypes.float32, dtypes.float64):
            raise TypeError(
                "The variables must be float or double: %s" % v)
        if v in self._averages:
            raise ValueError(
                "Moving average already computed for: %s" % v)
        # Keep each average on the same device as its variable to lower
        # cross-device bandwidth; plain tensors use the default placement.
        if isinstance(v, variables.Variable):
            shadow = slot_creator.create_slot(
                v, v.initialized_value(), self._name,
                colocate_with_primary=True)
        else:
            shadow = slot_creator.create_zeros_slot(
                v, self._name,
                colocate_with_primary=(v.op.type == "Variable"))
        self._averages[v] = shadow
    with ops.name_scope(self._name) as scope:
        decay = self._num_updates / (self._num_updates + 1)
        updates = [self._num_updates_op]
        for v in var_list:
            updates.append(
                assign_moving_average(self._averages[v], v, decay))
        return control_flow_ops.group(*updates, name=scope)
开发者ID:daxiongshu,项目名称:tf_resnet_cifar,代码行数:32,代码来源:simple_moving_averages.py
示例2: _create_lagrangian_multipliers
def _create_lagrangian_multipliers(optimizer_dict, doo_ds):
    """Create one Lagrangian-multiplier slot per state variable.

    Each slot is initialized from the matching entry of ``doo_ds``
    (``utils.val_or_zero`` supplies the fallback) and registered in the
    LAGRANGIAN_MULTIPLIERS collection.
    """
    multipliers = []
    for state_var, derivative in zip(optimizer_dict.state, doo_ds):
        multipliers.append(slot_creator.create_slot(
            state_var, utils.val_or_zero(derivative, state_var), 'alpha'))
    for lm in multipliers:
        tf.add_to_collection(utils.GraphKeys.LAGRANGIAN_MULTIPLIERS, lm)
    # Drop them from GLOBAL_VARIABLES so tf.global_variables_initializer
    # does not initialize them 'automatically'.
    utils.remove_from_collection(utils.GraphKeys.GLOBAL_VARIABLES, *multipliers)
    return multipliers
开发者ID:codealphago,项目名称:FAR-HO,代码行数:7,代码来源:hyper_gradients.py
示例3: testCreateSlotFromVariableRespectsScope
def testCreateSlotFromVariableRespectsScope(self):
    """The slot name must include the variable scope active at creation.

    Regression test — see discussion on #2740.
    """
    with self.cached_session(), variable_scope.variable_scope("scope"):
        primary = variables.Variable([1.0, 2.5], name="var")
        slot = slot_creator.create_slot(
            primary, primary.initialized_value(), name="slot")
        # The scope appears twice: once from the active variable scope and
        # once from the slot creator's own scoping of the primary's name.
        self.assertEqual("scope/scope/var/slot", slot.op.name)
开发者ID:aeverall,项目名称:tensorflow,代码行数:7,代码来源:slot_creator_test.py
示例4: _create_z
def _create_z(optimizer_dict, hyper, d_init_dynamics_d_hyper):
    """Create the z slot variables (auxiliary forward state) for ``hyper``.

    One slot is created per optimizer state variable, initialized from the
    matching entry of ``d_init_dynamics_d_hyper`` (zeros when the entry —
    or the whole list — is None).
    """
    if d_init_dynamics_d_hyper is None:
        d_init_dynamics_d_hyper = [None] * len(optimizer_dict.state)
    with tf.variable_scope('Z'):
        zs = []
        for state_var, derivative in zip(optimizer_dict.state,
                                         d_init_dynamics_d_hyper):
            zs.append(slot_creator.create_slot(
                state_var, utils.val_or_zero(derivative, state_var),
                hyper.op.name))
        for z_var in zs:
            tf.add_to_collection(utils.GraphKeys.ZS, z_var)
        # utils.remove_from_collection(utils.GraphKeys.GLOBAL_VARIABLES, *zs)
        # in this case it is completely fine to keep zs into the global
        # variable collection.
        return zs
开发者ID:codealphago,项目名称:FAR-HO,代码行数:9,代码来源:hyper_gradients.py
示例5: testCreateSlotFromTensor
def testCreateSlotFromTensor(self):
    """A slot built from a plain tensor copies its shape, dtype and value."""
    with self.cached_session():
        primary = constant_op.constant([1.0, 2.5], name="const")
        slot = slot_creator.create_slot(primary, primary * 2, name="slot")
        variables.global_variables_initializer().run()
        # Slot is scoped under the primary's name.
        self.assertEqual("const/slot", slot.op.name)
        self.assertEqual([2], slot.get_shape().as_list())
        self.assertEqual(dtypes.float32, slot.dtype.base_dtype)
        # Initialized to the given value (primary * 2).
        self.assertAllEqual([2.0, 5.0], self.evaluate(slot))
开发者ID:aeverall,项目名称:tensorflow,代码行数:11,代码来源:slot_creator_test.py
示例6: testCreateSlotFromVariable
def testCreateSlotFromVariable(self):
    """A slot built from a variable copies its shape, dtype and value."""
    with self.cached_session():
        primary = variables.Variable([1.0, 2.5], name="var")
        slot = slot_creator.create_slot(
            primary, primary.initialized_value(), name="slot")
        variables.global_variables_initializer().run()
        # Slot is scoped under the primary's name.
        self.assertEqual("var/slot", slot.op.name)
        self.assertEqual([2], slot.get_shape().as_list())
        self.assertEqual(dtypes.float32, slot.dtype.base_dtype)
        # Initialized to the primary's initial value.
        self.assertAllEqual([1.0, 2.5], self.evaluate(slot))
开发者ID:aeverall,项目名称:tensorflow,代码行数:11,代码来源:slot_creator_test.py
示例7: testCreateSlotFromTensor
def testCreateSlotFromTensor(self):
    """Slot from a constant tensor: scoped name, same shape/dtype, given value."""
    with self.test_session():
        primary = tf.constant([1.0, 2.5], name="const")
        slot = slot_creator.create_slot(primary, primary * 2, name="slot")
        tf.initialize_all_variables().run()
        self.assertEqual(slot.op.name, "const/slot")
        self.assertEqual(slot.get_shape().as_list(), [2])
        self.assertEqual(slot.dtype.base_dtype, tf.float32)
        self.assertAllEqual(slot.eval(), [2.0, 5.0])
开发者ID:CdricGmd,项目名称:tensorflow,代码行数:11,代码来源:slot_creator_test.py
示例8: testCreateSlotFromVariable
def testCreateSlotFromVariable(self):
    """Slot from a variable: scoped name, same shape/dtype, initial value."""
    with self.test_session():
        primary = tf.Variable([1.0, 2.5], name="var")
        slot = slot_creator.create_slot(
            primary, primary.initialized_value(), name="slot")
        tf.initialize_all_variables().run()
        self.assertEqual(slot.op.name, "var/slot")
        self.assertEqual(slot.get_shape().as_list(), [2])
        self.assertEqual(slot.dtype.base_dtype, tf.float32)
        self.assertAllEqual(slot.eval(), [1.0, 2.5])
开发者ID:CdricGmd,项目名称:tensorflow,代码行数:11,代码来源:slot_creator_test.py
示例9: _create_hypergradient
def _create_hypergradient(hyper, doo_dhypers):
    """Create one hyper-gradient as a (slot) variable.

    Args:
        hyper: the relative hyperparameter.
        doo_dhypers: initialization, that is the derivative of the outer
            objective w.r.t. this hyper (``utils.val_or_zero`` supplies the
            fallback value).

    Returns:
        The newly created slot variable holding the hyper-gradient.
    """
    hyper_grad = slot_creator.create_slot(
        hyper, utils.val_or_zero(doo_dhypers, hyper), 'hypergradient')
    # Keep it out of GLOBAL_VARIABLES so generic initializers skip it.
    utils.remove_from_collection(utils.GraphKeys.GLOBAL_VARIABLES, hyper_grad)
    return hyper_grad
开发者ID:codealphago,项目名称:FAR-HO,代码行数:11,代码来源:hyper_gradients.py
示例10: _get_or_make_slot
def _get_or_make_slot(self, var, val, slot_name, op_name):
    """Find or create a slot for a variable.

    Args:
        var: A `Variable` object.
        val: A `Tensor`. The initial value of the slot.
        slot_name: Name for the slot.
        op_name: Name to use when scoping the Variable that
            needs to be created for the slot.

    Returns:
        A `Variable` object.
    """
    slots = self._slot_dict(slot_name)
    if var in slots:
        return slots[var]
    slot = slot_creator.create_slot(var, val, op_name)
    slots[var] = slot
    return slot
开发者ID:apollos,项目名称:tensorflow,代码行数:17,代码来源:optimizer.py
示例11: _process_slot_restoration
def _process_slot_restoration(self, slot_restoration, variable):
    """Restore a slot variable's value (creating it if necessary)."""
    # TODO(allenl): Move this to Optimizer
    assert isinstance(self, optimizer_lib.Optimizer)
    named_slots = self._slot_dict(slot_restoration.slot_name)
    variable_key = optimizer_lib._var_key(variable)  # pylint: disable=protected-access
    existing_slot_variable = named_slots.get(variable_key, None)
    if existing_slot_variable is None:
        # No slot yet: read the saved tensor straight from the checkpoint and
        # use it as the initializer of a freshly created slot variable.
        base_dtype = slot_restoration.value_pointer.dtype.base_dtype
        initializer, = io_ops.restore_v2(
            prefix=slot_restoration.value_pointer.save_path,
            tensor_names=[slot_restoration.value_pointer.checkpoint_key],
            shape_and_slices=[""],
            dtypes=[base_dtype],
            name="checkpoint_initializer")
        new_slot_variable = slot_creator.create_slot(variable, initializer,
                                                     slot_restoration.slot_name)
        if slot_restoration.value_pointer.session is not None:
            # A session was provided, so run the initializer now to
            # materialize the restored value immediately.
            slot_restoration.value_pointer.session.run(
                new_slot_variable.initializer)
        named_slots[variable_key] = new_slot_variable
    else:
        # Slot already exists: just assign the checkpointed value into it.
        _assign_existing_variable(
            existing_slot_variable, value_pointer=slot_restoration.value_pointer)
开发者ID:japrogramer,项目名称:tensorflow,代码行数:24,代码来源:checkpointable.py
示例12: apply
def apply(self, var_list=None):
    """Maintains moving averages of variables.

    `var_list` must be a list of `Variable` or `Tensor` objects. This method
    creates shadow variables for all elements of `var_list`. Shadow variables
    for `Variable` objects are initialized to the variable's initial value.
    They will be added to the `GraphKeys.MOVING_AVERAGE_VARIABLES` collection.
    For `Tensor` objects, the shadow variables are initialized to 0.

    shadow variables are created with `trainable=False` and added to the
    `GraphKeys.ALL_VARIABLES` collection. They will be returned by calls to
    `tf.all_variables()`.

    Returns an op that updates all shadow variables as described above.

    Note that `apply()` can be called multiple times with different lists of
    variables.

    Args:
        var_list: A list of Variable or Tensor objects. The variables
            and Tensors must be of types float32 or float64.

    Returns:
        An Operation that updates the moving averages.

    Raises:
        TypeError: If the arguments are not all float32 or float64.
        ValueError: If the moving average of one of the variables is already
            being computed.
    """
    # TODO(touts): op_scope
    if var_list is None:
        var_list = variables.trainable_variables()
    for var in var_list:
        if var.dtype.base_dtype not in [dtypes.float32, dtypes.float64]:
            raise TypeError("The variables must be float or double: %s" % var.name)
        if var in self._averages:
            raise ValueError("Moving average already computed for: %s" % var.name)
        # For variables: to lower communication bandwidth across devices we keep
        # the moving averages on the same device as the variables. For other
        # tensors, we rely on the existing device allocation mechanism.
        # control_dependencies(None) clears any surrounding control deps so
        # slot creation is not tied to the caller's control-flow context.
        with ops.control_dependencies(None):
            if isinstance(var, variables.Variable):
                avg = slot_creator.create_slot(
                    var, var.initialized_value(), self._name,
                    colocate_with_primary=True)
            else:
                avg = slot_creator.create_zeros_slot(
                    var, self._name,
                    colocate_with_primary=(var.op.type == "Variable"))
        self._averages[var] = avg
        ops.add_to_collection(ops.GraphKeys.MOVING_AVERAGE_VARIABLES, var)
    with ops.name_scope(self._name) as scope:
        decay = ops.convert_to_tensor(self._decay, name="decay")
        if self._num_updates is not None:
            # Warm-up schedule: use the smaller of the configured decay and
            # (1 + t) / (10 + t) so early averages track the variables faster.
            num_updates = math_ops.cast(self._num_updates, dtypes.float32,
                                        name="num_updates")
            decay = math_ops.minimum(decay,
                                     (1.0 + num_updates) / (10.0 + num_updates))
        updates = []
        for var in var_list:
            updates.append(assign_moving_average(self._averages[var], var, decay))
        return control_flow_ops.group(*updates, name=scope)
开发者ID:CdricGmd,项目名称:tensorflow,代码行数:65,代码来源:moving_averages.py
示例13: _get_or_make_slot
def _get_or_make_slot(self, var, val, slot_name, op_name):
    """Return the slot for ``var`` under ``slot_name``, creating it on demand."""
    slots = self._slot_dict(slot_name)
    try:
        return slots[var]
    except KeyError:
        # First request for this (var, slot_name) pair: build and cache it.
        slot = slot_creator.create_slot(var, val, op_name)
        slots[var] = slot
        return slot
开发者ID:amusingchao,项目名称:learngit,代码行数:5,代码来源:rmsprop_applier.py
示例14: apply
def apply(self, var_list=None):
    """Maintains moving averages of variables.

    `var_list` must be a list of `Variable` or `Tensor` objects. This method
    creates shadow variables for all elements of `var_list`. Shadow variables
    for `Variable` objects are initialized to the variable's initial value.
    They will be added to the `GraphKeys.MOVING_AVERAGE_VARIABLES` collection.
    For `Tensor` objects, the shadow variables are initialized to 0 and zero
    debiased (see docstring in `assign_moving_average` for more details).

    shadow variables are created with `trainable=False` and added to the
    `GraphKeys.ALL_VARIABLES` collection. They will be returned by calls to
    `tf.global_variables()`.

    Returns an op that updates all shadow variables from the current value of
    their associated variables.

    Note that `apply()` can be called multiple times. When eager execution is
    enabled each call to apply will update the variables once, so this needs to
    be called in a loop.

    Args:
        var_list: A list of Variable or Tensor objects. The variables
            and Tensors must be of types bfloat16, float16, float32, or float64.

    Returns:
        An Operation that updates the moving averages.

    Raises:
        TypeError: If the arguments are not an allowed type.
    """
    # TODO(touts): op_scope
    if var_list is None:
        var_list = variables.trainable_variables()
    zero_debias_true = set()  # set of vars to set `zero_debias=True`
    for var in var_list:
        if var.dtype.base_dtype not in [
            dtypes.bfloat16, dtypes.float16, dtypes.float32, dtypes.float64
        ]:
            raise TypeError("The variables must be half, float, or double: %s" %
                            var.name)
        # Create a shadow only on the first apply() call for this var;
        # subsequent calls reuse the existing average.
        if var not in self._averages:
            # For variables: to lower communication bandwidth across devices we keep
            # the moving averages on the same device as the variables. For other
            # tensors, we rely on the existing device allocation mechanism.
            # init_scope lifts slot creation out of any function-building or
            # control-flow context.
            with ops.init_scope():
                if isinstance(var, variables.Variable):
                    avg = slot_creator.create_slot(var,
                                                   var.initialized_value(),
                                                   self.name,
                                                   colocate_with_primary=True)
                    # NOTE(mrry): We only add `tf.Variable` objects to the
                    # `MOVING_AVERAGE_VARIABLES` collection.
                    ops.add_to_collection(ops.GraphKeys.MOVING_AVERAGE_VARIABLES, var)
                else:
                    avg = slot_creator.create_zeros_slot(
                        var,
                        self.name,
                        colocate_with_primary=(var.op.type in ["Variable",
                                                               "VariableV2",
                                                               "VarHandleOp"]))
                    # Zero-initialized averages are biased toward zero; mark
                    # them for debiasing if the feature is enabled.
                    if self._zero_debias:
                        zero_debias_true.add(avg)
            self._averages[var] = avg
    with ops.name_scope(self.name) as scope:
        decay = ops.convert_to_tensor(self._decay, name="decay")
        if self._num_updates is not None:
            # Warm-up schedule: cap decay at (1 + t) / (10 + t) so early
            # averages track the variables faster.
            num_updates = math_ops.cast(self._num_updates,
                                        dtypes.float32,
                                        name="num_updates")
            decay = math_ops.minimum(decay,
                                     (1.0 + num_updates) / (10.0 + num_updates))
        updates = []
        for var in var_list:
            zero_debias = self._averages[var] in zero_debias_true
            updates.append(assign_moving_average(
                self._averages[var], var, decay, zero_debias=zero_debias))
        return control_flow_ops.group(*updates, name=scope)
开发者ID:aeverall,项目名称:tensorflow,代码行数:80,代码来源:moving_averages.py
注:本文中的tensorflow.python.training.slot_creator.create_slot函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论