• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    公众号

Python control_flow_ops.for_loop函数代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中tensorflow.python.ops.parallel_for.control_flow_ops.for_loop函数的典型用法代码示例。如果您正苦于以下问题:Python for_loop函数的具体用法?Python for_loop怎么用?Python for_loop使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。



在下文中一共展示了for_loop函数的14个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: create_lstm_per_eg_grad

def create_lstm_per_eg_grad(batch_size, state_size, steps):
  """Builds per-example LSTM loss gradients via both pfor and for_loop.

  Returns a (pfor_outputs, while_outputs) pair so callers can compare the
  two vectorization strategies on identical computations.
  """
  step_inputs = [
      random_ops.random_normal([batch_size, state_size]) for _ in range(steps)
  ]
  lstm_cell = rnn_cell.BasicLSTMCell(state_size)
  zero_state = lstm_cell.zero_state(batch_size, dtypes.float32)

  def model_fn(sequence, start_state):
    # Run the cell over the sequence, take l2 loss of the final cell state,
    # and return its gradients w.r.t. all trainable variables.
    cur_state = start_state
    for step_input in sequence:
      _, cur_state = lstm_cell(step_input, cur_state)
    loss = nn.l2_loss(cur_state.c)
    return gradient_ops.gradients(loss, variables.trainable_variables())

  def loop_fn(i):
    # Slice out example i, keeping a leading batch dimension of 1.
    example_inputs = [
        array_ops.expand_dims(array_ops.gather(t, i), 0) for t in step_inputs
    ]
    example_state = rnn_cell.LSTMStateTuple(*[
        array_ops.expand_dims(array_ops.gather(s, i), 0) for s in zero_state
    ])
    return model_fn(example_inputs, example_state)

  pfor_outputs = control_flow_ops.pfor(loop_fn, batch_size)
  grad_dtypes = [v.dtype for v in variables.trainable_variables()]
  while_outputs = control_flow_ops.for_loop(loop_fn, grad_dtypes, batch_size)
  return pfor_outputs, while_outputs
开发者ID:LongJun123456,项目名称:tensorflow,代码行数:26,代码来源:gradients_test.py


示例2: batch_jacobian

def batch_jacobian(output, inp, use_pfor=True):
  """Computes and stacks jacobians of `output[i,...]` w.r.t. `input[i,...]`.

  e.g.
  x = tf.constant([[1, 2], [3, 4]], dtype=tf.float32)
  y = x * x
  jacobian = batch_jacobian(y, x)
  # => [[[2,  0], [0,  4]], [[6,  0], [0,  8]]]

  Args:
    output: A tensor with shape [b, y1, ..., y_n]. `output[i,...]` should
      only depend on `inp[i,...]`.
    inp: A tensor with shape [b, x1, ..., x_m]
    use_pfor: If true, uses pfor for computing the Jacobian. Else uses a
      tf.while_loop.

  Returns:
    A tensor `t` with shape [b, y_1, ..., y_n, x1, ..., x_m] where `t[i, ...]`
    is the jacobian of `output[i, ...]` w.r.t. `inp[i, ...]`, i.e. stacked
    per-example jacobians.

  Raises:
    ValueError: if first dimension of `output` and `inp` do not match.
  """
  output_shape = output.shape
  if not output_shape[0].is_compatible_with(inp.shape[0]):
    raise ValueError("Need first dimension of output shape (%s) and inp shape "
                     "(%s) to match." % (output.shape, inp.shape))
  if output_shape.is_fully_defined():
    # Static shape known: compute sizes as plain Python ints.
    batch_size = int(output_shape[0])
    output_row_size = output_shape.num_elements() // batch_size
  else:
    # Fall back to dynamic shape tensors; note output_shape is rebound here
    # and is later used (at the bottom) to restore the original shape.
    output_shape = array_ops.shape(output)
    batch_size = output_shape[0]
    output_row_size = array_ops.size(output) // batch_size
  inp_shape = array_ops.shape(inp)
  # Flatten output to 2-D.
  with ops.control_dependencies(
      [check_ops.assert_equal(batch_size, inp_shape[0])]):
    output = array_ops.reshape(output, [batch_size, output_row_size])

  def loop_fn(i):
    # Gradient of the i-th output column (one scalar per example) w.r.t. inp.
    y = array_ops.gather(output, i, axis=1)
    return gradient_ops.gradients(y, inp)[0]

  if use_pfor:
    pfor_output = control_flow_ops.pfor(loop_fn, output_row_size)
  else:
    pfor_output = control_flow_ops.for_loop(loop_fn, output.dtype,
                                            output_row_size)
  if pfor_output is None:
    # loop_fn's gradient came back as None; nothing to stack.
    return None
  # Loop results are stacked along axis 0 (one entry per output element);
  # reshape and transpose to move the batch dimension in front.
  pfor_output = array_ops.reshape(pfor_output,
                                  [output_row_size, batch_size, -1])
  output = array_ops.transpose(pfor_output, [1, 0, 2])
  new_shape = array_ops.concat([output_shape, inp_shape[1:]], axis=0)
  return array_ops.reshape(output, new_shape)
开发者ID:AnishShah,项目名称:tensorflow,代码行数:57,代码来源:gradients.py


示例3: create_mnist_autobatch

def create_mnist_autobatch(batch_size, data_format, training):
  """Runs the Mnist model per example via pfor and for_loop, plus batched."""
  batch_images = random_ops.random_uniform([batch_size, 28, 28])
  mnist_model = Mnist(data_format)
  manual = mnist_model(batch_images, training=training)

  def loop_fn(i):
    # Run the model on the i-th image alone.
    single_image = array_ops.gather(batch_images, i)
    return mnist_model(single_image, training=training)

  pfor_outputs = control_flow_ops.pfor(loop_fn, batch_size)
  while_outputs = control_flow_ops.for_loop(
      loop_fn, dtypes.float32, batch_size)

  return pfor_outputs, while_outputs, manual
开发者ID:aeverall,项目名称:tensorflow,代码行数:14,代码来源:gradients_test.py


示例4: benchmark_basic_while

  def benchmark_basic_while(self):
    """Benchmarks pfor vs. for_loop over a trivial tf.while_loop body."""
    with ops.Graph().as_default():

      def loop_fn(i):
        # Count up to i, adding i to an accumulator each step.
        _, acc = control_flow_ops.while_loop(
            lambda step, total: step < i,
            lambda step, total: (step + 1, total + i),
            [0, 0])
        return acc

      num_iters = 50
      pfor_output = pfor_control_flow_ops.pfor(loop_fn, num_iters)
      for_loop_output = pfor_control_flow_ops.for_loop(
          loop_fn, dtypes.int32, num_iters)
      self._run(pfor_output, 100, name="pfor_basic")
      self._run(for_loop_output, 100, name="for_loop_basic")
开发者ID:aritratony,项目名称:tensorflow,代码行数:16,代码来源:control_flow_ops_test.py


示例5: create_mnist_per_eg_jacobian

def create_mnist_per_eg_jacobian(batch_size, data_format, training):
  """Computes per-example jacobians of Mnist logits via pfor and for_loop."""
  batch_images = random_ops.random_uniform([batch_size, 28, 28])
  mnist_model = Mnist(data_format)

  def loop_fn(i, use_pfor):
    # Jacobian of example i's flattened logits w.r.t. all trainable variables.
    single_image = array_ops.gather(batch_images, i)
    flat_logits = array_ops.reshape(
        mnist_model(single_image, training=training), [-1])
    return gradients.jacobian(
        flat_logits, variables.trainable_variables(), use_pfor=use_pfor)

  pfor_outputs = control_flow_ops.pfor(
      functools.partial(loop_fn, use_pfor=True), batch_size)
  while_outputs = control_flow_ops.for_loop(
      functools.partial(loop_fn, use_pfor=False),
      [dtypes.float32] * len(variables.trainable_variables()), batch_size)
  return pfor_outputs, while_outputs
开发者ID:LongJun123456,项目名称:tensorflow,代码行数:17,代码来源:gradients_test.py


示例6: jacobian

def jacobian(output, inputs, use_pfor=True):
  """Computes jacobian of `output` w.r.t. `inputs`.

  Args:
    output: A tensor.
    inputs: A tensor or a nested structure of tensor objects.
    use_pfor: If true, uses pfor for computing the jacobian. Else uses
      tf.while_loop.

  Returns:
    A tensor or a nested structure of tensors with the same structure as
    `inputs`. Each entry is the jacobian of `output` w.r.t. the corresponding
    value in `inputs`. If output has shape [y_1, ..., y_n] and inputs_i has
    shape [x_1, ..., x_m], the corresponding jacobian has shape
    [y_1, ..., y_n, x_1, ..., x_m].
  """
  flat_inputs = nest.flatten(inputs)
  output_tensor_shape = output.shape
  output_shape = array_ops.shape(output)
  # Work on a flattened (1-D) view of output: one loop iteration per element.
  output = array_ops.reshape(output, [-1])

  def loop_fn(i):
    # Gradients of the i-th output element w.r.t. every flattened input.
    y = array_ops.gather(output, i)
    return gradient_ops.gradients(y, flat_inputs)

  try:
    output_size = int(output.shape[0])
  except TypeError:
    # Static size unknown; fall back to the dynamic shape.
    output_size = array_ops.shape(output)[0]

  if use_pfor:
    pfor_outputs = control_flow_ops.pfor(loop_fn, output_size)
  else:
    pfor_outputs = control_flow_ops.for_loop(
        loop_fn, [output.dtype] * len(flat_inputs), output_size)

  for i, out in enumerate(pfor_outputs):
    if out is not None:
      # Restore the original output dims in front of each input's own dims.
      new_shape = array_ops.concat(
          [output_shape, array_ops.shape(out)[1:]], axis=0)
      out = array_ops.reshape(out, new_shape)
      out.set_shape(output_tensor_shape.concatenate(flat_inputs[i].shape))
    pfor_outputs[i] = out

  return nest.pack_sequence_as(inputs, pfor_outputs)
开发者ID:ThunderQi,项目名称:tensorflow,代码行数:45,代码来源:gradients.py


示例7: benchmark_matmul

  def benchmark_matmul(self):
    """Benchmarks per-row matmul via pfor/for_loop against one batched matmul."""
    with ops.Graph().as_default():
      num_rows = 1024
      dim = 1000
      lhs = random_ops.random_normal([num_rows, dim])
      rhs = random_ops.random_normal([dim, dim])

      def loop_fn(i):
        # Multiply the i-th row (as a 1 x dim matrix) by rhs.
        row = array_ops.expand_dims(array_ops.gather(lhs, i), 0)
        return math_ops.matmul(row, rhs)

      pfor_outputs = pfor_control_flow_ops.pfor(loop_fn, num_rows)
      while_outputs = pfor_control_flow_ops.for_loop(
          loop_fn, dtypes.float32, num_rows)
      manual = math_ops.matmul(lhs, rhs)

      self._run(manual, 1000, name="manual_matmul")
      self._run(pfor_outputs, 1000, name="pfor_matmul")
      self._run(while_outputs, 100, name="while_matmul")
开发者ID:aritratony,项目名称:tensorflow,代码行数:18,代码来源:control_flow_ops_test.py


示例8: benchmark_add

  def benchmark_add(self):
    """Benchmarks per-row addition via pfor/for_loop against a single add."""
    with ops.Graph().as_default():
      num_rows = 256
      dim = 1000
      lhs = random_ops.random_normal([num_rows, dim])
      rhs = random_ops.random_normal([num_rows, dim])

      def loop_fn(i):
        # Add the i-th rows of the two operands.
        return array_ops.gather(lhs, i) + array_ops.gather(rhs, i)

      pfor_outputs = pfor_control_flow_ops.pfor(loop_fn, num_rows)
      while_outputs = pfor_control_flow_ops.for_loop(
          loop_fn, dtypes.float32, num_rows)
      manual = lhs + rhs

      self._run(manual, 1000, name="manual_add")
      self._run(pfor_outputs, 1000, name="pfor_add")
      self._run(while_outputs, 100, name="while_add")
开发者ID:aritratony,项目名称:tensorflow,代码行数:19,代码来源:control_flow_ops_test.py


示例9: create_fc_per_eg_jacobians

def create_fc_per_eg_jacobians(batch_size, activation_size, num_layers):
  """Computes batched and per-example jacobians of a fully connected model."""
  fc_model = FullyConnectedModel(activation_size=activation_size,
                                 num_layers=num_layers)
  batch_inp = random_ops.random_normal([batch_size, activation_size])
  batch_output = fc_model(batch_inp)
  jacobians = gradients.jacobian(batch_output,
                                 variables.trainable_variables())

  def loop_fn(i, use_pfor):
    # Jacobian of example i's flattened output w.r.t. all trainable variables.
    single_inp = array_ops.expand_dims(array_ops.gather(batch_inp, i), 0)
    flat_output = array_ops.reshape(fc_model(single_inp), [-1])
    return gradients.jacobian(
        flat_output, variables.trainable_variables(), use_pfor=use_pfor)

  per_eg_jacobians_pfor = control_flow_ops.pfor(
      functools.partial(loop_fn, use_pfor=True), batch_size)
  per_eg_jacobians_while = control_flow_ops.for_loop(
      functools.partial(loop_fn, use_pfor=False),
      [dtypes.float32] * len(variables.trainable_variables()), batch_size)
  return jacobians, per_eg_jacobians_pfor, per_eg_jacobians_while
开发者ID:LongJun123456,项目名称:tensorflow,代码行数:20,代码来源:gradients_test.py


示例10: test_create_outside_and_write_and_scatter

  def test_create_outside_and_write_and_scatter(self):
    """Checks pfor and for_loop agree on a TensorArray created outside the loop."""

    t = tensor_array_ops.TensorArray(dtypes.int32, 10, clear_after_read=False)
    handle = t.handle

    def loop_fn(i):
      # Mix writes and scatters on the shared TensorArray; return its flow
      # tensor so the loop output can be used to reconstruct the array.
      ta = t.write(i + 2, 2 * i).write(i, 5)
      ta = ta.scatter([4 + i], [4]).scatter([6 + i, 8 + i], [6 + i, 8 + i])
      return ta.flow

    t1 = pfor_control_flow_ops.pfor(loop_fn, iters=2)
    # Rebuild the array from the original handle plus the last flow value.
    out1 = tensor_array_ops.TensorArray(
        dtypes.int32, handle=handle, flow=t1[-1]).stack()
    output1 = self._run_targets(out1)

    # for_loop needs the flow's dtype (float32) as its loop_fn output dtype.
    t2 = pfor_control_flow_ops.for_loop(loop_fn, dtypes.float32, iters=2)
    out2 = tensor_array_ops.TensorArray(
        dtypes.int32, handle=handle, flow=t2[-1]).stack()
    output2 = self._run_targets(out2)
    self.assertAllClose(output2, output1)
开发者ID:aritratony,项目名称:tensorflow,代码行数:20,代码来源:control_flow_ops_test.py


示例11: create_mnist_per_eg_grad

def create_mnist_per_eg_grad(batch_size, data_format, training):
  """Computes per-example Mnist loss gradients via pfor and for_loop."""
  batch_images = random_ops.random_uniform([batch_size, 28, 28])
  class_ids = np.random.randint(
      low=0, high=10, size=[batch_size]).astype(np.int32)
  onehot_labels = np.zeros((batch_size, 10)).astype(np.float32)
  onehot_labels[np.arange(batch_size), class_ids] = 1.
  mnist_model = Mnist(data_format)

  def loop_fn(i):
    # Softmax cross-entropy loss gradients for example i alone.
    single_image = array_ops.gather(batch_images, i)
    single_label = array_ops.gather(onehot_labels, i)
    flat_logits = array_ops.reshape(
        mnist_model(single_image, training=training), [-1])
    example_loss = losses.softmax_cross_entropy(
        logits=flat_logits, onehot_labels=single_label,
        reduction=losses.Reduction.NONE)
    return gradient_ops.gradients(example_loss,
                                  variables.trainable_variables())

  pfor_outputs = control_flow_ops.pfor(loop_fn, batch_size)
  while_outputs = control_flow_ops.for_loop(
      loop_fn, [dtypes.float32] * len(variables.trainable_variables()),
      batch_size)
  return pfor_outputs, while_outputs
开发者ID:LongJun123456,项目名称:tensorflow,代码行数:21,代码来源:gradients_test.py


示例12: create_fc_per_eg_grad

def create_fc_per_eg_grad(batch_size, activation_size, num_layers):
  """Computes per-example loss gradients of an MLP via pfor and for_loop."""
  batch_inp = random_ops.random_normal([batch_size, activation_size])
  hidden_layers = [
      tf_layers.Dense(activation_size, activation=nn.relu)
      for _ in range(num_layers)
  ]
  output_layer = tf_layers.Dense(1)

  def model_fn(act):
    # Forward through the stack, project to one unit, take l2 loss, and
    # return its gradients w.r.t. all trainable variables.
    for hidden in hidden_layers:
      act = hidden(act)
    act = output_layer(act)
    act = nn.l2_loss(act)
    return gradient_ops.gradients(act, variables.trainable_variables())

  def loop_fn(i):
    # Run the model on example i alone, kept as a batch of size one.
    return model_fn(array_ops.expand_dims(array_ops.gather(batch_inp, i), 0))

  pfor_outputs = control_flow_ops.pfor(loop_fn, batch_size)
  grad_dtypes = [v.dtype for v in variables.trainable_variables()]
  while_outputs = control_flow_ops.for_loop(loop_fn, grad_dtypes, batch_size)
  return pfor_outputs, while_outputs
开发者ID:LongJun123456,项目名称:tensorflow,代码行数:22,代码来源:gradients_test.py


示例13: test_parallel_iterations_zero

 def test_parallel_iterations_zero(self):
   """Checks that both APIs reject parallel_iterations=0."""
   # pfor raises ValueError while for_loop raises TypeError for the same
   # invalid value; both messages mention "positive integer".
   with self.assertRaisesRegexp(ValueError, "positive integer"):
     pfor_control_flow_ops.pfor(lambda i: 1, 8, parallel_iterations=0)
   with self.assertRaisesRegexp(TypeError, "positive integer"):
     pfor_control_flow_ops.for_loop(
         lambda i: 1, dtypes.int32, 8, parallel_iterations=0)
开发者ID:aritratony,项目名称:tensorflow,代码行数:6,代码来源:control_flow_ops_test.py


示例14: _test_loop_fn

 def _test_loop_fn(self, loop_fn, iters, loop_fn_dtypes=dtypes.float32):
   """Asserts that pfor and for_loop produce equal results for loop_fn."""
   pfor_result = pfor_control_flow_ops.pfor(loop_fn, iters=iters)
   for_loop_result = pfor_control_flow_ops.for_loop(
       loop_fn, loop_fn_dtypes, iters=iters)
   self.run_and_assert_equal(pfor_result, for_loop_result)
开发者ID:LongJun123456,项目名称:tensorflow,代码行数:4,代码来源:control_flow_ops_test.py



注:本文中的tensorflow.python.ops.parallel_for.control_flow_ops.for_loop函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python control_flow_ops.pfor函数代码示例发布时间:2022-05-27
下一篇:
Python nn_ops.xw_plus_b函数代码示例发布时间:2022-05-27
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap