This article collects typical usage examples of the Python function tensorflow.python.ops.parallel_for.gradients.batch_jacobian. If you are wondering what batch_jacobian does, how to call it, or what real uses of it look like, the curated code samples below may help.
The following shows 10 code examples of the batch_jacobian function, sorted by popularity by default.
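The excerpts below are taken from TensorFlow's parallel_for gradients_test.py and rely on module-level imports that this page omits. As a rough orientation, here is a hedged sketch of those imports together with a minimal end-to-end call; it assumes graph mode (TF 1.x style; under TF 2.x eager execution would need to be disabled first), and the toy tensor is illustrative rather than taken from any example below.

# Imports the following excerpts appear to assume (TensorFlow's internal API):
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients as gradient_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.parallel_for import gradients

# Minimal illustrative call (assumed toy data, not part of the original page):
x = constant_op.constant([[1., 2.], [3., 4.]])  # dimension 0 is the batch
y = x * x
# batch_jacobian returns d y[b, :] / d x[b, :] stacked over the batch, i.e. a
# tensor of shape [batch, y_dim, x_dim] = [2, 2, 2]; each slice is diag(2 * x[b]).
jac = gradients.batch_jacobian(y, x, use_pfor=True)
with session.Session() as sess:
  print(sess.run(jac))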
Example 1: create_mnist_batch_jacobian
def create_mnist_batch_jacobian(batch_size, data_format, training):
  images = random_ops.random_uniform([batch_size, 28, 28])
  model = Mnist(data_format)
  logits = model(images, training=training)
  pfor_jacobian = gradients.batch_jacobian(logits, images, use_pfor=True)
  while_jacobian = gradients.batch_jacobian(logits, images, use_pfor=False)
  return pfor_jacobian, while_jacobian
Author: adit-chandra, Project: tensorflow, Lines of code: 8, Source: gradients_test.py
Example 2: create_lstm_batch_hessian
def create_lstm_batch_hessian(batch_size, state_size, steps):
  inp, output = lstm_model_fn(batch_size, state_size, steps)
  pfor_jacobian = gradients.batch_jacobian(output, inp, use_pfor=True)
  pfor_jacobian = array_ops.reshape(pfor_jacobian, [batch_size, -1])
  pfor_hessian = gradients.batch_jacobian(pfor_jacobian, inp, use_pfor=True)
  # TODO(agarwal): using two nested while_loops doesn't seem to work here.
  # Hence we use pfor_jacobian for computing while_hessian.
  while_jacobian = pfor_jacobian
  while_hessian = gradients.batch_jacobian(while_jacobian, inp, use_pfor=False)
  return pfor_hessian, while_hessian
Author: LongJun123456, Project: tensorflow, Lines of code: 10, Source: gradients_test.py
Example 3: test_batch_jacobian_fixed_shape
def test_batch_jacobian_fixed_shape(self):
  x = random_ops.random_uniform([2, 3, 5])
  y = x * x
  batch_jacobian_pfor = gradients.batch_jacobian(y, x, use_pfor=True)
  batch_jacobian_while = gradients.batch_jacobian(y, x, use_pfor=False)
  two_x = 2 * x
  answer = array_ops.stack(
      [array_ops.diag(two_x[0]),
       array_ops.diag(two_x[1])])
  self.run_and_assert_equal(answer, batch_jacobian_pfor)
  self.run_and_assert_equal(answer, batch_jacobian_while)
Author: LongJun123456, Project: tensorflow, Lines of code: 11, Source: gradients_test.py
Example 4: test_batch_jacobian_unknown_shape
def test_batch_jacobian_unknown_shape(self):
  with self.test_session() as sess:
    x = array_ops.placeholder(dtypes.float32)
    y = x * x
    batch_jacobian_pfor = gradients.batch_jacobian(y, x, use_pfor=True)
    batch_jacobian_while = gradients.batch_jacobian(y, x, use_pfor=False)
    two_x = 2 * x
    answer = array_ops.stack(
        [array_ops.diag(two_x[0]),
         array_ops.diag(two_x[1])])
    ans, pfor_value, while_value = sess.run(
        [answer, batch_jacobian_pfor, batch_jacobian_while],
        feed_dict={x: [[1, 2], [3, 4]]})
    self.assertAllClose(ans, pfor_value)
    self.assertAllClose(ans, while_value)
Author: LongJun123456, Project: tensorflow, Lines of code: 15, Source: gradients_test.py
Example 5: test_batch_jacobian_bad_unknown_shapes
def test_batch_jacobian_bad_unknown_shapes(self):
  with self.test_session() as sess:
    x = array_ops.placeholder(dtypes.float32)
    y = array_ops.concat([x, x], axis=0)
    jacobian = gradients.batch_jacobian(y, x)
    # Concatenating along axis 0 doubles the leading (batch) dimension, so the
    # output batch size no longer matches the input batch size. Because the
    # shapes are unknown at graph construction time, the mismatch is only
    # caught by a runtime assertion.
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "assertion failed"):
      sess.run(jacobian, feed_dict={x: [[1, 2], [3, 4]]})
Author: LongJun123456, Project: tensorflow, Lines of code: 8, Source: gradients_test.py
Example 6: create_dynamic_lstm_batch_jacobian
def create_dynamic_lstm_batch_jacobian(batch_size, state_size, max_steps):
  inp, (_, final_state) = dynamic_lstm_model_fn(batch_size, state_size,
                                                max_steps)
  pfor_jacobian = gradients.batch_jacobian(final_state.c, inp, use_pfor=True)
  # Note that use_pfor=False does not work above given the current limitations
  # on implementation of while_loop. So we statically unroll the looping in the
  # jacobian computation.
  while_gradients = [
      gradient_ops.gradients(array_ops.gather(final_state.c, i, axis=1), inp)[0]
      for i in range(state_size)
  ]
  return pfor_jacobian, while_gradients
Author: LongJun123456, Project: tensorflow, Lines of code: 12, Source: gradients_test.py
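A hedged follow-up (a sketch, not part of the excerpt above): since each element of while_gradients is the gradient of one column of final_state.c, while_gradients[i] should agree with the i-th slice of pfor_jacobian along the state dimension. Assuming the imports listed near the top plus numpy, and illustrative sizes chosen by me, a comparison could look roughly like this:

import numpy as np
from tensorflow.python.ops import variables

# Illustrative sizes; any small values work here (my choice, not from the page).
batch_size, state_size, max_steps = 8, 4, 10
pfor_jacobian, while_gradients = create_dynamic_lstm_batch_jacobian(
    batch_size, state_size, max_steps)
with session.Session() as sess:
  sess.run(variables.global_variables_initializer())  # LSTM weights need init
  pfor_val = sess.run(pfor_jacobian)
  for i in range(state_size):
    # while_gradients[i] ~ d final_state.c[:, i] / d inp, one row per example.
    while_val = sess.run(while_gradients[i])
    np.testing.assert_allclose(while_val, pfor_val[:, i, ...],
                               rtol=1e-4, atol=1e-4)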
Example 7: create_fc_batch_jacobian
def create_fc_batch_jacobian(batch_size, activation_size, num_layers):
  inp, output = fully_connected_model_fn(batch_size, activation_size,
                                         num_layers)
  pfor_jacobian = gradients.batch_jacobian(output, inp, use_pfor=True)
  while_jacobian = gradients.batch_jacobian(output, inp, use_pfor=False)
  return pfor_jacobian, while_jacobian
Author: LongJun123456, Project: tensorflow, Lines of code: 6, Source: gradients_test.py
Example 8: test_batch_jacobian_bad_shapes
def test_batch_jacobian_bad_shapes(self):
  x = random_ops.random_uniform([2, 2])
  y = random_ops.random_uniform([3, 2])
  # The first (batch) dimensions of y and x differ (3 vs. 2) and are statically
  # known, so batch_jacobian rejects the pair at graph construction time.
  with self.assertRaisesRegexp(ValueError, "Need first dimension of output"):
    gradients.batch_jacobian(y, x, use_pfor=True)
Author: LongJun123456, Project: tensorflow, Lines of code: 5, Source: gradients_test.py
Example 9: create_lstm_batch_jacobian
def create_lstm_batch_jacobian(batch_size, state_size, steps):
  inp, output = lstm_model_fn(batch_size, state_size, steps)
  pfor_jacobian = gradients.batch_jacobian(output, inp, use_pfor=True)
  while_jacobian = gradients.batch_jacobian(output, inp, use_pfor=False)
  return pfor_jacobian, while_jacobian
Author: LongJun123456, Project: tensorflow, Lines of code: 5, Source: gradients_test.py
Example 10: test_batch_jacobian_parallel_iterations
def test_batch_jacobian_parallel_iterations(self):
  x = constant_op.constant([[1., 2], [3, 4]])
  w = constant_op.constant([[1., 2, 3, 4], [5, 6, 7, 8]])
  y = math_ops.matmul(x, w)
  # parallel_iterations only limits how many pfor iterations are dispatched at
  # once; it should not change the computed Jacobian.
  self.assertAllClose(gradients.batch_jacobian(y, x, parallel_iterations=2),
                      gradients.batch_jacobian(y, x, parallel_iterations=3))
Author: aeverall, Project: tensorflow, Lines of code: 6, Source: gradients_test.py
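As a follow-up sketch (not in the original test): for y = matmul(x, w) the per-example Jacobian is analytic, since d y[b, j] / d x[b, i] = w[i, j] independently of b, so the batch Jacobian is simply transpose(w) repeated for every example. Reusing the imports listed near the top, one way to check this by hand:

x = constant_op.constant([[1., 2], [3, 4]])
w = constant_op.constant([[1., 2, 3, 4], [5, 6, 7, 8]])
y = math_ops.matmul(x, w)
jac = gradients.batch_jacobian(y, x, use_pfor=True)  # shape [2, 4, 2]
# Expected value: transpose(w) tiled over the batch dimension.
expected = array_ops.tile(
    array_ops.expand_dims(array_ops.transpose(w), 0), [2, 1, 1])
with session.Session() as sess:
  jac_val, expected_val = sess.run([jac, expected])
  # The two arrays should match elementwise.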
Note: The tensorflow.python.ops.parallel_for.gradients.batch_jacobian examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution or use should follow the corresponding project's License. Please do not republish without permission.