This article collects typical usage examples of the jacobian function from tensorflow.python.ops.parallel_for.gradients. If you are unsure what the Python jacobian function does, how to call it, or what real-world usage looks like, the curated code examples below may help.
Eight code examples of the jacobian function are shown below, sorted by popularity by default.
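As a quick orientation, here is a minimal, self-contained sketch of a typical gradients.jacobian call (a sketch only, assuming TensorFlow 1.x graph mode; the tensor names x, y, and j are illustrative and do not come from the examples below):

import tensorflow as tf
from tensorflow.python.ops.parallel_for import gradients

x = tf.constant([[1., 2.], [3., 4.]])
y = tf.matmul(x, x)  # y has shape [2, 2]
# jacobian stacks dy[i, j]/dx into a tensor of shape y.shape + x.shape.
j = gradients.jacobian(y, x, use_pfor=True)  # shape [2, 2, 2, 2]

with tf.Session() as sess:
  print(sess.run(j).shape)  # (2, 2, 2, 2)

The use_pfor flag selects the parallel-for backend; use_pfor=False falls back to a while_loop-based implementation, which several of the examples below compare against.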
Example 1: test_jacobian_fixed_shape
def test_jacobian_fixed_shape(self):
  x = random_ops.random_uniform([2, 2])
  y = math_ops.matmul(x, x, transpose_a=True)
  # Compute the Jacobian dy/dx with both the pfor and the while_loop backends.
  jacobian_pfor = gradients.jacobian(y, x, use_pfor=True)
  jacobian_while = gradients.jacobian(y, x, use_pfor=False)
  # Reference answer: element-wise gradients stacked into a [2, 2, 2, 2] tensor.
  answer = ops.convert_to_tensor([[
      gradient_ops.gradients(y[0][0], x)[0],
      gradient_ops.gradients(y[0][1], x)[0]
  ], [
      gradient_ops.gradients(y[1][0], x)[0],
      gradient_ops.gradients(y[1][1], x)[0]
  ]])
  self.run_and_assert_equal(answer, jacobian_pfor)
  self.run_and_assert_equal(answer, jacobian_while)
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 14, Source: gradients_test.py
Example 2: create_lstm_hessian
def create_lstm_hessian(batch_size, state_size, steps):
  _, output = lstm_model_fn(batch_size, state_size, steps)
  weights = variables.trainable_variables()
  # The Hessian is the Jacobian of the Jacobian with respect to the same weights.
  pfor_jacobians = gradients.jacobian(output, weights, use_pfor=True)
  pfor_hessians = [
      gradients.jacobian(x, weights, use_pfor=True) for x in pfor_jacobians
  ]
  # TODO(agarwal): using two nested while_loop doesn't seem to work here.
  # Hence we use pfor_jacobians for computing while_hessians.
  while_jacobians = pfor_jacobians
  while_hessians = [
      gradients.jacobian(x, weights, use_pfor=False) for x in while_jacobians
  ]
  return pfor_hessians, while_hessians
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 14, Source: gradients_test.py
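Example 2 depends on an lstm_model_fn helper that is not shown on this page. The nested-jacobian pattern it demonstrates (a Hessian is the Jacobian of the Jacobian) can be illustrated with a smaller, self-contained sketch; the quadratic loss below is purely illustrative and assumes TensorFlow 1.x graph mode:

import tensorflow as tf
from tensorflow.python.ops.parallel_for import gradients

w = tf.Variable([1., 2., 3.])
loss = tf.reduce_sum(w * w)            # scalar loss
grad = gradients.jacobian(loss, w)     # shape [3]: first derivatives
hessian = gradients.jacobian(grad, w)  # shape [3, 3]: second derivatives

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  print(sess.run(hessian))             # 2 * identity matrix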
Example 3: test_jacobian_unknown_shape
def test_jacobian_unknown_shape(self):
  with self.test_session() as sess:
    x = array_ops.placeholder(dtypes.float32, shape=[None, None])
    y = math_ops.matmul(x, x, transpose_a=True)
    # The Jacobian can be built even when the static shape of x is unknown.
    jacobian_pfor = gradients.jacobian(y, x, use_pfor=True)
    jacobian_while = gradients.jacobian(y, x, use_pfor=False)
    answer = ops.convert_to_tensor([[
        gradient_ops.gradients(y[0][0], x)[0],
        gradient_ops.gradients(y[0][1], x)[0]
    ], [
        gradient_ops.gradients(y[1][0], x)[0],
        gradient_ops.gradients(y[1][1], x)[0]
    ]])
    ans, pfor_value, while_value = sess.run(
        [answer, jacobian_pfor, jacobian_while],
        feed_dict={x: [[1, 2], [3, 4]]})
    self.assertAllClose(ans, pfor_value)
    self.assertAllClose(ans, while_value)
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 18, Source: gradients_test.py
Example 4: test_jacobian_scan_shape
def test_jacobian_scan_shape(self):
  # Shape x: [3, 4]
  x = random_ops.random_uniform([3, 4])
  elems = random_ops.random_uniform([6])
  # Shape y: [6, 3, 4]
  y = functional_ops.scan(lambda a, e: a + e, elems, initializer=x)
  jacobian = gradients.jacobian(y, x)
  # The Jacobian shape is y.shape + x.shape.
  expected_shape = [6, 3, 4, 3, 4]
  self.assertAllEqual(expected_shape, jacobian.shape.as_list())
Developer ID: aeverall, Project: tensorflow, Lines of code: 10, Source: gradients_test.py
Example 5: test_jacobian_while_loop_shape
def test_jacobian_while_loop_shape(self):
  # Shape x: [3, 4]
  x = random_ops.random_uniform([3, 4])
  _, y = tf_control_flow_ops.while_loop(lambda i, a: i > 5.,
                                        lambda i, a: (i + 1, a + i),
                                        (constant_op.constant(0.), x))
  # Shape y: [2, 3]
  y = y[:2, :3]
  jacobian = gradients.jacobian(y, x)
  expected_shape = [2, 3, 3, 4]
  self.assertAllEqual(expected_shape, jacobian.shape.as_list())
Developer ID: aeverall, Project: tensorflow, Lines of code: 12, Source: gradients_test.py
Example 6: create_fc_per_eg_jacobians
def create_fc_per_eg_jacobians(batch_size, activation_size, num_layers):
  model = FullyConnectedModel(activation_size=activation_size,
                              num_layers=num_layers)
  inp = random_ops.random_normal([batch_size, activation_size])
  output = model(inp)
  # Full-batch Jacobians of the model output w.r.t. all trainable variables.
  jacobians = gradients.jacobian(output, variables.trainable_variables())

  # Per-example Jacobians: run the model on a single example and differentiate.
  def loop_fn(i, use_pfor):
    inp_i = array_ops.expand_dims(array_ops.gather(inp, i), 0)
    output = array_ops.reshape(model(inp_i), [-1])
    return gradients.jacobian(
        output, variables.trainable_variables(), use_pfor=use_pfor)

  per_eg_jacobians_pfor = control_flow_ops.pfor(
      functools.partial(loop_fn, use_pfor=True),
      batch_size)
  per_eg_jacobians_while = control_flow_ops.for_loop(
      functools.partial(loop_fn, use_pfor=False),
      [dtypes.float32] * len(variables.trainable_variables()), batch_size)
  return jacobians, per_eg_jacobians_pfor, per_eg_jacobians_while
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 20, Source: gradients_test.py
Example 7: loop_fn
def loop_fn(i, use_pfor):
  # `inp` and `model` are captured from the enclosing scope (see Example 6).
  inp_i = array_ops.expand_dims(array_ops.gather(inp, i), 0)
  output = array_ops.reshape(model(inp_i), [-1])
  return gradients.jacobian(
      output, variables.trainable_variables(), use_pfor=use_pfor)
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 5, Source: gradients_test.py
Example 8: test_jacobian_parallel_iterations
def test_jacobian_parallel_iterations(self):
  x = constant_op.constant([[1., 2], [3, 4]])
  y = math_ops.matmul(x, x)
  # The result should not depend on the parallel_iterations setting.
  self.assertAllClose(gradients.jacobian(y, x, parallel_iterations=2),
                      gradients.jacobian(y, x, parallel_iterations=3))
Developer ID: aeverall, Project: tensorflow, Lines of code: 5, Source: gradients_test.py
Note: The tensorflow.python.ops.parallel_for.gradients.jacobian examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution and use should follow the corresponding projects' licenses. Do not republish without permission.