本文整理汇总了Python中theano.compile.pfunc.rebuild_collect_shared函数的典型用法代码示例。如果您正苦于以下问题:Python rebuild_collect_shared函数的具体用法?Python rebuild_collect_shared怎么用?Python rebuild_collect_shared使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了rebuild_collect_shared函数的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: clone
def clone(output,
          replace=None,
          strict=True,
          share_inputs=True,
          copy_inputs=DEPRECATED_ARG):
    """
    Replace subgraphs of a computational graph and return the copy.

    Parameters
    ----------
    output : Theano Variables (or Theano expressions)
        Theano expression that represents the computational graph.
    replace : dict
        Dictionary describing which subgraphs should be replaced by what.
    share_inputs : bool
        If True, use the same inputs (and shared variables) as the original
        graph. If False, clone them. Note that cloned shared variables still
        use the same underlying storage, so they will always have the same
        value.
    copy_inputs
        Deprecated, use share_inputs.
    """
    if copy_inputs is not DEPRECATED_ARG:
        warnings.warn('In `clone()` function, the argument `copy_inputs` has been deprecated and renamed into `share_inputs`')
        # When the deprecated keyword is given, `share_inputs` must still be
        # at its default value -- otherwise the caller passed both.
        assert share_inputs
        share_inputs = copy_inputs

    # Normalise `replace` into a list of (old, new) pairs.
    if replace is None:
        pairs = []
    elif isinstance(replace, dict):
        pairs = list(replace.items())
    elif isinstance(replace, (list, tuple)):
        pairs = list(replace)
    else:
        raise ValueError(
            "replace is neither a dictionary, list, tuple or None !"
            " The value provided is %s,of type %s"
            % (str(replace), str(type(replace))))

    # Substitute in two passes through fresh placeholder variables: first
    # old -> placeholder, then placeholder -> new.  Presumably this keeps a
    # replacement value from itself being rewritten when it contains one of
    # the targeted subgraphs -- TODO confirm (the original also left this
    # double call unexplained).
    first_pass = [(old, old.type()) for old, _ in pairs]
    second_pass = [(ph, new) for (_, ph), (_, new) in zip(first_pass, pairs)]
    _, intermediate, _ = rebuild_collect_shared(output,
                                                [],
                                                first_pass,
                                                [],
                                                strict,
                                                share_inputs)
    _, outs, _ = rebuild_collect_shared(intermediate,
                                        [],
                                        second_pass,
                                        [],
                                        strict,
                                        share_inputs)
    return outs
开发者ID:Faruk-Ahmed,项目名称:Theano,代码行数:60,代码来源:scan_utils.py
示例2: clone
def clone(output,
          replace=None,
          strict=True,
          copy_inputs=True):
    """
    Return a copy of the computational graph represented by `output`,
    with the substitutions described by `replace` applied.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
        graph
    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
        replaced by what
    :type copy_inputs: bool
    :param copy_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    # Normalise `replace` into a sequence of (old, new) pairs.
    if replace is None:
        pairs = []
    elif isinstance(replace, dict):
        pairs = list(replace.items())
    elif isinstance(replace, (list, tuple)):
        pairs = list(replace)
    else:
        raise ValueError(
            "replace is neither a dictionary, list, tuple or None !"
            " The value provided is %s,of type %s"
            % (str(replace), str(type(replace))))

    # Two-pass substitution through fresh placeholders of matching type:
    # old -> placeholder, then placeholder -> new.  Presumably this keeps
    # replacement values from being rewritten themselves -- TODO confirm.
    first_pass = [(old, old.type()) for old, _ in pairs]
    second_pass = [(ph, new) for (_, ph), (_, new) in zip(first_pass, pairs)]
    _, intermediate, _ = rebuild_collect_shared(output,
                                                [],
                                                first_pass,
                                                [],
                                                strict,
                                                copy_inputs)
    _, outs, _ = rebuild_collect_shared(intermediate,
                                        [],
                                        second_pass,
                                        [],
                                        strict,
                                        copy_inputs)
    return outs
开发者ID:Yangqing,项目名称:Theano,代码行数:51,代码来源:scan_utils.py
示例3: clone
def clone(output, replace=None, strict=True, share_inputs=True):
    """
    Return a clone of the computational graph `output` with the
    subgraphs listed in `replace` substituted.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
        graph
    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
        replaced by what
    :type share_inputs: bool
    :param share_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    # rebuild_collect_shared returns (inputs, outputs, extras); only the
    # cloned outputs are needed here.
    _, cloned_outputs, _ = rebuild_collect_shared(
        output,
        inputs=[],
        replace=replace,
        updates=[],
        rebuild_strict=strict,
        copy_inputs_over=share_inputs)
    return cloned_outputs
开发者ID:Ambier,项目名称:Theano,代码行数:27,代码来源:scan_utils.py
示例4: clone
def clone(output,
          replace=None,
          strict=True,
          copy_inputs=True):
    """
    Clone the graph computing `output`, substituting the subgraphs
    described by `replace`, and return the cloned outputs.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
        graph
    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
        replaced by what
    """
    rebuilt = rebuild_collect_shared(output,
                                     [],
                                     replace,
                                     [],
                                     strict,
                                     copy_inputs)
    # rebuild_collect_shared returns (inputs, outputs, extras); only the
    # cloned outputs are of interest here.
    return rebuilt[1]
开发者ID:HaniAlmousli,项目名称:Theano,代码行数:26,代码来源:scan_utils.py
示例5: clone
def clone(output, replace=None, strict=True, share_inputs=True, copy_inputs=DEPRECATED_ARG):
    """
    Return a copy of the computational graph `output` with the
    substitutions in `replace` applied.

    :type output: Theano Variables (or Theano expressions)
    :param output: Theano expression that represents the computational
        graph
    :type replace: dict
    :param replace: dictionary describing which subgraphs should be
        replaced by what
    :type share_inputs: bool
    :param share_inputs: If True, use the same inputs (and shared variables)
        as the original graph. If False, clone them. Note that cloned
        shared variables still use the same underlying storage, so they
        will always have the same value.
    """
    if copy_inputs is not DEPRECATED_ARG:
        warnings.warn('In `clone()` function, the argument `copy_inputs` has been deprecated and renamed into `share_inputs`')
        # Using the deprecated keyword implies `share_inputs` kept its
        # default value; otherwise the caller supplied both.
        assert share_inputs
        share_inputs = copy_inputs
    rebuilt_inputs, rebuilt_outputs, _collected = rebuild_collect_shared(
        output,
        [],
        replace,
        [],
        strict,
        share_inputs)
    return rebuilt_outputs
开发者ID:317070,项目名称:Theano,代码行数:32,代码来源:scan_utils.py
示例6: clone_optimized_graph
def clone_optimized_graph(f):
    """
    Clone the (already optimized) graph of the compiled theano function
    `f`, returning ``(ins, outs)``: the non-shared input variables of the
    clone and its cloned output variables.
    """
    # Feed only the non-shared inputs to the rebuild; shared variables are
    # collected by rebuild_collect_shared itself.
    non_shared = [v for v in f.maker.env.inputs
                  if not isinstance(v, theano.tensor.sharedvar.SharedVariable)]
    all_inputs, outs, _ = rebuild_collect_shared(f.maker.env.outputs,
                                                 non_shared,
                                                 copy_inputs_over=False)
    ins = [v for v in all_inputs
           if not isinstance(v, theano.tensor.sharedvar.SharedVariable)]
    return (ins, outs)
开发者ID:HaniAlmousli,项目名称:Theano,代码行数:9,代码来源:test_utils.py
示例7: scan
#.........这里部分代码省略.........
# 3. Generate arguments for the function passed to scan. This will
# function will return the outputs that need to be computed at every
# timesteps
inputs_slices = [input[t] for input in inputs]
states_slices = []
for n, state in enumerate(states_and_outputs_info):
# Check if it is actually a state and not an output
if mintaps[n] != 0:
for k in state['taps']:
states_slices.append(
state['initial'][(t + mintaps[n] + k) % lengths[n]])
# 4. Construct outputs that are to be computed by the inner
# function of scan
args = inputs_slices + states_slices + parameters
cond, states_and_outputs, updates = \
scan_utils.get_updates_and_outputs(fn(*args))
# User is allowed to provide no information if it only behaves like a
# map
if (len(states_and_outputs) != len(states_and_outputs_info) and
len(states_and_outputs_info) == 0):
mintaps = [0] * len(states_and_outputs)
# 5. Construct the scan op
# 5.1 Construct list of shared variables with updates (those that
# can be treated as states (i.e. of TensorType) and those that can not
# (like Random States)
if cond is not None:
_cond = [cond]
else:
_cond = []
rvals = rebuild_collect_shared(
states_and_outputs + _cond,
updates=updates,
rebuild_strict=True,
copy_inputs_over=True,
no_default_updates=False)
# extracting the arguments
input_variables, cloned_outputs, other_rval = rvals
clone_d, update_d, update_expr, shared_inputs = other_rval
additional_input_states = []
additional_output_states = []
additional_lengths = []
additional_mintaps = []
original_numeric_shared_variables = []
non_numeric_input_states = []
non_numeric_output_states = []
original_non_numeric_shared_variables = []
pos = len(lengths)
for sv in shared_inputs:
if sv in update_d:
if isinstance(sv, (TensorVariable, TensorSharedVariable)):
# We can treat it as a sit sot
nw_state = scan_utils.expand(
tensor.unbroadcast(tensor.shape_padleft(sv), 0), T)
additional_lengths.append(scalar_shared(numpy.int64(0),
name='l%d' % pos))
pos = pos + 1
additional_mintaps.append(1)
additional_input_states.append(nw_state)
additional_output_states.append(
scan_utils.clone(tensor.set_subtensor(
开发者ID:HaniAlmousli,项目名称:Theano,代码行数:67,代码来源:scan.py
示例8: __init__
def __init__(self, updated_vars, givens=None, accept_inplace=False):
    """
    Build a double-buffered graph that applies `updated_vars` as a batch.

    Parameters
    ----------
    updated_vars : sequence of (dst, expr) pairs
        Destination variables and the expressions whose values they
        should receive.
    givens : dict, optional
        Substitutions applied while cloning the update expressions.
    accept_inplace : bool, optional
        If False (the default), raise TypeError when the cloned graph
        contains inplace (destructive) operations; if True, attach a
        DestroyHandler so such a graph can still be handled.
        BUGFIX: this name was previously read but never defined anywhere,
        so any graph containing a destructive op crashed with NameError.
    """
    # -- unique_outputs is used here to ensure that there is some
    # double-buffering going on, because actually dests and outputs can
    # include some of the same variables (e.g. swap values)
    dests, outputs = zip(*updated_vars)
    #unique_outputs = map(deep_copy_op, outputs)
    unique_outputs = outputs
    # -- partial graph clone to use givens
    stuff = rebuild_collect_shared(
        unique_outputs,
        inputs=list(dests) + [],
        replace=givens,
        rebuild_strict=True,
        copy_inputs_over=True)
    _inputs, unique_outputs_w_giv, other_stuff = stuff
    clone_equiv1, _update_d, _update_expr, _shared_inputs = other_stuff
    all_inputs = theano.gof.graph.inputs(unique_outputs_w_giv + _inputs)
    # -- full graph clone to protect original graph
    clone_equiv = {}  # -- do not need order here
    theano.gof.graph.clone_get_equiv(
        [],
        unique_outputs_w_giv + _inputs,
        copy_inputs_and_orphans=True,
        memo=clone_equiv)
    # -- redirect through the second clone so `clone_equiv` maps original
    # variables all the way to the fully protected copies
    for orig_var in clone_equiv1:
        tmp = clone_equiv1[orig_var]
        if tmp in clone_equiv:
            clone_equiv[orig_var] = clone_equiv[tmp]
    self.cloned_inputs = [clone_equiv[var] for var in all_inputs]
    self.cloned_dests = [clone_equiv[var] for var in dests]
    self.cloned_outputs = [clone_equiv[var] for var in unique_outputs_w_giv]
    fgraph = theano.gof.fg.FunctionGraph(
        self.cloned_inputs,
        self.cloned_outputs)
    # -- load up fgraph with features necessary to maintain correctness:
    for node in fgraph.apply_nodes:
        if getattr(node.op, 'destroy_map', None):
            if not accept_inplace:
                raise TypeError("Graph must not contain inplace operations",
                                node, node.op)
            else:
                fgraph.attach_feature(theano.gof.DestroyHandler())
                break
    # We need to protect all immutable inputs from inplace operations.
    # BUGFIX: the original passed the *builtin* `input` to
    # fgraph.destroyers(); it must be the generator variable `invar`.
    fgraph.attach_feature(
        theano.compile.function_module.Supervisor(invar
            for invar in self.cloned_inputs
            if not ((invar in self.cloned_dests) or
                    (hasattr(fgraph, 'destroyers') and
                     fgraph.destroyers(invar)))))
    # If named nodes are replaced, keep the name
    for feature in theano.compile.function_module.std_fgraph.features:
        fgraph.attach_feature(feature())
    fgraph.attach_feature(theano.tensor.opt.ShapeFeature())
    # -- pre-install the shape information from the Hints created by
    # e.g. SharedStorageWorkspace
    done = {}  # -- no order ok
    for node in fgraph.toposort():
        if is_hint_node(node):
            if node.inputs[0] in done:
                continue
            hints = OrderedDict(node.op.hints)
            if 'shape' in hints:
                x = node.inputs[0]
                assert x.ndim == len(hints['shape'])
                if x in done:
                    # NOTE(review): unreachable given the `continue` above;
                    # kept to preserve the original control flow.
                    assert done[x] == hints['shape']
                else:
                    var_shape = tuple(
                        map(theano.tensor.as_tensor_variable,
                            hints['shape']))
                    fgraph.shape_feature.shape_of[node.inputs[0]] = var_shape
                    done[x] = hints['shape']
    self.updated_vars = updated_vars
    self.all_inputs = all_inputs
    self.outputs = outputs
    self.unique_outputs = unique_outputs
    self.clone_equiv = clone_equiv
    self.fgraph = fgraph
开发者ID:jaberg,项目名称:theano_workspace,代码行数:94,代码来源:workspace.py
注:本文中的theano.compile.pfunc.rebuild_collect_shared函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论