本文整理汇总了Python中theano.gof.graph.inputs函数的典型用法代码示例。如果您正苦于以下问题:Python inputs函数的具体用法?Python inputs怎么用?Python inputs使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了inputs函数的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: test_not_destructive
def test_not_destructive(self):
    """Mutating a cloned graph must leave the original graph unchanged."""
    r1, r2, r5 = MyVariable(1), MyVariable(2), MyVariable(5)
    inner = MyOp.make_node(r1, r2)
    node = MyOp.make_node(inner.outputs[0], r5)
    _, cloned = clone([r1, r2, r5], node.outputs, False)
    cloned_node = cloned[0].owner
    cloned_node.inputs = MyVariable(7), MyVariable(8)
    # The clone reflects the mutation...
    assert self.str(inputs(cloned_node.outputs), cloned_node.outputs) == ["MyOp(R7, R8)"]
    # ...while the original is untouched.
    assert self.str(inputs(node.outputs), node.outputs) == ["MyOp(MyOp(R1, R2), R5)"]
开发者ID:huamichaelchen,项目名称:Theano,代码行数:9,代码来源:test_graph.py
示例2: check_parameter
def check_parameter(name, value):
    """Check, convert and extract the inputs of a parameter value.

    Scalars, lists and ndarrays are wrapped into a named Theano shared
    variable; Theano expressions are traversed to collect the base
    variables they depend on.

    Parameters
    ----------
    name : string
        The parameter name (used to name a newly created shared variable).
    value : theano expression, ndarray, list or scalar
        The parameter value.

    Returns
    -------
    (value, parameters, constants, observeds) :
        The (possibly wrapped) expression, the set of shared variables it
        depends on, the set of constants, and the set of named free
        (observed) variables.
    """
    parameters = set()
    constants = set()
    observeds = set()

    if isinstance(value, SharedVariable):
        parameters.add(value)
    elif isinstance(value, T.TensorConstant):
        constants.add(value)
    elif isinstance(value, T.TensorVariable):
        # Walk the expression graph and classify its leaf inputs.
        for var in graph.inputs([value]):
            if isinstance(var, SharedVariable):
                parameters.add(var)
            elif isinstance(var, T.TensorConstant):
                constants.add(var)
            elif isinstance(var, T.TensorVariable):
                if not var.name:
                    raise ValueError("Observed variables must be named.")
                observeds.add(var)
    else:
        # Convert lists to ndarrays so they can be wrapped as shared
        # variables too (resolves the previous "XXX allow for lists" note).
        if isinstance(value, list):
            value = np.asarray(value)
        if isinstance(value, np.ndarray):
            value = theano.shared(value, name=name)
        else:
            value = theano.shared(float(value), name=name)
        parameters.add(value)

    return value, parameters, constants, observeds
开发者ID:glouppe,项目名称:carl,代码行数:32,代码来源:base.py
示例3: elemwise_logp
def elemwise_logp(model, var):
    """Compile a function evaluating the sum of the model factors that depend on `var`."""
    dependent = [term for term in model.factors if var in inputs([term])]
    p = function(model.vars, builtin_sum(dependent))

    def fn(x):
        return p(**x)

    return fn
开发者ID:B-Rich,项目名称:mcex,代码行数:7,代码来源:gibbs.py
示例4: predict
def predict():
    """
    An example of how to load a trained model and use it
    to predict labels.
    """
    # `with` guarantees the file is closed even if unpickling raises
    # (the original left the handle open on error).
    with open('best_model_linear.pkl', 'rb') as model_file:
        classifier = pickle.load(model_file)
    y_pred = classifier.y_pred
    # Find the free inputs of the theano graph...
    graph_inputs = graph.inputs([y_pred])
    # ...and select only the data input named 'x'.
    graph_inputs = [item for item in graph_inputs if item.name == 'x']
    # Compile a predictor function.
    predict_model = theano.function(
        inputs=graph_inputs,
        outputs=y_pred)
    # Synthetic test data: two blocks of 1000 samples x 500 features,
    # drawn from shifted uniform distributions.
    X_test = np.random.rand(1000, 500) * .75 + .25
    X_test = np.append(X_test, np.random.rand(1000, 500) * .75, axis=0)
    predicted_values = predict_model(X_test)
    print("Predicted values for the first 10 examples in test set:")
    plt.hist(predicted_values)
    print(predicted_values)
开发者ID:mkarki2,项目名称:pycharmproject,代码行数:32,代码来源:neural_network_regression.py
示例5: check_parameter
def check_parameter(name, value):
    """Check, convert and extract inputs of a parameter value.

    This function wraps scalars or lists into a Theano shared variable, then
    acting as a parameter. Theano expressions are left unchanged.

    Parameters
    ----------
    * `name` [string]:
        The parameter name.
    * `value` [theano expression, list or scalar]:
        The parameter value.

    Returns
    -------
    * `value` [theano expression]:
        The parameter expression.
    * `parameters` [set of theano shared variables]:
        Set of base shared variables on which `value` depends.
    * `constants` [set of theano constants]:
        Set of base constants on which `value` depends.
    * `observeds` [set of theano tensor variables]:
        Set of base unset variables on which `value` depends.
    """
    parameters = set()
    constants = set()
    observeds = set()

    if isinstance(value, SharedVariable):
        parameters.add(value)
    elif isinstance(value, T.TensorConstant):
        constants.add(value)
    elif isinstance(value, T.TensorVariable):
        # Walk the expression graph and classify its leaf inputs.
        for var in graph.inputs([value]):
            if isinstance(var, SharedVariable):
                parameters.add(var)
            elif isinstance(var, T.TensorConstant):
                constants.add(var)
            elif isinstance(var, T.TensorVariable):
                if not var.name:
                    raise ValueError("Observed variables must be named.")
                observeds.add(var)
    else:
        if isinstance(value, list):
            # BUG FIX: np.ndarray(value) interprets a list as a *shape* and
            # returns an uninitialized array; np.asarray converts the data.
            value = np.asarray(value)
        if isinstance(value, np.ndarray):
            value = theano.shared(value, name=name)
        else:
            value = theano.shared(float(value), name=name)
        parameters.add(value)

    return value, parameters, constants, observeds
开发者ID:betatim,项目名称:carl,代码行数:60,代码来源:base.py
示例6: _get_variables
def _get_variables(self):
    """Collect variables, updates and auxiliary variables.

    In addition collects all :class:`.Scan` ops and recurses in the
    respective inner Theano graphs.
    """
    updates = OrderedDict()

    # Outputs that are themselves shared variables need no graph traversal.
    shared_outputs = [o for o in self.outputs if is_shared_variable(o)]
    usual_outputs = [o for o in self.outputs if not is_shared_variable(o)]
    variables = shared_outputs

    if usual_outputs:
        # Sort apply nodes topologically, get variables and remove
        # duplicates
        inputs = graph.inputs(self.outputs)
        sorted_apply_nodes = graph.io_toposort(inputs, usual_outputs)
        self.scans = list(unique([node.op for node in sorted_apply_nodes
                                  if isinstance(node.op, Scan)],
                                 key=lambda op: id(op)))
        self._scan_graphs = [ComputationGraph(scan.outputs)
                             for scan in self.scans]

        seen = set()
        # Order-preserving dedup trick: `seen.add(var)` returns None
        # (falsy), so each variable passes the filter exactly once.
        main_vars = (
            [var for var in list(chain(
                *[apply_node.inputs for apply_node in sorted_apply_nodes]))
             if not (var in seen or seen.add(var))] +
            [var for var in self.outputs if var not in seen])

        # While preserving order add auxiliary variables, and collect
        # updates
        seen = set()
        # Intermediate variables could be auxiliary
        seen_avs = set(main_vars)
        variables = []
        for var in main_vars:
            variables.append(var)
            for annotation in getattr(var.tag, 'annotations', []):
                if annotation not in seen:
                    seen.add(annotation)
                    new_avs = [
                        av for av in annotation.auxiliary_variables
                        if not (av in seen_avs or seen_avs.add(av))]
                    variables.extend(new_avs)
                    updates = dict_union(updates, annotation.updates)

    # If shared_variables is assigned default_update (cloned), we cannot
    # eval() it to get the real numpy array value, hence, try to trace back
    # original shared variable
    def shared_variable_filter(var):
        if is_shared_variable(var) and hasattr(var, 'default_update'):
            # Use getattr with a default, mirroring the annotation lookup
            # above, so tags without annotations don't raise.
            for annotation in getattr(var.tag, 'annotations', []):
                if hasattr(annotation, var.name) and \
                        is_shared_variable(getattr(annotation, var.name)):
                    return getattr(annotation, var.name)
        return var

    # Materialize as a list: on Python 3 a bare map() is a one-shot
    # iterator, which would break repeated iteration over self.variables.
    self.variables = [shared_variable_filter(var) for var in variables]
    self.updates = updates
开发者ID:trungnt13,项目名称:blocks,代码行数:60,代码来源:__init__.py
示例7: inputvars
def inputvars(a):
    """
    Get the inputs into a theano variables

    Parameters
    ----------
    a : theano variable

    Returns
    -------
    r : list of tensor variables that are inputs
    """
    result = []
    for candidate in inputs(makeiter(a)):
        if isinstance(candidate, t.TensorVariable):
            result.append(candidate)
    return result
开发者ID:21hub,项目名称:pymc3,代码行数:13,代码来源:theanof.py
示例8: _get_variables
def _get_variables(self):
    """Collect variables, updates and auxiliary variables.

    In addition collects all :class:`.Scan` ops and recurses in the
    respective inner Theano graphs.
    """
    updates = OrderedDict()

    # Outputs that are themselves shared variables need no graph traversal.
    shared_outputs = [o for o in self.outputs if is_shared_variable(o)]
    usual_outputs = [o for o in self.outputs if not is_shared_variable(o)]
    variables = shared_outputs

    if usual_outputs:
        # Sort apply nodes topologically, get variables and remove
        # duplicates
        inputs = graph.inputs(self.outputs)
        self.sorted_apply_nodes = graph.io_toposort(inputs, usual_outputs)
        # NOTE(review): `unique` presumably deduplicates while preserving
        # order — confirm against its definition elsewhere in the package.
        self.scans = list(unique([node.op for node in self.sorted_apply_nodes
                                  if isinstance(node.op, Scan)]))
        self.sorted_scan_nodes = [node for node in self.sorted_apply_nodes
                                  if isinstance(node.op, Scan)]
        # Recurse into each Scan's inner graph.
        self._scan_graphs = [ComputationGraph(scan.outputs)
                             for scan in self.scans]

        seen = set()
        # Order-preserving dedup trick: `seen.add(var)` returns None
        # (falsy), so each variable passes the filter exactly once.
        main_vars = (
            [var for var in list(chain(
                *[apply_node.inputs for apply_node in self.sorted_apply_nodes]))
             if not (var in seen or seen.add(var))] +
            [var for var in self.outputs if var not in seen])

        # While preserving order add auxiliary variables, and collect
        # updates
        seen = set()
        # Intermediate variables could be auxiliary
        seen_avs = set(main_vars)
        variables = []
        for var in main_vars:
            variables.append(var)
            for annotation in getattr(var.tag, 'annotations', []):
                if annotation not in seen:
                    seen.add(annotation)
                    new_avs = [
                        av for av in annotation.auxiliary_variables
                        if not (av in seen_avs or seen_avs.add(av))]
                    variables.extend(new_avs)
                    updates = dict_union(updates, annotation.updates)

    self.variables = variables
    self.updates = updates
开发者ID:ixtel,项目名称:attention-lvcsr,代码行数:51,代码来源:graph.py
示例9: predict
def predict(X_test, filename='best_model_actual_data.pkl'):
    """Load a pickled classifier and return its predictions for `X_test`.

    Parameters
    ----------
    X_test : array-like
        Input samples; cast to float32 before prediction.
    filename : str
        Path to the pickled trained classifier.

    Returns
    -------
    Predicted values produced by the compiled Theano function.
    """
    # `with` guarantees the file handle is closed even if unpickling raises
    # (the original left it open on error).
    with open(filename, 'rb') as model_file:
        classifier = pickle.load(model_file)
    y_pred = classifier.y_pred
    # Find the free inputs of the theano graph and keep only the one named 'x'.
    graph_inputs = [item for item in graph.inputs([y_pred]) if item.name == 'x']
    # Compile a predictor function.
    predict_model = theano.function(
        inputs=graph_inputs,
        outputs=y_pred)
    return predict_model(X_test.astype(numpy.float32))
开发者ID:mkarki2,项目名称:pycharmproject,代码行数:19,代码来源:DBN_fa.py
示例10: test_inputs_deep
def test_inputs_deep(self):
    """`inputs` must reach through nested applies down to the graph leaves."""
    r1, r2, r5 = MyVariable(1), MyVariable(2), MyVariable(5)
    inner = MyOp.make_node(r1, r2)
    outer = MyOp.make_node(inner.outputs[0], r5)
    leaves = inputs(outer.outputs)
    assert leaves == [r1, r2, r5], leaves
开发者ID:huamichaelchen,项目名称:Theano,代码行数:6,代码来源:test_graph.py
示例11: test_inputs
def test_inputs(self):
    """`inputs` on an apply's outputs yields that apply's input variables."""
    v1, v2 = MyVariable(1), MyVariable(2)
    apply_node = MyOp.make_node(v1, v2)
    result = inputs(apply_node.outputs)
    assert result == [v1, v2]
开发者ID:huamichaelchen,项目名称:Theano,代码行数:4,代码来源:test_graph.py
示例12: elemwise_logp
def elemwise_logp(model, var):
    """Sum the model factors whose graphs depend on `var`."""
    dependent = filter(lambda term: var in inputs([term]), model.factors)
    return add(*dependent)
开发者ID:Jfeng3,项目名称:pymc,代码行数:3,代码来源:gibbs.py
注:本文中的theano.gof.graph.inputs函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论