This article collects typical usage examples of the Python function tensorflow.python.ops.lookup_ops.tables_initializer. If you are unsure what tables_initializer does, how to call it, or what real-world usage looks like, the curated code samples below should help.

A total of 20 code examples of tables_initializer are shown below, sorted by popularity by default.
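Before the examples, here is a minimal illustrative sketch of the pattern they all share: build one or more lookup tables in a TensorFlow 1.x-style graph, then run the single op returned by lookup_ops.tables_initializer() in a session before evaluating any lookups. This sketch is not taken from any of the projects below; it assumes TF 1.x graph mode, and the vocabulary keys and values are made up for illustration. (The public aliases tf.tables_initializer / tf.compat.v1.tables_initializer resolve to the same function.)

from tensorflow.python.client import session
from tensorflow.python.framework import constant_op, dtypes
from tensorflow.python.ops import lookup_ops

keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.HashTable(
    lookup_ops.KeyValueTensorInitializer(keys, values), default_value=-1)
out = table.lookup(constant_op.constant(["salad", "tank"]))

with session.Session() as sess:
  # tables_initializer() groups every table initializer registered in the
  # default graph into one op, so a single run() call initializes them all.
  sess.run(lookup_ops.tables_initializer())
  print(sess.run(out))  # [1, -1]; "tank" is not in the table, so -1 is returned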
Example 1: test_asset_loading

def test_asset_loading(self):
  first_path = self._v1_asset_saved_model()
  imported = load.load(first_path)
  self.evaluate(lookup_ops.tables_initializer())
  fn = imported.signatures["serving_default"]
  self.assertAllClose({"output": [2, 0]},
                      fn(start=constant_op.constant(["gamma", "alpha"])))

  second_path = os.path.join(self.get_temp_dir(), "saved_model",
                             str(ops.uid()))
  save.save(imported, second_path, signatures=imported.signatures)
  shutil.rmtree(first_path)
  del ops.get_collection_ref(ops.GraphKeys.TABLE_INITIALIZERS)[:]
  second_import = load.load(second_path)
  self.evaluate(lookup_ops.tables_initializer())
  fn = second_import.signatures["serving_default"]
  self.assertAllClose({"output": [2, 0]},
                      fn(start=constant_op.constant(["gamma", "alpha"])))

  third_path = os.path.join(self.get_temp_dir(), "saved_model",
                            str(ops.uid()))
  save.save(second_import, third_path, signatures=second_import.signatures)
  shutil.rmtree(second_path)
  del ops.get_collection_ref(ops.GraphKeys.TABLE_INITIALIZERS)[:]
  third_import = load.load(third_path)
  self.evaluate(lookup_ops.tables_initializer())
  fn = third_import.signatures["serving_default"]
  self.assertAllClose({"output": [2, 0]},
                      fn(start=constant_op.constant(["gamma", "alpha"])))

Developer ID: aritratony, Project: tensorflow, Lines of code: 28, Source file: load_v1_in_v2_test.py
Example 2: testIdTableWithHashBucketsWithMultipleInitializersDifferentDefault

def testIdTableWithHashBucketsWithMultipleInitializersDifferentDefault(self):
  vocab_file = self._createVocabFile("feat_to_id_6.txt")
  with self.test_session() as sess:
    default_value1 = -1
    vocab_size = 3
    oov_buckets = 0
    table1 = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.HashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size), default_value1),
        oov_buckets)

    default_value2 = -2
    table2 = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.HashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size), default_value2),
        oov_buckets)

    lookup_ops.tables_initializer().run()

    input_string_1 = constant_op.constant(
        ["brain", "salad", "surgery", "UNK"])
    input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])

    out1 = table1.lookup(input_string_1)
    out2 = table2.lookup(input_string_2)

    out1, out2 = sess.run([out1, out2])
    self.assertAllEqual([0, 1, 2, -1], out1)
    self.assertAllEqual([-2, 1, -2], out2)

    self.assertEquals(vocab_size + oov_buckets, table1.size().eval())
    self.assertEquals(vocab_size + oov_buckets, table2.size().eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 33, Source file: lookup_ops_test.py
Example 3: testMultipleHashTables

def testMultipleHashTables(self):
  with self.test_session() as sess:
    default_val = -1
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)

    table1 = lookup_ops.HashTable(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    table2 = lookup_ops.HashTable(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
    table3 = lookup_ops.HashTable(
        lookup_ops.KeyValueTensorInitializer(keys, values), default_val)

    lookup_ops.tables_initializer().run()
    self.assertAllEqual(3, table1.size().eval())
    self.assertAllEqual(3, table2.size().eval())
    self.assertAllEqual(3, table3.size().eval())

    input_string = constant_op.constant(["brain", "salad", "tank"])
    output1 = table1.lookup(input_string)
    output2 = table2.lookup(input_string)
    output3 = table3.lookup(input_string)

    out1, out2, out3 = sess.run([output1, output2, output3])
    self.assertAllEqual([0, 1, -1], out1)
    self.assertAllEqual([0, 1, -1], out2)
    self.assertAllEqual([0, 1, -1], out3)

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 27, Source file: lookup_ops_test.py
Example 4: test_duplicate_entries

def test_duplicate_entries(self):
  with self.test_session():
    vocabulary_list = constant_op.constant(["hello", "hello"])
    table = lookup_ops.index_to_string_table_from_tensor(
        vocabulary_list=vocabulary_list)
    indices = constant_op.constant([0, 1, 4], dtypes.int64)
    features = table.lookup(indices)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((b"hello", b"hello", b"UNK"), features.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 9, Source file: lookup_ops_test.py
Example 5: test_index_table_from_tensor_empty_vocabulary_list

def test_index_table_from_tensor_empty_vocabulary_list(self):
  with self.test_session():
    table = lookup_ops.index_table_from_tensor(
        vocabulary_list=np.array([], dtype=np.str_), num_oov_buckets=1)
    ids = table.lookup(constant_op.constant(["salad", "surgery", "brain"]))
    self.assertRaises(errors_impl.OpError, ids.eval)
    with self.assertRaisesRegexp(
        errors_impl.OpError, "keys and values cannot be empty"):
      lookup_ops.tables_initializer().run()

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 9, Source file: lookup_ops_test.py
Example 6: test_index_table_from_tensor_with_tensor_init

def test_index_table_from_tensor_with_tensor_init(self):
  with self.test_session():
    table = lookup_ops.index_table_from_tensor(
        vocabulary_list=("brain", "salad", "surgery"), num_oov_buckets=1)
    ids = table.lookup(constant_op.constant(("salad", "surgery", "tarkus")))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, 2, 3), ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 9, Source file: lookup_ops_test.py
Example 7: test_int64_index_table_from_tensor_with_tensor_init

def test_int64_index_table_from_tensor_with_tensor_init(self):
  with self.test_session():
    table = lookup_ops.index_table_from_tensor(
        vocabulary_list=(42, 1, -1000), num_oov_buckets=1, dtype=dtypes.int64)
    ids = table.lookup(
        constant_op.constant((1, -1000, 11), dtype=dtypes.int64))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, 2, 3), ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 10, Source file: lookup_ops_test.py
Example 8: test_index_to_string_table_with_vocab_size

def test_index_to_string_table_with_vocab_size(self):
  vocabulary_file = self._createVocabFile("f2i_vocab7.txt")
  with self.test_session():
    table = lookup_ops.index_to_string_table_from_file(
        vocabulary_file=vocabulary_file, vocab_size=3)
    features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))

    self.assertRaises(errors_impl.OpError, features.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((b"salad", b"surgery", b"UNK"), features.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 10, Source file: lookup_ops_test.py
Example 9: test_string_index_table_from_file

def test_string_index_table_from_file(self):
  vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
  with self.test_session():
    table = lookup_ops.index_table_from_file(
        vocabulary_file=vocabulary_file, num_oov_buckets=1)
    ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, 2, 3), ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 10, Source file: lookup_ops_test.py
Example 10: test_index_table_from_tensor_with_default_value

def test_index_table_from_tensor_with_default_value(self):
  default_value = -42
  with self.test_session():
    table = lookup_ops.index_table_from_tensor(
        vocabulary_list=["brain", "salad", "surgery"],
        default_value=default_value)
    ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, 2, default_value), ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 11, Source file: lookup_ops_test.py
Example 11: test_index_table_from_file_with_vocab_size_too_small

def test_index_table_from_file_with_vocab_size_too_small(self):
  vocabulary_file = self._createVocabFile("f2i_vocab6.txt")
  with self.test_session():
    table = lookup_ops.index_table_from_file(
        vocabulary_file=vocabulary_file, vocab_size=2)
    ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, -1, -1), ids.eval())
    self.assertEqual(2, table.size().eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 11, Source file: lookup_ops_test.py
Example 12: test_index_table_from_file_with_default_value

def test_index_table_from_file_with_default_value(self):
  default_value = -42
  vocabulary_file = self._createVocabFile("f2i_vocab4.txt")
  with self.test_session():
    table = lookup_ops.index_table_from_file(
        vocabulary_file=vocabulary_file, default_value=default_value)
    ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, 2, default_value), ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 11, Source file: lookup_ops_test.py
Example 13: test_index_to_string_table_with_vocab_size_too_small

def test_index_to_string_table_with_vocab_size_too_small(self):
  default_value = b"NONE"
  vocabulary_file = self._createVocabFile("f2i_vocab2.txt")
  with self.test_session():
    table = lookup_ops.index_to_string_table_from_file(
        vocabulary_file=vocabulary_file,
        vocab_size=2,
        default_value=default_value)
    features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))

    self.assertRaises(errors_impl.OpError, features.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((b"salad", default_value, default_value),
                        features.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 13, Source file: lookup_ops_test.py
Example 14: test_index_to_string_with_default_value

def test_index_to_string_with_default_value(self):
  default_value = b"NONE"
  with self.test_session():
    vocabulary_list = constant_op.constant(["brain", "salad", "surgery"])
    table = lookup_ops.index_to_string_table_from_tensor(
        vocabulary_list=vocabulary_list, default_value=default_value)
    indices = constant_op.constant([1, 2, 4], dtypes.int64)
    features = table.lookup(indices)
    self.assertRaises(errors_impl.OpError, features.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((b"salad", b"surgery", default_value),
                        features.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 13, Source file: lookup_ops_test.py
Example 15: test_int64_index_table_from_file

def test_int64_index_table_from_file(self):
  vocabulary_file = self._createVocabFile(
      "f2i_vocab3.txt", values=("42", "1", "-1000"))
  with self.test_session():
    table = lookup_ops.index_table_from_file(
        vocabulary_file=vocabulary_file,
        num_oov_buckets=1,
        key_dtype=dtypes.int64)
    ids = table.lookup(
        constant_op.constant((1, -1000, 11), dtype=dtypes.int64))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual((1, 2, 3), ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 14, Source file: lookup_ops_test.py
Example 16: testDecodeExampleWithBranchedLookup

def testDecodeExampleWithBranchedLookup(self):
  example = example_pb2.Example(features=feature_pb2.Features(feature={
      'image/object/class/text': self._BytesFeatureFromList(
          np.array(['cat', 'dog', 'guinea pig'])),
  }))
  serialized_example = example.SerializeToString()

  # 'dog' -> 0, 'guinea pig' -> 1, 'cat' -> 2
  table = lookup_ops.index_table_from_tensor(
      constant_op.constant(['dog', 'guinea pig', 'cat']))

  with self.test_session() as sess:
    sess.run(lookup_ops.tables_initializer())

    serialized_example = array_ops.reshape(serialized_example, shape=[])

    keys_to_features = {
        'image/object/class/text': parsing_ops.VarLenFeature(dtypes.string),
    }

    items_to_handlers = {
        'labels':
            tf_example_decoder.LookupTensor('image/object/class/text', table),
    }

    decoder = slim_example_decoder.TFExampleDecoder(keys_to_features,
                                                    items_to_handlers)
    obtained_class_ids = decoder.decode(serialized_example)[0].eval()

  self.assertAllClose([2, 0, 1], obtained_class_ids)

Developer ID: douyuanyuan, Project: models, Lines of code: 30, Source file: tf_example_decoder_test.py
Example 17: _text_vocab_subsample_vocab_helper

def _text_vocab_subsample_vocab_helper(self, vocab_freq_file, vocab_min_count,
                                       vocab_freq_dtype, corpus_size=None):
  # The outputs are non-deterministic, so set random seed to help ensure that
  # the outputs remain constant for testing.
  random_seed.set_random_seed(42)

  input_tensor = constant_op.constant([
      # keep_prob = (sqrt(30/(0.05*100)) + 1) * (0.05*100/30) = 0.57.
      b"the",
      b"answer",  # Not in vocab. (Always discarded)
      b"to",  # keep_prob = 0.75.
      b"life",  # keep_prob > 1. (Always kept)
      b"and",  # keep_prob = 0.48.
      b"universe"  # Below vocab threshold of 3. (Always discarded)
  ])
  # keep_prob calculated from vocab file with relative frequencies of:
  # and: 40
  # life: 8
  # the: 30
  # to: 20
  # universe: 2

  tokens, labels = text.skip_gram_sample_with_text_vocab(
      input_tensor=input_tensor,
      vocab_freq_file=vocab_freq_file,
      vocab_token_index=0,
      vocab_freq_index=1,
      vocab_freq_dtype=vocab_freq_dtype,
      vocab_min_count=vocab_min_count,
      vocab_subsampling=0.05,
      corpus_size=corpus_size,
      min_skips=1,
      max_skips=1,
      seed=123)

  expected_tokens, expected_labels = self._split_tokens_labels([
      (b"the", b"to"),
      (b"to", b"the"),
      (b"to", b"life"),
      (b"life", b"to"),
  ])
  with self.test_session() as sess:
    lookup_ops.tables_initializer().run()
    tokens_eval, labels_eval = sess.run([tokens, labels])
    self.assertAllEqual(expected_tokens, tokens_eval)
    self.assertAllEqual(expected_labels, labels_eval)

Developer ID: 1000sprites, Project: tensorflow, Lines of code: 46, Source file: skip_gram_ops_test.py
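The keep_prob values quoted in the comments of Example 17 follow the subsampling formula given there: keep_prob = (sqrt(freq / t) + 1) * (t / freq), with t = vocab_subsampling * corpus_size. The short standalone snippet below is not part of the test file; it simply reproduces those numbers from the relative frequencies listed in the comments (which sum to a corpus size of 100).

import math

corpus_size = 40 + 8 + 30 + 20 + 2   # 100, from the listed relative frequencies
t = 0.05 * corpus_size               # vocab_subsampling threshold = 5

for token, freq in [("the", 30), ("to", 20), ("life", 8), ("and", 40)]:
  keep_prob = (math.sqrt(freq / t) + 1) * (t / freq)
  print(token, round(keep_prob, 2))
  # the 0.57, to 0.75, life 1.42 (>1, always kept), and 0.48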
Example 18: test_index_table_from_file_with_oov_buckets

def test_index_table_from_file_with_oov_buckets(self):
  vocabulary_file = self._createVocabFile("f2i_vocab5.txt")
  with self.test_session():
    table = lookup_ops.index_table_from_file(
        vocabulary_file=vocabulary_file, num_oov_buckets=1000)
    ids = table.lookup(
        constant_op.constant(["salad", "surgery", "tarkus", "toccata"]))

    self.assertRaises(errors_impl.OpError, ids.eval)
    lookup_ops.tables_initializer().run()
    self.assertAllEqual(
        (
            1,  # From vocabulary file.
            2,  # From vocabulary file.
            867,  # 3 + fingerprint("tarkus") mod 1000 (num_oov_buckets).
            860),  # 3 + fingerprint("toccata") mod 1000 (num_oov_buckets).
        ids.eval())

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 17, Source file: lookup_ops_test.py
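The ids 867 and 860 in Example 18 come from hashing the out-of-vocabulary strings into one of the num_oov_buckets buckets and offsetting by the vocabulary size (3). The standalone sketch below is a hypothetical check, not part of the test file; it assumes the default OOV hasher is the fast string hash exposed as string_ops.string_to_hash_bucket_fast, which the excerpt itself does not state.

from tensorflow.python.client import session
from tensorflow.python.ops import string_ops

vocab_size = 3          # "brain", "salad", "surgery" in the vocabulary file
num_oov_buckets = 1000
buckets = string_ops.string_to_hash_bucket_fast(
    ["tarkus", "toccata"], num_oov_buckets)

with session.Session() as sess:
  # If the hasher assumption holds, this prints the ids asserted above.
  print([vocab_size + b for b in sess.run(buckets)])  # expected: [867, 860]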
Example 19: testMissingValueInOneHotColumnForSparseColumnWithKeys

def testMissingValueInOneHotColumnForSparseColumnWithKeys(self):
  ids = fc.sparse_column_with_keys("ids", ["marlo", "omar", "stringer"])
  one_hot = fc.one_hot_column(ids)
  features = {"ids": constant_op.constant([["marlo", "unknown", "omar"]])}
  one_hot_tensor = feature_column_ops.input_from_feature_columns(
      features, [one_hot])
  with self.test_session() as sess:
    sess.run(variables.global_variables_initializer())
    sess.run(lookup_ops.tables_initializer())
    self.assertAllEqual([[1., 1., 0.]], one_hot_tensor.eval())

Developer ID: ChengYuXiang, Project: tensorflow, Lines of code: 10, Source file: feature_column_test.py
Example 20: _export_graph

def _export_graph(graph, saver, checkpoint_path, export_dir,
                  default_graph_signature, named_graph_signatures,
                  exports_to_keep):
  """Exports graph via session_bundle, by creating a Session."""
  with graph.as_default():
    with tf_session.Session('') as session:
      variables.local_variables_initializer()
      lookup_ops.tables_initializer()
      saver.restore(session, checkpoint_path)

      export = exporter.Exporter(saver)
      export.init(
          init_op=control_flow_ops.group(
              variables.local_variables_initializer(),
              lookup_ops.tables_initializer()),
          default_graph_signature=default_graph_signature,
          named_graph_signatures=named_graph_signatures,
          assets_collection=ops.get_collection(
              ops.GraphKeys.ASSET_FILEPATHS))
      return export.export(export_dir, contrib_variables.get_global_step(),
                           session, exports_to_keep=exports_to_keep)

Developer ID: AlbertXiebnu, Project: tensorflow, Lines of code: 20, Source file: export.py
Note: The tensorflow.python.ops.lookup_ops.tables_initializer examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and any use or redistribution should follow the corresponding projects' licenses. Do not republish without permission.