本文整理汇总了Python中tensorflow.contrib.data.python.ops.optimization.optimize函数的典型用法代码示例。如果您正苦于以下问题:Python optimize函数的具体用法?Python optimize怎么用?Python optimize使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了optimize函数的18个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: testFilterFusion
def testFilterFusion(self, map_function, predicates):
  """Checks that a Map followed by several Filters fuses into one Filter.

  The `assert_next` transformation fails at runtime unless the optimized
  graph contains the Map -> Filter -> Prefetch node sequence, i.e. unless
  all chained filters were fused into a single Filter node.
  """
  dataset = dataset_ops.Dataset.range(5).apply(
      optimization.assert_next(["Map", "Filter", "Prefetch"]))
  dataset = dataset.map(map_function)
  for predicate in predicates:
    dataset = dataset.filter(predicate)
  dataset = dataset.prefetch(0).apply(
      optimization.optimize(["filter_fusion"]))
  get_next = dataset.make_one_shot_iterator().get_next()

  with self.test_session() as sess:
    for x in range(5):
      mapped = map_function(x)
      # Re-evaluate the predicate chain in Python to decide whether the
      # pipeline should have produced this element.
      dropped = False
      for predicate in predicates:
        if isinstance(mapped, tuple):
          keep = predicate(*mapped)  # Tuples unpack to multiple arguments.
        else:
          keep = predicate(mapped)
        if not sess.run(keep):
          dropped = True
          break
      if not dropped:
        self.assertAllEqual(mapped, sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
开发者ID:AnishShah,项目名称:tensorflow,代码行数:29,代码来源:map_and_filter_fusion_test.py
示例2: _benchmarkMapAndFilter
def _benchmarkMapAndFilter(self, chain_length, optimize_dataset):
  """Benchmarks a map+filter chain, optionally with fusion enabled.

  Args:
    chain_length: Number of map+filter pairs applied to the dataset.
    optimize_dataset: Whether to apply the "map_and_filter_fusion" rewrite.
  """
  with ops.Graph().as_default():
    dataset = dataset_ops.Dataset.from_tensors(0).repeat(None)
    for _ in range(chain_length):
      dataset = dataset.map(lambda x: x + 5).filter(
          lambda x: math_ops.greater_equal(x - 5, 0))
    if optimize_dataset:
      dataset = dataset.apply(
          optimization.optimize(["map_and_filter_fusion"]))
    next_element = dataset.make_one_shot_iterator().get_next()

    with session.Session() as sess:
      # Warm up the pipeline before timing anything.
      for _ in range(10):
        sess.run(next_element.op)
      deltas = []
      for _ in range(100):
        start = time.time()
        for _ in range(100):
          sess.run(next_element.op)
        deltas.append(time.time() - start)

    # Each delta covers 100 iterations, so divide to get per-element time.
    median_wall_time = np.median(deltas) / 100
    opt_mark = "opt" if optimize_dataset else "no-opt"
    print("Map and filter dataset {} chain length: {} Median wall time: {}".
          format(opt_mark, chain_length, median_wall_time))
    self.report_benchmark(
        iters=1000,
        wall_time=median_wall_time,
        name="benchmark_map_and_filter_dataset_chain_latency_{}_{}".format(
            opt_mark, chain_length))
开发者ID:clsung,项目名称:tensorflow,代码行数:33,代码来源:map_dataset_op_test.py
示例3: testMapFilterFusion
def testMapFilterFusion(self, function, predicate):
  """Map followed by Filter should fuse into Map + FilterByLastComponent."""
  fused = dataset_ops.Dataset.range(10).apply(
      optimization.assert_next(
          ["Map", "FilterByLastComponent"])).map(function).filter(
              predicate).apply(
                  optimization.optimize(["map_and_filter_fusion"]))
  self._testMapAndFilter(fused, function, predicate)
开发者ID:AnishShah,项目名称:tensorflow,代码行数:7,代码来源:map_and_filter_fusion_test.py
示例4: testHoisting
def testHoisting(self, function, will_optimize):
  """Random-uniform calls should be hoisted out of the map function.

  When hoisting applies, the rewritten graph zips the hoisted random source
  with the input, so a "Zip[0]" node precedes the Map.
  """
  expected_nodes = ["Zip[0]", "Map"] if will_optimize else ["Map"]
  dataset = dataset_ops.Dataset.range(5).apply(
      optimization.assert_next(expected_nodes)).map(function)
  dataset = dataset.apply(optimization.optimize(["hoist_random_uniform"]))
  self._testDataset(dataset)
开发者ID:baojianzhou,项目名称:tensorflow,代码行数:7,代码来源:hoist_random_uniform_test.py
示例5: testLatencyStatsOptimization
def testLatencyStatsOptimization(self):
  """latency_all_edges should insert a LatencyStats node after each dataset."""
  stats_aggregator = stats_ops.StatsAggregator()
  dataset = dataset_ops.Dataset.from_tensors(1).apply(
      optimization.assert_next(
          ["LatencyStats", "Map", "LatencyStats", "Prefetch",
           "LatencyStats"])).map(lambda x: x * x).prefetch(1).apply(
               optimization.optimize(["latency_all_edges"])).apply(
                   stats_ops.set_stats_aggregator(stats_aggregator))
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  summary_t = stats_aggregator.get_summary()

  with self.test_session() as sess:
    sess.run(iterator.initializer)
    self.assertEqual(1 * 1, sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
    summary_str = sess.run(summary_t)
    # Exactly one latency record is expected per instrumented edge.
    for tag in ("record_latency_TensorDataset/_1",
                "record_latency_MapDataset/_4",
                "record_latency_PrefetchDataset/_6"):
      self._assertSummaryHasCount(summary_str, tag, 1)
开发者ID:ZhangXinNan,项目名称:tensorflow,代码行数:25,代码来源:optimize_dataset_op_test.py
示例6: testOptimizationStatefulFunction
def testOptimizationStatefulFunction(self):
  """Optimization must not break pipelines whose map function is stateful."""
  dataset = dataset_ops.Dataset.range(10).map(
      lambda _: random_ops.random_uniform([])).batch(10).apply(
          optimization.optimize(["map_and_batch_fusion"]))
  get_next = dataset.make_one_shot_iterator().get_next()
  # The random output is not checked; it is enough that evaluation succeeds.
  with self.test_session() as sess:
    sess.run(get_next)
开发者ID:clsung,项目名称:tensorflow,代码行数:9,代码来源:optimize_dataset_op_test.py
示例7: testFunctionLibraryDefinitionModification
def testFunctionLibraryDefinitionModification(self):
  """A rewrite that renames library functions should surface NotFoundError."""
  dataset = dataset_ops.Dataset.from_tensors(0).map(lambda x: x).apply(
      optimization.optimize(["_test_only_function_rename"]))
  get_next = dataset.make_one_shot_iterator().get_next()
  with self.test_session() as sess:
    with self.assertRaisesRegexp(errors.NotFoundError,
                                 "Function .* is not defined."):
      sess.run(get_next)
开发者ID:StephenOman,项目名称:tensorflow,代码行数:10,代码来源:optimize_dataset_op_test.py
示例8: testOptimizationLargeInputFromTensorSlices
def testOptimizationLargeInputFromTensorSlices(self):
  """Runs the optimizer over a pipeline fed with a very large tensor."""
  input_t = array_ops.placeholder(dtypes.int32, (None, None, None, None))
  dataset = dataset_ops.Dataset.from_tensor_slices(input_t).apply(
      optimization.optimize())
  iterator = dataset.make_initializable_iterator()
  get_next = iterator.get_next()
  with self.test_session() as sess:
    # NOTE(review): the feed is ~2GB of int32s, presumably sized to exceed
    # the protobuf serialization limit during optimization -- confirm.
    sess.run(iterator.initializer,
             {input_t: np.ones([1, 512, 1024, 1025], np.int32)})
    sess.run(get_next)
开发者ID:clsung,项目名称:tensorflow,代码行数:11,代码来源:optimize_dataset_op_test.py
示例9: testOptimization
def testOptimization(self):
  """map followed by batch should fuse into a single MapAndBatch node."""
  dataset = dataset_ops.Dataset.range(10).apply(
      optimization.assert_next(["MapAndBatch"])).map(
          lambda x: x * x).batch(10).apply(
              optimization.optimize(["map_and_batch_fusion"]))
  get_next = dataset.make_one_shot_iterator().get_next()
  with self.test_session() as sess:
    expected = [x * x for x in range(10)]
    self.assertAllEqual(expected, sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
开发者ID:StephenOman,项目名称:tensorflow,代码行数:12,代码来源:optimize_dataset_op_test.py
示例10: testOptimization
def testOptimization(self):
  """The serialized graph should contain the fused MapAndBatchDatasetV2 op."""
  dataset = dataset_ops.Dataset.range(10).map(lambda x: x * x).batch(
      10).apply(optimization.optimize(["map_and_batch_fusion"]))
  get_next = dataset.make_one_shot_iterator().get_next()
  with self.test_session() as sess:
    # Inspect the serialized dataset graph for the fused op.
    graph = graph_pb2.GraphDef().FromString(
        sess.run(dataset._as_serialized_graph()))
    self.assertTrue(
        any(node.op == "MapAndBatchDatasetV2" for node in graph.node))
    self.assertAllEqual([x * x for x in range(10)], sess.run(get_next))
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
开发者ID:LiuCKind,项目名称:tensorflow,代码行数:14,代码来源:optimize_dataset_op_test.py
示例11: testAdditionalInputs
def testAdditionalInputs(self):
  """Hoisting should still fire when the map function captures a tensor."""
  a = constant_op.constant(1, dtype=dtypes.float32)
  b = constant_op.constant(0, dtype=dtypes.float32)
  some_tensor = math_ops.mul(a, b)

  def random_with_capture(_):
    # Captures `some_tensor` in addition to using random_uniform.
    return some_tensor + random_ops.random_uniform(
        [], minval=1, maxval=10, dtype=dtypes.float32, seed=42)

  dataset = dataset_ops.Dataset.range(5).apply(
      optimization.assert_next(["Zip[0]", "Map"])).map(
          random_with_capture).apply(
              optimization.optimize(["hoist_random_uniform"]))
  self._testDataset(dataset)
开发者ID:baojianzhou,项目名称:tensorflow,代码行数:14,代码来源:hoist_random_uniform_test.py
示例12: testAdditionalInputs
def testAdditionalInputs(self):
  """Fusion is skipped when the predicate captures an external tensor."""
  a = constant_op.constant(3, dtype=dtypes.int64)
  b = constant_op.constant(4, dtype=dtypes.int64)
  some_tensor = math_ops.mul(a, b)
  function = lambda x: x * x

  def predicate(y):
    return math_ops.less(math_ops.cast(y, dtypes.int64), some_tensor)

  # Functions with additional (captured) inputs are currently not supported
  # by fusion, so Map and Filter must remain separate nodes.
  dataset = dataset_ops.Dataset.range(10).apply(
      optimization.assert_next(["Map", "Filter"])).map(function).filter(
          predicate).apply(
              optimization.optimize(["map_and_filter_fusion"]))
  self._testMapAndFilter(dataset, function, predicate)
开发者ID:AnishShah,项目名称:tensorflow,代码行数:16,代码来源:map_and_filter_fusion_test.py
示例13: testMapParallelization
def testMapParallelization(self, function, should_optimize):
  """Map should become ParallelMap exactly when parallelization applies."""
  expected_nodes = ["ParallelMap"] if should_optimize else ["Map"]
  dataset = dataset_ops.Dataset.range(5).apply(
      optimization.assert_next(expected_nodes)).map(function).apply(
          optimization.optimize(["map_parallelization"]))
  get_next = dataset.make_one_shot_iterator().get_next()

  with self.test_session() as sess:
    for x in range(5):
      result = sess.run(get_next)
      # No need to run the whole pipeline if it was not optimized; also the
      # unoptimized functions may be random, making values hard to check.
      if not should_optimize:
        return
      self.assertAllEqual(function(x), result)
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
开发者ID:Jordan1237,项目名称:tensorflow,代码行数:20,代码来源:map_parallelization_test.py
示例14: _compare
def _compare(self, input_dataset, map_fn, batch_size, input_size, str_id):
  """Times the same map->batch pipeline with and without vectorization.

  Args:
    input_dataset: Base dataset to transform.
    map_fn: Map function applied before batching.
    batch_size: Batch size for the pipeline.
    input_size: Shape of one input element; used only for reporting.
    str_id: Human-readable identifier included in benchmark names.
  """
  num_elems = np.prod(input_size)
  fmt = "{}__batch_size_{}_input_size_{}_{}".format
  baseline = input_dataset.map(map_fn).batch(batch_size)
  baseline_op = baseline.make_one_shot_iterator().get_next()
  vectorized = baseline.apply(optimization.optimize(["map_vectorization"]))
  vectorized_op = vectorized.make_one_shot_iterator().get_next()

  unoptimized_time = self._run(
      baseline_op, name=fmt(str_id, batch_size, num_elems, "unoptimized"))
  optimized_time = self._run(
      vectorized_op, name=fmt(str_id, batch_size, num_elems, "optimized"))

  print("Batch size: {}\n"
        "Input size: {}\n"
        "Transformation: {}\n"
        "Speedup: {}\n".format(batch_size, input_size, str_id,
                               (unoptimized_time / optimized_time)))
开发者ID:AnishShah,项目名称:tensorflow,代码行数:21,代码来源:map_vectorization_test.py
示例15: testMapFusion
def testMapFusion(self, functions):
  """A chain of map transformations should fuse into a single Map node."""
  dataset = dataset_ops.Dataset.range(5).apply(
      optimization.assert_next(["Map", "Prefetch"]))
  for function in functions:
    dataset = dataset.map(function)
  dataset = dataset.prefetch(0).apply(optimization.optimize(["map_fusion"]))
  get_next = dataset.make_one_shot_iterator().get_next()

  with self.cached_session() as sess:
    for x in range(5):
      result = sess.run(get_next)
      # Apply the same functions in Python to compute the expected value.
      expected = x
      for function in functions:
        if isinstance(expected, tuple):
          expected = function(*expected)  # Tuples unpack to multiple args.
        else:
          expected = function(expected)
      self.assertAllEqual(expected, result)
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
开发者ID:AnishShah,项目名称:tensorflow,代码行数:22,代码来源:map_and_filter_fusion_test.py
示例16: testNoopElimination
def testNoopElimination(self):
  """No-op transformations (take(-1), skip(0), repeat(1)) are removed.

  Only the effective transformations (a finite repeat, a finite skip and the
  two prefetches) should survive the "noop_elimination" rewrite.
  """
  a = constant_op.constant(1, dtype=dtypes.int64)
  b = constant_op.constant(2, dtype=dtypes.int64)
  some_tensor = math_ops.mul(a, b)

  dataset = dataset_ops.Dataset.range(5)
  dataset = dataset.apply(
      optimization.assert_next(
          ["FiniteRepeat", "FiniteSkip", "Prefetch", "Prefetch"]))
  dataset = dataset.repeat(some_tensor).skip(5).prefetch(0).take(-1).skip(
      0).repeat(1).prefetch(0)
  dataset = dataset.apply(optimization.optimize(["noop_elimination"]))
  get_next = dataset.make_one_shot_iterator().get_next()

  with self.test_session() as sess:
    # range(5) repeated twice with the first 5 elements skipped yields 0..4.
    for expected in range(5):
      self.assertAllEqual(sess.run(get_next), expected)
    with self.assertRaises(errors.OutOfRangeError):
      sess.run(get_next)
开发者ID:baojianzhou,项目名称:tensorflow,代码行数:23,代码来源:noop_elimination_test.py
示例17: _get_test_datasets
def _get_test_datasets(self,
                       base_dataset,
                       map_fn,
                       num_parallel_calls=None,
                       expect_optimized=True):
  """Given base dataset and map fn, creates test datasets.

  The unoptimized dataset asserts that Batch follows Map. The optimized
  dataset asserts that Map follows Batch and has the "map_vectorization"
  optimization applied.

  Args:
    base_dataset: Input dataset to map->batch.
    map_fn: Map function to use.
    num_parallel_calls: (Optional.) num_parallel_calls argument for map.
    expect_optimized: (Optional.) Whether we expect the optimization to take
      place, in which case we will assert that Batch is followed by Map,
      otherwise Map followed by Batch. Defaults to True.

  Returns:
    Tuple of (unoptimized dataset, optimized dataset).
  """
  map_node_name = "Map" if num_parallel_calls is None else "ParallelMap"
  batch_size = 100

  def _make_dataset(node_names):
    # Builds map->batch over the base dataset with an order assertion.
    return base_dataset.apply(optimization.assert_next(node_names)).map(
        map_fn, num_parallel_calls=num_parallel_calls).batch(batch_size)

  optimized_order = (["Batch", map_node_name]
                     if expect_optimized else [map_node_name, "Batch"])
  unoptimized = _make_dataset([map_node_name, "Batch"])
  optimized = _make_dataset(optimized_order).apply(
      optimization.optimize(["map_vectorization"]))
  return unoptimized, optimized
开发者ID:AnishShah,项目名称:tensorflow,代码行数:36,代码来源:map_vectorization_test.py
示例18: build_dataset
def build_dataset(num_elements, batch_size):
  """Builds a squaring map->batch pipeline with map_and_batch fusion."""
  dataset = dataset_ops.Dataset.range(num_elements).map(lambda x: x * x)
  return dataset.batch(batch_size).apply(
      optimization.optimize(["map_and_batch_fusion"]))
开发者ID:AnishShah,项目名称:tensorflow,代码行数:3,代码来源:optimize_dataset_serialization_test.py
注:本文中的tensorflow.contrib.data.python.ops.optimization.optimize函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论