本文整理汇总了Python中tensorflow.python.training.summary_io.SummaryWriterCache类的典型用法代码示例。如果您正苦于以下问题:Python SummaryWriterCache类的具体用法?Python SummaryWriterCache怎么用?Python SummaryWriterCache使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了SummaryWriterCache类的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: __init__
def __init__(self,
             save_steps=None,
             save_secs=None,
             output_dir="",
             show_dataflow=True,
             show_memory=False):
    """Creates a hook that captures periodic profiling snapshots.

    Execution metadata is collected through the `options`/`run_metadata`
    arguments of `tf.Session.run`; this hook requests it and dumps the
    result in Chrome Trace format.

    Args:
      save_steps: `int`, save profile traces every N steps. Exactly one of
        `save_secs` and `save_steps` should be set.
      save_secs: `int` or `float`, save profile traces every N seconds.
      output_dir: `string`, directory the trace files are written to.
        Defaults to the current directory.
      show_dataflow: `bool`, if True, add flow events to the trace
        connecting producers and consumers of tensors.
      show_memory: `bool`, if True, add object snapshot events to the
        trace showing the sizes and lifetimes of tensors.
    """
    # One trace file per dump; "{}" is filled in with the step number.
    self._output_file = os.path.join(output_dir, "timeline-{}.json")
    self._file_writer = SummaryWriterCache.get(output_dir)
    self._timer = SecondOrStepTimer(every_secs=save_secs,
                                    every_steps=save_steps)
    self._show_memory = show_memory
    self._show_dataflow = show_dataflow
开发者ID:didukhle,项目名称:tensorflow,代码行数:30,代码来源:basic_session_run_hooks.py
示例2: __init__
def __init__(self,
             save_steps=100,
             output_dir=None,
             summary_writer=None,
             scaffold=None,
             summary_op=None):
    """Creates a `SummarySaver` monitor.

    Args:
      save_steps: `int`, save summaries every N steps. See `EveryN`.
      output_dir: `string`, directory to write summaries to. Only consulted
        when no `summary_writer` is supplied.
      summary_writer: `SummaryWriter`. If `None` and an `output_dir` was
        passed, one is created for that directory.
      scaffold: `Scaffold`, used to obtain `summary_op` when it is not
        provided explicitly.
      summary_op: `Tensor` of type `string` holding a serialized `Summary`
        protocol buffer, e.g. the output of `scalar_summary` or
        `merge_all_summaries`.
    """
    # TODO(ipolosukhin): Implement every N seconds.
    self._summary_op = summary_op
    self._scaffold = scaffold
    self._save_steps = save_steps
    # Fall back to a cached writer for output_dir when none was given.
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
开发者ID:KalraA,项目名称:tensorflow,代码行数:26,代码来源:basic_session_run_hooks.py
示例3: __init__
def __init__(self,
             checkpoint_dir,
             save_secs=None,
             save_steps=None,
             saver=None,
             checkpoint_basename="model.ckpt",
             scaffold=None):
    """Creates a CheckpointSaverHook monitor.

    Args:
      checkpoint_dir: `str`, base directory for the checkpoint files.
      save_secs: `int`, save every N secs.
      save_steps: `int`, save every N steps.
      saver: `Saver` object, used for saving.
      checkpoint_basename: `str`, base name for the checkpoint files.
      scaffold: `Scaffold`, used to obtain a saver object.

    Raises:
      ValueError: One of `save_steps` or `save_secs` should be set.
      ValueError: Exactly one of saver or scaffold should be set.
    """
    logging.info("Create CheckpointSaverHook.")
    # Require exactly one of saver/scaffold: both-None and both-set are
    # equally invalid, i.e. the two identity checks must differ.
    if (saver is None) == (scaffold is None):
        raise ValueError("Exactly one of saver or scaffold must be provided.")
    self._saver = saver
    self._scaffold = scaffold
    self._checkpoint_dir = checkpoint_dir
    self._save_path = os.path.join(checkpoint_dir, checkpoint_basename)
    self._summary_writer = SummaryWriterCache.get(checkpoint_dir)
    self._timer = _SecondOrStepTimer(every_secs=save_secs,
                                     every_steps=save_steps)
开发者ID:lijiankou,项目名称:tensorflow,代码行数:32,代码来源:basic_session_run_hooks.py
示例4: after_run
def after_run(self, run_context, run_values):
    """Writes per-feature importance summaries after a session run.

    Reads the global step, feature names, usage counts and gains from
    `run_values.results`, throttles output to every `self._every_n_steps`
    steps, and emits four scalar summaries (raw and normalized usage and
    gain) per feature into a per-feature summary directory.

    Raises:
      RuntimeError: if the three importance lists have different lengths.
    """
    del run_context  # Unused by feature importance summary saver hook.
    # Pull the result tensors out of the run values.
    results = run_values.results
    step = results["global_step"]
    names = results["feature_names"]
    usage_counts = results["feature_usage_counts"]
    gains = results["feature_gains"]
    # Throttle: only log once every `self._every_n_steps` global steps.
    if (self._last_triggered_step is not None and
            step < self._last_triggered_step + self._every_n_steps):
        return
    # The three lists must describe the same set of features.
    if len(names) != len(usage_counts) or len(names) != len(gains):
        raise RuntimeError(
            "Feature names and importance measures have inconsistent lengths.")
    # Normalizers for fractional summaries; fall back to 1.0 on zero totals.
    total_usage = sum(usage_counts, 0.0)
    usage_norm = 1.0 / total_usage if total_usage else 1.0
    total_gain = sum(gains, 0.0)
    gain_norm = 1.0 / total_gain if total_gain else 1.0
    self._last_triggered_step = step
    for name, usage_count, gain in zip(names, usage_counts, gains):
        # One summary sub-directory per feature, keyed by decoded name.
        writer = SummaryWriterCache.get(
            os.path.join(self._model_dir, name.decode("utf-8")))
        # Emit raw and normalized values in the original order.
        for tag, value in (
                ("feature_importance/usage_counts", usage_count),
                ("feature_importance/usage_fraction",
                 usage_count * usage_norm),
                ("feature_importance/gains", gain),
                ("feature_importance/gains_fraction", gain * gain_norm)):
            writer.add_summary(
                Summary(value=[Summary.Value(tag=tag, simple_value=value)]),
                step)
开发者ID:Dr4KK,项目名称:tensorflow,代码行数:60,代码来源:trainer_hooks.py
示例5: begin
def begin(self):
    """Captures the summary writer, trainable weights and global step.

    Called by the SessionRunHook machinery before the session is created.
    """
    # These calls only work because the SessionRunHook API guarantees this
    # will get called within a graph context containing our model graph.
    self.summary_writer = SummaryWriterCache.get(self.working_dir)
    self.weight_tensors = tf.trainable_variables()
    # May create the global step op if the graph does not have one yet.
    self.global_step = tf.train.get_or_create_global_step()
开发者ID:nhu2000,项目名称:minigo,代码行数:7,代码来源:dual_net.py
示例6: __init__
def __init__(self,
             save_steps=100,
             save_secs=None,
             output_dir=None,
             summary_writer=None,
             scaffold=None,
             summary_op=None):
    """Creates a `SummarySaver` monitor.

    Args:
      save_steps: `int`, save summaries every N steps. Exactly one of
        `save_secs` and `save_steps` should be set.
      save_secs: `int`, save summaries every N seconds.
      output_dir: `string`, directory to write summaries to. Only consulted
        when no `summary_writer` is supplied.
      summary_writer: `SummaryWriter`. If `None` and an `output_dir` was
        passed, one is created for that directory.
      scaffold: `Scaffold`, used to obtain `summary_op` when it is not
        provided explicitly.
      summary_op: `Tensor` of type `string` holding a serialized `Summary`
        protocol buffer, e.g. the output of `scalar_summary` or
        `merge_all_summaries`.
    """
    self._summary_op = summary_op
    self._scaffold = scaffold
    # Fall back to a cached writer for output_dir when none was given.
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
    self._timer = _SecondOrStepTimer(every_secs=save_secs,
                                     every_steps=save_steps)
开发者ID:MrCrumpets,项目名称:tensorflow,代码行数:29,代码来源:basic_session_run_hooks.py
示例7: begin
def begin(self):
    """Prepares the writer and global-step tensor before the session runs.

    Raises:
      RuntimeError: if no global step exists in the graph.
    """
    # Lazily build a writer from output_dir when none was injected.
    if self._summary_writer is None and self._output_dir:
        self._summary_writer = SummaryWriterCache.get(self._output_dir)
    self._next_step = None
    step_tensor = training_util.get_global_step()
    self._global_step_tensor = step_tensor
    if step_tensor is None:
        raise RuntimeError(
            "Global step should be created to use SummarySaverHook.")
开发者ID:1000sprites,项目名称:tensorflow,代码行数:8,代码来源:basic_session_run_hooks.py
示例8: begin
def begin(self):
    """Prepares the writer and global-episode tensor before the session runs.

    Raises:
      RuntimeError: if no global episode exists in the graph.
    """
    # Lazily build a writer from output_dir when none was injected.
    if self._summary_writer is None and self._output_dir:
        self._summary_writer = SummaryWriterCache.get(self._output_dir)
    self._current_episode = None
    self._next_episode = None
    self._global_episode_tensor = get_global_episode()
    if self._global_episode_tensor is None:
        raise RuntimeError("Global episode should be created to use EpisodeSummarySaverHook.")
开发者ID:AlexMikhalev,项目名称:polyaxon,代码行数:8,代码来源:episode_hooks.py
示例9: begin
def begin(self):
    """Sets up the writer and global step, then notifies listeners.

    Raises:
      RuntimeError: if no global step exists in the graph.
    """
    self._summary_writer = SummaryWriterCache.get(self._checkpoint_dir)
    self._global_step_tensor = training_util._get_or_create_global_step_read()  # pylint: disable=protected-access
    if self._global_step_tensor is None:
        raise RuntimeError(
            "Global step should be created to use CheckpointSaverHook.")
    # Give every registered listener a chance to initialize.
    for listener in self._listeners:
        listener.begin()
开发者ID:becster,项目名称:tensorflow,代码行数:8,代码来源:async_checkpoint.py
示例10: begin
def begin(self):
    """Prepares the writer and global-step tensor before the session runs.

    Raises:
      RuntimeError: if no global step exists in the graph.
    """
    # Lazily build a writer from output_dir when none was injected.
    if self._summary_writer is None and self._output_dir:
        self._summary_writer = SummaryWriterCache.get(self._output_dir)
    self._next_step = None
    step_tensor = training_util._get_or_create_global_step_read()  # pylint: disable=protected-access
    self._global_step_tensor = step_tensor
    if step_tensor is None:
        raise RuntimeError(
            "Global step should be created to use SummarySaverHook.")
开发者ID:didukhle,项目名称:tensorflow,代码行数:8,代码来源:basic_session_run_hooks.py
示例11: __init__
def __init__(self,
             every_n_steps=100,
             every_n_secs=None,
             output_dir=None,
             summary_writer=None):
    """Creates a hook that triggers on a step or time cadence.

    Args:
      every_n_steps: `int`, trigger every N steps, or `None`.
      every_n_secs: `int`, trigger every N seconds, or `None`.
      output_dir: `string`, directory to write summaries to. Only consulted
        when no `summary_writer` is supplied.
      summary_writer: `SummaryWriter`, writer to use directly.

    Raises:
      ValueError: unless exactly one of `every_n_steps`/`every_n_secs`
        is set.
    """
    # Exactly one cadence must be given: both None and both set are invalid.
    steps_given = every_n_steps is not None
    secs_given = every_n_secs is not None
    if steps_given == secs_given:
        raise ValueError(
            "exactly one of every_n_steps and every_n_secs should be provided.")
    self._timer = _SecondOrStepTimer(every_steps=every_n_steps,
                                     every_secs=every_n_secs)
    # Fall back to a cached writer for output_dir when none was given.
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
开发者ID:kadeng,项目名称:tensorflow,代码行数:15,代码来源:basic_session_run_hooks.py
示例12: __init__
def __init__(self, every_n_steps=100, output_dir=None, summary_writer=None):
    """Creates a monitor that logs global steps per second.

    Args:
      every_n_steps: `int`, emit the rate summary every N steps.
      output_dir: `string`, directory to write summaries to. Only consulted
        when no `summary_writer` is supplied.
      summary_writer: `SummaryWriter`, writer to use directly.
    """
    self._every_n_steps = every_n_steps
    self._summary_tag = "global_step/sec"
    # Fall back to a cached writer for output_dir when none was given.
    if summary_writer is None and output_dir:
        summary_writer = SummaryWriterCache.get(output_dir)
    self._summary_writer = summary_writer
开发者ID:821760408-sp,项目名称:tensorflow,代码行数:6,代码来源:basic_session_run_hooks.py
注:本文中的tensorflow.python.training.summary_io.SummaryWriterCache类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论