
Python sparse.as_dense_types Function Code Examples


This article collects typical usage examples of the Python function tensorflow.python.data.util.sparse.as_dense_types. If you are wondering what as_dense_types does, how to call it, or what real-world uses look like, the curated code examples below may help.



The following presents 20 code examples of the as_dense_types function, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
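All of the examples follow the same pattern: a structure of component dtypes (output_types) and a matching structure of component classes (output_classes) are passed to as_dense_types, and the result is flattened with nest.flatten and forwarded to a dataset op's output_types argument. The sketch below shows that pattern in isolation. It is a minimal sketch, assuming a TensorFlow 1.x source tree in which the internal modules tensorflow.python.data.util.nest and tensorflow.python.data.util.sparse are importable; the output_types/output_classes values are illustrative stand-ins for the corresponding dataset properties, and the expected output reflects the behavior suggested by these examples (SparseTensor components are carried as serialized variants).

from tensorflow.python.data.util import nest
from tensorflow.python.data.util import sparse
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor

# Illustrative stand-ins for dataset.output_types / dataset.output_classes:
# one plain Tensor component and one SparseTensor component.
output_types = (dtypes.float32, dtypes.int64)
output_classes = (ops.Tensor, sparse_tensor.SparseTensor)

# Components whose class is SparseTensor travel through the dataset ops as
# serialized variants, so their dtype is replaced with dtypes.variant, while
# plain Tensor components keep their original dtype.
dense_types = sparse.as_dense_types(output_types, output_classes)
flat_types = nest.flatten(dense_types)  # expected: [tf.float32, tf.variant]

The companion function sparse.as_dense_shapes plays the same role for output_shapes, as most of the examples below also show.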

Example 1: materialize

  def materialize(self, shared_name=None, container=None):
    """Materialize creates a MaterializedIndexedDataset.

    IndexedDatasets can be combined through operations such as TBD. Therefore,
    they are only materialized when absolutely required.

    Args:
      shared_name: a string for the shared name to use for the resource.
      container: a string for the container to store the resource.

    Returns:
      A MaterializedIndexedDataset.
    """
    if container is None:
      container = ""
    if shared_name is None:
      shared_name = ""
    materialized_resource = (
        ged_ops.experimental_materialized_index_dataset_handle(
            container=container,
            shared_name=shared_name,
            output_types=nest.flatten(
                sparse.as_dense_types(self.output_types, self.output_classes)),
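            # Note: the quoted source also routes the output *shapes* through
            # `as_dense_types` below; `sparse.as_dense_shapes` is the usual
            # shape-side counterpart seen in the other examples in this article.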
            output_shapes=nest.flatten(
                sparse.as_dense_types(self.output_shapes,
                                      self.output_classes))))

    with ops.colocate_with(materialized_resource):
      materializer = ged_ops.experimental_indexed_dataset_materialize(
          self._as_variant_tensor(), materialized_resource)
    return MaterializedIndexedDataset(materialized_resource, materializer,
                                      self.output_classes, self.output_types,
                                      self.output_shapes)
Developer: JonathanRaiman, Project: tensorflow, Lines: 33, Source: indexed_dataset_ops.py


Example 2: get_next

  def get_next(self, name=None):
    """See `tf.data.Iterator.get_next`."""
    self._get_next_call_count += 1
    if self._get_next_call_count > iterator_ops.GET_NEXT_CALL_WARNING_THRESHOLD:
      warnings.warn(iterator_ops.GET_NEXT_CALL_WARNING_MESSAGE)

    flat_result = []
    # TODO(priyag): This will fail if the input size (typically number of
    # batches) is not divisible by number of devices.
    # How do we handle that more gracefully / let the user know?
    for buffer_resource in self._buffering_resources:
      flat_ret = gen_dataset_ops.function_buffering_resource_get_next(
          buffer_resource,
          output_types=data_nest.flatten(sparse.as_dense_types(
              self.output_types, self.output_classes)), name=name)

      ret = sparse.deserialize_sparse_tensors(
          data_nest.pack_sequence_as(self.output_types, flat_ret),
          self.output_types, self.output_shapes, self.output_classes)

      for tensor, shape in zip(
          data_nest.flatten(ret), data_nest.flatten(self.output_shapes)):
        if isinstance(tensor, ops.Tensor):
          tensor.set_shape(shape)
      flat_result.append(ret)

    return nest.pack_sequence_as(self._devices, flat_result)
Developer: AnishShah, Project: tensorflow, Lines: 27, Source: prefetching_ops_v2.py


Example 3: get_next

  def get_next(self, name=None):
    """Returns a nested structure of `tf.Tensor`s containing the next element.

    Args:
      name: (Optional.) A name for the created operation.

    Returns:
      A nested structure of `tf.Tensor` objects.
    """
    self._get_next_call_count += 1
    if self._get_next_call_count > GET_NEXT_CALL_WARNING_THRESHOLD:
      warnings.warn(GET_NEXT_CALL_WARNING_MESSAGE)

    return sparse.deserialize_sparse_tensors(
        nest.pack_sequence_as(self._output_types,
                              gen_dataset_ops.iterator_get_next(
                                  self._iterator_resource,
                                  output_types=nest.flatten(
                                      sparse.as_dense_types(
                                          self._output_types,
                                          self._output_classes)),
                                  output_shapes=nest.flatten(
                                      sparse.as_dense_shapes(
                                          self._output_shapes,
                                          self._output_classes)),
                                  name=name)), self._output_types,
        self._output_shapes, self._output_classes)
Developer: modkzs, Project: tensorflow, Lines: 27, Source: iterator_ops.py


Example 4: _make_key_func

  def _make_key_func(self, key_func, input_dataset):
    """Make wrapping Defun for key_func."""

    @function.Defun(*nest.flatten(
        sparse.as_dense_types(input_dataset.output_types,
                              input_dataset.output_classes)))
    def tf_key_func(*args):
      """A wrapper for Defun that facilitates shape inference."""
      # Pass in shape information from the input_dataset.
      dense_shapes = sparse.as_dense_shapes(input_dataset.output_shapes,
                                            input_dataset.output_classes)
      for arg, shape in zip(args, nest.flatten(dense_shapes)):
        arg.set_shape(shape)

      nested_args = nest.pack_sequence_as(input_dataset.output_types, args)
      nested_args = sparse.deserialize_sparse_tensors(
          nested_args, input_dataset.output_types, input_dataset.output_shapes,
          input_dataset.output_classes)
      # pylint: disable=protected-access
      if dataset_ops._should_unpack_args(nested_args):
        ret = key_func(*nested_args)
      # pylint: enable=protected-access
      else:
        ret = key_func(nested_args)
      ret = ops.convert_to_tensor(ret, dtype=dtypes.int64)
      if ret.dtype != dtypes.int64:
        raise ValueError("`key_func` must return a single tf.int64 tensor.")
      return ret

    self._key_func = tf_key_func
    self._key_func.add_to_graph(ops.get_default_graph())
Developer: AbhinavJain13, Project: tensorflow, Lines: 31, Source: grouping.py


Example 5: _as_variant_tensor

  def _as_variant_tensor(self):
    return gen_dataset_ops.ignore_errors_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)))
Developer: AbhinavJain13, Project: tensorflow, Lines: 7, Source: error_ops.py


Example 6: get_next_as_optional

def get_next_as_optional(iterator):
  """Returns an `Optional` that contains the next value from the iterator.

  If `iterator` has reached the end of the sequence, the returned `Optional`
  will have no value.

  Args:
    iterator: A `tf.data.Iterator` object.

  Returns:
    An `Optional` object representing the next value from the iterator (if it
    has one) or no value.
  """
  # pylint: disable=protected-access
  return optional_ops._OptionalImpl(
      gen_dataset_ops.iterator_get_next_as_optional(
          iterator._iterator_resource,
          output_types=nest.flatten(
              sparse.as_dense_types(iterator.output_types,
                                    iterator.output_classes)),
          output_shapes=nest.flatten(
              sparse.as_dense_shapes(iterator.output_shapes,
                                     iterator.output_classes))),
      structure.Structure._from_legacy_structure(iterator.output_types,
                                                 iterator.output_shapes,
                                                 iterator.output_classes))
Developer: ThunderQi, Project: tensorflow, Lines: 26, Source: iterator_ops.py


Example 7: _as_variant_tensor

  def _as_variant_tensor(self):
    return gen_dataset_ops.set_stats_aggregator_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        self._stats_aggregator._resource,  # pylint: disable=protected-access
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)),
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)))
Developer: kimr843, Project: tensorflow, Lines: 8, Source: stats_ops.py


Example 8: _as_variant_tensor

  def _as_variant_tensor(self):
    return self._op_function(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        self._tag,
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)),
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)))
Developer: ChengYuXiang, Project: tensorflow, Lines: 8, Source: stats_ops.py


Example 9: _as_variant_tensor

  def _as_variant_tensor(self):
    return gen_dataset_ops.random_dataset(
        seed=self._seed,
        seed2=self._seed2,
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)))
Developer: AbhinavJain13, Project: tensorflow, Lines: 8, Source: random_ops.py


Example 10: get

  def get(self, index):
    """Get retrieves a value (or set of values) from the IndexedDataset.

    Args:
      index: A uint64 scalar or vector tensor with the indices to retrieve.

    Returns:
      A tensor containing the values corresponding to `index`.
    """
    # TODO(saeta): nest.pack_sequence_as(...)
    return ged_ops.experimental_indexed_dataset_get(
        self._materialized_resource,
        index,
        output_types=nest.flatten(
            sparse.as_dense_types(self._output_types, self._output_classes)),
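        # Note: as in Example 1, the output shapes below are passed through
        # `as_dense_types`; `sparse.as_dense_shapes` is the shape-side
        # counterpart used by the other examples in this article.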
        output_shapes=nest.flatten(
            sparse.as_dense_types(self._output_shapes, self._output_classes)))
Developer: JonathanRaiman, Project: tensorflow, Lines: 17, Source: indexed_dataset_ops.py


Example 11: _as_variant_tensor

  def _as_variant_tensor(self):
    # pylint: disable=protected-access
    return gen_dataset_ops.directed_interleave_dataset(
        self._selector_input._as_variant_tensor(),
        [data_input._as_variant_tensor() for data_input in self._data_inputs],
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)))
Developer: jfreedman0, Project: tensorflow, Lines: 9, Source: interleave_ops.py


Example 12: _as_variant_tensor

  def _as_variant_tensor(self):
    return gen_dataset_ops.slide_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        window_size=self._window_size,
        stride=self._stride,
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)))
Developer: AndrewTwinz, Project: tensorflow, Lines: 9, Source: sliding.py


Example 13: _as_variant_tensor

  def _as_variant_tensor(self):
    return gen_dataset_ops.dense_to_sparse_batch_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        self._batch_size,
        row_shape=dataset_ops._partial_shape_to_tensor(self._row_shape),  # pylint: disable=protected-access
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)),
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)))
Developer: kimr843, Project: tensorflow, Lines: 9, Source: batching.py


Example 14: __init__

  def __init__(self,
               input_dataset,
               one_shot,
               devices,
               buffer_size,
               shared_name=None):
    self._input_dataset = input_dataset
    self._get_next_call_count = 0
    self._one_shot = one_shot
    if shared_name is None:
      shared_name = ""
    self._devices = devices

    if self._one_shot:
      self._input_iterator = input_dataset.make_one_shot_iterator()
    else:
      self._input_iterator = iterator_ops.Iterator.from_structure(
          self._input_dataset.output_types, self._input_dataset.output_shapes,
          shared_name, self._input_dataset.output_classes)
    input_iterator_handle = self._input_iterator.string_handle()

    @function.Defun(dtypes.string)
    def _prefetch_fn(handle):
      """Prefetches one element from `input_iterator`."""
      remote_iterator = iterator_ops.Iterator.from_string_handle(
          handle, self._input_iterator.output_types,
          self._input_iterator.output_shapes,
          self._input_iterator.output_classes)
      ret = remote_iterator.get_next()
      return nest.flatten(sparse.serialize_sparse_tensors(ret))

    target_device = ged_ops.experimental_iterator_get_device(
        self._input_iterator._iterator_resource)
    self._buffering_resources = []
    for device in nest.flatten(self._devices):
      with ops.device(device):
        buffer_resource_handle = prefetching_ops.function_buffering_resource(
            f=_prefetch_fn,
            output_types=data_nest.flatten(
                sparse.as_dense_types(self._input_dataset.output_types,
                                      self._input_dataset.output_classes)),
            target_device=target_device,
            string_arg=input_iterator_handle,
            buffer_size=buffer_size,
            shared_name=shared_name)
        self._buffering_resources.append(buffer_resource_handle)

    if not self._one_shot:
      reset_ops = []
      for buffer_resource in self._buffering_resources:
        reset_ops.append(
            ged_ops.experimental_function_buffering_resource_reset(
                buffer_resource))
      with ops.control_dependencies(reset_ops):
        self._initializer = self._input_iterator.make_initializer(
            self._input_dataset)
Developer: Jordan1237, Project: tensorflow, Lines: 56, Source: prefetching_ops_v2.py


Example 15: __init__

  def __init__(self,
               dataset,
               devices,
               prefetch_buffer_size=1,
               source_device="/cpu:0"):
    self._dataset = dataset
    self._devices = devices
    self._source_device = source_device
    self._source_device_tensor = ops.convert_to_tensor(source_device)

    self._flat_output_shapes = nest.flatten(
        sparse.as_dense_shapes(self._dataset.output_shapes,
                               self._dataset.output_classes))
    self._flat_output_types = nest.flatten(
        sparse.as_dense_types(self._dataset.output_types,
                              self._dataset.output_classes))

    # Create the MultiDeviceIterator.
    with ops.device(self._source_device):
      self._multi_device_iterator_resource = (
          gen_dataset_ops.multi_device_iterator(
              devices=self._devices,
              shared_name="",
              container="",
              output_types=self._flat_output_types,
              output_shapes=self._flat_output_shapes))

      # The incarnation ID is used to ensure consistency between the per-device
      # iterators and the multi-device iterator.
      self._incarnation_id = gen_dataset_ops.multi_device_iterator_init(
          self._dataset._as_variant_tensor(),  # pylint: disable=protected-access
          self._multi_device_iterator_resource)

    # TODO(rohanj): Explore the possibility of the MultiDeviceIterator to
    # initialize the device side of the pipeline. This would allow the
    # MultiDeviceIterator to choose, for example, to move some transformations
    # into the device side from its input. It might be useful in rewriting.
    # Create the per device iterators.
    self._device_iterators = []
    i = 0
    for device in self._devices:
      ds = _PerDeviceGenerator(
          i, self._multi_device_iterator_resource, self._incarnation_id,
          self._source_device_tensor, device, self._dataset.output_shapes,
          self._dataset.output_types, self._dataset.output_classes)
      ds = ds.prefetch(prefetch_buffer_size)
      with ops.device(device):
        self._device_iterators.append(ds.make_initializable_iterator())
      i += 1

    device_iterator_initializers = [
        iterator.initializer for iterator in self._device_iterators
    ]
    self._initializer = control_flow_ops.group(*device_iterator_initializers)
Developer: StephenOman, Project: tensorflow, Lines: 54, Source: prefetching_ops.py


Example 16: _as_variant_tensor

  def _as_variant_tensor(self):
    input_t = self._input_dataset._as_variant_tensor()  # pylint: disable=protected-access
    return gen_dataset_ops.scan_dataset(
        input_t,
        nest.flatten(self._initial_state),
        self._scan_func.captured_inputs,
        f=self._scan_func,
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)),
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)))
Developer: AndrewTwinz, Project: tensorflow, Lines: 11, Source: scan_ops.py


Example 17: _as_variant_tensor

  def _as_variant_tensor(self):
    return gen_dataset_ops.parallel_interleave_dataset(
        self._input_dataset._as_variant_tensor(),  # pylint: disable=protected-access
        self._map_func.captured_inputs,
        self._cycle_length,
        self._block_length,
        self._sloppy,
        f=self._map_func,
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)),
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)))
Developer: AbhinavJain13, Project: tensorflow, Lines: 12, Source: interleave_ops.py


Example 18: __init__

  def __init__(self, input_dataset, map_func, cycle_length, block_length,
               sloppy, buffer_output_elements, prefetch_input_elements):
    """See `tf.contrib.data.parallel_interleave()` for details."""
    super(ParallelInterleaveDataset, self).__init__()
    self._input_dataset = input_dataset

    @function.Defun(*nest.flatten(
        sparse.as_dense_types(input_dataset.output_types,
                              input_dataset.output_classes)))
    def tf_map_func(*args):
      """A wrapper for Defun that facilitates shape inference."""
      # Pass in shape information from the input_dataset.
      dense_shapes = sparse.as_dense_shapes(input_dataset.output_shapes,
                                            input_dataset.output_classes)
      for arg, shape in zip(args, nest.flatten(dense_shapes)):
        arg.set_shape(shape)

      nested_args = nest.pack_sequence_as(input_dataset.output_types, args)
      nested_args = sparse.deserialize_sparse_tensors(
          nested_args, input_dataset.output_types, input_dataset.output_shapes,
          input_dataset.output_classes)
      if dataset_ops._should_unpack_args(nested_args):  # pylint: disable=protected-access
        dataset = map_func(*nested_args)
      else:
        dataset = map_func(nested_args)

      if not isinstance(dataset, dataset_ops.Dataset):
        raise TypeError("`map_func` must return a `Dataset` object.")

      self._output_classes = dataset.output_classes
      self._output_types = dataset.output_types
      self._output_shapes = dataset.output_shapes

      return dataset._as_variant_tensor()  # pylint: disable=protected-access

    self._map_func = tf_map_func
    self._map_func.add_to_graph(ops.get_default_graph())

    self._cycle_length = ops.convert_to_tensor(
        cycle_length, dtype=dtypes.int64, name="cycle_length")
    self._block_length = ops.convert_to_tensor(
        block_length, dtype=dtypes.int64, name="block_length")
    self._sloppy = ops.convert_to_tensor(
        sloppy, dtype=dtypes.bool, name="sloppy")
    self._buffer_output_elements = convert.optional_param_to_tensor(
        "buffer_output_elements",
        buffer_output_elements,
        argument_default=2 * block_length)
    self._prefetch_input_elements = convert.optional_param_to_tensor(
        "prefetch_input_elements",
        prefetch_input_elements,
        argument_default=2 * cycle_length)
Developer: AnddyWang, Project: tensorflow, Lines: 52, Source: interleave_ops.py


Example 19: __init__

  def __init__(self,
               input_dataset,
               one_shot,
               device,
               buffer_size,
               shared_name=None):
    self._input_dataset = input_dataset
    self._get_next_call_count = 0
    self._one_shot = one_shot
    if shared_name is None:
      shared_name = ""

    if self._one_shot:
      self._input_iterator = input_dataset.make_one_shot_iterator()
    else:
      self._input_iterator = iterator_ops.Iterator.from_structure(
          self._input_dataset.output_types, self._input_dataset.output_shapes,
          shared_name, self._input_dataset.output_classes)
    input_iterator_handle = self._input_iterator.string_handle()

    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.string)])
    # handle is a scalar `tf.Tensor` of type `tf.string`
    def _prefetch_fn(handle):
      """Prefetches one element from `input_iterator`."""
      remote_iterator = iterator_ops.Iterator.from_string_handle(
          handle, self._input_iterator.output_types,
          self._input_iterator.output_shapes,
          self._input_iterator.output_classes)
      ret = remote_iterator.get_next()
      return nest.flatten(sparse.serialize_sparse_tensors(ret))

    self._prefetch_fn = _prefetch_fn._get_concrete_function_internal()  # pylint: disable=protected-access

    iterator_device = ged_ops.experimental_iterator_get_device(
        self._input_iterator._iterator_resource)

    with ops.device(device):
      self._buffering_resource = function_buffering_resource(
          f=self._prefetch_fn,
          target_device=iterator_device,
          string_arg=input_iterator_handle,
          buffer_size=buffer_size,
          shared_name=shared_name,
          output_types=nest.flatten(
              sparse.as_dense_types(self._input_dataset.output_types,
                                    self._input_dataset.output_classes)))

    if not self._one_shot:
      reset_op = function_buffering_resource_reset(self._buffering_resource)
      with ops.control_dependencies([reset_op]):
        self._initializer = self._input_iterator.make_initializer(
            self._input_dataset)
Developer: abhinav-upadhyay, Project: tensorflow, Lines: 52, Source: prefetching_ops.py


Example 20: _as_variant_tensor

  def _as_variant_tensor(self):
    # pylint: disable=protected-access
    input_resource = self._input_dataset._as_variant_tensor()
    return gen_dataset_ops.map_and_batch_dataset(
        input_resource,
        self._map_func.captured_inputs,
        f=self._map_func,
        batch_size=self._batch_size,
        num_parallel_batches=self._num_parallel_batches,
        output_types=nest.flatten(
            sparse.as_dense_types(self.output_types, self.output_classes)),
        output_shapes=nest.flatten(
            sparse.as_dense_shapes(self.output_shapes, self.output_classes)))
Developer: Kongsea, Project: tensorflow, Lines: 13, Source: batching.py



Note: The tensorflow.python.data.util.sparse.as_dense_types examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright remains with the original authors. Consult the corresponding project's License before redistributing or using the code, and do not reproduce this article without permission.

