+ Operand<TInt32> groupSize,
+ CollectiveInitializeCommunicator.Options... options) {
+ return CollectiveInitializeCommunicator.create(scope, groupKey, rank, groupSize, options);
+ }
+
+ /**
+ * An Op to permute tensors across replicated TPU instances.
+ * Each instance supplies its own input.
+ * For example, suppose there are 4 TPU instances: {@code [A, B, C, D]}. Passing
+ * source_target_pairs={@code [[0,1],[1,2],[2,3],[3,0]]} gets the outputs:
+ * {@code [D, A, B, C]}.
+ *
+ * @param <T> data type for {@code output} output
+ * @param input The local input to be permuted. Currently only supports float and
+ * bfloat16.
+ * @param sourceTargetPairs A tensor with shape [num_pairs, 2].
+ * @param <T> data type for {@code CollectivePermute} output and operands
+ * @return a new instance of CollectivePermute
+ */
+ public <T extends TType> CollectivePermute<T> collectivePermute(Operand<T> input,
+ Operand<TInt32> sourceTargetPairs) {
+ return CollectivePermute.create(scope, input, sourceTargetPairs);
+ }
+
+ /**
+ * Mutually reduces multiple tensors of identical type and shape.
+ *
+ * @param <T> data type for {@code data} output
+ * @param input The input value
+ * @param communicator The communicator value
+ * @param groupAssignment The groupAssignment value
+ * @param reduction The value of the reduction attribute
+ * @param options carries optional attribute values
+ * @param <T> data type for {@code CollectiveReduceV3} output and operands
+ * @return a new instance of CollectiveReduce
+ */
+ public <T extends TNumber> CollectiveReduce<T> collectiveReduce(Operand<T> input,
+ Operand<? extends TType> communicator, Operand<TInt32> groupAssignment, String reduction,
+ CollectiveReduce.Options... options) {
+ return CollectiveReduce.create(scope, input, communicator, groupAssignment, reduction, options);
+ }
+
+ /**
+ * Mutually reduces multiple tensors of identical type and shape and scatters the result.
+ * {@code is_stateless} means each op does not need control dependencies to other
+ * collective ops. In this case, keys that are unique at runtime
+ * (e.g. {@code instance_key}) should be used to distinguish collective groups.
+ *
+ * @param <T> data type for {@code data} output
+ * @param input The input value
+ * @param groupSize The groupSize value
+ * @param groupKey The groupKey value
+ * @param instanceKey The instanceKey value
+ * @param orderingToken The orderingToken value
+ * @param mergeOp The value of the mergeOp attribute
+ * @param finalOp The value of the finalOp attribute
+ * @param options carries optional attribute values
+ * @param <T> data type for {@code CollectiveReduceScatterV2} output and operands
+ * @return a new instance of CollectiveReduceScatter
+ */
+ public <T extends TNumber> CollectiveReduceScatter<T> collectiveReduceScatter(Operand<T> input,
+ Operand<TInt32> groupSize, Operand<TInt32> groupKey, Operand<TInt32> instanceKey,
+ Iterable<Operand<? extends TType>> orderingToken, String mergeOp, String finalOp,
+ CollectiveReduceScatter.Options... options) {
+ return CollectiveReduceScatter.create(scope, input, groupSize, groupKey, instanceKey, orderingToken, mergeOp, finalOp, options);
+ }
+
+ /**
+ * Get the parent {@link Ops} object.
+ */
+ public final Ops ops() {
+ return ops;
+ }
+}
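
Each wrapper above simply forwards to the op class's static create(scope, ...) factory. As a rough usage sketch of the permute wrapper (illustrative only, not part of the diff: the tf.collective accessor path, the Graph setup, and all tensor values are assumptions here, and a CollectivePermute node only executes meaningfully inside a replicated TPU computation):

// Sketch: building a CollectivePermute node through the generated wrapper.
// Assumes the wrappers above are exposed under tf.collective; values are illustrative.
import org.tensorflow.Graph;
import org.tensorflow.op.Ops;
import org.tensorflow.op.collective.CollectivePermute;
import org.tensorflow.op.core.Constant;
import org.tensorflow.types.TFloat32;
import org.tensorflow.types.TInt32;

public final class CollectivePermuteSketch {
  public static void main(String[] args) {
    try (Graph g = new Graph()) {
      Ops tf = Ops.create(g);
      // The local input for this replica (float, per the Javadoc).
      Constant<TFloat32> input = tf.constant(new float[] {1f, 2f, 3f});
      // Shape [num_pairs, 2]: the 4-instance ring from the Javadoc example.
      Constant<TInt32> pairs = tf.constant(new int[][] {{0, 1}, {1, 2}, {2, 3}, {3, 0}});
      CollectivePermute<TFloat32> permuted = tf.collective.collectivePermute(input, pairs);
    }
  }
}
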
diff --git a/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataExperimentalOps.java b/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataExperimentalOps.java
new file mode 100644
index 00000000000..4644599e7f1
--- /dev/null
+++ b/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataExperimentalOps.java
@@ -0,0 +1,738 @@
+// Copyright 2020-2022 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// ==============================================================================
+//
+// This class has been generated, DO NOT EDIT!
+//
+package org.tensorflow.op;
+
+import java.util.List;
+import org.tensorflow.ConcreteFunction;
+import org.tensorflow.Operand;
+import org.tensorflow.ndarray.Shape;
+import org.tensorflow.op.data.experimental.AssertNextDataset;
+import org.tensorflow.op.data.experimental.AutoShardDataset;
+import org.tensorflow.op.data.experimental.BytesProducedStatsDataset;
+import org.tensorflow.op.data.experimental.CSVDataset;
+import org.tensorflow.op.data.experimental.ChooseFastestDataset;
+import org.tensorflow.op.data.experimental.DatasetCardinality;
+import org.tensorflow.op.data.experimental.DatasetToTFRecord;
+import org.tensorflow.op.data.experimental.DenseToSparseBatchDataset;
+import org.tensorflow.op.data.experimental.DirectedInterleaveDataset;
+import org.tensorflow.op.data.experimental.GroupByReducerDataset;
+import org.tensorflow.op.data.experimental.GroupByWindowDataset;
+import org.tensorflow.op.data.experimental.IgnoreErrorsDataset;
+import org.tensorflow.op.data.experimental.IteratorGetDevice;
+import org.tensorflow.op.data.experimental.LatencyStatsDataset;
+import org.tensorflow.op.data.experimental.LmdbDataset;
+import org.tensorflow.op.data.experimental.MapAndBatchDataset;
+import org.tensorflow.op.data.experimental.MapDataset;
+import org.tensorflow.op.data.experimental.MatchingFilesDataset;
+import org.tensorflow.op.data.experimental.MaxIntraOpParallelismDataset;
+import org.tensorflow.op.data.experimental.NonSerializableDataset;
+import org.tensorflow.op.data.experimental.ParallelInterleaveDataset;
+import org.tensorflow.op.data.experimental.ParseExampleDataset;
+import org.tensorflow.op.data.experimental.PrivateThreadPoolDataset;
+import org.tensorflow.op.data.experimental.RandomDataset;
+import org.tensorflow.op.data.experimental.RebatchDataset;
+import org.tensorflow.op.data.experimental.ScanDataset;
+import org.tensorflow.op.data.experimental.SetStatsAggregatorDataset;
+import org.tensorflow.op.data.experimental.SleepDataset;
+import org.tensorflow.op.data.experimental.SlidingWindowDataset;
+import org.tensorflow.op.data.experimental.SqlDataset;
+import org.tensorflow.op.data.experimental.StatsAggregatorHandle;
+import org.tensorflow.op.data.experimental.StatsAggregatorSummary;
+import org.tensorflow.op.data.experimental.TakeWhileDataset;
+import org.tensorflow.op.data.experimental.ThreadPoolDataset;
+import org.tensorflow.op.data.experimental.ThreadPoolHandle;
+import org.tensorflow.op.data.experimental.UnbatchDataset;
+import org.tensorflow.op.data.experimental.UniqueDataset;
+import org.tensorflow.types.TBool;
+import org.tensorflow.types.TInt64;
+import org.tensorflow.types.TString;
+import org.tensorflow.types.family.TType;
+
+/**
+ * An API for building {@code data.experimental} operations as {@link Op Op}s
+ *
+ * @see {@link Ops}
+ */
+public final class DataExperimentalOps {
+ private final Scope scope;
+
+ private final Ops ops;
+
+ DataExperimentalOps(Ops ops) {
+ this.scope = ops.scope();
+ this.ops = ops;
+ }
+
+ /**
+ * The ExperimentalAssertNextDataset operation
+ *
+ * @param inputDataset The inputDataset value
+ * @param transformations The transformations value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of AssertNextDataset
+ */
+ public AssertNextDataset assertNextDataset(Operand<? extends TType> inputDataset,
+ Operand<TString> transformations, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return AssertNextDataset.create(scope, inputDataset, transformations, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that shards the input dataset.
+ * Creates a dataset that shards the input dataset by num_workers, returning a
+ * sharded dataset for the index-th worker. This attempts to automatically shard
+ * a dataset by examining the Dataset graph and inserting a shard op before the
+ * inputs to a reader Dataset (e.g. CSVDataset, TFRecordDataset).
+ * This dataset will throw a NotFound error if we cannot shard the dataset
+ * automatically.
+ *
+ * @param inputDataset A variant tensor representing the input dataset.
+ * @param numWorkers A scalar representing the number of workers to distribute this dataset across.
+ * @param index A scalar representing the index of the current worker out of num_workers.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of AutoShardDataset
+ */
+ public AutoShardDataset autoShardDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> numWorkers, Operand<TInt64> index, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes, AutoShardDataset.Options... options) {
+ return AutoShardDataset.create(scope, inputDataset, numWorkers, index, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * Records the bytes size of each element of {@code input_dataset} in a StatsAggregator.
+ *
+ * @param inputDataset The inputDataset value
+ * @param tag The tag value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of BytesProducedStatsDataset
+ */
+ public BytesProducedStatsDataset bytesProducedStatsDataset(Operand<? extends TType> inputDataset,
+ Operand<TString> tag, List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return BytesProducedStatsDataset.create(scope, inputDataset, tag, outputTypes, outputShapes);
+ }
+
+ /**
+ * The ExperimentalCSVDataset operation
+ *
+ * @param filenames The filenames value
+ * @param compressionType The compressionType value
+ * @param bufferSize The bufferSize value
+ * @param header The header value
+ * @param fieldDelim The fieldDelim value
+ * @param useQuoteDelim The useQuoteDelim value
+ * @param naValue The naValue value
+ * @param selectCols The selectCols value
+ * @param recordDefaults The recordDefaults value
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of CSVDataset
+ */
+ public CSVDataset cSVDataset(Operand<TString> filenames, Operand<TString> compressionType,
+ Operand<TInt64> bufferSize, Operand<TBool> header, Operand<TString> fieldDelim,
+ Operand<TBool> useQuoteDelim, Operand<TString> naValue, Operand<TInt64> selectCols,
+ Iterable<Operand<?>> recordDefaults, List<Shape> outputShapes) {
+ return CSVDataset.create(scope, filenames, compressionType, bufferSize, header, fieldDelim, useQuoteDelim, naValue, selectCols, recordDefaults, outputShapes);
+ }
+
+ /**
+ * The ExperimentalChooseFastestDataset operation
+ *
+ * @param inputDatasets The inputDatasets value
+ * @param numExperiments The value of the numExperiments attribute
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of ChooseFastestDataset
+ */
+ public ChooseFastestDataset chooseFastestDataset(Iterable<Operand<? extends TType>> inputDatasets,
+ Long numExperiments, List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return ChooseFastestDataset.create(scope, inputDatasets, numExperiments, outputTypes, outputShapes);
+ }
+
+ /**
+ * Returns the cardinality of {@code input_dataset}.
+ *
+ * @param inputDataset A variant tensor representing the dataset to return cardinality for.
+ * @return a new instance of DatasetCardinality
+ */
+ public DatasetCardinality datasetCardinality(Operand<? extends TType> inputDataset) {
+ return DatasetCardinality.create(scope, inputDataset);
+ }
+
+ /**
+ * Writes the given dataset to the given file using the TFRecord format.
+ *
+ * @param inputDataset A variant tensor representing the dataset to write.
+ * @param filename A scalar string tensor representing the filename to use.
+ * @param compressionType A scalar string tensor containing either (i) the empty string (no
+ * compression), (ii) "ZLIB", or (iii) "GZIP".
+ * @return a new instance of DatasetToTFRecord
+ */
+ public DatasetToTFRecord datasetToTFRecord(Operand<? extends TType> inputDataset,
+ Operand<TString> filename, Operand<TString> compressionType) {
+ return DatasetToTFRecord.create(scope, inputDataset, filename, compressionType);
+ }
+
+ /**
+ * Creates a dataset that batches input elements into a SparseTensor.
+ *
+ * @param inputDataset A handle to an input dataset. Must have a single component.
+ * @param batchSize A scalar representing the number of elements to accumulate in a
+ * batch.
+ * @param rowShape A vector representing the dense shape of each row in the produced
+ * SparseTensor. The shape may be partially specified, using {@code -1} to indicate
+ * that a particular dimension should use the maximum size of all batch elements.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of DenseToSparseBatchDataset
+ */
+ public DenseToSparseBatchDataset denseToSparseBatchDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> batchSize, Operand<TInt64> rowShape, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return DenseToSparseBatchDataset.create(scope, inputDataset, batchSize, rowShape, outputTypes, outputShapes);
+ }
+
+ /**
+ * A substitute for {@code InterleaveDataset} on a fixed list of {@code N} datasets.
+ *
+ * @param selectorInputDataset A dataset of scalar {@code DT_INT64} elements that determines which of the
+ * {@code N} data inputs should produce the next output element.
+ * @param dataInputDatasets {@code N} datasets with the same type that will be interleaved according to
+ * the values of {@code selector_input_dataset}.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of DirectedInterleaveDataset
+ */
+ public DirectedInterleaveDataset directedInterleaveDataset(
+ Operand<? extends TType> selectorInputDataset,
+ Iterable<Operand<? extends TType>> dataInputDatasets,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return DirectedInterleaveDataset.create(scope, selectorInputDataset, dataInputDatasets, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that computes a group-by on {@code input_dataset}.
+ *
+ * @param inputDataset A variant tensor representing the input dataset.
+ * @param keyFuncOtherArguments A list of tensors, typically values that were captured when
+ * building a closure for {@code key_func}.
+ * @param initFuncOtherArguments A list of tensors, typically values that were captured when
+ * building a closure for {@code init_func}.
+ * @param reduceFuncOtherArguments A list of tensors, typically values that were captured when
+ * building a closure for {@code reduce_func}.
+ * @param finalizeFuncOtherArguments A list of tensors, typically values that were captured when
+ * building a closure for {@code finalize_func}.
+ * @param keyFunc A function mapping an element of {@code input_dataset}, concatenated
+ * with {@code key_func_other_arguments} to a scalar value of type DT_INT64.
+ * @param initFunc A function mapping a key of type DT_INT64, concatenated with
+ * {@code init_func_other_arguments} to the initial reducer state.
+ * @param reduceFunc A function mapping the current reducer state and an element of {@code input_dataset},
+ * concatenated with {@code reduce_func_other_arguments} to a new reducer state.
+ * @param finalizeFunc A function mapping the final reducer state to an output element.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of GroupByReducerDataset
+ */
+ public GroupByReducerDataset groupByReducerDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> keyFuncOtherArguments, Iterable<Operand<?>> initFuncOtherArguments,
+ Iterable<Operand<?>> reduceFuncOtherArguments,
+ Iterable<Operand<?>> finalizeFuncOtherArguments, ConcreteFunction keyFunc,
+ ConcreteFunction initFunc, ConcreteFunction reduceFunc, ConcreteFunction finalizeFunc,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return GroupByReducerDataset.create(scope, inputDataset, keyFuncOtherArguments, initFuncOtherArguments, reduceFuncOtherArguments, finalizeFuncOtherArguments, keyFunc, initFunc, reduceFunc, finalizeFunc, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that computes a windowed group-by on {@code input_dataset}.
+ * // TODO(mrry): Support non-int64 keys.
+ *
+ * @param inputDataset The inputDataset value
+ * @param keyFuncOtherArguments The keyFuncOtherArguments value
+ * @param reduceFuncOtherArguments The reduceFuncOtherArguments value
+ * @param windowSizeFuncOtherArguments The windowSizeFuncOtherArguments value
+ * @param keyFunc A function mapping an element of {@code input_dataset}, concatenated
+ * with {@code key_func_other_arguments} to a scalar value of type DT_INT64.
+ * @param reduceFunc The value of the reduceFunc attribute
+ * @param windowSizeFunc The value of the windowSizeFunc attribute
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of GroupByWindowDataset
+ */
+ public GroupByWindowDataset groupByWindowDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> keyFuncOtherArguments, Iterable<Operand<?>> reduceFuncOtherArguments,
+ Iterable<Operand<?>> windowSizeFuncOtherArguments, ConcreteFunction keyFunc,
+ ConcreteFunction reduceFunc, ConcreteFunction windowSizeFunc,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return GroupByWindowDataset.create(scope, inputDataset, keyFuncOtherArguments, reduceFuncOtherArguments, windowSizeFuncOtherArguments, keyFunc, reduceFunc, windowSizeFunc, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that contains the elements of {@code input_dataset} ignoring errors.
+ *
+ * @param inputDataset The inputDataset value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of IgnoreErrorsDataset
+ */
+ public IgnoreErrorsDataset ignoreErrorsDataset(Operand<? extends TType> inputDataset,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes,
+ IgnoreErrorsDataset.Options... options) {
+ return IgnoreErrorsDataset.create(scope, inputDataset, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * Returns the name of the device on which {@code resource} has been placed.
+ *
+ * @param resource The resource value
+ * @return a new instance of IteratorGetDevice
+ */
+ public IteratorGetDevice iteratorGetDevice(Operand<? extends TType> resource) {
+ return IteratorGetDevice.create(scope, resource);
+ }
+
+ /**
+ * Records the latency of producing {@code input_dataset} elements in a StatsAggregator.
+ *
+ * @param inputDataset The inputDataset value
+ * @param tag The tag value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of LatencyStatsDataset
+ */
+ public LatencyStatsDataset latencyStatsDataset(Operand<? extends TType> inputDataset,
+ Operand<TString> tag, List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return LatencyStatsDataset.create(scope, inputDataset, tag, outputTypes, outputShapes);
+ }
+
+ /**
+ * The ExperimentalLMDBDataset operation
+ *
+ * @param filenames The filenames value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of LmdbDataset
+ */
+ public LmdbDataset lmdbDataset(Operand<TString> filenames,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return LmdbDataset.create(scope, filenames, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that fuses mapping with batching.
+ * Creates a dataset that applies {@code f} to the outputs of {@code input_dataset} and then
+ * batches {@code batch_size} of them.
+ * Unlike a "MapDataset", which applies {@code f} sequentially, this dataset invokes up
+ * to {@code batch_size * num_parallel_batches} copies of {@code f} in parallel.
+ *
+ * @param inputDataset A variant tensor representing the input dataset.
+ * @param otherArguments A list of tensors, typically values that were captured when building a closure
+ * for {@code f}.
+ * @param batchSize A scalar representing the number of elements to accumulate in a
+ * batch. It determines the number of concurrent invocations of {@code f} that process
+ * elements from {@code input_dataset} in parallel.
+ * @param numParallelCalls A scalar representing the maximum number of parallel invocations of the {@code map_fn}
+ * function. Applying the {@code map_fn} on consecutive input elements in parallel has
+ * the potential to improve input pipeline throughput.
+ * @param dropRemainder A scalar representing whether the last batch should be dropped in case its size
+ * is smaller than desired.
+ * @param f A function to apply to the outputs of {@code input_dataset}.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of MapAndBatchDataset
+ */
+ public MapAndBatchDataset mapAndBatchDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> otherArguments, Operand<TInt64> batchSize,
+ Operand<TInt64> numParallelCalls, Operand<TBool> dropRemainder, ConcreteFunction f,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes,
+ MapAndBatchDataset.Options... options) {
+ return MapAndBatchDataset.create(scope, inputDataset, otherArguments, batchSize, numParallelCalls, dropRemainder, f, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * Creates a dataset that applies {@code f} to the outputs of {@code input_dataset}.
+ *
+ * @param inputDataset The inputDataset value
+ * @param otherArguments The otherArguments value
+ * @param f The value of the f attribute
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of MapDataset
+ */
+ public MapDataset mapDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> otherArguments, ConcreteFunction f,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes,
+ MapDataset.Options... options) {
+ return MapDataset.create(scope, inputDataset, otherArguments, f, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * The ExperimentalMatchingFilesDataset operation
+ *
+ * @param patterns The patterns value
+ * @return a new instance of MatchingFilesDataset
+ */
+ public MatchingFilesDataset matchingFilesDataset(Operand<TString> patterns) {
+ return MatchingFilesDataset.create(scope, patterns);
+ }
+
+ /**
+ * Creates a dataset that overrides the maximum intra-op parallelism.
+ *
+ * @param inputDataset The inputDataset value
+ * @param maxIntraOpParallelism Identifies the maximum intra-op parallelism to use.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of MaxIntraOpParallelismDataset
+ */
+ public MaxIntraOpParallelismDataset maxIntraOpParallelismDataset(
+ Operand<? extends TType> inputDataset, Operand<TInt64> maxIntraOpParallelism,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return MaxIntraOpParallelismDataset.create(scope, inputDataset, maxIntraOpParallelism, outputTypes, outputShapes);
+ }
+
+ /**
+ * The ExperimentalNonSerializableDataset operation
+ *
+ * @param inputDataset The inputDataset value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of NonSerializableDataset
+ */
+ public NonSerializableDataset nonSerializableDataset(Operand<? extends TType> inputDataset,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return NonSerializableDataset.create(scope, inputDataset, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that applies {@code f} to the outputs of {@code input_dataset}.
+ * The resulting dataset is similar to the {@code InterleaveDataset}, with the exception
+ * that if retrieving the next value from a dataset would cause the requester to
+ * block, it will skip that input dataset. This dataset is especially useful
+ * when loading data from variable-latency datastores (e.g. HDFS, GCS), as it
+ * allows the training step to proceed so long as some data is available.
+ * !! WARNING !! This dataset is not deterministic!
+ *
+ * @param inputDataset The inputDataset value
+ * @param otherArguments The otherArguments value
+ * @param cycleLength The cycleLength value
+ * @param blockLength The blockLength value
+ * @param sloppy The sloppy value
+ * @param bufferOutputElements The bufferOutputElements value
+ * @param prefetchInputElements The prefetchInputElements value
+ * @param f A function mapping elements of {@code input_dataset}, concatenated with
+ * {@code other_arguments}, to a Dataset variant that contains elements matching
+ * {@code output_types} and {@code output_shapes}.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of ParallelInterleaveDataset
+ */
+ public ParallelInterleaveDataset parallelInterleaveDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> otherArguments, Operand<TInt64> cycleLength, Operand<TInt64> blockLength,
+ Operand<TBool> sloppy, Operand<TInt64> bufferOutputElements,
+ Operand<TInt64> prefetchInputElements, ConcreteFunction f,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return ParallelInterleaveDataset.create(scope, inputDataset, otherArguments, cycleLength, blockLength, sloppy, bufferOutputElements, prefetchInputElements, f, outputTypes, outputShapes);
+ }
+
+ /**
+ * Transforms {@code input_dataset} containing {@code Example} protos as vectors of DT_STRING into a dataset of {@code Tensor} or {@code SparseTensor} objects representing the parsed features.
+ *
+ * @param inputDataset The inputDataset value
+ * @param numParallelCalls The numParallelCalls value
+ * @param denseDefaults A dict mapping string keys to {@code Tensor}s.
+ * The keys of the dict must match the dense_keys of the feature.
+ * @param sparseKeys A list of string keys in the examples features.
+ * The results for these keys will be returned as {@code SparseTensor} objects.
+ * @param denseKeys A list of Ndense string Tensors (scalars).
+ * The keys expected in the Examples features associated with dense values.
+ * @param sparseTypes A list of {@code DTypes} of the same length as {@code sparse_keys}.
+ * Only {@code tf.float32} ({@code FloatList}), {@code tf.int64} ({@code Int64List}),
+ * and {@code tf.string} ({@code BytesList}) are supported.
+ * @param denseShapes List of tuples with the same length as {@code dense_keys}.
+ * The shape of the data for each dense feature referenced by {@code dense_keys}.
+ * Required for any input tensors identified by {@code dense_keys}. Must be
+ * either fully defined, or may contain an unknown first dimension.
+ * An unknown first dimension means the feature is treated as having
+ * a variable number of blocks, and the output shape along this dimension
+ * is considered unknown at graph build time. Padding is applied for
+ * minibatch elements smaller than the maximum number of blocks for the
+ * given feature along this dimension.
+ * @param outputTypes The type list for the return values.
+ * @param outputShapes The list of shapes being produced.
+ * @param options carries optional attribute values
+ * @return a new instance of ParseExampleDataset
+ */
+ public ParseExampleDataset parseExampleDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> numParallelCalls, Iterable<Operand<?>> denseDefaults, List<String> sparseKeys,
+ List<String> denseKeys, List<Class<? extends TType>> sparseTypes, List<Shape> denseShapes,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes,
+ ParseExampleDataset.Options... options) {
+ return ParseExampleDataset.create(scope, inputDataset, numParallelCalls, denseDefaults, sparseKeys, denseKeys, sparseTypes, denseShapes, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * Creates a dataset that uses a custom thread pool to compute {@code input_dataset}.
+ *
+ * @param inputDataset The inputDataset value
+ * @param numThreads Identifies the number of threads to use for the private threadpool.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of PrivateThreadPoolDataset
+ */
+ public PrivateThreadPoolDataset privateThreadPoolDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> numThreads, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return PrivateThreadPoolDataset.create(scope, inputDataset, numThreads, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a Dataset that returns pseudorandom numbers.
+ *
+ * @param seed A scalar seed for the random number generator. If either seed or
+ * seed2 is set to be non-zero, the random number generator is seeded
+ * by the given seed. Otherwise, a random seed is used.
+ * @param seed2 A second scalar seed to avoid seed collision.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of RandomDataset
+ */
+ public RandomDataset randomDataset(Operand<TInt64> seed, Operand<TInt64> seed2,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return RandomDataset.create(scope, seed, seed2, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that changes the batch size.
+ * Creates a dataset that changes the batch size of the dataset to current batch
+ * size // num_replicas.
+ *
+ * @param inputDataset A variant tensor representing the input dataset.
+ * @param numReplicas A scalar representing the number of replicas to distribute this batch across. As
+ * a result of this transformation the current batch size would end up being
+ * divided by this parameter.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of RebatchDataset
+ */
+ public RebatchDataset rebatchDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> numReplicas, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes, RebatchDataset.Options... options) {
+ return RebatchDataset.create(scope, inputDataset, numReplicas, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * Creates a dataset that successively reduces {@code f} over the elements of {@code input_dataset}.
+ *
+ * @param inputDataset The inputDataset value
+ * @param initialState The initialState value
+ * @param otherArguments The otherArguments value
+ * @param f The value of the f attribute
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of ScanDataset
+ */
+ public ScanDataset scanDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> initialState, Iterable<Operand<?>> otherArguments, ConcreteFunction f,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes,
+ ScanDataset.Options... options) {
+ return ScanDataset.create(scope, inputDataset, initialState, otherArguments, f, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * The ExperimentalSetStatsAggregatorDataset operation
+ *
+ * @param inputDataset The inputDataset value
+ * @param statsAggregator The statsAggregator value
+ * @param tag The tag value
+ * @param counterPrefix The counterPrefix value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of SetStatsAggregatorDataset
+ */
+ public SetStatsAggregatorDataset setStatsAggregatorDataset(Operand<? extends TType> inputDataset,
+ Operand<? extends TType> statsAggregator, Operand<TString> tag,
+ Operand<TString> counterPrefix, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return SetStatsAggregatorDataset.create(scope, inputDataset, statsAggregator, tag, counterPrefix, outputTypes, outputShapes);
+ }
+
+ /**
+ * The ExperimentalSleepDataset operation
+ *
+ * @param inputDataset The inputDataset value
+ * @param sleepMicroseconds The sleepMicroseconds value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of SleepDataset
+ */
+ public SleepDataset sleepDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> sleepMicroseconds, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return SleepDataset.create(scope, inputDataset, sleepMicroseconds, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that passes a sliding window over {@code input_dataset}.
+ *
+ * @param inputDataset The inputDataset value
+ * @param windowSize A scalar representing the number of elements in the
+ * sliding window.
+ * @param windowShift A scalar representing the steps moving the sliding window
+ * forward in one iteration. It must be positive.
+ * @param windowStride A scalar representing the stride of the input elements of the sliding window.
+ * It must be positive.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of SlidingWindowDataset
+ */
+ public SlidingWindowDataset slidingWindowDataset(Operand<? extends TType> inputDataset,
+ Operand<TInt64> windowSize, Operand<TInt64> windowShift, Operand<TInt64> windowStride,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return SlidingWindowDataset.create(scope, inputDataset, windowSize, windowShift, windowStride, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that executes a SQL query and emits rows of the result set.
+ *
+ * @param driverName The database type. Currently, the only supported type is 'sqlite'.
+ * @param dataSourceName A connection string to connect to the database.
+ * @param query A SQL query to execute.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of SqlDataset
+ */
+ public SqlDataset sqlDataset(Operand<TString> driverName, Operand<TString> dataSourceName,
+ Operand<TString> query, List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return SqlDataset.create(scope, driverName, dataSourceName, query, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a statistics manager resource.
+ *
+ * @param options carries optional attribute values
+ * @return a new instance of StatsAggregatorHandle
+ */
+ public StatsAggregatorHandle statsAggregatorHandle(StatsAggregatorHandle.Options... options) {
+ return StatsAggregatorHandle.create(scope, options);
+ }
+
+ /**
+ * Produces a summary of any statistics recorded by the given statistics manager.
+ *
+ * @param iterator The iterator value
+ * @return a new instance of StatsAggregatorSummary
+ */
+ public StatsAggregatorSummary statsAggregatorSummary(Operand<? extends TType> iterator) {
+ return StatsAggregatorSummary.create(scope, iterator);
+ }
+
+ /**
+ * Creates a dataset that stops iteration when {@code predicate} is false.
+ * The {@code predicate} function must return a scalar boolean and accept the
+ * following arguments:
+ *
+ * - One tensor for each component of an element of {@code input_dataset}.
+ * - One tensor for each value in {@code other_arguments}.
+ *
+ *
+ * @param inputDataset The inputDataset value
+ * @param otherArguments A list of tensors, typically values that were captured when
+ * building a closure for {@code predicate}.
+ * @param predicate A function returning a scalar boolean.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of TakeWhileDataset
+ */
+ public TakeWhileDataset takeWhileDataset(Operand<? extends TType> inputDataset,
+ Iterable<Operand<?>> otherArguments, ConcreteFunction predicate,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return TakeWhileDataset.create(scope, inputDataset, otherArguments, predicate, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that uses a custom thread pool to compute {@code input_dataset}.
+ *
+ * @param inputDataset The inputDataset value
+ * @param threadPool A resource produced by the ThreadPoolHandle op.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of ThreadPoolDataset
+ */
+ public ThreadPoolDataset threadPoolDataset(Operand<? extends TType> inputDataset,
+ Operand<? extends TType> threadPool, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return ThreadPoolDataset.create(scope, inputDataset, threadPool, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that uses a custom thread pool to compute {@code input_dataset}.
+ *
+ * @param numThreads The number of threads in the thread pool.
+ * @param displayName A human-readable name for the threads that may be visible in some
+ * visualizations.
+ * @param options carries optional attribute values
+ * @return a new instance of ThreadPoolHandle
+ */
+ public ThreadPoolHandle threadPoolHandle(Long numThreads, String displayName,
+ ThreadPoolHandle.Options... options) {
+ return ThreadPoolHandle.create(scope, numThreads, displayName, options);
+ }
+
+ /**
+ * A dataset that splits the elements of its input into multiple elements.
+ *
+ * @param inputDataset The inputDataset value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of UnbatchDataset
+ */
+ public UnbatchDataset unbatchDataset(Operand<? extends TType> inputDataset,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return UnbatchDataset.create(scope, inputDataset, outputTypes, outputShapes);
+ }
+
+ /**
+ * Creates a dataset that contains the unique elements of {@code input_dataset}.
+ *
+ * @param inputDataset The inputDataset value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of UniqueDataset
+ */
+ public UniqueDataset uniqueDataset(Operand<? extends TType> inputDataset,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {
+ return UniqueDataset.create(scope, inputDataset, outputTypes, outputShapes);
+ }
+
+ /**
+ * Get the parent {@link Ops} object.
+ */
+ public final Ops ops() {
+ return ops;
+ }
+}
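
Because DataOps gains a public {@code experimental} field in the diff that follows, these wrappers become reachable as tf.data.experimental.*. A minimal sketch using the randomDataset wrapper defined above (the eager-session setup and the single scalar TInt64 output component are illustrative assumptions, not part of the diff):

// Sketch: creating an experimental RandomDataset through the new group.
import java.util.Arrays;
import java.util.List;
import org.tensorflow.EagerSession;
import org.tensorflow.ndarray.Shape;
import org.tensorflow.op.Ops;
import org.tensorflow.op.data.experimental.RandomDataset;
import org.tensorflow.types.TInt64;
import org.tensorflow.types.family.TType;

public final class RandomDatasetSketch {
  public static void main(String[] args) {
    try (EagerSession session = EagerSession.create()) {
      Ops tf = Ops.create(session);
      // One scalar int64 component per element (assumed output metadata).
      List<Class<? extends TType>> outputTypes = Arrays.<Class<? extends TType>>asList(TInt64.class);
      List<Shape> outputShapes = Arrays.asList(Shape.scalar());
      // Non-zero seeds make the pseudorandom stream deterministic.
      RandomDataset random = tf.data.experimental.randomDataset(
          tf.constant(87654321L), tf.constant(42L), outputTypes, outputShapes);
    }
  }
}
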
diff --git a/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataOps.java b/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataOps.java
index b49373aea62..77b0a424cae 100644
--- a/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataOps.java
+++ b/tensorflow-core/tensorflow-core-api/src/gen/annotations/org/tensorflow/op/DataOps.java
@@ -22,9 +22,11 @@
import org.tensorflow.Operand;
import org.tensorflow.ndarray.Shape;
import org.tensorflow.op.data.AnonymousIterator;
+import org.tensorflow.op.data.AnonymousMemoryCache;
import org.tensorflow.op.data.AnonymousMultiDeviceIterator;
import org.tensorflow.op.data.AssertCardinalityDataset;
import org.tensorflow.op.data.AssertNextDataset;
+import org.tensorflow.op.data.AssertPrevDataset;
import org.tensorflow.op.data.AutoShardDataset;
import org.tensorflow.op.data.BatchDataset;
import org.tensorflow.op.data.BytesProducedStatsDataset;
@@ -32,17 +34,22 @@
import org.tensorflow.op.data.CacheDataset;
import org.tensorflow.op.data.ChooseFastestBranchDataset;
import org.tensorflow.op.data.ChooseFastestDataset;
+import org.tensorflow.op.data.CompressElement;
import org.tensorflow.op.data.ConcatenateDataset;
import org.tensorflow.op.data.DataServiceDataset;
import org.tensorflow.op.data.DatasetCardinality;
+import org.tensorflow.op.data.DatasetFingerprint;
import org.tensorflow.op.data.DatasetFromGraph;
import org.tensorflow.op.data.DatasetToGraph;
import org.tensorflow.op.data.DatasetToSingleElement;
import org.tensorflow.op.data.DatasetToTfRecord;
import org.tensorflow.op.data.DeleteIterator;
+import org.tensorflow.op.data.DeleteMemoryCache;
+import org.tensorflow.op.data.DeleteMultiDeviceIterator;
import org.tensorflow.op.data.DenseToSparseBatchDataset;
import org.tensorflow.op.data.DeserializeIterator;
import org.tensorflow.op.data.DirectedInterleaveDataset;
+import org.tensorflow.op.data.DummyIterationCounter;
import org.tensorflow.op.data.FilterByLastComponentDataset;
import org.tensorflow.op.data.FilterDataset;
import org.tensorflow.op.data.FinalizeDataset;
@@ -55,13 +62,18 @@
import org.tensorflow.op.data.InitializeTableFromDataset;
import org.tensorflow.op.data.InterleaveDataset;
import org.tensorflow.op.data.Iterator;
+import org.tensorflow.op.data.IteratorFromStringHandle;
+import org.tensorflow.op.data.IteratorGetDevice;
import org.tensorflow.op.data.IteratorGetNext;
import org.tensorflow.op.data.IteratorGetNextAsOptional;
import org.tensorflow.op.data.IteratorGetNextSync;
import org.tensorflow.op.data.IteratorToStringHandle;
import org.tensorflow.op.data.LMDBDataset;
import org.tensorflow.op.data.LatencyStatsDataset;
+import org.tensorflow.op.data.LeakyReluGrad;
import org.tensorflow.op.data.LegacyParallelInterleaveDataset;
+import org.tensorflow.op.data.ListDataset;
+import org.tensorflow.op.data.ListSnapshotChunksDataset;
import org.tensorflow.op.data.LoadDataset;
import org.tensorflow.op.data.MakeIterator;
import org.tensorflow.op.data.MapAndBatchDataset;
@@ -69,6 +81,11 @@
import org.tensorflow.op.data.MatchingFilesDataset;
import org.tensorflow.op.data.MaxIntraOpParallelismDataset;
import org.tensorflow.op.data.ModelDataset;
+import org.tensorflow.op.data.MultiDeviceIterator;
+import org.tensorflow.op.data.MultiDeviceIteratorFromStringHandle;
+import org.tensorflow.op.data.MultiDeviceIteratorGetNextFromShard;
+import org.tensorflow.op.data.MultiDeviceIteratorInit;
+import org.tensorflow.op.data.MultiDeviceIteratorToStringHandle;
import org.tensorflow.op.data.NonSerializableDataset;
import org.tensorflow.op.data.OneShotIterator;
import org.tensorflow.op.data.OptimizeDataset;
@@ -79,6 +96,7 @@
import org.tensorflow.op.data.OptionsDataset;
import org.tensorflow.op.data.PaddedBatchDataset;
import org.tensorflow.op.data.ParallelBatchDataset;
+import org.tensorflow.op.data.ParallelFilterDataset;
import org.tensorflow.op.data.ParallelInterleaveDataset;
import org.tensorflow.op.data.ParallelMapDataset;
import org.tensorflow.op.data.ParseExampleDataset;
@@ -90,6 +108,7 @@
import org.tensorflow.op.data.ReduceDataset;
import org.tensorflow.op.data.RegisterDataset;
import org.tensorflow.op.data.RepeatDataset;
+import org.tensorflow.op.data.RewriteDataset;
import org.tensorflow.op.data.SamplingDataset;
import org.tensorflow.op.data.SaveDataset;
import org.tensorflow.op.data.ScanDataset;
@@ -101,9 +120,14 @@
import org.tensorflow.op.data.SkipDataset;
import org.tensorflow.op.data.SleepDataset;
import org.tensorflow.op.data.SlidingWindowDataset;
+import org.tensorflow.op.data.SnapshotChunkDataset;
import org.tensorflow.op.data.SnapshotDataset;
+import org.tensorflow.op.data.SnapshotDatasetReader;
+import org.tensorflow.op.data.SnapshotNestedDatasetReader;
import org.tensorflow.op.data.SparseTensorSliceDataset;
import org.tensorflow.op.data.SqlDataset;
+import org.tensorflow.op.data.StatsAggregatorHandle;
+import org.tensorflow.op.data.StatsAggregatorSetSummaryWriter;
import org.tensorflow.op.data.TakeDataset;
import org.tensorflow.op.data.TakeWhileDataset;
import org.tensorflow.op.data.TensorDataset;
@@ -111,14 +135,18 @@
import org.tensorflow.op.data.TextLineDataset;
import org.tensorflow.op.data.TfRecordDataset;
import org.tensorflow.op.data.ThreadPoolDataset;
+import org.tensorflow.op.data.ThreadPoolHandle;
import org.tensorflow.op.data.UnbatchDataset;
+import org.tensorflow.op.data.UncompressElement;
import org.tensorflow.op.data.UniqueDataset;
import org.tensorflow.op.data.UnwrapDatasetVariant;
import org.tensorflow.op.data.WindowDataset;
+import org.tensorflow.op.data.WindowOp;
import org.tensorflow.op.data.WrapDatasetVariant;
import org.tensorflow.op.data.ZipDataset;
import org.tensorflow.types.TBool;
import org.tensorflow.types.TFloat32;
+import org.tensorflow.types.TInt32;
import org.tensorflow.types.TInt64;
import org.tensorflow.types.TString;
import org.tensorflow.types.family.TNumber;
@@ -130,6 +158,8 @@
* @see {@link Ops}
*/
public final class DataOps {
+ public final DataExperimentalOps experimental;
+
private final Scope scope;
private final Ops ops;
@@ -137,6 +167,7 @@ public final class DataOps {
DataOps(Ops ops) {
this.scope = ops.scope();
this.ops = ops;
+ experimental = new DataExperimentalOps(ops);
}
/**
@@ -151,6 +182,15 @@ public AnonymousIterator anonymousIterator(List<Class<? extends TType>> outputTy
return AnonymousIterator.create(scope, outputTypes, outputShapes);
}
+ /**
+ * The AnonymousMemoryCache operation
+ *
+ * @return a new instance of AnonymousMemoryCache
+ */
+ public AnonymousMemoryCache anonymousMemoryCache() {
+ return AnonymousMemoryCache.create(scope);
+ }
+
/**
* A container for a multi device iterator resource.
*
@@ -203,6 +243,30 @@ public AssertNextDataset assertNextDataset(Operand<? extends TType> inputDataset
return AssertNextDataset.create(scope, inputDataset, transformations, outputTypes, outputShapes);
}
+ /**
+ * A transformation that asserts which transformations happened previously.
+ * This transformation checks the names and, optionally, the attribute name-value
+ * pairs in the {@code transformations} argument against those of the transformations
+ * that preceded this transformation. If there is a mismatch, the transformation
+ * raises an exception.
+ * The check occurs when iterating over the contents of the dataset, which
+ * means that the check happens after any static optimizations are applied
+ * to the dataset graph.
+ *
+ * @param inputDataset A variant tensor representing the input dataset.
+ * {@code data.AssertPrevDataset} passes through the outputs of its input dataset.
+ * @param transformations A {@code tf.string} vector {@code tf.Tensor} identifying the transformations, with optional
+ * attribute name-value pairs, that are expected to have happened previously.
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of AssertPrevDataset
+ */
+ public AssertPrevDataset assertPrevDataset(Operand<? extends TType> inputDataset,
+ Operand<TString> transformations, List<Class<? extends TType>> outputTypes,
+ List<Shape> outputShapes) {
+ return AssertPrevDataset.create(scope, inputDataset, transformations, outputTypes, outputShapes);
+ }
+
/**
* Creates a dataset that shards the input dataset.
* Creates a dataset that shards the input dataset by num_workers, returning a
@@ -334,6 +398,16 @@ public ChooseFastestDataset chooseFastestDataset(Iterable<Operand<? extends TType>> inputDatasets,
+ /**
+ * Compresses a dataset element.
+ *
+ * @param components The components value
+ * @return a new instance of CompressElement
+ */
+ public CompressElement compressElement(Iterable<Operand<?>> components) {
+ return CompressElement.create(scope, components);
+ }
+
/**
* Creates a dataset that concatenates {@code input_dataset} with {@code another_dataset}.
*
@@ -382,10 +456,23 @@ public DataServiceDataset dataServiceDataset(Operand<TInt64> datasetId,
* Returns the cardinality of {@code input_dataset}.
*
* @param inputDataset A variant tensor representing the dataset to return cardinality for.
+ * @param options carries optional attribute values
* @return a new instance of DatasetCardinality
*/
- public DatasetCardinality datasetCardinality(Operand<? extends TType> inputDataset) {
- return DatasetCardinality.create(scope, inputDataset);
+ public DatasetCardinality datasetCardinality(Operand<? extends TType> inputDataset,
+ DatasetCardinality.Options... options) {
+ return DatasetCardinality.create(scope, inputDataset, options);
+ }
+
+ /**
+ * Returns the fingerprint of {@code input_dataset}.
+ *
+ * @param inputDataset A variant tensor representing the dataset to return fingerprint for.
+ * @return a new instance of DatasetFingerprint
+ */
+ public DatasetFingerprint datasetFingerprint(Operand<? extends TType> inputDataset) {
+ return DatasetFingerprint.create(scope, inputDataset);
}
/**
@@ -453,6 +540,32 @@ public DeleteIterator deleteIterator(Operand<? extends TType> handle,
return DeleteIterator.create(scope, handle, deleter);
}
+ /**
+ * The DeleteMemoryCache operation
+ *
+ * @param handle The handle value
+ * @param deleter The deleter value
+ * @return a new instance of DeleteMemoryCache
+ */
+ public DeleteMemoryCache deleteMemoryCache(Operand<? extends TType> handle,
+ Operand<? extends TType> deleter) {
+ return DeleteMemoryCache.create(scope, handle, deleter);
+ }
+
+ /**
+ * A container for an iterator resource.
+ *
+ * @param multiDeviceIterator A handle to the multi device iterator to delete.
+ * @param iterators A list of iterator handles (unused). This is added so that automatic control dependencies get added during function tracing that ensure this op runs after all the dependent iterators are deleted.
+ * @param deleter A variant deleter.
+ * @return a new instance of DeleteMultiDeviceIterator
+ */
+ public DeleteMultiDeviceIterator deleteMultiDeviceIterator(
+ Operand<? extends TType> multiDeviceIterator, Iterable<Operand<? extends TType>> iterators,
+ Operand<? extends TType> deleter) {
+ return DeleteMultiDeviceIterator.create(scope, multiDeviceIterator, iterators, deleter);
+ }
+
/**
* Creates a dataset that batches input elements into a SparseTensor.
*
@@ -505,6 +618,15 @@ public DirectedInterleaveDataset directedInterleaveDataset(
return DirectedInterleaveDataset.create(scope, selectorInputDataset, dataInputDatasets, outputTypes, outputShapes, options);
}
+ /**
+ * The DummyIterationCounter operation
+ *
+ * @return a new instance of DummyIterationCounter
+ */
+ public DummyIterationCounter dummyIterationCounter() {
+ return DummyIterationCounter.create(scope);
+ }
+
/**
* Creates a dataset containing elements of the first component of {@code input_dataset} having true in the last component.
*
@@ -750,6 +872,29 @@ public Iterator iterator(String sharedName, String container,
return Iterator.create(scope, sharedName, container, outputTypes, outputShapes);
}
+ /**
+ * The IteratorFromStringHandleV2 operation
+ *
+ * @param stringHandle The stringHandle value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of IteratorFromStringHandle
+ */
+ public IteratorFromStringHandle iteratorFromStringHandle(Operand<TString> stringHandle,
+ List<Class<? extends TType>> outputTypes, IteratorFromStringHandle.Options... options) {
+ return IteratorFromStringHandle.create(scope, stringHandle, outputTypes, options);
+ }
+
+ /**
+ * Returns the name of the device on which {@code resource} has been placed.
+ *
+ * @param resource The resource value
+ * @return a new instance of IteratorGetDevice
+ */
+ public IteratorGetDevice iteratorGetDevice(Operand<? extends TType> resource) {
+ return IteratorGetDevice.create(scope, resource);
+ }
+
/**
* Gets the next output from the given iterator.
*
@@ -839,6 +984,22 @@ public LatencyStatsDataset latencyStatsDataset(Operand<? extends TType> inputDat
return LatencyStatsDataset.create(scope, inputDataset, tag, outputTypes, outputShapes);
}
+ /**
+ * Computes rectified linear gradients for a LeakyRelu operation.
+ *
+ * @param <T> data type for {@code backprops} output
+ * @param gradients The backpropagated gradients to the corresponding LeakyRelu operation.
+ * @param features The features passed as input to the corresponding LeakyRelu operation,
+ * OR the outputs of that operation (both work equivalently).
+ * @param options carries optional attribute values
+ * @param <T> data type for {@code LeakyReluGrad} output and operands
+ * @return a new instance of LeakyReluGrad
+ */
+ public <T extends TNumber> LeakyReluGrad<T> leakyReluGrad(Operand<T> gradients,
+ Operand<T> features, LeakyReluGrad.Options... options) {
+ return LeakyReluGrad.create(scope, gradients, features, options);
+ }
+
/**
* Creates a dataset that applies {@code f} to the outputs of {@code input_dataset}.
* The resulting dataset is similar to the {@code InterleaveDataset}, with the exception
@@ -871,6 +1032,34 @@ public LegacyParallelInterleaveDataset legacyParallelInterleaveDataset(
return LegacyParallelInterleaveDataset.create(scope, inputDataset, otherArguments, cycleLength, blockLength, bufferOutputElements, prefetchInputElements, f, outputTypes, outputShapes, options);
}
+ /**
+ * Creates a dataset that emits each of {@code tensors} once.
+ *
+ * @param tensors The tensors value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @param options carries optional attribute values
+ * @return a new instance of ListDataset
+ */
+ public ListDataset listDataset(Iterable<Operand<?>> tensors,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes,
+ ListDataset.Options... options) {
+ return ListDataset.create(scope, tensors, outputTypes, outputShapes, options);
+ }
+
+ /**
+ * The ListSnapshotChunksDataset operation
+ *
+ * @param snapshotPath The snapshotPath value
+ * @param outputTypes The value of the outputTypes attribute
+ * @param outputShapes The value of the outputShapes attribute
+ * @return a new instance of ListSnapshotChunksDataset
+ */
+ public ListSnapshotChunksDataset listSnapshotChunksDataset(Operand<TString> snapshotPath,
+ List<Class<? extends TType>> outputTypes, List<Shape> outputShapes) {