org.tensorflow.util.BundleEntryProto
Java API for TensorFlow protocol buffers.
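Below is the protoc-generated Java source for the tensorflow.BundleEntryProto message. As a minimal usage sketch (assuming the sibling generated classes org.tensorflow.framework.DataType and org.tensorflow.framework.TensorShapeProto shipped in the same artifact), an entry can be built, serialized, and parsed back like any other protobuf message; it relies only on accessors that appear in the listing below plus the standard toByteArray() inherited from the protobuf runtime:

import org.tensorflow.framework.DataType;
import org.tensorflow.framework.TensorShapeProto;
import org.tensorflow.util.BundleEntryProto;

public class BundleEntryExample {
  public static void main(String[] args) throws Exception {
    // Describe a checkpointed float tensor of shape [2, 3] stored in shard 0.
    TensorShapeProto shape = TensorShapeProto.newBuilder()
        .addDim(TensorShapeProto.Dim.newBuilder().setSize(2))
        .addDim(TensorShapeProto.Dim.newBuilder().setSize(3))
        .build();

    BundleEntryProto entry = BundleEntryProto.newBuilder()
        .setDtype(DataType.DT_FLOAT)
        .setShape(shape)
        .setShardId(0)
        .setOffset(0L)
        .setSize(24L)   // 2 * 3 float32 values = 24 bytes
        .build();

    // Round-trip through the protobuf wire format.
    byte[] bytes = entry.toByteArray();
    BundleEntryProto parsed = BundleEntryProto.parseFrom(bytes);
    System.out.println(parsed.getDtype() + " entry with "
        + parsed.getShape().getDimCount() + " dims, " + parsed.getSize() + " bytes");
  }
}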
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/protobuf/tensor_bundle.proto
package org.tensorflow.util;
/**
*
* Describes the metadata related to a checkpointed tensor.
*
*
* Protobuf type {@code tensorflow.BundleEntryProto}
*/
public final class BundleEntryProto extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:tensorflow.BundleEntryProto)
BundleEntryProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use BundleEntryProto.newBuilder() to construct.
private BundleEntryProto(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BundleEntryProto() {
dtype_ = 0;
shardId_ = 0;
offset_ = 0L;
size_ = 0L;
crc32C_ = 0;
slices_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private BundleEntryProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
dtype_ = rawValue;
break;
}
case 18: {
org.tensorflow.framework.TensorShapeProto.Builder subBuilder = null;
if (shape_ != null) {
subBuilder = shape_.toBuilder();
}
shape_ = input.readMessage(org.tensorflow.framework.TensorShapeProto.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(shape_);
shape_ = subBuilder.buildPartial();
}
break;
}
case 24: {
shardId_ = input.readInt32();
break;
}
case 32: {
offset_ = input.readInt64();
break;
}
case 40: {
size_ = input.readInt64();
break;
}
case 53: {
crc32C_ = input.readFixed32();
break;
}
case 58: {
if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
slices_ = new java.util.ArrayList<org.tensorflow.framework.TensorSliceProto>();
mutable_bitField0_ |= 0x00000040;
}
slices_.add(
input.readMessage(org.tensorflow.framework.TensorSliceProto.parser(), extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
slices_ = java.util.Collections.unmodifiableList(slices_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.util.TensorBundleProtos.internal_static_tensorflow_BundleEntryProto_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.util.TensorBundleProtos.internal_static_tensorflow_BundleEntryProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.util.BundleEntryProto.class, org.tensorflow.util.BundleEntryProto.Builder.class);
}
private int bitField0_;
public static final int DTYPE_FIELD_NUMBER = 1;
private int dtype_;
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public int getDtypeValue() {
return dtype_;
}
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public org.tensorflow.framework.DataType getDtype() {
org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(dtype_);
return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result;
}
public static final int SHAPE_FIELD_NUMBER = 2;
private org.tensorflow.framework.TensorShapeProto shape_;
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public boolean hasShape() {
return shape_ != null;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public org.tensorflow.framework.TensorShapeProto getShape() {
return shape_ == null ? org.tensorflow.framework.TensorShapeProto.getDefaultInstance() : shape_;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() {
return getShape();
}
public static final int SHARD_ID_FIELD_NUMBER = 3;
private int shardId_;
/**
*
* The binary content of the tensor lies in:
* File "shard_id": bytes [offset, offset + size).
*
*
* int32 shard_id = 3;
*/
public int getShardId() {
return shardId_;
}
public static final int OFFSET_FIELD_NUMBER = 4;
private long offset_;
/**
* int64 offset = 4;
*/
public long getOffset() {
return offset_;
}
public static final int SIZE_FIELD_NUMBER = 5;
private long size_;
/**
* int64 size = 5;
*/
public long getSize() {
return size_;
}
public static final int CRC32C_FIELD_NUMBER = 6;
private int crc32C_;
/**
*
* The CRC32C checksum of the tensor bytes.
*
*
* fixed32 crc32c = 6;
*/
public int getCrc32C() {
return crc32C_;
}
public static final int SLICES_FIELD_NUMBER = 7;
private java.util.List<org.tensorflow.framework.TensorSliceProto> slices_;
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public java.util.List<org.tensorflow.framework.TensorSliceProto> getSlicesList() {
return slices_;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public java.util.List<? extends org.tensorflow.framework.TensorSliceProtoOrBuilder>
getSlicesOrBuilderList() {
return slices_;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public int getSlicesCount() {
return slices_.size();
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProto getSlices(int index) {
return slices_.get(index);
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProtoOrBuilder getSlicesOrBuilder(
int index) {
return slices_.get(index);
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (dtype_ != org.tensorflow.framework.DataType.DT_INVALID.getNumber()) {
output.writeEnum(1, dtype_);
}
if (shape_ != null) {
output.writeMessage(2, getShape());
}
if (shardId_ != 0) {
output.writeInt32(3, shardId_);
}
if (offset_ != 0L) {
output.writeInt64(4, offset_);
}
if (size_ != 0L) {
output.writeInt64(5, size_);
}
if (crc32C_ != 0) {
output.writeFixed32(6, crc32C_);
}
for (int i = 0; i < slices_.size(); i++) {
output.writeMessage(7, slices_.get(i));
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (dtype_ != org.tensorflow.framework.DataType.DT_INVALID.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, dtype_);
}
if (shape_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getShape());
}
if (shardId_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(3, shardId_);
}
if (offset_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(4, offset_);
}
if (size_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(5, size_);
}
if (crc32C_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeFixed32Size(6, crc32C_);
}
for (int i = 0; i < slices_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(7, slices_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.tensorflow.util.BundleEntryProto)) {
return super.equals(obj);
}
org.tensorflow.util.BundleEntryProto other = (org.tensorflow.util.BundleEntryProto) obj;
boolean result = true;
result = result && dtype_ == other.dtype_;
result = result && (hasShape() == other.hasShape());
if (hasShape()) {
result = result && getShape()
.equals(other.getShape());
}
result = result && (getShardId()
== other.getShardId());
result = result && (getOffset()
== other.getOffset());
result = result && (getSize()
== other.getSize());
result = result && (getCrc32C()
== other.getCrc32C());
result = result && getSlicesList()
.equals(other.getSlicesList());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DTYPE_FIELD_NUMBER;
hash = (53 * hash) + dtype_;
if (hasShape()) {
hash = (37 * hash) + SHAPE_FIELD_NUMBER;
hash = (53 * hash) + getShape().hashCode();
}
hash = (37 * hash) + SHARD_ID_FIELD_NUMBER;
hash = (53 * hash) + getShardId();
hash = (37 * hash) + OFFSET_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getOffset());
hash = (37 * hash) + SIZE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getSize());
hash = (37 * hash) + CRC32C_FIELD_NUMBER;
hash = (53 * hash) + getCrc32C();
if (getSlicesCount() > 0) {
hash = (37 * hash) + SLICES_FIELD_NUMBER;
hash = (53 * hash) + getSlicesList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.util.BundleEntryProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.tensorflow.util.BundleEntryProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.tensorflow.util.BundleEntryProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.tensorflow.util.BundleEntryProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* Describes the metadata related to a checkpointed tensor.
*
*
* Protobuf type {@code tensorflow.BundleEntryProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:tensorflow.BundleEntryProto)
org.tensorflow.util.BundleEntryProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.tensorflow.util.TensorBundleProtos.internal_static_tensorflow_BundleEntryProto_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.tensorflow.util.TensorBundleProtos.internal_static_tensorflow_BundleEntryProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.tensorflow.util.BundleEntryProto.class, org.tensorflow.util.BundleEntryProto.Builder.class);
}
// Construct using org.tensorflow.util.BundleEntryProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getSlicesFieldBuilder();
}
}
public Builder clear() {
super.clear();
dtype_ = 0;
if (shapeBuilder_ == null) {
shape_ = null;
} else {
shape_ = null;
shapeBuilder_ = null;
}
shardId_ = 0;
offset_ = 0L;
size_ = 0L;
crc32C_ = 0;
if (slicesBuilder_ == null) {
slices_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
} else {
slicesBuilder_.clear();
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.tensorflow.util.TensorBundleProtos.internal_static_tensorflow_BundleEntryProto_descriptor;
}
public org.tensorflow.util.BundleEntryProto getDefaultInstanceForType() {
return org.tensorflow.util.BundleEntryProto.getDefaultInstance();
}
public org.tensorflow.util.BundleEntryProto build() {
org.tensorflow.util.BundleEntryProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.tensorflow.util.BundleEntryProto buildPartial() {
org.tensorflow.util.BundleEntryProto result = new org.tensorflow.util.BundleEntryProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.dtype_ = dtype_;
if (shapeBuilder_ == null) {
result.shape_ = shape_;
} else {
result.shape_ = shapeBuilder_.build();
}
result.shardId_ = shardId_;
result.offset_ = offset_;
result.size_ = size_;
result.crc32C_ = crc32C_;
if (slicesBuilder_ == null) {
if (((bitField0_ & 0x00000040) == 0x00000040)) {
slices_ = java.util.Collections.unmodifiableList(slices_);
bitField0_ = (bitField0_ & ~0x00000040);
}
result.slices_ = slices_;
} else {
result.slices_ = slicesBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.tensorflow.util.BundleEntryProto) {
return mergeFrom((org.tensorflow.util.BundleEntryProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.tensorflow.util.BundleEntryProto other) {
if (other == org.tensorflow.util.BundleEntryProto.getDefaultInstance()) return this;
if (other.dtype_ != 0) {
setDtypeValue(other.getDtypeValue());
}
if (other.hasShape()) {
mergeShape(other.getShape());
}
if (other.getShardId() != 0) {
setShardId(other.getShardId());
}
if (other.getOffset() != 0L) {
setOffset(other.getOffset());
}
if (other.getSize() != 0L) {
setSize(other.getSize());
}
if (other.getCrc32C() != 0) {
setCrc32C(other.getCrc32C());
}
if (slicesBuilder_ == null) {
if (!other.slices_.isEmpty()) {
if (slices_.isEmpty()) {
slices_ = other.slices_;
bitField0_ = (bitField0_ & ~0x00000040);
} else {
ensureSlicesIsMutable();
slices_.addAll(other.slices_);
}
onChanged();
}
} else {
if (!other.slices_.isEmpty()) {
if (slicesBuilder_.isEmpty()) {
slicesBuilder_.dispose();
slicesBuilder_ = null;
slices_ = other.slices_;
bitField0_ = (bitField0_ & ~0x00000040);
slicesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getSlicesFieldBuilder() : null;
} else {
slicesBuilder_.addAllMessages(other.slices_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.tensorflow.util.BundleEntryProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.tensorflow.util.BundleEntryProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int dtype_ = 0;
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public int getDtypeValue() {
return dtype_;
}
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public Builder setDtypeValue(int value) {
dtype_ = value;
onChanged();
return this;
}
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public org.tensorflow.framework.DataType getDtype() {
org.tensorflow.framework.DataType result = org.tensorflow.framework.DataType.valueOf(dtype_);
return result == null ? org.tensorflow.framework.DataType.UNRECOGNIZED : result;
}
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public Builder setDtype(org.tensorflow.framework.DataType value) {
if (value == null) {
throw new NullPointerException();
}
dtype_ = value.getNumber();
onChanged();
return this;
}
/**
*
* The tensor dtype and shape.
*
*
* .tensorflow.DataType dtype = 1;
*/
public Builder clearDtype() {
dtype_ = 0;
onChanged();
return this;
}
private org.tensorflow.framework.TensorShapeProto shape_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder> shapeBuilder_;
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public boolean hasShape() {
return shapeBuilder_ != null || shape_ != null;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public org.tensorflow.framework.TensorShapeProto getShape() {
if (shapeBuilder_ == null) {
return shape_ == null ? org.tensorflow.framework.TensorShapeProto.getDefaultInstance() : shape_;
} else {
return shapeBuilder_.getMessage();
}
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public Builder setShape(org.tensorflow.framework.TensorShapeProto value) {
if (shapeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
shape_ = value;
onChanged();
} else {
shapeBuilder_.setMessage(value);
}
return this;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public Builder setShape(
org.tensorflow.framework.TensorShapeProto.Builder builderForValue) {
if (shapeBuilder_ == null) {
shape_ = builderForValue.build();
onChanged();
} else {
shapeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public Builder mergeShape(org.tensorflow.framework.TensorShapeProto value) {
if (shapeBuilder_ == null) {
if (shape_ != null) {
shape_ =
org.tensorflow.framework.TensorShapeProto.newBuilder(shape_).mergeFrom(value).buildPartial();
} else {
shape_ = value;
}
onChanged();
} else {
shapeBuilder_.mergeFrom(value);
}
return this;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public Builder clearShape() {
if (shapeBuilder_ == null) {
shape_ = null;
onChanged();
} else {
shape_ = null;
shapeBuilder_ = null;
}
return this;
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public org.tensorflow.framework.TensorShapeProto.Builder getShapeBuilder() {
onChanged();
return getShapeFieldBuilder().getBuilder();
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
public org.tensorflow.framework.TensorShapeProtoOrBuilder getShapeOrBuilder() {
if (shapeBuilder_ != null) {
return shapeBuilder_.getMessageOrBuilder();
} else {
return shape_ == null ?
org.tensorflow.framework.TensorShapeProto.getDefaultInstance() : shape_;
}
}
/**
* .tensorflow.TensorShapeProto shape = 2;
*/
private com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder>
getShapeFieldBuilder() {
if (shapeBuilder_ == null) {
shapeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
org.tensorflow.framework.TensorShapeProto, org.tensorflow.framework.TensorShapeProto.Builder, org.tensorflow.framework.TensorShapeProtoOrBuilder>(
getShape(),
getParentForChildren(),
isClean());
shape_ = null;
}
return shapeBuilder_;
}
private int shardId_ ;
/**
*
* The binary content of the tensor lies in:
* File "shard_id": bytes [offset, offset + size).
*
*
* int32 shard_id = 3;
*/
public int getShardId() {
return shardId_;
}
/**
*
* The binary content of the tensor lies in:
* File "shard_id": bytes [offset, offset + size).
*
*
* int32 shard_id = 3;
*/
public Builder setShardId(int value) {
shardId_ = value;
onChanged();
return this;
}
/**
*
* The binary content of the tensor lies in:
* File "shard_id": bytes [offset, offset + size).
*
*
* int32 shard_id = 3;
*/
public Builder clearShardId() {
shardId_ = 0;
onChanged();
return this;
}
private long offset_ ;
/**
* int64 offset = 4;
*/
public long getOffset() {
return offset_;
}
/**
* int64 offset = 4;
*/
public Builder setOffset(long value) {
offset_ = value;
onChanged();
return this;
}
/**
* int64 offset = 4;
*/
public Builder clearOffset() {
offset_ = 0L;
onChanged();
return this;
}
private long size_ ;
/**
* int64 size = 5;
*/
public long getSize() {
return size_;
}
/**
* int64 size = 5;
*/
public Builder setSize(long value) {
size_ = value;
onChanged();
return this;
}
/**
* int64 size = 5;
*/
public Builder clearSize() {
size_ = 0L;
onChanged();
return this;
}
private int crc32C_ ;
/**
*
* The CRC32C checksum of the tensor bytes.
*
*
* fixed32 crc32c = 6;
*/
public int getCrc32C() {
return crc32C_;
}
/**
*
* The CRC32C checksum of the tensor bytes.
*
*
* fixed32 crc32c = 6;
*/
public Builder setCrc32C(int value) {
crc32C_ = value;
onChanged();
return this;
}
/**
*
* The CRC32C checksum of the tensor bytes.
*
*
* fixed32 crc32c = 6;
*/
public Builder clearCrc32C() {
crc32C_ = 0;
onChanged();
return this;
}
private java.util.List<org.tensorflow.framework.TensorSliceProto> slices_ =
java.util.Collections.emptyList();
private void ensureSlicesIsMutable() {
if (!((bitField0_ & 0x00000040) == 0x00000040)) {
slices_ = new java.util.ArrayList<org.tensorflow.framework.TensorSliceProto>(slices_);
bitField0_ |= 0x00000040;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
org.tensorflow.framework.TensorSliceProto, org.tensorflow.framework.TensorSliceProto.Builder, org.tensorflow.framework.TensorSliceProtoOrBuilder> slicesBuilder_;
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public java.util.List<org.tensorflow.framework.TensorSliceProto> getSlicesList() {
if (slicesBuilder_ == null) {
return java.util.Collections.unmodifiableList(slices_);
} else {
return slicesBuilder_.getMessageList();
}
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public int getSlicesCount() {
if (slicesBuilder_ == null) {
return slices_.size();
} else {
return slicesBuilder_.getCount();
}
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProto getSlices(int index) {
if (slicesBuilder_ == null) {
return slices_.get(index);
} else {
return slicesBuilder_.getMessage(index);
}
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder setSlices(
int index, org.tensorflow.framework.TensorSliceProto value) {
if (slicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSlicesIsMutable();
slices_.set(index, value);
onChanged();
} else {
slicesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder setSlices(
int index, org.tensorflow.framework.TensorSliceProto.Builder builderForValue) {
if (slicesBuilder_ == null) {
ensureSlicesIsMutable();
slices_.set(index, builderForValue.build());
onChanged();
} else {
slicesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder addSlices(org.tensorflow.framework.TensorSliceProto value) {
if (slicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSlicesIsMutable();
slices_.add(value);
onChanged();
} else {
slicesBuilder_.addMessage(value);
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder addSlices(
int index, org.tensorflow.framework.TensorSliceProto value) {
if (slicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSlicesIsMutable();
slices_.add(index, value);
onChanged();
} else {
slicesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder addSlices(
org.tensorflow.framework.TensorSliceProto.Builder builderForValue) {
if (slicesBuilder_ == null) {
ensureSlicesIsMutable();
slices_.add(builderForValue.build());
onChanged();
} else {
slicesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder addSlices(
int index, org.tensorflow.framework.TensorSliceProto.Builder builderForValue) {
if (slicesBuilder_ == null) {
ensureSlicesIsMutable();
slices_.add(index, builderForValue.build());
onChanged();
} else {
slicesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder addAllSlices(
java.lang.Iterable<? extends org.tensorflow.framework.TensorSliceProto> values) {
if (slicesBuilder_ == null) {
ensureSlicesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, slices_);
onChanged();
} else {
slicesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder clearSlices() {
if (slicesBuilder_ == null) {
slices_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000040);
onChanged();
} else {
slicesBuilder_.clear();
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public Builder removeSlices(int index) {
if (slicesBuilder_ == null) {
ensureSlicesIsMutable();
slices_.remove(index);
onChanged();
} else {
slicesBuilder_.remove(index);
}
return this;
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProto.Builder getSlicesBuilder(
int index) {
return getSlicesFieldBuilder().getBuilder(index);
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProtoOrBuilder getSlicesOrBuilder(
int index) {
if (slicesBuilder_ == null) {
return slices_.get(index); } else {
return slicesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public java.util.List<? extends org.tensorflow.framework.TensorSliceProtoOrBuilder>
getSlicesOrBuilderList() {
if (slicesBuilder_ != null) {
return slicesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(slices_);
}
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProto.Builder addSlicesBuilder() {
return getSlicesFieldBuilder().addBuilder(
org.tensorflow.framework.TensorSliceProto.getDefaultInstance());
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public org.tensorflow.framework.TensorSliceProto.Builder addSlicesBuilder(
int index) {
return getSlicesFieldBuilder().addBuilder(
index, org.tensorflow.framework.TensorSliceProto.getDefaultInstance());
}
/**
*
* Iff present, this entry represents a partitioned tensor. The previous
* fields are interpreted as follows:
* "dtype", "shape": describe the full tensor.
* "shard_id", "offset", "size", "crc32c": all IGNORED.
* These information for each slice can be looked up in their own
* BundleEntryProto, keyed by each "slice_name".
*
*
* repeated .tensorflow.TensorSliceProto slices = 7;
*/
public java.util.List<org.tensorflow.framework.TensorSliceProto.Builder>
getSlicesBuilderList() {
return getSlicesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
org.tensorflow.framework.TensorSliceProto, org.tensorflow.framework.TensorSliceProto.Builder, org.tensorflow.framework.TensorSliceProtoOrBuilder>
getSlicesFieldBuilder() {
if (slicesBuilder_ == null) {
slicesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
org.tensorflow.framework.TensorSliceProto, org.tensorflow.framework.TensorSliceProto.Builder, org.tensorflow.framework.TensorSliceProtoOrBuilder>(
slices_,
((bitField0_ & 0x00000040) == 0x00000040),
getParentForChildren(),
isClean());
slices_ = null;
}
return slicesBuilder_;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:tensorflow.BundleEntryProto)
}
// @@protoc_insertion_point(class_scope:tensorflow.BundleEntryProto)
private static final org.tensorflow.util.BundleEntryProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.tensorflow.util.BundleEntryProto();
}
public static org.tensorflow.util.BundleEntryProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BundleEntryProto>
PARSER = new com.google.protobuf.AbstractParser<BundleEntryProto>() {
public BundleEntryProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BundleEntryProto(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<BundleEntryProto> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BundleEntryProto> getParserForType() {
return PARSER;
}
public org.tensorflow.util.BundleEntryProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
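As the field comments above describe, a non-partitioned entry locates its tensor bytes at [offset, offset + size) of the shard file named by shard_id, while a partitioned entry carries slices whose bytes live in their own BundleEntryProto entries. A hedged sketch of how a checkpoint reader might branch on this follows; the ShardReader interface is hypothetical and stands in for whatever file I/O the caller already has:

import java.util.List;
import org.tensorflow.framework.TensorSliceProto;
import org.tensorflow.util.BundleEntryProto;

public class BundleEntryReader {
  /** Hypothetical I/O hook: returns `size` bytes starting at `offset` of the given shard file. */
  interface ShardReader {
    byte[] read(int shardId, long offset, long size) throws java.io.IOException;
  }

  static void describe(BundleEntryProto entry, ShardReader reader) throws java.io.IOException {
    if (entry.getSlicesCount() > 0) {
      // Partitioned tensor: shard_id/offset/size/crc32c are ignored here;
      // each slice's bytes are located via that slice's own BundleEntryProto.
      List<TensorSliceProto> slices = entry.getSlicesList();
      System.out.println("partitioned tensor with " + slices.size() + " slices");
    } else {
      // Plain tensor: bytes live in file "shard_id" at [offset, offset + size).
      byte[] raw = reader.read(entry.getShardId(), entry.getOffset(), entry.getSize());
      System.out.println("read " + raw.length + " bytes; expected CRC32C = "
          + Integer.toUnsignedString(entry.getCrc32C()));
    }
  }
}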