/**
 *
* complex with float32 real and imaginary components
*
*
* COMPLEX64 = 14;
*/
COMPLEX64(14),
/**
*
* complex with float64 real and imaginary components
*
*
* COMPLEX128 = 15;
*/
COMPLEX128(15),
/**
*
* Non-IEEE floating-point format based on IEEE754 single-precision
* floating-point number truncated to 16 bits.
* This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits.
*
*
* BFLOAT16 = 16;
*/
BFLOAT16(16),
// Sentinel for wire values not known to this generated code. It has no
// proto number: getNumber() throws for it, and forNumber() never returns it.
UNRECOGNIZED(-1),
;
// Numeric wire values for each constant, mirroring the .proto enum definition.
// These allow callers to reference the proto number without the enum constant.
/**
* UNDEFINED = 0;
*/
public static final int UNDEFINED_VALUE = 0;
/**
*
* Basic types.
*
*
* FLOAT = 1;
*/
public static final int FLOAT_VALUE = 1;
/**
*
* uint8_t
*
*
* UINT8 = 2;
*/
public static final int UINT8_VALUE = 2;
/**
*
* int8_t
*
*
* INT8 = 3;
*/
public static final int INT8_VALUE = 3;
/**
*
* uint16_t
*
*
* UINT16 = 4;
*/
public static final int UINT16_VALUE = 4;
/**
*
* int16_t
*
*
* INT16 = 5;
*/
public static final int INT16_VALUE = 5;
/**
*
* int32_t
*
*
* INT32 = 6;
*/
public static final int INT32_VALUE = 6;
/**
*
* int64_t
*
*
* INT64 = 7;
*/
public static final int INT64_VALUE = 7;
/**
*
* string
*
*
* STRING = 8;
*/
public static final int STRING_VALUE = 8;
/**
*
* bool
*
*
* BOOL = 9;
*/
public static final int BOOL_VALUE = 9;
/**
*
* IEEE754 half-precision floating-point format (16 bits wide).
* This format has 1 sign bit, 5 exponent bits, and 10 mantissa bits.
*
*
* FLOAT16 = 10;
*/
public static final int FLOAT16_VALUE = 10;
/**
* DOUBLE = 11;
*/
public static final int DOUBLE_VALUE = 11;
/**
* UINT32 = 12;
*/
public static final int UINT32_VALUE = 12;
/**
* UINT64 = 13;
*/
public static final int UINT64_VALUE = 13;
/**
*
* complex with float32 real and imaginary components
*
*
* COMPLEX64 = 14;
*/
public static final int COMPLEX64_VALUE = 14;
/**
*
* complex with float64 real and imaginary components
*
*
* COMPLEX128 = 15;
*/
public static final int COMPLEX128_VALUE = 15;
/**
*
* Non-IEEE floating-point format based on IEEE754 single-precision
* floating-point number truncated to 16 bits.
* This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits.
*
*
* BFLOAT16 = 16;
*/
public static final int BFLOAT16_VALUE = 16;
/**
 * Returns the numeric wire value of this enum constant.
 *
 * @throws java.lang.IllegalArgumentException if this is {@code UNRECOGNIZED},
 *         which carries no defined proto number
 */
public final int getNumber() {
  if (this != UNRECOGNIZED) {
    return value;
  }
  throw new java.lang.IllegalArgumentException(
      "Can't get the number of an unknown enum value.");
}
/**
 * Returns the enum constant for the given proto wire number, or null if unknown.
 *
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static DataType valueOf(int value) {
return forNumber(value);
}
/**
 * Maps a proto wire number to its {@code DataType} constant.
 *
 * @param value the numeric value from the wire
 * @return the matching constant, or {@code null} for unknown numbers
 *         (never {@code UNRECOGNIZED})
 */
public static DataType forNumber(int value) {
  // Wire numbers are dense on [0, 16], so a positional table replaces the
  // switch; element order follows the declared proto numbers exactly.
  final DataType[] byNumber = {
      UNDEFINED, FLOAT, UINT8, INT8, UINT16, INT16, INT32, INT64,
      STRING, BOOL, FLOAT16, DOUBLE, UINT32, UINT64,
      COMPLEX64, COMPLEX128, BFLOAT16
  };
  return (value >= 0 && value < byNumber.length) ? byNumber[value] : null;
}
/**
 * Returns the shared number-to-constant map consumed by the protobuf runtime.
 */
public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataType>
    internalGetValueMap() {
  return internalValueMap;
}
// The field is declared as EnumLiteMap<DataType>, so the anonymous subclass
// is instantiated with the same type argument (the raw `new EnumLiteMap()`
// form produced unchecked-conversion warnings and a raw return type above).
private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
    DataType> internalValueMap =
      new org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataType>() {
        public DataType findValueByNumber(int number) {
          // Delegates to forNumber; yields null for unknown wire numbers.
          return DataType.forNumber(number);
        }
      };
/**
 * Returns the descriptor for this enum value.
 * Relies on the declaration order matching the descriptor order, which
 * generated code guarantees; throws for UNRECOGNIZED (index out of range).
 */
public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
/** Returns the enum type's descriptor; identical to {@link #getDescriptor()}. */
public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
/**
 * Returns this enum's descriptor — the first enum type declared in the
 * TensorNamespace file descriptor.
 */
public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.nd4j.ir.TensorNamespace.getDescriptor().getEnumTypes().get(0);
}
// Cached snapshot of values(); values() allocates a fresh array per call.
private static final DataType[] VALUES = values();
/**
 * Returns the constant corresponding to a descriptor value.
 *
 * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
 *         different enum type
 */
public static DataType valueOf(
    org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
  if (getDescriptor() != desc.getType()) {
    throw new java.lang.IllegalArgumentException(
        "EnumValueDescriptor is not for this type.");
  }
  // Index -1 marks a value unknown to this generated code.
  final int index = desc.getIndex();
  return (index == -1) ? UNRECOGNIZED : VALUES[index];
}
// Proto wire number backing this constant (-1 for UNRECOGNIZED).
private final int value;
private DataType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.nd4j.ir.DataType)
}
// Read-only view of a StringStringEntryProto message or builder.
public interface StringStringEntryProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.nd4j.ir.StringStringEntryProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* string key = 1;
* @return the key; the empty string when unset (proto3 default)
*/
java.lang.String getKey();
/**
* string key = 1;
* @return the key as UTF-8 bytes
*/
org.nd4j.shade.protobuf.ByteString
getKeyBytes();
/**
* string value = 2;
* @return the value; the empty string when unset (proto3 default)
*/
java.lang.String getValue();
/**
* string value = 2;
* @return the value as UTF-8 bytes
*/
org.nd4j.shade.protobuf.ByteString
getValueBytes();
}
/**
*
* StringStringEntryProto follows the pattern for cross-proto-version maps.
* See https://developers.google.com/protocol-buffers/docs/proto3#maps
*
*
* Protobuf type {@code org.nd4j.ir.StringStringEntryProto}
*/
public static final class StringStringEntryProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.nd4j.ir.StringStringEntryProto)
StringStringEntryProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use StringStringEntryProto.newBuilder() to construct.
private StringStringEntryProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// Default state: both fields hold the proto3 default, the empty string.
private StringStringEntryProto() {
key_ = "";
value_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new StringStringEntryProto();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: consumes tag/value pairs until tag 0 (EOF).
// Tag 10 = field 1 (key, length-delimited); tag 18 = field 2 (value).
private StringStringEntryProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
key_ = s;
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
value_ = s;
break;
}
default: {
// Unknown fields are preserved rather than dropped.
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Always freeze whatever was parsed, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_StringStringEntryProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_StringStringEntryProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.nd4j.ir.TensorNamespace.StringStringEntryProto.class, org.nd4j.ir.TensorNamespace.StringStringEntryProto.Builder.class);
}
public static final int KEY_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; converted lazily on access
// and the converted form is cached back into the field (hence volatile).
private volatile java.lang.Object key_;
/**
* string key = 1;
*/
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
key_ = s;
return s;
}
}
/**
* string key = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int VALUE_FIELD_NUMBER = 2;
// Same lazy String/ByteString dual representation as key_.
private volatile java.lang.Object value_;
/**
* string value = 2;
*/
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
value_ = s;
return s;
}
}
/**
* string value = 2;
*/
public org.nd4j.shade.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
// Memoized tri-state: -1 not yet computed, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
// proto3: fields equal to their default ("") are omitted from the wire.
if (!getKeyBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
}
if (!getValueBytes().isEmpty()) {
org.nd4j.shade.protobuf.GeneratedMessageV3.writeString(output, 2, value_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 marks "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getKeyBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
}
if (!getValueBytes().isEmpty()) {
size += org.nd4j.shade.protobuf.GeneratedMessageV3.computeStringSize(2, value_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.nd4j.ir.TensorNamespace.StringStringEntryProto)) {
return super.equals(obj);
}
org.nd4j.ir.TensorNamespace.StringStringEntryProto other = (org.nd4j.ir.TensorNamespace.StringStringEntryProto) obj;
if (!getKey()
.equals(other.getKey())) return false;
if (!getValue()
.equals(other.getValue())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + KEY_FIELD_NUMBER;
hash = (53 * hash) + getKey().hashCode();
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads, delegating to PARSER.
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.nd4j.ir.TensorNamespace.StringStringEntryProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields a fresh builder; otherwise seed from this.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
* StringStringEntryProto follows the pattern for cross-proto-version maps.
* See https://developers.google.com/protocol-buffers/docs/proto3#maps
*
*
* Protobuf type {@code org.nd4j.ir.StringStringEntryProto}
*/
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:org.nd4j.ir.StringStringEntryProto)
org.nd4j.ir.TensorNamespace.StringStringEntryProtoOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_StringStringEntryProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_StringStringEntryProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.nd4j.ir.TensorNamespace.StringStringEntryProto.class, org.nd4j.ir.TensorNamespace.StringStringEntryProto.Builder.class);
}
// Construct using org.nd4j.ir.TensorNamespace.StringStringEntryProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No message-typed fields here, so nothing to eagerly initialize.
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
key_ = "";
value_ = "";
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_StringStringEntryProto_descriptor;
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.StringStringEntryProto getDefaultInstanceForType() {
return org.nd4j.ir.TensorNamespace.StringStringEntryProto.getDefaultInstance();
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.StringStringEntryProto build() {
org.nd4j.ir.TensorNamespace.StringStringEntryProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.StringStringEntryProto buildPartial() {
org.nd4j.ir.TensorNamespace.StringStringEntryProto result = new org.nd4j.ir.TensorNamespace.StringStringEntryProto(this);
result.key_ = key_;
result.value_ = value_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof org.nd4j.ir.TensorNamespace.StringStringEntryProto) {
return mergeFrom((org.nd4j.ir.TensorNamespace.StringStringEntryProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.nd4j.ir.TensorNamespace.StringStringEntryProto other) {
if (other == org.nd4j.ir.TensorNamespace.StringStringEntryProto.getDefaultInstance()) return this;
// proto3 merge semantics: non-default (non-empty) fields overwrite.
if (!other.getKey().isEmpty()) {
key_ = other.key_;
onChanged();
}
if (!other.getValue().isEmpty()) {
value_ = other.value_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.nd4j.ir.TensorNamespace.StringStringEntryProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure, then rethrow.
parsedMessage = (org.nd4j.ir.TensorNamespace.StringStringEntryProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Same lazy String/ByteString representation as the message class.
private java.lang.Object key_ = "";
/**
* string key = 1;
*/
public java.lang.String getKey() {
java.lang.Object ref = key_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
key_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string key = 1;
*/
public org.nd4j.shade.protobuf.ByteString
getKeyBytes() {
java.lang.Object ref = key_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
key_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
* string key = 1;
*/
public Builder setKey(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
key_ = value;
onChanged();
return this;
}
/**
* string key = 1;
*/
public Builder clearKey() {
key_ = getDefaultInstance().getKey();
onChanged();
return this;
}
/**
* string key = 1;
*/
public Builder setKeyBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
key_ = value;
onChanged();
return this;
}
private java.lang.Object value_ = "";
/**
* string value = 2;
*/
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
org.nd4j.shade.protobuf.ByteString bs =
(org.nd4j.shade.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
value_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* string value = 2;
*/
public org.nd4j.shade.protobuf.ByteString
getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
value_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
/**
* string value = 2;
*/
public Builder setValue(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
onChanged();
return this;
}
/**
* string value = 2;
*/
public Builder clearValue() {
value_ = getDefaultInstance().getValue();
onChanged();
return this;
}
/**
* string value = 2;
*/
public Builder setValueBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
value_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.nd4j.ir.StringStringEntryProto)
}
// @@protoc_insertion_point(class_scope:org.nd4j.ir.StringStringEntryProto)
private static final org.nd4j.ir.TensorNamespace.StringStringEntryProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.nd4j.ir.TensorNamespace.StringStringEntryProto();
}
public static org.nd4j.ir.TensorNamespace.StringStringEntryProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// NOTE(review): Parser/AbstractParser appear raw here; upstream generated
// code parameterizes them as Parser<StringStringEntryProto> — the type
// arguments look stripped by extraction. Confirm against the original file.
private static final org.nd4j.shade.protobuf.Parser
PARSER = new org.nd4j.shade.protobuf.AbstractParser() {
@java.lang.Override
public StringStringEntryProto parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return new StringStringEntryProto(input, extensionRegistry);
}
};
public static org.nd4j.shade.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.StringStringEntryProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface TypeProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.nd4j.ir.TypeProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
*
*
* .org.nd4j.ir.TypeProto.TensorDescriptor tensor_type = 1;
*/
private org.nd4j.shade.protobuf.SingleFieldBuilderV3<
org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptor, org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptor.Builder, org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptorOrBuilder>
getTensorTypeFieldBuilder() {
if (tensorTypeBuilder_ == null) {
if (!(valueCase_ == 1)) {
value_ = org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptor.getDefaultInstance();
}
tensorTypeBuilder_ = new org.nd4j.shade.protobuf.SingleFieldBuilderV3<
org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptor, org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptor.Builder, org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptorOrBuilder>(
(org.nd4j.ir.TensorNamespace.TypeProto.TensorDescriptor) value_,
getParentForChildren(),
isClean());
value_ = null;
}
valueCase_ = 1;
onChanged();;
return tensorTypeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.nd4j.ir.TypeProto)
}
// @@protoc_insertion_point(class_scope:org.nd4j.ir.TypeProto)
// Shared immutable default instance for TypeProto.
private static final org.nd4j.ir.TensorNamespace.TypeProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.nd4j.ir.TensorNamespace.TypeProto();
}
public static org.nd4j.ir.TensorNamespace.TypeProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// NOTE(review): Parser/AbstractParser appear raw; upstream generated code
// uses Parser<TypeProto> — type arguments look stripped by extraction.
private static final org.nd4j.shade.protobuf.Parser
PARSER = new org.nd4j.shade.protobuf.AbstractParser() {
@java.lang.Override
public TypeProto parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
// Delegates to the wire-format parsing constructor.
return new TypeProto(input, extensionRegistry);
}
};
public static org.nd4j.shade.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.TypeProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Read-only view of a TensorShapeProto message or builder.
// NOTE(review): the java.util.List returns appear raw; upstream generated
// code uses List<Dimension> / List<? extends DimensionOrBuilder> — the type
// arguments look stripped by extraction. Confirm against the original file.
public interface TensorShapeProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.nd4j.ir.TensorShapeProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
*/
java.util.List
getDimList();
/**
* repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
*/
org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension getDim(int index);
/**
* repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
*/
int getDimCount();
/**
* repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
*/
java.util.List
getDimOrBuilderList();
/**
* repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
*/
org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder getDimOrBuilder(
int index);
}
/**
*
* Defines a tensor shape. A dimension can be either an integer value
* or a symbolic variable. A symbolic variable represents an unknown
* dimension.
*
*
* Protobuf type {@code org.nd4j.ir.TensorShapeProto}
*/
public static final class TensorShapeProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.nd4j.ir.TensorShapeProto)
TensorShapeProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use TensorShapeProto.newBuilder() to construct.
private TensorShapeProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
// Default state: no dimensions.
private TensorShapeProto() {
dim_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TensorShapeProto();
}
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until tag 0 (EOF).
// Tag 10 = field 1 (repeated Dimension messages, length-delimited).
private TensorShapeProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
// Bit 0 tracks whether dim_ has been switched to a mutable list.
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
// Lazily replace the immutable empty list on the first element.
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
dim_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000001;
}
dim_.add(
input.readMessage(org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.parser(), extensionRegistry));
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the list and unknown fields even on error paths.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
dim_ = java.util.Collections.unmodifiableList(dim_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
/** Returns the message descriptor for TensorShapeProto. */
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorShapeProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorShapeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.nd4j.ir.TensorNamespace.TensorShapeProto.class, org.nd4j.ir.TensorNamespace.TensorShapeProto.Builder.class);
}
public interface DimensionOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.nd4j.ir.TensorShapeProto.Dimension)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* int64 dim_value = 1;
*/
long getDimValue();
/**
*
*
* string dim_param = 2;
*/
public Builder setDimParamBytes(
org.nd4j.shade.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
valueCase_ = 2;
value_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.nd4j.ir.TensorShapeProto.Dimension)
}
// @@protoc_insertion_point(class_scope:org.nd4j.ir.TensorShapeProto.Dimension)
// Shared immutable default instance for Dimension.
private static final org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension();
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// NOTE(review): Parser/AbstractParser appear raw; upstream generated code
// uses Parser<Dimension> — type arguments look stripped by extraction.
private static final org.nd4j.shade.protobuf.Parser
PARSER = new org.nd4j.shade.protobuf.AbstractParser() {
@java.lang.Override
public Dimension parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
// Delegates to the wire-format parsing constructor.
return new Dimension(input, extensionRegistry);
}
};
public static org.nd4j.shade.protobuf.Parser parser() {
return PARSER;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Parser getParserForType() {
return PARSER;
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public static final int DIM_FIELD_NUMBER = 1;
private java.util.List dim_;
/**
* repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
*/
public java.util.List getDimList() {
return dim_;
}
/**
 * Returns the dimensions as read-only message-or-builder views.
 * <code>repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;</code>
 */
public java.util.List<? extends org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder>
getDimOrBuilderList() {
return dim_;
}
/**
 * Returns the number of dimensions in this shape.
 * <code>repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;</code>
 */
public int getDimCount() {
return dim_.size();
}
/**
 * Returns the dimension at {@code index}.
 * <code>repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;</code>
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension getDim(int index) {
return dim_.get(index);
}
/**
 * Returns the dimension at {@code index} as a read-only view.
 * <code>repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;</code>
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder getDimOrBuilder(
int index) {
return dim_.get(index);
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/**
 * Always initialized: this proto3 message has no required fields, so the
 * first call memoizes {@code true} and subsequent calls return the cache.
 */
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes this message: each dim entry as field 1, then any unknown fields
 * preserved from parsing.
 */
@java.lang.Override
public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < dim_.size(); i++) {
output.writeMessage(1, dim_.get(i));
}
unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes) the wire size in bytes of this message.
 * {@code memoizedSize == -1} means "not yet computed".
 */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < dim_.size(); i++) {
size += org.nd4j.shade.protobuf.CodedOutputStream
.computeMessageSize(1, dim_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Value equality: two TensorShapeProto instances are equal when their dim
 * lists and their unknown-field sets are equal.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
// Non-TensorShapeProto objects fall back to the superclass comparison.
if (!(obj instanceof org.nd4j.ir.TensorNamespace.TensorShapeProto)) {
return super.equals(obj);
}
org.nd4j.ir.TensorNamespace.TensorShapeProto other = (org.nd4j.ir.TensorNamespace.TensorShapeProto) obj;
if (!getDimList()
.equals(other.getDimList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
/**
 * Hash code consistent with {@link #equals}: mixes the descriptor, the dim
 * list (only when non-empty), and the unknown fields. Memoized; the sentinel
 * value 0 means "not yet computed".
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDimCount() > 0) {
hash = (37 * hash) + DIM_FIELD_NUMBER;
hash = (53 * hash) + getDimList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Static parseFrom overloads: decode a TensorShapeProto from the common input
// forms (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
// with and without an ExtensionRegistryLite. The *delimited* variants first
// read a varint length prefix, for streams holding multiple messages.
// ---------------------------------------------------------------------------
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
java.nio.ByteBuffer data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
java.nio.ByteBuffer data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
org.nd4j.shade.protobuf.ByteString data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
org.nd4j.shade.protobuf.ByteString data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(byte[] data)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
byte[] data,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream variants wrap parser errors via parseWithIOException so callers see
// java.io.IOException rather than protobuf-specific exceptions.
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseDelimitedFrom(
java.io.InputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.nd4j.ir.TensorNamespace.TensorShapeProto parseFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return org.nd4j.shade.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// ---------------------------------------------------------------------------
// Builder factories. newBuilder() starts from the default instance; the
// prototype overload pre-populates the builder with an existing message.
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.nd4j.ir.TensorNamespace.TensorShapeProto prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
/** Fast path: the default instance yields a fresh empty builder without a merge. */
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 * Defines a tensor shape. A dimension can be either an integer value
 * or a symbolic variable. A symbolic variable represents an unknown
 * dimension.
 *
 *
 * Protobuf type {@code org.nd4j.ir.TensorShapeProto}
 */
// NOTE(review): protoc-generated builder; prefer regenerating from the .proto
// file over hand-editing this logic.
public static final class Builder extends
org.nd4j.shade.protobuf.GeneratedMessageV3.Builder implements
// @@protoc_insertion_point(builder_implements:org.nd4j.ir.TensorShapeProto)
org.nd4j.ir.TensorNamespace.TensorShapeProtoOrBuilder {
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorShapeProto_descriptor;
}
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorShapeProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.nd4j.ir.TensorNamespace.TensorShapeProto.class, org.nd4j.ir.TensorNamespace.TensorShapeProto.Builder.class);
}
// Construct using org.nd4j.ir.TensorNamespace.TensorShapeProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-field builder when the runtime is configured
// to always use field builders.
private void maybeForceBuilderInitialization() {
if (org.nd4j.shade.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getDimFieldBuilder();
}
}
/** Resets the builder to the empty state (clears the dim list). */
@java.lang.Override
public Builder clear() {
super.clear();
if (dimBuilder_ == null) {
dim_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
dimBuilder_.clear();
}
return this;
}
@java.lang.Override
public org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorShapeProto_descriptor;
}
@java.lang.Override
public org.nd4j.ir.TensorNamespace.TensorShapeProto getDefaultInstanceForType() {
return org.nd4j.ir.TensorNamespace.TensorShapeProto.getDefaultInstance();
}
/** Builds the message, throwing if it is not fully initialized. */
@java.lang.Override
public org.nd4j.ir.TensorNamespace.TensorShapeProto build() {
org.nd4j.ir.TensorNamespace.TensorShapeProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds the message without the initialization check. Transfers ownership
 * of the dim list to the message, freezing it as unmodifiable when the
 * builder owned a mutable copy (bit 0x1).
 */
@java.lang.Override
public org.nd4j.ir.TensorNamespace.TensorShapeProto buildPartial() {
org.nd4j.ir.TensorNamespace.TensorShapeProto result = new org.nd4j.ir.TensorNamespace.TensorShapeProto(this);
int from_bitField0_ = bitField0_;
if (dimBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
dim_ = java.util.Collections.unmodifiableList(dim_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.dim_ = dim_;
} else {
result.dim_ = dimBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
if (other instanceof org.nd4j.ir.TensorNamespace.TensorShapeProto) {
return mergeFrom((org.nd4j.ir.TensorNamespace.TensorShapeProto)other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges another TensorShapeProto into this builder: appends its dim entries
 * and merges its unknown fields. Merging the default instance is a no-op.
 */
public Builder mergeFrom(org.nd4j.ir.TensorNamespace.TensorShapeProto other) {
if (other == org.nd4j.ir.TensorNamespace.TensorShapeProto.getDefaultInstance()) return this;
if (dimBuilder_ == null) {
// List path: share other's (immutable) list when ours is empty,
// otherwise copy-on-write append.
if (!other.dim_.isEmpty()) {
if (dim_.isEmpty()) {
dim_ = other.dim_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDimIsMutable();
dim_.addAll(other.dim_);
}
onChanged();
}
} else {
// Field-builder path: an empty builder is discarded so the shared list
// can be adopted; otherwise messages are appended to the builder.
if (!other.dim_.isEmpty()) {
if (dimBuilder_.isEmpty()) {
dimBuilder_.dispose();
dimBuilder_ = null;
dim_ = other.dim_;
bitField0_ = (bitField0_ & ~0x00000001);
dimBuilder_ =
org.nd4j.shade.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getDimFieldBuilder() : null;
} else {
dimBuilder_.addAllMessages(other.dim_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/**
 * Parses from a stream and merges the result into this builder. On a parse
 * error, any partially parsed message is still merged before rethrowing.
 */
@java.lang.Override
public Builder mergeFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.nd4j.ir.TensorNamespace.TensorShapeProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.nd4j.ir.TensorNamespace.TensorShapeProto) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Bit 0x00000001 tracks whether dim_ is a mutable list owned by this builder.
private int bitField0_;
private java.util.List dim_ =
java.util.Collections.emptyList();
// Copy-on-write: replace a shared/immutable dim_ with a private ArrayList
// before the first mutation.
private void ensureDimIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
dim_ = new java.util.ArrayList(dim_);
bitField0_ |= 0x00000001;
}
}
// Lazily created repeated-field builder; while non-null it owns the dim
// entries and dim_ is ignored.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder, org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder> dimBuilder_;
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public java.util.List getDimList() {
if (dimBuilder_ == null) {
return java.util.Collections.unmodifiableList(dim_);
} else {
return dimBuilder_.getMessageList();
}
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public int getDimCount() {
if (dimBuilder_ == null) {
return dim_.size();
} else {
return dimBuilder_.getCount();
}
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension getDim(int index) {
if (dimBuilder_ == null) {
return dim_.get(index);
} else {
return dimBuilder_.getMessage(index);
}
}
/**
 * Replaces the dimension at {@code index}.
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder setDim(
int index, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension value) {
if (dimBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDimIsMutable();
dim_.set(index, value);
onChanged();
} else {
dimBuilder_.setMessage(index, value);
}
return this;
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder setDim(
int index, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder builderForValue) {
if (dimBuilder_ == null) {
ensureDimIsMutable();
dim_.set(index, builderForValue.build());
onChanged();
} else {
dimBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * Appends one dimension.
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder addDim(org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension value) {
if (dimBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDimIsMutable();
dim_.add(value);
onChanged();
} else {
dimBuilder_.addMessage(value);
}
return this;
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder addDim(
int index, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension value) {
if (dimBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDimIsMutable();
dim_.add(index, value);
onChanged();
} else {
dimBuilder_.addMessage(index, value);
}
return this;
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder addDim(
org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder builderForValue) {
if (dimBuilder_ == null) {
ensureDimIsMutable();
dim_.add(builderForValue.build());
onChanged();
} else {
dimBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder addDim(
int index, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder builderForValue) {
if (dimBuilder_ == null) {
ensureDimIsMutable();
dim_.add(index, builderForValue.build());
onChanged();
} else {
dimBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * Appends every dimension in {@code values}.
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder addAllDim(
java.lang.Iterable values) {
if (dimBuilder_ == null) {
ensureDimIsMutable();
org.nd4j.shade.protobuf.AbstractMessageLite.Builder.addAll(
values, dim_);
onChanged();
} else {
dimBuilder_.addAllMessages(values);
}
return this;
}
/**
 * Removes all dimensions.
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder clearDim() {
if (dimBuilder_ == null) {
dim_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dimBuilder_.clear();
}
return this;
}
/**
 * Removes the dimension at {@code index}.
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public Builder removeDim(int index) {
if (dimBuilder_ == null) {
ensureDimIsMutable();
dim_.remove(index);
onChanged();
} else {
dimBuilder_.remove(index);
}
return this;
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder getDimBuilder(
int index) {
return getDimFieldBuilder().getBuilder(index);
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder getDimOrBuilder(
int index) {
if (dimBuilder_ == null) {
return dim_.get(index); } else {
return dimBuilder_.getMessageOrBuilder(index);
}
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public java.util.List
getDimOrBuilderList() {
if (dimBuilder_ != null) {
return dimBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dim_);
}
}
/**
 * Appends a new default dimension and returns its builder for in-place edits.
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder addDimBuilder() {
return getDimFieldBuilder().addBuilder(
org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.getDefaultInstance());
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder addDimBuilder(
int index) {
return getDimFieldBuilder().addBuilder(
index, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.getDefaultInstance());
}
/**
 * repeated .org.nd4j.ir.TensorShapeProto.Dimension dim = 1;
 */
public java.util.List
getDimBuilderList() {
return getDimFieldBuilder().getBuilderList();
}
// Creates the field builder on first use, handing it the current list and
// ownership flag; dim_ is nulled because the builder becomes authoritative.
private org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder, org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder>
getDimFieldBuilder() {
if (dimBuilder_ == null) {
dimBuilder_ = new org.nd4j.shade.protobuf.RepeatedFieldBuilderV3<
org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension, org.nd4j.ir.TensorNamespace.TensorShapeProto.Dimension.Builder, org.nd4j.ir.TensorNamespace.TensorShapeProto.DimensionOrBuilder>(
dim_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
dim_ = null;
}
return dimBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.nd4j.ir.TensorShapeProto)
}
// @@protoc_insertion_point(class_scope:org.nd4j.ir.TensorShapeProto)
// Shared immutable default instance of TensorShapeProto, created at class load.
private static final org.nd4j.ir.TensorNamespace.TensorShapeProto DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.nd4j.ir.TensorNamespace.TensorShapeProto();
}
/** Returns the singleton default (empty) {@code TensorShapeProto} instance. */
public static org.nd4j.ir.TensorNamespace.TensorShapeProto getDefaultInstance() {
return DEFAULT_INSTANCE;
}
/**
 * Parser for {@code TensorShapeProto} messages. Parameterized with the message
 * type (the raw {@code Parser}/{@code AbstractParser} here appeared to be the
 * result of stripped generics); behavior is unchanged.
 */
private static final org.nd4j.shade.protobuf.Parser<TensorShapeProto>
PARSER = new org.nd4j.shade.protobuf.AbstractParser<TensorShapeProto>() {
@java.lang.Override
public TensorShapeProto parsePartialFrom(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
// Delegates to the parsing constructor.
return new TensorShapeProto(input, extensionRegistry);
}
};
/** Returns the type-safe parser for {@code TensorShapeProto} messages. */
public static org.nd4j.shade.protobuf.Parser<TensorShapeProto> parser() {
return PARSER;
}
/** Returns the parser instance used by the protobuf runtime for this message. */
@java.lang.Override
public org.nd4j.shade.protobuf.Parser<TensorShapeProto> getParserForType() {
return PARSER;
}
/** Returns the singleton default instance (see {@link #getDefaultInstance()}). */
@java.lang.Override
public org.nd4j.ir.TensorNamespace.TensorShapeProto getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public interface ValueInfoProtoOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.nd4j.ir.ValueInfoProto)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
*
* This field MUST be present in this version of the IR.
*
* For float and complex64 values
* Complex64 tensors are encoded as a single array of floats,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0, 3.0, 4.0])
* When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
*
* For float and complex64 values
* Complex64 tensors are encoded as a single array of floats,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
*
* For float and complex64 values
* Complex64 tensors are encoded as a single array of floats,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
*
* For int32, uint8, int8, uint16, int16, bool, and float16 values
* float16 values must be bit-wise converted to an uint16_t prior
* to writing to the buffer.
* When this field is present, the data_type field MUST be
* INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
*
* For int32, uint8, int8, uint16, int16, bool, and float16 values
* float16 values must be bit-wise converted to an uint16_t prior
* to writing to the buffer.
* When this field is present, the data_type field MUST be
* INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
*
* For int32, uint8, int8, uint16, int16, bool, and float16 values
* float16 values must be bit-wise converted to an uint16_t prior
* to writing to the buffer.
* When this field is present, the data_type field MUST be
* INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
*
* For strings.
* Each element of string_data is a UTF-8 encoded Unicode
* string. No trailing null, no leading BOM. The protobuf "string"
* scalar type is not used to match ML community conventions.
* When this field is present, the data_type field MUST be STRING
*
* For strings.
* Each element of string_data is a UTF-8 encoded Unicode
* string. No trailing null, no leading BOM. The protobuf "string"
* scalar type is not used to match ML community conventions.
* When this field is present, the data_type field MUST be STRING
*
* For strings.
* Each element of string_data is a UTF-8 encoded Unicode
* string. No trailing null, no leading BOM. The protobuf "string"
* scalar type is not used to match ML community conventions.
* When this field is present, the data_type field MUST be STRING
*
* Serializations can either use one of the fields above, or use this
* raw bytes field. The only exception is the string case, where one is
* required to store the content in the repeated bytes string_data field.
* When this raw_data field is used to store tensor value, elements MUST
* be stored in as fixed-width, little-endian order.
* Floating-point data types MUST be stored in IEEE 754 format.
* Complex64 elements must be written as two consecutive FLOAT values, real component first.
* Complex128 elements must be written as two consecutive DOUBLE values, real component first.
* Boolean type MUST be written one byte per tensor element (00000001 for true, 00000000 for false).
* Note: the advantage of specific field rather than the raw_data field is
* that in some cases (e.g. int data), protobuf does a better packing via
* variable length storage, and may lead to smaller binary footprint.
* When this field is present, the data_type field MUST NOT be STRING or UNDEFINED
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples of 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* For double
* Complex128 tensors are encoded as a single array of doubles,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
*
* For double
* Complex128 tensors are encoded as a single array of doubles,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
*
* For double
* Complex128 tensors are encoded as a single array of doubles,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
*
*
* Protobuf type {@code org.nd4j.ir.TensorProto}
*/
public static final class TensorProto extends
org.nd4j.shade.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.nd4j.ir.TensorProto)
TensorProtoOrBuilder {
private static final long serialVersionUID = 0L;
// Use TensorProto.newBuilder() to construct.
/** Builder-based constructor; field values are transferred by buildPartial(). */
private TensorProto(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder builder) {
super(builder);
}
/**
 * No-arg constructor used for the default instance and by parsing: every
 * repeated field starts as an empty primitive/message list, strings as "",
 * raw_data as the empty ByteString, and dataLocation_ as enum number 0.
 */
private TensorProto() {
dims_ = emptyLongList();
floatData_ = emptyFloatList();
int32Data_ = emptyIntList();
stringData_ = java.util.Collections.emptyList();
int64Data_ = emptyLongList();
name_ = "";
docString_ = "";
rawData_ = org.nd4j.shade.protobuf.ByteString.EMPTY;
externalData_ = java.util.Collections.emptyList();
dataLocation_ = 0;
doubleData_ = emptyDoubleList();
uint64Data_ = emptyLongList();
halfVal_ = emptyIntList();
boolVal_ = emptyBooleanList();
}
// Runtime hook used by the protobuf library to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TensorProto();
}
/** Returns fields seen on the wire that are not part of this message's schema. */
@java.lang.Override
public final org.nd4j.shade.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TensorProto(
org.nd4j.shade.protobuf.CodedInputStream input,
org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
dims_ = newLongList();
mutable_bitField0_ |= 0x00000001;
}
dims_.addLong(input.readInt64());
break;
}
case 10: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
dims_ = newLongList();
mutable_bitField0_ |= 0x00000001;
}
while (input.getBytesUntilLimit() > 0) {
dims_.addLong(input.readInt64());
}
input.popLimit(limit);
break;
}
case 16: {
dataType_ = input.readInt32();
break;
}
case 26: {
org.nd4j.ir.TensorNamespace.TensorProto.Segment.Builder subBuilder = null;
if (segment_ != null) {
subBuilder = segment_.toBuilder();
}
segment_ = input.readMessage(org.nd4j.ir.TensorNamespace.TensorProto.Segment.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(segment_);
segment_ = subBuilder.buildPartial();
}
break;
}
case 37: {
if (!((mutable_bitField0_ & 0x00000002) != 0)) {
floatData_ = newFloatList();
mutable_bitField0_ |= 0x00000002;
}
floatData_.addFloat(input.readFloat());
break;
}
case 34: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000002) != 0) && input.getBytesUntilLimit() > 0) {
floatData_ = newFloatList();
mutable_bitField0_ |= 0x00000002;
}
while (input.getBytesUntilLimit() > 0) {
floatData_.addFloat(input.readFloat());
}
input.popLimit(limit);
break;
}
case 40: {
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
int32Data_ = newIntList();
mutable_bitField0_ |= 0x00000004;
}
int32Data_.addInt(input.readInt32());
break;
}
case 42: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000004) != 0) && input.getBytesUntilLimit() > 0) {
int32Data_ = newIntList();
mutable_bitField0_ |= 0x00000004;
}
while (input.getBytesUntilLimit() > 0) {
int32Data_.addInt(input.readInt32());
}
input.popLimit(limit);
break;
}
case 50: {
if (!((mutable_bitField0_ & 0x00000008) != 0)) {
stringData_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000008;
}
stringData_.add(input.readBytes());
break;
}
case 56: {
if (!((mutable_bitField0_ & 0x00000010) != 0)) {
int64Data_ = newLongList();
mutable_bitField0_ |= 0x00000010;
}
int64Data_.addLong(input.readInt64());
break;
}
case 58: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000010) != 0) && input.getBytesUntilLimit() > 0) {
int64Data_ = newLongList();
mutable_bitField0_ |= 0x00000010;
}
while (input.getBytesUntilLimit() > 0) {
int64Data_.addLong(input.readInt64());
}
input.popLimit(limit);
break;
}
case 66: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 74: {
rawData_ = input.readBytes();
break;
}
case 81: {
if (!((mutable_bitField0_ & 0x00000040) != 0)) {
doubleData_ = newDoubleList();
mutable_bitField0_ |= 0x00000040;
}
doubleData_.addDouble(input.readDouble());
break;
}
case 82: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000040) != 0) && input.getBytesUntilLimit() > 0) {
doubleData_ = newDoubleList();
mutable_bitField0_ |= 0x00000040;
}
while (input.getBytesUntilLimit() > 0) {
doubleData_.addDouble(input.readDouble());
}
input.popLimit(limit);
break;
}
case 88: {
if (!((mutable_bitField0_ & 0x00000080) != 0)) {
uint64Data_ = newLongList();
mutable_bitField0_ |= 0x00000080;
}
uint64Data_.addLong(input.readUInt64());
break;
}
case 90: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000080) != 0) && input.getBytesUntilLimit() > 0) {
uint64Data_ = newLongList();
mutable_bitField0_ |= 0x00000080;
}
while (input.getBytesUntilLimit() > 0) {
uint64Data_.addLong(input.readUInt64());
}
input.popLimit(limit);
break;
}
case 98: {
java.lang.String s = input.readStringRequireUtf8();
docString_ = s;
break;
}
case 106: {
if (!((mutable_bitField0_ & 0x00000020) != 0)) {
externalData_ = new java.util.ArrayList();
mutable_bitField0_ |= 0x00000020;
}
externalData_.add(
input.readMessage(org.nd4j.ir.TensorNamespace.StringStringEntryProto.parser(), extensionRegistry));
break;
}
case 112: {
int rawValue = input.readEnum();
dataLocation_ = rawValue;
break;
}
case 120: {
if (!((mutable_bitField0_ & 0x00000100) != 0)) {
halfVal_ = newIntList();
mutable_bitField0_ |= 0x00000100;
}
halfVal_.addInt(input.readInt32());
break;
}
case 122: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000100) != 0) && input.getBytesUntilLimit() > 0) {
halfVal_ = newIntList();
mutable_bitField0_ |= 0x00000100;
}
while (input.getBytesUntilLimit() > 0) {
halfVal_.addInt(input.readInt32());
}
input.popLimit(limit);
break;
}
case 128: {
if (!((mutable_bitField0_ & 0x00000200) != 0)) {
boolVal_ = newBooleanList();
mutable_bitField0_ |= 0x00000200;
}
boolVal_.addBoolean(input.readBool());
break;
}
case 130: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000200) != 0) && input.getBytesUntilLimit() > 0) {
boolVal_ = newBooleanList();
mutable_bitField0_ |= 0x00000200;
}
while (input.getBytesUntilLimit() > 0) {
boolVal_.addBoolean(input.readBool());
}
input.popLimit(limit);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
dims_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000002) != 0)) {
floatData_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000004) != 0)) {
int32Data_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000008) != 0)) {
stringData_ = java.util.Collections.unmodifiableList(stringData_); // C
}
if (((mutable_bitField0_ & 0x00000010) != 0)) {
int64Data_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000040) != 0)) {
doubleData_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000080) != 0)) {
uint64Data_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000020) != 0)) {
externalData_ = java.util.Collections.unmodifiableList(externalData_);
}
if (((mutable_bitField0_ & 0x00000100) != 0)) {
halfVal_.makeImmutable(); // C
}
if (((mutable_bitField0_ & 0x00000200) != 0)) {
boolVal_.makeImmutable(); // C
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
/** Returns the protobuf message descriptor for {@code org.nd4j.ir.TensorProto}. */
public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_descriptor;
}
/** Wires the generated accessors to descriptor fields for reflective access. */
@java.lang.Override
protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.nd4j.ir.TensorNamespace.TensorProto.class, org.nd4j.ir.TensorNamespace.TensorProto.Builder.class);
}
/**
 * Location of the data for this tensor. MUST be one of:
 * - DEFAULT - data stored inside the protobuf message. Data is stored in raw_data (if set) otherwise in type-specified field.
 * - EXTERNAL - data stored in an external location as described by external_data field.
 *
 * Protobuf enum {@code org.nd4j.ir.TensorProto.DataLocation}
 */
public enum DataLocation
    implements org.nd4j.shade.protobuf.ProtocolMessageEnum {
  /**
   * DEFAULT = 0;
   */
  DEFAULT(0),
  /**
   * EXTERNAL = 1;
   */
  EXTERNAL(1),
  /** Sentinel for wire values this generated code does not know about. */
  UNRECOGNIZED(-1),
  ;

  /**
   * DEFAULT = 0;
   */
  public static final int DEFAULT_VALUE = 0;
  /**
   * EXTERNAL = 1;
   */
  public static final int EXTERNAL_VALUE = 1;

  /**
   * Returns the numeric wire value of this constant.
   *
   * @throws java.lang.IllegalArgumentException if this is {@link #UNRECOGNIZED},
   *     which has no defined wire value
   */
  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static DataLocation valueOf(int value) {
    return forNumber(value);
  }

  /** Returns the constant for the given wire value, or {@code null} if unknown. */
  public static DataLocation forNumber(int value) {
    switch (value) {
      case 0: return DEFAULT;
      case 1: return EXTERNAL;
      default: return null;
    }
  }

  // Fixed: return type and anonymous class had been stripped to the raw
  // Internal.EnumLiteMap type; restored the <DataLocation> type argument
  // that protoc emits.
  public static org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataLocation>
      internalGetValueMap() {
    return internalValueMap;
  }
  private static final org.nd4j.shade.protobuf.Internal.EnumLiteMap<
      DataLocation> internalValueMap =
        new org.nd4j.shade.protobuf.Internal.EnumLiteMap<DataLocation>() {
          public DataLocation findValueByNumber(int number) {
            return DataLocation.forNumber(number);
          }
        };

  public final org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor
      getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }
  public final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }
  public static final org.nd4j.shade.protobuf.Descriptors.EnumDescriptor
      getDescriptor() {
    return org.nd4j.ir.TensorNamespace.TensorProto.getDescriptor().getEnumTypes().get(0);
  }

  private static final DataLocation[] VALUES = values();

  /**
   * Returns the constant matching the given descriptor.
   *
   * @throws java.lang.IllegalArgumentException if the descriptor belongs to a
   *     different enum type
   */
  public static DataLocation valueOf(
      org.nd4j.shade.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private DataLocation(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:org.nd4j.ir.TensorProto.DataLocation)
}
/** Read-only view of a {@code org.nd4j.ir.TensorProto.Segment} message or builder. */
public interface SegmentOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.nd4j.ir.TensorProto.Segment)
org.nd4j.shade.protobuf.MessageOrBuilder {
/**
* Start index of this segment within the full tensor.
*
* int64 begin = 1;
*/
long getBegin();
/**
* End index of this segment within the full tensor (inclusive vs. exclusive
* is not specified by the schema comments visible here).
*
* int64 end = 2;
*/
long getEnd();
}
/**
 * For very large tensors, we may want to store them in chunks, in which
 * case the following fields will specify the segment that is stored in
 * the current TensorProto.
 *
 * Protobuf type {@code org.nd4j.ir.TensorProto.Segment}
 */
public static final class Segment extends
    org.nd4j.shade.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:org.nd4j.ir.TensorProto.Segment)
    SegmentOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use Segment.newBuilder() to construct.
  // Fixed: the builder parameter had been stripped to a raw
  // GeneratedMessageV3.Builder; restored the wildcard emitted by protoc.
  private Segment(org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Segment() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new Segment();
  }

  @java.lang.Override
  public final org.nd4j.shade.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }

  /**
   * Parses a Segment from the wire format, preserving any unknown fields.
   *
   * @throws org.nd4j.shade.protobuf.InvalidProtocolBufferException on malformed input
   */
  private Segment(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    org.nd4j.shade.protobuf.UnknownFieldSet.Builder unknownFields =
        org.nd4j.shade.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 8: {
            begin_ = input.readInt64();
            break;
          }
          case 16: {
            end_ = input.readInt64();
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.nd4j.shade.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_Segment_descriptor;
  }

  @java.lang.Override
  protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_Segment_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.nd4j.ir.TensorNamespace.TensorProto.Segment.class, org.nd4j.ir.TensorNamespace.TensorProto.Segment.Builder.class);
  }

  public static final int BEGIN_FIELD_NUMBER = 1;
  private long begin_;
  /**
   * int64 begin = 1;
   */
  public long getBegin() {
    return begin_;
  }

  public static final int END_FIELD_NUMBER = 2;
  private long end_;
  /**
   * int64 end = 2;
   */
  public long getEnd() {
    return end_;
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(org.nd4j.shade.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // proto3 semantics: fields equal to their default (0) are not written.
    if (begin_ != 0L) {
      output.writeInt64(1, begin_);
    }
    if (end_ != 0L) {
      output.writeInt64(2, end_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (begin_ != 0L) {
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt64Size(1, begin_);
    }
    if (end_ != 0L) {
      size += org.nd4j.shade.protobuf.CodedOutputStream
          .computeInt64Size(2, end_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.nd4j.ir.TensorNamespace.TensorProto.Segment)) {
      return super.equals(obj);
    }
    org.nd4j.ir.TensorNamespace.TensorProto.Segment other = (org.nd4j.ir.TensorNamespace.TensorProto.Segment) obj;
    if (getBegin()
        != other.getBegin()) return false;
    if (getEnd()
        != other.getEnd()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + BEGIN_FIELD_NUMBER;
    hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
        getBegin());
    hash = (37 * hash) + END_FIELD_NUMBER;
    hash = (53 * hash) + org.nd4j.shade.protobuf.Internal.hashLong(
        getEnd());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      java.nio.ByteBuffer data)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      java.nio.ByteBuffer data,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.ByteString data)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.ByteString data,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(byte[] data)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      byte[] data,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      java.io.InputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseDelimitedFrom(
      java.io.InputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment parseFrom(
      org.nd4j.shade.protobuf.CodedInputStream input,
      org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.nd4j.shade.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.nd4j.ir.TensorNamespace.TensorProto.Segment prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * For very large tensors, we may want to store them in chunks, in which
   * case the following fields will specify the segment that is stored in
   * the current TensorProto.
   *
   * Protobuf type {@code org.nd4j.ir.TensorProto.Segment}
   */
  public static final class Builder extends
      // Fixed: superclass had been stripped to a raw GeneratedMessageV3.Builder;
      // restored the self-referential <Builder> type argument emitted by protoc.
      org.nd4j.shade.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:org.nd4j.ir.TensorProto.Segment)
      org.nd4j.ir.TensorNamespace.TensorProto.SegmentOrBuilder {
    public static final org.nd4j.shade.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_Segment_descriptor;
    }

    @java.lang.Override
    protected org.nd4j.shade.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_Segment_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.nd4j.ir.TensorNamespace.TensorProto.Segment.class, org.nd4j.ir.TensorNamespace.TensorProto.Segment.Builder.class);
    }

    // Construct using org.nd4j.ir.TensorNamespace.TensorProto.Segment.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        org.nd4j.shade.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.nd4j.shade.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      begin_ = 0L;
      end_ = 0L;
      return this;
    }

    @java.lang.Override
    public org.nd4j.shade.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.nd4j.ir.TensorNamespace.internal_static_org_nd4j_ir_TensorProto_Segment_descriptor;
    }

    @java.lang.Override
    public org.nd4j.ir.TensorNamespace.TensorProto.Segment getDefaultInstanceForType() {
      return org.nd4j.ir.TensorNamespace.TensorProto.Segment.getDefaultInstance();
    }

    @java.lang.Override
    public org.nd4j.ir.TensorNamespace.TensorProto.Segment build() {
      org.nd4j.ir.TensorNamespace.TensorProto.Segment result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public org.nd4j.ir.TensorNamespace.TensorProto.Segment buildPartial() {
      org.nd4j.ir.TensorNamespace.TensorProto.Segment result = new org.nd4j.ir.TensorNamespace.TensorProto.Segment(this);
      result.begin_ = begin_;
      result.end_ = end_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        org.nd4j.shade.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        org.nd4j.shade.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(org.nd4j.shade.protobuf.Message other) {
      if (other instanceof org.nd4j.ir.TensorNamespace.TensorProto.Segment) {
        return mergeFrom((org.nd4j.ir.TensorNamespace.TensorProto.Segment)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Merges non-default fields of {@code other} into this builder. */
    public Builder mergeFrom(org.nd4j.ir.TensorNamespace.TensorProto.Segment other) {
      if (other == org.nd4j.ir.TensorNamespace.TensorProto.Segment.getDefaultInstance()) return this;
      if (other.getBegin() != 0L) {
        setBegin(other.getBegin());
      }
      if (other.getEnd() != 0L) {
        setEnd(other.getEnd());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        org.nd4j.shade.protobuf.CodedInputStream input,
        org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.nd4j.ir.TensorNamespace.TensorProto.Segment parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.nd4j.shade.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.nd4j.ir.TensorNamespace.TensorProto.Segment) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Even on failure, keep whatever was successfully parsed.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private long begin_ ;
    /**
     * int64 begin = 1;
     */
    public long getBegin() {
      return begin_;
    }
    /**
     * int64 begin = 1;
     */
    public Builder setBegin(long value) {
      begin_ = value;
      onChanged();
      return this;
    }
    /**
     * int64 begin = 1;
     */
    public Builder clearBegin() {
      begin_ = 0L;
      onChanged();
      return this;
    }

    private long end_ ;
    /**
     * int64 end = 2;
     */
    public long getEnd() {
      return end_;
    }
    /**
     * int64 end = 2;
     */
    public Builder setEnd(long value) {
      end_ = value;
      onChanged();
      return this;
    }
    /**
     * int64 end = 2;
     */
    public Builder clearEnd() {
      end_ = 0L;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final org.nd4j.shade.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:org.nd4j.ir.TensorProto.Segment)
  }

  // @@protoc_insertion_point(class_scope:org.nd4j.ir.TensorProto.Segment)
  private static final org.nd4j.ir.TensorNamespace.TensorProto.Segment DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.nd4j.ir.TensorNamespace.TensorProto.Segment();
  }

  public static org.nd4j.ir.TensorNamespace.TensorProto.Segment getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Fixed: PARSER had been stripped to the raw Parser/AbstractParser types;
  // restored the <Segment> type arguments emitted by protoc.
  private static final org.nd4j.shade.protobuf.Parser<Segment>
      PARSER = new org.nd4j.shade.protobuf.AbstractParser<Segment>() {
    @java.lang.Override
    public Segment parsePartialFrom(
        org.nd4j.shade.protobuf.CodedInputStream input,
        org.nd4j.shade.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.nd4j.shade.protobuf.InvalidProtocolBufferException {
      return new Segment(input, extensionRegistry);
    }
  };

  public static org.nd4j.shade.protobuf.Parser<Segment> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.nd4j.shade.protobuf.Parser<Segment> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public org.nd4j.ir.TensorNamespace.TensorProto.Segment getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
public static final int DIMS_FIELD_NUMBER = 1;
// Backing storage for "repeated int64 dims = 1" (the tensor shape).
private org.nd4j.shade.protobuf.Internal.LongList dims_;
/**
* The shape of the tensor.
*
* repeated int64 dims = 1;
*/
public int getDimsCount() {
return dims_.size();
}
/**
* The shape of the tensor.
*
* repeated int64 dims = 1;
*/
public long getDims(int index) {
return dims_.getLong(index);
}
// Cached packed-encoding byte size for dims; -1 until computed by getSerializedSize().
private int dimsMemoizedSerializedSize = -1;
public static final int DATA_TYPE_FIELD_NUMBER = 2;
// Raw int32 wire value; expected to be a TensorProto.DataType number.
private int dataType_;
/**
* The data type of the tensor.
* This field MUST have a valid TensorProto.DataType value
*
* int32 data_type = 2;
*/
public int getDataType() {
return dataType_;
}
public static final int SEGMENT_FIELD_NUMBER = 3;
// Null when the segment field was never set on the wire.
private org.nd4j.ir.TensorNamespace.TensorProto.Segment segment_;
/**
* Returns true if the segment field was explicitly set.
*
* .org.nd4j.ir.TensorProto.Segment segment = 3;
*/
public boolean hasSegment() {
return segment_ != null;
}
/**
* Returns the segment, or the default instance when unset (never null).
*
* .org.nd4j.ir.TensorProto.Segment segment = 3;
*/
public org.nd4j.ir.TensorNamespace.TensorProto.Segment getSegment() {
return segment_ == null ? org.nd4j.ir.TensorNamespace.TensorProto.Segment.getDefaultInstance() : segment_;
}
/**
* .org.nd4j.ir.TensorProto.Segment segment = 3;
*/
public org.nd4j.ir.TensorNamespace.TensorProto.SegmentOrBuilder getSegmentOrBuilder() {
return getSegment();
}
public static final int FLOAT_DATA_FIELD_NUMBER = 4;
// Backing storage for "repeated float float_data = 4 [packed = true]".
private org.nd4j.shade.protobuf.Internal.FloatList floatData_;
/**
* For float and complex64 values
* Complex64 tensors are encoded as a single array of floats,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
*
* repeated float float_data = 4 [packed = true];
*/
public int getFloatDataCount() {
return floatData_.size();
}
/**
* For float and complex64 values
* Complex64 tensors are encoded as a single array of floats,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
*
* repeated float float_data = 4 [packed = true];
*/
public float getFloatData(int index) {
return floatData_.getFloat(index);
}
// Cached packed-encoding byte size for float_data; -1 until computed by getSerializedSize().
private int floatDataMemoizedSerializedSize = -1;
public static final int INT32_DATA_FIELD_NUMBER = 5;
// Backing storage for "repeated int32 int32_data = 5 [packed = true]".
private org.nd4j.shade.protobuf.Internal.IntList int32Data_;
/**
* For int32, uint8, int8, uint16, int16, bool, and float16 values
* float16 values must be bit-wise converted to an uint16_t prior
* to writing to the buffer.
* When this field is present, the data_type field MUST be
* INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
*
* repeated int32 int32_data = 5 [packed = true];
*/
public int getInt32DataCount() {
return int32Data_.size();
}
/**
* For int32, uint8, int8, uint16, int16, bool, and float16 values
* float16 values must be bit-wise converted to an uint16_t prior
* to writing to the buffer.
* When this field is present, the data_type field MUST be
* INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
*
* repeated int32 int32_data = 5 [packed = true];
*/
public int getInt32Data(int index) {
return int32Data_.getInt(index);
}
// Cached packed-encoding byte size for int32_data; -1 until computed by getSerializedSize().
private int int32DataMemoizedSerializedSize = -1;
public static final int STRING_DATA_FIELD_NUMBER = 6;
// Fixed: field had been stripped to a raw java.util.List, which made
// getStringData(int) return Object where ByteString is required (a compile
// error); restored the <ByteString> type argument emitted by protoc.
private java.util.List<org.nd4j.shade.protobuf.ByteString> stringData_;
/**
 * For strings.
 * Each element of string_data is a UTF-8 encoded Unicode
 * string. No trailing null, no leading BOM. The protobuf "string"
 * scalar type is not used to match ML community conventions.
 * When this field is present, the data_type field MUST be STRING
 *
 * repeated bytes string_data = 6;
 */
public int getStringDataCount() {
  return stringData_.size();
}
/**
 * For strings.
 * Each element of string_data is a UTF-8 encoded Unicode
 * string. No trailing null, no leading BOM. The protobuf "string"
 * scalar type is not used to match ML community conventions.
 * When this field is present, the data_type field MUST be STRING
 *
 * repeated bytes string_data = 6;
 */
public org.nd4j.shade.protobuf.ByteString getStringData(int index) {
  return stringData_.get(index);
}
public static final int INT64_DATA_FIELD_NUMBER = 7;
// Backing storage for "repeated int64 int64_data = 7".
// NOTE(review): the int64_data accessors do not appear in this stretch of the
// file — the generated javadoc here was garbled; verify they exist elsewhere.
private org.nd4j.shade.protobuf.Internal.LongList int64Data_;
/**
* Returns the doc string as UTF-8 bytes, caching the conversion.
*
* A human-readable documentation for this tensor. Markdown is allowed.
*
* string doc_string = 12;
*/
public org.nd4j.shade.protobuf.ByteString
getDocStringBytes() {
java.lang.Object ref = docString_;
if (ref instanceof java.lang.String) {
// First access with a String value: convert once and cache the bytes.
org.nd4j.shade.protobuf.ByteString b =
org.nd4j.shade.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
docString_ = b;
return b;
} else {
return (org.nd4j.shade.protobuf.ByteString) ref;
}
}
public static final int RAW_DATA_FIELD_NUMBER = 9;
// Backing storage for "bytes raw_data = 9".
private org.nd4j.shade.protobuf.ByteString rawData_;
/**
* Serializations can either use one of the fields above, or use this
* raw bytes field. The only exception is the string case, where one is
* required to store the content in the repeated bytes string_data field.
* When this raw_data field is used to store tensor value, elements MUST
* be stored in as fixed-width, little-endian order.
* Floating-point data types MUST be stored in IEEE 754 format.
* Complex64 elements must be written as two consecutive FLOAT values, real component first.
* Complex128 elements must be written as two consecutive DOUBLE values, real component first.
* Boolean type MUST be written one byte per tensor element (00000001 for true, 00000000 for false).
* Note: the advantage of specific field rather than the raw_data field is
* that in some cases (e.g. int data), protobuf does a better packing via
* variable length storage, and may lead to smaller binary footprint.
* When this field is present, the data_type field MUST NOT be STRING or UNDEFINED
*
* bytes raw_data = 9;
*/
public org.nd4j.shade.protobuf.ByteString getRawData() {
return rawData_;
}
public static final int EXTERNAL_DATA_FIELD_NUMBER = 13;
private java.util.List externalData_;
/**
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
*
* repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;
*/
public int getExternalDataCount() {
  return this.externalData_.size();
}
/**
 * Returns the external_data entry at {@code index}.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file. Recognized keys: "location" (required,
 * POSIX path relative to the model file), "offset", "length" (optional,
 * integers stored as strings) and "checksum" (optional SHA1 digest).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 */
public org.nd4j.ir.TensorNamespace.StringStringEntryProtoOrBuilder getExternalDataOrBuilder(int index) {
  return this.externalData_.get(index);
}
// Field number of data_location in the TensorProto schema.
public static final int DATA_LOCATION_FIELD_NUMBER = 14;
// Raw wire value of the DataLocation enum; see getDataLocation() for the typed view.
private int dataLocation_;
/**
 * Returns the raw enum wire value of data_location.
 * If this field was never set, data is stored in raw_data (if set),
 * otherwise in the matching type-specific field.
 *
 * <code>.org.nd4j.ir.TensorProto.DataLocation data_location = 14;</code>
 */
public int getDataLocationValue() {
  return this.dataLocation_;
}
/**
 * Returns data_location as a typed enum; unknown wire values map to
 * {@code UNRECOGNIZED} rather than throwing.
 * If this field was never set, data is stored in raw_data (if set),
 * otherwise in the matching type-specific field.
 *
 * <code>.org.nd4j.ir.TensorProto.DataLocation data_location = 14;</code>
 */
public org.nd4j.ir.TensorNamespace.TensorProto.DataLocation getDataLocation() {
  // forNumber(int) is the non-deprecated replacement for valueOf(int) in
  // protobuf-generated enums, so the @SuppressWarnings("deprecation") goes away.
  org.nd4j.ir.TensorNamespace.TensorProto.DataLocation result =
      org.nd4j.ir.TensorNamespace.TensorProto.DataLocation.forNumber(dataLocation_);
  return result == null ? org.nd4j.ir.TensorNamespace.TensorProto.DataLocation.UNRECOGNIZED : result;
}
// Field number of double_data in the TensorProto schema.
public static final int DOUBLE_DATA_FIELD_NUMBER = 10;
// Backing storage for repeated double double_data = 10 [packed = true].
private org.nd4j.shade.protobuf.Internal.DoubleList doubleData_;
/**
 * Returns the number of elements in double_data.
 *
 * double_data holds DOUBLE tensors, or COMPLEX128 tensors encoded as
 * interleaved (real, imaginary) pairs — e.g. [1.0 + 2.0i, 3.0 + 4.0i] is
 * stored as [1.0, 2.0, 3.0, 4.0]. When present, data_type MUST be DOUBLE
 * or COMPLEX128.
 *
 * <code>repeated double double_data = 10 [packed = true];</code>
 */
public int getDoubleDataCount() {
  return this.doubleData_.size();
}
/**
 * Returns the double_data element at {@code index}.
 *
 * double_data holds DOUBLE tensors, or COMPLEX128 tensors encoded as
 * interleaved (real, imaginary) pairs. When present, data_type MUST be
 * DOUBLE or COMPLEX128.
 *
 * <code>repeated double double_data = 10 [packed = true];</code>
 */
public double getDoubleData(int index) {
  return this.doubleData_.getDouble(index);
}
// Cached serialized size of the packed double_data field; -1 means not yet computed.
private int doubleDataMemoizedSerializedSize = -1;
// Field number of uint64_data in the TensorProto schema.
public static final int UINT64_DATA_FIELD_NUMBER = 11;
// Backing storage for repeated uint64 uint64_data = 11; holds UINT32/UINT64 values.
private org.nd4j.shade.protobuf.Internal.LongList uint64Data_;
/**
 * Returns the number of elements in float_data.
 *
 * float_data holds FLOAT tensors, or COMPLEX64 tensors encoded as
 * interleaved (real, imaginary) pairs — e.g. [1.0 + 2.0i, 3.0 + 4.0i] is
 * stored as [1.0, 2.0, 3.0, 4.0]. When present, data_type MUST be FLOAT
 * or COMPLEX64.
 *
 * <code>repeated float float_data = 4 [packed = true];</code>
 */
public int getFloatDataCount() {
  return this.floatData_.size();
}
/**
 * Returns the number of elements in int32_data.
 *
 * int32_data holds int32, uint8, int8, uint16, int16, bool and float16
 * values; float16 values must be bit-wise converted to uint16_t before
 * writing. When present, data_type MUST be INT32, INT16, INT8, UINT16,
 * UINT8, BOOL, or FLOAT16.
 *
 * <code>repeated int32 int32_data = 5 [packed = true];</code>
 */
public int getInt32DataCount() {
  return this.int32Data_.size();
}
/**
 * Replaces the int32_data element at {@code index} with {@code value}.
 *
 * int32_data holds int32, uint8, int8, uint16, int16, bool and float16
 * values (float16 bit-cast to uint16 before writing). When present,
 * data_type MUST be INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16.
 *
 * <code>repeated int32 int32_data = 5 [packed = true];</code>
 */
public Builder setInt32Data(int index, int value) {
  ensureInt32DataIsMutable();
  this.int32Data_.setInt(index, value);
  onChanged();
  return this;
}
/**
 * Returns the number of elements in string_data.
 *
 * Each element of string_data is a UTF-8 encoded Unicode string — no
 * trailing null, no leading BOM. When present, data_type MUST be STRING.
 *
 * <code>repeated bytes string_data = 6;</code>
 */
public int getStringDataCount() {
  return this.stringData_.size();
}
/**
 * Replaces the string_data element at {@code index} with {@code value}.
 *
 * Each element of string_data is a UTF-8 encoded Unicode string — no
 * trailing null, no leading BOM. When present, data_type MUST be STRING.
 *
 * <code>repeated bytes string_data = 6;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setStringData(int index, org.nd4j.shade.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  ensureStringDataIsMutable();
  this.stringData_.set(index, value);
  onChanged();
  return this;
}
/**
 * Appends {@code value} to string_data.
 *
 * Each element of string_data is a UTF-8 encoded Unicode string — no
 * trailing null, no leading BOM. When present, data_type MUST be STRING.
 *
 * <code>repeated bytes string_data = 6;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addStringData(org.nd4j.shade.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  ensureStringDataIsMutable();
  this.stringData_.add(value);
  onChanged();
  return this;
}
/**
 * Sets raw_data, the serialized tensor contents as raw bytes.
 *
 * Serializations may use either the type-specific fields or this raw bytes
 * field (strings excepted — they must use string_data). Elements are stored
 * fixed-width, little-endian; floating point in IEEE 754; complex values as
 * two consecutive components, real first; booleans one byte per element
 * (00000001 true, 00000000 false). Type-specific fields can yield a smaller
 * binary via protobuf varint packing. When this field is present, data_type
 * MUST NOT be STRING or UNDEFINED.
 *
 * <code>bytes raw_data = 9;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setRawData(org.nd4j.shade.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  this.rawData_ = value;
  onChanged();
  return this;
}
/**
 * Returns the number of external_data entries, reading from the nested
 * builder when one exists, otherwise from the plain list.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file: "location" (required), "offset", "length",
 * "checksum" (optional).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 */
public int getExternalDataCount() {
  return externalDataBuilder_ == null
      ? externalData_.size()
      : externalDataBuilder_.getCount();
}
/**
 * Replaces the external_data entry at {@code index} with {@code value}.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file: "location" (required), "offset", "length",
 * "checksum" (optional).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setExternalData(int index, org.nd4j.ir.TensorNamespace.StringStringEntryProto value) {
  // Delegate to the nested field builder when one has been created.
  if (externalDataBuilder_ != null) {
    externalDataBuilder_.setMessage(index, value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureExternalDataIsMutable();
  externalData_.set(index, value);
  onChanged();
  return this;
}
/**
 * Appends {@code value} to the external_data list.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file: "location" (required), "offset", "length",
 * "checksum" (optional).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addExternalData(org.nd4j.ir.TensorNamespace.StringStringEntryProto value) {
  // Delegate to the nested field builder when one has been created.
  if (externalDataBuilder_ != null) {
    externalDataBuilder_.addMessage(value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureExternalDataIsMutable();
  externalData_.add(value);
  onChanged();
  return this;
}
/**
 * Inserts {@code value} into the external_data list at {@code index}.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file: "location" (required), "offset", "length",
 * "checksum" (optional).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder addExternalData(int index, org.nd4j.ir.TensorNamespace.StringStringEntryProto value) {
  // Delegate to the nested field builder when one has been created.
  if (externalDataBuilder_ != null) {
    externalDataBuilder_.addMessage(index, value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureExternalDataIsMutable();
  externalData_.add(index, value);
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for the external_data entry at {@code index},
 * forcing the list into builder-backed mode.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file: "location" (required), "offset", "length",
 * "checksum" (optional).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 */
public org.nd4j.ir.TensorNamespace.StringStringEntryProto.Builder getExternalDataBuilder(int index) {
  return getExternalDataFieldBuilder().getBuilder(index);
}
/**
 * Returns the external_data entry (message or builder view) at {@code index},
 * reading from the nested builder when one exists.
 *
 * external_data stores key-value pairs describing where raw tensor bytes are
 * kept outside the protobuf file: "location" (required), "offset", "length",
 * "checksum" (optional).
 *
 * <code>repeated .org.nd4j.ir.StringStringEntryProto external_data = 13;</code>
 */
public org.nd4j.ir.TensorNamespace.StringStringEntryProtoOrBuilder getExternalDataOrBuilder(int index) {
  return externalDataBuilder_ == null
      ? externalData_.get(index)
      : externalDataBuilder_.getMessageOrBuilder(index);
}
/**
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* Data can be stored inside the protobuf file using type-specific fields or raw_data.
* Alternatively, raw bytes data can be stored in an external file, using the external_data field.
* external_data stores key-value pairs describing data location. Recognized keys are:
* - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
* protobuf model was stored
* - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
* Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
* - "length" (optional) - number of bytes containing data. Integer stored as string.
* - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
*
* For double
* Complex128 tensors are encoded as a single array of doubles,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
*
* For double
* Complex128 tensors are encoded as a single array of doubles,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
*
*
* repeated double double_data = 10 [packed = true];
*/
public int getDoubleDataCount() {
  // Number of entries currently held in the packed double_data field.
  return this.doubleData_.size();
}
/**
*
* For double
* Complex128 tensors are encoded as a single array of doubles,
* with the real components appearing in odd numbered positions,
* and the corresponding imaginary component appearing in the
* subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
* is encoded as [1.0, 2.0 ,3.0 ,4.0]
* When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
 *
*